max_stars_repo_path (stringlengths 3-269) | max_stars_repo_name (stringlengths 4-119) | max_stars_count (int64 0-191k) | id (stringlengths 1-7) | content (stringlengths 6-1.05M) | score (float64 0.23-5.13) | int_score (int64 0-5) |
---|---|---|---|---|---|---|
abc/226/a/answer.py | TakuyaNoguchi/atcoder | 0 | 12788051 | X = input()
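# ABC 226 A: round the decimal string X to the nearest integer by keeping the
# integer part and adding 1 when the first fractional digit is 5 or more.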
a, b = X.split('.')
answer = int(a)
if int(b[0]) >= 5:
answer += 1
print(answer) | 3.234375 | 3 |
boj/bruteforce/boj_1107.py | ruslanlvivsky/python-algorithm | 3 | 12788052 | import sys
num = int(sys.stdin.readline().strip())
n = int(sys.stdin.readline().strip())
if n:
btn = sys.stdin.readline().strip().split()
else:
btn = []
result = abs(100 - num)
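# Brute force: result starts as |100 - num|, i.e. pressing only +/- from channel 100.
# For each candidate channel i, the for/else below reaches the else branch only when
# no digit of i is a broken button; the cost is then the number of digits typed
# plus |i - num| further +/- presses.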
for i in range(1_000_000):
for j in str(i):
if j in btn:
break
else:
result = min(result, len(str(i)) + abs(i - num))
sys.stdout.write(str(result))
| 2.5 | 2 |
zagred/tools/receptor.py | kaetaen/zagred | 1 | 12788053 | import speech_recognition
import pyttsx3
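# Listens on the default microphone and transcribes Brazilian Portuguese speech with
# Google's recognizer; the user-facing strings are Portuguese ("Ouvindo..." means
# "Listening...", "Não entendi" means "I didn't understand").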
class Receptor:
def listen(self):
microphone = speech_recognition.Recognizer()
phrase = ''
with speech_recognition.Microphone() as source:
microphone.adjust_for_ambient_noise(source)
audio = microphone.listen(source)
try:
print("Ouvindo... ")
phrase = microphone.recognize_google(audio,language='pt-BR')
except speech_recognition.UnknownValueError:
self._speak("Não entendi")
if (phrase):
return phrase
def _speak(self, phrase):
speaker = pyttsx3.init()
voices = speaker.getProperty('voices')
speaker.setProperty('voice', voices[53].id)
rate = speaker.getProperty('rate')
speaker.setProperty('rate', rate-80)
speaker.say(phrase)
speaker.runAndWait()
| 3.015625 | 3 |
proxygrab/package/__init__.py | Divkix/ProxyGrab | 15 | 12788054 | """initialise main workplace."""
| 0.980469 | 1 |
django_mongoengine/__init__.py | lsaint/django-mongoengine | 0 | 12788055 | from .document import (
Document,
DynamicDocument,
EmbeddedDocument,
DynamicEmbeddedDocument,
)
from .queryset import QuerySet, QuerySetNoCache
__all__ = [
"QuerySet",
"QuerySetNoCache",
"Document",
"DynamicDocument",
"EmbeddedDocument",
"DynamicEmbeddedDocument",
]
# default_app_config = 'django_mongoengine.apps.DjangoMongoEngineConfig'
| 1.679688 | 2 |
test/test_alert_definition_api.py | hpcc-systems/uptrends-python | 0 | 12788056 | # coding: utf-8
"""
Uptrends API v4
This document describes Uptrends API version 4. This Swagger environment also lets you execute API methods directly. Please note that this is not a sandbox environment: these API methods operate directly on your actual Uptrends account. For more information, please visit https://www.uptrends.com/api. # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import uptrends
from uptrends.api.alert_definition_api import AlertDefinitionApi # noqa: E501
from uptrends.rest import ApiException
class TestAlertDefinitionApi(unittest.TestCase):
"""AlertDefinitionApi unit test stubs"""
def setUp(self):
self.api = uptrends.api.alert_definition_api.AlertDefinitionApi() # noqa: E501
def tearDown(self):
pass
def test_alert_definition_add_monitor_group_to_alert_definition(self):
"""Test case for alert_definition_add_monitor_group_to_alert_definition
Adds a monitor group to the specified alert definition. # noqa: E501
"""
pass
def test_alert_definition_add_monitor_to_alert_definition(self):
"""Test case for alert_definition_add_monitor_to_alert_definition
Adds a monitor to the specified alert definition. # noqa: E501
"""
pass
def test_alert_definition_add_operator_group_to_escalation_level(self):
"""Test case for alert_definition_add_operator_group_to_escalation_level
Adds an operator group to the specified escalation level. # noqa: E501
"""
pass
def test_alert_definition_add_operator_to_escalation_level(self):
"""Test case for alert_definition_add_operator_to_escalation_level
Adds an operator to the specified escalation level. # noqa: E501
"""
pass
def test_alert_definition_create_alert_definition(self):
"""Test case for alert_definition_create_alert_definition
Creates a new alert definition. # noqa: E501
"""
pass
def test_alert_definition_delete_alert_definition(self):
"""Test case for alert_definition_delete_alert_definition
Deletes an existing alert definition. # noqa: E501
"""
pass
def test_alert_definition_get_all_alert_definitions(self):
"""Test case for alert_definition_get_all_alert_definitions
Gets a list of all alert definitions. # noqa: E501
"""
pass
def test_alert_definition_get_all_members(self):
"""Test case for alert_definition_get_all_members
Gets a list of all monitor and monitor group guids of the specified alert definition. # noqa: E501
"""
pass
def test_alert_definition_get_escalation_level(self):
"""Test case for alert_definition_get_escalation_level
Gets the escalation level information of the specified alert definition. # noqa: E501
"""
pass
def test_alert_definition_get_escalation_level_integration(self):
"""Test case for alert_definition_get_escalation_level_integration
Gets the integrations for the specified escalation level. # noqa: E501
"""
pass
def test_alert_definition_get_escalation_level_operator(self):
"""Test case for alert_definition_get_escalation_level_operator
Gets the operator and operator group guids for the specified escalation level. # noqa: E501
"""
pass
def test_alert_definition_get_specified_alert_definitions(self):
"""Test case for alert_definition_get_specified_alert_definitions
Gets the specified alert definition. # noqa: E501
"""
pass
def test_alert_definition_patch_alert_definition(self):
"""Test case for alert_definition_patch_alert_definition
Partially updates the definition of the specified alert definition. # noqa: E501
"""
pass
def test_alert_definition_put_alert_definition(self):
"""Test case for alert_definition_put_alert_definition
Updates the definition of the specified alert definition. # noqa: E501
"""
pass
def test_alert_definition_remove_monitor_from_alert_definition(self):
"""Test case for alert_definition_remove_monitor_from_alert_definition
Removes a monitor for the specified alert definition. # noqa: E501
"""
pass
def test_alert_definition_remove_monitor_group_from_alert_definition(self):
"""Test case for alert_definition_remove_monitor_group_from_alert_definition
Removes a monitor group for the specified alert definition. # noqa: E501
"""
pass
def test_alert_definition_remove_operator_from_escalation_level(self):
"""Test case for alert_definition_remove_operator_from_escalation_level
Removes an operator for the specified escalation level. # noqa: E501
"""
pass
def test_alert_definition_remove_operator_group_from_escalation_level(self):
"""Test case for alert_definition_remove_operator_group_from_escalation_level
Removes an operator group for the specified escalation level. # noqa: E501
"""
pass
def test_alert_definition_update_integration_for_escalation_with_patch(self):
"""Test case for alert_definition_update_integration_for_escalation_with_patch
Partially updates an integration to the specified escalation level. # noqa: E501
"""
pass
def test_alert_definition_update_integration_for_escalation_with_put(self):
"""Test case for alert_definition_update_integration_for_escalation_with_put
Updates an integration for the specified escalation level. # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
| 1.671875 | 2 |
tests/test_utils.py | jannikluhn/ethereum-accounts | 0 | 12788057 | <filename>tests/test_utils.py<gh_stars>0
import pytest
from ethereum.utils import (
privtoaddr,
privtopub,
)
from eth_utils import (
remove_0x_prefix,
encode_hex,
decode_hex,
is_0x_prefixed,
is_checksum_address,
is_hex,
is_same_address,
)
from eth_accounts import (
random_private_key,
private_key_to_address,
private_key_to_public_key,
public_key_to_address,
)
from eth_accounts.utils import (
normalize_message,
normalize_password,
normalize_private_key,
normalize_public_key,
normalize_signature,
)
@pytest.mark.parametrize('key', [random_private_key() for _ in range(100)])
def test_random_private_key(key):
assert is_hex(key)
assert is_0x_prefixed(key)
assert len(key) == 64 + 2
@pytest.mark.parametrize('key', [random_private_key() for _ in range(100)])
def test_private_key_to_public_key(key):
# tests against pyethereum
reference = encode_hex(privtopub(decode_hex(key)))
public_key = private_key_to_public_key(key)
assert is_0x_prefixed(public_key)
assert is_hex(public_key)
assert len(public_key) == 130 + 2
assert public_key == reference
assert private_key_to_public_key(decode_hex(key)) == reference
assert private_key_to_public_key(remove_0x_prefix(key)) == reference
@pytest.mark.parametrize('key', [random_private_key() for _ in range(100)])
def test_private_key_to_address(key):
# tests against pyethereum
reference = encode_hex(privtoaddr(decode_hex(key)))
address = private_key_to_address(key)
assert is_0x_prefixed(address)
assert is_checksum_address(address)
assert is_same_address(address, reference)
assert is_same_address(private_key_to_address(decode_hex(key)), reference)
assert is_same_address(private_key_to_address(remove_0x_prefix(key)), reference)
@pytest.mark.parametrize('key', [random_private_key() for _ in range(100)])
def test_public_key_to_address(key):
# tests against pyethereum
public_key = encode_hex(privtopub(decode_hex(key)))
reference = privtoaddr(decode_hex(key))
address = public_key_to_address(public_key)
assert is_0x_prefixed(address)
assert is_checksum_address(address)
assert is_same_address(address, reference)
assert is_same_address(public_key_to_address(decode_hex(public_key)), reference)
assert is_same_address(public_key_to_address(remove_0x_prefix(public_key)), reference)
@pytest.mark.parametrize(('input', 'output', 'error'), [
('0x0000000000000000000000000000000000000000000000000000000000000000', None, ValueError),
('0x0000000000000000000000000000000000000000000000000000000000000001', None, None),
('0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140', None, None),
('0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141', None, ValueError),
('0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'.upper(),
'0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', None),
('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', None),
('nohex', None, ValueError),
(-1, None, ValueError),
(0, None, ValueError),
(1, '0x0000000000000000000000000000000000000000000000000000000000000001', None),
(0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140,
'0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140',
None),
(0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141, None, ValueError),
('0x01', '0x0000000000000000000000000000000000000000000000000000000000000001', None),
('0x000000000000000000000000000000000000000000000000000000000000000001',
'0x0000000000000000000000000000000000000000000000000000000000000001', None),
(b'\0', None, ValueError),
(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', None, ValueError),
(b'\x01', '0x0000000000000000000000000000000000000000000000000000000000000001', None),
(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01',
'0x0000000000000000000000000000000000000000000000000000000000000001', None),
(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01',
'0x0000000000000000000000000000000000000000000000000000000000000001', None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xba'
b'\xae\xdc\xe6\xafH\xa0;\xbf\xd2^\x8c\xd06A@',
'0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140', None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xba'
b'\xae\xdc\xe6\xafH\xa0;\xbf\xd2^\x8c\xd06AA', None, ValueError),
(None, None, TypeError),
(1.0, None, TypeError),
([], None, TypeError)
])
def test_private_key_normalization(input, output, error):
if error is None:
if output is None:
output = input
assert output == normalize_private_key(input)
else:
with pytest.raises(error):
normalize_private_key(input)
@pytest.mark.parametrize(['input', 'output', 'error'], [
('0x0000000000000000000000000000000000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000000000000000', None, None),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, None),
('0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF'
'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
'0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, ValueError),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, ValueError),
('nohex', None, ValueError),
(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
'0x0000000000000000000000000000000000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000000000000000',
None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
'0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
None, ValueError),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
None, ValueError),
(None, None, TypeError),
(5, None, TypeError),
(5.0, None, TypeError),
([], None, TypeError)
])
def test_public_key_normalization(input, output, error):
if error is None:
if output is None:
output = input
assert output == normalize_public_key(input)
else:
with pytest.raises(error):
normalize_public_key(input)
@pytest.mark.parametrize(['input', 'output', 'error'], [
(b'', None, None),
(b'password', None, None),
('password', None, TypeError),
(None, None, TypeError),
(5, None, TypeError),
(5.0, None, TypeError),
([], None, TypeError),
([b'password'], None, TypeError)
])
def test_password_normalization(input, output, error):
if error is None:
if output is None:
output = input
assert output == normalize_password(input)
else:
with pytest.raises(error):
normalize_password(input)
@pytest.mark.parametrize(['input', 'output', 'error'], [
(b'', None, None),
(b'message', None, None),
('0xabcd', b'\xab\xcd', None),
('abcd', b'\xab\xcd', None),
('0xAbCd', b'\xab\xcd', None),
('nohex', None, ValueError),
(None, None, TypeError),
(5, None, TypeError),
(5.0, None, TypeError),
([], None, TypeError),
([b'message'], None, TypeError)
])
def test_message_normalization(input, output, error):
if error is None:
if output is None:
output = input
assert output == normalize_message(input)
else:
with pytest.raises(error):
normalize_message(input)
@pytest.mark.parametrize(['input', 'output', 'error'], [
('0x0000000000000000000000000000000000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000000000000000', None, None),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, None),
('0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF'
'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
'0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, ValueError),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, ValueError),
('nohex', None, ValueError),
(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
'0x0000000000000000000000000000000000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000000000000000',
None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
'0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
None, ValueError),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
None, ValueError),
(None, None, TypeError),
(5, None, TypeError),
(5.0, None, TypeError),
([], None, TypeError)
])
def test_signature_normalization(input, output, error):
if error is None:
if output is None:
output = input
assert output == normalize_signature(input)
else:
with pytest.raises(error):
normalize_signature(input)
| 2.453125 | 2 |
Python/Examples/Macros/Tracker_CreatePoints.py | halmusaibeli/RoboDK-API | 0 | 12788058 | <reponame>halmusaibeli/RoboDK-API
# This macro shows how we can take a group of laser tracker measurements
# from a laser tracker and save it to a file
CREATE_MEASUREMENTS = False
# Set the name of the reference frame that measurements will be attached to
REFERENCE_NAME = "Tracker Reference"
#REFERENCE_NAME = None
MEASUREMENT_RATE_S = 50
MEASUREMENT_PAUSE_S = 1 / MEASUREMENT_RATE_S
# Start the RoboDK API
from robodk.robolink import *
from robodk.robomath import *
RDK = Robolink()
# Get the reference frame if available
if REFERENCE_NAME is None:
reference = None
else:
reference = RDK.Item(REFERENCE_NAME, ITEM_TYPE_FRAME)
if not reference.Valid():
#reference = None
reference = RDK.AddFrame(REFERENCE_NAME)
# Start the counter
tic()
count = 0
path_file = RDK.getParam('PATH_OPENSTATION') or RDK.getParam('PATH_DESKTOP')
is_lasertracker = True
# Open a csv file in the same folder as the RDK file to store the data
with open(path_file + '/tracker_point_test.csv', 'w') as csvfile:
# Infinite loop until we decide to stop
while True:
count = count + 1
data = "Invalid measurement"
measurement = None
if is_lasertracker:
measurement = RDK.LaserTracker_Measure()
if measurement is not None:
# Block rendering (faster)
RDK.Render(False)
if len(measurement) >= 6:
# We have a pose measurement (eg Leica 6 DOF T-Mac)
x, y, z, w, p, r = measurement
data = '%.3f, %.6f, %.6f, %.6f, %.6f, %.6f, %.6f' % (toc(), x, y, z, w, p, r)
RDK.ShowMessage("Measured XYZWPR: " + data, False)
# Convert position and orientation Euler angles to poses (rot functions are in radians)
pose_tool_wrt_tracker = transl(x, y, z) * rotx(w * pi / 180) * roty(p * pi / 180) * rotz(r * pi / 180)
# Add the object as a reference (easier to copy/paste coordinates):
if CREATE_MEASUREMENTS:
item = RDK.AddFrame('Pose %i' % count)
item.setPose(pose_tool_wrt_tracker)
# Set the reference relative to the tracker reference if available
if reference is not None:
item.setParent(reference)
else:
# We have an XYZ tracker measurement
x, y, z = measurement
# Display the data as [time, x,y,z]
data = '%.3f, %.6f, %.6f, %.6f' % (toc(), x, y, z)
RDK.ShowMessage("Measured XYZ: " + data, False)
# Add the object as a point object
#item = RDK.AddPoints([[x,y,z]])
#item.setName('Point %i' % count)
# Add the object as a reference (easier to copy/paste coordinates):
#item = RDK.AddFrame('Point %i' % count)
#item.setPose(transl(x,y,z))
#if reference is not None:
# item.setParent(reference)
else:
# Stop trying to use the laser tracker
is_lasertracker = False
#RDK.ShowMessage("Unable to measure with a laser tracker. Trying with pose input", False)
#pause(2)
#continue
# Take the measurement (make sure we are connected to the RoboDK API)
pose1, pose2, np1, np2, time, aux = RDK.StereoCamera_Measure()
if np1 == 0:
print("Unable to see the tracker")
else:
#print(pose1)
#print(pose2)
station_2_tracker = pose1
if reference is not None:
reference.setPoseAbs(station_2_tracker)
x, y, z, a, b, c = Pose_2_KUKA(station_2_tracker)
data = '%.3f, %.6f, %.6f, %.6f, %.6f, %.6f, %.6f' % (toc(), x, y, z, a, b, c)
# Save the data to the CSV file
print(data)
csvfile.write(data + '\n')
# Set default rendering back
RDK.Render(True)
# Take a break, if desired:
pause(MEASUREMENT_PAUSE_S)
| 2.703125 | 3 |
Python2.x/Python2.x-0-basic/004_loop_1.1_while.py | mrxuyong/Python-dev | 0 | 12788059 | # -*- coding: UTF-8 -*-
# @desc while loop
count = 10
while (count > 0):
print 'the count is:', count;
count = count - 1;
print 'it is over...'
| 3.921875 | 4 |
Q-A.py | lzps/Q-A | 1 | 12788060 | <gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# There is a UTF-8 display issue
import os, random
t = os.listdir(os.path.split(os.path.realpath(__file__))[0])
f, j, csv = [], 0, []
for i in t:
if '.csv' in i:
j = j + 1
f.append(i)
print(str(j) + ' --> ' + i[0:-4])
with open(f[int(input('\n 请选择: ')) - 1], 'rb') as f:
exec(f.read())
headers = csv.pop(0).split('|')
for i, j in enumerate(headers):
print(str(i) + ' -- ' + j + '(例如:' + csv[0].split('|')[i] + ')')
Q = [int(x) for x in input('请选择提供什么信息(即已知、输出)' + '\n 可多选,以","分隔 : ').split(',')]
A = [int(x) for x in input('请接着选择求什么(即输入)' + '\n 可多选,以"|"分隔 : ').split('|')]
print('\n1.做选择题' + '\n2.做填空题')
t = input(' 请选择: ') # 如果不选2来做填空题,就是选1
out = 0 if t == '2' else int(input('\n输出几个选项?\n 请输入:'.format(len(csv))))-1
while True:
random.shuffle(csv)
for f in csv:
if os.system('cls'):
t = os.system('clear')
doing = f.split('|')
question, known, rightA = [], [], []
for i in Q:
known.append(headers[i] + ':' + doing[i])
for i in A:
question.append(headers[i])
rightA.append(str(doing[i]))
print('已知:\n' + ' ' + ','.join(known) + '\n则 ' + '或'.join(question) + '为:')
if not out:
answer = input('请输入:')
else:
csv2, answer = list(csv), []
random.shuffle(csv2)
csv2.remove(f)
csv2.insert(random.randint(0, out), f)
for i, j in enumerate(csv2):
if i > out:
break
for k, l in enumerate(j.split('|')):
if k in A:
answer.append(l)
for i, j in enumerate(answer):
print(str(i) + ' --> ' + str(j))
answer = answer[int(input('请输入:'))]
print('The Right Answer: ' + 'or'.join(rightA))
if answer in rightA:
print('Congratulations, You\'re right!')
input('Press Enter to continue . . .')
| 2.75 | 3 |
django_mt/mt_core/mysql/base.py | maheshgawali/django-multi-tenancy | 0 | 12788061 | from importlib import import_module
import logging
import time
import json
from django.core.exceptions import ImproperlyConfigured
from django.conf import settings
import django.db.backends.mysql.base
WRAPPED_BACKEND = import_module('django.db.backends.mysql.base')
LOGGER = logging.getLogger('django_mt')
def lower_dict(d):
new_dict = dict((k.lower(), v) for k, v in d.items())
return new_dict
class DatabaseWrapper(WRAPPED_BACKEND.DatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
self.db_info = None
# self.default_db_info = None
self.default_db_info = settings.DATABASES['default']
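# _cursor() below performs the per-tenant switch: when db_info has been set on the
# wrapper, a fresh connection is built from that tenant's credentials
# (engine/name/user/password/host/port) instead of reusing the 'default' alias.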
def _cursor(self):
cursor = super(DatabaseWrapper, self)._cursor()
# create a new db object here and set it to connection and db_name
conn_params = None
if self.db_info:
LOGGER.info('--- using %s db connection ---', self.db_info.name)
# now init the connection using data from db_info and set it to cursor
conn_params = {
'ENGINE': self.db_info.engine,
'NAME': self.db_info.name,
'USER': self.db_info.user,
'PASSWORD': self.db_info.password,
'HOST': self.db_info.host,
'PORT': self.db_info.port,
# 'OPTIONS': json.loads(self.db_info.options)
'OPTIONS': {},
'AUTOCOMMIT': False
}
self.settings_dict = conn_params
updated_conn_params = self.get_connection_params()
connection = self.get_new_connection(updated_conn_params)
# self.connection = connection
return connection.cursor()
else:
LOGGER.info('--- using default db connection ---')
return cursor
| 2.109375 | 2 |
tests/heartbeat_test.py | buraksezer/hazelcast-python-client | 3 | 12788062 | <reponame>buraksezer/hazelcast-python-client<filename>tests/heartbeat_test.py
from hazelcast import HazelcastClient
from hazelcast.core import Address
from tests.base import HazelcastTestCase
from hazelcast.config import ClientConfig, ClientProperties
from tests.util import configure_logging, open_connection_to_address
class HeartbeatTest(HazelcastTestCase):
@classmethod
def setUpClass(cls):
configure_logging()
cls.rc = cls.create_rc()
@classmethod
def tearDownClass(cls):
cls.rc.exit()
def setUp(self):
self.cluster = self.create_cluster(self.rc)
self.member = self.rc.startMember(self.cluster.id)
self.config = ClientConfig()
self.config.set_property(ClientProperties.HEARTBEAT_INTERVAL.name, 500)
self.config.set_property(ClientProperties.HEARTBEAT_TIMEOUT.name, 2000)
self.client = HazelcastClient(self.config)
def tearDown(self):
self.client.shutdown()
self.rc.shutdownCluster(self.cluster.id)
def test_heartbeat_stopped(self):
def connection_collector():
connections = []
def collector(c):
connections.append(c)
collector.connections = connections
return collector
heartbeat_stopped_collector = connection_collector()
heartbeat_restored_collector = connection_collector()
self.client.heartbeat.add_listener(on_heartbeat_stopped=heartbeat_stopped_collector,
on_heartbeat_restored=heartbeat_restored_collector)
member2 = self.rc.startMember(self.cluster.id)
addr = Address(member2.host, member2.port)
open_connection_to_address(self.client, addr)
self.simulate_heartbeat_lost(self.client, addr, 2)
def assert_heartbeat_stopped_and_restored():
self.assertEqual(1, len(heartbeat_stopped_collector.connections))
self.assertEqual(1, len(heartbeat_restored_collector.connections))
connection_stopped = heartbeat_stopped_collector.connections[0]
connection_restored = heartbeat_restored_collector.connections[0]
self.assertEqual(connection_stopped._address, (member2.host, member2.port))
self.assertEqual(connection_restored._address, (member2.host, member2.port))
self.assertTrueEventually(assert_heartbeat_stopped_and_restored)
@staticmethod
def simulate_heartbeat_lost(client, address, timeout):
client.connection_manager.connections[address].last_read_in_seconds -= timeout
| 2.203125 | 2 |
digital_badge/urls.py | CSElonewolf/Digital_Badge | 1 | 12788063 | <gh_stars>1-10
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path,include
from . import views
urlpatterns = [
path('admin/', admin.site.urls),
path('', views.home,name='home'),
path('badge/', include('badge.urls')),
]+ static(settings.MEDIA_URL ,document_root=settings.MEDIA_ROOT) | 1.515625 | 2 |
Python/URI 1435.py | LGUSTM/URI | 0 | 12788064 | #-*- coding: utf-8 -*-
x = []
while True:
n = int(input())
if n == 0:
break
else:
for i in range(n):
for j in range(n):
x.append(0)
for i in range(n):
for j in range(n):
x.insert(j, +1)
if n-2 > 0:
for i in range(1, n-1):
for j in range(1, n-1):
x.
| 3.328125 | 3 |
modules/registration.py | diSp1rIt/Bot-channels-info | 0 | 12788065 | <filename>modules/registration.py<gh_stars>0
from telethon import TelegramClient, sync
from cfg_loader import *
client_data = load_configs()
API_ID = client_data['API_ID']
API_HASH = client_data['API_HASH']
client = TelegramClient('bot', API_ID, API_HASH)
client.connect()
loop = None
async def send_code(phone: str) -> None:
global client
await client.send_code_request(phone)
async def sing_in(phone: str, code: int) -> None:
global client
await client.sign_in(phone, code)
def get_client() -> TelegramClient:
global client
return client
def set_loop(custom_loop) -> None:
global loop
loop = custom_loop
| 2.171875 | 2 |
python/ranks.py | alanc10n/snippets | 0 | 12788066 | """
Rank Vectors
Given an array (or list) of scores, return the array of ranks for each value in the array. The largest value has rank 1, the second largest value has rank 2, and so on. Ties should be handled by assigning the same rank to all tied values. For example:
ranks([9,3,6,10]) = [2,4,3,1] and ranks([3,3,3,3,3,5,1]) = [2,2,2,2,2,1,7]
ranks([8, 98, 10, 3, 3, 4, 4, 89]) # [4, 1, 3, 7, 7, 5, 5, 2]
"""
def ranks(scores):
""" Slick solution using the offset in the sorted list of the first occurence
to determine the rank.
"""
sorted_scores = sorted(scores, reverse=True)
rank_list = [sorted_scores.index(n) + 1 for n in scores]
return rank_list
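# A rough alternative sketch (not part of the original snippet): precompute each
# score's rank once via a dict of first occurrences instead of calling
# sorted_scores.index(n) for every element, avoiding the repeated O(n) rescans.
def ranks_fast(scores):
    first_rank = {}
    for i, n in enumerate(sorted(scores, reverse=True), 1):
        first_rank.setdefault(n, i)
    return [first_rank[n] for n in scores]
# e.g. ranks_fast([9, 3, 6, 10]) == [2, 4, 3, 1]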
if __name__ == '__main__':
instr = raw_input('Enter comma-separated numbers (e.g. 3,5,2)')
scores = [int(n) for n in instr.split(',')]
print 'Ranking {0}'.format(scores)
ret = ranks(scores)
print 'Ranks: {0}'.format(ret)
| 4.15625 | 4 |
src/pcbLibraryManager/libraries/libraryAVR.py | NiceCircuits/pcbLibraryManager | 0 | 12788067 | <reponame>NiceCircuits/pcbLibraryManager
# -*- coding: utf-8 -*-
"""
Created on Sun Aug 2 18:04:03 2015
@author: piotr at nicecircuits.com
"""
from libraryManager.library import libraryClass
from libraryManager.part import part
from footprints.footprintSmdQuad import footprintQfp
from footprints.footprintSmdDualRow import footprintSot23
from libraryManager.footprintPrimitive import *
from libraryManager.defaults import *
from symbols.symbolsIC import symbolIC
from libraryManager.symbolPrimitive import *
from libraries.libraryPinheaders import footprintPinheader
from libraryManager.generateLibraries import generateLibraries
from parts.icGenerator import icGenerator
import os
class libraryAVR(libraryClass):
"""
"""
def __init__(self):
super().__init__("niceAVR")
self.parts.append(partAtmega48("ATmega48", "AU"))
self.parts.append(partAtmega48("ATmega328", "AU"))
self.parts.append(partAttiny10("ATtiny10-TSHR"))
self.parts.append(partAvrProg())
# ============== Atmega48/88/168/328 in MLF32/VFQFN32 package ==============
path=os.path.join(os.path.dirname(__file__),"ATmega48_88_168_328_MLF32.ods")
self.parts.extend(icGenerator.generate_advanced(path))
class partAtmega48(part):
"""
Atmega48/88/168/328/P/A part
"""
def __init__(self, name="ATmega48", version="AU"):
name = name + "-" + version
super().__init__(name, "U")
self.symbols.append(symbolAtmega48(name, version))
for density in ["N", "L", "M"]:
self.footprints.append(footprintQfp(32, 0.8, density=density))
class partAttiny10(part):
"""
Attiny4/5/9/10 part
"""
def __init__(self, name="ATtiny10"):
super().__init__(name, defaults.icRefDes)
self.symbols.append(symbolAttiny10(name))
for density in ["N", "L", "M"]:
self.footprints.append(footprintSot23(6, density=density))
class partAvrProg(part):
"""AVR programming connector part
"""
def __init__(self, name="AVR_Prog"):
super().__init__(name, defaults.conRefDes)
self.symbols.append(symbolAvrProg())
for density in ["N", "L", "M"]:
self.footprints.append(footprintPinheader(2, 3, density))
class symbolAtmega48(symbolIC):
"""
Atmega48/88/168/328/P/A symbol
"""
def __init__(self, name, refDes="U", showPinNames=True, showPinNumbers=True):
pinsRight = [
['(PCINT0/CLKO/ICP1) PB0', 12, pinType.IO],
['(PCINT1/OC1A) PB1', 13, pinType.IO],
['(PCINT2/SS/OC1B) PB2', 14, pinType.IO],
['(PCINT3/OC2A/MOSI) PB3', 15, pinType.IO],
['(PCINT4/MISO) PB4', 16, pinType.IO],
['(SCK/PCINT5) PB5', 17, pinType.IO],
['(ADC0/PCINT8) PC0', 23, pinType.IO],
['(ADC1/PCINT9) PC1', 24, pinType.IO],
['(ADC2/PCINT10) PC2', 25, pinType.IO],
['(ADC3/PCINT11) PC3', 26, pinType.IO],
['(ADC4/SDA/PCINT12) PC4', 27, pinType.IO],
['(ADC5/SCL/PCINT13) PC5', 28, pinType.IO],
['(RXD/PCINT16) PD0', 30, pinType.IO],
['(TXD/PCINT17) PD1', 31, pinType.IO],
['(INT0/PCINT18) PD2', 32, pinType.IO],
['(PCINT19/OC2B/INT1) PD3', 1, pinType.IO],
['(PCINT20/XCK/T0) PD4', 2, pinType.IO],
['(PCINT21/OC0B/T1) PD5', 9, pinType.IO],
['(PCINT22/OC0A/AIN0) PD6', 10, pinType.IO],
['(PCINT23/AIN1) PD7', 11, pinType.IO]
]
pinsLeft = [
['PC6 (RESET/PCINT14)', 29, pinType.IO],
None,
['PB6 (PCINT6/XTAL1/TOSC1)', 7, pinType.IO],
['PB7 (PCINT7/XTAL2/TOSC2)', 8, pinType.IO],
None,
None,
None,
None,
['VCC', 4, pinType.pwrIn],
['VCC', 6, pinType.pwrIn],
['AVCC', 18, pinType.pwrIn],
['AREF', 20, pinType.input],
None,
['GND', 3, pinType.pwrIn],
['GND', 5, pinType.pwrIn],
['GND', 21, pinType.pwrIn],
None,
None,
['ADC6', 19, pinType.input],
['ADC7', 22, pinType.input]
]
super().__init__(name, pinsLeft=pinsLeft, pinsRight=pinsRight, width=3000)
class symbolAttiny10(symbolIC):
"""
Attiny4/5/9/10 symbol
"""
def __init__(self, name, refDes="U", showPinNames=True, showPinNumbers=True):
pinsRight = [
['TPIDATA/PB0', 1, pinType.IO],
['TPICLK/PB1', 3, pinType.IO],
['PB2', 4, pinType.IO],
['RESET/PB3', 6, pinType.IO]
]
pinsLeft = [
['VCC', 5, pinType.pwrIn],
None,
None,
['GND', 2, pinType.pwrIn],
]
super().__init__(name, pinsLeft=pinsLeft, pinsRight=pinsRight, width=1200)
class symbolAvrProg(symbolIC):
"""AVR programming connector symbol symbol
"""
def __init__(self, name="AVR_Prog", refDes=defaults.conRefDes, showPinNames=True, showPinNumbers=True):
pinsRight = [
['VCC', 2, pinType.passive],
['MOSI', 4, pinType.passive],
['GND', 6, pinType.passive]
]
pinsLeft = [
['MISO', 1, pinType.passive],
['SCK', 3, pinType.passive],
['RST', 5, pinType.passive]
]
super().__init__(name, pinsLeft=pinsLeft, pinsRight=pinsRight, width=800, refDes=refDes)
if __name__ == "__main__":
generateLibraries([libraryAVR()]) | 2.03125 | 2 |
tests/utils.py | frwickst/pyhuum | 1 | 12788068 | <reponame>frwickst/pyhuum<filename>tests/utils.py<gh_stars>1-10
from typing import Any
class MockResponse:
def __init__(
self, json_data: dict[str, Any], status_code: int, text: str = ""
) -> None:
self._json = json_data
self._text = text
self.status = status_code
async def json(self) -> dict[str, Any]:
return self._json
async def text(self) -> str:
return self._text
| 2.484375 | 2 |
tests/test_router.py | elizabrock/adventofcode | 0 | 12788069 | import unittest
from day3 import Router, Path, Traveler
class TestRouter(unittest.TestCase):
def test_houses_visited_single_visitor_2(self):
input = '>'
expected = 2
path = Path()
Router.route(input, Traveler(path))
self.assertEqual(expected, path.houses_visited())
def test_houses_visited_single_visitor_4(self):
input = '^>v<'
expected = 4
path = Path()
Router.route(input, Traveler(path))
self.assertEqual(expected, path.houses_visited())
def test_houses_visited_long_single_visitor_2(self):
input = '^v^v^v^v^v'
expected = 2
path = Path()
Router.route(input, Traveler(path))
self.assertEqual(expected, path.houses_visited())
def test_houses_visited_multiple_visitors_2(self):
input = '^v'
expected = 3
path = Path()
Router.route(input, Traveler(path), Traveler(path))
self.assertEqual(expected, path.houses_visited())
def test_houses_visited_multiple_visitors_4(self):
input = '^>v<'
expected = 3
path = Path()
Router.route(input, Traveler(path), Traveler(path))
self.assertEqual(expected, path.houses_visited())
def test_houses_visited_long_multiple_visitors_2(self):
input = '^v^v^v^v^v'
expected = 11
path = Path()
Router.route(input, Traveler(path), Traveler(path))
self.assertEqual(expected, path.houses_visited())
if __name__ == '__main__':
unittest.main()
| 3.640625 | 4 |
src/portfolio/test/test_models.py | dunneff/stock-price-app | 0 | 12788070 | <filename>src/portfolio/test/test_models.py
from django.test import TestCase
from portfolio.models import StockPortfolio, StockPortfolioItem
class StockPortfolioTestCase(TestCase):
def test_portfolio(self):
StockPortfolio.objects.create(title='Test Portfolio')
portfolio = StockPortfolio.objects.get(title='Test Portfolio')
self.assertEqual(portfolio.title, 'Test Portfolio')
portfolio.delete()
try:
retrieved_portfolio = StockPortfolio.objects.get(title='Test Portfolio')
except StockPortfolio.DoesNotExist:
retrieved_portfolio = None
self.assertEqual(retrieved_portfolio, None)
def test_portfolio_stock(self):
portfolio = StockPortfolio.objects.create(title='Test Portfolio')
portfolio_stock = StockPortfolioItem.objects.create(
portfolio_list=portfolio,
title='Test Stock',
description="This is a test stock item in the portfolio"
)
self.assertEqual(portfolio.stocks.count(), 1)
self.assertEqual(portfolio.stocks.first(), portfolio_stock)
portfolio.delete()
try:
retrieved_item = StockPortfolioItem.objects.get(title='Test Stock')
except StockPortfolioItem.DoesNotExist:
retrieved_item = None
self.assertEqual(retrieved_item, None)
| 2.390625 | 2 |
tests/case_manager/test_database.py | IfengAutomation/uitester | 4 | 12788071 | # @Time : 2016/9/1 19:09
# @Author : lixintong
import unittest
import time
from uitester.case_manager.database import DBCommandLineHelper, Tag, Case, DB
class TestDataBase(unittest.TestCase):
def setUp(self):
self.db_helper = DBCommandLineHelper()
def test_operate_tag_data(self):
ms_str = str(time.time())
tag_name = "test_tag_name_" + ms_str
tag_description = "test_tag_name_" + ms_str
tag = self.db_helper.insert_tag(tag_name, tag_description) # insert tag
dst_tag = self.db_helper.query_tag_by_id(tag.id) # query tag by tag.id
self.assertTrue(tag == dst_tag)
tag_list = self.db_helper.fuzzy_query_tag_by_name(tag.name)
self.assertTrue(tag in tag_list)
dst_tag = self.db_helper.query_tag_by_name(tag.name)
self.assertTrue(tag == dst_tag)
tag_list = self.db_helper.query_tag_all() # query all tags
self.assertTrue(type(tag_list[0]) is Tag)
self.db_helper.delete_tag(tag.id) # delete tag
dst_tag = self.db_helper.query_tag_by_id(tag.id) # query tag by tag.id
self.assertTrue(dst_tag is None)
def test_operate_case_data(self):
ms_str = str(time.time())
tag_name = "test_tag_name_" + ms_str
tag_description = "test_tag_name_" + ms_str
tag = self.db_helper.insert_tag(tag_name, tag_description) # insert tag
tags = [tag]
case_name = case_content = "test_case_name_" + ms_str
case = self.db_helper.insert_case_with_tags(case_name, case_content, tags) # insert case
dst_case = self.db_helper.query_case_by_id(case.id)
self.assertTrue(case == dst_case)
dst_case = self.db_helper.query_case_by_name(True, case.name)
self.assertTrue(case == dst_case)
dst_case_list = self.db_helper.query_case_by_name(False, case.name)
self.assertTrue(case in dst_case_list)
case_list = self.db_helper.query_case_by_tag_names([tag.name])
self.assertTrue(type(case_list[0]) is Case)
# tag_name = "test_tag_name_" + str(time.time())
# modify case:
case = self.db_helper.query_case_by_id(case.id)
case_id = case.id
case_name = 'test_case_name_' + str(time.time())
case.name = case_name
case.content = 'test_case_name_' + str(time.time())
tags = self.db_helper.query_tag_all()
case.tags = tags
self.db_helper.update_case()
case = self.db_helper.query_case_by_id(case.id)
self.assertTrue(case.name == case_name)
tag_name = "test_tag_name_" + str(time.time())
case = self.db_helper.insert_case_with_tagnames(case.name, case.content, [tag.name], [tag_name])
self.assertTrue(type(case) is Case and case.id)
result = self.db_helper.get_table_data_by_cases_id(str(case.id))
self.assertTrue(result['case'] and result['tag'] and result['case_tag'])
self.db_helper.delete_case(case.id)
dst_case = self.db_helper.query_case_by_id(case.id)
self.assertTrue(dst_case is None)
def test_delete_tag_by_name(self):
tag_name = 'test_123'
tag = self.db_helper.insert_tag(tag_name, 'test tag')
case = self.db_helper.insert_case_with_tags('test_123', 'test case', [tag])
self.db_helper.delete_tag_by_name(tag_name)
self.assertTrue(self.db_helper.query_case_by_id(case.id) is not None)
self.assertTrue(self.db_helper.query_tag_by_name(tag_name) is None)
| 2.515625 | 3 |
venv/lib/python3.8/site-packages/pylint_django/tests/input/func_noerror_gettext_lazy_format.py | DiegoSilvaHoffmann/Small-Ecommerce | 1 | 12788072 | """
Checks that Pylint does not complain about django lazy proxy
when using gettext_lazy
"""
from django.utils.translation import gettext_lazy
gettext_lazy('{something}').format(something='lala')
| 1.867188 | 2 |
cogs/owner/__init__.py | iomintz/Chiaki-Nanami | 1 | 12788073 | <gh_stars>1-10
"""**For the bot owner only!"""
__hidden__ = True
| 1.203125 | 1 |
debug/debug.py | MacHu-GWU/crawl_trulia-project | 1 | 12788074 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from dataIO import js, textfile
from crawl_trulia import trulia_urlencoder, trulia_htmlparser
from crawl_trulia.packages.crawlib.spider import spider
PATH = "test.html"
address = "22 Yew Rd"
city = "Baltimore"
zipcode = "21221"
# url = urlencoder.by_address_and_zipcode(address, zipcode)
url = trulia_urlencoder.by_address_city_and_zipcode(address, city, zipcode)
if not os.path.exists(PATH):
html = spider.get_html(url, encoding="utf-8")
textfile.write(html, PATH)
html = textfile.read(PATH)
data = trulia_htmlparser.get_house_detail(html)
js.pprint(data) | 2.765625 | 3 |
setup.py | K0lb3/pvrtc_decoder | 2 | 12788075 | import os
from setuptools import Extension, setup
try:
from Cython.Build import cythonize
except ImportError:
cythonize = None
extensions = [
Extension(
name="pvrtc_decoder",
version="1.0.2",
author="K0lb3",
author_email="",
description="A PVRTC decoder for PIL",
long_description=open('README.md', 'rt', encoding='utf8').read(),
long_description_content_type="text/markdown",
url="https://github.com/K0lb3/pvrtc_decoder",
download_url="https://github.com/K0lb3/pvrtc_decoder/tarball/master",
keywords=['PVRTC', 'PVRT', 'decoder', "PIL", "Pillow", "texture"],
classifiers=[
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Multimedia :: Graphics",
],
sources=[
"pvrtc_decoder.pyx",
'src/PVRTDecompress.cpp',
],
language="c++",
include_dirs=[
"src"
],
install_requires=[
"cython"
],
)
]
if cythonize:
extensions = cythonize(extensions)
setup(ext_modules=extensions)
| 1.632813 | 2 |
src/ts_analysis/utilities/func.py | tedchengf/ts_analysis | 1 | 12788076 | # func.py
import numpy as np
from numba import njit, jit, prange
#------------------------ Distance Functions -----------------------#
def corr_dist(A):
return 1 - np.corrcoef(A)
def abs_diff(A):
target_matrix = np.zeros((len(A), len(A)))
mat_dim = target_matrix.shape[0]
for r in range(mat_dim):
for c in range(r, mat_dim):
target_matrix[r,c] = np.absolute(np.subtract(A[r], A[c]))
target_matrix[c,r] = target_matrix[r,c]
return target_matrix
def cond_diff(A):
target_matrix = np.ones((len(A), len(A)), dtype = bool)
mat_dim = target_matrix.shape[0]
for r in range(mat_dim):
for c in range(r, mat_dim):
target_matrix[r,c] = (A[r] == A[c])
target_matrix[c,r] = target_matrix[r,c]
return target_matrix
def len_diff(A):
target_matrix = np.ones((len(A), len(A)), dtype = int)
mat_dim = target_matrix.shape[0]
for r in range(mat_dim):
for c in range(r, mat_dim):
target_matrix[r,c] = np.absolute(np.subtract(len(A[r]), len(A[c])))
target_matrix[c,r] = target_matrix[r,c]
return target_matrix
def levenshtein_dist(A):
target_matrix = np.ones((len(A), len(A)), dtype = int)
mat_dim = target_matrix.shape[0]
for r in range(mat_dim):
for c in range(r, mat_dim):
target_matrix[r,c] = levenshtein(A[r], A[c])
target_matrix[c,r] = target_matrix[r,c]
return target_matrix
def weighted_euclidian(A, weights):
matrices = []
for arr in A:
mat = np.zeros((len(arr), len(arr)))
matrix_iteration(arr, mat, squared_dist)
matrices.append(mat)
weighted_dist = np.zeros((len(arr), len(arr)))
for ind in range(len(weights)):
weighted_dist = weighted_dist + weights[ind] * matrices[ind]
return np.sqrt(weighted_dist)
#------------------------ Transform Functions -----------------------#
def corrcoef_z_transform(A):
A = np.subtract(1, A)
results = np.empty(len(A), dtype = A.dtype)
quick_z_transform(A, results)
return results
def invert_corrcoef(A):
return np.subtract(1, A)
def z_transform(A):
results = np.empty(len(A), dtype = A.dtype)
quick_z_transform(A, results)
return results
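# quick_z_transform applies the Fisher z-transform element-wise:
# z = artanh(r) = ln((1 + r) / (1 - r)) / 2 (numba-compiled for speed);
# corrcoef_z_transform above first converts a correlation distance (1 - r)
# back to a correlation before transforming.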
@njit(parallel = True)
def quick_z_transform(A, results):
for i in prange(len(A)):
results[i] = np.log((1+A[i])/(1-A[i]))/2
#------------------------ Other Functions -----------------------#
def levenshtein(seq1, seq2):
size_x = len(seq1) + 1
size_y = len(seq2) + 1
matrix = np.zeros((size_x, size_y))
for x in range(size_x):
matrix [x, 0] = x
for y in range(size_y):
matrix [0, y] = y
for x in range(1, size_x):
for y in range(1, size_y):
if seq1[x-1] == seq2[y-1]:
matrix [x,y] = min(
matrix[x-1, y] + 1,
matrix[x-1, y-1],
matrix[x, y-1] + 1
)
else:
matrix [x,y] = min(
matrix[x-1,y] + 1,
matrix[x-1,y-1] + 1,
matrix[x,y-1] + 1
)
return (matrix[size_x - 1, size_y - 1])
| 2.84375 | 3 |
vk_air/objects/pretty.py | sultan1k/vk_air | 5 | 12788077 | <gh_stars>1-10
"""
MIT License
Copyright (c) 2021 sultan1k
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from typing import List, Optional
from .photo import PhotoSize
from .button import Button
class PrettyCard:
"""
Объект карточки.
"""
def __init__(self, obj):
self.obj = obj
@property
def card_id(self) -> int:
return self.obj.get('card_id')
@property
def link_url(self) -> str:
return self.obj.get('link_url')
@property
def title(self) -> str:
return self.obj.get('title')
@property
def images(self) -> Optional[List[PhotoSize]]:
imagelist = []
for i in self.obj.get('images'):
imagelist.append(PhotoSize(i))
return imagelist
@property
def button(self) -> Optional[Button]:
return Button(self.obj.get('button')) if self.obj.get('button') else None
@property
def price(self) -> str:
return self.obj.get('price')
@property
def price_old(self) -> Optional[str]:
return self.obj.get('price_old')
class PrettyCards:
"""
Массив элементов
"""
def __init__(self, obj):
self.obj = obj
@property
def cards(self) -> Optional[List[PrettyCard]]:
cardlist = []
for i in self.obj.get('cards'):
cardlist.append(PrettyCard(i))
return cardlist | 2.515625 | 3 |
src/data_manager/urls.py | MonkeyApproved/RealEstateScraper | 0 | 12788078 | from django.urls import path, include
from django.views.generic import TemplateView
from rest_framework.schemas import get_schema_view
from rest_framework.routers import DefaultRouter
from . import views
from scraping.xe import PropertyType, Xe
router = DefaultRouter()
router.register(r'load_config', views.LoadConfigView)
router.register(r'data_loads', views.DataLoadView)
urlpatterns = [
path("", include(router.urls)),
]
| 1.835938 | 2 |
criticalConnections.py | sarveshbhatnagar/CompetetiveProgramming | 0 | 12788079 | <filename>criticalConnections.py<gh_stars>0
from collections import defaultdict
class Solution:
def criticalConnections(self, connections):
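# Tarjan-style bridge finding: dis[] holds each node's DFS discovery time and
# low[] the smallest discovery time reachable from its subtree through any
# back edge; an edge (node, n) is a critical connection (bridge) exactly when
# low[n] > dis[node], meaning n's subtree has no back edge above node.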
dis = [0] * len(connections)
low = [0] * len(connections)
self.time = 0
res = []
visited = set()
g = defaultdict(list)
for s, t in connections:
g[s].append(t)
g[t].append(s)
def dfs(node, parent):
nonlocal self, res, visited, low, dis
visited.add(node)
low[node] = dis[node] = self.time
self.time += 1
for n in g[node]:
if n == parent:
continue
if n not in visited:
dfs(n, node)
low[node] = min(low[node], low[n])
if low[n] > dis[node]:
res.append([node, n])
else:
low[node] = min(low[node], dis[n])
dfs(0, -1)
return res
| 3.078125 | 3 |
chapter2/old/10_armed_bandits_UCB/main.py | otaviojacobi/rl_an_introduction | 0 | 12788080 | <gh_stars>0
import matplotlib.pyplot as plt
from ArmedBandit import *
def main():
'''
- ε-greedy policy implementation for a stateless problem
'''
STEPS = 1000
RUNS = 2000
reward_regular, _ = ArmedBanditRunner(STEPS, RUNS, bandits=10, episolon=0.1).run()
reward_UCB, _ = ArmedBanditRunner(STEPS, RUNS, bandits=10, episolon=0.1).run(policy='UCB', c=2)
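# UCB action selection, in its usual form, picks argmax_a [ Q_t(a) + c * sqrt(ln(t) / N_t(a)) ];
# ArmedBanditRunner (not shown here) is assumed to implement that rule for policy='UCB', c=2.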
plt.plot([k+1 for k in range(STEPS)], reward_regular, color='green', label='ε-greedy, ε=0.1')
plt.plot([k+1 for k in range(STEPS)], reward_UCB, color='black', label='UCB, c=2')
plt.legend(loc=4, prop={'size': 17})
plt.xlabel('Steps')
plt.ylabel('Average reward')
plt.show()
if __name__ == '__main__':
main() | 2.65625 | 3 |
unittest/employee.py | petsan/coding-challenges | 0 | 12788081 | <filename>unittest/employee.py
import requests
import unittest
from unittest.mock import patch
class TestEmployee(unittest.TestCase):
def setUp(self):
self.emp_1 = Employee('Jane', 'Doe', 50000)
self.emp_2 = Employee('Jim', 'James', 60000)
def tearDown(self):
pass
def test_email(self):
self.assertEqual(self.emp_1.email, '<EMAIL>')
self.assertEqual(self.emp_2.email, '<EMAIL>')
self.emp_1.first = 'Jenny'
self.emp_2.first = 'Chris'
self.assertEqual(self.emp_1.email, '<EMAIL>')
self.assertEqual(self.emp_2.email, '<EMAIL>')
def test_fullname(self):
self.assertEqual(self.emp_1.fullname, '<NAME>')
self.assertEqual(self.emp_2.fullname, '<NAME>')
self.emp_1.first = 'Jenny'
self.emp_2.first = 'Chris'
self.assertEqual(self.emp_1.fullname, '<NAME>')
self.assertEqual(self.emp_2.fullname, '<NAME>')
def test_apply_raise(self):
self.emp_1.apply_raise()
self.emp_2.apply_raise()
self.assertEqual(self.emp_1.pay, 52500)
self.assertEqual(self.emp_2.pay, 63000)
def test_monthly_schedule(self):
with patch('requests.get') as mocked_get:
# test passing case
mocked_get.return_value.ok = True
mocked_get.return_value.text = 'Success'
schedule = self.emp_1.monthly_schedule('May')
mocked_get.assert_called_with('http://company.com/Doe/May')
self.assertEqual(schedule, 'Success')
# test failing case
mocked_get.return_value.ok = False
schedule = self.emp_2.monthly_schedule('June')
mocked_get.assert_called_with('http://company.com/James/June')
self.assertEqual(schedule, 'Bad Response!')
class Employee:
raise_amt = 1.05
def __init__(self, first, last, pay):
self.first = first
self.last = last
self.pay = pay
@property
def email(self):
return '<EMAIL>'.format(self.first, self.last)
@property
def fullname(self):
return '{} {}'.format(self.first, self.last)
def apply_raise(self):
self.pay = int(self.pay * self.raise_amt)
def monthly_schedule(self, month):
response = requests.get(f'http://company.com/{self.last}/{month}')
if response.ok:
return response.text
else:
return 'Bad Response!'
if __name__ == '__main__':
unittest.main()
| 3.125 | 3 |
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/panther/calculators/calc_dsa.py | lmnotran/gecko_sdk | 82 | 12788082 | from pyradioconfig.parts.nixi.calculators.calc_dsa import CALC_DSA_nixi
class CALC_DSA_panther(CALC_DSA_nixi):
pass | 1.195313 | 1 |
amplicons/make-ILMN_V34_primers.py | taejoonlab/microbe-toolbox | 1 | 12788083 | <filename>amplicons/make-ILMN_V34_primers.py
#!/usr/bin/env python3
# Source: https://stackoverflow.com/questions/27551921/how-to-extend-ambiguous-dna-sequence
from Bio import Seq
from itertools import product
def extend_ambiguous_dna(seq):
"""return list of all possible sequences given an ambiguous DNA input"""
d = Seq.IUPAC.IUPACData.ambiguous_dna_values
r = []
for i in product(*[d[j] for j in seq]):
r.append("".join(i))
return r
primer_V34F = 'CTACGGGNGGCWGCAG'
primer_V34R = 'GACTACHVGGGTATCTAATCC'
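# IUPAC codes in the V3-V4 pair: N = A/C/G/T and W = A/T in the forward primer
# (4 * 2 = 8 expansions), H = A/C/T and V = A/C/G in the reverse primer
# (3 * 3 = 9 expansions), so the loop below emits 8 * 9 = 72 primer pairs.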
idx = 1
for tmp_F in extend_ambiguous_dna(primer_V34F):
for tmp_R in extend_ambiguous_dna(primer_V34R):
print("ILMN_V4-%02d\t%s\t%s\t%d\t%d" % (idx, tmp_F, tmp_R, 300, 500))
idx += 1
#print(extend_ambiguous_dna(primer_V3F))
#print(extend_ambiguous_dna(primer_V34R))
#print(extend_ambiguous_dna(primer_V34F))
| 2.828125 | 3 |
office365/planner/tasks/check_list_items.py | theodoriss/Office365-REST-Python-Client | 544 | 12788084 | from office365.planner.tasks.check_list_item import PlannerChecklistItem
from office365.runtime.client_value_collection import ClientValueCollection
class PlannerChecklistItems(ClientValueCollection):
"""The plannerChecklistItemCollection resource represents the collection of checklist items on a task.
It is an Open Type. It is part of the task details object.
The value in the property-value pair is the checklistItem object.
"""
def __init__(self, initial_values=None):
super(PlannerChecklistItems, self).__init__(PlannerChecklistItem, initial_values)
| 2.125 | 2 |
main.py | hantabaru1014/youtube_live-highlighter | 0 | 12788085 | import json
import os
import argparse
import re
from comment_getter import CommentGetter
from analyzer import Analyzer
from settings_loader import SettingsLoader
DEFAULT_SETTINGS_JSON_PATH = "default_settings.json"
YOUTUBE_VIDEO_ID_PATTERN = r"\?v=([^&]+)"
def get_timed_link(video_id, sec):
return f"https://www.youtube.com/watch?v={video_id}&t={sec}s"
def get_video_id(text):
m = re.search(YOUTUBE_VIDEO_ID_PATTERN, text)
if m is not None:
return m.group(1)
else:
return text
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('video_id', help="Youtubeの動画ID または 動画URL")
parser.add_argument('--settings', nargs='*', help="設定ファイル。複数指定で指定した順に読み込み。")
parser.add_argument('--force_download', action='store_true', help="コメントデータが存在する場合でもDLし直す")
parser.add_argument('--gen_default_settings', action='store_true', help="設定ファイルを出力")
parser.add_argument('--debug', action='store_true', help="デバッグメッセージを表示")
args = parser.parse_args()
target_video_id = get_video_id(args.video_id)
settings = SettingsLoader().get_init_settings()
to_loads = []
if os.path.exists(DEFAULT_SETTINGS_JSON_PATH):
to_loads.append(DEFAULT_SETTINGS_JSON_PATH)
if args.settings is not None and len(args.settings) > 0:
to_loads.extend(args.settings)
settings = SettingsLoader().load(to_loads)
if args.force_download:
settings['force_download'] = args.force_download
if args.debug:
settings['debug'] = args.debug
if args.gen_default_settings:
settings = SettingsLoader().get_init_settings()
with open(DEFAULT_SETTINGS_JSON_PATH, mode='w', encoding='utf-8') as fh:
json.dump(settings, fh, indent=4, ensure_ascii=False)
if target_video_id == "" or target_video_id is None:
print("video_idは必須です!")
exit(0)
comment_data_path = os.path.join(settings['comment_data_directory'], f"comment_data-{target_video_id}.json")
comment_data = {}
if os.path.exists(comment_data_path) and not settings['force_download']:
with open(comment_data_path, mode='r', encoding='utf-8') as fh:
comment_data = json.load(fh)
print(f"Load Comment Data File: {comment_data_path}")
else:
print(f"Start download comment data. id={target_video_id}")
comment_data = CommentGetter(settings).get_comment_data(target_video_id)
with open(comment_data_path, mode='w', encoding="utf-8") as fh:
json.dump(comment_data, fh, indent=4, ensure_ascii=False)
print("Finish download.")
analyzer = Analyzer(settings)
before_secs = settings['link_before_secs']
print("### Total ###")
i = 1
for dt, score in analyzer.analyze(comment_data):
print(f"{i}. {dt} - {score}\n {get_timed_link(target_video_id, dt.seconds-before_secs)}")
i += 1
| 2.671875 | 3 |
neuralpredictors/data/datasets/statics/__init__.py | Shahdsaf/neuralpredictors | 9 | 12788086 | from .base import StaticImageSet
from .filetree import FileTreeDataset
| 1.023438 | 1 |
fwks/stage/stage_misc.py | Zantyr/fwks | 0 | 12788087 | <filename>fwks/stage/stage_misc.py
"""
Things that do not fit elsewhere
"""
from .stage_meta import ToDo, Analytic, DType
from functools import reduce
import numpy as np
class Pointwise(ToDo):
"""
Squash and so on
"""
class LogPower(Analytic):
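    """Element-wise log magnitude of the input, log(|x| + eps), negated when ``negative`` is True."""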
def __init__(self, negative=True):
self.negative = negative
def output_dtype(self, input_dtype):
if self.previous:
input_dtype = self.previous.output_dtype(input_dtype)
return input_dtype
def _function(self, recording):
return (-1 if self.negative else 1) * np.log(np.abs(recording) + 2e-12)
class ComposeFeatures(Analytic):
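    """Chains several feature transforms by binding each one to the previous; the composed result is obtained by mapping through the last transform."""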
def __init__(self, feature_transforms):
self.feature_transforms = feature_transforms
def bind(self, prev):
self.previous = prev
prev = None
for i in self.feature_transforms:
prev = i.bind(prev)
return self
def output_dtype(self, input_dtype):
for transform in self.feature_transforms:
input_dtype = transform.output_dtype(input_dtype)
return input_dtype
def _function(self, recording):
return self.feature_transforms[-1].map(recording)
class ConcatFeatures(Analytic):
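    """Concatenates the outputs of several transform pipelines along the feature axis, padding in time when their lengths differ by less than ``max_fit`` frames."""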
def __init__(self, feature_transforms, max_fit = 10):
self.feature_transforms = [(ComposeFeatures(x) if isinstance(x, list) else x)
for x in feature_transforms]
self.max_fit = max_fit
def bind(self, prev):
self.previous = prev
[transform.bind(None)
for transform in self.feature_transforms]
return self
def output_dtype(self, input_dtype):
        dtypes = [transform.output_dtype(input_dtype)
                  for transform in self.feature_transforms]
        shape = sum([dtype.shape[-1] for dtype in dtypes])
        shape = dtypes[0].shape[:-1] + [shape]
return DType("Array", shape, np.float32)
def _function(self, recording):
transforms = [transform._function(recording)
for transform in self.feature_transforms]
times = np.array([x.shape[0] for x in transforms])
times -= times.min()
if times.max() < self.max_fit:
max_time = np.array([x.shape[0] for x in transforms]).max()
transforms = [
np.pad(x, tuple([(0, max_time - x.shape[0])] + [
(0, 0) for dim in x.shape[1:]
]), 'constant') for x in transforms
]
transforms = np.concatenate(transforms, axis=(len(transforms[0].shape) - 1))
return transforms
| 2.296875 | 2 |
bisellium/lib/pagan/pagan.py | colosseum-project/app-bisellium | 0 | 12788088 | from . import generator
import os
MD5 = generator.HASH_MD5
SHA1 = generator.HASH_SHA1
SHA224 = generator.HASH_SHA224
SHA256 = generator.HASH_SHA256
SHA384 = generator.HASH_SHA384
SHA512 = generator.HASH_SHA512
class Avatar:
DEFAULT_OUTPUT_PATH = os.path.join(os.getcwd(), "output/")
DEFAULT_FILENAME = "pagan"
DEFAULT_EXTENSION = "png"
ALLOWED_EXTENSIONS = ["bmp", "gif", "png", "tiff"]
DEFAULT_HASHFUN = generator.HASH_MD5
def __init__(self, inpt, hashfun=DEFAULT_HASHFUN):
"""Initialize the avatar and creates the image."""
self.img = self.__create_image(inpt, hashfun)
def __create_image(self, inpt, hashfun):
"""Creates the avatar based on the input and
the chosen hash function."""
if hashfun not in generator.HASHES.keys():
print(
"Unknown or unsupported hash function. Using default: %s"
% self.DEFAULT_HASHFUN
)
algo = self.DEFAULT_HASHFUN
else:
algo = hashfun
return generator.generate(inpt, algo)
def show(self):
"""Shows a preview of the avatar in an external
image viewer."""
self.img.show()
def change(self, inpt, hashfun=DEFAULT_HASHFUN):
"""Change the avatar by providing a new input.
Uses the standard hash function if no one is given."""
self.img = self.__create_image(inpt, hashfun)
def save(
self,
path=DEFAULT_OUTPUT_PATH,
filename=DEFAULT_FILENAME,
extension=DEFAULT_EXTENSION,
):
"""Saves a avatar under the given output path to
a given filename. The file ending ".png" is appended
automatically. If the path does not exist, it will be
created. When no parameters are omitted, a default path
and/or filename will be used."""
if extension not in self.ALLOWED_EXTENSIONS:
raise Exception(
'Extension "%s" is not supported. Supported extensions are: %s'
% (extension, ", ".join(self.ALLOWED_EXTENSIONS))
)
if not os.path.exists(path):
os.makedirs(path)
if extension.startswith("."):
extension = extension[1:]
if filename[-len(extension) :] == extension:
filename = filename[: -len(extension) - 1]
filepath = "%s%s.%s" % (path, filename, extension)
filepath = os.path.join(path, "%s.%s" % (filename, extension))
self.img.save(filepath, extension.upper())
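# Minimal usage sketch (illustrative only; the input string and paths are made up):
#   avatar = Avatar("jane.doe@example.org", hashfun=SHA256)
#   avatar.show()
#   avatar.save(path="output/", filename="jane", extension="png")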
| 3.09375 | 3 |
app/main/routes.py | Island-Daoist/CS50-Project | 0 | 12788089 | from app.main import bp
from app import db
from flask import render_template, url_for, flash, redirect, request, current_app
from flask_login import login_required, fresh_login_required, current_user
from app.main.forms import UpdateProfileForm, StatusForm, MessageForm
from app.models import Users, Status, Messages, Blogs
from datetime import datetime
@bp.route('/', methods=['GET', 'POST'])
@bp.route('/index', methods=['GET', 'POST'])
def index():
form = StatusForm()
# if request.method == 'POST' and form.submit():
if form.validate_on_submit():
status = Status(body=form.status.data, author=current_user)
db.session.add(status)
db.session.commit()
flash('Your status has been updated!')
return redirect(url_for('main.index'))
# verbose version of following line of operating code
# if current_user.is_authenticated:
# user_status = Status.query.filter_by(user_id=current_user.id).order_by(Status.timestamp.desc())
# else:
# user_status = Status.query.order_by(Status.timestamp.desc())
if current_user.is_authenticated:
post_page = request.args.get('post_page', 1, type=int)
shown_posts = current_user.related_posts().paginate(
post_page, current_app.config['POSTS_PER_PAGE'], False, max_per_page=10)
if not shown_posts.items:
shown_posts = Status.query.order_by(Status.timestamp.desc()).paginate(
post_page, current_app.config['POSTS_PER_PAGE'], False, max_per_page=10)
post_next_url = url_for('main.index', post_page=shown_posts.next_num) if shown_posts.has_next else None
post_prev_url = url_for('main.index', post_page=shown_posts.prev_num) if shown_posts.has_prev else None
else:
post_page = request.args.get('post_page', 1, type=int)
shown_posts = Status.query.order_by(Status.timestamp.desc()).paginate(
post_page, current_app.config['POSTS_PER_PAGE'], False, max_per_page=10)
post_next_url = url_for('main.index', post_page=shown_posts.next_num) if shown_posts.has_next else None
post_prev_url = url_for('main.index', post_page=shown_posts.prev_num) if shown_posts.has_prev else None
blog_page = request.args.get('blog_page', 1, type=int)
blogs = Blogs.query.order_by(Blogs.timestamp.desc()).paginate(
blog_page, current_app.config['POSTS_PER_PAGE'], False, max_per_page=10)
blog_next_url = url_for('main.index', blog_page=blogs.next_num) if blogs.has_next else None
blog_prev_url = url_for('main.index', blog_page=blogs.prev_num) if blogs.has_prev else None
return render_template('main/index.html', title='Welcome to the Blog!', form=form,
shown_posts=shown_posts.items, post_next_url=post_next_url, post_prev_url=post_prev_url,
blogs=blogs.items, blog_next_url=blog_next_url, blog_prev_url=blog_prev_url)
@bp.route('/user/<username>')
@login_required
def profile(username):
user = Users.query.filter_by(username=username).first_or_404()
status_page = request.args.get('status_page', 1, type=int)
statuses = user.status.order_by(Status.timestamp.desc()).paginate(
status_page, current_app.config["POSTS_PER_PAGE"], False)
status_next_url = url_for('main.profile', username=username,
status_page=statuses.next_num) if statuses.has_next else None
status_prev_url = url_for('main.profile', username=username,
status_page=statuses.prev_num) if statuses.has_prev else None
blog_page = request.args.get('blog_page', 1, type=int)
blogs = Blogs.query.filter_by(user_id=user.id).paginate(
blog_page, current_app.config['POSTS_PER_PAGE'], False)
blog_next_url = url_for('main.profile', username=username,
blog_page=blogs.next_num) if blogs.has_next else None
    blog_prev_url = url_for('main.profile', username=username,
                            blog_page=blogs.prev_num) if blogs.has_prev else None
return render_template('main/profile.html', title='Profile', user=user,
statuses=statuses.items, status_next_url=status_next_url,
status_prev_url=status_prev_url,
blogs=blogs.items, blog_next_url=blog_next_url, blog_prev_url=blog_prev_url)
@bp.route('/user/<username>/update', methods=['GET', 'POST'])
@fresh_login_required
def update_profile(username):
user = Users.query.filter_by(username=username).first()
if current_user != user:
flash('This is not your profile!')
return redirect(url_for('main.index'))
form = UpdateProfileForm(obj=user, original_username=current_user.username)
if form.validate_on_submit():
form.populate_obj(user)
db.session.commit()
flash('Your profile has been updated!')
return redirect(url_for('main.profile', username=current_user.username))
return render_template('main/update_profile.html', title='Update your Profile', form=form)
@bp.route('/follow/<username>')
@login_required
def follow(username):
user = Users.query.filter_by(username=username).first()
if user is None:
flash(f'User {username} was not found.')
return redirect(url_for('main.index'))
if current_user == user:
flash('You cannot follow yourself!')
return redirect(url_for('main.index'))
current_user.follow(user)
db.session.commit()
flash(f'You are now following {username}!')
return redirect(url_for('main.profile', username=username))
@bp.route('/unfollow/<username>')
@login_required
def unfollow(username):
user = Users.query.filter_by(username=username).first()
if user is None:
flash(f'User {username} was not found.')
return redirect(url_for('main.index'))
if current_user == user:
flash('You cannot unfollow yourself!')
return redirect(url_for('main.index'))
current_user.unfollow(user)
db.session.commit()
flash(f'You are no longer following {username}.')
return redirect(url_for('main.profile', username=username))
@bp.route('/friend-request/<username>')
@login_required
def friend_request(username):
user = Users.query.filter_by(username=username).first()
if user is None:
flash(f'User {username} could not be found.')
return redirect(url_for('main.index'))
if user == current_user:
flash('You cannot send yourself a friend request!')
return redirect(url_for('main.index'))
user.add_friend_request(current_user)
db.session.commit()
flash(f'You have sent a friend request to {username}.')
return redirect(url_for('main.profile', username=username))
@bp.route('/requests/<username>', methods=['GET', 'POST'])
@login_required
def pending_requests(username):
if request.method == 'POST':
user = Users.query.filter_by(id=request.form.get('accept')).first() if request.form.get('accept') \
else Users.query.filter_by(id=request.form.get('deny')).first()
if user is not None and user in current_user.pending_friend_requests:
if request.form.get('accept'):
                flash(f'You have accepted the friend request from {user.username}!')
current_user.add_friend(user)
current_user.pending_friend_requests.remove(user)
db.session.commit()
return redirect(url_for('main.index'))
elif request.form.get('deny'):
                flash(f'You have denied the friend request from {user.username}.')
current_user.pending_friend_requests.remove(user)
db.session.commit()
return redirect(url_for('main.index'))
user = Users.query.filter_by(username=username).first()
if user is None:
flash(f'Could not find user {username}.')
return redirect(url_for('main.index'))
if user != current_user:
flash('This is not your page!')
return redirect(url_for('main.index'))
pending_friend_requests = user.pending_friend_requests.all()
return render_template('main/pending_requests.html', title='View Your Pending Requests',
user=user, requests=pending_friend_requests)
@bp.route('/unfriend/<username>')
@login_required
def unfriend(username):
user = Users.query.filter_by(username=username).first()
if user is None:
flash(f'User {username} could not be found.')
return redirect(url_for('main.index'))
if user == current_user:
flash('You cannot unfriend yourself!')
return redirect(url_for('main.index'))
current_user.unfriend(user)
db.session.commit()
flash(f'You are no longer friends with {username}.')
return redirect(url_for('main.profile', username=username))
@bp.route('/send-message/<user>', methods=['GET', 'POST'])
@login_required
def send_message(user):
user = Users.query.filter_by(username=user).first_or_404()
form = MessageForm()
if form.validate_on_submit():
message = Messages(
author=current_user,
recipient=user,
body=form.message.data)
db.session.add(message)
db.session.commit()
flash('Your message was sent.')
return redirect(url_for('main.profile', username=user.username))
return render_template('main/send_message.html', title='Send a Message',
form=form, user=user)
@bp.route('/messages')
@login_required
def messages():
current_user.message_last_read_time = datetime.utcnow()
db.session.commit()
page = request.args.get('page', 1, type=int)
messages = current_user.messages_received.order_by(
Messages.timestamp.desc()).paginate(
page, current_app.config['POSTS_PER_PAGE'], False)
next_url = url_for('main.messages', page=messages.next_num) \
if messages.has_next else None
prev_url = url_for('main.messages', page=messages.prev_num) \
if messages.has_prev else None
return render_template('main/view_messages.html', messages=messages.items,
next_url=next_url, prev_url=prev_url)
@bp.route('/<user>/user-popup')
@login_required
def user_popup(user):
user = Users.query.filter_by(username=user).first_or_404()
return render_template('user_popup.html', user=user)
| 2.171875 | 2 |
Codeforces Online Judge Solve/47A - Triangular numbers.py | Remonhasan/programming-solve | 0 | 12788090 | <gh_stars>0
# <NAME>
n = int(input())
triangular = 1
i = 1
while triangular < n:
i += 1
triangular += i
if triangular == n:
print("YES")
else:
print("NO")
| 3.515625 | 4 |
tests/test_views.py | markliederbach/nm-launch-api | 0 | 12788091 | <gh_stars>0
"""
Tests for the nm_launch_api module.
"""
import os
import json
import unittest
from unittest import mock
from werkzeug.datastructures import ImmutableMultiDict
from nm_launch_api import app
from nm_launch_api.api.v1 import views as v1_views
from tests.mock_clients import launch_library
from tests import asserts
TEST_DIR = os.path.dirname(os.path.realpath(__file__))
JSON_DIRECTORY_PATH = os.path.join(TEST_DIR, "mock_requests")
class TestNMLaunchAPIV1(unittest.TestCase):
def setUp(self):
self.app = app
self.view = v1_views.LaunchSchedule()
@mock.patch('nm_launch_api.clients.base.BaseAPIClient._get_data_for_request',
side_effect=launch_library.mock_launch_library_get_data_for_request)
def wrapped_request(self, test_profile, query_mock_method, **kwargs):
request, response = self.submit_request('{}.json'.format(test_profile), **kwargs)
return query_mock_method, request, response
@staticmethod
def build_request_args(json_filename):
with open(os.path.join(JSON_DIRECTORY_PATH, json_filename), "r") as f:
return ImmutableMultiDict(json.load(f))
def submit_request(self, request_filename, view=None, **kwargs):
view = view if view is not None else self.view
request_args = self.build_request_args(request_filename)
with self.app.test_request_context() as context:
context.request.args = request_args
return request_args, view.get()
def test_000_launch_schedule(self):
test_profile = "launch_schedule_normal"
self.app.config["CLIENT_SETTINGS"]["launch_library"]["base_url"] = test_profile
query_mock_method, request, response = self.wrapped_request(test_profile)
asserts.LaunchScheduleAssert(self, request, response, query_mock_method, test_profile)
if __name__ == "__main__":
import sys
sys.exit(unittest.main())
| 2.28125 | 2 |
svc/eonbackup.py | undure/eonbackup | 2 | 12788092 | import os
import hashlib
import base64
from binascii import hexlify
import datetime
import getpass
import select
import socket
import sys
import time
import traceback
import json
import logging
import paramiko
import subprocess
import config
from wificlient import get_active_clients
logger = logging.getLogger('eonbackup')
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
def calculate_file_hash(filename):
sha256_hash = hashlib.sha256()
with open(filename,"rb") as f:
for byte_block in iter(lambda: f.read(4096),b""):
sha256_hash.update(byte_block)
return (sha256_hash.hexdigest())
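# e.g. calculate_file_hash("session-001/video.mp4") returns the hex SHA-256 digest of that file
# (the path is a hypothetical example); download_files() compares this digest against the remote
# "sha256sum" output before accepting a download.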
def download_files(sftp, session_name, file_defs):
directory = os.path.join(config.root_dir, session_name)
if not os.path.exists(directory):
os.makedirs(directory)
failed = False
for fd in file_defs:
h = fd[0]
fn = fd[1]
if os.path.exists(fn):
logger.info("File {} was downloaded already".format(fn))
continue
fn_d = fn+".tdl"
logger.info("Downloading: " + str(fn) + " " + h)
sftp.get(fn, fn_d)
h2 = calculate_file_hash(fn_d)
if h2 == h:
os.rename(fn_d, fn)
logger.info("Download of {} complete".format(fn))
elif os.path.exists(fn_d):
os.remove(fn_d)
failed = True
if not failed:
status_file = get_session_status_file_path(session_name)
with open(status_file, "w") as fs:
fs.write(json.dumps(file_defs))
def get_file_stat(t, sftp, fn):
command = "sha256sum " + fn
logger.info(command)
session = t.open_channel(kind='session')
session.exec_command(command)
while not session.recv_ready():
pass
    sha_result = [s for s in session.recv(512).decode().strip().split(' ') if s]  # decode to text and drop empty fields so indexing works on Python 3
stat = sftp.stat(sha_result[1])
fd = {
"name": sha_result[1],
"sha256hash": sha_result[0],
"atime": stat.st_atime,
"mtime": stat.st_mtime,
"size": stat.st_size
}
return fd , sha_result
def get_session_status_file_path(session_name):
return os.path.join(config.status_dir, session_name)
def sesson_sync_complete(session_name):
directory = os.path.join(config.root_dir, session_name)
if not os.path.exists(directory):
return False
status_file = get_session_status_file_path(session_name)
if os.path.exists(status_file):
return True
return False
def init():
if not os.path.exists(config.root_dir):
os.makedirs(config.root_dir)
if not os.path.exists(config.status_dir):
os.makedirs(config.status_dir)
def connect(hostname, port, key):
t = None
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((hostname, port))
t = paramiko.Transport(sock)
t.start_client()
t.auth_publickey(config.username, key)
except Exception as e:
logger.info("Connection failed: " + str(e))
return None
return t
def load_files(t, sftp, session_dir, age):
start_time = int((datetime.datetime.utcnow() - datetime.datetime(1970,1,1)).total_seconds())
files = sftp.listdir(session_dir)
sha_results = []
for f in files:
fn = os.path.join(session_dir, f)
fd, sha_result = get_file_stat(t, sftp, fn)
file_age = start_time - fd["atime"]
if file_age < age:
return None
sha_results.append(sha_result)
return sha_results
def process_session(t, sftp, session):
if sesson_sync_complete(session):
logger.info("Ignoring complete session {}".format(session))
return
session_dir = os.path.join(config.root_dir, session)
sha_results = load_files(t, sftp, session_dir, 2 * 60)
if sha_results:
download_files(sftp, session, sha_results)
else:
logger.info("Ignoring recent session {}".format(session))
def process_host(t):
sftp = paramiko.SFTPClient.from_transport(t)
dirlist = sftp.listdir(config.root_dir)
for d in dirlist:
if not disk_ok():
return
process_session(t, sftp, d)
def disk_ok():
df = subprocess.Popen(["df", "/data/"], stdout=subprocess.PIPE)
    output = df.communicate()[0].decode()  # df output is bytes on Python 3
device, size, used, available, percent, mountpoint = \
output.split("\n")[1].split()
u = int(used)
a = int(available)
s = int(size)
logger.info("Disk usage {}/{} {}".format(u, s,percent))
if s == 0:
return False
return (u * 100 / s) < config.disk_full_percent
def main():
init()
if not disk_ok():
logger.error("Disk full. Stopping.")
return
key = paramiko.RSAKey.from_private_key_file(config.key_path)
hosts = get_active_clients()
for host in hosts:
logger.info("Trying host {}".format(host))
hostname = host["ip"]
t = connect(hostname, config.port, key)
if not t:
continue
process_host(t)
if __name__ == "__main__":
main()
| 2.203125 | 2 |
humanoid_2d/ocp/humanoid_ocp.py | Ipuch/Humanoid2D | 0 | 12788093 | import warnings
import biorbd_casadi as biorbd
import numpy as np
from scipy import interpolate
from bioptim import (
OdeSolver,
Node,
OptimalControlProgram,
ConstraintFcn,
DynamicsFcn,
ObjectiveFcn,
QAndQDotBounds,
QAndQDotAndQDDotBounds,
ConstraintList,
ObjectiveList,
DynamicsList,
Bounds,
BoundsList,
InitialGuessList,
ControlType,
Solver,
InitialGuess,
InterpolationType,
PhaseTransitionList,
PhaseTransitionFcn,
RigidBodyDynamics,
)
from ..initial_guess.humanoid_initial_pose import set_initial_pose
class HumanoidOcp:
def __init__(
self,
biorbd_model_path: str = None,
n_shooting: int = 10,
phase_time: float = 0.3,
n_threads: int = 8,
control_type: ControlType = ControlType.CONSTANT,
ode_solver: OdeSolver = OdeSolver.COLLOCATION(),
rigidbody_dynamics: RigidBodyDynamics = RigidBodyDynamics.ODE,
step_length: float = 0.8,
right_foot_location: np.array = np.zeros(3),
use_sx: bool = False,
):
self.biorbd_model_path = biorbd_model_path
self.n_shooting = n_shooting
self.phase_time = phase_time
self.n_threads = n_threads
self.control_type = control_type
self.ode_solver = ode_solver
self.rigidbody_dynamics = rigidbody_dynamics
if biorbd_model_path is not None:
self.biorbd_model = biorbd.Model(biorbd_model_path)
self.n_shooting = n_shooting
self.phase_time = phase_time
self._set_head()
self._set_knee()
self._set_shoulder()
self.n_q = self.biorbd_model.nbQ()
self.n_qdot = self.biorbd_model.nbQdot()
self.n_qddot = self.biorbd_model.nbQddot()
self.n_qdddot = self.n_qddot
self.n_tau = self.biorbd_model.nbGeneralizedTorque()
self.tau_min, self.tau_init, self.tau_max = -500, 0, 500
self.qddot_min, self.qddot_init, self.qddot_max = -1000, 0, 1000
self.qdddot_min, self.qdddot_init, self.qdddot_max = -10000, 0, 10000
self.right_foot_location = right_foot_location
self.step_length = step_length
self.initial_left_foot_location = right_foot_location - np.array([0, step_length / 2, 0])
self.final_left_foot_location = right_foot_location + np.array([0, step_length / 2, 0])
self.dynamics = DynamicsList()
self.constraints = ConstraintList()
self.objective_functions = ObjectiveList()
self.phase_transitions = PhaseTransitionList()
self.x_bounds = BoundsList()
self.u_bounds = BoundsList()
self.initial_states = []
self.x_init = InitialGuessList()
self.u_init = InitialGuessList()
self.control_type = control_type
self.control_nodes = Node.ALL if self.control_type == ControlType.LINEAR_CONTINUOUS else Node.ALL_SHOOTING
self._set_dynamics()
self._set_constraints()
self._set_objective_functions()
self._set_phase_transition()
self._set_boundary_conditions()
self._set_initial_guesses()
self.ocp = OptimalControlProgram(
self.biorbd_model,
self.dynamics,
self.n_shooting,
self.phase_time,
x_init=self.x_init,
x_bounds=self.x_bounds,
u_init=self.u_init,
u_bounds=self.u_bounds,
objective_functions=self.objective_functions,
constraints=self.constraints,
n_threads=n_threads,
control_type=self.control_type,
ode_solver=ode_solver,
use_sx=use_sx,
)
def _set_head(self):
self.has_head = False
for i in range(self.biorbd_model.nbSegment()):
seg = self.biorbd_model.segment(i)
if seg.name().to_string() == "Head":
self.has_head = True
break
def _set_knee(self):
self.has_knee = False
for i in range(self.biorbd_model.nbSegment()):
seg = self.biorbd_model.segment(i)
if seg.name().to_string() == "RShank":
self.has_knee = True
break
def _set_shoulder(self):
self.has_shoulder = False
for i in range(self.biorbd_model.nbSegment()):
seg = self.biorbd_model.segment(i)
if seg.name().to_string() == "RArm":
self.has_shoulder = True
break
def _set_dynamics(self):
# warnings.warn("not implemented under this version of bioptim")
self.dynamics.add(
DynamicsFcn.TORQUE_DRIVEN, rigidbody_dynamics=self.rigidbody_dynamics, with_contact=True, phase=0
)
# self.dynamics.add(DynamicsFcn.TORQUE_DRIVEN, with_contact=True, phase=0)
def _set_objective_functions(self):
# --- Objective function --- #
self.objective_functions.add(ObjectiveFcn.Lagrange.MINIMIZE_CONTROL, key="tau", phase=0)
idx_stability = [0, 1, 2]
if self.has_head:
idx_stability.append(3)
# torso stability
self.objective_functions.add(ObjectiveFcn.Lagrange.MINIMIZE_QDDOT, phase=0, index=idx_stability, weight=0.01)
# head stability
if self.has_head:
self.objective_functions.add(
ObjectiveFcn.Lagrange.MINIMIZE_QDDOT, derivative=True, phase=0, index=3, weight=0.01
)
self.objective_functions.add(
ObjectiveFcn.Lagrange.MINIMIZE_STATE, key="qdot", phase=0, index=3, weight=0.01
)
# keep velocity CoM around 1.5 m/s
self.objective_functions.add(
ObjectiveFcn.Mayer.MINIMIZE_COM_VELOCITY, index=1, target=1.5, node=Node.START, weight=1000
)
self.objective_functions.add(
ObjectiveFcn.Mayer.MINIMIZE_COM_VELOCITY, index=1, target=1.5, node=Node.END, weight=1000
)
# instead of phase transition along z
self.objective_functions.add(ObjectiveFcn.Lagrange.MINIMIZE_COM_VELOCITY, index=2, weight=0.1)
if (
self.rigidbody_dynamics == RigidBodyDynamics.DAE_INVERSE_DYNAMICS_JERK
or self.rigidbody_dynamics == RigidBodyDynamics.DAE_FORWARD_DYNAMICS_JERK
):
self.objective_functions.add(ObjectiveFcn.Lagrange.MINIMIZE_CONTROL, phase=0, key="qdddot", weight=1e-4)
def _set_constraints(self):
# --- Constraints --- #
# Contact force in Z are positive
self.constraints.add(
ConstraintFcn.TRACK_CONTACT_FORCES, min_bound=0, max_bound=np.inf, node=Node.ALL, contact_index=1, phase=0
) # FP0 > 0 en Z
# contact node at zero position and zero speed
# node = Node.ALL if self.implicit_dynamics else Node.START
node = Node.START
self.constraints.add(
ConstraintFcn.TRACK_MARKERS, node=node, target=self.right_foot_location, marker_index="RFoot", phase=0
)
self.constraints.add(ConstraintFcn.TRACK_MARKERS_VELOCITY, node=node, marker_index="RFoot", phase=0)
# node = Node.END
# self.constraints.add(
# ConstraintFcn.TRACK_MARKERS, node=node, target=self.right_foot_location, marker_index="RFoot", phase=0
# )
# self.constraints.add(ConstraintFcn.TRACK_MARKERS_VELOCITY, node=node, marker_index="RFoot", phase=0)
# first and last step constraints
self.constraints.add(
ConstraintFcn.TRACK_MARKERS,
target=self.initial_left_foot_location,
node=Node.START,
marker_index="LFoot",
phase=0,
)
self.constraints.add(
ConstraintFcn.TRACK_MARKERS,
target=self.final_left_foot_location,
node=Node.END,
marker_index="LFoot",
phase=0,
)
# Ensure lift of foot
if self.has_knee:
self.constraints.add(
ConstraintFcn.TRACK_MARKERS,
index=2,
min_bound=0.05,
max_bound=np.inf,
node=Node.MID,
marker_index="LFoot",
phase=0,
)
def _set_phase_transition(self):
idx = [0, 1, 2]
        idx = idx + [3] if self.has_head else idx
self.phase_transitions.add(PhaseTransitionFcn.CYCLIC, index=idx, weight=1000)
def _set_boundary_conditions(self):
self.x_bounds = BoundsList()
self.x_bounds.add(
bounds=QAndQDotAndQDDotBounds(self.biorbd_model)
if self.rigidbody_dynamics == RigidBodyDynamics.DAE_INVERSE_DYNAMICS_JERK
or self.rigidbody_dynamics == RigidBodyDynamics.DAE_FORWARD_DYNAMICS_JERK
else QAndQDotBounds(self.biorbd_model)
)
nq = self.n_q
        self.x_bounds[0].max[2, :] = 0  # torso bent forward
if self.has_head:
self.x_bounds[0][nq + 3, 0] = 0 # head velocity zero at the beginning
self.x_bounds[0][nq + 3, -1] = 0 # head velocity zero at the end
if self.has_knee:
self.x_bounds[0].min[nq - 2 : nq, 0] = -np.pi / 8 # driving knees
# Supervised shoulders
if self.has_shoulder:
i = 1 if self.has_head else 0
self.x_bounds[0][5 + i, 0] = -np.pi / 6
self.x_bounds[0][6 + i, 0] = np.pi / 6
self.x_bounds[0][5 + i, -1] = np.pi / 6
self.x_bounds[0][6 + i, -1] = -np.pi / 6
self.x_bounds[0][5 + i + nq, 0] = 0
self.x_bounds[0][5 + i + nq, -1] = 0
self.x_bounds[0][6 + i + nq, 0] = 0
self.x_bounds[0][6 + i + nq, -1] = 0
# Unsupervised arms not working trying another time with cyclic constraints
# x_bounds[0].max[5, 0] = -1e-5 # position is negative at start
# x_bounds[0].min[6, 0] = 1e-5 # position is positive at start
#
# x_bounds[0].min[5, -1] = 1e-5 # position is positive at the end
# x_bounds[0].max[6, -1] = -1e-5 # position is negative at the end
#
# x_bounds[0][n_q + 5, [0, -1]] = 0 # velocity of shoulders zero at begining and end
# x_bounds[0][n_q + 6, [0, -1]] = 0 # velocity of shoulders zero at begining and end
# x_bounds[0].max[n_q + 6, 1] = -1e-5 # velocity of left shoulder negative
# x_bounds[0].min[n_q + 6, 1] = -5 # velocity of left shoulder negative
# x_bounds[0].min[n_q + 5, 1] = 1e-5 # velocity of right shoulder positive
# x_bounds[0].max[n_q + 5, 1] = 5 # velocity of right shoulder positive
if self.rigidbody_dynamics == RigidBodyDynamics.DAE_INVERSE_DYNAMICS:
self.u_bounds.add(
[self.tau_min] * self.n_tau
+ [self.qddot_min] * self.n_qddot
+ [self.qddot_min] * self.biorbd_model.nbContacts(),
[self.tau_max] * self.n_tau
+ [self.qddot_max] * self.n_qddot
+ [self.qddot_max] * self.biorbd_model.nbContacts(),
)
elif self.rigidbody_dynamics == RigidBodyDynamics.DAE_FORWARD_DYNAMICS:
self.u_bounds.add(
[self.tau_min] * self.n_tau + [self.qddot_min] * self.n_qddot,
[self.tau_max] * self.n_tau + [self.qddot_max] * self.n_qddot,
)
elif self.rigidbody_dynamics == RigidBodyDynamics.DAE_INVERSE_DYNAMICS_JERK:
self.u_bounds.add(
[self.tau_min] * self.n_tau
+ [self.qdddot_min] * self.n_qddot
+ [self.qddot_min] * self.biorbd_model.nbContacts(),
[self.tau_max] * self.n_tau
+ [self.qdddot_max] * self.n_qddot
+ [self.qddot_max] * self.biorbd_model.nbContacts(),
)
elif self.rigidbody_dynamics == RigidBodyDynamics.DAE_FORWARD_DYNAMICS_JERK:
self.u_bounds.add(
[self.tau_min] * self.n_tau + [self.qdddot_min] * self.n_qddot,
[self.tau_max] * self.n_tau + [self.qdddot_max] * self.n_qddot,
)
else:
self.u_bounds.add([self.tau_min] * self.n_tau, [self.tau_max] * self.n_tau)
# root is not actuated
self.u_bounds[0][:3, :] = 0
def _set_initial_guesses(self):
"""
Set initial guess for the optimization problem.
"""
# --- Initial guess --- #
q0 = [0] * self.n_q
# Torso over the floor and bent
q0[1] = 0.8
q0[2] = -np.pi / 6
self.q0i = set_initial_pose(
self.biorbd_model_path, np.array(q0), self.right_foot_location, self.initial_left_foot_location
)
self.q0end = set_initial_pose(
self.biorbd_model_path, np.array(q0), self.right_foot_location, self.final_left_foot_location
)
qdot0 = [0] * self.n_qdot
X0i = []
X0i.extend(self.q0i)
X0i.extend(qdot0)
X0end = []
X0end.extend(self.q0end)
X0end.extend(qdot0)
if (
self.rigidbody_dynamics == RigidBodyDynamics.DAE_INVERSE_DYNAMICS_JERK
or self.rigidbody_dynamics == RigidBodyDynamics.DAE_FORWARD_DYNAMICS_JERK
):
X0i.extend([0] * self.n_qddot)
X0end.extend([0] * self.n_qddot)
# X0i.extend([0] * self.n_qddot + [0] * self.biorbd_model.nbContacts())
# X0end.extend([0] * self.n_qddot + [0] * self.biorbd_model.nbContacts())
x = np.linspace(0, self.phase_time, 2)
y = np.array([X0i, X0end]).T
f = interpolate.interp1d(x, y)
x_new = np.linspace(0, self.phase_time, self.n_shooting + 1)
X0 = f(x_new) # use interpolation function returned by `interp1d`
self._set_initial_states(X0)
self._set_initial_controls()
def _set_initial_states(self, X0: np.array = None):
if X0 is None:
self.x_init.add([0] * (self.n_q + self.n_q))
else:
if X0.shape[1] != self.n_shooting + 1:
X0 = self._interpolate_initial_states(X0)
if not self.ode_solver.is_direct_shooting:
n = self.ode_solver.polynomial_degree
X0 = np.repeat(X0, n + 1, axis=1)
X0 = X0[:, :-n]
self.x_init.add(X0, interpolation=InterpolationType.EACH_FRAME)
def _set_initial_controls(self, U0: np.array = None):
if U0 is None:
if self.rigidbody_dynamics == RigidBodyDynamics.DAE_INVERSE_DYNAMICS:
self.u_init.add(
[self.tau_init] * self.n_tau
+ [self.qddot_init] * self.n_qddot
+ [5] * self.biorbd_model.nbContacts()
)
elif self.rigidbody_dynamics == RigidBodyDynamics.DAE_INVERSE_DYNAMICS_JERK:
self.u_init.add(
[self.tau_init] * self.n_tau
+ [self.qdddot_init] * self.n_qdddot
+ [5] * self.biorbd_model.nbContacts()
)
elif self.rigidbody_dynamics == RigidBodyDynamics.DAE_FORWARD_DYNAMICS_JERK:
self.u_init.add([self.tau_init] * self.n_tau + [self.qdddot_init] * self.n_qdddot)
elif self.rigidbody_dynamics == RigidBodyDynamics.DAE_FORWARD_DYNAMICS:
self.u_init.add([self.tau_init] * self.n_tau + [self.qddot_init] * self.n_qddot)
else:
self.u_init.add([self.tau_init] * self.n_tau)
else:
if U0.shape[1] != self.n_shooting:
U0 = self._interpolate_initial_controls(U0)
self.u_init.add(U0, interpolation=InterpolationType.EACH_FRAME)
def _interpolate_initial_states(self, X0: np.array):
print("interpolating initial states to match the number of shooting nodes")
x = np.linspace(0, self.phase_time, X0.shape[1])
y = X0
f = interpolate.interp1d(x, y)
x_new = np.linspace(0, self.phase_time, self.n_shooting + 1)
y_new = f(x_new) # use interpolation function returned by `interp1d`
return y_new
def _interpolate_initial_controls(self, U0: np.array):
print("interpolating initial controls to match the number of shooting nodes")
x = np.linspace(0, self.phase_time, U0.shape[1])
y = U0
f = interpolate.interp1d(x, y)
x_new = np.linspace(0, self.phase_time, self.n_shooting)
y_new = f(x_new) # use interpolation function returned by `interp1d`
return y_new
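# Minimal construction sketch (illustrative only; the model path is made up and the solve call
# assumes bioptim's standard OptimalControlProgram API):
#   ocp = HumanoidOcp(biorbd_model_path="models/humanoid.bioMod", n_shooting=20, phase_time=0.3)
#   solution = ocp.ocp.solve(Solver.IPOPT())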
| 2.125 | 2 |
LianJia-Crawler/LianJia.py | xrandx/Python-Items | 0 | 12788094 | # Requires openpyxl, pandas and BeautifulSoup
# just install them with pip
import requests
import re
import pandas as pd
from bs4 import BeautifulSoup
region = ['ganjingzi', 'shahekou', 'zhongshan', 'xigang', 'gaoxinyuanqu']
# this list holds the district names in pinyin
regnam = ['甘井子', '沙河口', '中山', '西岗', '高新园']  # this list holds the district names in Chinese
page = 5
reTryTime = 5
price=[] # total price of each listing
uprice=[] # price per square metre
house=[] # housing estate / community name
room=[]
area=[]
direct=[]
decorate=[]
elevator=[]
def generate_allurl(page):
url = 'http://dl.lianjia.com/ershoufang/{}/pg{}/'
    # change this url to crawl a different city
for url_region in range(len(region)):
print("\n开始爬取地区:"+ regnam[url_region] + "\n")
for url_next in range(1,int(page) + 1):
print("正在爬取第"+ str(url_next) + "页")
yield url.format(region[url_region], url_next)
def get_allurl(generate_allurl):
gotData = False
reTry = 0
while reTry < reTryTime and not gotData:
try:
reTry += 1
get_url = requests.get(generate_allurl, timeout=1)
if get_url.status_code == 200:
re_set = re.compile('<li.*?class="clear">.*?<a.*?class="img.*?".*?href="(.*?)"')
re_get = re.findall(re_set,get_url.text)
gotData = True
return re_get
except:
pass
def open_url(re_get):
gotData = False
reTry = 0
while reTry < reTryTime and not gotData:
try:
reTry += 1
res = requests.get(re_get, timeout=1)
if res.status_code == 200:
soup = BeautifulSoup(res.text,'lxml')
price.append(soup.select('.total')[0].text + '万')
uprice.append(soup.select('.unitPriceValue')[0].text)
house.append(soup.select('.communityName > a')[0].text)
room.append(soup.find("div", class_="room").find("div", class_="mainInfo").text)
area.append(soup.find("div", class_="area").find("div", class_="mainInfo").text)
direct.append(soup.find("div", class_="type").find("div", class_="mainInfo").text)
decorate.append(soup.find("div", class_="introContent").find_all("li")[8].text[4:])
elevator.append(soup.find("div", class_="introContent").find_all("li")[11].text[4:])
gotData = True
except:
pass
def toTxt():
print("\n开始保存txt文件……\n")
for regnum in range(len(region)):
print("录入" + regnam[regnum] + "数据")
        with open(regnam[regnum] + '.txt', 'w') as f: # create and open a txt file
            for i in range(len(price)): # loop over every scraped listing
                f.write(str(price[i]) + ' | ' + str(uprice[i]) + ' | ' + str(house[i]) + ' | ' + str(room[i]) + ' | ' + str(area[i]) + ' | ' + str(direct[i]) + ' | ' + str(decorate[i]) + ' | ' + str(elevator[i]) +'\n') # write one listing per line
print('已保存为 ' + regnam[regnum] + '.txt ')
def toXls():
print("\n开始将所有地区数据保存为xls文件……\n")
df = pd.DataFrame({
"总价": price,
"每平米均价": uprice,
"房屋名称": house,
"格局": room,
"面积": area,
"朝向": direct,
"装修": decorate,
"电梯": elevator
})
df.to_excel('大连链家二手房.xlsx',sheet_name='大连链家二手房')
print("已保存为 大连链家二手房.xlsx")
def main():
page = input('输入各地区生成页数:')
print()
for i in generate_allurl(page):
print(i)
url_tmp = get_allurl(i)
for j in url_tmp:
info = open_url(j)
toTxt()
print()
toXls()
print("完成")
if __name__ == '__main__':
main()
# def get_allurl(generate_allurl):
# get_url = requests.get(generate_allurl,)
# if get_url.status_code == 200:
# re_set = re.compile('<li.*?class="clear">.*?<a.*?class="img.*?".*?href="(.*?)"')
# re_get = re.findall(re_set,get_url.text)
# return re_get
# def open_url(re_get):
# res = requests.get(re_get, timeout=0.1)
# if res.status_code == 200:
# soup = BeautifulSoup(res.text,'lxml')
# price.append(soup.select('.total')[0].text + '万')
# uprice.append(soup.select('.unitPriceValue')[0].text)
# house.append(soup.select('.communityName > a')[0].text)
# room.append(soup.find("div", class_="room").find("div", class_="mainInfo").text)
# area.append(soup.find("div", class_="area").find("div", class_="mainInfo").text)
# direct.append(soup.find("div", class_="type").find("div", class_="mainInfo").text)
# decorate.append(soup.find("div", class_="introContent").find_all("li")[8].text[4:])
# elevator.append(soup.find("div", class_="introContent").find_all("li")[11].text[4:])
| 2.84375 | 3 |
Main.py | DNLINYJ/Biilibili_All_Danmu | 13 | 12788095 | from GetAllDanmuInfo_V2 import GetAllDanmuInfo
from GetClearCommandInstruction import GetClearCommandInstruction
from GetVideoTitle import GetVideoTitle
from ExportAllDanmu import ExportAllDanmu
from CheckLoginSituation import CheckLoginSituation
import base64
import Sqlite3_Bilibili
import sys
import os
headers = {
'cookie': "",
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/92.0.4515.131 Safari/537.36',
'Accept': '*/*',
'Accept-Encoding': 'gzip, deflate',
'Accept-Language': 'zh-CN,zh;q=0.9',
'Referer': 'https://www.bilibili.com'
}
def isnum(n):
try:
float(int(n))
return True
except:
return False
def FromUrlGetAidOrBvid(video_url):
base_url_list = [
"https://www.bilibili.com/video/"
]
if "http" in video_url:
for i in range(len(base_url_list)):
if base_url_list[i] in video_url:
return str(video_url).replace(base_url_list[i],"").split("?",1)[0].replace("/","")
return video_url
def Meum():
clear_comand_instruction = GetClearCommandInstruction()
Index_Server = Sqlite3_Bilibili.Bilibili_Danmu_Index_Server("root", "root")
if os.path.exists(".config") == False:
while True:
print("检测到您第一次使用本程序,请输入您的SESSDATA便于接下来的操作")
print("若不清楚自己的SESSDATA的话,请查看README中的教程链接解决该问题")
print("格式为:SESSDATA=您获取的SESSDATA")
user_input = input(">>")
if "SESSDATA=" in user_input.upper():
with open(".config", "w", encoding="utf-8") as f:
f.write(base64.b64encode(user_input.encode("utf-8")).decode())
break
else:
print("请输入正确格式的SESSDATA!")
os.system(clear_comand_instruction)
else:
with open(".config","r",encoding="utf-8") as f:
temp_sessdata = f.read()
temp_sessdata = base64.b64decode(temp_sessdata).decode()
headers["cookie"] = temp_sessdata
while CheckLoginSituation(headers) == 1:
os.system(clear_comand_instruction)
print("SESSDATA已过期,请重新输入您的SESSDATA")
print("若不清楚自己的SESSDATA的话,请查看README中的教程链接解决该问题")
print("格式为:SESSDATA=您获取的SESSDATA")
user_input = input(">>")
if "SESSDATA=" in user_input.upper():
with open(".config", "w", encoding="utf-8") as f:
f.write(base64.b64encode(user_input.encode("utf-8")).decode())
headers["cookie"] = temp_sessdata
else:
print("请输入正确格式的SESSDATA!")
os.system(clear_comand_instruction)
while True:
os.system(clear_comand_instruction)
while CheckLoginSituation(headers) == 1:
if os.path.exists(".config") == True:
print("SESSDATA已过期,请重新输入您的SESSDATA")
print("若不清楚自己的SESSDATA的话,请查看README中的教程链接解决该问题")
print("格式为:SESSDATA=您获取的SESSDATA")
user_input = input(">>")
if "SESSDATA=" in user_input.upper():
with open(".config", "w", encoding="utf-8") as f:
f.write(base64.b64encode(user_input.encode("utf-8")).decode())
headers["cookie"] = temp_sessdata
else:
print("请输入正确格式的SESSDATA!")
os.system(clear_comand_instruction)
else:
print("警告!!!未登录!!!无法获取历史弹幕!!!")
print("请查看文档进行登录!!")
input("按下任意键退出...")
sys.exit(0)
if os.path.exists("sqlite3") == False:
os.makedirs("sqlite3")
if os.path.exists("Export") == False:
os.makedirs("Export")
print("Bilibili(B站)全弹幕获取程序")
print("作者:菠萝小西瓜(DNLINYJ)")
print("Github:https://github.com/DNLINYJ")
print("注意:仅供个人学习交流使用,切勿用于非法用途!")
print("---------------------------------------------------------")
print("1) 收集指定视频全部历史弹幕(数据量较大时所用时间较久)")
print("2) 导出数据库内指定视频全部历史弹幕")
print("3) 收集并导出指定视频全部历史弹幕(数据量较大时所用时间较久,谨慎使用)")
print("4) 退出")
user_input = str(input(">>"))
if user_input == "1":
os.system(clear_comand_instruction)
print("请输入B站视频的AV号/BV号,或者输入B站视频地址(仅支持单P视频/多P视频中的单P下载)")
user_input = str(input(">>"))
user_input = FromUrlGetAidOrBvid(user_input)
result = GetAllDanmuInfo(user_input, headers)
if result == 0:
print(f"获取视频:{GetVideoTitle(user_input, headers)} 的所有历史弹幕成功.")
input("按下任意键继续...")
elif result == 2:
input("按下任意键继续...")
else:
print(f"获取视频:{GetVideoTitle(user_input, headers)} 的所有历史弹幕失败.")
input("按下任意键继续...")
elif user_input == "2":
os.system(clear_comand_instruction)
Video_Info_List = Index_Server.GetAllVideoDatabaseName()
if Video_Info_List != None:
print("历史弹幕数据库中存在的视频如下:")
print("-----------------------------------------------------------")
for i in range(len(Video_Info_List)):
print(f"{i + 1}) 视频标题:{Video_Info_List[i][1]} 视频AV号:{Video_Info_List[i][2]} 保存的弹幕结束日期:{Video_Info_List[i][4]}")
print("-----------------------------------------------------------")
print("请输入您想导出的视频序号")
user_input = input(">>")
if isnum(user_input) == False:
print("请输入正确的选项!")
input("按下回车继续运行...")
os.system(clear_comand_instruction)
else:
                    if int(user_input) < 1 or int(user_input) > len(Video_Info_List):
print("请输入正确的选项!")
input("按下回车继续运行...")
else:
                        ExportAllDanmu(Video_Info_List[int(user_input) - 1][2])
input("按下回车继续运行...")
os.system(clear_comand_instruction)
else:
print("历史弹幕数据库中无可用视频历史弹幕可导出!")
input("按下回车继续运行...")
os.system(clear_comand_instruction)
elif user_input == "3":
os.system(clear_comand_instruction)
print("请输入B站视频的AV号/BV号,或者输入B站视频地址(仅支持单P视频/多P视频中的单P下载)")
user_input = str(input(">>"))
user_input = FromUrlGetAidOrBvid(user_input)
result = GetAllDanmuInfo(user_input, headers)
if result == 0:
print(f"获取视频{GetVideoTitle(user_input, headers)} 的所有历史弹幕成功.")
ExportAllDanmu(user_input)
input("按下任意键继续...")
elif result == 2:
input("按下任意键继续...")
else:
print(f"获取视频{GetVideoTitle(user_input, headers)} 的所有历史弹幕失败.")
input("按下任意键继续...")
elif user_input == "4":
sys.exit(0)
if __name__ == '__main__':
Meum() | 2.3125 | 2 |
utils/build_samplers.py | MoonBlvd/deeplabv3 | 0 | 12788096 | <gh_stars>0
import torch
import samplers
def make_data_sampler(dataset, shuffle, distributed, is_train=True):
# Only do weighted sampling for training
if distributed:
# if is_train:
# return samplers.DistributedWeightedSampler(dataset, shuffle=shuffle)
# else:
return samplers.DistributedSampler(dataset, shuffle=shuffle)
if shuffle:
sampler = torch.utils.data.sampler.RandomSampler(dataset)
# sampler = torch.utils.data.sampler.WeightedRandomSampler(dataset.weights, num_samples=len(dataset))
else:
sampler = torch.utils.data.sampler.SequentialSampler(dataset)
return sampler
def make_batch_data_sampler(dataset,
sampler,
aspect_grouping,
batch_per_gpu,
max_iters=None,
start_iter=0,
dataset_name=None):
if aspect_grouping:
if not isinstance(aspect_grouping, (list, tuple)):
aspect_grouping = [aspect_grouping]
aspect_ratios = _compute_aspect_ratios(dataset, dataset_name=dataset_name)
group_ids = _quantize(aspect_ratios, aspect_grouping)
batch_sampler = samplers.GroupedBatchSampler(
sampler, group_ids, batch_per_gpu, drop_uneven=False)
else:
batch_sampler = torch.utils.data.sampler.BatchSampler(
sampler, batch_per_gpu, drop_last=False)
if max_iters is not None:
batch_sampler = samplers.IterationBasedBatchSampler(batch_sampler, max_iters, start_iter)
return batch_sampler | 2.34375 | 2 |
app/auth/views.py | kangangi/pitchIP | 1 | 12788097 | <filename>app/auth/views.py
from flask import render_template,redirect,url_for,flash, request
from app.models import User
from .forms import RegistrationForm, LoginForm
from .. import db
from flask_login import login_user, login_required, logout_user
from . import auth
from ..email import mail_message
#Views
@auth.route('/register', methods = ["GET", "POST"])
def register():
form = RegistrationForm()
title = "New User"
if form.validate_on_submit():
email = form.email.data
username = form.username.data
password = form.password.data
new_user = User(email = email,username = username, password =password)
new_user.save_user()
mail_message("Welcome to Pitcher", "email/welcome_user", new_user.email, user = new_user)
return redirect(url_for('auth.login'))
return render_template('auth/register.html', registration_form = form)
@auth.route('/login', methods = ['GET', 'POST'])
def login():
form = LoginForm()
title = 'Pitcher login'
if form.validate_on_submit():
user_email = form.email.data
user_password = form.password.data
remember = form.remember_me.data
user = User.query.filter_by(email = user_email).first()
if user is not None and user.verify_password(user_password):
login_user(user,remember)
flash("Welcome to Pitch Perfect")
return redirect(request.args.get('next') or url_for('main.index'))
flash("Invalid username or pasword")
return render_template("auth/login.html", login_form = form,title = title)
@auth.route('/logout')
@login_required
def logout():
logout_user()
return redirect(url_for('main.index')) | 2.703125 | 3 |
venv/lib/python3.6/site-packages/ansible_collections/community/hashi_vault/.github/actions/ansible-codecov/process.py | usegalaxy-no/usegalaxy | 1 | 12788098 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, <NAME> (@briantist)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import subprocess
import re
import getopt
from pathlib import Path
def get_flags(pattern, input):
patpat = r'\{([^\}]+)\}'
pats = re.findall(patpat, pattern)
matcher = re.sub(patpat, r'(.*?)', pattern)
match = re.search(matcher, input)
if match:
return [pats[i].replace('%', result) for i, result in enumerate(match.groups())]
return None
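# Illustrative trace (hypothetical pattern and input, not taken from any workflow config):
#   get_flags('coverage-{unit_%}.xml', 'coverage-py38.xml') -> ['unit_py38']
# Each "{...}" in the pattern becomes a lazy regex capture group, and any "%" inside the
# braces is replaced by the captured text to build the resulting flag name.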
def main(argv):
additional_flags = file_flag_pattern = directory_flag_pattern = directory = fail_on_error = None
opts, args = getopt.getopt(argv, '', [
'directory=',
'directory-flag-pattern=',
'file-flag-pattern=',
'additional-flags=',
'fail-on-error',
])
for opt, arg in opts:
if opt == '--directory':
directory = arg
elif opt == '--directory-flag-pattern':
directory_flag_pattern = arg
elif opt == '--file-flag-pattern':
file_flag_pattern = arg
elif opt == '--additional-flags':
additional_flags = arg
elif opt == '--fail-on-error':
fail_on_error = True
extra_flags = additional_flags.split(',') if additional_flags else []
flags = {}
directory = Path(directory) if directory else Path.cwd()
for f in directory.rglob('*'):
if f.is_file():
iflags = set()
if directory_flag_pattern:
for part in f.parent.parts:
dflags = get_flags(directory_flag_pattern, part)
if dflags:
iflags.update(dflags)
fflags = get_flags(file_flag_pattern, str(f.name))
if fflags:
iflags.update(fflags)
for flag in iflags:
flags.setdefault(flag, []).append(str(f.resolve()))
logextra = ' (+%r)' % extra_flags if extra_flags else ''
for flag, files in flags.items():
cmd = ['codecov', '-F', flag]
[cmd.extend(['-F', extra]) for extra in extra_flags]
[cmd.extend(['-f', file]) for file in files]
if fail_on_error:
cmd.append('-Z')
print('::group::Flag: %s%s' % (flag, logextra))
print('Executing: %r' % cmd)
subprocess.run(cmd, stderr=subprocess.STDOUT, check=True)
print('::endgroup::')
if __name__ == '__main__':
main(sys.argv[1:])
| 2.265625 | 2 |
cellphonedb/src/tests/cellphone_flask_test_case.py | chapuzzo/cellphonedb | 278 | 12788099 | import os
import random
import string
import time
from flask_testing import TestCase
from cellphonedb.src.app.cellphonedb_app import cellphonedb_app
from cellphonedb.src.local_launchers.local_collector_launcher import LocalCollectorLauncher
from cellphonedb.utils import utils
class CellphoneFlaskTestCase(TestCase):
@staticmethod
def fixtures_dir():
current_dir = os.path.dirname(os.path.realpath(__file__))
fixtures_dir = '{}/fixtures'.format(current_dir)
return fixtures_dir
@staticmethod
def reset_db():
cellphonedb_app.cellphonedb.database_manager.database.drop_everything()
cellphonedb_app.cellphonedb.database_manager.database.create_all()
def populate_db(self):
LocalCollectorLauncher().all('collect_protein.csv', 'collect_gene.csv', 'collect_complex.csv',
'collect_interaction.csv', self.fixtures_dir())
@staticmethod
def remove_file(file):
os.remove(file)
@staticmethod
def rand_string(digits=5):
return ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(digits))
@staticmethod
def get_test_filename(original_namefile, extension, prefix='TESTING'):
namefile = '{}_{}_{}_{}.{}'.format(prefix, original_namefile, int(time.time()),
CellphoneFlaskTestCase.rand_string(),
extension)
return namefile
def assert_file_not_empty(self, file, message=''):
if not message:
message = 'File {} is empty'.format(file)
read_data = utils.read_data_table_from_file(file)
self.assertFalse(read_data.empty, message)
def assert_file_exist(self, path_file, message=''):
if not message:
            message = 'File {} does not exist'.format(path_file)
self.assertTrue(os.path.isfile(path_file), message)
| 2.09375 | 2 |
Python/qaly/qaly.py | rvrheenen/OpenKattis | 12 | 12788100 | <reponame>rvrheenen/OpenKattis
years = int(input())
qaly = 0
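# QALY = sum of (quality-of-life weight q) * (time p spent at that quality) over all input lines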
for _ in range(years):
q, p = [float(x) for x in input().split()]
qaly += q * p
print(qaly)
| 3.125 | 3 |
bindings/python/hgdb/__init__.py | mithro/hgdb | 0 | 12788101 | try:
from .client import HGDBClient, HGDBClientException
from .symbol import (SymbolTableProvider, VariableSymbol, GeneratorVariableSymbol, ContextVariableSymbol,
BreakpointSymbol)
except ImportError:
pass
from .db import DebugSymbolTable
| 1.34375 | 1 |
scripts/kernelshap_ssd.py | dentou/detex | 1 | 12788102 | <gh_stars>1-10
import warnings
import context
import os
import torchvision
import torchvision.transforms.functional as TF
import torch
import captum
from captum.attr import visualization as captumvis
from detex.models import SSDWrapper
import numpy as np
import argparse
from detex.utils import (
draw_img_boxes,
compute_idx_to_class,
set_seed,
segment,
collect_detections,
)
from detex.utils.convert import tensorimg_to_npimg
from tqdm.auto import tqdm
import seaborn as sns
from detex.utils.storage import save_attribution
sns.reset_orig()
sns.set(rc={"savefig.bbox": "tight", "figure.dpi": 300, "savefig.dpi": 300})
class KernelShapExplainer:
def __init__(self, n_samples=2000, baseline=0.5, perturbations_per_eval=16):
self.n_samples = n_samples
self.baseline = baseline
self.perturbations_per_eval = perturbations_per_eval
@torch.no_grad()
def explain_single(self, img, segment_mask, model, box_id, box_attr, seed):
assert len(img.shape) == 3, img.shape # (C, H, W)
assert torch.is_tensor(img)
assert img.shape[0] == 3
assert img.dtype == torch.float, img.dtype
assert len(segment_mask.shape) == 3, segment_mask.shape # (1, H, W)
f = model.make_blackbox("captum", box_id, box_attr, device)
ks = captum.attr.KernelShap(f)
set_seed(seed)
feature_mask = segment_mask.unsqueeze(0)
input_img = img.unsqueeze(0)
attributions = ks.attribute(
input_img,
feature_mask=feature_mask,
baselines=self.baseline,
n_samples=self.n_samples,
perturbations_per_eval=self.perturbations_per_eval,
show_progress=True,
).cpu()
return attributions
def explain_coco(self, dataset, model, img_id_list, filepath=None, visdir=None):
for img_id in tqdm(img_id_list, desc="Picking img with id: "):
img_orig = dataset[img_id][0] # (C, H, W)
img = img_orig.clone()
spixel_mask = segment(img) # (H, W)
segment_mask = TF.to_tensor(spixel_mask) # (1, H, W)
with torch.no_grad():
orig_dets = model(img.unsqueeze(0).to(device))
dets = collect_detections(orig_dets)
del orig_dets
torch.cuda.empty_cache()
if not dets:
warnings.warn(
f"Empty detection for image {img_id}. Explanation skipped."
)
continue
total_box_nums = len(dets[0]["box_ids"])
for box_num in tqdm(range(total_box_nums), desc="Explaning box_num: "):
box_id = dets[0]["box_ids"][box_num] # only have 1 image
class_label = dets[0]["labels"][box_num]
box = dets[0]["boxes"][box_num]
score = dets[0]["scores"][box_num]
box_attr = 4 + class_label
attribution = self.explain_single(
img,
segment_mask=segment_mask,
model=model,
box_id=box_id,
box_attr=box_attr,
seed=42,
) # (1, C, H, W)
if filepath:
attribution_save = np.transpose(
attribution.squeeze().cpu().detach().numpy(), (1, 2, 0)
)[
None
] # (1, H, W, C)
meta = {
"explainer_engine": 'kSHAP',
"img_id": img_id,
"box_id": box_id,
"box_attr": box_attr,
"box_num": box_num,
"box": box,
"label": class_label,
"score": score,
}
save_attribution(attribution_save, filepath, meta)
if visdir:
if torch.is_tensor(attribution):
attr_vis = np.transpose(
attribution.squeeze().cpu().detach().numpy(), (1, 2, 0)
)
else:
attr_vis = attribution.squeeze()
img_orig_vis = tensorimg_to_npimg(img_orig) # uint8
idx_to_class = compute_idx_to_class(dataset.coco)
img_det_vis = draw_img_boxes(
img_orig_vis,
idx_to_class,
pred={
"boxes": [box],
"scores": [score],
"labels": [class_label],
"box_nums": [box_num],
},
)
fig, ax = captumvis.visualize_image_attr_multiple(
attr_vis,
img_det_vis,
["original_image", "blended_heat_map"],
["all", "all"],
show_colorbar=True,
alpha_overlay=0.5,
fig_size=(8, 8),
titles=[
f"[{box_id}]({box_num}){idx_to_class[class_label]}: {score}",
f"KSHAP(box_attr={box_attr})",
],
outlier_perc=1,
use_pyplot=False,
)
figname = f"kshap_{img_id}_{box_id}_{box_attr}.png"
os.makedirs(visdir, exist_ok=True)
figpath = os.path.join(visdir, figname)
print(f"Saving image to: {figpath}")
fig.savefig(figpath, dpi=300)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="KernelSHAP")
parser.add_argument(
"--first-images",
type=int,
default=1,
help="Run kshap on first x images in the dataset",
)
parser.add_argument(
"--batch-size",
default=16,
type=int,
help="Batch size for model pass",
)
parser.add_argument(
"--shap-samples",
default=2000,
type=int,
help="Number of samples for approximating Shapley values",
)
parser.add_argument(
"--baseline-value",
default=0.5,
type=float,
help="Value assigned to perturbed pixels (in the range [0, 1])",
)
parser.add_argument(
"--result-file",
default=None,
type=str,
help="HDF5 file to save attributions",
)
parser.add_argument(
"--show-dir",
default=None,
type=str,
help="Directory to store visualization",
)
parser.add_argument(
"--show",
action="store_true",
help="Visualize and save in dir specified by --show-dir",
)
args = parser.parse_args()
ROOT_DIR = os.path.abspath(".")
DATA_DIR = os.path.join(ROOT_DIR, "data")
print(DATA_DIR)
VAL_IMG_DIR = os.path.join(DATA_DIR, "val2017")
VAL_ANN_FILE = os.path.join(DATA_DIR, "annotations", "instances_val2017.json")
val_set = torchvision.datasets.CocoDetection(
root=VAL_IMG_DIR,
annFile=VAL_ANN_FILE,
transform=torchvision.transforms.ToTensor(),
)
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
print(f"Using device: {device}")
model = SSDWrapper(torchvision.models.detection.ssd300_vgg16(pretrained=True))
model.eval()
model.to(device)
img_id_list = np.arange(0, min(args.first_images, len(val_set))).tolist()
if args.show:
VISDIR = args.show_dir
else:
VISDIR = None
KSHAP_FILE = args.result_file
kernelshap = KernelShapExplainer(
n_samples=args.shap_samples,
baseline=args.baseline_value,
perturbations_per_eval=args.batch_size,
)
kernelshap.explain_coco(
val_set,
model,
img_id_list,
visdir=VISDIR,
filepath=KSHAP_FILE,
)
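# Example invocation (illustrative; paths and values are placeholders):
#   python scripts/kernelshap_ssd.py --first-images 2 --shap-samples 500 \
#       --batch-size 16 --result-file results/kshap.h5 --show --show-dir vis/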
| 1.804688 | 2 |
hw3/test.py | danieltsai0/CS294-112-HW3 | 0 | 12788103 | <gh_stars>0
import tensorflow as tf
import numpy as np
x = tf.placeholder(tf.int32, [None])
act_t = tf.one_hot(x, depth=5, on_value=1.0, off_value=0.0, dtype=tf.float32, name="action_one_hot")
y = tf.placeholder(tf.float32, [4,5])
k = tf.reduce_sum(tf.multiply(y, act_t), axis=1)  # sum, not max: max would return 0 whenever the selected Q-value is negative
# ind = tf.transpose(tf.stack([tf.to_int32(tf.range(y.get_shape()[0])),x]))
# k = tf.gather_nd(y,ind)
sess = tf.Session()
y = sess.run(k, feed_dict={x:[0,1,2,3], y:[[-1,2,3,4,0],[-5,6,-7,8,9],[9,10,-11,12,-21],[13,-14,15,-16,0]]})
print(y) | 2.046875 | 2 |
mandiokito/main.py | alefemoreira/mandiokito | 0 | 12788104 | from kivy.app import App
from kivy.uix.screenmanager import ScreenManager, Screen
import re
#from kivy.config import Config
from telas import *
#from kivy.garden.mapview import MapView, MapMarker
from kivy.uix.textinput import TextInput
from kivy.clock import Clock, mainthread
from kivy.uix.popup import Popup
#from plyer import gps
from kivy.uix.label import Label
from kivy.metrics import sp
#import openssl
#For buildoze spec
# (list) Permissions
#android.permissions = INTERNET,ACCESS_FINE_LOCATION,ACCESS_COARSE_LOCATION
# (list) Application requirements
#requirements = kivy,plyer
#Config.read('config.ini')
class LabelAdap(Label):
def __init__(self,**kwargs):
super().__init__(**kwargs)
self.size_hint = (1, None)
def on_size(self,*args):
        # leave a 10 sp margin inside the label
self.text_size = (self.width - sp(10), None)
def on_texture_size(self,*args):
self.size = self.texture_size
self.height += sp(20)
class FloatInput(TextInput):
pat = re.compile('[^0-9]')
def insert_text(self, substring, from_undo=False):
pat = self.pat
if '.' in self.text:
s = re.sub(pat, '', substring)
else:
s = '.'.join([re.sub(pat, '', s) for s in substring.split('.', 1)])
return super(FloatInput, self).insert_text(s, from_undo=from_undo)
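# Illustrative sketch, not part of the original app: the filtering rule that
# FloatInput.insert_text applies when the field holds no decimal point yet,
# strip anything that is not a digit and keep at most one '.'. The raw string
# below is made up.
def _float_filter_example():
    pat = re.compile('[^0-9]')
    raw = "12a.5x"
    # split once on '.', drop non-digits from each part, rejoin with a single '.'
    return '.'.join([re.sub(pat, '', s) for s in raw.split('.', 1)])  # "12.5"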
class Gerenciador(ScreenManager):
def __init__(self, **kw):
super().__init__(**kw)
#def current_location(self):
# try:
# gps.configure(on_location=self.on_location)
# gps.start()
# except NotImplementedError:
# popup = Popup(title="GPS Error",
# content=Label(text="GPS support is not implemented on your platform")
# ).open()
# Clock.schedule_once(lambda d: popup.dismiss(), 3)
#
#@mainthread
#def on_location(self, **kwargs):
# print(kwargs)
class Mandiokito(App):
def build (self):
return Gerenciador()
if __name__ == '__main__':
Mandiokito().run()
'''map = MapView(zoom=11, lon=50.6394, lat=3.057)
m1 = MapMarker(lon=-34.977078, lat=-7.138594)
map.add_marker(m1)
return map''' | 2.171875 | 2 |
main/courses/exams/tasks.py | csev/class2go | 2 | 12788105 | from celery import task
from c2g.models import ExamRecord, Course, Exam
from django.core.mail import EmailMessage, get_connection
from django.core.mail import send_mail
from storages.backends.s3boto import S3BotoStorage
import json
import settings
import datetime
FILE_DIR = getattr(settings, 'FILE_UPLOAD_TEMP_DIR', '/tmp')
AWS_ACCESS_KEY_ID = getattr(settings, 'AWS_ACCESS_KEY_ID', '')
AWS_SECRET_ACCESS_KEY = getattr(settings, 'AWS_SECRET_ACCESS_KEY', '')
AWS_SECURE_STORAGE_BUCKET_NAME = getattr(settings, 'AWS_SECURE_STORAGE_BUCKET_NAME', '')
@task()
def generate_submission_csv_task(course_id, exam_id, email_to):
course = Course.objects.get(id=course_id)
exam = Exam.objects.get(id=exam_id)
course_prefix = course.prefix
course_suffix = course.suffix
exam_slug = exam.slug
submitters = ExamRecord.objects.filter(exam=exam, complete=True, time_created__lt=exam.grace_period).values('student').distinct()
fname = course_prefix+"-"+course_suffix+"-"+exam_slug+"-"+datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")+".csv"
outfile = open(FILE_DIR+"/"+fname,"w+")
could_not_parse = ""
for s in submitters: #yes, there is sql in a loop here. We'll optimize later
latest_sub = ExamRecord.objects.values('student__username', 'time_created', 'json_data').filter(exam=exam, time_created__lt=exam.grace_period, student=s['student']).latest('time_created')
try:
sub_obj = json.loads(latest_sub['json_data']).iteritems()
for k,v in sub_obj:
vals = parse_val(v)
outstring = '"%s","%s","%s"\n' % (latest_sub['student__username'], k, vals)
outfile.write(outstring)
except ValueError:
could_not_parse += latest_sub['student__username']+ " " #Don't output if the latest submission was erroneous
outfile.write("\n")
#if there were items we could not parse
if could_not_parse:
#write the usernames at the beginning of the file
outfile.seek(0)
data=outfile.read()
outfile.seek(0)
outfile.truncate()
outfile.write("Could not parse data from the following users: " + could_not_parse + "\n")
outfile.write(data)
#write to S3
secure_file_storage = S3BotoStorage(bucket=AWS_SECURE_STORAGE_BUCKET_NAME, access_key=AWS_ACCESS_KEY_ID, secret_key=AWS_SECRET_ACCESS_KEY)
s3file = secure_file_storage.open("/%s/%s/reports/exams/%s" % (course_prefix, course_suffix, fname),'w')
try:
outfile.seek(0)
s3file.write(outfile.read())
finally:
s3file.close()
outfile.close()
dl_url = secure_file_storage.url_monkeypatched("/%s/%s/reports/exams/%s" % (course_prefix, course_suffix, fname), response_headers={'response-content-disposition': 'attachment'})
email = EmailMessage('%s: Submission CSV for %s' % (course.title, exam.title), "The student submissions CSV for %s is ready. Because the file can be large, please download it at %s." % (exam.title, dl_url),
settings.SERVER_EMAIL,
[email_to])
email.send()
def parse_val(v):
"""Helper function to parse AJAX submissions"""
if isinstance(v,list):
sorted_list = sorted(map(lambda li: li['value'], v))
return reduce(lambda x,y: x+y+",", sorted_list, "")
else:
try:
return v.get('value', "")
except (TypeError, AttributeError):
return str(v)
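# Illustrative sketch with made-up values: the two submission shapes parse_val
# handles, a list of {'value': ...} dicts for multi-select answers and a single
# dict for a one-value answer.
def _parse_val_example():
    multi = [{'value': 'b'}, {'value': 'a'}]
    single = {'value': '42'}
    # list values are sorted and joined with trailing commas
    assert parse_val(multi) == "a,b,"
    assert parse_val(single) == "42"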
| 1.984375 | 2 |
src/braket/circuits/noise.py | orclassiq/amazon-braket-sdk-python | 0 | 12788106 | <filename>src/braket/circuits/noise.py
# Copyright Amazon.com Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from typing import Any, Dict, Optional, Sequence
from braket.circuits.quantum_operator import QuantumOperator
from braket.circuits.qubit_set import QubitSet
class Noise(QuantumOperator):
"""
    Class `Noise` represents a noise channel that operates on one or multiple qubits. Noise
    channels are considered building blocks of quantum circuits that simulate noise. A noise channel
    can be used as an operator in an `Instruction` object, and it appears in the diagram when a user
    prints a circuit with `Noise`. This class is considered the noise channel definition, containing
    the metadata that defines what the noise channel is and what it does.
"""
def __init__(self, qubit_count: Optional[int], ascii_symbols: Sequence[str]):
"""
Args:
qubit_count (int, optional): Number of qubits this noise channel interacts with.
ascii_symbols (Sequence[str]): ASCII string symbols for this noise channel. These
are used when printing a diagram of circuits. Length must be the same as
`qubit_count`, and index ordering is expected to correlate with target ordering
on the instruction.
Raises:
ValueError: `qubit_count` is less than 1, `ascii_symbols` are None, or
length of `ascii_symbols` is not equal to `qubit_count`
"""
super().__init__(qubit_count=qubit_count, ascii_symbols=ascii_symbols)
@property
def name(self) -> str:
"""
Returns the name of the quantum operator
Returns:
The name of the quantum operator as a string
"""
return self.__class__.__name__
def to_ir(self, target: QubitSet) -> Any:
"""Returns IR object of quantum operator and target
Args:
target (QubitSet): target qubit(s)
Returns:
IR object of the quantum operator and target
"""
raise NotImplementedError("to_ir has not been implemented yet.")
def to_matrix(self, *args, **kwargs) -> Any:
"""Returns a list of matrices defining the Kraus matrices of the noise channel.
Returns:
Iterable[np.ndarray]: list of matrices defining the Kraus matrices of the noise channel.
"""
raise NotImplementedError("to_matrix has not been implemented yet.")
def __eq__(self, other):
if isinstance(other, Noise):
return self.name == other.name
return NotImplemented
def __repr__(self):
return f"{self.name}('qubit_count': {self.qubit_count})"
@classmethod
def register_noise(cls, noise: "Noise"):
"""Register a noise implementation by adding it into the Noise class.
Args:
noise (Noise): Noise class to register.
"""
setattr(cls, noise.__name__, noise)
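# Illustrative sketch; the channel below is hypothetical and not part of the
# Braket API. register_noise attaches a concrete channel class to Noise so it
# becomes reachable as an attribute of the Noise class.
def _register_noise_example():
    class ExampleChannel(Noise):
        def __init__(self):
            super().__init__(qubit_count=1, ascii_symbols=["EC"])
    Noise.register_noise(ExampleChannel)
    return Noise.ExampleChannel  # the class registered above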
class SingleProbabilisticNoise(Noise):
"""
Class `SingleProbabilisticNoise` represents the bit/phase flip noise channel on N qubits
parameterized by a single probability.
"""
def __init__(
self, probability: float, qubit_count: Optional[int], ascii_symbols: Sequence[str]
):
"""
Args:
probability (float): The probability that the noise occurs.
qubit_count (int, optional): The number of qubits to apply noise.
ascii_symbols (Sequence[str]): ASCII string symbols for the noise. These are used when
printing a diagram of a circuit. The length must be the same as `qubit_count`, and
index ordering is expected to correlate with the target ordering on the instruction.
Raises:
ValueError: If the `qubit_count` is less than 1, `ascii_symbols` are `None`, or
`ascii_symbols` length != `qubit_count`, `probability` is not `float`,
`probability` > 1/2, or `probability` < 0
"""
super().__init__(qubit_count=qubit_count, ascii_symbols=ascii_symbols)
if not isinstance(probability, float):
raise TypeError("probability must be float type")
if not (probability <= 0.5 and probability >= 0.0):
raise ValueError("probability must be a real number in the interval [0,1/2]")
self._probability = probability
@property
def probability(self) -> float:
"""
Returns:
probability (float): The probability that parametrizes the noise channel.
"""
return self._probability
def __repr__(self):
return f"{self.name}('probability': {self.probability}, 'qubit_count': {self.qubit_count})"
class SingleProbabilisticNoise_34(Noise):
"""
    Class `SingleProbabilisticNoise_34` represents the Depolarizing and TwoQubitDephasing noise
channels parameterized by a single probability.
"""
def __init__(
self, probability: float, qubit_count: Optional[int], ascii_symbols: Sequence[str]
):
"""
Args:
probability (float): The probability that the noise occurs.
qubit_count (int, optional): The number of qubits to apply noise.
ascii_symbols (Sequence[str]): ASCII string symbols for the noise. These are used when
printing a diagram of a circuit. The length must be the same as `qubit_count`, and
index ordering is expected to correlate with the target ordering on the instruction.
Raises:
ValueError: If the `qubit_count` is less than 1, `ascii_symbols` are `None`, or
`ascii_symbols` length != `qubit_count`, `probability` is not `float`,
`probability` > 3/4, or `probability` < 0
"""
super().__init__(qubit_count=qubit_count, ascii_symbols=ascii_symbols)
if not isinstance(probability, float):
raise TypeError("probability must be float type")
if not (probability <= 0.75 and probability >= 0.0):
raise ValueError("probability must be a real number in the interval [0,3/4]")
self._probability = probability
@property
def probability(self) -> float:
"""
Returns:
probability (float): The probability that parametrizes the noise channel.
"""
return self._probability
def __repr__(self):
return f"{self.name}('probability': {self.probability}, 'qubit_count': {self.qubit_count})"
class SingleProbabilisticNoise_1516(Noise):
"""
    Class `SingleProbabilisticNoise_1516` represents the TwoQubitDepolarizing noise channel
parameterized by a single probability.
"""
def __init__(
self, probability: float, qubit_count: Optional[int], ascii_symbols: Sequence[str]
):
"""
Args:
probability (float): The probability that the noise occurs.
qubit_count (int, optional): The number of qubits to apply noise.
ascii_symbols (Sequence[str]): ASCII string symbols for the noise. These are used when
printing a diagram of a circuit. The length must be the same as `qubit_count`, and
index ordering is expected to correlate with the target ordering on the instruction.
Raises:
ValueError: If the `qubit_count` is less than 1, `ascii_symbols` are `None`, or
`ascii_symbols` length != `qubit_count`, `probability` is not `float`,
`probability` > 15/16, or `probability` < 0
"""
super().__init__(qubit_count=qubit_count, ascii_symbols=ascii_symbols)
if not isinstance(probability, float):
raise TypeError("probability must be float type")
if not (probability <= 0.9375 and probability >= 0.0):
raise ValueError("probability must be a real number in the interval [0,15/16]")
self._probability = probability
@property
def probability(self) -> float:
"""
Returns:
probability (float): The probability that parametrizes the noise channel.
"""
return self._probability
def __repr__(self):
return f"{self.name}('probability': {self.probability}, 'qubit_count': {self.qubit_count})"
class MultiQubitPauliNoise(Noise):
"""
Class `MultiQubitPauliNoise` represents a general multi-qubit Pauli channel,
parameterized by up to 4**N - 1 probabilities.
"""
_allowed_substrings = {"I", "X", "Y", "Z"}
def __init__(
self,
probabilities: Dict[str, float],
qubit_count: Optional[int],
ascii_symbols: Sequence[str],
):
"""[summary]
Args:
probabilities (Dict[str, float]): A dictionary with Pauli string as the keys,
                and the probabilities as values, i.e. {"XX": 0.1, "IZ": 0.2}.
qubit_count (Optional[int]): The number of qubits the Pauli noise acts on.
ascii_symbols (Sequence[str]): ASCII string symbols for the noise. These are used when
printing a diagram of a circuit. The length must be the same as `qubit_count`, and
index ordering is expected to correlate with the target ordering on the instruction.
Raises:
ValueError: If the `qubit_count` is less than 1, `ascii_symbols` are `None`, or
`ascii_symbols` length != `qubit_count`. Also if `probabilities` are not `float`s,
any `probabilities` > 1, or `probabilities` < 0, or if the sum of all
probabilities is > 1,
or if "II" is specified as a Pauli string.
Also if any Pauli string contains invalid strings.
Also if the length of probabilities is greater than 4**qubit_count.
TypeError: If the type of the dictionary keys are not strings.
If the probabilities are not floats.
"""
super().__init__(qubit_count=qubit_count, ascii_symbols=ascii_symbols)
self.probabilities = probabilities
if not probabilities:
raise ValueError("Pauli dictionary must not be empty.")
identity = self.qubit_count * "I"
if identity in probabilities:
raise ValueError(
f"{identity} is not allowed as a key. Please enter only non-identity Pauli strings."
)
for pauli_string, prob in probabilities.items():
if not isinstance(pauli_string, str):
raise TypeError(f"Type of {pauli_string} was not a string.")
if len(pauli_string) != self.qubit_count:
raise ValueError(
(
"Length of each Pauli string must be equal to number of qubits. "
f"{pauli_string} had length {len(pauli_string)} instead of length {self.qubit_count}." # noqa
)
)
if not isinstance(prob, float):
raise TypeError(
(
"Probabilities must be a float type. "
f"The probability for {pauli_string} was of type {type(prob)}."
)
)
if not set(pauli_string) <= self._allowed_substrings:
raise ValueError(
(
"Strings must be Pauli strings consisting of only [I, X, Y, Z]. "
f"Received {pauli_string}."
)
)
if prob < 0.0 or prob > 1.0:
raise ValueError(
(
"Individual probabilities must be real numbers in the interval [0, 1]. "
f"Probability for {pauli_string} was {prob}."
)
)
total_prob = sum(probabilities.values())
if total_prob > 1.0 or total_prob < 0.0:
raise ValueError(
(
"Total probability must be a real number in the interval [0, 1]. "
f"Total probability was {total_prob}."
)
)
def __repr__(self):
return f"{self.name}('probabilities' : {self.probabilities}, 'qubit_count': {self.qubit_count})" # noqa
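# Illustrative sketch with made-up probabilities and symbols: the dictionary
# format MultiQubitPauliNoise validates, non-identity Pauli strings of length
# qubit_count mapping to float probabilities whose sum is at most 1.
def _multi_qubit_pauli_example():
    channel = MultiQubitPauliNoise(
        probabilities={"XX": 0.1, "IZ": 0.2},
        qubit_count=2,
        ascii_symbols=["PC", "PC"],
    )
    return channel.probabilities  # {"XX": 0.1, "IZ": 0.2}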
class PauliNoise(Noise):
"""
    Class `PauliNoise` represents a single-qubit Pauli noise channel
    acting on one qubit. It is parameterized by three probabilities.
"""
def __init__(
self,
probX: float,
probY: float,
probZ: float,
qubit_count: Optional[int],
ascii_symbols: Sequence[str],
):
"""
Args:
            probX (float): The coefficient of the Pauli X Kraus operator in the channel.
            probY (float): The coefficient of the Pauli Y Kraus operator in the channel.
            probZ (float): The coefficient of the Pauli Z Kraus operator in the channel.
qubit_count (int, optional): The number of qubits to apply noise.
ascii_symbols (Sequence[str]): ASCII string symbols for the noise. These are used when
printing a diagram of a circuit. The length must be the same as `qubit_count`, and
index ordering is expected to correlate with the target ordering on the instruction.
Raises:
ValueError: If the `qubit_count` is less than 1, `ascii_symbols` are `None`, or
`ascii_symbols` length != `qubit_count`, `probX` or `probY` or `probZ`
is not `float`, `probX` or `probY` or `probZ` > 1.0, or
`probX` or `probY` or `probZ` < 0.0, or `probX`+`probY`+`probZ` > 1
"""
super().__init__(qubit_count=qubit_count, ascii_symbols=ascii_symbols)
if not isinstance(probX, float):
raise TypeError("probX must be float type")
if not (probX <= 1.0 and probX >= 0.0):
raise ValueError("probX must be a real number in the interval [0,1]")
if not isinstance(probY, float):
raise TypeError("probY must be float type")
if not (probY <= 1.0 and probY >= 0.0):
raise ValueError("probY must be a real number in the interval [0,1]")
if not isinstance(probZ, float):
raise TypeError("probZ must be float type")
if not (probZ <= 1.0 and probZ >= 0.0):
raise ValueError("probZ must be a real number in the interval [0,1]")
if probX + probY + probZ > 1:
raise ValueError("the sum of probX, probY, probZ cannot be larger than 1")
self._probX = probX
self._probY = probY
self._probZ = probZ
@property
def probX(self) -> float:
"""
Returns:
probX (float): The probability of a Pauli X error.
"""
return self._probX
@property
def probY(self) -> float:
"""
Returns:
probY (float): The probability of a Pauli Y error.
"""
return self._probY
@property
def probZ(self) -> float:
"""
Returns:
probZ (float): The probability of a Pauli Z error.
"""
return self._probZ
def __repr__(self):
return f"{self.name}('probX': {self.probX}, 'probY': {self.probY}, \
'probZ': {self.probZ}, 'qubit_count': {self.qubit_count})"
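# Illustrative sketch with arbitrary values: PauliNoise only checks that each
# probability lies in [0, 1] and that the three together do not exceed 1.
def _pauli_noise_example():
    channel = PauliNoise(0.05, 0.05, 0.1, qubit_count=1, ascii_symbols=["PN"])
    return channel.probX + channel.probY + channel.probZ  # 0.2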
class DampingNoise(Noise):
"""
Class `DampingNoise` represents a damping noise channel
on N qubits parameterized by gamma.
"""
def __init__(self, gamma: float, qubit_count: Optional[int], ascii_symbols: Sequence[str]):
"""
Args:
gamma (float): Probability of damping.
qubit_count (int, optional): The number of qubits to apply noise.
ascii_symbols (Sequence[str]): ASCII string symbols for the noise. These are used when
printing a diagram of a circuit. The length must be the same as `qubit_count`, and
index ordering is expected to correlate with the target ordering on the instruction.
Raises:
ValueError: If the `qubit_count` is less than 1, `ascii_symbols` are `None`, or
`ascii_symbols` length != `qubit_count`, `gamma` is not `float`,
`gamma` > 1.0, or `gamma` < 0.0.
"""
super().__init__(qubit_count=qubit_count, ascii_symbols=ascii_symbols)
if not isinstance(gamma, float):
raise TypeError("gamma must be float type")
if not (gamma <= 1.0 and gamma >= 0.0):
raise ValueError("gamma must be a real number in the interval [0,1]")
self._gamma = gamma
@property
def gamma(self) -> float:
"""
Returns:
gamma (float): Probability of damping.
"""
return self._gamma
def __repr__(self):
return f"{self.name}('gamma': {self.gamma}, 'qubit_count': {self.qubit_count})"
class GeneralizedAmplitudeDampingNoise(DampingNoise):
"""
Class `GeneralizedAmplitudeDampingNoise` represents the generalized amplitude damping
noise channel on N qubits parameterized by gamma and probability.
"""
def __init__(
self,
gamma: float,
probability: float,
qubit_count: Optional[int],
ascii_symbols: Sequence[str],
):
"""
Args:
gamma (float): Probability of damping.
probability (float): Probability of the system being excited by the environment.
qubit_count (int): The number of qubits to apply noise.
ascii_symbols (Sequence[str]): ASCII string symbols for the noise. These are used when
printing a diagram of a circuit. The length must be the same as `qubit_count`, and
index ordering is expected to correlate with the target ordering on the instruction.
Raises:
ValueError: If the `qubit_count` is less than 1, `ascii_symbols` are `None`, or
`ascii_symbols` length != `qubit_count`, `probability` or `gamma` is not `float`,
`probability` > 1.0, or `probability` < 0.0, `gamma` > 1.0, or `gamma` < 0.0.
"""
super().__init__(gamma=gamma, qubit_count=qubit_count, ascii_symbols=ascii_symbols)
if not isinstance(probability, float):
raise TypeError("probability must be float type")
if not (probability <= 1.0 and probability >= 0.0):
raise ValueError("probability must be a real number in the interval [0,1]")
self._probability = probability
@property
def probability(self) -> float:
"""
Returns:
probability (float): Probability of the system being excited by the environment.
"""
return self._probability
def __repr__(self):
return f"{self.name}('gamma': {self.gamma}, 'probability': {self.probability}, \
'qubit_count': {self.qubit_count})"
| 3 | 3 |
tests/step_1_unit/test_orbs.py | logikal-code/pyorbs | 7 | 12788107 | from collections import namedtuple
from pytest import fixture, raises
from pyorbs.orbs import Orbs
@fixture
def orbs(tmp_path):
orbs = Orbs(str(tmp_path))
orbs.orbs = ['test']
return orbs
@fixture
def orb(mocker, orbs):
mocker.patch('pyorbs.orbs.exists', return_value=True)
return orbs.orb('test')
@fixture
def make(mocker):
return mocker.patch('pyorbs.orbs.Orb.make')
@fixture
def make_actions(mocker):
execute = mocker.patch('pyorbs.orbs.execute')
execute.return_value.returncode = 0
mocker.patch('pyorbs.orbs.Orb.activate')
return {
'write_text': mocker.patch('pyorbs.orbs.Path.write_text'),
'execute': execute,
'lock_reqs': mocker.patch('pyorbs.reqs.Requirements.lock')
}
def test_list(capsys, orbs):
orbs.list()
assert 'test' in capsys.readouterr().out
def test_freeze_invalid_paths(orbs):
with raises(ValueError):
orbs.freeze('invalid')
with raises(ValueError):
orbs.freeze('tests/reqs/empty')
def test_freeze_changed(orbs, make, reqs):
orbs.freeze(reqs('changed', raw=True))
assert make.called
def test_freeze_unchanged(orbs, make, reqs):
orbs.freeze(reqs(raw=True))
assert not make.called
def test_freeze_folder(orbs, make):
orbs.freeze('tests/reqs')
assert make.called
def test_toggle_glow_invalid_name(orbs):
with raises(ValueError):
orbs.toggle_glow('invalid')
def test_toggle_glow(orbs, monkeypatch):
assert orbs.glowing() is None
orbs.toggle_glow('test')
assert orbs.glowing() == 'test'
orbs.toggle_glow('test', force_on=True)
assert orbs.glowing() == 'test'
orbs.toggle_glow('test')
assert orbs.glowing() is None
monkeypatch.setenv('PYORBS_ACTIVE_ORB', 'test')
orbs.toggle_glow()
assert orbs.glowing() == 'test'
def test_orb_errors(orbs):
with raises(ValueError):
Orbs('invalid').orb()
with raises(ValueError):
orbs.orb('invalid')
with raises(RuntimeError):
orbs.orb()
def test_orb_shell(mocker, orbs):
execute = mocker.patch('pyorbs.orbs.execute')
orbs.orb(shell=True)
assert execute.called
def test_orb_glowing(orbs):
orbs.toggle_glow('test')
assert orbs.orb().name == 'test'
def test_orb(orbs):
assert orbs.orb('test').name == 'test'
def test_make_reqs_changed(orbs, reqs):
with raises(RuntimeError):
orbs.orb('test').make(reqs('changed'))
def test_make_venv_error(make_actions, orbs, reqs):
make_actions['execute'].return_value.returncode = 1
with raises(RuntimeError):
orbs.orb('test').make(reqs())
def test_make_install_error(make_actions, orbs, reqs):
make_actions['execute'].side_effect = [
namedtuple('CompletedProcess', 'returncode')(0),
namedtuple('CompletedProcess', 'returncode')(1),
]
with raises(RuntimeError):
orbs.orb('test').make(reqs())
def test_make(make_actions, orbs, reqs):
orbs.orb('test').make(reqs())
assert make_actions['write_text'].called
assert make_actions['execute'].called
assert not make_actions['lock_reqs'].called
def test_make_reqs_new(make_actions, orbs, reqs):
orbs.orb('test').make(reqs('new'))
assert make_actions['lock_reqs'].called
def test_make_update(make_actions, orbs, reqs):
orbs.orb('test').make(reqs('changed'), update=True)
assert make_actions['lock_reqs'].called
def test_make_quiet(mocker, make_actions, orbs, reqs):
mocked_print = mocker.patch('builtins.print')
orbs.orb('test').make(reqs(), quiet=True)
assert not mocked_print.called
assert not make_actions['lock_reqs'].called
def test_destroy_exit(monkeypatch, orbs):
monkeypatch.setenv('PYORBS_ACTIVE_ORB', 'test')
with raises(RuntimeError):
orbs.orb('test').destroy()
def test_destroy(mocker, orbs):
mocker.patch('pyorbs.orbs.Orbs.glowing', return_value='test')
toggle_glow = mocker.patch('pyorbs.orbs.Orbs.toggle_glow')
rmtree = mocker.patch('pyorbs.orbs.rmtree')
orbs.orb('test').destroy()
assert toggle_glow.called
assert rmtree.called
def test_info(capsys, mocker, orb):
execute = mocker.patch('pyorbs.orbs.execute')
execute.return_value.stdout = 'outdated'
orb.info()
assert 'outdated' in capsys.readouterr().out
def test_activate_invalid(orbs):
with raises(RuntimeError):
orbs.orb('test').activate()
def test_activate(mocker, orb):
toggle_glow = mocker.patch('pyorbs.orbs.Orbs.toggle_glow')
execute = mocker.patch('pyorbs.orbs.execute')
orb.activate()
toggle_glow.assert_called_with(orb.name, force_on=True)
execute.assert_called_with(init=orb.orb(), command=None, replace=True, capture=False)
def test_activate_run(mocker, orb):
execute = mocker.patch('pyorbs.orbs.execute')
command = 'source "%s"; test' % orb.orb()
orb.activate(run='test')
execute.assert_called_with(init=None, command=command, replace=True, capture=False)
orb.activate(run='test', no_cd=True, capture=True)
execute.assert_called_with(init=None, command=command, replace=False, capture=True)
| 2.171875 | 2 |
src/preprocess.py | KushalBKusram/AdvancedLaneFinder | 32 | 12788108 | <reponame>KushalBKusram/AdvancedLaneFinder
import cv2
import numpy as np
def grayscale(image):
return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
def warp(image):
w = image.shape[1]
h = image.shape[0]
src = np.float32([[200, 460], [1150, 460], [436, 220], [913, 220]])
dst = np.float32([[300, 720], [1000, 720], [400, 0], [1200, 0]])
M = cv2.getPerspectiveTransform(src, dst)
invM = cv2.getPerspectiveTransform(dst, src)
warped = cv2.warpPerspective(image, M, (image.shape[1], image.shape[0]), flags=cv2.INTER_LINEAR)
return warped, invM
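# Illustrative sketch; the file name and the processing order are assumptions,
# showing one way the helpers in this module could be chained for a frame.
def _pipeline_example():
    frame = cv2.imread("frame.jpg")
    birdseye, invM = warp(frame)
    binary = threshold(grayscale(birdseye))
    return binary, invM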
def threshold(image):
    ret, image = cv2.threshold(image, 220, 255, cv2.THRESH_BINARY)
if(ret == False):
print('Error in thresholding')
else:
return image | 2.8125 | 3 |
exercises/test_01_05_02.py | rly/nwb-python-course | 8 | 12788109 | def test():
# only check that the code runs and x is in the last line of the solution
assert "nwbfile.acquisition['MyTimeSeries']" in __solution__.strip().splitlines()[-1], "Use the name of the time series to get it from nwbfile.acquisition"
__msg__.good("Nice work!")
| 1.859375 | 2 |
django_giropay/admin.py | ParticulateSolutions/django-giropay | 3 | 12788110 | from django.contrib import admin
from .models import GiropayTransaction
class GiropayTransactionAdmin(admin.ModelAdmin):
list_display = ('merchant_tx_id', 'reference', 'latest_response_code')
list_filter = ('latest_response_code',)
ordering = ('-created_at',)
fields = ('merchant_tx_id', 'reference', 'latest_response_code')
admin.site.register(GiropayTransaction, GiropayTransactionAdmin)
| 1.679688 | 2 |
tests/test_job.py | jschnurr/scrapyscript | 96 | 12788111 | import pytest
from scrapyscript import Job, Processor, ScrapyScriptException
from spiders import ParamReturnSpider, TitleSpider
def test_job_raises_if_no_spider_provided():
with pytest.raises(TypeError):
Job()
def test_create_valid_job():
spider = TitleSpider
job = Job(spider)
assert isinstance(job, Job)
| 2.359375 | 2 |
runut.py | yzhang3beatit/dcdetector | 0 | 12788112 | import os
import sys
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../'))
import unittest
from test import test_rc
if __name__ == '__main__':
SeTestSuite = unittest.defaultTestLoader.discover(start_dir='./')
unittest.TextTestRunner(verbosity=2).run(unittest.TestSuite(SeTestSuite))
| 1.960938 | 2 |
secu/controls/todo_work.py | wancy86/tornado-seed | 0 | 12788113 | from ..base.request import BaseHandler, JsonResponse
from ..base.decrators import handle_request_exception, authenticated
from ..models import Work, PersonInCharge
from sqlalchemy import and_
from common.json import json_by_result
class WorkHandler(BaseHandler):
@authenticated
def post(self):
model = Work(**Work.Form(**self.POST).data)
self.db.add(model)
self.db.commit()
return JsonResponse(self, '000', data=model.json)
@authenticated
def delete(self):
model = self.db.query(Work).filter(Work.id == self.GETPOST['id']).first()
if model:
self.db.delete(model)
self.db.commit()
return JsonResponse(self, '000')
else:
self.db.commit()
            return JsonResponse(self, '001', msg="The record you want to delete does not exist!")
@authenticated
def put(self):
data = Work.PutForm(**self.POST).data
model = self.db.query(Work).filter(Work.id == self.POST['id']).first()
if model:
model.update(**data)
self.db.commit()
return JsonResponse(self, '000')
else:
            return JsonResponse(self, '001', msg="The record you want to update does not exist!")
@authenticated
def get(self):
ACTION = self.GET.get('ACTION', '')
if ACTION == 'ONE':
pk = self.GET.get('id')
if pk:
model = self.db.query(Work).filter(Work.id == pk).first()
if model:
return JsonResponse(self, '000', data=model.json)
else:
                    return JsonResponse(self, '001', msg="The record you queried does not exist!")
else:
                return JsonResponse(self, '100', msg="Parameter id is required!")
elif ACTION == 'QUERY':
query = '''
select w.id,
                w.itemid, -- task id
                w.desp, -- work description
                w.duration, -- time spent
                w.entry_date, -- created at
                u.fullname as create_user -- log author
from todo_work as w
left join secu_user as u on w.entry_user = u.id
where w.itemid = :itemid
order by w.identity desc limit {},{} ;
'''
count_query = '''
select count(1)
from todo_work as w
left join secu_user as u on w.entry_user = u.id
where w.itemid = :itemid;
'''
condition = {
'itemid': self.GET.get('itemid', ''),
}
record = self.GET.get('record')
pagesize = self.GET.get('pagesize', '10')
record = record if record else int(self.GET.get('pageindex', 0)) * int(pagesize)
query = query.format(record, pagesize)
count = self.db.execute(count_query, condition).scalar()
data = json_by_result(self.db.execute(query, condition).fetchall())
return JsonResponse(self, '000', data={'count': count, 'list': data})
elif ACTION == 'MYLOGS':
query = '''
SELECT w.id,
w.desp,
w.duration,
w.entry_date,
u.username,
i.title,
p.name as project_name
FROM todo_work w
JOIN secu_user u ON w.entry_user=u.id
LEFT JOIN todo_item i ON w.itemid=i.id
LEFT JOIN todo_project p ON i.projectid=p.id
where u.id = :userid and
(:projectid = '' or p.id = :projectid) and
(:item_name = '' or i.title like :item_name) and
(w.duration >= :min_duration) and
(w.duration <= :max_duration) and
datediff(w.entry_date,:min_entry_date)>=0 and
datediff(w.entry_date,:max_entry_date)<=0
order by w.entry_date desc limit {},{} ;
'''
count_query = '''
SELECT count(1)
FROM todo_work w
JOIN secu_user u ON w.entry_user=u.id
LEFT JOIN todo_item i ON w.itemid=i.id
LEFT JOIN todo_project p ON i.projectid=p.id
where u.id = :userid and
(:projectid = '' or p.id = :projectid) and
(:item_name = '' or i.title like :item_name) and
(w.duration >= :min_duration) and
(w.duration <= :max_duration) and
datediff(w.entry_date,:min_entry_date)>=0 and
datediff(w.entry_date,:max_entry_date)<=0;
'''
condition = {
'userid': self.GET.get('userid', self.session['userid']),
'projectid': self.GET.get('projectid', ''),
'item_name': '' if not self.GET.get('item_name') else '%{}%'.format(self.GET.get('item_name')),
'min_duration': int(self.GET.get('min_duration', 0)),
'max_duration': int(self.GET.get('max_duration', 999999)),
'min_entry_date': self.GET.get('min_entry_date', '1900-1-1'),
'max_entry_date': self.GET.get('max_entry_date', '2100-1-1')
}
record = self.GET.get('record')
pagesize = self.GET.get('pagesize', '10')
record = record if record else int(self.GET.get('pageindex', 0)) * int(pagesize)
query = query.format(record, pagesize)
count = self.db.execute(count_query, condition).scalar()
self.db.commit()
data = json_by_result(self.db.execute(query, condition).fetchall())
return JsonResponse(self, '000', data={'count': count, 'list': data})
else:
            return JsonResponse(self, '100', msg="Missing parameter ACTION")
| 2.078125 | 2 |
mininet/examples/helpful/exemplo_1/exemplo_1.py | gustavo978/helpful | 0 | 12788114 | <reponame>gustavo978/helpful
from mininet.net import Mininet
from mininet.node import Node, Switch, RemoteController
from mininet.link import Link, Intf
from mininet.log import setLogLevel, info
from mininet.cli import CLI
from p4_mininet import P4Switch, P4Host
import mininet.ns3
from mininet.ns3 import WIFISegment
import ns.core
import ns.network
import ns.wifi
import ns.csma
import ns.wimax
import ns.uan
import ns.netanim
from mininet.opennet import *
sw_path = "--behavioral-exe l2_switch"
json_path = "--json l2_switch.json"
thrift_port = 9090
pcap_dump = False
def main():
net = Mininet()
""" Uncomment following lines to add controller """
# net.addController('c0', controller=RemoteController, ip="127.0.0.1", port=6633)
sw0 = net.addSwitch('sw0',
sw_path = sw_path,
json_path = json_path,
thrift_port = thrift_port,
pcap_dump = pcap_dump)
#sw0 = net.addSwitch('sw0', ip=None, failMode='standalone')
ap0 = net.addSwitch('ap0', ip=None, failMode='standalone')
ap1 = net.addSwitch('ap1', ip=None, failMode='standalone')
sta0 = net.addHost('sta0', ip="10.0.0.1")
sta1 = net.addHost('sta1', ip="10.0.0.2")
wifi = WIFISegment ()
wifi.addAp(ap0, channelNumber=11, ssid="opennet_0")
wifi.addAp(ap1, channelNumber=11, ssid="opennet_1")
wifi.addSta(sta0, channelNumber=11, ssid="opennet_0")
wifi.addSta(sta1, channelNumber=11, ssid="opennet_1")
net.addLink(sw0, ap0)
net.addLink(sw0, ap1)
net.start()
mininet.ns3.start()
sta0.cmdPrint('ping -c2 ' + sta1.IP())
sta1.cmdPrint('ping -c2 ' + sta0.IP())
CLI(net)
mininet.ns3.stop()
mininet.ns3.clear()
net.stop()
if __name__ == '__main__':
setLogLevel('info')
main()
| 2.1875 | 2 |
tests/mdts/tests/functional_tests/test_midolman_and_interfaces.py | abel-navarro/midonet | 1 | 12788115 | # Copyright 2014 Midokura SARL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Tests Midolman agent / interface status updates.
"""
from hamcrest import assert_that
from hamcrest import none
from hamcrest import not_none
from nose.plugins.attrib import attr
from mdts.lib.physical_topology_manager import PhysicalTopologyManager
from mdts.tests.utils.utils import get_midonet_api
from mdts.tests.utils.utils import start_midolman_agents
from mdts.tests.utils.utils import stop_midolman_agents
from mdts.tests.utils.utils import check_all_midolman_hosts
import time
# Only a physical topology containing a single interface.
PTM = PhysicalTopologyManager(
'../topologies/mmm_physical_test_midolman_and_interfaces.yaml')
# We don't build a Physical Topology Manager in the setup. Instead we create
# a new interface inside 'test_new_interface_becomes_visible'.
def teardown():
time.sleep(2)
PTM.destroy()
@attr(version="v1.2.0", slow=False)
def test_host_status():
"""
Title: Test host status update
Scenario:
When: The test starts up,
Then: check if all Midolman agents are alive,
Then: stops all Midolman agents,
Then: check if all Midolman agents are now dead,
Then: restarts all Midolman agetns,
And: check again if all Midolman agents are alive,
"""
midonet_api = get_midonet_api()
check_all_midolman_hosts(midonet_api, alive=True)
stop_midolman_agents()
time.sleep(5)
check_all_midolman_hosts(midonet_api, alive=False)
start_midolman_agents()
time.sleep(30)
check_all_midolman_hosts(midonet_api, alive=True)
def get_interface(midonet_api, host_name, interface_name):
"""Returns an interface with the given name.
Args:
midonet_api: A MidonetApi instance
host_name: A MidoNet host name.
interface_name: An interface name.
Returns:
An interface if one is found with the specified host, otherwise
None.
"""
host = None
for h in midonet_api.get_hosts():
if h.get_id() == host_name: host = h
# No matching host found. Return None.
if not host: return None
interface = None
for i in host.get_interfaces():
if i.get_name() == interface_name:
interface = i
break
return interface
@attr(version="v1.2.0", slow=False)
def test_new_interface_becomes_visible():
"""
Title: Test new interface becomes visible
Scenario:
When: On start up, a Midolman sees no interface,
Then: adds a new interface,
And: Midolman detects a new interface.
"""
midonet_api = get_midonet_api()
new_interface = get_interface(
midonet_api, '00000000-0000-0000-0000-000000000001', 'interface_01')
# Test that no interface with name 'interface_01' exists.
assert_that(new_interface, none(), 'interface interface_01')
# Create a new interface 'interface_01'.
PTM.build()
time.sleep(5)
new_interface = get_interface(
midonet_api, '00000000-0000-0000-0000-000000000001', 'interface_01')
# Test that the created interface is visible.
assert_that(new_interface, not_none(), 'interface interface_01.')
| 2.046875 | 2 |
src/pyheatintegration/temperature_range.py | tarao1006/pyheatexchanger | 0 | 12788116 | from __future__ import annotations
from .base_range import BaseRange, flatten, get_ranges, merge
class TemperatureRange(BaseRange):
"""温度範囲を表すクラス。"""
BaseRange.register(TemperatureRange)
def merge_temperature_range(
range_: TemperatureRange,
other: TemperatureRange
) -> TemperatureRange:
return merge(range_, other)
def get_temperature_ranges(temperatures: list[float]) -> list[TemperatureRange]:
return get_ranges(temperatures, TemperatureRange)
def flatten_temperature_ranges(temperature_ranges: list[TemperatureRange]) -> list[float]:
return flatten(temperature_ranges)
def get_temperature_transition(
temperature_ranges: list[TemperatureRange]
) -> list[float]:
"""単調増加となるような温度の推移を返します。
Args:
        temperature_ranges (list[TemperatureRange]): List of temperature ranges.
Returns:
        list[float]: The temperature transition.
Examples:
>>> temperature_ranges = [
TemperatureRange(0, 10),
TemperatureRange(20, 50),
TemperatureRange(30, 30),
TemperatureRange(40, 70),
            TemperatureRange(70, 70),
TemperatureRange(70, 70)
]
>>> sorted(get_temperature_transition(temperature_ranges))
[0, 10, 20, 30, 30, 40, 50, 70, 70]
"""
temperatures_set: set[float] = set()
for temperature_range in temperature_ranges:
if temperature_range.delta == 0:
continue
temperatures_set |= set(temperature_range())
temperatures = list(temperatures_set)
for temperature_range in temperature_ranges:
if temperature_range.delta != 0:
continue
temp = temperature_range.start
temp_count = temperatures.count(temp)
if temp_count == 0:
temperatures.extend([temp, temp])
elif temp_count == 1:
temperatures.extend([temp])
elif temp_count == 2:
            # if the value already appears twice, do nothing.
pass
else:
            raise ValueError(f'The same value appears three or more times. Count: {temp_count}')
return temperatures
def accumulate_heats(
temperature_ranges_: list[TemperatureRange],
temperature_range_heats: dict[TemperatureRange, float]
) -> list[float]:
"""温度領域ごとの必要熱量から全体で必要な熱量を求めます。
Args:
temperature_ranges_ (list[TemperatureRange]): 温度領域のリスト。
temperature_range_heats (dict[TemperatureRange, float]):
温度領域ごとの必要熱量。
Returns:
list[float]: 温度領域ごとの必要熱量を集計した結果。
"""
temperature_ranges = sorted(temperature_ranges_)
if temperature_ranges != (keys := sorted(list(temperature_range_heats.keys()))):
raise ValueError(
            'temperature_range_heats is invalid. '
            f'Required keys: {temperature_ranges} '
            f'Existing keys: {keys}'
)
heats = [0.0] * (len(temperature_ranges) + 1)
for i, temp_range in enumerate(temperature_ranges):
heats[i + 1] = heats[i] + temperature_range_heats[temp_range]
return heats
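# Illustrative sketch with made-up ranges and heat values: accumulate_heats
# turns per-range heat demands into a cumulative curve that starts at 0.
def _accumulate_heats_example():
    ranges = [TemperatureRange(0, 10), TemperatureRange(10, 20), TemperatureRange(20, 30)]
    heats = {r: 10.0 for r in ranges}
    return accumulate_heats(ranges, heats)  # [0.0, 10.0, 20.0, 30.0]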
| 3.15625 | 3 |
Milestone 2 - Blackjack/main.py | spacedrabbit/PythonBootcamp | 1 | 12788117 | import Deck
class Blackjack(object):
def __init__(self):
self.start()
def start(self):
deck = Deck.Deck(1)
deck.display_decks()
Blackjack().start() | 2.375 | 2 |
news.py | stuartelimu/liverpool-bot | 0 | 12788118 | <filename>news.py
import random
import time
from commentary import create_api, NLTK_DATA_PATH
import nltk
nltk.data.path.append(NLTK_DATA_PATH)
import requests
from bs4 import BeautifulSoup
import tweepy
url = 'https://theathletic.com'
tokenizer = nltk.data.load('tokenizers/punkt/english.pickle')
HEADERS = {
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5)'
' AppleWebKit/537.36 (KHTML, like Gecko) Cafari/537.36'
}
def extract_paratext(para_soup):
para = para_soup.find('div', id='the_paywall').text
return tokenizer.tokenize(para)
def extract_text(para_tokenized):
"""Returns a sufficiently-large random text from a tokenized paragraph,
if such text exists. Otherwise, returns None."""
for _ in range(10):
text = random.choice(para_tokenized)
if text and 60 < len(text) < 210:
return text
return None
def scrape_the_athletic():
"""Scrapes content from The Athletic blog."""
r = requests.get(f'{url}/author/james-pearce/', headers=HEADERS).text
soup = BeautifulSoup(r, 'lxml')
latest_articles = soup.find_all(attrs={"data-object-type": "article", "class": "col-sm-3"})
latest_article_links = [latest_article.a['href'] for latest_article in latest_articles]
for link in latest_article_links:
link = f"{url}{link}"
r = requests.get(link, headers=HEADERS).text
soup = BeautifulSoup(r, 'lxml')
para = extract_paratext(soup)
text = extract_text(para)
if not text:
continue
yield f'{text} {link}'
def scrape_liverpool_echo():
r = requests.get('https://www.liverpoolecho.co.uk/all-about/liverpool-fc', headers=HEADERS).text
soup = BeautifulSoup(r, 'lxml')
articles = soup.find(attrs={"data-group": "topStories", "data-group-index": 1})
latest_articles = articles.find_all('div', class_='teaser')
latest_article_links = [latest_article.a['href'] for latest_article in latest_articles]
for link in latest_article_links:
r = requests.get(link, headers=HEADERS).text
soup = BeautifulSoup(r, 'lxml')
paras_body = soup.find('div', class_='article-body')
paras = paras_body.find_all('p')
paras_text = [para.text for para in paras if para.text]
para = random.choice(paras_text)
para_tokenized = tokenizer.tokenize(para)
text = extract_text(para_tokenized)
if not text:
continue
yield f'{text} {link}'
def main():
"""Encompasses the main loop of the bot."""
api = create_api()
print('---Bot started---\n')
news_funcs = ['scrape_the_athletic', 'scrape_liverpool_echo']
news_iterators = []
for func in news_funcs:
news_iterators.append(globals()[func]())
while True:
for i, iterator in enumerate(news_iterators):
try:
tweet = next(iterator)
api.update_status(tweet)
print(tweet, end='\n\n')
time.sleep(1800)
except StopIteration:
                news_iterators[i] = globals()[news_funcs[i]]()
except tweepy.TweepError as e:
print(e.reason)
if __name__ == "__main__":
main()
| 3.1875 | 3 |
ietf/xml/rfc.py | lafrenierejm/ietf-cli | 0 | 12788119 | import sqlalchemy.orm
import xml.etree.ElementTree
from ietf.sql.rfc import (Abstract, Author, FileFormat, IsAlso, Keyword,
ObsoletedBy, Obsoletes, Rfc, SeeAlso, Stream,
UpdatedBy, Updates,)
import ietf.xml.parse as parse
def _add_keyword(session: sqlalchemy.orm.session.Session,
word: str,
) -> Keyword:
"""Create Keyword instances without violating uniqueness restraint."""
keyword = session.query(Keyword).filter(Keyword.word == word).one_or_none()
if keyword is None:
keyword = Keyword(word)
session.add(keyword)
return keyword
def add_all(session: sqlalchemy.orm.session.Session,
root: xml.etree.ElementTree.Element):
"""Add all RFC entries from XML `root` to sqlalchemy `session`."""
entries = parse.findall(root, 'rfc-entry')
for entry in entries:
doc_id = parse.find_doc_id(entry)
title = parse.find_title(entry)
authors = parse.find_author(entry)
year, month, day = parse.find_date(entry)
formats = parse.find_format(entry)
keywords = parse.find_keywords(entry)
abstract_pars = parse.find_abstract(entry)
draft = parse.find_draft(entry)
notes = parse.find_notes(entry)
obsoletes = parse.find_obsoletes(entry)
obsoleted_by = parse.find_obsoleted_by(entry)
updates = parse.find_updates(entry)
updated_by = parse.find_updated_by(entry)
is_also = parse.find_is_also(entry)
see_also = parse.find_see_also(entry)
cur_status = parse.find_current_status(entry)
pub_status = parse.find_publication_status(entry)
streams = parse.find_stream(entry)
area = parse.find_area(entry)
wg = parse.find_wg_acronym(entry)
errata = parse.find_errata_url(entry)
doi = parse.find_doi(entry)
rfc = Rfc(
# Create the Rfc object with its single-column values set
id=doc_id,
title=title,
date_year=year, date_month=month, date_day=day,
draft=draft,
notes=notes,
current_status=cur_status,
publication_status=pub_status,
area=area,
wg_acronym=wg,
errata_url=errata,
doi=doi,
)
for author in authors:
# Add authors to rfc
rfc.authors.append(Author(name=author['name'],
title=author['title'],
organization=author['organization'],
org_abbrev=author['org_abbrev']))
for entry in formats:
# Add formats to rfc
filetype, char_count, page_count = entry
rfc.formats.append(FileFormat(filetype=filetype,
char_count=char_count,
page_count=page_count))
for word in keywords:
# Add keywords to rfc
keyword = _add_keyword(session, word)
rfc.keywords.append(keyword)
for par in abstract_pars:
# Add abstract to rfc
rfc.abstract.append(Abstract(par=par))
for doc in obsoletes:
# Add obsoletes to rfc
doc_type, doc_id = doc
rfc.obsoletes.append(Obsoletes(doc_id=doc_id, doc_type=doc_type))
for doc in obsoleted_by:
# Add obsoleted_by to rfc
doc_type, doc_id = doc
rfc.obsoleted_by.append(ObsoletedBy(doc_id=doc_id,
doc_type=doc_type))
for doc in updates:
# Add updates to rfc
doc_type, doc_id = doc
rfc.updates.append(Updates(doc_id=doc_id, doc_type=doc_type))
for doc in updated_by:
# Add updated_by to rfc
doc_type, doc_id = doc
rfc.updated_by.append(UpdatedBy(doc_id=doc_id, doc_type=doc_type))
for doc in is_also:
# Add is_also to rfc
doc_type, doc_id = doc
rfc.is_also.append(IsAlso(doc_id=doc_id, doc_type=doc_type))
for doc in see_also:
# Add see_also to rfc
doc_type, doc_id = doc
rfc.see_also.append(SeeAlso(doc_id=doc_id, doc_type=doc_type))
for value in streams:
# Add stream to rfc
rfc.stream.append(Stream(value))
session.add(rfc)
| 2.359375 | 2 |
python/numericVector.py | radioactivit/big-data | 4 | 12788120 | <filename>python/numericVector.py<gh_stars>1-10
class Vector:
def __init__(self, anIterable):
self.values = list(anIterable)
def assertt(boolean, text=None):
if text is None:
assert boolean
return 0
assert boolean, text
def isFloatOrInteger(aValue):
return isinstance(aValue, int) or isinstance(aValue, float)
class NumericVector(Vector):
def __init__(self, anIterable):
for element in anIterable:
assert(isFloatOrInteger(element))
super().__init__(anIterable)
def __add__(self, aNumberOrNumericVector):
if isFloatOrInteger(aNumberOrNumericVector):
return NumericVector([value + aNumberOrNumericVector for value in self.values])
if isinstance(aNumberOrNumericVector, NumericVector):
assert len(aNumberOrNumericVector.values) == len(
self.values), "Numeric vectors should have same size !!!"
newValues = [value + aNumberOrNumericVector.values[index]
for index, value in enumerate(self.values)]
return NumericVector(newValues)
def __sub__(self, aNumberOrNumericVector):
return self.__add__(-aNumberOrNumericVector)
def __eq__(self, anotherNumericVector):
assert isinstance(anotherNumericVector, NumericVector)
assert len(anotherNumericVector) == len(self)
return True
def __len__(self):
return len(self.values)
n1 = NumericVector(range(0, 10))
n2 = n1 + 4
print(n2.values)
n3 = n2 + n1
print(n3.values)
print((n1 + n2).values)
#n1 + NumericVector(range(0, 100))
n8 = n1
print(n1 == NumericVector(range(0, 10)))
print(n1 == n8)
| 3.453125 | 3 |
2TLCS/web_agent_training.py | GameDisplayer/Deep-QLearning-Multi-Agent-Perspective-for-Traffic-Signal-Control | 1 | 12788121 | <reponame>GameDisplayer/Deep-QLearning-Multi-Agent-Perspective-for-Traffic-Signal-Control
from flask import Flask, request, jsonify
import numpy as np
from memory import Memory
from model import TrainModel
from waitress import serve
#Remove verbose except errors
import logging
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
app = Flask(__name__)
#Random agent hyperparameters
num_layers = 4
width_layers = 480
batch_size = 100
learning_rate = 0.001
training_epochs = 800
num_states = 321
num_actions = 4
memory_size_min = 600
memory_size_max = 50000
#Left intersection agent
model_1 = TrainModel(
num_layers,
width_layers,
batch_size,
learning_rate,
input_dim=num_states,
output_dim=num_actions
)
mem_1 = Memory(
memory_size_max,
memory_size_min
)
#Right intersection agent
model_2 = TrainModel(
num_layers,
width_layers,
batch_size,
learning_rate,
input_dim=num_states,
output_dim=num_actions
)
mem_2 = Memory(
memory_size_max,
memory_size_min
)
@app.route('/initialize_agents', methods=['POST'])
def initialize_agents():
#First agent
model_1._num_layers = request.get_json()['num_layers']
model_1._width = request.get_json()['width_layers']
model_1._batch_size = request.get_json()['batch_size']
model_1._learning_rate = request.get_json()['learning_rate']
model_1._input_dim = request.get_json()['num_states']
model_1._output_dim = request.get_json()['num_actions']
mem_1._size_max = request.get_json()['memory_size_max']
mem_1._size_min = request.get_json()['memory_size_min']
#Second agent
model_2._num_layers = request.get_json()['num_layers']
model_2._width = request.get_json()['width_layers']
model_2._batch_size = request.get_json()['batch_size']
model_2._learning_rate = request.get_json()['learning_rate']
model_2._input_dim = request.get_json()['num_states']
model_2._output_dim = request.get_json()['num_actions']
mem_2._size_max = request.get_json()['memory_size_max']
mem_2._size_min = request.get_json()['memory_size_min']
return "ok"
@app.route('/add_samples', methods=['POST'])
def add_sample():
old_state_one = np.array(request.get_json()['old_state_one'])
old_action_one = request.get_json()['old_action_one']
reward_one = request.get_json()['reward_one']
current_state_one = np.array(request.get_json()['current_state_one'])
mem_1.add_sample((old_state_one, old_action_one, reward_one, current_state_one))
old_state_two = np.array(request.get_json()['old_state_two'])
old_action_two = request.get_json()['old_action_two']
reward_two = request.get_json()['reward_two']
current_state_two = np.array(request.get_json()['current_state_two'])
mem_2.add_sample((old_state_two, old_action_two, reward_two, current_state_two))
return "ok"
@app.route('/predict', methods=['POST'])
def predict():
num = request.get_json()['num']
if num == 1:
model = model_1
elif num == 2:
model = model_2
else:
print("Error only 2 agents are involved (indices from 1 to 2)")
state = np.array(request.get_json()['state'])
prediction = model.predict_one(state)
return jsonify(prediction=prediction.tolist())
@app.route('/replay', methods=['POST'])
def replay():
num_states = request.get_json()['num_states']
num_actions = request.get_json()['num_actions']
gamma = request.get_json()['gamma']
num_agent = request.get_json()['num_agent']
if num_agent == 1:
model = model_1
mem = mem_1
elif num_agent == 2:
model = model_2
mem = mem_2
else:
print('Error only 2 agents are involved. Index must be only 1 or 2')
batch = mem.get_samples(model.batch_size)
if len(batch) > 0: # if the memory is full enough
states = np.array([val[0] for val in batch]) # extract states from the batch
next_states = np.array([val[3] for val in batch]) # extract next states from the batch
# prediction
q_s_a = model.predict_batch(states) # predict Q(state), for every sample
q_s_a_d = model.predict_batch(next_states) # predict Q(next_state), for every sample
# setup training arrays
x = np.zeros((len(batch), num_states))
y = np.zeros((len(batch), num_actions))
for i, b in enumerate(batch):
state, action, reward, _ = b[0], b[1], b[2], b[3] # extract data from one sample
current_q = q_s_a[i] # get the Q(state) predicted before
current_q[action] = reward + gamma * np.amax(q_s_a_d[i]) # update Q(state, action)
x[i] = state
y[i] = current_q # Q(state) that includes the updated action value
model.train_batch(x, y) # train the NN
return jsonify(loss=model._training_loss)
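# Illustrative sketch with made-up numbers: the target built inside /replay is
# the Q-learning update target_q[action] = reward + gamma * max(Q(next_state)).
def _bellman_target_example():
    q_state = np.array([0.1, 0.4, 0.2, 0.0])  # Q(state) predicted by the network
    q_next = np.array([0.3, 0.7, 0.1, 0.2])   # Q(next_state)
    action, reward, gamma = 1, 1.0, 0.75
    target = q_state.copy()
    target[action] = reward + gamma * np.amax(q_next)  # 1.0 + 0.75 * 0.7 = 1.525
    return target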
@app.route('/save_models', methods=['POST'])
def save_model():
path = request.get_json()['path']
model_1.save_model(path, 1)
model_2.save_model(path, 2)
#plot_model(model, to_file=os.path.join(path, 'model_structure.png'), show_shapes=True, show_layer_names=True)
return "ok"
if __name__ == '__main__':
# Start Web App
#local : app.run(threaded=False)
#Remote :
serve(app, host='127.0.0.1', port=5000)
| 2.34375 | 2 |
example/example/urls.py | nicokant/django-htmx | 0 | 12788122 | <gh_stars>0
from django.urls import path
from example.core.views import attribute_test, index
urlpatterns = [
path("", index),
path("attribute-test", attribute_test),
]
| 1.460938 | 1 |
cogs/music.py | dev-shah-2204/discord-music-bot | 0 | 12788123 | <reponame>dev-shah-2204/discord-music-bot
import discord
import youtube_dl
import urllib.request
import re
from asyncio import sleep
from discord.ext import commands
#Static functions/variables that don't need to be in the class
pause_time = {}
playlist = {}
playlist_with_names = {}
FFMPEG_OPTIONS = {
'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
'options': '-vn'
}
YDL_OPTIONS = {
'format': 'bestaudio',
'postprocessors': [{
'key': 'FFmpegExtractAudio',
'preferredcodec': 'mp3',
'preferredquality': '192'
}]
}
async def get_title(url):
with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
info = ydl.extract_info(url, download=False)
title = info['title']
return title
async def find_song(args):
args = args.replace(' ', '+')
html_content = urllib.request.urlopen(f"https://youtube.com/results?search_query={args}") # YouTube's search link structure
video_ids = re.findall(r"watch\?v=(\S{11})", html_content.read().decode()) # Each video has a unique ID, 11 characters long.
try:
args = f"https://youtube.com/watch?v={video_ids[0]}"
return args
except KeyError: # If video_ids[0] doesn't exist
return None
async def check_if_playlist(ctx):
guild = str(ctx.guild.id)
if guild not in playlist:
playlist[guild] = []
return None
if playlist[guild] == []:
return None
if len(playlist[guild]) > 0:
return playlist[guild][0]
# Main cog
class Music(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.command(name="join", help="Make the bot join a voice channel")
async def join(self, ctx):
if ctx.author.voice is None:
await ctx.send("You need to be in a voice channel to run that command")
return
voice_channel = ctx.author.voice.channel
if ctx.voice_client is None:
await voice_channel.connect()
return
else:
await ctx.voice_client.move_to(voice_channel)
@commands.command(name="leave", aliases=["fuckoff"], help="Make the bot leave a voice channel")
async def leave(self, ctx):
global playlist
global playlist_with_names
if ctx.voice_client is None:
await ctx.send("I'm not in a voice channel.")
return
await ctx.voice_client.disconnect()
playlist[str(ctx.guild.id)] = [] # Reset the playlist
playlist_with_names[str(ctx.guild.id)] = [] # Reset the playlist
await ctx.message.add_reaction("👋")
async def wait_until_song_complete(self, ctx, args):
"""
Function to check every few seconds
if the voice client is done playing
a song.
"""
global playlist
global playlist_with_names
vc = ctx.voice_client
guild = str(ctx.guild.id)
# Adding guild id in the playlist dictionaries
if guild not in playlist:
playlist[guild] = []
if guild not in playlist_with_names:
playlist_with_names[guild] = []
# Checking different states of the voice client
if vc is None:
return
if vc.is_playing(): # If something is being played
if args not in playlist[guild]:
playlist[guild].append(args)
            if not args.startswith("https://") and not args.startswith("http://"):
song = await find_song(args)
if song is None:
await ctx.send("Couldn't find that on YouTube.")
return
else:
song = args
song_title = await get_title(song)
if song_title is None:
await ctx.send("Couldn't find that on YouTube.")
return
em = discord.Embed(
title="Added song to playlist",
description=song_title,
color=0x60FF60
)
await ctx.send(embed=em)
playlist_with_names[guild].append(song_title)
await sleep(3) # Check every 3 seconds. I know it's not a very good method.
await self.wait_until_song_complete(ctx, args)
if vc.is_paused():
if guild not in pause_time:
pause_time[guild] = 0
else:
pause_time[guild] += 5
if pause_time[guild] > 120:
await ctx.send("I was paused for more than 2 minutes, so I left the voice channel. Your playlist has been cleared")
if guild in playlist:
playlist[guild] = []
return
await sleep(5)
await self.wait_until_song_complete(ctx, args)
else: # If the voice client is idle
try:
song = playlist[guild][0]
song_title = playlist_with_names[guild][0]
except IndexError:
playlist[guild].append(args)
                if not args.startswith("https://") and not args.startswith("http://"):
song = await find_song(args)
if song is None:
await ctx.send("Couldn't find that on YouTube.")
return
else:
song = args
song_title = await get_title(song)
try:
await self.play_song(ctx, song)
del song
del song_title
except discord.ClientException or discord.errors.ClientException:
await sleep(1) # Try again in a second
await self.wait_until_song_complete(ctx, args)
async def play_song(self, ctx, args):
vc = ctx.voice_client
        if not args.startswith("https://") and not args.startswith("http://"):
args = await find_song(args)
if args is not None:
try:
with youtube_dl.YoutubeDL(YDL_OPTIONS) as ydl:
info = ydl.extract_info(args, download=False)
if info['duration'] > 1200:
await ctx.send("You cannot play videos longer than 20 minutes.")
return
url = info['formats'][0]['url']
source = await discord.FFmpegOpusAudio.from_probe(url, **FFMPEG_OPTIONS)
vc.play(source)
em = discord.Embed(
title="Now Playing",
description=f"[{info['title']}]({args})",
color=0x60FF60
)
em.set_image(url=info['thumbnail'])
await ctx.send(embed=em)
except youtube_dl.utils.DownloadError:
await ctx.send("There was an error playing from the given arguements.")
            except (discord.errors.ClientException, discord.ClientException):
playlist[str(ctx.guild.id)].append(args)
else:
await ctx.send("I couldn't find that song/video.")
@commands.command(name="play", aliases=["p"], help="Play a song.")
async def play(self, ctx, *, args:str):
await self.join(ctx)
await self.wait_until_song_complete(ctx, args)
@commands.command(name="pause", help="Pause a song.")
async def pause(self, ctx):
if ctx.author.voice is None:
await ctx.send("You need to be in a voice channel to run that command")
return
voice = ctx.voice_client
if voice is None:
await ctx.send("I'm not in a voice channel.")
return
voice_channel = ctx.author.voice.channel
bot_voice_channel = voice.channel
if voice_channel != bot_voice_channel:
await ctx.send("You need to be in the same voice channel as me to run that command.")
if voice.is_playing():
voice.pause()
await ctx.send(f"⏸️ Paused")
else:
await ctx.send("No audio is being played.")
@commands.command(name="resume", help="Resume a paused song.")
async def resume(self, ctx):
if ctx.author.voice is None:
await ctx.send("You need to be in a voice channel to run that command")
return
        voice = ctx.voice_client
        if voice is None:
            await ctx.send("I'm not in a voice channel")
            return
        voice_channel = ctx.author.voice.channel
        bot_voice_channel = voice.channel
        if voice_channel != bot_voice_channel:
            await ctx.send("You need to be in the same voice channel as me to run that command.")
            return
if voice.is_paused():
await ctx.message.add_reaction("▶️")
voice.resume()
else:
await ctx.send("No audio is being played.")
@commands.command(name="stop", help="Stop playing a song. Completely.")
async def stop(self, ctx):
if ctx.author.voice is None:
await ctx.send("You need to be in a voice channel to run that command")
return
        voice = ctx.voice_client
        if voice is None:
            await ctx.send("I'm not in a voice channel")
            return
        voice_channel = ctx.author.voice.channel
        bot_voice_channel = voice.channel
        if voice_channel != bot_voice_channel:
            await ctx.send("You need to be in the same voice channel as me to run that command.")
            return
        await ctx.message.add_reaction("⏹️")
        voice.stop()
@commands.command(name="skip", help="Skip a song")
async def skip(self, ctx):
if ctx.author.voice is None:
await ctx.send("You need to be in a voice channel to run that command")
return
voice = ctx.voice_client
if voice is None:
await ctx.send("I'm not in a voice channel.")
return
voice_channel = ctx.author.voice.channel
bot_voice_channel = voice.channel
if voice_channel != bot_voice_channel:
await ctx.send("You need to be in the same voice channel as me to run that command.")
if voice.is_playing():
voice.stop()
try:
if str(ctx.guild.id) in playlist:
del playlist[str(ctx.guild.id)][0] # Remove that song from the playlist
if str(ctx.guild.id) in playlist_with_names:
del playlist_with_names[str(ctx.guild.id)][0] # Remove that song from the playlist
except IndexError:
pass
await ctx.send(f"Song skipped by {ctx.author.mention}")
if voice.is_paused():
voice.resume()
voice.stop()
try:
if str(ctx.guild.id) in playlist:
del playlist[str(ctx.guild.id)][0] # Remove that song from the playlist
if str(ctx.guild.id) in playlist_with_names:
del playlist_with_names[str(ctx.guild.id)][0] # Remove that song from the playlist
except IndexError:
pass
await ctx.send(f"Song skipped by {ctx.author.mention}")
check = await check_if_playlist(ctx)
if check is None:
return
else:
await self.wait_until_song_complete(ctx, check)
@commands.command(name="playlist", aliases=["q","queue"], help="Show the upcoming songs")
async def queue(self, ctx):
global playlist_with_names
if ctx.author.voice is None:
await ctx.send("You need to be in a voice channel to run that command")
return
voice = ctx.voice_client
if voice is None:
await ctx.send("I'm not in a voice channel")
return
voice_channel = ctx.author.voice.channel
bot_voice_channel = voice.channel
if voice_channel != bot_voice_channel:
await ctx.send("You need to be in the same voice channel as me to run that command.")
return
if str(ctx.guild.id) not in playlist_with_names:
await ctx.send("The playlist is empty")
return
else:
add_footer = False
desc = ""
i = 1
await ctx.send("Hang on, playlist loading.")
for song in playlist_with_names[str(ctx.guild.id)]:
print(song)
desc += f"{i}. {song}\n"
i += 1
if desc != "":
em = discord.Embed(
title="Upcoming songs:",
description=desc,
color=0x60FF60
)
if desc == "":
await ctx.send("The playlist is empty")
return
await ctx.send(embed=em)
def setup(bot):
bot.add_cog(Music(bot))
print("Music cog loaded")
| 2.921875 | 3 |
Baseline/num_to_words.py | shreyansh26/RevOpiD-IJCNLP-17- | 6 | 12788124 | import re
import pickle
with open('words2num_dict.pickle', 'rb') as handle:
words2num = pickle.load(handle)
num2words = {v: k for k, v in words2num.items()}
freq_raw = []
with open('output.txt') as f:
for i in range(31):
f.readline()
for line in f:
        line = re.sub(r'\[\w+\]', '', line)
line = re.sub('\n', '', line)
line = re.sub('{', '', line)
line = re.sub('}', '', line)
freq_raw.append(line.strip())
num_word_list = []
freq_list = []
for i in range(len(freq_raw)):
if freq_raw[i] != '':
num_list = freq_raw[i].split(' ')
#print(num_list)
for j in range(len(num_list)-2):
num_list[j] = int(num_list[j])
num_list[-1] = int(num_list[-1])
#print(num_list[-1])
num_word_list.append(num_list[:-2])
freq_list.append(num_list[-1])
words_tuple = []
for i in range(len(num_word_list)):
words = []
for j in range(len(num_word_list[i])):
if(num_word_list[i][j] != len(num2words)+5):
words.append(num2words[num_word_list[i][j]].strip())
words_tuple.append(tuple((words, freq_list[i])))
'''
for i in words_tuple:
print(i)
'''
with open('words_tuple.pickle', 'wb') as handle:
pickle.dump(words_tuple, handle) | 3.21875 | 3 |
src/ehr_classification/classifier.py | obi-ml-public/EHR-automatic-event-adjudication | 0 | 12788125 | import os
from pathlib import Path
import numpy as np
import pandas as pd
import spacy
from spacy.compat import pickle
import lz4.frame
from tqdm import tqdm
from tensorflow.keras.callbacks import ModelCheckpoint, TensorBoard, EarlyStopping
from ehr_classification.tokenizer import get_features, get_custom_tokenizer
from ehr_classification.classifier_model import compile_lstm
def run_multiple_models(df,
features,
weights,
word_vectors,
max_note_length=2000,
batch_size=64,
gpu_device='0'
):
'''
    Run every model in weights on the given features, add prediction columns to df and return it.
    :param df: DataFrame holding one row of metadata per note
    :param features: feature matrix produced by get_features
    :param weights: dict mapping target names to model weight files
    :param word_vectors: path to the spacy word-vector model
    :param max_note_length: maximum number of tokens per note
    :param batch_size: prediction batch size
    :param gpu_device: CUDA device id to use
    :return: df with <target>_predictions and <target>_raw columns added
'''
# use specified gpu device
os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
os.environ['CUDA_VISIBLE_DEVICES'] = str(gpu_device)
nlp = get_custom_tokenizer(word_vectors)
embeddings = nlp.vocab.vectors.data
model = compile_lstm(embeddings,
{'nr_hidden': 64, 'max_length': max_note_length, 'nr_class': 4},
{'dropout': 0.5, 'lr': 0.0001})
for target, weight in tqdm(list(weights.items())):
model.load_weights(weight)
print(f'Predicting {target}.')
predictions = model.predict(features, batch_size=batch_size, verbose=True)
print(f'Done predicting {target}.')
df[(target + '_predictions')] = predictions[0]
df[(target + '_raw')] = predictions[1]
return df
def run_multiple_models_pickle(infile,
outfile,
word_vectors,
overwrite=False,
**kwargs
):
# only run when not already there
outfile = Path(outfile)
if not outfile.exists() or overwrite:
outfile.touch()
from .utils import lz4_load
data_dict = lz4_load(infile)
predictions = run_multiple_models(df=data_dict['meta'],
features=data_dict['data'],
word_vectors=word_vectors,
**kwargs)
print('Writing to file')
predictions.to_parquet(outfile)
print('Done writing to file')
def run_multiple_models_parquet(infile,
outfile,
word_vectors,
note_column='NoteTXT',
max_note_length=2000,
**kwargs
):
def select_rows(df): # Remove rows with empty note text
df = pd.DataFrame(df.loc[df[note_column].notnull()])
return df
eval_data = pd.read_parquet(infile)
lz4_file = infile.replace('.parquet', '.pic.lz4')
if Path(lz4_file).exists():
print('Loading features')
with lz4.frame.open(lz4_file, mode='r') as f:
eval_docs = pickle.load(f)
else:
print('Extracting tokens')
tokenizer = get_custom_tokenizer(word_vectors)
note_texts = eval_data[note_column]
tokens = list(tokenizer.pipe(note_texts))
print('Extracting features')
eval_features = get_features(tokens, max_note_length)
eval_data = select_rows(eval_data)
eval_data = run_multiple_models(df=eval_data,
features=eval_features,
word_vectors=word_vectors,
**kwargs)
print('Writing to file')
eval_data.to_parquet(outfile)
print('Done writing to file')
def run_current_models(infile, outfile, classifier_type, input_type='parquet', **kwargs):
# use models and vectors path from environment (or use defaults)
models_path = os.getenv("PREDICT_EHR_MODELS")
if not models_path:
models_path = '/mnt/obi0/phi/ehr/models/'
vectors_path = os.getenv("PREDICT_EHR_VECTORS")
if not vectors_path:
vectors_path = '/mnt/obi0/phi/ehr/word_vectors/filtered_20-05-23.bigram'
if classifier_type == 'event':
weights = {
'Event_PCI': f'{models_path}/Events/PCI/LSTM_CNN_BEST_model.hdf5',
'Event_ACS': f'{models_path}/Events/ACS/LSTM_CNN_BEST_model.hdf5',
'Event_HF': f'{models_path}/Events/HF/LSTM_CNN_BEST_model.hdf5',
'Event_IS': f'{models_path}/Events/IS/LSTM_CNN_BEST_model.hdf5'
}
elif classifier_type == 'history':
weights = {
'History_CAD': f'{models_path}/History/CAD/LSTM_CNN_BEST_model.hdf5',
'History_CAD_UI': f'{models_path}/History/CAD_UI/LSTM_CNN_BEST_model.hdf5',
'History_HF': f'{models_path}/History/HF/LSTM_CNN_BEST_model.hdf5',
'History_HF_UI': f'{models_path}/History/HF_UI/LSTM_CNN_BEST_model.hdf5',
}
else:
raise NotImplementedError
print(f'Predicting using weights: {weights}')
if input_type == 'parquet':
run_multiple_models_parquet(infile=infile,
outfile=outfile,
weights=weights,
word_vectors=vectors_path,
**kwargs)
elif input_type == 'pickle':
run_multiple_models_pickle(infile=infile,
outfile=outfile,
weights=weights,
word_vectors=vectors_path,
**kwargs)
def predict(output_directory,
classifier_type: ('note classifier, `event` or `history`', 'option', 't') = 'event',
gpu: ('gpu to use', 'option', 'g') = 0,
gpu_offset: ('subtract gpu offset', 'option', 's') = 0,
input_type: ('input type, can be `parquet` or `pickle`', 'option', 'i') = 'parquet',
*file_names):
"""Takes one or more parquet files and writes tokenized text to output file.
# set environment variables for models and word vectors
export PREDICT_EHR_VECTORS=en_core_web_lg
export PREDICT_EHR_MODELS=PATH/TO/MODELS
# run predictions for events on one or more parquet files
predict_ehr -t event out_dir text1.parquet
predict_ehr -t event out_dir text1.parquet text2.parquet text3.parquet
# run on multiple files in parallel with 4 gpus, using text that has been tokenized before:
'parallel -j 4 predict_ehr . -t event -g {%} -s 1 -i pickle {} ::: *.pic.lz4'
'parallel -j 4 predict_ehr . -t history -g {%} -s 1 -i pickle {} ::: *.pic.lz4'
"""
print(f'Predicting with the following input files: {file_names}')
for infile in file_names:
input_file = Path(infile)
assert Path(output_directory).exists()
output_file = Path(output_directory) / (input_file.name + '.predictions.pq')
print('Processing', infile)
run_current_models(infile,
str(output_file),
classifier_type=classifier_type,
gpu_device=int(gpu) - int(gpu_offset),
input_type=input_type)
def predict_():
"""Entry point for console_scripts
"""
import plac
plac.call(predict)
def train_model(train_texts,
train_labels,
validation_texts,
validation_labels,
model_name,
output_path='.',
max_note_length=2000,
learning_rate=0.0001,
epochs=150,
batch_size=64,
gpu_device='0',
save_best_only=True,
**kwargs):
"""
Train a model with train_texts and train_labels and validate on validation_texts and validation_labels.
train_texts: array of notes to be used for model training.
train_labels: a binary label to be used for training. The index should correspond to the train_texts
"""
# use specified gpu device
os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
os.environ['CUDA_VISIBLE_DEVICES'] = str(gpu_device)
# use word vectors from environment variable (or defaults)
vectors_path = os.getenv("PREDICT_EHR_VECTORS")
if not vectors_path:
vectors_path = '/mnt/obi0/phi/ehr/word_vectors/filtered_20-05-23.bigram'
nlp = get_custom_tokenizer(vectors_path)
embeddings = nlp.vocab.vectors.data
print('Parsing texts...')
train_docs = list(nlp.pipe(train_texts, batch_size=2000))
validation_docs = list(nlp.pipe(validation_texts, batch_size=2000))
train_x = get_features(train_docs, max_note_length)
validation_x = get_features(validation_docs, max_note_length)
train_labels = [train_labels, train_labels]
validation_labels = [validation_labels, validation_labels]
model = compile_lstm(embeddings, {'max_length': max_note_length}, {'lr': learning_rate})
# define callbacks
checkpoint_file = model_name + '_{epoch:02d}-{val_loss:.2f}.hdf5'
checkpoint_path = os.path.join(output_path, 'checkpoints', checkpoint_file)
print(f'Saving checkpoints to {checkpoint_path}')
checkpoint_callback = ModelCheckpoint(
filepath=checkpoint_path,
monitor='val_loss', save_best_only=save_best_only, save_weights_only=True
)
tensorboard_path = os.path.join(output_path, 'tensorboard', model_name)
print(f'Writing tensorboard output to {tensorboard_path}')
tensorboard_callback = TensorBoard(
log_dir=tensorboard_path,
write_graph=False, profile_batch=0
)
early_stopping_callback = EarlyStopping(monitor='val_loss', patience=50)
print('Training...')
model.fit(train_x,
train_labels,
validation_data=(validation_x, validation_labels),
epochs=epochs,
batch_size=batch_size,
callbacks=[checkpoint_callback, tensorboard_callback, early_stopping_callback])
return model
def train(labels_path, model_name, output_path,
epochs: ('number of epochs', 'option', 'e') = 150,
gpu: ('gpu to use', 'option', 'g') = 0,
gpu_offset: ('subtract gpu offset', 'option', 's') = 0,
testrun: ('do short testrun on 200 samples', 'flag', 't') = False,
all_checkpoints: ('save all or best checkpoint only', 'flag', 'a') = False):
"""Basic training method that takes parquet file with labeled data, splits into training and validation set
and trains model (with early stopping).
# first configure a spacy model to use as word vector mapping
export PREDICT_EHR_VECTORS=en_core_web_lg
# then train a classifier model given labels
train_ehr --gpu 0 mgb_predictions_event/Event_PCI_labels.parquet Event_PCI mimic_models_event
"""
if not Path(output_path).exists():
Path(output_path).mkdir(parents=True)
print('Processing', labels_path)
labels_df = pd.read_parquet(labels_path)
# shuffle the labels
labels_df = labels_df.sample(frac=1, random_state=42)
if testrun:
labels_df = labels_df.iloc[:100]
# split into two sets for training and validation
train_df, validation_df = np.array_split(labels_df, 2)
print(f'Train data shape: {train_df.shape}')
print(f'Validation data shape: {validation_df.shape}')
print(f'Training model: {model_name}')
model = train_model(train_texts=train_df['NoteTXT'],
train_labels=train_df['label'],
validation_texts=validation_df['NoteTXT'],
validation_labels=validation_df['label'],
model_name=model_name,
output_path=output_path,
epochs=int(epochs),
save_best_only=not all_checkpoints,
gpu_device=int(gpu) - int(gpu_offset))
model.save_weights(os.path.join(output_path, model_name + '.hdf5'))
def train_():
"""Entry point for console_scripts
"""
import plac
plac.call(train)
if __name__ == "__main__":
predict_()
| 2.140625 | 2 |
write_qpid_json.py | bendoerr/collectd-write-qpid-json | 0 | 12788126 | # The MIT License (MIT)
#
# Copyright (c) 2015 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import collectd
import write_common
from qpid.messaging import *
################################################################################
# Globals
################################################################################
#
# Name of this plugin for logging
#
plugin_name = 'write_qpid_json'
#
# Default configuration values
#
config = {
'TypesDB': ['/usr/share/collectd/types.db'],
'Host': 'localhost',
'Port': '5672',
'User': 'guest',
'Password': '<PASSWORD>',
'Exchange': 'amq.fanout'
}
#
# Parsed types.db values
#
types = {}
#
# QPid Connection
#
connection = None
################################################################################
# Handlers
################################################################################
def handle_config(given_config):
global config
config = write_common.merge_configs(config, given_config, plugin_name)
collectd.info('%s: Configured: %s' % (plugin_name, config))
def handle_init():
global types
types = write_common.parse_types_db(config['TypesDB'], plugin_name)
# global connection
# connection = Connection("%s/%s@%s:%s" % (
# config['User'], config['Password'], config['Host'], config['Port']))
# If we made it this far, go ahead and register the write plugin
collectd.register_write(handle_write)
collectd.info(
'%s: Initialized and registered write handler.' % plugin_name)
def handle_shutdown():
    global connection
    if connection != None and connection.opened():
try:
connection.close()
collectd.info("%s: Closed connection to endpoint." % plugin_name)
except MessagingError,m:
collectd.info("%s: Couldn't close connection." % plugin_name)
if connection != None:
connection = None
def handle_write(vl):
global connection
if connection == None:
connection = Connection("%s/%s@%s:%s" % (
config['User'], config['Password'], config['Host'], config['Port']))
try:
if not connection.opened():
# Open the connection if needed.
connection.open()
collectd.info("%s: Opened connection to endpoint." % plugin_name)
# Open a session
session = connection.session()
sender = session.sender(config['Exchange'])
# Send the message
sender.send(Message(write_common.value_to_json(vl, types)))
# Close the session
session.close()
except ConnectionError,m:
collectd.error("%s: Failed to connect - %s" % (plugin_name, m))
connection = None
except MessagingError,m:
collectd.error("%s: Failed to send AMQP message - %s" % (plugin_name, m))
handle_shutdown()
except Error,m:
collectd.error("%s: Unknown Error - %s" % (plugin_name, m))
handle_shutdown()
################################################################################
# Register handlers
################################################################################
# register_config(...)
# register_config(callback[, data][, name]) -> identifier
#
# Register a callback function for config file entries.
#
# 'callback' is a callable object that will be called for every config block
# 'data' is an optional object that will be passed back to the callback
# function every time it is called.
# 'name' is an optional identifier for this callback. The default name
# is 'python.<module>'.
# Every callback needs a unique identifier, so if you want to
# register this callback multiple time from the same module you need
# to specify a name here.
# 'identifier' is the full identifier assigned to this callback.
#
# The callback function will be called with one or two parameters:
# config: A Config object.
# data: The optional data parameter passed to the register function.
# If the parameter was omitted it will be omitted here, too.
collectd.register_config(handle_config)
# register_init(...)
# register_init(callback[, data][, name]) -> identifier
#
# Register a callback function that will be executed once after the config.
# file has been read, all plugins heve been loaded and the collectd has
# forked into the background.
#
# 'callback' is a callable object that will be executed.
# 'data' is an optional object that will be passed back to the callback
# function when it is called.
# 'name' is an optional identifier for this callback. The default name
# is 'python.<module>'.
# Every callback needs a unique identifier, so if you want to
# register this callback multiple time from the same module you need
# to specify a name here.
# 'identifier' is the full identifier assigned to this callback.
#
# The callback function will be called without parameters, except for
# data if it was supplied.
collectd.register_init(handle_init)
# register_write(...)
# register_write(callback[, data][, name]) -> identifier
#
# Register a callback function to receive values dispatched by other plugins
#
# 'callback' is a callable object that will be called every time a value
# is dispatched.
# 'data' is an optional object that will be passed back to the callback
# function every time it is called.
# 'name' is an optional identifier for this callback. The default name
# is 'python.<module>'.
# Every callback needs a unique identifier, so if you want to
# register this callback multiple time from the same module you need
# to specify a name here.
# 'identifier' is the full identifier assigned to this callback.
#
# The callback function will be called with one or two parameters:
# values: A Values object which is a copy of the dispatched values.
# data: The optional data parameter passed to the register function.
# If the parameter was omitted it will be omitted here, too.
# collectd.register_write(handle_write) # Registered as part of handle_init
collectd.register_shutdown(handle_shutdown)
| 1.289063 | 1 |
stardate/migrations/0010_remove_blog_social_auth.py | blturner/django-stardate | 0 | 12788127 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-10 23:25
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('stardate', '0009_auto_20170301_0155'),
]
operations = [
migrations.RemoveField(
model_name='blog',
name='social_auth',
),
]
| 1.460938 | 1 |
smo-install/test/pythonsdk/src/oransdk/policy/policy.py | ShixiongQi/o-ran-ric | 0 | 12788128 | <reponame>ShixiongQi/o-ran-ric
#!/usr/bin/env python3
###
# ============LICENSE_START=======================================================
# ORAN SMO PACKAGE - PYTHONSDK TESTS
# ================================================================================
# Copyright (C) 2021-2022 AT&T Intellectual Property. All rights
# reserved.
# ================================================================================
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============LICENSE_END============================================
# ===================================================================
#
###
"""Onap Policy module."""
from dataclasses import dataclass
from typing import Dict
from onapsdk.onap_service import OnapService
from oransdk.configuration import settings
@dataclass
class PolicyType:
"""PolicyType dataclass."""
type: str
version: str
class OranPolicy(OnapService):
"""Onap Policy library."""
pap_url = settings.POLICY_PAP_URL
api_url = settings.POLICY_API_URL
header = {"Accept": "application/json", "Content-Type": "application/json"}
@classmethod
def get_components_status(cls,
basic_auth: Dict[str, str]) -> Dict:
"""
Get status of Policy component.
Args:
basic_auth: (Dict[str, str]) for example:{ 'username': 'bob', 'password': '<PASSWORD>' }
Returns:
the status of the Policy component
"""
url = f"{cls.pap_url}/policy/pap/v1/components/healthcheck"
status = cls.send_message_json('GET',
'Get status of Policy component',
url,
basic_auth=basic_auth)
return status
@classmethod
def get_policy_status(cls, basic_auth: Dict[str, str]) -> Dict:
"""
Get status of all the policies.
Returns:
the status of all the policies
"""
url = f"{cls.pap_url}/policy/pap/v1/policies/status"
status = cls.send_message_json('GET',
'Get status of all the policies',
url,
basic_auth=basic_auth)
return status
@classmethod
def get_policy(cls, policy_type: PolicyType, policy_name, policy_version, basic_auth: Dict[str, str]) -> Dict:
"""
Get the policy.
Args:
policy_type: the policy type
policy_name: the policy name
policy_version: the version of the policy
basic_auth: (Dict[str, str]) for example:{ 'username': 'bob', 'password': '<PASSWORD>' }
Returns:
            the policy response
"""
url = f"{cls.api_url}/policy/api/v1/policytypes/{policy_type.type}/versions/{policy_type.version}/policies/{policy_name}/versions/{policy_version}"
policy_response = cls.send_message('GET', 'Get the policy', url, basic_auth=basic_auth)
return policy_response
@classmethod
def create_policy(cls, policy_type: PolicyType, policy_data, basic_auth: Dict[str, str]) -> None:
"""
Create a policy.
Args:
policy_type: the policy type
type_version: the version of the policy type
policy_data: the policy to be created, in binary format
"""
url = f"{cls.api_url}/policy/api/v1/policytypes/{policy_type.type}/versions/{policy_type.version}/policies"
cls.send_message('POST', 'Create Policy', url, data=policy_data, headers=cls.header,
basic_auth=basic_auth)
@classmethod
def deploy_policy(cls, policy_data, basic_auth: Dict[str, str]) -> None:
"""
Deploy a policy.
Args:
policy_data: the policy to be deployed, in binary format
"""
url = f"{cls.pap_url}/policy/pap/v1/pdps/policies"
cls.send_message('POST', 'Deploy Policy', url, data=policy_data, headers=cls.header, basic_auth=basic_auth)
@classmethod
def undeploy_policy(cls, policy_id, policy_version, basic_auth: Dict[str, str]) -> None:
"""
Undeploy a policy.
Args:
policy_id: The policy id as provided during the create
policy_version: The policy version as provided during the create
"""
url = f"{cls.pap_url}/policy/pap/v1/pdps/policies/{policy_id}/versions/{policy_version}"
cls.send_message('DELETE', 'Undeploy Policy', url, headers=cls.header, basic_auth=basic_auth)
@classmethod
def delete_policy(cls, policy_type: PolicyType, policy_id, policy_version, basic_auth: Dict[str, str]) -> None:
"""
Delete a policy.
Args:
policy_type: the policy type
policy_id: The policy id as provided during the create
            policy_version: The policy version as provided during the create
        """
"""
url = f"{cls.api_url}/policy/api/v1/policytypes/{policy_type.type}/versions/{policy_type.version}/policies/{policy_id}/versions/{policy_version}"
cls.send_message('DELETE', 'Delete Policy', url, headers=cls.header, basic_auth=basic_auth)
| 1.476563 | 1 |
curriculum/profile/migrations/0002_profile_photo.py | ro199/Hoja-de-Vida | 0 | 12788129 | # Generated by Django 3.1 on 2020-08-17 21:14
from django.db import migrations, models
import profile.models
class Migration(migrations.Migration):
dependencies = [
('profile', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='profile',
name='photo',
field=models.ImageField(blank=True, null=True, upload_to=profile.models.upload_path),
),
]
| 1.6875 | 2 |
main.py | sguo1123/SimpleMMO-Notifier | 0 | 12788130 | <filename>main.py
"""
SMMO API NOTIFICATIONS
Author: HugTed
Date: 3/25/2022
"""
import sys
import time
from threading import Thread
from tkinter import *
from tkinter import ttk
import requests
from discord_webhook import DiscordWebhook
from win10toast import ToastNotifier
class MyWindow:
def __init__(self, win):
self.next_run = None
self.running = False
self.toaster = ToastNotifier()
self.s = ttk.Style()
self.s.theme_use('alt')
self.s.configure("blue.Horizontal.TProgressbar", foreground='cornflower blue', background='cornflower blue')
self.s.configure('W.TButton', font=('calibri', 10, 'bold', 'underline'))
self.pb = ttk.Progressbar(
win,
orient='horizontal',
mode='indeterminate',
length=450
)
self.pb.place(x=25, y=174)
self.frame = Frame(win)
self.frame.place(x=25, y=75)
self.lbl1 = Label(win, text='API Key:')
self.t1 = Entry(bd=3, width=65, show="*")
self.lbl1.place(x=25, y=25)
self.t1.place(x=75, y=25)
self.lbl2 = Label(win, text='Discord:')
self.t2 = Entry(bd=3, width=65)
self.lbl2.place(x=25, y=50)
self.t2.place(x=75, y=50)
self.lbl3 = Label(win, text='Disc ID:')
self.t3 = Entry(bd=3, width=20)
self.lbl3.place(x=25, y=75)
self.t3.place(x=75, y=75)
self.lbl4 = Label(win, text='Delay:')
self.t4 = Entry(bd=3, width=5)
self.t4.insert(0, "300")
self.lbl4.place(x=215, y=75)
self.t4.place(x=265, y=75)
self.notificationOptions = ["Windows", "Discord"]
self.notificationType = StringVar()
self.notificationType.set("Windows")
self.lbl4 = Label(win, text='Type:')
self.t5 = ttk.OptionMenu(win, self.notificationType, self.notificationOptions[0], *self.notificationOptions)
self.lbl4.place(x=335, y=75)
self.t5.place(x=375, y=73)
self.energyCheck = BooleanVar()
self.questCheck = BooleanVar()
self.energyCheck.set(True)
self.questCheck.set(True)
self.c1 = ttk.Checkbutton(win, text='Energy Notifications', variable=self.energyCheck, onvalue=True,
offvalue=False)
self.c2 = ttk.Checkbutton(win, text='Quest Notifications', variable=self.questCheck, onvalue=True,
offvalue=False)
self.c1.place(x=25, y=125)
self.c2.place(x=25, y=150)
self.errorMessage = StringVar()
self.errorMessage.set('')
self.lbl6 = Label(win, textvariable=self.errorMessage, fg='#f00')
self.lbl6.place(x=215, y=125)
self.dataMessage = StringVar()
self.dataMessage.set('?/? Energy ?/? Quests')
self.lbl7 = Label(win, textvariable=self.dataMessage, fg='#00008b')
self.lbl7.place(x=215, y=100)
self.b1 = Button(win, text='Run Checks', command=lambda: self.startChecks())
self.b1.place(x=320, y=145)
self.b2 = Button(win, text='Stop Checks', command=lambda: self.stopChecks())
self.b2.place(x=397, y=145)
self.lbl8 = Label(win, text='Gold:')
self.t6 = Entry(bd=3, width=20)
self.lbl8.place(x=25, y=100)
self.t6.place(x=75, y=100)
self.t6.insert(0, -1)
def readKey(self, k):
self.t1.insert(0, k)
def readHook(self, h):
self.t2.insert(0, h)
def sendNoti(self, ntype, value, gval=0):
if gval != 0:
message = f"You are out of safe mode with {gval:,} gold on you!"
else:
message = f"Your {value} is FULL"
if ntype == "Windows":
try:
self.toaster.show_toast("SimpleMMO Notification", f"{message}", icon_path="./data/logo.ico")
self.errorMessage.set(f'Sending Quest Notification')
except Exception as e:
self.errorMessage.set(f'Error: {e}')
else:
try:
if self.t3.get() != "":
webhook = DiscordWebhook(url=self.t2.get(), content=f"[<@{self.t3.get()}>] {message}")
else:
webhook = DiscordWebhook(url=self.t2.get(), content=f"{message}")
webhook.execute()
self.errorMessage.set(f'Sending Quest Notification')
except Exception as e:
self.errorMessage.set(f'Error: {e}')
def stopChecks(self):
self.errorMessage.set('')
self.running = False
self.pb.stop()
def startChecks(self):
api_key = self.t1.get()
delay = self.t4.get()
if api_key == "":
self.errorMessage.set('Error Missing API Key')
return
if self.notificationType.get() == "Discord" and self.t2.get() == "":
self.errorMessage.set('Missing Discord Hook/ID')
return
if not delay.isdigit():
self.errorMessage.set('Missing Check Delay In Seconds')
return
elif int(delay) < 2:
self.errorMessage.set('Minimum Delay is 2 Seconds')
return
elif int(delay) < 60:
self.errorMessage.set('Note: Max API 40 Calls/Min')
self.pb.start()
self.running = True
self.next_run = Thread(target=self.runCheck, args=(api_key, delay))
self.next_run.daemon = True
self.next_run.start()
def runCheck(self, api_key, delay):
while self.running:
try:
endpoint = f'https://api.simple-mmo.com/v1/player/me'
payload = {'api_key': api_key}
r = requests.post(url=endpoint, data=payload)
if str(r.status_code) != "200":
self.errorMessage.set(f'API ERROR: {r.status_code}')
self.running = False
self.pb.stop()
return
res = r.json()
except:
self.errorMessage.set(f'API ERROR: Unknown Code')
self.running = False
self.pb.stop()
return
self.dataMessage.set(
f'{res["energy"]:,}/{res["maximum_energy"]:,} Energy {res["quest_points"]:,}/{res["maximum_quest_points"]:,} Quests')
goldcheck = self.t6.get()
            if goldcheck != "-1" and res["safeMode"] == 0 and res["gold"] > int(goldcheck):
if self.notificationType.get() == "Windows":
self.sendNoti("Windows", "Money", res["gold"])
else:
self.sendNoti("Discord", "Money", res["gold"])
time.sleep(10)
if self.questCheck.get() and res["quest_points"] >= res["maximum_quest_points"]:
if self.notificationType.get() == "Windows":
self.sendNoti("Windows", "QP")
else:
self.sendNoti("Discord", "QP")
time.sleep(10)
if self.energyCheck.get() and res["energy"] >= res["maximum_energy"]:
if self.notificationType.get() == "Windows":
self.sendNoti("Windows", "EP")
else:
self.sendNoti("Discord", "EP")
time.sleep(int(delay))
sys.setrecursionlimit(5000)
window = Tk()
mywin = MyWindow(window)
with open(f'./data/key.txt', 'r') as f:
key = f.read()
if key != "":
mywin.readKey(key)
with open(f'./data/hook.txt', 'r') as f:
key = f.read()
if key != "":
mywin.readHook(key)
window.title(f'SimpleMMO Notifications Tool')
window.iconbitmap('./data/logo.ico')
window.geometry("500x200")
window.mainloop()
| 2.75 | 3 |
nssrc/com/citrix/netscaler/nitro/resource/config/protocol/__init__.py | benfinke/ns_python | 2 | 12788131 | __all__ = ['protocolhttpband'] | 1.15625 | 1 |
cobl/lexicon/migrations/0003_dyen_name_data_migration.py | Bibiko/CoBL-public | 3 | 12788132 | <gh_stars>1-10
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def dyenname_to_languagedata(apps, schema_editor):
# move (possible) contents of the DyenName table to
# Language.data["dyen_name"] in preparation for dropping
# DyenName table
DyenName = apps.get_model("lexicon", "DyenName")
for dnobj in DyenName.objects.all():
dnobj.language.data["dyen_name"] = dnobj.name
dnobj.language.save()
class Migration(migrations.Migration):
dependencies = [
('lexicon', '0002_add_JSON_fields'),
]
operations = [
migrations.RunPython(dyenname_to_languagedata)
]
| 1.976563 | 2 |
分类代表题目/动态规划/股票类型问题/指定K次交易.py | ResolveWang/algorithm_qa | 79 | 12788133 | <reponame>ResolveWang/algorithm_qa<filename>分类代表题目/动态规划/股票类型问题/指定K次交易.py
"""
Problem: Stock prices are stored in an array in chronological order, and at most K
buy/sell transactions are allowed. What is the maximum profit that can be obtained from this stock?
""" | 1.21875 | 1 |
tests/test_api_key.py | Thierryvil/escolhedor-de-filme | 0 | 12788134 | """ Tests File"""
import requests
from website import API_KEY
def test_api_key_is_not_null():
""" Get API_KEY value and compare """
    assert API_KEY is not None
def test_request_api_key():
""" Test a request with api_key value """
assert requests.get(f'https://api.themoviedb.org/3/movie/76341?api_key={API_KEY}').status_code == 200
| 3.046875 | 3 |
python/lib/tensor_embedding.py | ajoudaki/Project2020-seq-tensor-sketching | 7 | 12788135 | <filename>python/lib/tensor_embedding.py
# TENSOR EMBEDDING
from lib.base import *
# a_1...a_t is mapped to index A^{t-1} a_1 + ... + A * a_{t-1} + 1 * a_t
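# Added illustrative note: with A = 4 and t = 2, for example, the pair (a_1, a_2) = (2, 3)
# is counted at index 4 * 2 + 3 = 11 of the length A^2 = 16 tensor, i.e. the mapping above
# is plain base-A positional encoding of the tuple.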
@jitclass(sketchparams_spec + [('pow', nb.int32[:])])
class TE(Sketcher):
# https://github.com/numba/numba/issues/1694
__init__Sketcher = Sketcher.__init__
def __init__(self, params):
self.__init__Sketcher(params)
self.pow = np.zeros(self.t + 1, np.int32)
self.pow[0] = 1
for i in range(1, self.t + 1):
self.pow[i] = self.A * self.pow[i - 1]
# NOTE: The sketch is stored as float64 here so counting won't overflow.
def _empty_tensor(self):
Ts = List()
for l in self.pow:
Ts.append(np.zeros(l, np.float64))
return Ts
# Return the sketch for the concatenation of two sequences.
# TODO: Optimize this to modify Tr in place.
def _join(self, Tl, Tr):
Ts = self._empty_tensor()
for tr in range(self.t + 1):
for tl in range(self.t + 1 - tr):
Ts[tl + tr] += np.kron(Tl[tl], Tr[tr])
return Ts
# Returns the raw 1D count sketches for all tuple sizes up to t.
# NOTE: This returns counts, not frequencies.
def _full_sketch(self, seq: Sequence):
Ts = self._empty_tensor()
Ts[0][0] = 1
# sketch
for c in seq.seq:
assert 0 <= c and c < self.A
for i in range(self.t - 1, -1, -1):
for j in range(len(Ts[i])):
Ts[i + 1][self.A * j + c] += Ts[i][j]
return Ts
def sketch_one(self, seq: Sequence) -> SketchedSequence:
full_sketch = self._full_sketch(seq)
if self.normalize:
# Normalization factor.
n = seq.len()
nct = nb.float64(1)
for i in range(self.t):
nct = nct * (n - i) / (i + 1)
full_sketch[self.t] /= nct
sketch = np.array([x for x in full_sketch[self.t]], dtype=nb.float32)
return SketchedSequence(seq, sketch)
# Returns the sketch for the given t as frequencies.
def sketch(self, seqs: list[Sequence]) -> list[SketchedSequence]:
return [self.sketch_one(seq) for seq in seqs]
| 2.484375 | 2 |
users/migrations/0007_images_date.py | alerin345/Instagram | 1 | 12788136 | # Generated by Django 3.1.3 on 2020-11-26 02:30
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('users', '0006_auto_20201126_0321'),
]
operations = [
migrations.AddField(
model_name='images',
name='date',
field=models.DateTimeField(default=datetime.datetime(2020, 11, 26, 2, 30, 10, 7725, tzinfo=utc)),
),
]
| 1.828125 | 2 |
transformer/seq2seq.py | christofid/DomainUnderstanding | 0 | 12788137 | <filename>transformer/seq2seq.py
import torch.nn as nn
import torch
class Seq2Seq(nn.Module):
def __init__(self,
encoder,
decoder,
src_pad_idx,
trg_pad_idx,
device):
super().__init__()
self.encoder = encoder
self.decoder = decoder
self.src_pad_idx = src_pad_idx
self.trg_pad_idx = trg_pad_idx
self.device = device
def make_src_mask(self, src):
src_mask = (src != self.src_pad_idx).unsqueeze(1).unsqueeze(2)
return src_mask
def make_trg_mask(self, trg):
trg_pad_mask = (trg != self.trg_pad_idx).unsqueeze(1).unsqueeze(2)
trg_len = trg.shape[1]
trg_sub_mask = torch.tril(torch.ones((trg_len, trg_len), device=self.device)).bool()
trg_mask = trg_pad_mask & trg_sub_mask
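        # Added illustrative note: for trg_len = 3, trg_sub_mask is the lower-triangular
        # boolean matrix [[1, 0, 0], [1, 1, 0], [1, 1, 1]], so position i may only attend
        # to positions 0..i; ANDing with trg_pad_mask also hides padding positions.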
return trg_mask
def forward(self, src, trg):
src_mask = self.make_src_mask(src)
trg_mask = self.make_trg_mask(trg)
enc_src = self.encoder(src, src_mask)
output, attention = self.decoder(trg, enc_src, trg_mask, src_mask)
return output, attention | 2.6875 | 3 |
projects/views.py | medfiras/Bazinga | 0 | 12788138 | from django import forms
from projects.models import *
from django.views.decorators.csrf import csrf_exempt
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.shortcuts import render
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.contrib.auth.models import User
import urllib2
import json
class Project(forms.ModelForm):
"""docstring for Project"""
class Meta:
model = Projects
exclude = ['created_on', 'updated_on']
widgets = {
'description': forms.Textarea(attrs={'rows':4, 'cols':15}),
'name': forms.TextInput(attrs = {'autofocus':"autofocus"})
}
@staticmethod
@csrf_exempt
def List_all_project(request):
        project_list = Projects.objects.all()
        paginator = Paginator(project_list, 10)  # Show 10 projects per page
page_range = paginator.page_range
page = request.GET.get('page')
try:
projects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
projects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
            projects = paginator.page(paginator.num_pages)
return render_to_response('projects.html', {'resource':'projects', 'project_list' : projects, 'page_range' : page_range}, context_instance=RequestContext(request))
@staticmethod
@csrf_exempt
def get_repo_info(request):
response = urllib2.urlopen('https://api.github.com/users/moztn/repos')
data = json.load(response)
        paginator = Paginator(data, 10)  # Show 10 repos per page
page_range = paginator.page_range
page = request.GET.get('page')
try:
projects = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
projects = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
            projects = paginator.page(paginator.num_pages)
return render_to_response('projects.html', {'data':projects}, context_instance=RequestContext(request))
@staticmethod
@csrf_exempt
def get_repo_details(request):
repo_name = request.GET.get('repo')
repo_url = 'https://api.github.com/repos/moztn/'+repo_name
response = urllib2.urlopen(repo_url)
data = json.load(response)
contrib_url = data['contributors_url']
contrib_response = urllib2.urlopen(contrib_url)
contrib_data = json.load(contrib_response)
language_url = data['languages_url']
language_response = urllib2.urlopen(language_url)
language_data = json.load(language_response)
activity_url = data['events_url']
activity_response = urllib2.urlopen(activity_url)
act_data = json.load(activity_response)
        paginator = Paginator(act_data, 10)  # Show 10 events per page
page_range = paginator.page_range
page = request.GET.get('page')
try:
activity_data = paginator.page(page)
except PageNotAnInteger:
# If page is not an integer, deliver first page.
activity_data = paginator.page(1)
except EmptyPage:
# If page is out of range (e.g. 9999), deliver last page of results.
            activity_data = paginator.page(paginator.num_pages)
return render_to_response('projects_details.html', {'data':data, 'contrib_data':contrib_data, 'language_data':language_data, 'activity_data':activity_data}, context_instance=RequestContext(request))
| 2.09375 | 2 |
github-webhooks/Event.py | srnd/mattermost-code-review | 2 | 12788139 | class Event:
def __init__(self,data,event=None):
assert type(data) is dict, "data is of type {} not dict".format(type(data))
self.data = data
def __getattr__(self, item):
if item not in self.data:
return False
output = self.data[item]
while type(output) is dict:
output = Event(output) # So nested attributes work - probably a better way to do this
return output
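    # Added illustrative note: Event({"repository": {"full_name": "octo/repo"}}) lets a
    # handler write event.repository.full_name, while any missing key (e.g. event.sender)
    # simply evaluates to False instead of raising AttributeError.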
def __repr__(self):
if type(self.data) is dict:
return self.data
else:
return object.__repr__(self)
def __str__(self):
return str(self.__repr__()) # override to force string - repr can return dict | 3.4375 | 3 |
setup.py | karimbahgat/iPandoc | 1 | 12788140 | <gh_stars>1-10
try: from setuptools import setup
except ImportError: from distutils.core import setup
setup( long_description=open("README.rst").read(),
name="""iPandoc""",
license="""MIT""",
author="""<NAME>""",
author_email="""<EMAIL>""",
py_modules=['ipandoc'],
url="""http://github.com/karimbahgat/iPandoc""",
version="""1.0.0""",
keywords="""pandoc document format conversion rst markdown docverter API""",
classifiers=['License :: OSI Approved', 'Programming Language :: Python', 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', 'Intended Audience :: End Users/Desktop'],
description="""Pure Python bindings to the online Docverter Pandoc document format conversion API.""",
)
| 1.398438 | 1 |
thinkutils_plus/common/think_hashlib.py | ThinkmanWang/thinkutils_plus | 0 | 12788141 | import os
import sys
import hashlib
import requests
def md5_str(szText):
return str(hashlib.md5(szText).hexdigest())
def MD5(szText):
m = hashlib.md5()
m.update(szText)
return m.digest()
def md5_file(filePath):
with open(filePath, 'rb') as fh:
m = hashlib.md5()
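        # Hash the file in 8 KiB chunks so memory use stays constant even for large files.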
while True:
data = fh.read(8192)
if not data:
break
m.update(data)
return m.hexdigest()
# print md5_str("cf43e9194c5e4f7c8cb11469e1d0691c")
# print MD5("cf43e9194c5e4f7c8cb11469e1d0691c")
# print len(MD5("cf43e9194c5e4f7c8cb11469e1d0691c")) | 2.953125 | 3 |
lib/efelMeasurements.py | ForthePareto/SpikOpt | 0 | 12788142 | import math
import warnings
from collections import OrderedDict
from enum import Enum
import efel
import matplotlib.pyplot as plt
import numpy as np
from lib.Model import Model
from lib.NrnModel import NrnModel
class Level(Enum):
HIGH = 0.5
MID = 5.0
LOW = 10.0
VLOW = 50.0
EFEL_NAME_MAP = {
"AP Amplitude": "AP_amplitude",
"AP Height": "AP_height",
"AP Width": "AP_width",
"AHP Absolute Depth": "AHP_depth_abs",
"AHP time from peak": "AHP_time_from_peak",
"Spike Count": "Spikecount",
"Time to First Spike": "time_to_first_spike",
}
EFEL2NAME_MAP = {v: k for k, v in EFEL_NAME_MAP.items()}
def _zero_valued_dict(keys):
return dict.fromkeys(keys, 0)
class EfelMeasurements():
def __init__(self, model:Model , config: dict):
self.cell = model
self.voltage = None
self.t = None
self.delay = None
self.duration = None
self.current_amplitude = None
self.Tstop = None
self.trace = {}
self._setup(config)
def _setup(self, config):
self.voltage, self.t = self.cell.stimulateCell(
float(config["Amplitude"]), float(
config["Duration"]), float(config["Delay"]),
float(
config["T stop"]), config["Stimulus Section"], config["Recording Section"],
clampAt=float(config["Stimulus Position"]), recordAt=float(config["Recording Position"]), init=float(config["Vinit"]))
self.delay = float(config["Delay"])
self.duration = float(config["Duration"])
self.Tstop = float(config["T stop"])
self.current_amplitude = float(config["Amplitude"])
self._initialize()
def _initialize(self):
# start = sorted(self._closeMatches(self.t,delay,0.025),key=lambda x: x[0])[0][0]
# end = sorted(self._closeMatches(self.t,delay+duration,0.025),key=lambda x: x[0])[0][0]
# print(t[2]-t[1])
efel.setDoubleSetting('stimulus_current', self.current_amplitude)
efel.setIntSetting("strict_stiminterval", True)
self.trace['T'] = self.t
self.trace['V'] = self.voltage
# max because delay may be less than 5ms
self.trace['stim_start'] = [max(self.delay-5, 0)]
self.trace['stim_end'] = [self.Tstop]
return self.voltage, self.t
def get_measurements(self, outputDict: dict,featureNames: list):
traces = [self.trace]
efel_feature_names = self._convert_to_efel_names(featureNames)
warnings.filterwarnings("ignore", category=RuntimeWarning)
check_peaks = efel.getFeatureValues(traces, ["Spikecount_stimint"])
if check_peaks[0]["Spikecount_stimint"][0] == 0:
return _zero_valued_dict(featureNames)
amplitudes = efel.getFeatureValues(traces, ["AP_amplitude"])
if (amplitudes[0]["AP_amplitude"] is None):
# print("efel failed",len(traces_results[0]["AP_amplitude"]) , len(traces_results[0]["AP_height"]))
print(f"n spikes are {check_peaks[0]['Spikecount_stimint'][0]}")
return _zero_valued_dict(featureNames)
traces_results = efel.getFeatureValues(traces, efel_feature_names)
warnings.filterwarnings("default", category=RuntimeWarning)
for trace_results in traces_results:
# trace_result is a dictionary, with as keys the requested eFeatures
for feature_name, feature_values in trace_results.items():
if len(feature_values) > 0:
outputDict[EFEL2NAME_MAP[feature_name]
] = np.mean(feature_values)
else:
print(f"{feature_name} failed")
print(f"{feature_name} equals {feature_values}")
outputDict[EFEL2NAME_MAP[feature_name]] = 0
if "Time to First Spike" in list(outputDict.keys()):
if outputDict["Time to First Spike"] !=0:
outputDict["Time to First Spike"] +=self.delay
self.measurements = outputDict
# for name in featureNames:
# if name == "Input Resistance":
# self.measurements[name] = self.inputResistance(-0.5,
# plotting=False, printing=False)
# elif name == "Rheobase":
# self.measurements[name] = self.Rheobase(
# Level.VLOW, 1, plotting=False, printing=False)
# elif name == "Time Constant":
# self.measurements[name] = self.timeConstant(
# -0.5, plotting=False, printing=False)
return self.measurements
def _closeMatches(self, lst: list, findVal, tolerance):
""" find a list of closest matches to a specific value with a spicified tolerance
Args:
:param lst: target list to search into
:param findVal: target value
:param tolerance: accepted error in matches
:return: list of (value,index) pairs
"""
# matches = [(val,index) for index,val in enumerate(lst) if abs(val - findVal) < tolerance]
matches = [(val, index) for index, val in enumerate(lst)
if math.isclose(val, findVal, abs_tol=tolerance)]
return matches
def _convert_to_efel_names(self, regular_feature_names: list):
efel_feature_names = []
for fName in regular_feature_names:
if fName not in list(EFEL_NAME_MAP.keys()):
raise ValueError(
f" Feature: '{fName}' is not availabe in Efel or not spelled well")
efel_feature_names.append(EFEL_NAME_MAP[fName])
return efel_feature_names
if __name__ == '__main__':
fig, ax = plt.subplots()
for i in range(1):
delay = 150 # 150
duration = 1
current = 21
efel.setDoubleSetting('stimulus_current', current)
# efel.setDoubleSetting('interp_step', 0.025)
# efel.setIntSetting("strict_stiminterval", True)
testEFEL = EfelMeasurements()
testEFEL.stimulateCell(current, duration, delay,
testEFEL.iseg, 0.5, 500)
testEFEL.get_measurements(["Spikecount", "time_to_first_spike", "AP_amplitude",
"AP_height", 'AP_width', 'AHP_depth_abs', "AHP_time_from_peak"])
testEFEL.model.graphVolt(
testEFEL.voltage, testEFEL.t, "trace", ax, color=np.random.rand(3,))
# ax.set_color("red")
plt.show()
| 2.21875 | 2 |
tests/test_svg.py | topology-gg/caistring | 6 | 12788143 | <filename>tests/test_svg.py
import pytest
import os
from starkware.starknet.testing.starknet import Starknet
import asyncio
from random import choices
@pytest.mark.asyncio
async def test_svg ():
starknet = await Starknet.empty()
print()
#
# Test tpg svg
#
contract_tpg = await starknet.deploy('examples/tpg_web_svg.cairo')
ret = await contract_tpg.return_svg().call()
recovered_svg = felt_array_to_ascii(ret.result.arr)
print(f'> tpg::return_svg(): {recovered_svg}')
assert recovered_svg == '''<svg xmlns="http://www.w3.org/2000/svg" width="160" height="160"><polygon fill="#258ed6" points="110.62 149.06 102.63 10.94 51.62 42.59 110.62 149.06"><animate id="shape1" attributeName="points" to="85.43 103.6 147.1 18.21 19.97 103.6 85.43 103.6" dur="1.5s" fill="freeze" begin="0s; shape_og.end"/><animate id="shape2" attributeName="points" to="107.99 103.6 147.1 67.19 17.8 33.87 107.99 103.6" dur="1.5s" fill="freeze" begin="shape1.end"/><animate id="shape_og" attributeName="points" to="110.62 149.06 102.63 10.94 51.62 42.59 110.62 149.06" dur="1.5s" fill="freeze" begin="shape2.end"/><animate id="color1" begin="0s; color_og.end" fill="freeze" attributeName="fill" dur="1.5s" to="#52a747"></animate><animate id="color2" begin="color1.end" fill="freeze" attributeName="fill" dur="1.5s" to="#f1fb3b"></animate><animate id="color_og" begin="color2.end" fill="freeze" attributeName="fill" dur="1.5s" to="#258ed6"></animate></polygon></svg>'''
# @pytest.mark.asyncio
# async def test_svg_one_circle ():
# starknet = await Starknet.empty()
# contract = await starknet.deploy('contracts/mocks/SvgMock.cairo')
# print()
# ret = await contract.generate_one_circle_svg_test().call()
# recovered_svg = felt_array_to_ascii(ret.result.str_arr)
# print(recovered_svg)
# assert recovered_svg == '''<svg width="300" height="300" xmlns="http://www.w3.org/2000/svg"><circle cx="150" cy="150" r="50" stroke="#3A3A3A" fill="#FCFC99" /></svg>'''
def felt_array_to_ascii (felt_array):
ret = ""
for felt in felt_array:
ret += felt_to_ascii (felt)
return ret
def felt_to_ascii (felt):
bytes_object = bytes.fromhex( hex(felt)[2:] )
ascii_string = bytes_object.decode("ASCII")
return ascii_string
| 2.328125 | 2 |
fruit_classification_cnn.py | marshall4471/fruit_prediction_from_the_fruits_360datset | 0 | 12788144 | <reponame>marshall4471/fruit_prediction_from_the_fruits_360datset
# -*- coding: utf-8 -*-
"""fruit_classification_cnn.ipynb
Automatically generated by Colaboratory.
Original file is located at
https://colab.research.google.com/drive/1Aofwv5h3BbHf_PoSantjIgg6AlbvMsCK
"""
from keras.models import Sequential
import tensorflow as tf
import keras as k
from keras import layers
from keras.layers import Conv2D
from keras.layers import Dense
from keras.layers import Flatten
from keras.layers import Dropout
from keras.layers import Activation
from keras.layers import MaxPooling2D
from keras import preprocessing
test_datagen = k.preprocessing.image.ImageDataGenerator(rescale=1./255)
from google.colab import drive
drive.mount("/content/gdrive")
file = ('/content/gdrive/MyDrive/fruits/fruits-360/Test')
file2 = ('/content/gdrive/MyDrive/fruits/fruits-360/Training')
train_datagen = k.preprocessing.image.ImageDataGenerator(rescale=1./255, horizontal_flip=True, validation_split=0.2)
train_gen = train_datagen.flow_from_directory(directory = file2, subset='training', target_size=(100,100), shuffle=True, class_mode='categorical', batch_size=500)
val_gen = train_datagen.flow_from_directory(directory= file2, subset='validation', shuffle=True, class_mode='categorical', target_size=(100,100), batch_size=500)
test_gen = test_datagen.flow_from_directory(directory=file, shuffle=True, target_size=(100,100), class_mode='categorical', batch_size=500)
from tensorflow.keras import Model
import keras
model = keras.Sequential([
layers.Conv2D(filters=32, kernel_size=(3,3), strides=(1,1), activation='relu', input_shape=(100,100,3)),
layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),
layers.Conv2D(filters=64, kernel_size=(3,3), strides=(1,1), activation='relu'),
layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),
layers.Conv2D(filters=128, kernel_size=(3,3), strides=(1,1), activation='relu'),
layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),
layers.Conv2D(filters=256, kernel_size=(3,3), strides=(1,1), activation='relu'),
layers.MaxPool2D(pool_size=(2,2), strides=(2,2)),
layers.Flatten(),
layers.Dense(128, activation='relu'),
layers.Dense(131, activation='softmax')
])
model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['categorical_accuracy'])
history = model.fit(train_gen, validation_data = val_gen, batch_size=32, epochs=12)
model.save('fruit_pred.h5')
import cv2
import matplotlib.pyplot as plt
import numpy as np
from google.colab import files
uploaded = files.upload()
x = plt.imread('avocado.jpg')
plt.imshow(x)
x = x/255
x = np.resize(x,(1,100,100,3))
x.shape
classes = list(train_gen.class_indices)
print(classes[np.argmax(model.predict(x))])
| 2.4375 | 2 |
downpour_downloader/downloader.py | ErinMorelli/em-downpour-downloader | 5 | 12788145 | <gh_stars>1-10
"""
Copyright (C) 2021 <NAME>.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see [https://www.gnu.org/licenses/].
"""
import os
from stat import S_IRUSR
import click
from cryptography.fernet import Fernet
from sqlalchemy.orm import sessionmaker
from sqlalchemy import MetaData, create_engine
from sqlalchemy.exc import InvalidRequestError
from sqlalchemy.ext.automap import automap_base
from .books import BooksContent
from .content import DownpourContent
class DownpourDownloader:
"""Core utilities and setup actions."""
user_path = os.path.expanduser('~')
# Manager configuration files storage location
config_env_var = 'DP_DOWNLOAD_CONFIG_PATH'
config_default = os.path.join(user_path, '.config', 'downpour-downloader')
config_path = os.environ.get(config_env_var, None) or config_default
# File to store password encryption key
__key_file = os.path.join(config_path, '.secret_key')
# URI for the local sqlite database file
db_uri = f'sqlite:///{config_path}/content.db'
# CLI context settings
context_settings = {
'help_option_names': ['-h', '--help']
}
# List of all content type classes
_types = [DownpourContent, BooksContent]
def __init__(self):
"""Setup the sqlite database."""
self._setup()
# Encryption/decryption cipher handler
self.__cipher = self.__get_cipher()
# Setup the engine for the sqlite database
self._engine = create_engine(self.db_uri)
# Configure the SQLAlchemy metadata
self._metadata = MetaData()
self._metadata.bind = self._engine
self._load_db()
# Configure the auto-mapping base model
self._base = automap_base(metadata=self._metadata)
self._base.prepare()
# Setup a session generator for database connections
self._session = sessionmaker(bind=self._engine)
def _setup(self):
"""Make sure files and folders exist."""
if not os.path.isdir(self.config_path):
os.makedirs(self.config_path)
# Create a key file if one does not exist
if not os.path.isfile(self.__key_file):
with open(self.__key_file, 'wb') as f:
f.write(Fernet.generate_key())
# Make the file read-only
os.chmod(self.__key_file, S_IRUSR)
def __get_cipher(self):
"""Create a cipher manager from the stored key."""
return Fernet(open(self.__key_file, 'rb').read())
def encode(self, data):
"""Encode data with the cipher manager."""
return self.__cipher.encrypt(data.encode('utf-8'))
def decode(self, data):
"""Decode data with the cipher manager."""
return self.__cipher.decrypt(data)
def _load_db(self):
"""Dynamically loads database table schemas."""
for type_ in self._types:
try:
type_.table(self._metadata)
except InvalidRequestError:
pass
# Reflect metadata so auto-mapping works
self._metadata.reflect(self._engine)
# Make sure the tables exist
self._metadata.create_all()
def get_session(self):
"""Create a new database session using the session maker."""
return self._session()
@property
def models(self):
"""Object containing auto-mapped database model classes."""
return self._base.classes
@staticmethod
def success(msg):
"""Print success message in green text."""
click.secho(msg, fg='green')
@staticmethod
def info(msg):
"""Print info message in blue text."""
click.secho(msg, fg='blue')
@staticmethod
def warning(msg):
"""Print warning message in yellow text."""
click.secho(msg, fg='yellow')
@staticmethod
def error(msg):
"""Print error message in red text."""
click.secho(f'[ERROR] {msg}', fg='red')
@property
def cli(self):
"""Base command group to load subcommands into."""
@click.group(context_settings=self.context_settings)
def fn():
"""Manage Patreon exclusive content."""
# Dynamically load commands from content type classes
for type_ in self._types:
fn.add_command(type_(self).cli, type_.command)
return fn
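if __name__ == '__main__':
    # Hypothetical entry point (not part of the original module): build the
    # manager and launch its click command group directly.
    DownpourDownloader().cli()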
| 2 | 2 |
tests/unit/identity_parser_test.py | vrepsys/blockstats | 5 | 12788146 | <gh_stars>1-10
import json
import pytest
from blockstats import parser
def test_parse_subdomain_identity():
assert parser.parse_identity('val.id.blockstack') == \
{'address': 'val.id.blockstack', 'namespace': 'blockstack', 'name': 'id', 'subdomain': 'val'}
def test_parse_name_identity():
assert parser.parse_identity('id.blockstack') == \
{'address': 'id.blockstack', 'namespace': 'blockstack', 'name': 'id'}
def test_parse_long_identity():
assert parser.parse_identity('val.one.two.three.blockstack') == \
{'address': 'val.one.two.three.blockstack', 'namespace': 'blockstack',
'name': 'one.two.three', 'subdomain': 'val'}
def test_parse_invalid_identity():
with pytest.raises(ValueError):
assert parser.parse_identity('blockstack')
@pytest.mark.parametrize("zonefile,expected_profile_url", [
('$ORIGIN hello.id\n$TTL 3600\n_http._tcp URI 10 1 "https://gaia.blockstack.org/hub/123/0/profile.json"\n',
'https://gaia.blockstack.org/hub/123/0/profile.json'),
('$ORIGIN hello.id\n$TTL 3600\n_http._tcp IN URI 10 1 "https://blockstack.s3.amazonaws.com/woah.id"',
'https://blockstack.s3.amazonaws.com/woah.id')
])
def test_extract_profile_url(zonefile, expected_profile_url):
assert parser.extract_profile_url(zonefile) == expected_profile_url
def test_parse_apps_list():
with open("tests/data/valid_profile.json", "r") as valid_profile:
profile_json = json.load(valid_profile)
assert parser.extract_apps_list(profile_json) == [
'https://app.graphitedocs.com',
'http://localhost:8080',
'https://www.stealthy.im',
'http://fupio.com']
@pytest.mark.parametrize("profile_json", [[{}], {}])
def test_parse_apps_list_failure(profile_json):
assert parser.extract_apps_list(profile_json) is None
def test_is_person():
with open("tests/data/valid_profile.json", "r") as valid_profile:
profile_json = json.load(valid_profile)
assert parser.is_person(profile_json) is True
| 2.359375 | 2 |
mylibs/dnslib.py | NobuyukiInoue/pyDNSdump | 0 | 12788147 | # -*- coding: utf-8 -*-
import mylibs.recordtype
def is_ipv4_addr(resolvstr):
flds = resolvstr.split(".")
if len(flds) != 4:
return False
for oct in flds:
if not oct.isdecimal():
return False
if int(oct) < 0 or int(oct) > 255:
return False
return True
def set_Header_and_Question(Transaction_ID, resolvstring, type):
data = Transaction_ID.to_bytes(2, 'big') # Transaction ID
data += 0x0100.to_bytes(2, 'big') # Flags
data += 0x0001.to_bytes(2, 'big') # Questions
data += 0x0000.to_bytes(2, 'big') # Answer RRS
    data += 0x0000.to_bytes(2, 'big') # Authority RRs
data += 0x0000.to_bytes(2, 'big') # Additional RRS
# Queries
if resolvstring == ".":
data += 0x00.to_bytes(1, 'big')
else:
flds = resolvstring.split(".")
for name in flds:
data += len(name).to_bytes(1, 'big')
data += name.encode(encoding = 'ascii')
data += 0x00.to_bytes(1, 'big')
# set Type list
mylibs.recordtype.set_type_list()
data += mylibs.recordtype.set_RecordType(type) # Type
data += 0x0001.to_bytes(2, 'big') # Class ... IN(0x0001)
return data
def get_class(int_class):
"""
RFC 1035
https://www.ietf.org/rfc/rfc1035.txt
"""
if int_class == 1:
return "IN"
elif int_class == 2:
return "CS"
elif int_class == 3:
return "CH"
elif int_class == 4:
return "HS"
else:
return ""
def get_dhms(ttl):
day_seconds = 24*60*60
d = ttl // day_seconds
t = ttl % day_seconds
ss = t % 60
mm = t // 60
hh = mm // 60
mm = mm % 60
return "{0:d} day {1:02d}:{2:02d}:{3:02d}".format(d, hh, mm, ss)
def get_algorithm(byte_algorithm):
if byte_algorithm == 1:
return ("MD5", 128)
elif byte_algorithm == 5:
return ("SHA1", 160)
elif byte_algorithm == 8:
return ("SHA256", 256)
elif byte_algorithm == 10:
return ("SHA512", 512)
else:
return ("", 0)
def get_dnskey_protocol(Protocol):
if Protocol == 3:
return "DNSKEY"
else:
return ""
def get_digest_type(digest_type):
if digest_type == 0:
return "Reserved"
if digest_type == 1:
return "SHA1"
if digest_type == 2:
return "SHA256"
if digest_type == 3:
return "GOST R 34.11-94"
if digest_type == 4:
return "SHA-384"
else:
return "Unassigned"
def get_NSEC3_Hash_algorithm(byte_algorithm):
if byte_algorithm == 0:
return "Reserved"
elif byte_algorithm == 1:
return "SHA1"
else:
return "Available for assignment"
def get_stripv6addr(addr):
    """Render 16 raw bytes as compressed IPv6 text, e.g. 2001:db8::1."""
    # The standard library performs the zero-run ("::") compression correctly;
    # the previous hand-rolled loop also collapsed non-contiguous zero groups,
    # which produces an ambiguous address string.
    import ipaddress
    return str(ipaddress.IPv6Address(bytes(addr)))
def print_recv_data(data):
print("{0:04x}: {1:13} {2:<16}".format(0, "", "Header:"))
fld_Transaction_ID = (data[0] << 8) + data[1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(0, fld_Transaction_ID, "", "Transaction ID:", fld_Transaction_ID))
fld_Flags = (data[2] << 8) + data[3]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4}".format(2, fld_Flags, "", "Flags:", bin(fld_Flags)))
print_flags(fld_Flags)
fld_Question = (data[4] << 8) + data[5]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(4, fld_Question, "", "Questions:", fld_Question))
fld_Anser_RRS = (data[6] << 8) + data[7]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(6, fld_Anser_RRS, "", "Answer RRS:", fld_Anser_RRS))
fld_Authority_RRS = (data[8] << 8) + data[9]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(8, fld_Authority_RRS, "", "Authority RRS:", fld_Authority_RRS))
fld_Additional_RRS = (data[10] << 8) + data[11]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(10, fld_Additional_RRS, "", "Additional RRS:", fld_Additional_RRS))
i = 12
print("\n"
"{0:04x}: {1:13} {2}".format(i, "", "Querys:"))
# Name:
i = print_name(data, "Name:", i)
fld_type = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4}({5:d})".format(i, fld_type, "", "Type:", mylibs.recordtype.get_RecordType(fld_type), fld_type))
i += 2
fld_class = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4}({5:d})".format(i, fld_class, "", "Class:", get_class(fld_class), fld_class))
i += 2
i = get_answer(data, i, "Answer", fld_Anser_RRS)
i = get_answer(data, i, "Authority", fld_Authority_RRS)
i = get_answer(data, i, "Additional", fld_Anser_RRS)
return i
def print_flags(flags):
print("/*")
QR = (flags & 0x8000) >> 15
label = "[bit 0] QR"
if QR == 0:
print("{0:21} {1:<20}({2:d}) {3}".format("", label, QR, "... Query"))
elif QR == 1:
print("{0:21} {1:<20}({2:d}) {3}".format("", label, QR, "... Response"))
OPCODE = (flags & 0x7800) >> 11
label = "[bit 1-4] OPCODE"
if OPCODE == 0:
print("{0:21} {1:<20}({2:d}) {3}".format("", label, OPCODE, "... standard query"))
elif OPCODE == 1:
print("{0:21} {1:<20}({2:d}) {3}".format("", label, OPCODE, "... inverse query"))
elif OPCODE == 2:
print("{0:21} {1:<20}({2:d}) {3}".format("", label, OPCODE, "... server status request"))
AA = (flags & 0x0400) >> 10
label = "[bit 5] AA"
if AA == 0:
print("{0:21} {1:<20}({2:d}) {3}".format("", label, AA, "... Not Authoritative"))
elif AA == 1:
print("{0:21} {1:<20}({2:d}) {3}".format("", label, AA, "... Authoritative"))
TC = (flags & 0x0200) >> 9
label = "[bit 6] TC"
    if TC == 0:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, TC, "... Not truncated"))
    elif TC == 1:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, TC, "... Message truncated"))
RD = (flags & 0x0100) >> 8
label = "[bit 7] RD"
    if RD == 0:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RD, "... Recursion not desired"))
    elif RD == 1:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RD, "... Recursion desired"))
RA = (flags & 0x0080) >> 7
label = "[bit 8] RA"
    if RA == 0:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RA, "... Recursion not available"))
    elif RA == 1:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RA, "... Recursion available"))
Reserve = (flags & 0x0040) >> 6
label = "[bit 9] Reserve"
print("{0:21} {1:<20}({2:d})".format("", label, Reserve))
# bit 10 AD Authentic Data [RFC4035][RFC6840][RFC Errata 4924]
AD = (flags & 0x0020) >> 5
label = "[bit 10] Authentic Data"
print("{0:21} {1:<20}({2:d})".format("", label, AD))
# bit 11 CD Checking Disabled [RFC4035][RFC6840][RFC Errata 4927]
CD = (flags & 0x0010) >> 4
label = "[bit 11] Checking Disable"
print("{0:21} {1:<20}({2:d})".format("", label, CD))
RCODE = (flags & 0x000f)
label = "[bit 12-15] RCODE"
    if RCODE == 0:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RCODE, "... No Error"))
    elif RCODE == 1:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RCODE, "... Format Error"))
    elif RCODE == 2:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RCODE, "... Server Failure"))
    elif RCODE == 3:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RCODE, "... Name Error (NXDOMAIN)"))
    elif RCODE == 4:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RCODE, "... Not Implemented"))
    elif RCODE == 5:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RCODE, "... Refused"))
    else:
        print("{0:21} {1:<20}({2:d}) {3}".format("", label, RCODE, "... (unknown)"))
print("*/")
def print_name(data, title, i):
fld_name, name_length = get_name(data, i)
if data[i] == 0x00:
print("{0:04x}: {1:02x} {2:10} {3:<24} {4:}".format(i, data[i], "", title, "<Root>"))
else:
if 2*name_length < 13:
format_str = "{0:04x}: {1:0" + str(2*name_length) + "x} {2:" + str(13 - 2*name_length) + "}{3:<24} {4}"
else:
format_str = "{0:04x}: {1:0" + str(2*name_length) + "x} {2} {3:<24} {4}"
print(format_str.format(i, int.from_bytes(data[i:i + name_length], 'big') , "", title, fld_name))
i += name_length
return i
def get_answer(data, i, title, record_length):
record_count = 0
while i < len(data) and record_count < record_length:
print("\n"
"{0:04x}: {1:13} {2}".format(i, "", title + "[" + str(record_count) + "]:"))
result_bits = ((data[i] << 8) + data[i + 1]) & 0xC000
if result_bits == 0xc000:
name_hex = (data[i] << 8) + data[i + 1]
result_pos = name_hex & 0x3fff
fld_name, _ = get_name(data, result_pos)
print("{0:04x}: {1:04x} {2:8} {3:<24} {4}".format(i, int.from_bytes(data[i:i + 2], 'big') , "", "Name:", fld_name))
i += 2
elif result_bits == 0x8000:
i += 2
elif result_bits == 0x4000:
i += 2
elif data[i] == 0x00:
print("{0:04x}: {1:02x} {2:10} {3:<24} <Root>".format(i, data[i], "", "Name:"))
i += 1
fld_type = (data[i] << 8) + data[i + 1]
type_name = mylibs.recordtype.get_RecordType(fld_type)
print("{0:04x}: {1:04x} {2:8} {3:<24} {4}({5:d})".format(i, fld_type, "", "Type:", type_name, fld_type))
i += 2
fld_class = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4}({5:d})".format(i, fld_class, "", "Class:", get_class(fld_class), fld_class))
i += 2
fld_ttl = (data[i] << 24) + (data[i + 1] << 16) + (data[i + 2] << 8) + data[i + 3]
print("{0:04x}: {1:08x} {2:4} {3:<24} {4}({5:d})".format(i, fld_ttl, "", "Time to live:", get_dhms(fld_ttl), fld_ttl))
i += 4
fld_data_length = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(i, fld_data_length, "", "data_length:", fld_data_length))
i += 2
if type_name == "NS":
# Name:
i = print_name(data, "Name:", i)
elif type_name == "HINFO":
# HINFO:
i = print_name(data, "HINFO:", i)
elif type_name == "MX":
fld_Preference = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(i, fld_Preference, "", "fld_Preference:", fld_Preference))
i += 2
result, result_length = get_name(data, i)
format_str = "{0:04x}: {1:0" + str(2*result_length) + "x}\n {2:18} {3:<24} {4}"
print(format_str.format(i, int.from_bytes(data[i:i + result_length], 'big'), "", "Mail exchange:", result))
i += result_length
elif type_name == 'SOA':
fld_primary_name_server, result_length = get_name(data, i)
format_str = "{0:04x}: {1:0" + str(2*result_length) + "x}\n {2:18} {3:<24} {4}"
print(format_str.format(i, int.from_bytes(data[i:i + result_length], 'big'), "", "Primary name server:", fld_primary_name_server))
i += result_length
fld_Responsivle_authoritys_mailbox, length = get_name(data, i)
format_str = "{0:04x}: {1:0" + str(2*length) + "x} \n {2:18} {3:<24} {4}"
print(format_str.format(i, int.from_bytes(data[i:i + length], 'big'), "", "Responsible authoritys mailbox:", fld_Responsivle_authoritys_mailbox))
i += length
Serial_number = (data[i] << 24) + (data[i + 1] << 16) + (data[i + 2] << 8) + data[i + 3]
print("{0:04x}: {1:08x} {2:4} {3:<24} {4:d}".format(i, Serial_number, "", "Serial number:", Serial_number))
i += 4
Refresh_interval = (data[i] << 24) + (data[i + 1] << 16) + (data[i + 2] << 8) + data[i + 3]
print("{0:04x}: {1:08} {2:4} {3:<24} {4:}".format(i, Refresh_interval, "", "Refresh interval:", Refresh_interval))
i += 4
Retry_interval = (data[i] << 24) + (data[i + 1] << 16) + (data[i + 2] << 8) + data[i + 3]
print("{0:04x}: {1:08x} {2:4} {3:<24} {4:d}".format(i, Retry_interval, "", "Retry interval:", Retry_interval))
i += 4
Expiration_limit = (data[i] << 24) + (data[i + 1] << 16) + (data[i + 2] << 8) + data[i + 3]
print("{0:04x}: {1:08x} {2:4} {3:<24} {4:d}".format(i, Expiration_limit, "", "Expiration limit:", Expiration_limit))
i += 4
Minimum_TTL = (data[i] << 24) + (data[i + 1] << 16) + (data[i + 2] << 8) + data[i + 3]
print("{0:04x}: {1:08x} {2:4} {3:<24} {4}({5:d})".format(i, Minimum_TTL, "", "Minimum TTL:", get_dhms(Minimum_TTL), Minimum_TTL))
i += 4
elif type_name == 'A' or type_name == 'CNAME':
if fld_data_length == 4:
print("{0:04x}: {1:02x}{2:02x}{3:02x}{4:02x} {5:4} {6:<24} {7:d}.{8:d}.{9:d}.{10:d}".format(i, data[i], data[i + 1], data[i + 2], data[i + 3], "", "Addr:", data[i], data[i + 1], data[i + 2], data[i + 3]))
i += 4
else:
result, result_length = get_name(data, i)
format_str = "{0:04x}: {1:0" + str(2*result_length) + "x} \n {2:18} {3:<24} {4}"
print(format_str.format(i, int.from_bytes(data[i:i + result_length], 'big'), "", "Primary name:", result))
i += result_length
elif type_name == "TXT":
"""
# TXT:
i = print_name(data, "TXT:", i)
"""
fld_Text = data[i:i + fld_data_length]
format_str = "{0:04}: {1:0" + str(2*len(fld_Text)) + "x} \n {2:18} {3:<24} {4}"
print(format_str.format(i, int.from_bytes(fld_Text, 'big'), "", "Text:", fld_Text))
i += fld_data_length
elif type_name == "RRSIG":
i_start = i
fld_Type_covered = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(i, fld_Type_covered, "", "Type covered:", fld_Type_covered))
i += 2
fld_Algorithm = data[i]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4}({5:d})".format(i, fld_Algorithm, "", "Algorithm:", get_algorithm(fld_Algorithm)[0], fld_Algorithm))
i += 1
fld_Labels = data[i]
print("{0:04x}: {1:02x} {2:10} {3:<24} {4:d}".format(i, fld_Labels, "", "Labels:", fld_Labels))
i += 1
fld_Original_TTL = (data[i] << 24) + (data[i + 1] << 16) + (data[i + 2] << 8) + data[i + 3]
print("{0:04x}: {1:08x} {2:4} {3:<24} {4}({5:d})".format(i, fld_Original_TTL, "", "Original TTL:", get_dhms(fld_Original_TTL), fld_Original_TTL))
i += 4
            fld_Signature_expiration = (data[i] << 24) + (data[i + 1] << 16) + (data[i + 2] << 8) + data[i + 3]
            print("{0:04x}: {1:08x} {2:4} {3:<24} {4:d}".format(i, fld_Signature_expiration, "", "Signature expiration:", fld_Signature_expiration))
            i += 4
            fld_Signature_inception = (data[i] << 24) + (data[i + 1] << 16) + (data[i + 2] << 8) + data[i + 3]
            print("{0:04x}: {1:08x} {2:4} {3:<24} {4:d}".format(i, fld_Signature_inception, "", "Signature inception:", fld_Signature_inception))
            i += 4
            fld_Key_tag = (data[i] << 8) + data[i + 1]
            print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(i, fld_Key_tag, "", "Key tag:", fld_Key_tag))
            i += 2
result, result_length = get_name(data, i)
format_str = "{0:04x}: {1:0" + str(2*result_length) + "x}\n {2:18} {3:<24} {4}"
print(format_str.format(i, int.from_bytes(data[i:i + result_length], 'big'), "", "Signer's name:", result))
i += result_length
signature_size = fld_data_length - (i - i_start)
result, _ = get_signature(data, i, signature_size)
format_str = "{0:04x}: {1:0" + str(2*signature_size) + "x}\n" + " {2:18} {3:<24}"
print(format_str.format(i, int.from_bytes(data[i:i + signature_size], 'big'), "", "Signature:"), end = "")
print_result(result)
i += signature_size
elif type_name == "DNSKEY":
i_start = i
fld_Flags = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4}".format(i, fld_Flags, "", "Flags:", fld_Flags))
i += 2
fld_Protocol = data[i]
print("{0:04x}: {1:02x} {2:10} {3:<24} {4}({5:d})".format(i, fld_Protocol, "", "Protocol:", get_dnskey_protocol(fld_Protocol), fld_Protocol))
i += 1
fld_Algorithm = data[i]
print("{0:04x}: {1:02x} {2:10} {3:<24} {4}({5:d})".format(i, fld_Algorithm, "", "Algorithm:", get_algorithm(fld_Algorithm)[0], fld_Algorithm))
i += 1
fld_public_key_length = fld_data_length - (i - i_start)
fld_public_key = data[i:i + fld_public_key_length]
format_str = "{0:04x}: {1:0" + str(2*fld_public_key_length) + "x}\n {2:18} {3:<24}"
print(format_str.format(i, int.from_bytes(fld_public_key, 'big'), "", "Public Key:"), end = "")
print_result_bin(fld_public_key)
i += fld_public_key_length
elif type_name == "NSEC":
i_start = i
# Next domain name:
i = print_name(data, "Next domain name:", i)
fld_bitmap_length = fld_data_length - (i - i_start)
fld_bitmap = data[i:i + fld_bitmap_length]
format_str = "{0:04x}: {1:0" + str(2*fld_bitmap_length) + "x}\n {2:18} {3:<24}"
print(format_str.format(i, int.from_bytes(fld_bitmap, 'big'), "", "bit map:"), end = "")
print_result_bin(fld_bitmap)
i += fld_bitmap_length
elif type_name == "NSEC3" or type_name == "NSEC3PARAM":
i_start = i
fld_Algorithm = data[i]
print("{0:04x}: {1:02x} {2:10} {3:<24} {4}({5:d})".format(i, fld_Algorithm, "", "Hash Algorithm:", get_NSEC3_Hash_algorithm(fld_Algorithm), fld_Algorithm))
i += 1
fld_NSEC3_flags = data[i]
print("{0:04x}: {1:02x} {2:10} {3:<24} {4:d}".format(i, fld_Algorithm, "", "NSEC3 flags:", fld_NSEC3_flags))
i += 1
fld_NSEC3_iterations = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(i, fld_NSEC3_iterations, "", "NSEC3 iterations:", fld_NSEC3_iterations))
i += 2
fld_Salt_length = data[i]
print("{0:04x}: {1:02x} {2:10} {3:<24} {4:d}".format(i, fld_Salt_length, "", "Salt length:", fld_Salt_length))
i += 1
fld_Salt_value = int.from_bytes(data[i:i + fld_Salt_length], 'big')
format_str = "{0:04x}: {1:0" + str(2*fld_Salt_length) + "x} {2:2} {3:<24} {4:d}"
print(format_str.format(i, fld_Salt_value, "", "Salt value:", fld_Salt_value))
i += fld_Salt_length
elif type_name == "DS":
i_start = i
fld_Key_id = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(i, fld_Key_id, "", "Key_id:", fld_Key_id))
i += 2
fld_Algorithm = data[i]
print("{0:04x}: {1:02x} {2:10} {3:<24} {4}({5:d})".format(i, fld_Algorithm, "", "Algorithm:", get_algorithm(fld_Algorithm)[0], fld_Algorithm))
i += 1
fld_Digest_type = data[i]
print("{0:04x}: {1:02x} {2:10} {3:<24} {4}({5:d})".format(i, fld_Digest_type, "", "Digest type:", get_digest_type(fld_Digest_type), fld_Digest_type))
i += 1
fld_Public_Key_size = fld_data_length - (i - i_start)
fld_Public_Key = data[i:i + fld_Public_Key_size]
format_str = "{0:04x}: {1:0" + str(2*fld_Public_Key_size) + "x}\n {2:18} {3:<24}"
print(format_str.format(i, int.from_bytes(fld_Public_Key, 'big'), "", "Digest:"), end = "")
print_result_bin(fld_Public_Key)
i += fld_Public_Key_size
elif type_name == "AAAA":
print("{0:04x}: {1:02x}{2:02x}{3:02x}{4:02x}{5:02x}{6:02x}{7:02x}{8:02x}{9:02x}{10:02x}{11:02x}{12:02x}{13:02x}{14:02x}{15:02x}{16:02x}\n {17:18} {18} {19}".format(i,
data[i], data[i + 1], data[i + 2], data[i + 3],
data[i + 4], data[i + 5], data[i + 6], data[i + 7],
data[i + 8], data[i + 9], data[i + 10], data[i + 11],
data[i + 12], data[i + 13], data[i + 14], data[i + 15],
"", "Addr:", get_stripv6addr(data[i:i + 16])))
i += fld_data_length
elif type_name == "SRV":
i_start = i
fld_Priority = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(i, fld_Priority, "", "Priority:", fld_Priority))
i += 2
fld_Weight = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(i, fld_Weight, "", "Weight:", fld_Weight))
i += 2
fld_Port = (data[i] << 8) + data[i + 1]
print("{0:04x}: {1:04x} {2:8} {3:<24} {4:d}".format(i, fld_Port, "", "Port:", fld_Port))
i += 2
result, result_length = get_name(data, i)
format_str = "{0:04x}: {1:0" + str(2*result_length) + "x}\n {2:18} {3:<24} {4}"
print(format_str.format(i, int.from_bytes(data[i:i + result_length], 'big') ,"", "Target:", result))
i += result_length
elif type_name == "PTR":
result, result_length = get_name(data, i)
format_str = "{0:04x}: {1:0" + str(2*result_length) + "x}\n {2:18} {3:<24} {4}"
print(format_str.format(i, int.from_bytes(data[i:i + result_length], 'big') ,"", "Domain Name:", result))
i += result_length
elif type_name == "CAA":
i_start = i
print("{0:04x}: {1:04x} {2:8} {3:<24} {1:d}".format(i, data[i], "", "CAA Flags:"))
i += 1
result, result_length = get_name(data, i)
format_str = "{0:04x}: {1:0" + str(2*result_length) + "x}\n {2:18} {3:<24} {4}"
print(format_str.format(i, int.from_bytes(data[i:i + result_length], 'big') ,"", "Issue Tag:", result))
i += result_length
fld_issue_value = data[i:i + fld_data_length - (i - i_start)]
format_str = "{0:04x}: {1:0" + str(2*len(fld_issue_value)) + "x} {2:12} {3}"
print(format_str.format(i, int.from_bytes(fld_issue_value, 'big'), "Issue Value:", fld_issue_value))
i += len(fld_issue_value)
else:
fld_other = data[i:i + fld_data_length]
format_str = "{0:04x}: {1:0" + str(2*fld_data_length) + "x} {2:12} {3}"
print(format_str.format(i, int.from_bytes(fld_other, 'big'), "Data:", fld_other))
i += fld_data_length
record_count += 1
return i
def get_name(data, i):
result = ""
start_i = i
while i < len(data):
fld_length = data[i]
if fld_length == 0:
if result == "":
result += "<Root>"
i += 1
break
if i + 1 >= len(data):
break
result_bits = ((data[i] << 8) + data[i + 1]) & 0xC000
if result_bits == 0xc000:
result_pos = ((data[i] << 8) + data[i + 1]) & 0x3fff
pre_name, _ = get_name(data, result_pos)
result += "[." + pre_name + "]"
i += 2
break
elif result_bits == 0x8000:
i += 2
break
elif result_bits == 0x4000:
i += 2
break
else:
i += 1
if len(result) > 0:
result += "."
for n in range(fld_length):
                if i + n >= len(data):
                    # Truncated name: keep the (name, consumed-length) return order
                    # used by every other return path in this function.
                    return result, (i + n) - start_i
result += chr(data[i + n])
i += fld_length
if i >= len(data):
break
return result, i - start_i
def get_signature(data, i, size):
current_i = i
result = ""
max_i = i + size
while i < max_i:
result += chr(data[i])
i += 1
return result, i - current_i
def print_result(target_str):
col = 0
for i in range(len(target_str)):
if col % 16 == 0 and col >= 16:
print("\n{0:44} {1:02x}".format("", ord(target_str[i])), end = "")
else:
print(" {0:02x}".format(ord(target_str[i])), end = "")
col += 1
print()
def print_result_bin(target_str):
col = 0
for i in range(len(target_str)):
if col % 16 == 0 and col >= 16:
print("\n{0:44} {1:02x}".format("", ord(chr(target_str[i]))), end = "")
else:
print(" {0:02x}".format(ord(chr(target_str[i]))), end = "")
col += 1
print()
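if __name__ == "__main__":
    # Minimal usage sketch (not part of the original module). Assumes the
    # mylibs package is importable from the working directory and that
    # mylibs.recordtype.set_RecordType accepts the record type as the string "A".
    query = set_Header_and_Question(0x1234, "example.com", "A")
    print("query bytes:", query.hex())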
| 2.625 | 3 |
App/Blockchain/blockchain.py | ptenteromano/Blockchain-Tech | 0 | 12788148 | # Implementatin of our blockchain
# <NAME>
# Object Oriented blockchain
# The container + chain where our blocks live
# Bring in some needed libraries
from datetime import datetime
import hashlib
import json
from urllib.parse import urlparse
import requests
from timeit import default_timer as timer
class Blockchain:
# Initialize the chain and the genesis block
def __init__(self):
self.chain = []
self.transactions = []
self.difficulty = "0000"
self.difficultyArray = []
self.createBlock(1, "0", None) # Genesis block
self.nodes = set()
self.users = {}
# This dict keeps track of all clients/miners using the chain
def addUser(self, userId, publickey, miner=False):
self.users[userId] = {"publicKey": publickey, "isMiner": miner}
# Either add or subtract a "0" from the difficulty
def changeDifficulty(self, increase=True):
if increase:
self.difficulty += "0"
else:
self.difficulty = self.difficulty[:-1]
def getLength(self):
return len(self.chain)
# Block format is a dictonary
# Hash_solution is the puzzle that solved it
def createBlock(self, nonce, previous_hash, hash_solution):
block = {
"blockNum": len(self.chain) + 1,
"timestamp": str(datetime.now().replace(microsecond=0)),
"nonce": nonce,
"hashSolution": hash_solution,
"previousHash": previous_hash,
"transactions": self.transactions,
}
# Empty the transactions
self.transactions = []
self.chain.append(block)
self.difficultyArray.append(self.difficulty)
return block
# Returns the last block in the chain
def getPreviousBlock(self):
return self.chain[-1]
# Solving the hash with the nonce
def proofOfWork(self, previous_nonce):
new_nonce = 1
proof_of_work = False
start = timer()
while proof_of_work is False:
# We can define our own proof-of-work puzzle (n**2 - pn**2) in this case
hash_solution = hashlib.sha256(
str((new_nonce ** 2 - previous_nonce ** 2) + len(self.chain)).encode(
"utf-8"
)
).hexdigest()
if hash_solution[: len(self.difficulty)] == self.difficulty:
proof_of_work = True
else:
new_nonce += 1
end = timer()
return new_nonce, hash_solution, round(end - start, 6)
# Hash the contents of the entire block
def hash(self, block):
encoded_block = json.dumps(block, sort_keys=True).encode("utf-8")
return hashlib.sha256(encoded_block).hexdigest()
# Check if chain has all valid blocks
def isChainValid(self, chain):
previous_block = chain[0]
block_index = 1
while block_index < len(chain):
block = chain[block_index]
if block["previousHash"] != self.hash(previous_block):
print("No!")
return False, block_index
previous_nonce = previous_block["nonce"]
nonce = block["nonce"]
hash_operation = hashlib.sha256(
str((nonce ** 2 - previous_nonce ** 2) +
block_index).encode("utf-8")
).hexdigest()
try:
difficultyAtBlock = self.difficultyArray[block_index]
if hash_operation[:len(difficultyAtBlock)] != difficultyAtBlock:
return False, block_index
except:
print(len(self.difficultyArray), len(self.chain))
# Move forward in the chain if everything checks out
previous_block = block
block_index += 1
return True, len(self.chain)
# Creates a transaction and returns the future next block number
def addTransaction(self, sender, receiver, data):
self.transactions.append(
{"sender": sender, "receiver": receiver, "document": data}
)
previous_block = self.getPreviousBlock()
return previous_block["blockNum"] + 1
# Returns the address of a new node on the network
def addNode(self, addressOfNode):
parsed_url = urlparse(addressOfNode)
self.nodes.add(parsed_url.netloc)
def getNumNodes(self):
return len(self.nodes)
# Find the best chain-by-consensus on network (longest chain)
def replaceChain(self):
network = self.nodes
longest_chain = None
max_length = len(self.chain)
for node in network:
try:
response = requests.get(f"http://{node}/get_chain_json")
if response.status_code == 200:
length = response.json()["length"]
chain = response.json()["chain"]
difficulties = list(response.json()["difficulties"])
print(len(difficulties), difficulties)
if length > max_length: # (self.isChainValid(chain)):
print("yes!")
max_length = length
longest_chain = chain
chain_difficulties = difficulties
except:
continue
if longest_chain:
self.chain = longest_chain
self.difficultyArray = chain_difficulties
return True
return False
# Functions to append bogus blocks to chain and remove
def simulateFakeBlocks(self):
for _ in range(2):
self.chain.append(
{
"blockNum": len(self.chain) + 1,
"timestamp": "Never",
"nonce": -1,
"previousHash": "FAKE BLOCK",
"transactions": [
{
"sender": "You",
"receiver": "Theif",
"document": {"Your Bank Account": 123456789},
}
],
}
)
def pruneFakeBlocks(self):
is_valid, last_valid_block = self.isChainValid(self.chain)
if not is_valid:
self.chain = self.chain[:last_valid_block]
return True, last_valid_block
return False, last_valid_block
# --- Testing Functions below ---
# bc = Blockchain()
# print(bc.isChainValid(bc.chain))
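if __name__ == "__main__":
    # Minimal local demo (a sketch, not part of the original module): mine one
    # block on a fresh chain using only the methods defined above.
    bc = Blockchain()
    bc.addTransaction(sender="alice", receiver="bob", data={"title": "example.pdf"})
    prev = bc.getPreviousBlock()
    nonce, solution, seconds = bc.proofOfWork(prev["nonce"])
    block = bc.createBlock(nonce, bc.hash(prev), solution)
    print("Mined block", block["blockNum"], "in", seconds, "seconds")
    print("Chain valid:", bc.isChainValid(bc.chain))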
| 3.3125 | 3 |
crawler/qq_music/crawler.py | MerleLiuKun/daily_mail | 0 | 12788149 | """
Get singer songs by qq music.
Refer: https://github.com/yangjianxin1/QQMusicSpider
``` bash
cd crawler/qq_music
python crawler.py initdb
python crawler.py crawler -s {singer_mid}
```
"""
import sqlite3
import sys
import time
import click
import requests
from requests.adapters import HTTPAdapter
sys.path.append("../../")
import config
session = requests.Session()
adapters = HTTPAdapter(max_retries=3)
session.mount('https://', adapters)
SONG_BY_SINGER_URL = "https://u.y.qq.com/cgi-bin/musicu.fcg?data=%7B%22comm%22%3A%7B%22ct%22%3A24%2C%22cv%22%3A0%7D%2C%22singerSongList%22%3A%7B%22method%22%3A%22GetSingerSongList%22%2C%22param%22%3A%7B%22order%22%3A1%2C%22singerMid%22%3A%22{singer_mid}%22%2C%22begin%22%3A{begin}%2C%22num%22%3A{num}%7D%2C%22module%22%3A%22musichall.song_list_server%22%7D%7D"
def init_db(filename):
table_sql = """CREATE TABLE `song`(
    id INTEGER PRIMARY KEY,
mid VARCHAR(100) NOT NULL,
singer_mid VARCHAR(100) NOT NULL,
name VARCHAR(255) NOT NULL,
title VARCHAR(255) NOT NULL,
created_at INT NOT NULL)"""
if filename is None:
filename = config.DB_PATH
conn = sqlite3.connect(filename)
cursor = conn.cursor()
cursor.execute(table_sql)
cursor.close()
conn.close()
def get_song_from_qq(singer_mid: str, offset: int, limit: int):
"""
Get music data list from qq
Args:
singer_mid: singer mid
        offset: index of the first song to fetch (pagination start)
        limit: maximum number of songs to return in one request
Returns:
song data
"""
try:
resp = session.get(url=SONG_BY_SINGER_URL.format(singer_mid=singer_mid, begin=offset, num=limit))
data = resp.json()
if data["code"] == 0:
return data["singerSongList"]["data"]["songList"]
else:
print(f"Error in req for singer {singer_mid}, offset: {offset}, limit: {limit}")
return []
except Exception as e:
print(f"Exception in get song from qq. errors: {e}")
return []
def save_to_db(filename, singer_mid, data):
now_time = int(time.time())
params = []
for song in data:
song_info = song["songInfo"]
item = [
song_info["mid"], singer_mid,
song_info["name"], song_info["title"], now_time
]
params.append(item)
conn = sqlite3.connect(filename)
cursor = None
try:
cursor = conn.cursor()
cursor.executemany(
"INSERT INTO song(mid, singer_mid, name, title, created_at) "
"VALUES (?,?,?,?,?)",
params
)
conn.commit()
return True
except Exception as e:
conn.rollback()
print(f"Exception save data to db, errors: {e}")
return False
finally:
if cursor:
cursor.close()
conn.close()
def handler(filename, singer_mid):
offset = 0
limit = 100
while 1:
data = get_song_from_qq(singer_mid, offset, limit)
if data:
st = save_to_db(filename, singer_mid, data)
click.echo(f"Save data for offset: {offset}, limit: {limit}, status: {st}")
else:
break
offset += limit
return True
@click.group()
def cli():
pass
@cli.command(help="Initial database")
@click.option("--filename", "-f", default=None)
def initdb(filename):
click.echo("Begin to initial db.")
init_db(filename)
click.echo("Finished initial.")
@cli.command(help="Crawler music for singer")
@click.option("--filename", "-f", default=None)
@click.option("--singer", "-s", help="The singer mid", default=None)
def crawler(filename, singer):
if singer is None:
click.echo("You must need provide singer mid!")
return
if filename is None:
filename = config.DB_PATH
handler(filename, singer)
if __name__ == '__main__':
cli()
| 2.71875 | 3 |
admin_tweaks/image.py | RRMoelker/django-admin-tweaks | 0 | 12788150 | <reponame>RRMoelker/django-admin-tweaks<filename>admin_tweaks/image.py<gh_stars>0
# -*- coding: utf-8 -*-
from easy_thumbnails.files import get_thumbnailer
def icon_view(image_field):
"""Create thumbnail for use in admin list display"""
alias = 'admin_preview'
if not image_field:
return "no image"
thumb = get_thumbnailer(image_field)[alias]
html = '<img src="%s" alt="%s" width="%s" height="%s"/>' % (thumb.url, image_field, thumb.width, thumb.height)
return html
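# Hedged usage sketch (illustrative only; "Product" and its "image" field are
# hypothetical and not part of this package):
#
#   from django.contrib import admin
#   from django.utils.safestring import mark_safe
#   from admin_tweaks.image import icon_view
#
#   class ProductAdmin(admin.ModelAdmin):
#       list_display = ("name", "preview")
#
#       def preview(self, obj):
#           return mark_safe(icon_view(obj.image))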
| 1.914063 | 2 |