id (stringlengths 1-7) | text (stringlengths 6-1.03M) | dataset_id (stringclasses 1 value) |
---|---|---|
144971
|
<reponame>vdot/mdet<gh_stars>1-10
#!/usr/bin/env python3
import rospy
import numpy as np
from tf import TransformListener
from std_msgs.msg import Empty
from sensor_msgs.msg import PointCloud2
from nav_msgs.msg import OccupancyGrid
from geometry_msgs.msg import TransformStamped
from tf2_sensor_msgs.tf2_sensor_msgs import do_transform_cloud
from occupancy_grid import LocalOccupancyGridParams, LocalOccupancyGrid
from common import publish_point_cloud
import time
request_publisher = None
MAP_FRAME = '/map'
STATIC_THRESHOLD = 0.03
class NumpyAccumulator(object):
def __init__(self, columns):
self._capacity = 32000000
self._columns = columns
self._array = np.empty(shape=(self._capacity, self._columns))
self._rows = 0
def get_rows(self):
return self._rows
def get(self):
return self._array[0:self._rows, :]
def _grow(self, required_rows):
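# grow the backing array geometrically (doubling) so repeated appends stay amortized O(1); existing rows are copied into the new buffer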
if required_rows > self._capacity:
target_capacity = self._capacity
while target_capacity < required_rows:
target_capacity *= 2
old_array = self._array
self._array = np.empty(shape=(target_capacity, self._columns))
self._array[0: self._rows] = old_array[0: self._rows]
self._capacity = target_capacity
def append(self, array):
rows = array.shape[0]
required_rows = self._rows + rows
self._grow(required_rows)
self._array[self._rows: required_rows, :] = array
self._rows = required_rows
class PointCloudAccumulatorNode(object):
def __init__(self):
rospy.init_node('pcl_accumulator')
self.obstacles_accumulator = NumpyAccumulator(3)
self.ground_accumulator = NumpyAccumulator(3)
self.tf_listener = TransformListener()
rospy.Subscriber("point_cloud_ground", PointCloud2, lambda point_cloud: self.ground_cloud_handler(point_cloud))
rospy.Subscriber("point_cloud_obstacles", PointCloud2, lambda point_cloud: self.obstacle_cloud_handler(point_cloud))
rospy.Subscriber("map_static", OccupancyGrid, lambda grid: self.static_map_handler(grid))
self.ground_point_cloud_publisher = rospy.Publisher('acc_point_cloud_ground', PointCloud2, queue_size=1)
self.obstacles_static_point_cloud_publisher = rospy.Publisher('acc_point_cloud_obstacles_static', PointCloud2, queue_size=1)
self.obstacles_dynamic_point_cloud_publisher = rospy.Publisher('acc_point_cloud_obstacles_dynamic', PointCloud2, queue_size=1)
self.static_map = None
rospy.Subscriber("request_acc", Empty, lambda request: self.request_handler(request))
rospy.spin()
def point_cloud_to_np_array(self, point_cloud):
# assuming 4 float32-s for a point
a = np.fromstring(point_cloud.data, dtype=np.float32).reshape((-1, 4))
a[:, 3] = 1.0 # set the 4th component to 1, i.e. use homogeneous coords for matrix transformation
return a
def transform_cloud(self, point_cloud, msg_header, target_frame):
self.tf_listener.waitForTransform(msg_header.frame_id, target_frame, msg_header.stamp, rospy.Duration(4.0))
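# asMatrix returns the 4x4 homogeneous transform from the message frame into target_frame at the message timestamp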
mat44 = self.tf_listener.asMatrix(target_frame, msg_header)
# assuming point_cloud is a Nx4 matrix
return np.dot(point_cloud, mat44.T)
def obstacle_cloud_handler(self, point_cloud_msg):
point_cloud = self.point_cloud_to_np_array(point_cloud_msg)
point_cloud = self.transform_cloud(point_cloud, point_cloud_msg.header, MAP_FRAME)
self.obstacles_accumulator.append(point_cloud[:, 0:3])
print("Obstacle cloud received, accumulated size: {0}".format(self.obstacles_accumulator.get_rows()))
def ground_cloud_handler(self, point_cloud_msg):
point_cloud = self.point_cloud_to_np_array(point_cloud_msg)
point_cloud = self.transform_cloud(point_cloud, point_cloud_msg.header, MAP_FRAME)
self.ground_accumulator.append(point_cloud[:, 0:3])
print("Ground cloud received, accumulated size: {0}".format(self.ground_accumulator.get_rows()))
def static_map_handler(self, map):
self.static_map = map
print("Static map received")
def request_handler(self, request):
if self.static_map is None:
print("No map received")
return
start_time = time.time()
grid = LocalOccupancyGrid(self.static_map, LocalOccupancyGridParams(rospy, '~grid_'))
obstacle_cloud = self.obstacles_accumulator.get()
ground_cloud = self.ground_accumulator.get()
cols = grid.get_col_i(obstacle_cloud)
rows = grid.get_row_i(obstacle_cloud)
valid_pos = np.logical_and(
np.logical_and(cols >= 0, cols < grid.cols()),
np.logical_and(rows >= 0, rows < grid.rows()),
)
obstacle_cloud = obstacle_cloud[valid_pos]
ground_cols = grid.get_col_i(ground_cloud)
ground_rows = grid.get_row_i(ground_cloud)
ground_valid_pos = np.logical_and(
np.logical_and(ground_cols >= 0, ground_cols < grid.cols()),
np.logical_and(ground_rows >= 0, ground_rows < grid.rows()),
)
ground_cloud = ground_cloud[ground_valid_pos]
cols = cols[valid_pos]
rows = rows[valid_pos]
threshold = STATIC_THRESHOLD
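# points in static-map cells whose occupancy exceeds the threshold are labelled static, the rest dynamic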
obstacle_static_cloud = obstacle_cloud[grid.get_grid()[rows, cols] > threshold, :]
obstacle_dynamic_cloud = obstacle_cloud[grid.get_grid()[rows, cols] < threshold, :]
now = rospy.Time.now()
publish_point_cloud(self.ground_point_cloud_publisher, ground_cloud, MAP_FRAME, now)
publish_point_cloud(self.obstacles_static_point_cloud_publisher, obstacle_static_cloud, MAP_FRAME, now)
publish_point_cloud(self.obstacles_dynamic_point_cloud_publisher, obstacle_dynamic_cloud, MAP_FRAME, now)
print("Point clouds published, generation took {0}s".format(time.time() - start_time))
if __name__ == '__main__':
try:
PointCloudAccumulatorNode()
except rospy.ROSInterruptException:
print("Interrupted by ROS")
pass
|
StarcoderdataPython
|
4801103
|
<filename>ManWeiHiro.py
#!/usr/bin/python
#coding: utf-8
from wordcloud import WordCloud,STOPWORDS
import pandas as pd
import jieba
import sqlite3
import matplotlib.pyplot as plt
#import seaborn as sns
from pyecharts import Geo,Style,Line,Bar,Overlap,Map
import io
import requests
import time
import random
import json
import sys
reload(sys)
sys.setdefaultencoding( "utf-8" )
def getCommentCount(names):
conn = sqlite3.connect("end.db")
conn.text_factory = str
cursor = conn.cursor()
likeStr = ""
for i in range(0, len(names)):
print names[i]
likeStr = likeStr + "content like \"%" + names[i] + "%\" "
if i + 1 < len(names):
likeStr = likeStr + " or "
cursor.execute("select COUNT(content) from convertData where " + likeStr)
values = cursor.fetchall()
return values[0][0]
def getAlias(alias, name):
if alias.has_key(name):
return alias[name]
else:
return [name]
if __name__ == '__main__':
attr = ["灭霸","美国队长",
"钢铁侠", "浩克", "奇异博士", "蜘蛛侠", "索尔" ,"黑寡妇",
"鹰眼", "惊奇队长", "幻视",
"猩红女巫","蚁人", "古一法师", "洛基"]
alias = {
"灭霸": ["灭霸", "Thanos"],
"美国队长": ["美国队长", "美队"],
"浩克": ["浩克", "绿巨人", "班纳", "HULK"],
"奇异博士": ["奇异博士", "医生"],
"钢铁侠": ["钢铁侠", "stark", "斯塔克", "托尼", "史塔克"],
"蜘蛛侠": ["蜘蛛侠","蜘蛛","彼得", "荷兰弟"],
"索尔":["索尔", "雷神", "托尔"],
"黑寡妇": ["黑寡妇", "寡姐"],
"鹰眼":["鹰眼","克林顿","巴顿","克林特"],
"惊奇队长":["惊奇队长","卡罗尔", "惊奇"],
"星云":["星云"],
"猩红女巫": ["猩红女巫", "绯红女巫", "旺达"],
"蚁人":["蚁人", "蚁侠", "Ant", "AntMan"],
"古一法师": ["古一", "古一法师", "法师"],
"洛基": ["洛基", "抖森"]
}
v1 = [getCommentCount(getAlias(alias, attr[i])) for i in range(0, len(attr))]
bar = Bar("Hiro")
bar.add("count",attr,v1,is_stack=True,xaxis_rotate=30,yaxix_min=4.2,
xaxis_interval=0,is_splitline_show=True)
bar.render("html/hiro_count.html")
|
StarcoderdataPython
|
3377989
|
<gh_stars>10-100
from rest_framework import serializers
from orchestra.api import router
from orchestra.utils.db import database_ready
from .models import Resource, ResourceData
class ResourceSerializer(serializers.ModelSerializer):
name = serializers.SerializerMethodField()
unit = serializers.ReadOnlyField()
class Meta:
model = ResourceData
fields = ('name', 'used', 'allocated', 'unit')
read_only_fields = ('used',)
def to_internal_value(self, raw_data):
data = super(ResourceSerializer, self).to_internal_value(raw_data)
if not data.resource_id:
data.resource = Resource.objects.get(name=raw_data['name'])
return data
def get_name(self, instance):
return instance.resource.name
def get_identity(self, data):
return data.get('name')
# Monkey-patching section
def insert_resource_serializers():
# clean previous state
for related in Resource._related:
try:
viewset = router.get_viewset(related)
except KeyError:
# API viewset not registered
pass
else:
fields = list(viewset.serializer_class.Meta.fields)
try:
fields.remove('resources')
except ValueError:
pass
viewset.serializer_class.Meta.fields = fields
# Create nested serializers on target models
for ct, resources in Resource.objects.group_by('content_type').items():
model = ct.model_class()
try:
router.insert(model, 'resources', ResourceSerializer, required=False, many=True, source='resource_set')
except KeyError:
continue
# TODO: this is a workaround, reimplement this in the proper place
def validate_resources(self, posted, _resources=resources):
""" Creates missing resources """
result = []
resources = list(_resources)
for data in posted:
resource = data.resource
if resource not in resources:
msg = "Unknown or duplicated resource '%s'." % resource
raise serializers.ValidationError(msg)
resources.remove(resource)
if not resource.on_demand and not data.allocated:
data.allocated = resource.default_allocation
result.append(data)
for resource in resources:
data = ResourceData(resource=resource)
if not resource.on_demand:
data.allocated = resource.default_allocation
result.append(data)
return result
viewset = router.get_viewset(model)
viewset.serializer_class.validate_resources = validate_resources
old_options = viewset.options
def options(self, request, resources=resources):
""" Provides available resources description """
metadata = old_options(self, request)
metadata.data['available_resources'] = [
{
'name': resource.name,
'on_demand': resource.on_demand,
'default_allocation': resource.default_allocation
} for resource in resources
]
return metadata
viewset.options = options
if database_ready():
insert_resource_serializers()
|
StarcoderdataPython
|
3266438
|
#!/usr/bin/env python
import io
import os
import setuptools
setuptools.setup(
name='vxi11aio',
version='0.0.1',
python_requires='>=3.7',
install_requires=['aioserial'],
)
|
StarcoderdataPython
|
3333282
|
from app.models.models import Kongqishidu,Kongqiwendu,Turangshidu,Turangwendu,Guangzhao
from app.tools.orm import ORM
session = ORM.db()
kw = session.query(Kongqiwendu).order_by(Kongqiwendu.create_dt.desc()).first()
print(kw.percent)
|
StarcoderdataPython
|
3207539
|
<filename>tests/large_test.py
#!/usr/bin/env python
# Copyright 2016 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import random
import unittest
# Mutates sys.path.
import test_env
from utils import large
class LargeTest(unittest.TestCase):
def test_1m_1(self):
array = range(1000000)
data = large.pack(array)
self.assertGreater(1000, len(data))
self.assertEqual(array, large.unpack(data))
def test_1m_1000(self):
array = [i*1000 for i in xrange(1000000)]
data = large.pack(array)
self.assertGreater(2000, len(data))
self.assertEqual(array, large.unpack(data))
def test_1m_pseudo(self):
# Compresses a pseudo-random suite. Still compresses very well.
random.seed(0)
array = sorted(random.randint(0, 1000000) for _ in xrange(1000000))
data = large.pack(array)
self.assertGreater(302000, len(data))
self.assertEqual(array, large.unpack(data))
def test_empty(self):
self.assertEqual('', large.pack([]))
self.assertEqual([], large.unpack(''))
if __name__ == '__main__':
test_env.main()
|
StarcoderdataPython
|
1662999
|
#!/usr/bin/env python3
"""Convert JWK from/to PEM and other formats"""
import argparse
import json
from binascii import hexlify
from getpass import getpass
from typing import Optional
from cryptography.hazmat.primitives import serialization
from cryptojwt.jwk import JWK
from cryptojwt.jwk.ec import ECKey
from cryptojwt.jwk.ec import import_private_ec_key_from_file
from cryptojwt.jwk.ec import import_public_ec_key_from_file
from cryptojwt.jwk.hmac import SYMKey
from cryptojwt.jwk.rsa import RSAKey
from cryptojwt.jwk.rsa import import_private_rsa_key_from_file
from cryptojwt.jwk.rsa import import_public_rsa_key_from_file
from cryptojwt.jwx import key_from_jwk_dict
def jwk_from_file(filename: str, private: bool = True) -> JWK:
"""Read JWK from file"""
with open(filename, mode="rt") as input_file:
jwk_dict = json.loads(input_file.read())
return key_from_jwk_dict(jwk_dict, private=private)
def pem2rsa(
filename: str,
kid: Optional[str] = None,
private: bool = False,
passphrase: Optional[str] = None,
) -> JWK:
"""Convert RSA key from PEM to JWK"""
if private:
key = import_private_rsa_key_from_file(filename, passphrase)
else:
key = import_public_rsa_key_from_file(filename)
jwk = RSAKey(kid=kid)
jwk.load_key(key)
return jwk
def pem2ec(
filename: str,
kid: Optional[str] = None,
private: bool = False,
passphrase: Optional[str] = None,
) -> JWK:
"""Convert EC key from PEM to JWK"""
if private:
key = import_private_ec_key_from_file(filename, passphrase)
else:
key = import_public_ec_key_from_file(filename)
jwk = ECKey(kid=kid)
jwk.load_key(key)
return jwk
def bin2jwk(filename: str, kid: str) -> JWK:
"""Read raw key from filename and return JWK"""
with open(filename, "rb") as file:
content = file.read()
return SYMKey(kid=kid, key=content)
def pem2jwk(
filename: str,
kid: Optional[str] = None,
kty: Optional[str] = None,
private: bool = False,
passphrase: Optional[str] = None,
) -> JWK:
"""Read PEM from filename and return JWK"""
with open(filename, "rt") as file:
content = file.readlines()
header = content[0]
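# the first PEM line (e.g. "-----BEGIN RSA PRIVATE KEY-----") identifies the key type and whether it is a private key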
if private:
if passphrase is None:
passphrase = getpass("Private key passphrase: ")
if len(passphrase) == 0:
passphrase = None
else:
passphrase = None
if "BEGIN PUBLIC KEY" in header:
if kty is not None and kty == "EC":
jwk = pem2ec(filename, kid, private=False)
elif kty is not None and kty == "RSA":
jwk = pem2rsa(filename, kid, private=False)
else:
raise ValueError("Unknown key type")
elif "BEGIN PRIVATE KEY" in header:
if kty is not None and kty == "EC":
jwk = pem2ec(filename, kid, private=True, passphrase=passphrase)
elif kty is not None and kty == "RSA":
jwk = pem2rsa(filename, kid, private=True, passphrase=passphrase)
else:
raise ValueError("Unknown key type")
elif "BEGIN EC PRIVATE KEY" in header:
jwk = pem2ec(filename, kid, private=True, passphrase=passphrase)
elif "BEGIN EC PUBLIC KEY" in header:
jwk = pem2ec(filename, kid, private=False)
elif "BEGIN RSA PRIVATE KEY" in header:
jwk = pem2rsa(filename, kid, private=True, passphrase=passphrase)
elif "BEGIN RSA PUBLIC KEY" in header:
jwk = pem2rsa(filename, kid, private=False)
else:
raise ValueError("Unknown PEM format")
return jwk
def export_jwk(
jwk: JWK,
private: bool = False,
encrypt: bool = False,
passphrase: Optional[str] = None,
) -> bytes:
"""Export JWK as PEM/bin"""
if jwk.kty == "oct": # jwk is in fact a SYMKey
return jwk.key
# All other key types have private and public keys
if private:
if encrypt:
if passphrase is None:
passphrase = getpass("Private key passphrase: ")
else:
passphrase = None
if passphrase:
enc = serialization.BestAvailableEncryption(passphrase.encode())
else:
enc = serialization.NoEncryption()
serialized = jwk.priv_key.private_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PrivateFormat.PKCS8,
encryption_algorithm=enc,
)
else:
serialized = jwk.pub_key.public_bytes(
encoding=serialization.Encoding.PEM,
format=serialization.PublicFormat.SubjectPublicKeyInfo,
)
return serialized
def output_jwk(jwk: JWK, private: bool = False, filename: Optional[str] = None) -> None:
"""Output JWK to file"""
serialized = jwk.serialize(private=private)
if filename is not None:
with open(filename, mode="wt") as file:
file.write(json.dumps(serialized))
else:
print(json.dumps(serialized, indent=4))
def output_bytes(data: bytes, binary: bool = False, filename: Optional[str] = None) -> None:
"""Output data to file"""
if filename is not None:
with open(filename, mode="wb") as file:
file.write(data)
else:
if binary:
print(hexlify(data).decode())
else:
print(data.decode())
def main():
"""Main function"""
parser = argparse.ArgumentParser(description="JWK Conversion Utility")
parser.add_argument("--kid", dest="kid", metavar="key_id", help="Key ID")
parser.add_argument("--kty", dest="kty", metavar="type", help="Key type")
parser.add_argument("--private", dest="private", action="store_true", help="Output private key")
parser.add_argument(
"--encrypt", dest="encrypt", action="store_true", help="Encrypt private key"
)
parser.add_argument("--output", dest="output", metavar="filename", help="Output file name")
parser.add_argument("filename", metavar="filename", nargs=1, help="filename")
args = parser.parse_args()
f = args.filename[0]
if f.endswith(".json"):
jwk = jwk_from_file(f, args.private)
serialized = export_jwk(jwk, private=args.private, encrypt=args.encrypt)
output_bytes(data=serialized, binary=(jwk.kty == "oct"), filename=args.output)
elif f.endswith(".bin"):
jwk = bin2jwk(filename=f, kid=args.kid)
output_jwk(jwk=jwk, private=True, filename=args.output)
elif f.endswith(".pem"):
jwk = pem2jwk(filename=f, kid=args.kid, private=args.private, kty=args.kty)
output_jwk(jwk=jwk, private=args.private, filename=args.output)
else:
exit(-1)
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
3237278
|
# coding: utf-8
"""
Masking API
Schema for the Masking Engine API # noqa: E501
OpenAPI spec version: 5.1.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from dxm.lib.masking_api.api_client import ApiClient
class FileRulesetApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def bulk_file_update(self, file_ruleset_id, body, **kwargs): # noqa: E501
"""Update the set of files and their attributes associated with a file ruleset in bulk # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bulk_file_update(file_ruleset_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int file_ruleset_id: The ID of the file ruleset to update the file for (required)
:param FileMetadataBulkInput body: The exact list of files to put in the ruleset. Note that existing files for this ruleset not in this list will be deleted (required)
:return: AsyncTask
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.bulk_file_update_with_http_info(file_ruleset_id, body, **kwargs) # noqa: E501
else:
(data) = self.bulk_file_update_with_http_info(file_ruleset_id, body, **kwargs) # noqa: E501
return data
def bulk_file_update_with_http_info(self, file_ruleset_id, body, **kwargs): # noqa: E501
"""Update the set of files and their attributes associated with a file ruleset in bulk # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.bulk_file_update_with_http_info(file_ruleset_id, body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int file_ruleset_id: The ID of the file ruleset to update the file for (required)
:param FileMetadataBulkInput body: The exact list of files to put in the ruleset. Note that existing files for this ruleset not in this list will be deleted (required)
:return: AsyncTask
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['file_ruleset_id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method bulk_file_update" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'file_ruleset_id' is set
if self.api_client.client_side_validation and ('file_ruleset_id' not in params or
params['file_ruleset_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `file_ruleset_id` when calling `bulk_file_update`") # noqa: E501
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in params or
params['body'] is None): # noqa: E501
raise ValueError("Missing the required parameter `body` when calling `bulk_file_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_ruleset_id' in params:
path_params['fileRulesetId'] = params['file_ruleset_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/file-rulesets/{fileRulesetId}/bulk-file-update', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='AsyncTask', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_file_ruleset(self, body, **kwargs): # noqa: E501
"""Create file ruleset # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_file_ruleset(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param FileRuleset body: The file ruleset to create (required)
:return: FileRuleset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_file_ruleset_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_file_ruleset_with_http_info(body, **kwargs) # noqa: E501
return data
def create_file_ruleset_with_http_info(self, body, **kwargs): # noqa: E501
"""Create file ruleset # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_file_ruleset_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param FileRuleset body: The file ruleset to create (required)
:return: FileRuleset
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_file_ruleset" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if self.api_client.client_side_validation and ('body' not in params or
params['body'] is None): # noqa: E501
raise ValueError("Missing the required parameter `body` when calling `create_file_ruleset`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/file-rulesets', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileRuleset', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_file_ruleset(self, file_ruleset_id, **kwargs): # noqa: E501
"""Delete file ruleset by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_file_ruleset(file_ruleset_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int file_ruleset_id: The ID of the file ruleset to delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_file_ruleset_with_http_info(file_ruleset_id, **kwargs) # noqa: E501
else:
(data) = self.delete_file_ruleset_with_http_info(file_ruleset_id, **kwargs) # noqa: E501
return data
def delete_file_ruleset_with_http_info(self, file_ruleset_id, **kwargs): # noqa: E501
"""Delete file ruleset by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_file_ruleset_with_http_info(file_ruleset_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int file_ruleset_id: The ID of the file ruleset to delete (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['file_ruleset_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_file_ruleset" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'file_ruleset_id' is set
if self.api_client.client_side_validation and ('file_ruleset_id' not in params or
params['file_ruleset_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `file_ruleset_id` when calling `delete_file_ruleset`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_ruleset_id' in params:
path_params['fileRulesetId'] = params['file_ruleset_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/file-rulesets/{fileRulesetId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_file_rulesets(self, **kwargs): # noqa: E501
"""Get all file rulesets # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_file_rulesets(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int environment_id: The ID of the environment to get all file rulesets from
:param int page_number: The page number for which to get file rulesets. This will default to the first page if excluded
:param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
:return: FileRulesetList
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_file_rulesets_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_file_rulesets_with_http_info(**kwargs) # noqa: E501
return data
def get_all_file_rulesets_with_http_info(self, **kwargs): # noqa: E501
"""Get all file rulesets # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_file_rulesets_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int environment_id: The ID of the environment to get all file rulesets from
:param int page_number: The page number for which to get file rulesets. This will default to the first page if excluded
:param int page_size: The maximum number of objects to return. This will default to the DEFAULT_API_PAGE_SIZE property if not provided
:return: FileRulesetList
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['environment_id', 'page_number', 'page_size'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_file_rulesets" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'environment_id' in params:
query_params.append(('environment_id', params['environment_id'])) # noqa: E501
if 'page_number' in params:
query_params.append(('page_number', params['page_number'])) # noqa: E501
if 'page_size' in params:
query_params.append(('page_size', params['page_size'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/file-rulesets', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileRulesetList', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_file_ruleset_by_id(self, file_ruleset_id, **kwargs): # noqa: E501
"""Get file ruleset by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_file_ruleset_by_id(file_ruleset_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int file_ruleset_id: The ID of the file ruleset to get (required)
:return: FileRuleset
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_file_ruleset_by_id_with_http_info(file_ruleset_id, **kwargs) # noqa: E501
else:
(data) = self.get_file_ruleset_by_id_with_http_info(file_ruleset_id, **kwargs) # noqa: E501
return data
def get_file_ruleset_by_id_with_http_info(self, file_ruleset_id, **kwargs): # noqa: E501
"""Get file ruleset by ID # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_file_ruleset_by_id_with_http_info(file_ruleset_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param int file_ruleset_id: The ID of the file ruleset to get (required)
:return: FileRuleset
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['file_ruleset_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_file_ruleset_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'file_ruleset_id' is set
if self.api_client.client_side_validation and ('file_ruleset_id' not in params or
params['file_ruleset_id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `file_ruleset_id` when calling `get_file_ruleset_by_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'file_ruleset_id' in params:
path_params['fileRulesetId'] = params['file_ruleset_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/file-rulesets/{fileRulesetId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FileRuleset', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
|
StarcoderdataPython
|
1693937
|
# -*- coding: utf-8 -*-
from pyramid.view import view_config
@view_config(route_name='admin_index',
request_method='GET',
renderer='h:templates/admin/index.html.jinja2',
permission='admin_index')
def index(_):
return {}
def includeme(config):
config.scan(__name__)
|
StarcoderdataPython
|
3295084
|
from django.urls import path
from . import views
urlpatterns = [
path('view-path/', views.my_view, name='view-name'),
]
|
StarcoderdataPython
|
1617453
|
import requests
url = 'http://127.0.0.1:5000/cars/08%20C%201234'
response = requests.delete(url)
print(response.status_code)
print(response.text)
|
StarcoderdataPython
|
3379383
|
<filename>.leetcode/68.text-justification.py
# @lc app=leetcode id=68 lang=python3
#
# [68] Text Justification
#
# https://leetcode.com/problems/text-justification/description/
#
# algorithms
# Hard (30.13%)
# Likes: 995
# Dislikes: 1966
# Total Accepted: 168.2K
# Total Submissions: 556.2K
# Testcase Example: '["This", "is", "an", "example", "of", "text", "justification."]\n16'
#
# Given an array of words and a width maxWidth, format the text such that each
# line has exactly maxWidth characters and is fully (left and right)
# justified.
#
# You should pack your words in a greedy approach; that is, pack as many words
# as you can in each line. Pad extra spaces ' ' when necessary so that each
# line has exactly maxWidth characters.
#
# Extra spaces between words should be distributed as evenly as possible. If
# the number of spaces on a line do not divide evenly between words, the empty
# slots on the left will be assigned more spaces than the slots on the right.
#
# For the last line of text, it should be left justified and no extra space is
# inserted between words.
#
# Note:
#
#
# A word is defined as a character sequence consisting of non-space characters
# only.
# Each word's length is guaranteed to be greater than 0 and not exceed
# maxWidth.
# The input array words contains at least one word.
#
#
#
# Example 1:
#
#
# Input: words = ["This", "is", "an", "example", "of", "text",
# "justification."], maxWidth = 16
# Output:
# [
# "This is an",
# "example of text",
# "justification. "
# ]
#
# Example 2:
#
#
# Input: words = ["What","must","be","acknowledgment","shall","be"], maxWidth =
# 16
# Output:
# [
# "What must be",
# "acknowledgment ",
# "shall be "
# ]
# Explanation: Note that the last line is "shall be " instead of "shall
# be", because the last line must be left-justified instead of fully-justified.
# Note that the second line is also left-justified because it contains only one
# word.
#
# Example 3:
#
#
# Input: words =
# ["Science","is","what","we","understand","well","enough","to","explain","to","a","computer.","Art","is","everything","else","we","do"],
# maxWidth = 20
# Output:
# [
# "Science is what we",
# "understand well",
# "enough to explain to",
# "a computer. Art is",
# "everything else we",
# "do "
# ]
#
#
# Constraints:
#
#
# 1 <= words.length <= 300
# 1 <= words[i].length <= 20
# words[i] consists of only English letters and symbols.
# 1 <= maxWidth <= 100
# words[i].length <= maxWidth
#
#
#
#
#
# @lc tags=string
# @lc imports=start
from imports import *
# @lc imports=end
# @lc idea=start
#
# Text justification. Given a sequence of words, fill greedily: pack as many words as possible into each line; when no more fit, distribute the spaces between words evenly, with earlier gaps getting more than later ones.
#
#
# @lc idea=end
# @lc group=
# @lc rank=
# @lc code=start
class Solution:
def fullJustify(self, words: List[str], maxWidth: int) -> List[str]:
result = []
now = [words[0]]
length = len(words[0])
for word in words[1:]:
if length + len(now) + len(word) > maxWidth:
if len(now) == 1:
result.append(now[0] + ' ' * (maxWidth - len(now[0])))
else:
blankSpace = maxWidth - length
base = blankSpace // (len(now) - 1)
times = blankSpace % (len(now) - 1)
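# `length` counts word characters only, so blankSpace is the total number of spaces
# to distribute; each of the len(now) - 1 gaps gets `base` spaces and the leftmost
# `times` gaps get one extra space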
t = (' ' * (base + 1)).join(now[:times + 1])
t = (' ' * (base)).join([t] + now[times + 1:])
result.append(t)
pass
now = [word]
length = len(word)
else:
length += len(word)
now.append(word)
if len(now) != 0:
t = ' '.join(now)
result.append(t + ' ' * (maxWidth - len(t)))
return result
# @lc code=end
# @lc main=start
if __name__ == '__main__':
print('Example 1:')
print('Input : ')
print(
'words = ["This", "is", "an", "example", "of", "text","justification."], maxWidth = 16'
)
print('Output :')
print(
str(Solution().fullJustify(
["This", "is", "an", "example", "of", "text", "justification."],
16)))
print('Expected :')
print('["This is an","example of text","justification. "]')
print()
print('Example 2:')
print('Input : ')
print(
'words = ["What","must","be","acknowledgment","shall","be"], maxWidth =16'
)
print('Output :')
print(
str(Solution().fullJustify(
["What", "must", "be", "acknowledgment", "shall", "be"], 16)))
print('Expected :')
print('["What must be","acknowledgment ","shall be "]')
print()
print('Example 3:')
print('Input : ')
print(
'words =["Science","is","what","we","understand","well","enough","to","explain","to","a","computer.","Art","is","everything","else","we","do"],maxWidth = 20'
)
print('Output :')
print(
str(Solution().fullJustify([
"Science", "is", "what", "we", "understand", "well", "enough",
"to", "explain", "to", "a", "computer.", "Art", "is", "everything",
"else", "we", "do"
], 20)))
print('Expected :')
print(
'["Science is what we", "understand well","enough to explain to","a computer. Art is","everything else we","do "]'
)
print()
pass
# @lc main=end
|
StarcoderdataPython
|
3266721
|
<gh_stars>0
# --------------
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# code starts here
df = pd.read_csv(path)
# probability of fico score greater than 700
p_a = df[df['fico'].astype(float) >700].shape[0]/df.shape[0]
print(p_a)
# probability of purpose == debt_consolidation
p_b = df[df['purpose']== 'debt_consolidation'].shape[0]/df.shape[0]
print(p_b)
# Create new dataframe for condition ['purpose']== 'debt_consolidation'
df1 = df[df['purpose']== 'debt_consolidation']
# Calculate the P(A|B)
p_a_b = df1[df1['fico'].astype(float) >700].shape[0]/df1.shape[0]
print(p_a_b)
# Check whether A and B are independent, i.e. whether P(A) == P(A|B)
result = (p_a == p_a_b)
print(result)
# code ends here
# --------------
# code starts here
prob_lp = df[df['paid.back.loan'] == 'Yes'].shape[0]/df.shape[0]
prob_cs = df[df['credit.policy'] == 'Yes'].shape[0]/df.shape[0]
new_df = df[df['paid.back.loan'] == 'Yes']
prob_pd_cs = new_df[new_df['credit.policy']=='Yes'].shape[0]/new_df.shape[0]
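# Bayes' theorem: P(paid back | credit policy) = P(credit policy | paid back) * P(paid back) / P(credit policy)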
bayes = (prob_pd_cs*prob_lp)/prob_cs
# code ends here
# --------------
# code starts here
# bar plot of loan purpose counts, for all loans and for loans that were not paid back
purpose_counts = df['purpose'].value_counts()
plt.bar(purpose_counts.index, purpose_counts)
df1 = df[df['paid.back.loan'] == 'No']
purpose_counts_unpaid = df1['purpose'].value_counts()
plt.bar(purpose_counts_unpaid.index, purpose_counts_unpaid)
# code ends here
# --------------
# code starts here
inst_median = df['installment'].median()
inst_mean = df['installment'].mean()
df.hist('installment',bins=10)
df.hist('log.annual.inc',bins=10)
# code ends here
|
StarcoderdataPython
|
4817909
|
from dataclasses import dataclass
from typing import Generator, Any
from datek_jaipur.domain.compound_types.card import Card, CardSet
from datek_jaipur.domain.compound_types.goods import GoodsType
from datek_jaipur.domain.compound_types.player import Player
@dataclass
class Scenario:
name: str
player1: Player
player2: Player
expected: Any
def generate_scenarios() -> Generator[Scenario, None, None]:
camel = Card(GoodsType.CAMEL, 1)
player1 = Player(name=_PLAYER1, score=1, goods=CardSet(), herd=CardSet([camel]))
player2 = Player(name=_PLAYER2, score=0, goods=CardSet(), herd=CardSet())
yield Scenario(name=_PLAYER1, player1=player1, player2=player2, expected=player1)
player1 = Player(name=_PLAYER1, score=0, goods=CardSet(), herd=CardSet())
player2 = Player(name=_PLAYER2, score=1, goods=CardSet(), herd=CardSet([camel]))
yield Scenario(name=_PLAYER2, player1=player1, player2=player2, expected=player2)
player1 = Player(name=_PLAYER1, score=1, goods=CardSet(), herd=CardSet([camel]))
player2 = Player(name=_PLAYER2, score=1, goods=CardSet(), herd=CardSet([camel]))
yield Scenario(name="None", player1=player1, player2=player2, expected=None)
_PLAYER1 = "Player1"
_PLAYER2 = "Player2"
|
StarcoderdataPython
|
1676180
|
<filename>tests/core/test_path_mapping.py
# Copyright 2020 ScyllaDB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from core.operations import PathMapping
@pytest.fixture
def mapping():
return PathMapping("/")
def test_get_from_empty(mapping):
with pytest.raises(KeyError):
return mapping[42]
def test_set_one_path(mapping):
mapping[42] = "/root"
assert mapping[42] == "/root"
assert mapping.inode_lookups[42] == 1
def test_set_many_paths(mapping):
mapping[42] = "/root"
mapping[42] = "/home"
mapping[42] = "/lib"
assert mapping[42] in ["/root", "/home", "/lib"]
assert mapping.inode_lookups[42] == 3
assert dict.__getitem__(mapping, 42) == {"/root", "/home", "/lib"}
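# dict.__getitem__ bypasses PathMapping's own lookup, exposing the raw stored value (here apparently a set once several paths are registered)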
def test_set_same_path_twice(mapping):
mapping[42] = "/root"
mapping[42] = "/root"
assert mapping[42] == "/root"
assert mapping.inode_lookups[42] == 2
assert dict.__getitem__(mapping, 42) == "/root"
def test_forget_path(mapping):
mapping[42] = "/root"
mapping[42] = "/home"
mapping[42] = "/lib"
with pytest.raises(KeyError):
mapping.forget_path(42, "/usr")
assert dict.__getitem__(mapping, 42) == {"/root", "/home", "/lib"}
assert mapping.inode_lookups[42] == 3
mapping.forget_path(100500, "/root")
assert dict.__getitem__(mapping, 42) == {"/root", "/home", "/lib"}
assert mapping.inode_lookups[42] == 3
mapping.forget_path(42, "/root")
assert dict.__getitem__(mapping, 42) == {"/home", "/lib"}
assert mapping.inode_lookups[42] == 3
mapping.forget_path(42, "/home")
assert dict.__getitem__(mapping, 42) == "/lib"
assert mapping.inode_lookups[42] == 3
with pytest.raises(KeyError):
mapping.forget_path(42, "/usr")
assert dict.__getitem__(mapping, 42) == "/lib"
assert mapping.inode_lookups[42] == 3
mapping.forget_path(42, "/lib")
assert 42 not in mapping
assert mapping.inode_lookups[42] == 3
def test_forget_path_and_add_again(mapping):
mapping[42] = "/root"
mapping.forget_path(42, "/root")
assert mapping.inode_lookups[42] == 1
mapping[42] = "/root"
assert mapping.inode_lookups[42] == 2 # is it expected?
def replace_path_for_inode_with_one_path(mapping):
mapping[42] = "/root"
mapping.replace_path(100500, "/root", "/usr")
assert dict.__getitem__(mapping, 42) == "/root"
assert mapping.inode_lookups[42] == 1
with pytest.raises(KeyError):
mapping.replace_path(42, "/lib", "/usr")
assert dict.__getitem__(mapping, 42) == "/root"
assert mapping.inode_lookups[42] == 1
mapping.replace_path(42, "/root", "/usr")
assert dict.__getitem__(mapping, 42) == "/usr"
assert mapping.inode_lookups[42] == 1
def replace_path_for_inode_with_multiple_pathes(mapping):
mapping[42] = "/root"
mapping[42] = "/home"
mapping.replace_path(100500, "/root", "/usr")
assert dict.__getitem__(mapping, 42) == {"/root", "/home"}
assert mapping.inode_lookups[42] == 2
with pytest.raises(KeyError):
mapping.replace_path(42, "/lib", "/usr")
assert dict.__getitem__(mapping, 42) == {"/root", "/home"}
assert mapping.inode_lookups[42] == 2
mapping.replace_path(42, "/root", "/usr")
assert dict.__getitem__(mapping, 42) == {"/usr", "/home"}
assert mapping.inode_lookups[42] == 2
def test_forget_inode_lookups(mapping):
mapping[42] = "/root"
mapping[42] = "/root"
mapping[42] = "/root"
mapping.forget_inode_lookups(inode=42, nlookup=2)
assert 42 in mapping
assert mapping.inode_lookups[42] == 1
mapping.forget_inode_lookups(inode=42, nlookup=1)
assert 42 not in mapping
assert 42 not in mapping.inode_lookups
mapping[13] = "/lib"
mapping.forget_inode_lookups(inode=13, nlookup=666)
assert 13 not in mapping
assert 13 not in mapping.inode_lookups
|
StarcoderdataPython
|
3333964
|
#!/usr/bin/env python3
# encoding: utf-8
# By @LyricLy
# Public Domain / CC-0 <https://creativecommons.org/publicdomain/zero/1.0/>
import time
from collections import defaultdict
class Entity:
def __init__(self, value):
self.value = value
self.pos = 0
self.vel = value
def move(self):
self.pos += self.vel
if self.pos <= 0:
return self.value
self.vel -= 1
def print_entities(edct):
print("\n" * 100)
for i in range(max(edct.keys()))[::-1]:
try:
print(edct[i][0].value)
except IndexError:
print("-")
time.sleep(1)
def arrowsort(lst, output=None):
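# Simulates "shooting" each value upward with velocity equal to itself and a deceleration
# of one per step: a non-negative value v returns to position <= 0 after 2*v + 1 steps,
# so smaller values land earlier and rlst is collected in ascending order.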
rlst = []
edct = defaultdict(list, {0: list(map(Entity, lst))})
while True:
if output is not None:
output(edct)
new_edct = defaultdict(list)
for n in edct:
for x in edct[n]:
r = x.move()
if r is not None:
rlst.append(r)
else:
new_edct[x.pos].append(x)
if not new_edct:
return rlst
edct = new_edct
if __name__ == "__main__":
xs = eval(input())
arrowsort(xs, print_entities)
print(xs)
|
StarcoderdataPython
|
1683626
|
<filename>backend/app/route/survey/provider.py
from app.api.base.base_sql import Sql
from app.api.base import base_name as names
class Provider:
"""
Class for working with products
"""
@staticmethod
def post_survey(args):
"""
Record the test results
:param args:
:return:
"""
query = """
insert into survey
(eat,
dysphagia,
wash,
wash_teeth,
dress,
restroom,
id_user)
values ({eat},
{dysphagia},
{wash},
{wash_teeth},
{dress},
{restroom},
{id_user})
returning
id_survey,
eat,
dysphagia,
wash,
wash_teeth,
dress,
restroom,
id_user
"""
result = Sql.exec(query=query, args=args)
return result
|
StarcoderdataPython
|
3202315
|
"""
========================================================
Filename: test_suite.py
Author: <NAME>
Description:
The test suite consists of a test bench, a test harness
and a set of design files.
The test suite is used for compiling and running
a set of simulations/tests.
(c) 2019 <NAME>
========================================================
"""
from compiler import Compiler
class Test_suite:
def __init__(self):
self.tb_list = []
self.lib_list = []
self.testbench = None
self.compiler = Compiler()
self.organize_collection_list = []
def add_testbench(self, testbench):
"""
Set the testbench object for test suite.
Args:
testbench (str): testbench filename and path
"""
self.testbench = testbench
def get_testbench(self):
return self.testbench
def set_collection(self, organize_collection_list):
"""
Add a collection to test suite
Args:
organize_collection_list (Collection object): a collection of VHDL object files.
"""
self.organize_collection_list = organize_collection_list
def get_collection(self):
return self.organize_collection_list
def compile(self):
#self.compiler.compile(self.organize_collection_list)
pass
def simulate(self):
pass
|
StarcoderdataPython
|
1640977
|
from typing import Union
from collections import OrderedDict
from ..api.syntax import TaskDeclaration
from ..api.syntax import GroupDeclaration
from ..inputoutput import SystemIO
STATUS_STARTED = 'started'
STATUS_ERRORED = 'errored'
STATUS_FAILURE = 'failure'
STATUS_SUCCEED = 'succeed'
class QueueItem(object):
task: TaskDeclaration
status: str
def __init__(self, task: TaskDeclaration, status: str):
self.task = task
self.status = status
class ProgressObserver(object):
_io: SystemIO
_tasks: OrderedDict # OrderedDict[str, QueueItem]
_failed_count: int
def __init__(self, io: SystemIO):
self._io = io
self._tasks = OrderedDict()
self._failed_count = 0
@staticmethod
def _format_parent_task(parent: Union[GroupDeclaration, None]) -> str:
return ('[part of ' + parent.get_name() + ']') if parent else ''
def task_started(self, declaration: TaskDeclaration, parent: Union[GroupDeclaration, None], args: list):
""" When task is just started """
self._tasks[declaration.to_full_name()] = QueueItem(declaration, STATUS_STARTED)
self._io.info_msg(' >> Executing %s %s %s' % (
declaration.to_full_name(),
' '.join(args),
self._format_parent_task(parent)
))
def task_errored(self, declaration: TaskDeclaration, exception: Exception):
""" On exception catched in task execution """
self._tasks[declaration.to_full_name()] = QueueItem(declaration, STATUS_ERRORED)
self._failed_count += 1
self._io.print_opt_line()
self._io.error_msg('The task "%s" was interrupted with an %s' % (
declaration.to_full_name(),
str(exception.__class__)
))
self._io.print_separator()
self._io.print_opt_line()
def task_failed(self, declaration: TaskDeclaration, parent: Union[GroupDeclaration, None]):
""" When task returns False """
self._set_status(declaration, STATUS_FAILURE)
self._failed_count += 1
if not declaration.get_task_to_execute().is_silent_in_observer():
self._io.print_opt_line()
self._io.error_msg('The task "%s" %s ended with a failure' % (
declaration.to_full_name(),
self._format_parent_task(parent)
))
self._io.print_separator()
self._io.print_opt_line()
def task_succeed(self, declaration: TaskDeclaration, parent: Union[GroupDeclaration, None]):
""" When task success """
self._set_status(declaration, STATUS_SUCCEED)
if not declaration.get_task_to_execute().is_silent_in_observer():
self._io.print_opt_line()
self._io.success_msg('The task "%s" %s succeed.' % (
declaration.to_full_name(),
self._format_parent_task(parent)
))
self._io.print_separator()
self._io.print_opt_line()
def execution_finished(self):
""" When all tasks were executed """
if self.has_at_least_one_failed_task():
self._io.error_msg('Execution failed with %i failed tasks of %i total tasks scheduled for execution' % (
self._failed_count, len(self._tasks)
))
else:
self._io.success_msg('Successfully executed %i tasks.' % len(self._tasks))
self._io.print_opt_line()
def _set_status(self, declaration: TaskDeclaration, status: str):
self._tasks[declaration.to_full_name()] = QueueItem(declaration, status)
def has_at_least_one_failed_task(self) -> bool:
return self._failed_count > 0
|
StarcoderdataPython
|
23298
|
from flask_wtf import Form
from wtforms import TextField, PasswordField, validators, IntegerField, BooleanField, SelectField, SubmitField
from wtforms.validators import Required, Length, Email, ValidationError, Regexp, EqualTo, NumberRange
from wtforms.widgets import SubmitInput
class SignUp(Form):
username = TextField("Username", validators=[Required("Please provide a username without any spaces"),
Length(min=4, max=20), Regexp(r'^[\w.@+-]+$', message="Please provide a username without any spaces")])
password = PasswordField("Password", validators=[Required("Please pick a secure password"),
Regexp(r'^[\w.@+-]+$', message="Please provide a password without any spaces")])
email = TextField("Email", validators=[Required("Please provide a valid email address"),
Length(min=6, max=35), Email(message="That is not a valid email address"),
Regexp(r'^[\w.@+-]+$', message="Please provide an email without any spaces")])
firstname = TextField("First Name", validators=[Required("Please provide your first name"),
Regexp(r'^[\w.@+-]+$', message="Please enter your first name without any spaces")])
lastname = TextField("Last Name", validators=[Required("Please provide your last name"),
Regexp(r'^[\w.@+-]+$', message="Please enter your last name without any spaces")])
class Login(Form):
username = TextField("Username", validators=[Required("Please provide a username without any spaces"),
Length(min=4, max=20), Regexp(r'^[\w.@+-]+$', message="Please provide a username without any spaces")])
password = PasswordField("Password", validators=[Required("Please pick a secure password"),
Regexp(r'^[\w.@+-]+$', message="Please provide a password without any spaces")])
class ForgotPassword(Form):
email = TextField("Email", validators=[Required("Please provide a valid email address"),
Length(min=6, max=35), Email(message="That is not a valid email address"),
Regexp(r'^[\w.@+-]+$', message="Please provide an email without any spaces")])
class NewPassword(Form):
password = PasswordField("Password", validators=[Required("Please pick a secure password"),
Regexp(r'^[\w.@+-]+$', message="Please provide a password without any spaces")])
confirm_password = PasswordField("Confirm Password", validators=[Required("Required"),
Regexp(r'^[\w.@+-]+$', message="Please provide a password without any spaces"),
EqualTo("password", message="Passwords must match")])
class ChangePassword(Form):
current_password = PasswordField("Current Password", validators=[Required("Please type in your current password"),
Regexp(r'^[\w.@+-]+$', message="Please provide a password without any spaces")])
password = PasswordField("<PASSWORD>", validators=[Required("Please pick a secure password"),
Regexp(r'^[\w.@+-]+$', message="Please provide a password without any spaces")])
confirm_password = PasswordField("Confirm Password", validators=[Required("Password must match with new password"),
Regexp(r'^[\w.@+-]+$', message="Please provide a password without any spaces"),
EqualTo("password", message="Password must match with new password")])
class CamSettings(Form):
brightness = IntegerField("Brightness", default=50, validators=[Required("Please choose a number between 0 and 100"),
NumberRange(min=0, max=100, message="Please choose a number between 0 and 100")])
resolution = SelectField("Video/Image Resolution: ", choices=[("320x240", "320 x 240"), ("640x480", "640 x 480"),
("800x600", "800 x 600")], default="640x480", validators=[(Required("Required"))])
hflip = BooleanField("Horizontal Flip: ", default=False)
vflip = BooleanField("Vertical Flip: ", default=False)
class Recording(Form):
start = SubmitField("Start Recording")
stop = SubmitField("Stop Recording")
class LicensePlate(Form):
license = TextField("License Plate", validators=[Required("Please provide a license plate without any spaces"),
Length(min=4, max=10), Regexp(r'^[\w.@+-]+$', message="Please provide a license plate without any spaces")])
class ForceLock(Form):
forcelock = SubmitField("Force Lock")
class GarageDoor(Form):
opengarage = SubmitField("Open Garage")
|
StarcoderdataPython
|
3396265
|
<reponame>zeljkofilipin/how-to-code
print("bok")
|
StarcoderdataPython
|
4834949
|
"""UniFi sensor platform tests."""
from collections import deque
from copy import deepcopy
from asynctest import patch
from homeassistant import config_entries
from homeassistant.components import unifi
from homeassistant.components.unifi.const import (
CONF_CONTROLLER,
CONF_SITE_ID,
CONTROLLER_ID as CONF_CONTROLLER_ID,
UNIFI_CONFIG,
UNIFI_WIRELESS_CLIENTS,
)
from homeassistant.setup import async_setup_component
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
import homeassistant.components.sensor as sensor
CLIENTS = [
{
"hostname": "Wired client hostname",
"ip": "10.0.0.1",
"is_wired": True,
"last_seen": 1562600145,
"mac": "00:00:00:00:00:01",
"name": "Wired client name",
"oui": "Producer",
"sw_mac": "00:00:00:00:01:01",
"sw_port": 1,
"wired-rx_bytes": 1234000000,
"wired-tx_bytes": 5678000000,
},
{
"hostname": "Wireless client hostname",
"ip": "10.0.0.2",
"is_wired": False,
"last_seen": 1562600145,
"mac": "00:00:00:00:00:02",
"name": "Wireless client name",
"oui": "Producer",
"sw_mac": "00:00:00:00:01:01",
"sw_port": 2,
"rx_bytes": 1234000000,
"tx_bytes": 5678000000,
},
]
CONTROLLER_DATA = {
CONF_HOST: "mock-host",
CONF_USERNAME: "mock-user",
CONF_PASSWORD: "mock-password",
CONF_PORT: 1234,
CONF_SITE_ID: "mock-site",
CONF_VERIFY_SSL: False,
}
ENTRY_CONFIG = {CONF_CONTROLLER: CONTROLLER_DATA}
CONTROLLER_ID = CONF_CONTROLLER_ID.format(host="mock-host", site="mock-site")
SITES = {"Site name": {"desc": "Site name", "name": "mock-site", "role": "admin"}}
async def setup_unifi_integration(
hass,
config,
options,
sites,
clients_response,
devices_response,
clients_all_response,
):
"""Create the UniFi controller."""
hass.data[UNIFI_CONFIG] = []
hass.data[UNIFI_WIRELESS_CLIENTS] = unifi.UnifiWirelessClients(hass)
config_entry = config_entries.ConfigEntry(
version=1,
domain=unifi.DOMAIN,
title="Mock Title",
data=config,
source="test",
connection_class=config_entries.CONN_CLASS_LOCAL_POLL,
system_options={},
options=options,
entry_id=1,
)
mock_client_responses = deque()
mock_client_responses.append(clients_response)
mock_device_responses = deque()
mock_device_responses.append(devices_response)
mock_client_all_responses = deque()
mock_client_all_responses.append(clients_all_response)
mock_requests = []
async def mock_request(self, method, path, json=None):
mock_requests.append({"method": method, "path": path, "json": json})
if path == "s/{site}/stat/sta" and mock_client_responses:
return mock_client_responses.popleft()
if path == "s/{site}/stat/device" and mock_device_responses:
return mock_device_responses.popleft()
if path == "s/{site}/rest/user" and mock_client_all_responses:
return mock_client_all_responses.popleft()
return {}
with patch("aiounifi.Controller.login", return_value=True), patch(
"aiounifi.Controller.sites", return_value=sites
), patch("aiounifi.Controller.request", new=mock_request):
await unifi.async_setup_entry(hass, config_entry)
await hass.async_block_till_done()
hass.config_entries._entries.append(config_entry)
controller_id = unifi.get_controller_id_from_config_entry(config_entry)
controller = hass.data[unifi.DOMAIN][controller_id]
controller.mock_client_responses = mock_client_responses
controller.mock_device_responses = mock_device_responses
controller.mock_client_all_responses = mock_client_all_responses
controller.mock_requests = mock_requests
return controller
async def test_platform_manually_configured(hass):
"""Test that we do not discover anything or try to set up a controller."""
assert (
await async_setup_component(
hass, sensor.DOMAIN, {sensor.DOMAIN: {"platform": "unifi"}}
)
is True
)
assert unifi.DOMAIN not in hass.data
async def test_no_clients(hass):
"""Test the update_clients function when no clients are found."""
controller = await setup_unifi_integration(
hass,
ENTRY_CONFIG,
options={unifi.const.CONF_ALLOW_BANDWIDTH_SENSORS: True},
sites=SITES,
clients_response=[],
devices_response=[],
clients_all_response=[],
)
assert len(controller.mock_requests) == 3
assert len(hass.states.async_all()) == 2
async def test_sensors(hass):
"""Test the update_items function with some clients."""
controller = await setup_unifi_integration(
hass,
ENTRY_CONFIG,
options={
unifi.const.CONF_ALLOW_BANDWIDTH_SENSORS: True,
unifi.const.CONF_TRACK_CLIENTS: False,
unifi.const.CONF_TRACK_DEVICES: False,
},
sites=SITES,
clients_response=CLIENTS,
devices_response=[],
clients_all_response=[],
)
assert len(controller.mock_requests) == 3
assert len(hass.states.async_all()) == 6
wired_client_rx = hass.states.get("sensor.wired_client_name_rx")
assert wired_client_rx.state == "1234.0"
wired_client_tx = hass.states.get("sensor.wired_client_name_tx")
assert wired_client_tx.state == "5678.0"
wireless_client_rx = hass.states.get("sensor.wireless_client_name_rx")
assert wireless_client_rx.state == "1234.0"
wireless_client_tx = hass.states.get("sensor.wireless_client_name_tx")
assert wireless_client_tx.state == "5678.0"
clients = deepcopy(CLIENTS)
clients[0]["is_wired"] = False
clients[1]["rx_bytes"] = 2345000000
clients[1]["tx_bytes"] = 6789000000
controller.mock_client_responses.append(clients)
await controller.async_update()
await hass.async_block_till_done()
wireless_client_rx = hass.states.get("sensor.wireless_client_name_rx")
assert wireless_client_rx.state == "2345.0"
wireless_client_tx = hass.states.get("sensor.wireless_client_name_tx")
assert wireless_client_tx.state == "6789.0"
|
StarcoderdataPython
|
1665447
|
from mayan.apps.common.tests.base import GenericViewTestCase
from mayan.apps.documents.tests.base import GenericDocumentViewTestCase
from ..literals import WIDGET_CLASS_TEXTAREA
from ..models import WorkflowTransition
from ..permissions import (
permission_workflow_edit, permission_workflow_transition,
permission_workflow_view
)
from .literals import (
TEST_WORKFLOW_TRANSITION_FIELD_HELP_TEXT,
TEST_WORKFLOW_TRANSITION_FIELD_LABEL, TEST_WORKFLOW_TRANSITION_FIELD_NAME,
TEST_WORKFLOW_TRANSITION_FIELD_TYPE, TEST_WORKFLOW_TRANSITION_LABEL,
TEST_WORKFLOW_TRANSITION_LABEL_EDITED
)
from .mixins import (
WorkflowTestMixin, WorkflowTransitionEventViewTestMixin,
WorkflowTransitionFieldViewTestMixin, WorkflowViewTestMixin,
WorkflowTransitionViewTestMixin
)
class WorkflowTransitionViewTestCase(
WorkflowTestMixin, WorkflowViewTestMixin, WorkflowTransitionViewTestMixin,
GenericViewTestCase
):
def test_create_test_workflow_transition_no_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
response = self._request_test_workflow_transition_create_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(WorkflowTransition.objects.count(), 0)
def test_create_test_workflow_transition_with_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self.grant_access(
obj=self.test_workflow, permission=permission_workflow_edit
)
response = self._request_test_workflow_transition_create_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(WorkflowTransition.objects.count(), 1)
self.assertEqual(
WorkflowTransition.objects.all()[0].label,
TEST_WORKFLOW_TRANSITION_LABEL
)
self.assertEqual(
WorkflowTransition.objects.all()[0].origin_state,
self.test_workflow_state_1
)
self.assertEqual(
WorkflowTransition.objects.all()[0].destination_state,
self.test_workflow_state_2
)
def test_delete_workflow_transition_no_permissions(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
response = self._request_test_workflow_transition_delete_view()
self.assertEqual(response.status_code, 404)
self.assertTrue(
self.test_workflow_transition in WorkflowTransition.objects.all()
)
def test_delete_workflow_transition_with_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
self.grant_access(permission=permission_workflow_edit, obj=self.test_workflow)
response = self._request_test_workflow_transition_delete_view()
self.assertEqual(response.status_code, 302)
self.assertFalse(
self.test_workflow_transition in WorkflowTransition.objects.all()
)
def test_edit_workflow_transition_no_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
response = self._request_test_workflow_transition_edit_view()
self.assertEqual(response.status_code, 404)
self.test_workflow_transition.refresh_from_db()
self.assertEqual(
self.test_workflow_transition.label, TEST_WORKFLOW_TRANSITION_LABEL
)
def test_edit_workflow_transition_with_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
self.grant_access(
obj=self.test_workflow, permission=permission_workflow_edit
)
response = self._request_test_workflow_transition_edit_view()
self.assertEqual(response.status_code, 302)
self.test_workflow_transition.refresh_from_db()
self.assertEqual(
self.test_workflow_transition.label,
TEST_WORKFLOW_TRANSITION_LABEL_EDITED
)
def test_workflow_transition_list_no_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
response = self._request_test_workflow_transition_list_view()
self.assertNotContains(
response=response, text=self.test_workflow_transition.label,
status_code=404
)
def test_workflow_transition_list_with_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
self.grant_access(
obj=self.test_workflow, permission=permission_workflow_view
)
response = self._request_test_workflow_transition_list_view()
self.assertContains(
response=response, text=self.test_workflow_transition.label,
status_code=200
)
class WorkflowTransitionEventViewTestCase(
WorkflowTestMixin, WorkflowTransitionEventViewTestMixin,
GenericDocumentViewTestCase
):
def test_workflow_transition_event_list_no_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
response = self._request_test_workflow_transition_event_list_view()
self.assertEqual(response.status_code, 404)
def test_workflow_transition_event_list_with_access(self):
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
self.grant_access(
obj=self.test_workflow, permission=permission_workflow_edit
)
response = self._request_test_workflow_transition_event_list_view()
self.assertEqual(response.status_code, 200)
class WorkflowTransitionFieldTestMixin:
def _create_test_workflow_transition_field(self, extra_data=None):
kwargs = {
'field_type': TEST_WORKFLOW_TRANSITION_FIELD_TYPE,
'name': TEST_WORKFLOW_TRANSITION_FIELD_NAME,
'label': TEST_WORKFLOW_TRANSITION_FIELD_LABEL,
'help_text': TEST_WORKFLOW_TRANSITION_FIELD_HELP_TEXT
}
kwargs.update(extra_data or {})
self.test_workflow_transition_field = self.test_workflow_transition.fields.create(
**kwargs
)
class WorkflowTransitionFieldViewTestCase(
WorkflowTestMixin, WorkflowTransitionFieldTestMixin,
WorkflowTransitionFieldViewTestMixin, WorkflowTransitionViewTestMixin,
GenericViewTestCase
):
def setUp(self):
super(WorkflowTransitionFieldViewTestCase, self).setUp()
self._create_test_workflow()
self._create_test_workflow_states()
self._create_test_workflow_transition()
def test_workflow_transition_field_list_view_no_permission(self):
self._create_test_workflow_transition_field()
response = self._request_test_workflow_transition_field_list_view()
self.assertNotContains(
response=response,
text=self.test_workflow_transition_field.label,
status_code=404
)
def test_workflow_transition_field_list_view_with_access(self):
self._create_test_workflow_transition_field()
self.grant_access(
obj=self.test_workflow, permission=permission_workflow_edit
)
response = self._request_test_workflow_transition_field_list_view()
self.assertContains(
response=response,
text=self.test_workflow_transition_field.label,
status_code=200
)
def test_workflow_transition_field_create_view_no_permission(self):
workflow_transition_field_count = self.test_workflow_transition.fields.count()
response = self._request_workflow_transition_field_create_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(
self.test_workflow_transition.fields.count(),
workflow_transition_field_count
)
def test_workflow_transition_field_create_view_with_access(self):
workflow_transition_field_count = self.test_workflow_transition.fields.count()
self.grant_access(
obj=self.test_workflow, permission=permission_workflow_edit
)
response = self._request_workflow_transition_field_create_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(
self.test_workflow_transition.fields.count(),
workflow_transition_field_count + 1
)
def test_workflow_transition_field_delete_view_no_permission(self):
self._create_test_workflow_transition_field()
workflow_transition_field_count = self.test_workflow_transition.fields.count()
response = self._request_workflow_transition_field_delete_view()
self.assertEqual(response.status_code, 404)
self.assertEqual(
self.test_workflow_transition.fields.count(),
workflow_transition_field_count
)
def test_workflow_transition_field_delete_view_with_access(self):
self._create_test_workflow_transition_field()
workflow_transition_field_count = self.test_workflow_transition.fields.count()
self.grant_access(
obj=self.test_workflow, permission=permission_workflow_edit
)
response = self._request_workflow_transition_field_delete_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(
self.test_workflow_transition.fields.count(),
workflow_transition_field_count - 1
)
class WorkflowInstanceTransitionFieldViewTestCase(
WorkflowTestMixin, WorkflowTransitionFieldTestMixin,
WorkflowTransitionFieldViewTestMixin, WorkflowTransitionViewTestMixin,
GenericDocumentViewTestCase
):
def setUp(self):
super().setUp()
self._create_test_workflow(add_document_type=True)
self._create_test_workflow_states()
self._create_test_workflow_transition()
self._create_test_workflow_transition_field(
extra_data={
'widget': WIDGET_CLASS_TEXTAREA
}
)
self._upload_test_document()
self.test_workflow_instance = self.test_document.workflows.first()
def test_workflow_transition_text_area_widget_execute_view_with_transition_access(self):
self.grant_access(
obj=self.test_workflow_transition,
permission=permission_workflow_transition
)
response = self._request_test_workflow_transition_execute_view()
self.assertEqual(response.status_code, 302)
self.assertEqual(
self.test_workflow_instance.get_current_state(),
self.test_workflow_state_2
)
|
StarcoderdataPython
|
75643
|
from django.contrib import admin
from .models import Quiz, Question, Response
# Register your models here.
class InLineResponse(admin.StackedInline):
model = Response
extra = 0
class InLineQuestion(admin.StackedInline):
model = Question
extra = 0
class QuizAdmin(admin.ModelAdmin):
inlines = [InLineQuestion]
class QuestionAdmin(admin.ModelAdmin):
inlines = [InLineResponse]
admin.site.register(Quiz, QuizAdmin)
admin.site.register(Question, QuestionAdmin)
|
StarcoderdataPython
|
4812253
|
#
# Copyright 2020, Data61, CSIRO (ABN 41 687 119 230)
#
# SPDX-License-Identifier: GPL-2.0-only
#
from typing import List
from hardware.device import WrappedNode
from hardware.fdt import FdtParser
# documentation for CPU bindings:
# https://www.kernel.org/doc/Documentation/devicetree/bindings/arm/cpus.yaml
def get_cpus(tree: FdtParser) -> List[WrappedNode]:
' Return a list of all the CPUs described in this device tree. '
cpus_node = tree.get_path('/cpus')
found_cpus = []
for node in cpus_node:
if node.has_prop('device_type') and node.get_prop('device_type').strings[0] == 'cpu':
found_cpus.append(node)
return found_cpus
|
StarcoderdataPython
|
3362516
|
#! /usr/bin/env python3
import sys
import numpy as np
import filtering as flt
from time import time
kappa_z_array = np.array([0.01,0.03,0.1,0.2,0.3,0.4,0.5,0.6,0.7,0.8,0.9,1,2,3,4,5,6,7,8,9,10])
# choose a kappa_z according to the command-line argument
kappa_z = kappa_z_array[int(sys.argv[1])]
# pass the number of iterations
nIter = int(sys.argv[2])
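# Example invocation (the script name is assumed for illustration):
#   python performance_kappaz.py 3 100
# selects kappa_z = kappa_z_array[3] = 0.2 and runs nIter = 100 iterations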
# where to save
filename = "data_raw/performance_kappaz="+str(kappa_z)
# seed the run with a value related to kappa_z
np.random.seed(int(kappa_z*10))
# model parameters
T = 10 # simulation time
dt = 0.01 # step size
t = np.arange(0,T,dt)
alpha = flt.xi_fun_inv(kappa_z * dt)
timesteps = int(T/dt)
kappa_phi = 1 # inverse diffusion constant
phi_0 = 0 # initial mean
kappa_0 = 20 # initial certainty
kappa_y = 1 # certainty of increment observations
# run the simulations and read out first and second order statistics for each time step
phi_final = np.zeros([nIter,timesteps])
vonMises = np.zeros([nIter,2,timesteps])
GaussADF = np.zeros([nIter,2,timesteps])
PF = np.zeros([nIter,2,timesteps])
start = time()
for i in range(0, nIter): # run for nIter iterations
# generate data
phi, dy, z = flt.generateData(T,kappa_phi,kappa_y=kappa_y,dt=dt,phi_0=phi_0,kappa_0=kappa_0,alpha=alpha)
# Gauss ADF
mu_G, kappa_G = flt.GaussADF_run(T,kappa_phi,dy=dy,kappa_y=kappa_y,z=z,alpha=alpha,
phi_0=phi_0,kappa_0=kappa_0,dt=dt)
# Particle Filter
N = 1000
mu_PF, r_PF = flt.PF_run(T,N,kappa_phi,dy=dy,z=z,alpha=alpha,
kappa_y=kappa_y,phi_0=phi_0,kappa_0=kappa_0,dt=dt)
# von Mises projection filter
mu_VM, kappa_VM = flt.vM_Projection_Run(T,kappa_phi,dy=dy,kappa_y=kappa_y,z=z,alpha=alpha,
phi_0=phi_0,kappa_0=kappa_0,dt=dt)
# read out statistics
phi_final[i] = phi
vonMises[i] = np.array([mu_VM,kappa_VM])
GaussADF[i] = np.array([mu_G,kappa_G])
PF[i] = np.array([mu_PF,r_PF])
np.savez(filename,phi_final=phi_final,vonMises=vonMises,GaussADF=GaussADF,PF=PF,
kappa_phi=kappa_phi,kappa_y=kappa_y,kappa_z=kappa_z,T=T,dt=dt)
print('kappa_z = '+str(kappa_z)+' done \n')
end = time()
print(f'It took {end - start} seconds!')
|
StarcoderdataPython
|
3325540
|
<gh_stars>1-10
"""Provides a formatter for AoE2 AI rule files."""
from .formatter import format_per
|
StarcoderdataPython
|
3341903
|
<gh_stars>0
"""
Support for Haiku with SenseME ceiling fan and lights.
For more details about this platform, please refer to the documentation at
https://github.com/mikelawrence/homeassistant-custom-components
"""
import logging
import voluptuous as vol
from datetime import timedelta
from homeassistant.const import (CONF_INCLUDE, CONF_NAME, CONF_FRIENDLY_NAME)
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.helpers.event import track_time_interval
from homeassistant.util import Throttle
from homeassistant.components.fan import (DIRECTION_FORWARD, DIRECTION_REVERSE)
# SenseMe Python library by <NAME>
REQUIREMENTS = ['https://github.com/TomFaulkner/SenseMe/archive/d3ead08f0a5db1326abfdb678fbe3f6d170c42dd.zip#SenseMe==0.1.6']
# delay between SenseMe background updates (in seconds)
SENSEME_UPDATE_DELAY = 30.0
# Fan has light default value
HAS_LIGHT_DEFAULT = True
# Component update rate
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=30)
DOMAIN = 'senseme'
DATA_HUBS = 'fans'
CONF_HAS_LIGHT = 'has_light'
CONF_IP_ADDRESS = 'ip_address'
FAN_SCHEMA = vol.Schema({
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_FRIENDLY_NAME): cv.string,
vol.Optional(CONF_IP_ADDRESS): cv.string,
vol.Optional(CONF_HAS_LIGHT, default=HAS_LIGHT_DEFAULT): cv.boolean})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(CONF_INCLUDE, default=[]): vol.All(
cv.ensure_list, [FAN_SCHEMA]),
})
}, extra=vol.ALLOW_EXTRA)
_LOGGER = logging.getLogger(__name__)
def setup(hass, config):
"""Set up the Haiku SenseME platform."""
from senseme import SenseMe
hubs = []
device_list = config[DOMAIN].get(CONF_INCLUDE)
if len(device_list) > 0:
for device in device_list:
newDevice = SenseMe(ip=device.get('ip_address'), name=device.get('name'),
monitor_frequency=SENSEME_UPDATE_DELAY,
monitor=True)
hubs.append(SenseMeHub(newDevice,
device.get('friendly_name'),
device['has_light']))
# SenseME fan and light platforms use hub to communicate with the fan
hass.data[DATA_HUBS] = hubs
# Add fan and light platform
load_platform(hass, 'fan', DOMAIN, {}, config)
load_platform(hass, 'light', DOMAIN, {}, config)
return True
def conv_bright_ha_to_lib(brightness) -> int:
"""Convert HA brightness scale 0-255 to library scale 0-16."""
if brightness == 255: # this will end up as 16 which is max
brightness = 256
return int(brightness / 16)
def conv_bright_lib_to_ha(brightness) -> int:
"""Convert library brightness scale 0-16 to HA scale 0-255."""
brightness = int(brightness) * 16
if brightness > 255: # force big numbers into 8-bit int range
brightness = 255
return brightness
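# Examples of the mapping implemented by the two helpers above:
#   conv_bright_ha_to_lib(255) -> 16, conv_bright_ha_to_lib(128) -> 8
#   conv_bright_lib_to_ha(16) -> 255, conv_bright_lib_to_ha(8) -> 128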
class SenseMeHub(object):
"""Data object and access to Haiku with SenseME fan."""
def __init__(self, device, friendly_name, has_light):
"""Initialize the data object."""
self._device = device
self._fan_on = None
self._fan_speed = None
self._whoosh_on = None
self._fan_direction = None
self._light_on = None
self._light_brightness = None
self._friendly_name = friendly_name
self._light_exists = has_light
@property
def name(self) -> str:
"""Gets name of fan."""
return self._device.name
@property
def friendly_name(self) -> str:
"""Gets friendly name of fan."""
if self._friendly_name:
# friendly name is defined
return self._friendly_name
else:
# friendly name is not defined
return self._device.name
@property
def ip(self) -> str:
"""Gets IP address of fan."""
return self._device.ip
@property
def light_exists(self) -> bool:
"""Gets light exists state."""
return self._light_exists
@property
def fan_on(self) -> bool:
"""Gets fan on state."""
return self._fan_on
@fan_on.setter
def fan_on(self, fan_on):
"""Sets fan on state."""
if fan_on: # fan was turned on
self.fan_speed = '4'
else: # fan was turned off
self.fan_speed = 'off'
@property
def fan_speed(self) -> str:
"""Gets fan speed."""
return self._fan_speed
@fan_speed.setter
def fan_speed(self, fan_speed):
"""Sets fan speed."""
if fan_speed == None: # default fan speed when not specified
fan_speed = '4'
self._fan_speed = fan_speed
if fan_speed == 'off': # fan speed set to off
self._device.speed = 0
# turning fan off also affects whoosh state
self._fan_on = False
self._whoosh_on = False
else: # fan speed set to a number
self._device.speed = int(fan_speed)
self._fan_on = True
@property
def fan_direction(self) -> str:
"""Gets fan direction state."""
return self._fan_direction
@fan_direction.setter
def fan_direction(self, fan_direction):
"""Sets fan direction state."""
self._fan_direction = fan_direction
direction = 'FWD'
if fan_direction != DIRECTION_FORWARD:
direction = 'REV'
self._device._send_command(
'<%s;FAN;DIR;SET;%s>' % (self._device.name, direction))
self._device._update_cache('FAN;DIR', direction)
@property
def whoosh_on(self) -> bool:
"""Gets whoosh on state."""
return self._whoosh_on
@whoosh_on.setter
def whoosh_on(self, whoosh_on):
"""Sets whoosh on state."""
self._device.whoosh = whoosh_on
self._whoosh_on = whoosh_on
@property
def light_on(self) -> bool:
"""Gets current light state."""
return self._light_on
@light_on.setter
def light_on(self, light_on):
"""Sets light state."""
# changing light on state using brightness
if light_on: # light was turned on
self.light_brightness = 255
else: # light was turned off
self.light_brightness = 0
@property
def light_brightness(self) -> int:
"""Gets light brightness."""
return self._light_brightness
@light_brightness.setter
def light_brightness(self, light_brightness):
"""Sets light brightness."""
self._device.brightness = conv_bright_ha_to_lib(light_brightness)
self._light_brightness = light_brightness
# changing brightness also affects light on state
if light_brightness == 0:
self._light_on = False
else:
self._light_on = True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self) -> None:
"""Get the latest status from fan."""
self._fan_on = self._device.get_attribute("FAN;PWR") == 'ON'
self._fan_speed = self._device.get_attribute("FAN;SPD;ACTUAL")
if self._fan_speed == '0':
self._fan_speed = 'off'
if self._device.get_attribute("FAN;DIR") == 'FWD':
self._fan_direction = DIRECTION_FORWARD
else:
self._fan_direction = DIRECTION_REVERSE
self._whoosh_on = self._device.get_attribute(
"FAN;WHOOSH;STATUS") == 'ON'
if self._light_exists:
self._light_on = self._device.get_attribute("LIGHT;PWR") == 'ON'
self._light_brightness = conv_bright_lib_to_ha(
self._device.get_attribute("LIGHT;LEVEL;ACTUAL"))
_LOGGER.debug("SenseMe: Updated fan '%s'." % self._device.name)
return True
|
StarcoderdataPython
|
3240882
|
from .contacts_helper import ContactsHelper
from .controller import Controller
from .permissions_helper import PermissionsHelper
from forms import DataSourceForm
class DataSourcesController(Controller):
"""Controller for DataSource GUI
Manage data_source and related models from combined GUI.
"""
def __init__(self, app, config_models):
"""Constructor
:param Flask app: Flask application
:param ConfigModels config_models: Helper for ORM models
"""
super(DataSourcesController, self).__init__(
"DataSource", 'data_sources', 'data_source', 'data_sources', app,
config_models
)
self.ContactsHelper = ContactsHelper(config_models, app.logger)
self.PermissionsHelper = PermissionsHelper(config_models)
self.DataSource = self.config_models.model('data_source')
self.Person = self.config_models.model('person')
def resource_pkey(self):
"""Return primary key column name."""
return 'gdi_oid'
def resources_for_index(self, session):
"""Return data sources list.
:param Session session: DB session
"""
return session.query(self.DataSource).order_by(self.DataSource.name) \
.all()
def form_for_new(self):
"""Return form for new data source."""
return self.create_form()
def form_for_create(self):
"""Return form for creating DataSource."""
return self.create_form()
def create_resource(self, form, session):
"""Create new DataSource records in DB.
:param FlaskForm form: Form for DataSource
:param Session session: DB session
"""
self.create_or_update_resources(None, form, session)
def find_resource(self, id, session):
"""Find DataSource by DataSource ID.
:param int id: DataSource ID
:param Session session: DB session
"""
return session.query(self.DataSource).filter_by(gdi_oid=id).first()
def form_for_edit(self, resource):
"""Return form for editing DataSource.
:param object resource: DataSource object
"""
return self.create_form(resource, True)
def form_for_update(self, resource):
"""Return form for updating DataSource.
:param object resource: data_source object
"""
return self.create_form(resource)
def update_resource(self, resource, form, session):
"""Update existing DataSource records in DB.
:param object resource: data_source object
:param FlaskForm form: Form for DataSource
:param Session session: DB session
"""
self.create_or_update_resources(resource, form, session)
def create_form(self, resource=None, edit_form=False):
"""Return form with fields loaded from DB.
:param object resource: Optional data_source object
:param bool edit_form: Set if edit form
"""
form = DataSourceForm(self.config_models, obj=resource)
if edit_form:
# override form fields with resource values on edit
form.responsible.data = self.ContactsHelper.resource_contact_id(
resource.gdi_oid, self.ContactsHelper.ROLE_RESPONSIBLE
)
# set choices for responsible select field
form.responsible.choices = [(0, "")] + \
self.ContactsHelper.person_choices()
return form
def create_or_update_resources(self, resource, form, session):
"""Create or update DataSource records in DB.
:param object resource: Optional data_source object
(None for create)
:param FlaskForm form: Form for DataSource
:param Session session: DB session
"""
if resource is None:
# create new data_source
data_source = self.DataSource()
session.add(data_source)
else:
# update existing data_source
data_source = resource
# update data_source
data_source.connection_type = form.connection_type.data
data_source.name = form.name.data
data_source.description = form.description.data
data_source.connection = form.connection.data
# NOTE: flush object changes in session to update gdi_oid of a
# new data_source
session.flush()
# update resource contact
self.ContactsHelper.update_resource_contact(
data_source.gdi_oid, self.ContactsHelper.ROLE_RESPONSIBLE,
form.responsible.data, session
)
# add default public permission
role = self.PermissionsHelper.public_role()
if role is not None:
self.PermissionsHelper.update_resource_permission(
data_source.gdi_oid, role.id, True, False, session
)
def destroy_resource(self, resource, session):
"""Delete existing DataSource records in DB.
:param object resource: data_source object
:param Session session: DB session
"""
# remove data_source, resource contacts and permissions
data_source = resource
self.ContactsHelper.remove_resource_contacts(
data_source.gdi_oid, session)
self.PermissionsHelper.remove_resource_permissions(
data_source.gdi_oid, session
)
session.delete(data_source)
|
StarcoderdataPython
|
3316128
|
# import the necessary packages
from tensorflow.keras.preprocessing.image import img_to_array
from tensorflow.keras.models import load_model
import numpy as np
import argparse
import imutils
import cv2
# construct argument parser and parse arguments
ap = argparse.ArgumentParser()
ap.add_argument("-c", "--cascade", required=True,
help="path to where face cascade detector reside.")
ap.add_argument("-m", "--model", required=True,
help="path to pre-trained emotion classifier model")
ap.add_argument("-v", "--video", default=None, type=str,
help="path to optional video file")
args = vars(ap.parse_args())
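# Example invocation (cascade, model and video file names are assumptions for illustration):
#   python emotion_detector.py --cascade haarcascade_frontalface_default.xml \
#       --model emotion_model.hdf5 --video demo.mp4
# Omitting --video falls back to the webcam.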
# load the face detector and pre-trained model
detector = cv2.CascadeClassifier(args["cascade"])
model = load_model(args["model"])
EMOTIONS = ["happy", "angry"]
# if video file not supplied, start the webcam
if not args.get("video", False):
camera = cv2.VideoCapture(0)
# else load the video file
else:
camera = cv2.VideoCapture(args["video"])
while True:
(grabbed, frame) = camera.read()
if args.get("video", False) and grabbed is None:
break
frame = imutils.resize(frame, width=300)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
canvas = np.zeros((250, 300, 3), dtype="uint8")
frameClone = frame.copy()
rects = detector.detectMultiScale(gray, scaleFactor=1.1, minNeighbors=5, minSize=(
30, 30), flags=cv2.CASCADE_SCALE_IMAGE)
if len(rects) > 0:
rect = sorted(rects, key=lambda x: (x[2] - x[0]) * (x[3] - x[1]), reverse=True)[0]
(fX, fY, fW, fH) = rect
roi = gray[fY:fY + fH, fX:fX + fW]
roi = cv2.resize(roi, (48, 48))
roi = roi.astype("float")/255.
roi = img_to_array(roi)
roi = np.expand_dims(roi, axis=0)
preds = model.predict(roi)[0]
label = EMOTIONS[preds.argmax()]
for (i, (emotion, prob)) in enumerate(zip(EMOTIONS, preds)):
text = "{}: {:.2f}".format(emotion, prob * 100)
w = int(prob * 300)
cv2.rectangle(canvas, (5, (i * 35) + 5), (w, (i * 35) + 35), (0, 0, 255), -1)
cv2.putText(canvas, text, (10, (i * 35) + 10), cv2.FONT_HERSHEY_SIMPLEX, 0.45, (255, 255, 255), 2)
cv2.rectangle(frameClone, (fX, fY), (fX + fW, fY + fH), (0, 0, 255), 2)
cv2.putText(frameClone, label, (fX, fY - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.45, (0, 0, 255), 2)
cv2.imshow("Emotion", frameClone)
cv2.imshow("Probabilities", canvas)
camera.release()
cv2.destroyAllWindows()
|
StarcoderdataPython
|
1743977
|
import pandas as pd
from unittest2 import TestCase # or `from unittest import ...` if on Python 3.4+
import category_encoders as encoders
class TestBackwardsEncoder(TestCase):
def test_backwards_difference_encoder_preserve_dimension_1(self):
train = ['A', 'B', 'C']
test = ['A', 'D', 'E']
encoder = encoders.BackwardDifferenceEncoder()
encoder.fit(train)
test_t = encoder.transform(test)
expected = [[1, -2 / 3.0, -1 / 3.0],
[1, 0, 0],
[1, 0, 0]]
self.assertEqual(test_t.values.tolist(), expected)
def test_backwards_difference_encoder_preserve_dimension_2(self):
train = ['A', 'B', 'C']
test = ['B', 'D', 'E']
encoder = encoders.BackwardDifferenceEncoder()
encoder.fit(train)
test_t = encoder.transform(test)
expected = [[1, 1 / 3.0, -1 / 3.0],
[1, 0, 0],
[1, 0, 0]]
self.assertEqual(test_t.values.tolist(), expected)
def test_backwards_difference_encoder_preserve_dimension_3(self):
train = ['A', 'B', 'C']
test = ['A', 'B', 'C', None]
encoder = encoders.BackwardDifferenceEncoder()
encoder.fit(train)
test_t = encoder.transform(test)
expected = [[1, -2 / 3.0, -1 / 3.0],
[1, 1 / 3.0, -1 / 3.0],
[1, 1 / 3.0, 2 / 3.0],
[1, 0, 0]]
self.assertEqual(test_t.values.tolist(), expected)
def test_backwards_difference_encoder_preserve_dimension_4(self):
train = ['A', 'B', 'C']
test = ['D', 'B', 'C', None]
encoder = encoders.BackwardDifferenceEncoder()
encoder.fit(train)
test_t = encoder.transform(test)
expected = [[1, 0, 0],
[1, 1 / 3.0, -1 / 3.0],
[1, 1 / 3.0, 2 / 3.0],
[1, 0, 0]]
self.assertEqual(test_t.values.tolist(), expected)
def test_backwards_difference_encoder_2cols(self):
train = [['A', 'A'], ['B', 'B'], ['C', 'C']]
encoder = encoders.BackwardDifferenceEncoder()
encoder.fit(train)
obtained = encoder.transform(train)
expected = [[1, -2 / 3.0, -1 / 3.0, -2 / 3.0, -1 / 3.0],
[1, 1 / 3.0, -1 / 3.0, 1 / 3.0, -1 / 3.0],
[1, 1 / 3.0, 2 / 3.0, 1 / 3.0, 2 / 3.0]]
self.assertEqual(obtained.values.tolist(), expected)
def test_backwards_difference_encoder_2StringCols_ExpectCorrectOrder(self):
train = pd.DataFrame({'col1': [1, 2, 3, 4],
'col2': ['A', 'B', 'C', 'D'],
'col3': [1, 2, 3, 4],
'col4': ['A', 'B', 'C', 'A']
},
columns=['col1', 'col2', 'col3', 'col4'])
expected_columns = ['intercept', 'col1', 'col2_0', 'col2_1', 'col2_2', 'col3', 'col4_0', 'col4_1']
encoder = encoders.BackwardDifferenceEncoder()
encoder.fit(train)
columns = encoder.transform(train).columns.values
self.assertItemsEqual(expected_columns, columns)
|
StarcoderdataPython
|
45998
|
<gh_stars>1-10
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import web.models
class Migration(migrations.Migration):
dependencies = [
('web', '0006_auto_20150627_1942'),
]
operations = [
migrations.AddField(
model_name='product_web',
name='price',
field=models.IntegerField(max_length=11, null=True, verbose_name=b'\xe5\x8d\x95\xe5\x93\x81\xe4\xbb\xb7\xe6\xa0\xbc', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='brand_fabric',
name='type',
field=models.CharField(default=b'suit', max_length=10, verbose_name=b'\xe9\x9d\xa2\xe6\x96\x99\xe5\x93\x81\xe7\x89\x8c\xe7\xb1\xbb\xe5\x9e\x8b', choices=[(b'suit', b'\xe8\xa5\xbf\xe6\x9c\x8d'), (b'shirt', b'\xe8\xa1\xac\xe8\xa1\xab'), (b'item', b'\xe5\x8d\x95\xe5\x93\x81'), (b'accessorie', b'\xe9\x85\x8d\xe9\xa5\xb0')]),
preserve_default=True,
),
migrations.AlterField(
model_name='fabric_web',
name='brand_fabric',
field=models.ForeignKey(verbose_name=b'\xe9\x9d\xa2\xe6\x96\x99\xe5\x93\x81\xe7\x89\x8c', blank=True, to='web.Brand_fabric', null=True),
preserve_default=True,
),
migrations.AlterField(
model_name='fabric_web',
name='desigh',
field=models.CharField(max_length=64, null=True, verbose_name=b'Design', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='fabric_web',
name='thumbnail_url',
field=models.ImageField(upload_to=web.models.get_uploadto_path, null=True, verbose_name=b'\xe9\x9d\xa2\xe6\x96\x99\xe7\xbc\xa9\xe7\x95\xa5\xe5\x9b\xbe', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='product_web',
name='type',
field=models.CharField(default=b'shirt', max_length=10, verbose_name=b'\xe4\xba\xa7\xe5\x93\x81\xe7\xb1\xbb\xe5\x9e\x8b', choices=[(b'suit', b'\xe8\xa5\xbf\xe6\x9c\x8d'), (b'shirt', b'\xe8\xa1\xac\xe8\xa1\xab'), (b'item', b'\xe5\x8d\x95\xe5\x93\x81'), (b'accessorie', b'\xe9\x85\x8d\xe9\xa5\xb0')]),
preserve_default=True,
),
]
|
StarcoderdataPython
|
41478
|
<reponame>Nabeel965/AgriTechies
from django.http import HttpResponse
from django.shortcuts import render
import joblib
import pandas as pd
import numpy as np
from .models import crop_data
from .recommender import CropDataForm
model = joblib.load('model.pkl')
xl_file = pd.ExcelFile('Features - Rev02.xlsx')
storage_df=xl_file.parse('Cold Storage Data')
crop_df=xl_file.parse('Crop Data')
# Create your views here.
def crop_recommender(response):
if response.method == "POST":
form = CropDataForm(response.POST)
if form.is_valid():
form=form.save(commit=False)
return present_output(form)
else:
form = CropDataForm()
return render(response, 'recommender/recommender.html', {"form":form})
def present_output(form):
crop_name = form.crop_name
quantity = form.quantity
lat = form.lat
lon = form.lon
s = predict(crop_name, quantity, lat, lon)
return HttpResponse(s)
def predict(Crop_Name,quantity,lat,lon):
crop_temperature=crop_df['Required Temperature'][crop_df['Crop Name']==Crop_Name]
crop_humidity=crop_df['Required Relative Humidity'][crop_df['Crop Name']==Crop_Name]
lat=storage_df['lat']
lon=storage_df['lon']
quantity=storage_df['Avaiable Capacity']
storage_temperature=storage_df['Room Temperature']
storage_humidity=storage_df['Relative Humidity']
temp_X=np.zeros((lat.shape[0],7))
temp_X[:,0]=crop_temperature
temp_X[:,1]=crop_humidity
temp_X[:,2]=quantity
temp_X[:,3]=storage_temperature
temp_X[:,4]=storage_humidity
temp_X[:,5]=lat
temp_X[:,6]=lon
return model.predict(temp_X)
|
StarcoderdataPython
|
1783432
|
<reponame>scvannost/clustergrammer-py
'''
The clustergrammer python module can be installed using pip:
pip install clustergrammer
or by getting the code from the repo:
https://github.com/MaayanLab/clustergrammer-py
'''
# from clustergrammer import Network
from clustergrammer import Network
net = Network()
# load matrix tsv file
net.load_stdin()
# optional filtering and normalization
##########################################
# net.filter_sum('row', threshold=20)
# net.normalize(axis='col', norm_type='zscore', keep_orig=True)
# net.filter_N_top('row', 250, rank_type='sum')
# net.filter_threshold('row', threshold=3.0, num_occur=4)
# net.swap_nan_for_zero()
net.make_clust(dist_type='cos',views=['N_row_sum', 'N_row_var'] , dendro=True,
sim_mat=True, filter_sim=0.1, calc_cat_pval=False)
# output jsons for front-end visualizations
print(net.export_net_json('viz', 'no-indent'))
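# Example invocation (script and file names are assumptions for illustration);
# the matrix tsv is piped on stdin and the visualization json captured from stdout:
#   cat example_matrix.txt | python make_clust_stdin.py > mult_view.json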
|
StarcoderdataPython
|
1708336
|
#%%
import os
import sys
import joblib
from numpy.lib.function_base import select
import sklearn
import warnings
import tarfile
import urllib
import numpy as np
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from pandas.plotting import scatter_matrix
from sklearn import impute
warnings.filterwarnings('ignore')
mpl.rc('axes', labelsize=14)
mpl.rc('xtick', labelsize=12)
mpl.rc('ytick', labelsize=12)
# Where to save the figures
PROJECT_ROOT_DIR = ".."
CHAPTER_ID = "end_to_end_project"
IMAGES_PATH = os.path.join(PROJECT_ROOT_DIR, "images", CHAPTER_ID)
os.makedirs(IMAGES_PATH, exist_ok=True)
DOWNLOAD_ROOT = "https://raw.githubusercontent.com/ageron/handson-ml2/master/"
HOUSING_PATH = os.path.join("../data", "housing")
HOUSING_URL = DOWNLOAD_ROOT + "datasets/housing/housing.tgz"
#%%
from scipy import stats
from scipy.stats import randint
from scipy.stats import geom, expon
from sklearn.pipeline import Pipeline
from sklearn.compose import ColumnTransformer
from sklearn.impute import SimpleImputer
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.preprocessing import OneHotEncoder, OrdinalEncoder, StandardScaler
from sklearn.model_selection import train_test_split, StratifiedShuffleSplit, cross_val_score, GridSearchCV, RandomizedSearchCV
from sklearn.svm import SVR
from sklearn.linear_model import LinearRegression
from sklearn.tree import DecisionTreeRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.metrics import mean_squared_error, mean_absolute_error
# %%
def save_fig(fig_id, tight_layout=True, fig_extension="png", resolution=300):
path = os.path.join(IMAGES_PATH, fig_id +'.'+fig_extension)
print(f"Saving fig {fig_id}")
if tight_layout:
plt.tight_layout()
plt.savefig(path, format=fig_extension, dpi=resolution)
def fetch_housing_data(housing_url=HOUSING_URL, housing_path=HOUSING_PATH):
if not os.path.isdir(housing_path):
os.makedirs(housing_path)
tgz_path = os.path.join(housing_path, "housing.tgz")
urllib.request.urlretrieve(housing_url, tgz_path)
housing_tgz = tarfile.open(tgz_path)
housing_tgz.extractall(path=housing_path)
housing_tgz.close()
def load_housing_data(housing_path=HOUSING_PATH):
csv_path = os.path.join(housing_path, "housing.csv")
return pd.read_csv(csv_path)
def download_background():
# Download the California image
images_path = os.path.join(PROJECT_ROOT_DIR, "images", "end_to_end_project")
os.makedirs(images_path, exist_ok=True)
DOWNLOAD_ROOT = "https://raw.githubusercontent.com/ageron/handson-ml2/master/"
filename = "california.png"
print("Downloading", filename)
url = DOWNLOAD_ROOT + "images/end_to_end_project/" + filename
urllib.request.urlretrieve(url, os.path.join(images_path, filename))
return True
def plot_housing_price_distribution(housing):
california_img = mpimg.imread(os.path.join(IMAGES_PATH, "california.png"))
params = {
's': housing['population'] / 100,
'label': 'population',
'figsize': (10,7),
'c': "median_house_value",
'cmap': plt.get_cmap('jet'),
'colorbar': False,
'sharex': False
}
ax = housing.plot(kind='scatter', x='longitude', y='latitude', alpha=.4, **params)
plt.imshow(california_img, extent=[-124.55, -113.80, 32.45, 42.05], alpha=.5, cmap=plt.get_cmap("jet"))
plt.ylabel("Latitude", fontsize=14)
plt.xlabel("Longitude", fontsize=14)
prices = housing["median_house_value"]
tick_values = np.linspace(prices.min(), prices.max(), 11)
cbar = plt.colorbar(ticks=tick_values/prices.max())
cbar.ax.set_yticklabels(["$%dk"%(round(v/1000)) for v in tick_values], fontsize=14)
cbar.set_label('Median House Value', fontsize=16)
plt.legend(fontsize=16)
save_fig("california_housing_prices_plot")
plt.show()
return
# %%
if __name__ == '__main__':
# fetch_housing_data()
load_housing_data()
# EDA
housing = load_housing_data()
housing.info()
housing.ocean_proximity.value_counts()
housing.describe()
housing.hist(bins=50, figsize=(20, 15))
save_fig("attribute_histogram_plots")
train_set, test_set = train_test_split(housing, test_size=.2, random_state=42)
#%%
# Data split
housing['income_cat'] = pd.cut(housing['median_income'],
bins=[0., 1.5, 3.0, 4.5, 6., np.inf],
labels=[1, 2, 3, 4, 5])
housing['income_cat'].value_counts()
split = StratifiedShuffleSplit(n_splits=1, test_size=.2, random_state=42)
for train_index, test_index in split.split(housing, housing['income_cat']):
strat_train_set = housing.loc[train_index]
strat_test_set = housing.loc[test_index]
strat_train_set['income_cat'].value_counts() / len(strat_train_set)
strat_test_set['income_cat'].value_counts() / len(strat_test_set)
for set_ in (strat_train_set, strat_test_set):
set_.drop("income_cat", axis=1, inplace=True)
# %%
# Discover and visualize the data to gain insights
housing = train_set.copy()
plot_housing_price_distribution(housing)
# %%
# correlation
corr_matrix = housing.corr()
corr_matrix['median_house_value'].sort_values(ascending=False)
attributes = ["median_house_value", "median_income", "total_rooms", "housing_median_age"]
scatter_matrix(housing[attributes], figsize=(12, 8))
save_fig("scatter_matrix_plot")
#%%
housing.loc[:, 'rooms_per_household'] = housing.total_rooms / housing.households
housing.loc[:, 'bedrooms_per_room'] = housing.total_bedrooms / housing.total_rooms
housing.loc[:, 'population_per_household'] = housing.population / housing.households
corr_matrix = housing.corr()
corr_matrix["median_house_value"].sort_values(ascending=False)
housing.plot(kind="scatter", x="rooms_per_household", y="median_house_value",
alpha=0.2)
plt.axis([0, 5, 0, 520000])
plt.show()
housing.describe()
# %%
"""Prepare the data for Machine Learning algorithms"""
housing = strat_train_set.drop("median_house_value", axis=1) # drop labels for training set
housing_labels = strat_train_set["median_house_value"].copy()
housing_nums = housing.drop("ocean_proximity", axis=1)
imputer = SimpleImputer(strategy='median')
imputer.fit(housing_nums)
imputer.statistics_
X = imputer.transform(housing_nums)
housing_tr = pd.DataFrame(X, columns=housing_nums.columns, index=housing_nums.index)
housing_tr.head()
#%%
# preprocess the categorical input feature, `ocean_proximity`
housing_cat = housing[["ocean_proximity"]]
cat_encoder = OneHotEncoder(sparse=False)
cat_encoder.categories
housing_cat_1hot = cat_encoder.fit_transform(housing_cat)
housing_cat_1hot
# %%
# create a custom transformer to add extra attributes:
rooms_ix, bedrooms_ix, population_ix, households_ix = 3, 4, 5, 6
class CombinedAttributesAdder(BaseEstimator, TransformerMixin):
def __init__(self, add_bedrooms_per_room=True):
super().__init__()
self.add_bedrooms_per_room = add_bedrooms_per_room
def fit(self, X, y = None):
return self
def transform(self, X):
rooms_per_household = X[:, rooms_ix] / X[:, households_ix]
population_per_household = X[:, population_ix] / X[:, households_ix]
if self.add_bedrooms_per_room:
bedrooms_per_room = X[:, bedrooms_ix] / X[:, rooms_ix]
return np.c_[X, rooms_per_household, population_per_household, bedrooms_per_room]
return np.c_[X, rooms_per_household, population_per_household]
attr_adder = CombinedAttributesAdder(add_bedrooms_per_room=False)
housing_extra_attribs = attr_adder.transform(housing.values)
housing_extra_attribs = pd.DataFrame(
housing_extra_attribs,
columns=list(housing.columns) +["rooms_per_household", "population_per_household"],
index = housing.index
)
housing_extra_attribs.head()
# %%
# build a pipeline for preprocessing the numerical attributes
num_pipeline = Pipeline([
('imputer', SimpleImputer(strategy='median')),
('attribs_adder', CombinedAttributesAdder()),
('std_scaler', StandardScaler())
])
housing_num_tr = num_pipeline.fit_transform(housing_nums)
housing_num_tr
num_attribs = list(housing_nums)
cat_attribs = ['ocean_proximity']
full_pipeline = ColumnTransformer([
('num', num_pipeline, num_attribs),
('cat', OneHotEncoder(), cat_attribs)
])
housing_prepared = full_pipeline.fit_transform(housing)
housing_prepared.shape
# %%
""" Select and train a model """
lin_reg = LinearRegression()
lin_reg.fit(housing_prepared, housing_labels)
some_data = housing.iloc[:5]
some_labels = housing_labels.iloc[:5]
some_data_prepared = full_pipeline.transform(some_data)
lin_reg.predict(some_data_prepared)
housing_predictions = lin_reg.predict(housing_prepared)
lin_mse = mean_squared_error(housing_predictions, housing_labels)
lin_rmse = np.sqrt(lin_mse)
lin_rmse
lin_mae = mean_absolute_error(housing_labels, housing_predictions)
lin_mae
# %%
tree_reg = DecisionTreeRegressor(random_state=42)
tree_reg.fit(housing_prepared, housing_labels)
housing_predictions = tree_reg.predict(housing_prepared)
tree_mse = mean_squared_error(housing_labels, housing_predictions)
tree_rmse = np.sqrt(tree_mse)
tree_rmse
scores = cross_val_score(tree_reg, housing_prepared, housing_labels, scoring="neg_mean_squared_error", cv=10)
tree_rmse_scores = np.sqrt(-scores)
def display_scores(scores):
print("Scores:", scores)
print("Mean:", scores.mean())
print("Standard deviation:", scores.std())
display_scores(tree_rmse_scores)
lin_scores = cross_val_score(lin_reg, housing_prepared, housing_labels,
scoring="neg_mean_squared_error", cv=10)
lin_rmse_scores = np.sqrt(-lin_scores)
display_scores(lin_rmse_scores)
#%%
forest_reg = RandomForestRegressor(n_estimators=100, random_state=42, n_jobs=-1)
forest_reg.fit(housing_prepared, housing_labels)
housing_predictions = forest_reg.predict(housing_prepared)
forest_mse = mean_squared_error(housing_labels, housing_predictions)
forest_rmse = np.sqrt(forest_mse)
forest_rmse
forest_scores = cross_val_score(forest_reg, housing_prepared, housing_labels, scoring="neg_mean_squared_error", cv=10)
forest_rmse_scores = np.sqrt(-forest_scores)
display_scores(forest_rmse_scores)
# %%
scores = cross_val_score(lin_reg, housing_prepared, housing_labels, scoring="neg_mean_squared_error", cv=10)
pd.Series(np.sqrt(-scores)).describe()
# %%
svm_reg = SVR(kernel='linear')
svm_reg.fit(housing_prepared, housing_labels)
housing_predictions = svm_reg.predict(housing_prepared)
svm_mse = mean_squared_error(housing_labels, housing_predictions)
svm_rmse = np.sqrt(svm_mse)
svm_rmse
# %%
# GridSearch
param_grid = [
{'n_estimators':[3, 10, 30], 'max_features':[2,4,6,8]},
{'bootstrap': [False], 'n_estimators': [3, 10], 'max_features': [2, 3, 4]},
]
forest_reg = RandomForestRegressor(random_state=42)
grid_search = GridSearchCV(forest_reg, param_grid, cv=5, scoring='neg_mean_squared_error', return_train_score=True, n_jobs=-1)
grid_search.fit(housing_prepared, housing_labels)
grid_search.best_params_
grid_search.best_estimator_
# each hyperparameter combination tested during the grid search
cvres = grid_search.cv_results_
for mean_score, params in zip(cvres["mean_test_score"], cvres["params"]):
print(np.sqrt(-mean_score), params)
# %%
# RandomizedSearchCV
param_distribs = {
'n_estimators': randint(low=1, high=200),
'max_features': randint(low=1, high=8),
}
forest_reg = RandomForestRegressor(random_state=42)
rnd_search = RandomizedSearchCV(forest_reg, param_distribs, n_iter=100, cv=5, scoring='neg_mean_squared_error', random_state=42, n_jobs=-1)
rnd_search.fit(housing_prepared, housing_labels)
# %%
cvres = rnd_search.cv_results_
for mean_score, params in zip(cvres["mean_test_score"], cvres["params"]):
print(np.sqrt(-mean_score), params)
# %%
feature_importances = grid_search.best_estimator_.feature_importances_
feature_importances
# %%
extra_attribs = ["rooms_per_hhold", "pop_per_hhold", "bedrooms_per_room"]
#cat_encoder = cat_pipeline.named_steps["cat_encoder"] # old solution
cat_encoder = full_pipeline.named_transformers_["cat"]
cat_one_hot_attribs = list(cat_encoder.categories_[0])
attributes = num_attribs + extra_attribs + cat_one_hot_attribs
sorted(zip(feature_importances, attributes), reverse=True)
# %%
|
StarcoderdataPython
|
3323710
|
<gh_stars>1-10
import arrow
import pandas as pd
WARN_DURATION_VARIATION = 5 * 60 # seconds
MAX_DURATION_VARIATION = 45 * 60 # seconds
def get_expected_config_map_for_calibration(sd):
expected_config_map = {}
for ct in sd.curr_spec["calibration_tests"]:
expected_config_map[ct["id"]] = ct["config"]["sensing_config"]
return expected_config_map
def get_expected_config_map_for_evaluation(sd):
expected_config_map = {
"android": {
"fixed:ACCURACY_CONTROL": {
"is_duty_cycling": False,
"accuracy": ["PRIORITY_HIGH_ACCURACY","kCLLocationAccuracyBest"],
"filter": 1,
},
"fixed:POWER_CONTROL": {
"is_duty_cycling": False,
"accuracy": ["PRIORITY_NO_POWER","kCLLocationAccuracyThreeKilometers"],
"filter": 1200,
}
}
}
# The control settings were the same for both OSes, since we do put both
# control values into an array.
expected_config_map["ios"] = expected_config_map["android"]
for ct in sd.curr_spec["sensing_settings"]:
for phoneOS, phone_map in ct.items():
for s in phone_map["sensing_configs"]:
expected_config_map[phoneOS]["%s:%s" % (phone_map["name"], s["id"])] = s["sensing_config"]
# print(expected_config_map)
return expected_config_map
# Current accuracy constants
# Since we can't read these from the phone, we hardcoded them from the documentation
# If there are validation failures, these need to be updated
# In the future, we could upload the options from the phone (maybe the accuracy control)
# but that seems like overkill here
accuracy_options = {
"android": {
"PRIORITY_HIGH_ACCURACY": 100,
"PRIORITY_BALANCED_POWER_ACCURACY": 102,
"PRIORITY_LOW_POWER": 104,
"PRIORITY_NO_POWER": 105
},
"ios": {
"kCLLocationAccuracyBestForNavigation": -2,
"kCLLocationAccuracyBest": -1,
"kCLLocationAccuracyNearestTenMeters": 10,
"kCLLocationAccuracyHundredMeters": 100,
"kCLLocationAccuracyKilometer": 1000,
"kCLLocationAccuracyThreeKilometers": 3000,
}
}
opt_array_idx = lambda phoneOS: 0 if phoneOS == "android" else 1
"""
Internal method to validate the filter settings
"""
def _validate_filter(phoneOS, config_during_test, expected_config):
# filter checking is a bit tricky because the expected value has two possible values and the real config has two possible values
expected_filter = expected_config["filter"]
if type(expected_filter) == int:
ev = expected_filter
else:
assert type(expected_filter) == list, "platform specific filters should be specified in array, not %s" % expected_filter
ev = expected_filter[opt_array_idx(phoneOS)]
if phoneOS == "android":
cvf = "filter_time"
ev = ev * 1000 # milliseconds in the config, specified in secs
elif phoneOS == "ios":
cvf = "filter_distance"
assert config_during_test[cvf] == ev,\
"Field filter mismatch! %s (from %s) != %s (from %s)" %\
(config_during_test[cvf], config_during_test, ev, expected_config)
"""
Internal method to validate the accuracy settings
"""
def _validate_accuracy(phoneOS, config_during_test, expected_config):
# expected config accuracy is an array of strings ["PRIORITY_BALANCED_POWER_ACCURACY", "kCLLocationAccuracyNearestTenMeters"]
# so we find the string at the correct index and then map it to the value from the options
ev = accuracy_options[phoneOS][expected_config["accuracy"][opt_array_idx(phoneOS)]]
assert config_during_test["accuracy"] == ev, "Field accuracy mismatch! %s != %s" % (config_during_test[accuracy], ev)
def validate_calibration_settings(phone_view):
expected_config_map = get_expected_config_map_for_calibration(phone_view.spec_details)
# print(expected_config_map)
for phoneOS, phone_map in phone_view.map().items():
print("Processing data for %s phones" % phoneOS)
for phone_label in phone_map:
curr_calibration_ranges = phone_map[phone_label]["calibration_ranges"]
all_test_ids = [r["trip_id"] for r in curr_calibration_ranges]
unique_test_ids = sorted(list(set(all_test_ids)))
spec_test_ids = sorted([ct["id"] for ct in
phone_view.spec_details.curr_spec["calibration_tests"]])
# assert unique_test_ids == spec_test_ids, "Missing calibration test while comparing %s, %s" % (unique_test_ids, spec_test_ids)
for r in curr_calibration_ranges:
config_during_test_entries = phone_view.spec_details.retrieve_data_from_server(phone_label, ["config/sensor_config"], r["start_ts"], r["end_ts"])
print("%s -> %s" % (r["trip_id"], [c["data"]["accuracy"] for c in config_during_test_entries]))
# assert len(config_during_test_entries) == 1, "Out of band configuration? Found %d config changes" % len(config_during_test_entries)
config_during_test = config_during_test_entries[0]["data"]
expected_config = expected_config_map[r["trip_id_base"]]
# print(config_during_test, expected_config)
_validate_filter(phoneOS, config_during_test, expected_config)
_validate_accuracy(phoneOS, config_during_test, expected_config)
for f in expected_config:
if f != "accuracy" and f != "filter":
assert config_during_test[f] == expected_config[f],\
"Field %s mismatch! %s != %s" % \
(f, config_during_test[f], expected_config[f])
def validate_evaluation_settings(phone_view):
expected_config_map = get_expected_config_map_for_evaluation(phone_view.spec_details)
for phoneOS, phone_map in phone_view.map().items():
# print("Processing data for %s phones, next level keys = %s" % (phoneOS, phone_map.keys()))
for phone_label in phone_map:
curr_evaluation_ranges = phone_map[phone_label]["evaluation_ranges"]
unique_test_ids = set(filter(lambda id: id != "fixed", [r["trip_id"].split(":")[0] for r in curr_evaluation_ranges]))
# This is a tricky check since, unlike in the calibration case, we will have different ids for the different evaluation
# ranges. For now, let us just assert that the evaluation range is valid
# print("Unique test ids = %s" % unique_test_ids)
spec_test_ids = set(
[ct["android"]["name"] for ct in phone_view.spec_details.curr_spec["sensing_settings"]] +
[ct["ios"]["name"] for ct in phone_view.spec_details.curr_spec["sensing_settings"]])
# print(spec_test_ids)
# <= represents subset for set objects
assert unique_test_ids <= spec_test_ids, "Invalid evaluation test while comparing %s, %s" % (unique_test_ids, spec_test_ids)
for r in curr_evaluation_ranges:
config_during_test_entries = phone_view.spec_details.retrieve_data_from_server(phone_label, ["config/sensor_config"], r["start_ts"], r["end_ts"])
print("%s -> %s" % (r["trip_id"], [c["data"]["accuracy"] for c in config_during_test_entries]))
# assert len(config_during_test_entries) == 1, "Out of band configuration? Found %d config changes" % len(config_during_test_entries)
config_during_test = config_during_test_entries[0]["data"]
expected_config = expected_config_map[phoneOS][r["trip_id_base"]]
# print(config_during_test.keys(), expected_config.keys())
_validate_filter(phoneOS, config_during_test, expected_config)
_validate_accuracy(phoneOS, config_during_test, expected_config)
for f in expected_config:
if f != "accuracy" and f != "filter":
assert config_during_test[f] == expected_config[f], "Field %s mismatch! %s != %s" % (f, config_during_test[f], expected_config[f])
"""
range_key: the key in each range that links the ranges for an experiment together
"trip_id" for calibration and "eval_common_trip_id" for evaluation
"""
def validate_range_durations(phone_view, range_key, range_entry_id):
duration_map = {}
for phoneOS, phone_map in phone_view.map().items():
print("Processing data for %s phones" % phoneOS)
for phone_label in phone_map:
curr_phone_duration_map = {}
curr_calibration_ranges = phone_map[phone_label]["{}_ranges".format(range_key)]
for r in curr_calibration_ranges:
curr_phone_duration_map[r[range_entry_id]] = r["duration"]
duration_map[phoneOS+"_"+phone_label] = curr_phone_duration_map
# print(duration_map)
duration_df = pd.DataFrame(duration_map).transpose()
print(duration_df)
for col in duration_df:
duration_variation = duration_df[col] - duration_df[col].median()
print("For %s, duration_variation = %s" % (col, duration_variation.tolist()))
if duration_variation.abs().max() > WARN_DURATION_VARIATION:
    print("WARNING: for %s, duration_variation.abs().max() %d > threshold %d" %
          (col, duration_variation.abs().max(), WARN_DURATION_VARIATION))
assert duration_variation.abs().max() < MAX_DURATION_VARIATION,\
"INVALID: for %s, duration_variation.abs().max() %d > threshold %d" % \
(col, duration_variation.abs().max(), MAX_DURATION_VARIATION)
def validate_range_durations_for_calibration(phone_view):
validate_range_durations(phone_view, "calibration", "trip_id")
# This is slightly different from the calibration validation because the
# matching durations don't have the same IDs
def validate_range_durations_for_evaluation(phone_view):
validate_range_durations(phone_view, "evaluation", "eval_common_trip_id")
|
StarcoderdataPython
|
62506
|
<gh_stars>0
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
'''
Problem 32
Find the sum of all products whose multiplicand/multiplier/product
identity can be written as a 1 through 9 pandigital.
e.g. 39 × 186 = 7254
'''
import itertools
def tuple_to_jointed_int(tp):
return int(''.join(map(str,tp)))
def product_of_multi_pandigit(pan, div):
multiplicand = tuple_to_jointed_int(pan[0: div])
multiplier = tuple_to_jointed_int(pan[div: 5])
product = tuple_to_jointed_int(pan[5: 9])
if multiplicand * multiplier == product:
return product
return 0
# [Note] The products must be 4-digits. If we assume that
# multiplicand < multiplier, their possible digits are (1,4) or (2,3).
print(sum(set(
[product_of_multi_pandigit(pan, div)
for pan in itertools.permutations(range(1, 10))
for div in range(1, 3)])))
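# Worked example (added for illustration; not part of the original solution):
# the identity 39 x 186 = 7254 from the problem statement corresponds to the
# permutation (3, 9, 1, 8, 6, 7, 2, 5, 4) split at div = 2.
assert product_of_multi_pandigit((3, 9, 1, 8, 6, 7, 2, 5, 4), 2) == 7254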
|
StarcoderdataPython
|
49624
|
<reponame>nleguillarme/snr_tools_and_methods
import merpy
merpy.create_lexicon_from_file("ncbi.txt", "ncbi")
merpy.process_lexicon("ncbi")
|
StarcoderdataPython
|
3329575
|
def parse_spec(s):
a, b = s.split("/")
return int(a), int(b)
def compose(*sharders):
sharders = [x for x in sharders if x]
if not sharders:
return identity
def f(stream):
for sharder in sharders:
stream = sharder(stream)
return stream
return f
def from_spec(spec):
if not spec:
return identity
a, b = parse_spec(spec)
return sharder(a, b)
def identity(x):
return x
def limiter(n):
if not n:
return identity
def f(stream):
ctr = 0
for x in stream:
ctr += 1
if ctr > n:
break
yield x
return f
def sharder(a, b):
assert b > 0
assert 0 <= a < b
def f(stream):
ctr = 0
for x in stream:
if (ctr % b) == a:
yield x
ctr += 1
return f
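# Usage sketch (illustrative only; the sharders operate on any iterable):
if __name__ == "__main__":
    # keep every 4th item starting at offset 1, then stop after 3 items
    pipeline = compose(from_spec("1/4"), limiter(3))
    print(list(pipeline(range(20))))  # -> [1, 5, 9]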
|
StarcoderdataPython
|
4841514
|
import matplotlib.pyplot as plt
import numpy as np
from matplotlib_venn import venn2
def box_plot(pseudoCount, controlCount):
x = np.arange(2)
counts = [pseudoCount, controlCount]
fig, ax = plt.subplots()
plt.bar(x, counts)
plt.xticks(x, ('Pseudouridine', 'control'))
#save bar chart of the total counts
plt.savefig("TotalCount.png")
plt.close()
def ven_diagram(pseudoCount, controlCount, totalUmers):
v = venn2((pseudoCount, controlCount, totalUmers), alpha=1)
plt.gca().set_facecolor('skyblue')  # set_axis_bgcolor was removed in newer matplotlib
plt.gca().set_axis_on()
#save graph
plt.savefig("CountVennDiagram.png")
plt.close()
|
StarcoderdataPython
|
3291997
|
<filename>ceilometerclient/tests/unit/test_utils.py
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import itertools
import mock
import six
from ceilometerclient.common import utils
from ceilometerclient.tests.unit import utils as test_utils
class UtilsTest(test_utils.BaseTestCase):
def test_prettytable(self):
class Struct(object):
def __init__(self, **entries):
self.__dict__.update(entries)
# test that the prettytable output is wellformatted (left-aligned)
with mock.patch('sys.stdout', new=six.StringIO()) as stdout:
utils.print_dict({'K': 'k', 'Key': 'Value'})
self.assertEqual('''\
+----------+-------+
| Property | Value |
+----------+-------+
| K | k |
| Key | Value |
+----------+-------+
''', stdout.getvalue())
with mock.patch('sys.stdout', new=six.StringIO()) as stdout:
utils.print_dict({'alarm_id': '262567fd-d79a-4bbb-a9d0-59d879b6',
'name': u'\u6d4b\u8bd5',
'description': u'\u6d4b\u8bd5',
'state': 'insufficient data',
'repeat_actions': 'False',
'type': 'threshold',
'threshold': '1.0',
'statistic': 'avg',
'alarm_actions': [u'http://something/alarm1',
u'http://something/alarm2'],
'ok_actions': [{"get_attr1":
[u"web_server_scaleup_policy1",
u"alarm_url1"]},
{"get_attr2":
[u"web_server_scaleup_policy2",
u"alarm_url2"]}],
'time_constraints': '[{name: c1,'
'\\n description: test,'
'\\n start: 0 18 * * *,'
'\\n duration: 1,'
'\\n timezone: US}]'},
wrap=72)
expected = u'''\
+------------------+-------------------------------------------------------\
--------+
| Property | Value \
|
+------------------+-------------------------------------------------------\
--------+
| alarm_actions | ["http://something/alarm1", "http://something/alarm2"]\
|
| alarm_id | 262567fd-d79a-4bbb-a9d0-59d879b6 \
|
| description | \u6d4b\u8bd5 \
|
| name | \u6d4b\u8bd5 \
|
| ok_actions | [{"get_attr1": ["web_server_scaleup_policy1", "alarm_u\
rl1"]}, |
| | {"get_attr2": ["web_server_scaleup_policy2", "alarm_ur\
l2"]}] |
| repeat_actions | False \
|
| state | insufficient data \
|
| statistic | avg \
|
| threshold | 1.0 \
|
| time_constraints | [{name: c1, \
|
| | description: test, \
|
| | start: 0 18 * * *, \
|
| | duration: 1, \
|
| | timezone: US}] \
|
| type | threshold \
|
+------------------+-------------------------------------------------------\
--------+
'''
# py2 prints str type, py3 prints unicode type
if six.PY2:
expected = expected.encode('utf-8')
self.assertEqual(expected, stdout.getvalue())
def test_print_list(self):
class Foo(object):
def __init__(self, one, two, three):
self.one = one
self.two = two
self.three = three
foo_list = [
Foo(10, 'a', 'B'),
Foo(8, 'c', 'c'),
Foo(12, '0', 'Z')]
def do_print_list(sortby):
with mock.patch('sys.stdout', new=six.StringIO()) as stdout:
utils.print_list(foo_list,
['one', 'two', 'three'],
['1st', '2nd', '3rd'],
{'one': lambda o: o.one * 10},
sortby)
return stdout.getvalue()
printed = do_print_list(None)
self.assertEqual('''\
+-----+-----+-----+
| 1st | 2nd | 3rd |
+-----+-----+-----+
| 100 | a | B |
| 80 | c | c |
| 120 | 0 | Z |
+-----+-----+-----+
''', printed)
printed = do_print_list(0)
self.assertEqual('''\
+-----+-----+-----+
| 1st | 2nd | 3rd |
+-----+-----+-----+
| 80 | c | c |
| 100 | a | B |
| 120 | 0 | Z |
+-----+-----+-----+
''', printed)
printed = do_print_list(1)
self.assertEqual('''\
+-----+-----+-----+
| 1st | 2nd | 3rd |
+-----+-----+-----+
| 120 | 0 | Z |
| 100 | a | B |
| 80 | c | c |
+-----+-----+-----+
''', printed)
def test_args_array_to_dict(self):
my_args = {
'matching_metadata': ['metadata.key=metadata_value'],
'other': 'value'
}
cleaned_dict = utils.args_array_to_dict(my_args,
"matching_metadata")
self.assertEqual({
'matching_metadata': {'metadata.key': 'metadata_value'},
'other': 'value'
}, cleaned_dict)
def test_args_array_to_list_of_dicts(self):
starts = ['0 11 * * *', '"0 11 * * *"', '\'0 11 * * *\'']
timezones = [None, 'US/Eastern', '"US/Eastern"', '\'US/Eastern\'']
descs = [None, 'de sc', '"de sc"', '\'de sc\'']
for start, tz, desc in itertools.product(starts, timezones, descs):
my_args = {
'time_constraints': ['name=const1;start=%s;duration=1'
% start],
'other': 'value'
}
expected = {
'time_constraints': [dict(name='const1',
start='0 11 * * *',
duration='1')],
'other': 'value'
}
if tz:
my_args['time_constraints'][0] += ';timezone=%s' % tz
expected['time_constraints'][0]['timezone'] = 'US/Eastern'
if desc:
my_args['time_constraints'][0] += ';description=%s' % desc
expected['time_constraints'][0]['description'] = 'de sc'
cleaned = utils.args_array_to_list_of_dicts(my_args,
'time_constraints')
self.assertEqual(expected, cleaned)
def test_key_with_slash_to_nested_dict(self):
my_args = {
'combination_rule/alarm_ids': ['id1', 'id2'],
'combination_rule/operator': 'and',
'threshold_rule/threshold': 400,
'threshold_rule/statictic': 'avg',
'threshold_rule/comparison_operator': 'or',
}
nested_dict = utils.key_with_slash_to_nested_dict(my_args)
self.assertEqual({
'combination_rule': {'alarm_ids': ['id1', 'id2'],
'operator': 'and'},
'threshold_rule': {'threshold': 400,
'statictic': 'avg',
'comparison_operator': 'or'},
}, nested_dict)
def test_arg(self):
@utils.arg(help="not_required_no_default.")
def not_required_no_default():
pass
_, args = not_required_no_default.__dict__['arguments'][0]
self.assertEqual("not_required_no_default.", args['help'])
@utils.arg(required=True, help="required_no_default.")
def required_no_default():
pass
_, args = required_no_default.__dict__['arguments'][0]
self.assertEqual("required_no_default. Required.", args['help'])
@utils.arg(default=42, help="not_required_default.")
def not_required_default():
pass
_, args = not_required_default.__dict__['arguments'][0]
self.assertEqual("not_required_default. Defaults to 42.", args['help'])
def test_merge_nested_dict(self):
dest = {'key': 'value',
'nested': {'key2': 'value2',
'key3': 'value3',
'nested2': {'key': 'value',
'some': 'thing'}}}
source = {'key': 'modified',
'nested': {'key3': 'modified3',
'nested2': {'key5': 'value5'}}}
utils.merge_nested_dict(dest, source, depth=1)
self.assertEqual({'key': 'modified',
'nested': {'key2': 'value2',
'key3': 'modified3',
'nested2': {'key5': 'value5'}}}, dest)
def test_merge_nested_dict_no_depth(self):
dest = {'key': 'value',
'nested': {'key2': 'value2',
'key3': 'value3',
'nested2': {'key': 'value',
'some': 'thing'}}}
source = {'key': 'modified',
'nested': {'key3': 'modified3',
'nested2': {'key5': 'value5'}}}
utils.merge_nested_dict(dest, source)
self.assertEqual({'key': 'modified',
'nested': {'key3': 'modified3',
'nested2': {'key5': 'value5'}}}, dest)
@mock.patch('prettytable.PrettyTable')
def test_format_nested_list_of_dict(self, pt_mock):
actual_rows = []
def mock_add_row(row):
actual_rows.append(row)
table = mock.Mock()
table.add_row = mock_add_row
table.get_string.return_value = "the table"
test_data = [
{'column_1': 'value_c', 'column_2': 'value_23'},
{'column_1': 'value_b', 'column_2': 'value_22'},
{'column_1': 'value_a', 'column_2': 'value_21'}
]
columns = ['column_1', 'column_2']
pt_mock.return_value = table
rval = utils.format_nested_list_of_dict(test_data, columns)
self.assertEqual("the table", rval)
self.assertEqual([['value_a', 'value_21'],
['value_b', 'value_22'],
['value_c', 'value_23']],
actual_rows)
|
StarcoderdataPython
|
51206
|
from moai.monads.human.pose.openpose import (
Split as OpenposeSplit,
JointMap as OpenposeJointMap
)
__all__ = [
'OpenposeSplit',
'OpenposeJointMap',
]
|
StarcoderdataPython
|
1794664
|
<reponame>blakev/stencil<filename>stencil/objects.py
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# >>
# stencil-blog, 2020
# - blake
# <<
import os
from datetime import datetime
from dataclasses import dataclass, field, asdict
from typing import Any, Dict, List, NamedTuple
import toml
class Validation(NamedTuple):
errors: List[str]
@property
def success(self):
return not self.errors
@dataclass
class FileMetadata:
raw: str
config: Dict[Any, Any] = field(init=False)
def __post_init__(self):
self.config = toml.loads(self.raw)
@dataclass
class MarkdownFile:
source_folder: str
fullpath: str
relative_path: str = field(init=False)
extension: str = field(init=False)
filename: str = field(init=False)
created: datetime = field(init=False)
modified: datetime = field(init=False)
is_index: bool = field(init=False, default=False)
metadata: FileMetadata = field(init=False)
markdown: str = field(init=False, default="")
def __post_init__(self):
self.relative_path = self.fullpath.split(self.source_folder)[-1]
self.filename = os.path.split(self.fullpath)[-1]
self.extension = os.path.splitext(self.filename)[-1]
self.created = datetime.fromtimestamp(os.stat(self.fullpath).st_ctime)
self.modified = datetime.fromtimestamp(os.stat(self.fullpath).st_mtime)
self.is_index = self.filename.startswith("index.")
@property
def size(self) -> int:
return os.stat(self.fullpath).st_size
def add_markdown(self, raw: str) -> None:
self.markdown = raw
def add_metadata(self, raw: str) -> FileMetadata:
self.metadata = FileMetadata(raw=raw)
return self.metadata
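# Usage sketch (illustrative; the paths below are assumptions and must exist on
# disk, because __post_init__ calls os.stat on them):
#
#   md = MarkdownFile(source_folder="/blog/content", fullpath="/blog/content/posts/index.md")
#   md.is_index                                   # -> True ("index." prefix)
#   md.relative_path                              # -> "/posts/index.md"
#   md.add_metadata('title = "Hello"').config     # -> {"title": "Hello"}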
|
StarcoderdataPython
|
17494
|
<reponame>judge2020/crossover-viz
from main import extract_data
if __name__ == '__main__':
top = {}
out = extract_data('CrossoverWiki.xml')
for name in out:
for link in name['links']:
w = link['with']
top[w] = top[w] + 1 if w in top else 1
top = dict(reversed(sorted(top.items(), key=lambda item: item[1])))
print(top)
|
StarcoderdataPython
|
1612228
|
<gh_stars>0
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import webkitpy.thirdparty.unittest2 as unittest
from webkitpy.common.checkout.baselineoptimizer import BaselineOptimizer
from webkitpy.common.checkout.scm.scm_mock import MockSCM
from webkitpy.common.host_mock import MockHost
from webkitpy.common.webkit_finder import WebKitFinder
class ExcludingMockSCM(MockSCM):
def __init__(self, exclusion_list, filesystem=None, executive=None):
MockSCM.__init__(self, filesystem, executive)
self._exclusion_list = exclusion_list
def exists(self, path):
if path in self._exclusion_list:
return False
return MockSCM.exists(self, path)
def delete(self, path):
return self.delete_list([path])
def delete_list(self, paths):
for path in paths:
if path in self._exclusion_list:
raise Exception("File is not SCM managed: " + path)
return MockSCM.delete_list(self, paths)
def move(self, origin, destination):
if origin in self._exclusion_list:
raise Exception("File is not SCM managed: " + origin)
return MockSCM.move(self, origin, destination)
class BaselineOptimizerTest(unittest.TestCase):
def test_move_baselines(self):
host = MockHost(scm=ExcludingMockSCM(['/mock-checkout/third_party/WebKit/LayoutTests/platform/mac/another/test-expected.txt']))
host.filesystem.write_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/platform/win/another/test-expected.txt', 'result A')
host.filesystem.write_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/platform/mac/another/test-expected.txt', 'result A')
host.filesystem.write_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/another/test-expected.txt', 'result B')
baseline_optimizer = BaselineOptimizer(host, host.port_factory.all_port_names(), skip_scm_commands=False)
baseline_optimizer._move_baselines('another/test-expected.txt', {
'/mock-checkout/third_party/WebKit/LayoutTests/platform/win': 'aaa',
'/mock-checkout/third_party/WebKit/LayoutTests/platform/mac': 'aaa',
'/mock-checkout/third_party/WebKit/LayoutTests': 'bbb',
}, {
'/mock-checkout/third_party/WebKit/LayoutTests': 'aaa',
})
self.assertEqual(host.filesystem.read_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/another/test-expected.txt'), 'result A')
def test_move_baselines_skip_scm_commands(self):
host = MockHost(scm=ExcludingMockSCM(['/mock-checkout/third_party/WebKit/LayoutTests/platform/mac/another/test-expected.txt']))
host.filesystem.write_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/platform/win/another/test-expected.txt', 'result A')
host.filesystem.write_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/platform/mac/another/test-expected.txt', 'result A')
host.filesystem.write_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/another/test-expected.txt', 'result B')
baseline_optimizer = BaselineOptimizer(host, host.port_factory.all_port_names(), skip_scm_commands=True)
baseline_optimizer._move_baselines('another/test-expected.txt', {
'/mock-checkout/third_party/WebKit/LayoutTests/platform/win': 'aaa',
'/mock-checkout/third_party/WebKit/LayoutTests/platform/mac': 'aaa',
'/mock-checkout/third_party/WebKit/LayoutTests': 'bbb',
}, {
'/mock-checkout/third_party/WebKit/LayoutTests/platform/linux': 'bbb',
'/mock-checkout/third_party/WebKit/LayoutTests': 'aaa',
})
self.assertEqual(host.filesystem.read_binary_file('/mock-checkout/third_party/WebKit/LayoutTests/another/test-expected.txt'), 'result A')
self.assertEqual(baseline_optimizer._files_to_delete, [
'/mock-checkout/third_party/WebKit/LayoutTests/platform/win/another/test-expected.txt',
])
self.assertEqual(baseline_optimizer._files_to_add, [
'/mock-checkout/third_party/WebKit/LayoutTests/another/test-expected.txt',
'/mock-checkout/third_party/WebKit/LayoutTests/platform/linux/another/test-expected.txt',
])
def _assertOptimization(self, results_by_directory, expected_new_results_by_directory, baseline_dirname='', expected_files_to_delete=None):
host = MockHost()
fs = host.filesystem
webkit_base = WebKitFinder(fs).webkit_base()
baseline_name = 'mock-baseline-expected.txt'
for dirname, contents in results_by_directory.items():
path = fs.join(webkit_base, 'LayoutTests', dirname, baseline_name)
fs.write_binary_file(path, contents)
baseline_optimizer = BaselineOptimizer(host, host.port_factory.all_port_names(), skip_scm_commands=expected_files_to_delete is not None)
self.assertTrue(baseline_optimizer.optimize(fs.join(baseline_dirname, baseline_name)))
for dirname, contents in expected_new_results_by_directory.items():
path = fs.join(webkit_base, 'LayoutTests', dirname, baseline_name)
if contents is None:
self.assertTrue(not fs.exists(path) or path in baseline_optimizer._files_to_delete)
else:
self.assertEqual(fs.read_binary_file(path), contents)
# Check that the files that were in the original set have been deleted where necessary.
for dirname in results_by_directory:
path = fs.join(webkit_base, 'LayoutTests', dirname, baseline_name)
if not dirname in expected_new_results_by_directory:
self.assertTrue(not fs.exists(path) or path in baseline_optimizer._files_to_delete)
if expected_files_to_delete:
self.assertEqual(baseline_optimizer._files_to_delete, expected_files_to_delete)
def test_linux_redundant_with_win(self):
self._assertOptimization({
'platform/win': '1',
'platform/linux': '1',
}, {
'platform/win': '1',
})
def test_covers_mac_win_linux(self):
self._assertOptimization({
'platform/mac': '1',
'platform/win': '1',
'platform/linux': '1',
'': None,
}, {
'': '1',
})
def test_overwrites_root(self):
self._assertOptimization({
'platform/mac': '1',
'platform/win': '1',
'platform/linux': '1',
'': '2',
}, {
'': '1',
})
def test_no_new_common_directory(self):
self._assertOptimization({
'platform/mac': '1',
'platform/linux': '1',
'': '2',
}, {
'platform/mac': '1',
'platform/linux': '1',
'': '2',
})
def test_local_optimization(self):
self._assertOptimization({
'platform/mac': '1',
'platform/linux': '1',
'platform/linux-x86': '1',
}, {
'platform/mac': '1',
'platform/linux': '1',
})
def test_local_optimization_skipping_a_port_in_the_middle(self):
self._assertOptimization({
'platform/mac-snowleopard': '1',
'platform/win': '1',
'platform/linux-x86': '1',
}, {
'platform/mac-snowleopard': '1',
'platform/win': '1',
})
def test_baseline_redundant_with_root(self):
self._assertOptimization({
'platform/mac': '1',
'platform/win': '2',
'': '2',
}, {
'platform/mac': '1',
'': '2',
})
def test_root_baseline_unused(self):
self._assertOptimization({
'platform/mac': '1',
'platform/win': '2',
'': '3',
}, {
'platform/mac': '1',
'platform/win': '2',
})
def test_root_baseline_unused_and_non_existant(self):
self._assertOptimization({
'platform/mac': '1',
'platform/win': '2',
}, {
'platform/mac': '1',
'platform/win': '2',
})
def test_virtual_root_redundant_with_actual_root(self):
self._assertOptimization({
'virtual/softwarecompositing': '2',
'compositing': '2',
}, {
'virtual/softwarecompositing': None,
'compositing': '2',
}, baseline_dirname='virtual/softwarecompositing')
def test_virtual_root_redundant_with_ancestors(self):
self._assertOptimization({
'virtual/softwarecompositing': '2',
'platform/mac/compositing': '2',
'platform/win/compositing': '2',
}, {
'virtual/softwarecompositing': None,
'compositing': '2',
}, baseline_dirname='virtual/softwarecompositing')
def test_virtual_root_redundant_with_ancestors_skip_scm_commands(self):
self._assertOptimization({
'virtual/softwarecompositing': '2',
'platform/mac/compositing': '2',
'platform/win/compositing': '2',
}, {
'virtual/softwarecompositing': None,
'compositing': '2',
},
baseline_dirname='virtual/softwarecompositing',
expected_files_to_delete=[
'/mock-checkout/third_party/WebKit/LayoutTests/virtual/softwarecompositing/mock-baseline-expected.txt',
'/mock-checkout/third_party/WebKit/LayoutTests/platform/mac/compositing/mock-baseline-expected.txt',
'/mock-checkout/third_party/WebKit/LayoutTests/platform/win/compositing/mock-baseline-expected.txt',
])
def test_virtual_root_not_redundant_with_ancestors(self):
self._assertOptimization({
'virtual/softwarecompositing': '2',
'platform/mac/compositing': '1',
}, {
'virtual/softwarecompositing': '2',
'platform/mac/compositing': '1',
}, baseline_dirname='virtual/softwarecompositing')
|
StarcoderdataPython
|
3380145
|
#Importing the required libraries
import numpy as np
import matplotlib.pyplot as plt
#Creating a new figure
plt.figure()
#Initializing x and y values
x_values = np.arange(-200, 250, 0.1)
y_values = (x_values - 25)**2 + 20
#Plotting the values of x and y in the output
plt.plot(x_values, y_values, '-m', lw = 2)
#Annotating the graph
plt.annotate('y=(x-25)^2 + 20', xy=(200, 30645), xytext=(-50, 40000),
arrowprops = dict(facecolor='black', shrink=0.05))
plt.annotate('Vertex', xy=(25, 20), xytext=(25, 10000),
arrowprops = dict(facecolor='black', shrink=0.05))
#Inserting proper labels and title
plt.xlabel('X-Axis')
plt.ylabel('Y-Axis')
plt.title('Question-1(b)')
#Displaying the graph in the output
plt.show()
|
StarcoderdataPython
|
96633
|
<filename>src/utils/pythonSrc/watchFaceParser/models/elements/goalProgress/circularGoalProgressElement.py
import logging
from watchFaceParser.models.elements.common.circularProgressElement import CircularProgressElement
class CircularGoalProgressElement(CircularProgressElement):
def __init__(self, parameter, parent, name = None):
super(CircularGoalProgressElement, self).__init__(parameter = parameter, parent = parent, name = name)
def draw3(self, drawer, resources, state):
assert(type(resources) == list)
super(CircularGoalProgressElement, self).draw4(drawer, resources, state.getSteps(), state.getGoal())
|
StarcoderdataPython
|
41668
|
#!/usr/bin/env python3
import zipfile
# The file to USE inside the zip, before compression
filein = "index.php"
print("[i] FileIn: %s\n" % filein)
# How deep are we going?
depth = ""
# Loop 11 times (00-10)
for i in range(11):
# The .zip file to use
zipname = "depth-%02d.zip" % i
print("[i] ZipName: %s" % zipname)
# Get the zip file out ready
with zipfile.ZipFile(zipname , 'w') as zip:
# The file INSIDE the zip
filezip = "%s%s" % (depth, filein)
print("[i] ZipFile: %s" % filezip)
# Write the zip file out
zip.write(filein, filezip)
# Increase depth for next loop
depth += "../"
print("\n[i] Done")
|
StarcoderdataPython
|
3285806
|
<filename>auxilearn/hypernet.py<gh_stars>10-100
from abc import abstractmethod
from torch import nn
from torch.nn.utils import weight_norm
class HyperNet(nn.Module):
"""This module is responsible for taking the losses from all tasks and return a single loss term.
We can think of this as our learnable loss criterion
"""
def __init__(self, main_task, input_dim):
super().__init__()
self.main_task = main_task
self.input_dim = input_dim
def forward(self, losses, outputs=None, labels=None, data=None):
"""
:param losses: losses form each task. This should be a tensor of size (batch_size, self.input_dim)
:param outputs: Optional. Parameters model output.
:param labels: Optional. Target.
:param data: Optional. Parameters model input.
:return:
"""
pass
def _init_weights(self):
pass
def get_weights(self):
"""
:return: list of model parameters
"""
return list(self.parameters())
class MonoHyperNet(HyperNet):
"""Monotonic Hypernets
"""
def __init__(self, main_task, input_dim, clamp_bias=False):
super().__init__(main_task=main_task, input_dim=input_dim)
self.clamp_bias = clamp_bias
def get_weights(self):
"""
:return: list of model parameters
"""
return list(self.parameters())
@abstractmethod
def clamp(self):
pass
class Identity(nn.Module):
def __init__(self, *args, **kwargs):
super().__init__()
def forward(self, x):
return x
class LinearHyperNet(HyperNet):
"""Linear weights, e.g. \sum_j \alpha_j * l_j
"""
def __init__(self, main_task, input_dim, skip_connection=False, init_value=1., weight_normalization=True):
super().__init__(main_task=main_task, input_dim=main_task)
self.init_value = init_value
self.skip_connection = skip_connection
self.linear = nn.Linear(input_dim, 1, bias=False)
self.weight_normalization = weight_normalization
self._init_weights()
if self.weight_normalization:
self.linear = weight_norm(self.linear)
def _init_weights(self):
# init to 1
# todo: maybe we want 1/num_tasks ?
self.linear.weight = nn.init.constant_(self.linear.weight, self.init_value)
def forward(self, losses, outputs=None, labels=None, data=None):
loss = self.linear(losses).mean()
if self.skip_connection:
loss += losses[:, self.main_task].mean()
return loss
class MonoLinearHyperNet(MonoHyperNet):
"""Linear weights, e.g. \sum_j \alpha_j * l_j
"""
def __init__(
self, main_task, input_dim, skip_connection=False, clamp_bias=False, init_value=1., weight_normalization=True
):
super().__init__(main_task=main_task, input_dim=main_task, clamp_bias=clamp_bias)
self.init_value = init_value
self.skip_connection = skip_connection
self.linear = nn.Linear(input_dim, 1, bias=False)
self._init_weights()
self.weight_normalization = weight_normalization
if self.weight_normalization:
self.linear = weight_norm(self.linear)
def _init_weights(self):
# init to 1
# todo: maybe we want 1/num_tasks ?
self.linear.weight = nn.init.constant_(self.linear.weight, self.init_value)
def forward(self, losses, outputs=None, labels=None, data=None):
loss = self.linear(losses).mean()
if self.skip_connection:
loss += losses[:, self.main_task].mean()
return loss
def clamp(self):
"""make sure parameters are non-negative
"""
if self.weight_normalization:
self.linear.weight_v.data.clamp_(0)
self.linear.weight_g.data.clamp_(0)
else:
self.linear.weight.data.clamp_(0)
if self.linear.bias is not None and self.clamp_bias:
self.linear.bias.data.clamp_(0)
class NonlinearHyperNet(HyperNet):
def __init__(
self,
main_task,
input_dim,
hidden_sizes=1,
nonlinearity=None,
bias=True,
dropout_rate=0.,
init_upper=None,
init_lower=None,
weight_normalization=True
):
super().__init__(main_task=main_task, input_dim=input_dim)
assert isinstance(hidden_sizes, (list, int)), "hidden sizes must be int or list"
if isinstance(hidden_sizes, int):
hidden_sizes = [hidden_sizes]
self.nonlinearity = nonlinearity if nonlinearity is not None else nn.Softplus()
self.dropout = nn.Dropout(dropout_rate)
self.weight_normalization = weight_normalization
self.bias = bias
dims = [self.input_dim] + hidden_sizes + [1]
self.layers = []
for j in range(len(dims) - 2):
self.layers.append(
self._get_layer(dims[j], dims[j + 1], init_upper=init_upper, init_lower=init_lower, bias=bias)
)
self.layers.append(self.nonlinearity)
self.layers.append(self.dropout)
self.layers.append(
self._get_layer(dims[-2], dims[-1], init_upper=init_upper, init_lower=init_lower, bias=False)
)
self.net = nn.Sequential(*self.layers)
def _get_layer(self, input_dim, output_dim, init_upper, init_lower, bias):
"""Create layer with weight normalization
:param input_dim:
:param output_dim:
:param init_upper:
:param init_lower:
:param bias:
:return:
"""
layer = nn.Linear(input_dim, output_dim, bias=bias)
self._init_layer(layer, init_upper=init_upper, init_lower=init_lower)
if self.weight_normalization:
return weight_norm(layer)
return layer
@staticmethod
def _init_layer(layer, init_lower, init_upper):
b = init_upper if init_upper is not None else 1.
a = init_lower if init_lower is not None else 0.
if isinstance(layer, nn.Linear):
layer.weight = nn.init.uniform_(layer.weight, b=b, a=a)
if layer.bias is not None:
layer.bias = nn.init.constant_(layer.bias, 0.)
def forward(self, losses, outputs=None, labels=None, data=None):
main_loss = losses[:, self.main_task].mean()
return self.net(losses).mean() + main_loss
class MonoNonlinearHyperNet(MonoHyperNet):
def __init__(
self,
main_task,
input_dim,
hidden_sizes=1,
nonlinearity=None,
bias=True,
dropout_rate=0.,
init_upper=None,
init_lower=None,
weight_normalization=True
):
super().__init__(main_task=main_task, input_dim=input_dim)
assert isinstance(hidden_sizes, (list, int)), "hidden sizes must be int or list"
if isinstance(hidden_sizes, int):
hidden_sizes = [hidden_sizes]
self.nonlinearity = nonlinearity if nonlinearity is not None else nn.Softplus()
self.dropout = nn.Dropout(dropout_rate)
self.weight_normalization = weight_normalization
if isinstance(self.nonlinearity, Identity):
bias = False
self.bias = bias
dims = [self.input_dim] + hidden_sizes + [1]
self.layers = []
for j in range(len(dims) - 2):
self.layers.append(
self._get_layer(dims[j], dims[j + 1], init_upper=init_upper, init_lower=init_lower, bias=bias)
)
self.layers.append(self.nonlinearity)
self.layers.append(self.dropout)
self.layers.append(
self._get_layer(dims[-2], dims[-1], init_upper=init_upper, init_lower=init_lower, bias=False)
)
self.net = nn.Sequential(*self.layers)
def _get_layer(self, input_dim, output_dim, init_upper, init_lower, bias):
"""Create layer with weight normalization
:param input_dim:
:param output_dim:
:param init_upper:
:param init_lower:
:param bias:
:return:
"""
layer = nn.Linear(input_dim, output_dim, bias=bias)
self._init_layer(layer, init_upper=init_upper, init_lower=init_lower)
if self.weight_normalization:
return weight_norm(layer)
return layer
@staticmethod
def _init_layer(layer, init_lower, init_upper):
b = init_upper if init_upper is not None else 1.
a = init_lower if init_lower is not None else 0.
if isinstance(layer, nn.Linear):
layer.weight = nn.init.uniform_(layer.weight, b=b, a=a)
if layer.bias is not None:
layer.bias = nn.init.constant_(layer.bias, 0.)
def forward(self, losses, outputs=None, labels=None, data=None):
main_loss = losses[:, self.main_task].mean()
return self.net(losses).mean() + main_loss
def clamp(self):
for l in self.net:
if isinstance(l, nn.Linear):
if self.weight_normalization:
l.weight_v.data.clamp_(0)
l.weight_g.data.clamp_(0)
else:
l.weight.data.clamp_(0)
if l.bias is not None and self.clamp_bias:
l.bias.data.clamp_(0)
class NoFCCNNHyperNet(HyperNet):
# NYU Input shape is (3, 288, 384)
def __init__(
self, main_task, reduction='mean', input_channels=3, init_upper=.1, init_lower=0, weight_normalization=False
):
super().__init__(input_dim=-1, main_task=main_task)
self.main_task = main_task
assert reduction in ['mean', 'sum']
self.reduction = reduction
self.weight_normalization = weight_normalization
self.conv = nn.Sequential(
self._get_layer(
input_channels, 8, kernel_size=3, stride=1, padding=1, init_lower=init_lower, init_upper=init_upper
),
nn.Softplus(),
nn.AvgPool2d(kernel_size=2, padding=0),
self._get_layer(8, 16, kernel_size=3, stride=1, padding=1, init_lower=init_lower, init_upper=init_upper),
nn.Softplus(),
nn.AvgPool2d(kernel_size=2, padding=0),
self._get_layer(16, 32, kernel_size=3, stride=1, padding=1, init_lower=init_lower, init_upper=init_upper),
nn.Softplus(),
nn.AvgPool2d(kernel_size=2, padding=0),
self._get_layer(32, 1, kernel_size=1, bias=False),
)
def _get_layer(
self,
input_channels,
output_channel,
init_upper=None,
init_lower=None,
bias=True,
kernel_size=3,
stride=1,
padding=1,
):
layer = nn.Conv2d(
input_channels,
output_channel,
kernel_size=kernel_size,
stride=stride,
padding=padding,
bias=bias
)
self._init_layer(layer, init_upper=init_upper, init_lower=init_lower)
if self.weight_normalization:
return weight_norm(layer)
return layer
@staticmethod
def _init_layer(layer, init_lower, init_upper):
b = init_upper if init_upper is not None else 1.
a = init_lower if init_lower is not None else 0.
if isinstance(layer, (nn.Linear, nn.Conv2d)):
layer.weight = nn.init.uniform_(layer.weight, b=b, a=a)
if layer.bias is not None:
layer.bias = nn.init.constant_(layer.bias, 0.)
def forward(self, losses, outputs=None, labels=None, data=None):
main_loss = .0
if self.main_task is not None:
# (bs, tasks, h, w)
if self.reduction == 'mean':
main_loss = losses[:, self.main_task, :, :].mean(dim=(1, 2)).mean(0)
else:
main_loss = losses[:, self.main_task, :, :].sum(dim=(1, 2)).mean(0)
# self.conv(losses) is of shape (bs, 1, 36, 48)
img_loss = self.conv(losses).mean((1, 2, 3))
return img_loss.mean() + main_loss
class MonoNoFCCNNHyperNet(MonoHyperNet):
# NYU Input shape is (3, 288, 384)
def __init__(
self, main_task, reduction='mean', init_upper=.1, init_lower=0., input_channels=3, weight_normalization=False
):
super().__init__(input_dim=-1, main_task=main_task)
self.main_task = main_task
assert reduction in ['mean', 'sum']
self.reduction = reduction
self.weight_normalization = weight_normalization
self.conv = nn.Sequential(
self._get_layer(
input_channels, 8, kernel_size=3, stride=1, padding=1, init_lower=init_lower, init_upper=init_upper
),
nn.Softplus(),
nn.AvgPool2d(kernel_size=2, padding=0),
self._get_layer(8, 16, kernel_size=3, stride=1, padding=1, init_lower=init_lower, init_upper=init_upper),
nn.Softplus(),
nn.AvgPool2d(kernel_size=2, padding=0),
self._get_layer(16, 1, kernel_size=1, bias=False, init_lower=init_lower, init_upper=init_upper),
)
def _get_layer(
self,
input_channels,
output_channel,
init_upper=None,
init_lower=None,
bias=True,
kernel_size=3,
stride=1,
padding=1,
):
layer = nn.Conv2d(
input_channels,
output_channel,
kernel_size=kernel_size,
stride=stride,
padding=padding,
bias=bias
)
self._init_layer(layer, init_upper=init_upper, init_lower=init_lower)
if self.weight_normalization:
return weight_norm(layer)
return layer
@staticmethod
def _init_layer(layer, init_lower, init_upper):
b = init_upper if init_upper is not None else 1.
a = init_lower if init_lower is not None else 0.
if isinstance(layer, (nn.Linear, nn.Conv2d)):
layer.weight = nn.init.uniform_(layer.weight, b=b, a=a)
if layer.bias is not None:
layer.bias = nn.init.constant_(layer.bias, 0.)
def forward(self, losses, outputs=None, labels=None, data=None):
main_loss = .0
if self.main_task is not None:
# (bs, tasks, h, w)
if self.reduction == 'mean':
main_loss = losses[:, self.main_task, :, :].mean(dim=(1, 2)).mean(0)
else:
main_loss = losses[:, self.main_task, :, :].sum(dim=(1, 2)).mean(0)
# self.conv(losses) is of shape (bs, 1, 36, 48)
img_loss = self.conv(losses).mean((1, 2, 3)).mean()
return img_loss + main_loss
def clamp(self):
for l in self.conv:
if isinstance(l, nn.Conv2d):
if self.weight_normalization:
l.weight_v.data.clamp_(0)
l.weight_g.data.clamp_(0)
else:
l.weight.data.clamp_(0)
if l.bias is not None and self.clamp_bias:
l.bias.data.clamp_(0)
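# Minimal usage sketch (added for illustration; the shapes below are assumptions
# based on the forward() signatures, not taken from the original repository):
if __name__ == "__main__":
    import torch
    hypernet = MonoLinearHyperNet(main_task=0, input_dim=3)
    losses = torch.rand(8, 3)        # (batch_size, num_tasks) per-task losses
    combined = hypernet(losses)      # scalar loss to backpropagate through
    hypernet.clamp()                 # keep the linear weights non-negative
    print(float(combined))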
|
StarcoderdataPython
|
1645897
|
# -*- coding=utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the MIT License.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# MIT License for more details.
"""Default lr_scheduler configs."""
from zeus.common import ConfigSerializable
class LrSchedulerConfig(ConfigSerializable):
"""Default LrScheduler Config."""
_class_type = "trainer.lr_scheduler"
_update_all_attrs = True
_exclude_keys = ['type']
type = 'MultiStepLR'
params = {"milestones": [75, 150], "gamma": 0.5}
@classmethod
def from_json(cls, data, skip_check=True):
"""Restore config from a dictionary or a file."""
cls = super(LrSchedulerConfig, cls).from_json(data, skip_check)
if "params" not in data:
cls.params = {}
return cls
@classmethod
def rules(cls):
"""Return rules for checking."""
rules = {"type": {"type": str},
"params": {"type": dict}}
return rules
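# Usage sketch (illustrative; assumes zeus' ConfigSerializable.from_json copies
# matching keys from the dictionary onto the config class):
#
#   cfg = LrSchedulerConfig.from_json({"type": "StepLR"})
#   # the input has no "params" key, so from_json above resets cfg.params to {}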
class LrSchedulerMappingDict(object):
"""Lr Scheduler Mapping Dictionary."""
type_mapping_dict = dict(
)
params_mapping_dict = dict(
)
|
StarcoderdataPython
|
113233
|
from .core import GetJob
# program Design
# get available meshroombot Job
# search in scan directory for the first available folder
# check with rename
# folder name indicated status
# available
# [-] busy
# [#] done
# search in [#] scans for Meshroom/mesh.obj
# (texture phase)
# process Job
# new scans
# create meshroom room .mg file
# run toNode MeshFiltering
# Texturing
# set mesh.obj as Texture inputMesh
|
StarcoderdataPython
|
34948
|
<filename>webtool/server/models/equipment.py
# -*- coding: utf-8 -*-
from django.db import models
from .mixins import SeasonsMixin
from .time_base import TimeMixin
from . import fields
class EquipmentManager(models.Manager):
def get_by_natural_key(self, code):
return self.get(code=code)
class Equipment(SeasonsMixin, TimeMixin, models.Model):
objects = EquipmentManager()
code = models.CharField(
'Kurzzeichen',
unique=True,
max_length=10,
help_text="Kurzzeichen für die Ausrüstung",
)
name = fields.NameField(
'Bezeichnung',
help_text="Bezeichnung der Ausrüstung",
)
description = fields.DescriptionField(
'Beschreibung',
help_text="Beschreibung der Ausrüstung",
)
default = models.BooleanField(
'Die initiale Ausrüstung',
blank=True, default=False
)
def natural_key(self):
return self.code,
natural_key.dependencies = ['server.season']
def __str__(self):
return "{} ({})".format(self.name, self.code)
class Meta:
get_latest_by = "updated"
verbose_name = "Ausrüstung"
verbose_name_plural = "Ausrüstungen"
unique_together = ('code', 'name')
ordering = ('code', )
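# Usage sketch (illustrative; relies on Django's standard natural-key machinery):
#
#   ski = Equipment.objects.get_by_natural_key("SKI")   # lookup by the unique code
#   ski.natural_key()                                    # -> ("SKI",)
#   str(ski)                                             # -> "Ski (SKI)" for name "Ski"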
|
StarcoderdataPython
|
3310938
|
# (C) Copyright (2018,2020) Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
###
from subprocess import CalledProcessError
import shutil
import subprocess
import threading
from redfish_object import RedfishObject
from time import sleep
import requests
import os
import json
from datetime import datetime
from datetime import timedelta
from esxi_operations import *
from ilo_operations import *
from image_operations import *
def image_deployment(server, config):
"""Primary function that triggers OS deployment for each of the server hardware.
This function handles end to end operations and triggers functions necessary for OS deployment.
Arguments:
servers {dictionary} -- server details as per the input_file/server_details.json
config {dictionary} -- Config details as per the input_file/config.json
Returns:
Boolean -- returns True on successful OS deployment, returns False on failure of OS deployment
"""
try:
server_serial_number = server['Server_serial_number']
os_type = config["OS_type"]
image_path = config["HTTP_server_base_url"]+config["OS_image_name"]
# Check if iso image present or not in the given location
iso_file_check = is_iso_file_present(image_path)
if not iso_file_check:
print("ISO image not preset in the specified location")
return False
# Create a REDFISH object
redfish_obj = create_redfish_object(server)
if not redfish_obj:
print("Error occured while creating redfish object for server {}".format(server_serial_number))
return False
# Get server model
server_model = get_server_model(redfish_obj)
if not server_model:
print("Failed to get server model")
return False
if "Gen10" not in server_model:
print("Server with serial number {} is not supported for this solution".format(server['Server_serial_number']))
return False
# Create custom ISO image with the given kickstart file
custom_iso_created = create_custom_iso_image_esxi(os_type, server, config, config['base_kickstart_filepath'])
print("Starting OS installation for server: " + server_serial_number)
# Get custom image path
custom_image_path = get_custom_image_path(config["HTTP_file_path"], os_type, server_serial_number)
# Get custom image url
custom_image_url = get_custom_image_url(config["HTTP_server_base_url"], os_type, server_serial_number)
# Get custom kickstart file path
custom_kickstart_path = get_custom_kickstart_path(config["HTTP_file_path"], os_type, server_serial_number)
custom_iso_present = is_iso_file_present(custom_image_url)
if(custom_iso_created and custom_iso_present):
# Unmount the previous ISO and mount the custom ISO image
unmount_virtual_media_iso(redfish_obj)
mount_virtual_media_iso(redfish_obj, custom_image_url, True)
power_status = get_post_state(redfish_obj)
if power_status == "PowerOff":
change_server_power_state(redfish_obj, server_serial_number, power_state="On")
else:
change_server_power_state(redfish_obj, server_serial_number, power_state="ForceRestart")
is_complete = wait_for_os_deployment_to_complete(redfish_obj, server['Server_serial_number'])
#unmount ISO once OS deployment is complete
unmount_virtual_media_iso(redfish_obj)
# Moving custom ISO image and Kickstart files to files folder
print("Moving custom image for server {}".format(server_serial_number))
move_file(custom_image_path)
print("Moving custom kickstart file for server {}".format(server_serial_number))
move_file(custom_kickstart_path)
# Logout of iLO
print("Logging out of iLO for server {}".format(server_serial_number))
redfish_obj.redfish_client.logout()
if is_complete:
print("OS installation is complete for server {}".format(server_serial_number))
return True
else:
print("OS installation failed on server {}".format(server_serial_number))
return False
else:
print("Error in fetching custom image for server {}".format(server_serial_number))
return False
except Exception as e:
print("Failure: Error occurred while deploying image on server {}".format(e))
return False
def unmount_virtual_media(server):
"""This function is to call unmount_virtual_media_iso function
Arguments:
servers {dict} -- server details
"""
try:
# Creating redfish object
redfish_obj = create_redfish_object(server)
unmount_virtual_media_iso(redfish_obj)
except Exception as e:
print("Failure: Failed to unmount virtual media {}".format(e))
def wait_for_os_deployment(server):
"""This function is to call wait_for_os_deployment function
Arguments:
servers {dict} -- server details
"""
try:
# Creating redfish object
redfish_obj = create_redfish_object(server)
wait_for_os_deployment_to_complete(redfish_obj, server['Server_serial_number'])
except Exception as e:
print("Failure: Image deployment failed {}".format(e))
|
StarcoderdataPython
|
3227453
|
<gh_stars>1-10
# Standard library
# 3rd party packages
import pytest
# Local source
from parametrization_clean.domain.cost.reax_error import ReaxError
@pytest.mark.usefixtures('reax_energies', 'dft_energies', 'weights')
def test_reax_error(reax_energies, dft_energies, weights):
reax_energy = reax_energies[0][0]
dft_energy = dft_energies[0]
weight = weights[0]
true_error = ((reax_energy - dft_energy) / weight) ** 2
assert ReaxError.error(reax_energy, dft_energy, weight) == true_error
assert reax_energies[0][0] == reax_energy
assert dft_energies[0] == dft_energy
assert weights[0] == weight
|
StarcoderdataPython
|
1692572
|
#!/usr/bin/env python3
# Simple test for NeoPixels on Raspberry Pi
import time
import board
import neopixel
# Choose an open pin connected to the Data In of the NeoPixel strip, i.e. board.D18
# NeoPixels must be connected to D10, D12, D18 or D21 to work.
pixel_pin = board.D18
global_brightness=0.2
# The number of NeoPixels
num_pixels = 60
# For cylon / kitt
width=6
start=10
length=40
delay=0.07
sidered=10
centrered=255
# The order of the pixel colors - RGB or GRB. Some NeoPixels have red and green reversed!
# For RGBW NeoPixels, simply change the ORDER to RGBW or GRBW.
ORDER = neopixel.GRB
pixels = neopixel.NeoPixel(pixel_pin, num_pixels, brightness=global_brightness, auto_write=False,
pixel_order=ORDER)
def cylonup(side, center, delay, start, end, width):
st = start
end = start-width+end
for i in range(st,end):
# leading edge
pixels[i+1]=(side,0,0)
# main eye
pixels[i]=(center,0,0)
dim=0.5
#trailing tail
for tail in range (i-1, i-(width-2),-1):
if (tail>0):
pixels[tail]=(int(side/(dim*2)),0,0)
dim+=1
# clear the end of the tail
pixels[i-(width-1)]=(0,0,0)
pixels.show()
time.sleep(delay)
def cylondown(side, center, delay, start, end, width):
st = start
end = start-width+end
for i in range(end, st, -1):
# leading edge
pixels[i-1]=(side,0,0)
# main eye
pixels[i]=(center,0,0)
dim=0.5
#trailing tail
for tail in range (i+1, i+(width+2)):
if (tail>0):
pixels[tail]=(int(side/(dim*2)),0,0)
dim+=1
# clear the end of the tail
pixels[i-(width+1)]=(0,0,0)
pixels.show()
time.sleep(delay)
def white(start, length):
for i in range(start,start+length):
pixels[i]=(255,255,255)
pixels.show()
def clear():
pixels.fill((0,0,0))
def whitepulse(updown=3):
if (updown==3):
whitepulse(1)
whitepulse(2)
else:
if (updown==1):
start=0
end=255
step=2
if (updown==2):
start=255
end=0
step=-2
for bright in range(start, end, step):
pixels.fill((bright,bright,bright))
pixels.show()
time.sleep(delay/2)
def splitin(side, center, delay, start, length, pulsewidth):
leftstart = start
leftend = start+(length/2)
rightstart = start+length
rightend = leftend
left = leftstart
right = rightstart
while left <= leftend+width:
# print("left: ",left, "right: ", right)
if (left<=leftend):
# leading edge
pixels[left+1]=(side,0,0)
pixels[right-1]=(side,0,0)
# main eye
pixels[left]=(center,0,0)
pixels[right]=(center,0,0)
dim=0.5
#trailing tail
for tail in range (left-1, left-(pulsewidth+2), -1):
if (tail>leftstart and tail<rightend):
pixels[tail]=(int(side/(dim*2)),0,0)
dim+=1
for tail in range (right+1, right+(pulsewidth+2)):
if (tail>leftstart and tail<rightend):
pixels[tail]=(int(side/(dim*2)),0,0)
dim+=1
# clear the end of the tail
pixels[left-(pulsewidth-1)]=(0,0,0)
pixels[right+(pulsewidth+1)]=(0,0,0)
pixels.show()
time.sleep(delay)
left+=1
right-=1
def splitout(side, center, delay, start, length, pulsewidth):
leftend = start
leftstart = start+(length/2)
rightend = start+length
rightstart = leftend
left = leftstart
right = rightstart
while left <= leftend:
# print("left: ",left, "right: ", right)
# leading edge
pixels[left-1]=(side,0,0)
pixels[right+1]=(side,0,0)
# main eye
pixels[left]=(center,0,0)
pixels[right]=(center,0,0)
dim=0.5
#trailing tail
for tail in range (left+1, left+(pulsewidth+2)):
if (tail>leftstart and tail<rightend):
pixels[tail]=(int(side/(dim*2)),0,0)
dim+=1
for tail in range (right-1, right-(pulsewidth+2)):
if (tail>leftstart and tail<rightend):
pixels[tail]=(int(side/(dim*2)),0,0)
dim+=1
# clear the end of the tail
pixels[left+(pulsewidth+1)]=(0,0,0)
pixels[right-(pulsewidth-1)]=(0,0,0)
pixels.show()
time.sleep(delay)
left-=1
right+=1
while True:
whitepulse(3)
time.sleep(0.5)
cylonup(sidered,centrered,delay,start,length, width)
time.sleep(delay)
cylondown(sidered,centrered,delay,start,length, width)
time.sleep(delay)
splitin(sidered,centrered,delay,start,length, width)
time.sleep(delay)
# splitout(sidered,centrered,delay,start,length, width)
# time.sleep(delay)
# clear()
|
StarcoderdataPython
|
187203
|
<reponame>shagun30/djambala-2<gh_stars>0
from django.core import validators
from django.core.exceptions import ImproperlyConfigured
from django.db import backend, connection, models
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import gettext_lazy as _
from django.utils.encoding import smart_str
import datetime
from django.utils.translation import ugettext as _
##############################################################################
#
# Copyright (c) 2001 Zope Corporation and Contributors. All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.0 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE
#
# <NAME>
# <NAME>, 14.01.2007
#
##############################################################################
import sha, binascii
from binascii import b2a_base64, a2b_base64
from random import choice, randrange
class SSHADigestScheme:
'''
SSHA is a modification of the SHA digest scheme with a salt
starting at byte 20 of the base64-encoded string.
'''
# Source: http://developer.netscape.com/docs/technote/ldap/pass_sha.html
def generate_salt(self):
# Salt can be any length, but not more than about 37 characters
# because of limitations of the binascii module.
# 7 is what Netscape's example used and should be enough.
# All 256 characters are available.
salt = ''
for n in range(7):
salt += chr(randrange(256))
return salt
def encrypt(self, pw):
pw = str(pw)
salt = self.generate_salt()
return b2a_base64(sha.new(pw + salt).digest() + salt)[:-1]
def validate(self, reference, attempt):
try:
ref = a2b_base64(reference)
except binascii.Error:
# Not valid base64.
return 0
salt = smart_str(ref[20:])
compare = b2a_base64(sha.new(smart_str(attempt) + salt).digest() + salt)[:-1]
return (compare == reference)
def pw_validate(reference, attempt):
"""Validate the provided password string, which uses LDAP-style encoding
notation. Reference is the correct password, attempt is clear text
password attempt."""
lp = len('{SSHA}')
return SSHADigestScheme().validate(reference[lp:], attempt)
def pw_encrypt(pw):
"""Encrypt the provided plain text password using the encoding if provided
and return it in an LDAP-style representation."""
return '{SSHA}' + SSHADigestScheme().encrypt(pw)
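# Round-trip sketch (illustrative; this module targets Python 2, so shown as comments):
#
#   stored = pw_encrypt('s3cret')     # -> '{SSHA}' + base64(SHA1(pw + salt) + salt)
#   pw_validate(stored, 's3cret')     # -> True
#   pw_validate(stored, 'wrong')      # -> False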
def check_password(raw_password, enc_password):
"""
Returns a boolean of whether the raw_password was correct. Handles
encryption formats behind the scenes.
"""
lp = len('{SSHA}')
#return SSHADigestScheme().validate(reference[lp:], attempt)
return SSHADigestScheme().validate(enc_password[lp:], raw_password)
"""
algo, salt, hsh = enc_password.split('$')
if algo == 'md5':
import md5
return hsh == md5.new(salt+raw_password).hexdigest()
elif algo == 'sha1':
import sha
return hsh == sha.new(salt+raw_password).hexdigest()
raise ValueError, "Got unknown password algorithm type in password."
"""
class SiteProfileNotAvailable(Exception):
pass
class UserManager(models.Manager):
def create_user(self, username, email, password):
"Creates and saves a User with the given username, e-mail and password."
now = datetime.datetime.now()
user = self.model(None, username, '', '', email.strip().lower(), 'placeholder', False, True, False, now, now)
user.set_password(password)
user.save()
return user
def make_random_password(self, length=10, allowed_chars='abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRSTUVWXYZ23456789'):
"Generates a random password with the given length and given allowed_chars"
# Note that default value of allowed_chars does not have "I" or letters
# that look like it -- just to avoid confusion.
from random import choice
return ''.join([choice(allowed_chars) for i in range(length)])
class User(models.Model):
"""Users within the Django authentication system are represented by this model.
Username and password are required. Other fields are optional.
"""
username = models.CharField(_(u'username'), max_length=60, unique=True, validator_list=[validators.isAlphaNumericURL],
help_text=_(u"Required. 30 characters or fewer. Alphanumeric characters only (letters, digits and underscores)."))
sex = models.CharField(_(u'sex'), max_length=10)
title = models.CharField(_(u'title'), max_length=30, blank=True)
first_name = models.CharField(_(u'first name'), max_length=30, blank=True)
last_name = models.CharField(_(u'last name'), max_length=40, blank=True)
email = models.EmailField(_(u'e-mail address'), blank=True)
password = models.CharField(_(u'password'), max_length=80,
help_text=_(u"Use '[algo]$[salt]$[hexdigest]' or use the <a href=\"password/\">change password form</a>."))
is_staff = models.BooleanField(_(u'staff status'), default=False, help_text=_(u"Designates whether the user can log into this admin site."))
is_active = models.BooleanField(_(u'active'), default=True,
help_text=_(u"Designates whether this user can log into the Django admin. Unselect this instead of deleting accounts."))
is_superuser = models.BooleanField(_(u'superuser status'), default=False,
help_text=_(u"Designates that this user has all permissions without explicitly assigning them."))
last_login = models.DateTimeField(_(u'last login'), default=datetime.datetime.now)
date_joined = models.DateTimeField(_(u'date joined'), default=datetime.datetime.now)
objects = UserManager()
class Meta:
verbose_name = _(u'user')
verbose_name_plural = _(u'users')
ordering = ('username',)
class Admin:
fields = (
(None, {'fields': ('username', 'password')}),
(_(u'Personal info'), {'fields': ('sex', 'first_name', 'last_name', 'email')}),
(_(u'Permissions'), {'fields': ('is_active', 'is_superuser')}),
(_(u'Important dates'), {'fields': ('last_login', 'date_joined')}),
)
list_display = ('username', 'email', 'sex', 'title', 'first_name', 'last_name')
list_filter = ('is_superuser',)
search_fields = ('username', 'first_name', 'last_name', 'email')
def __unicode__(self):
return self.username
def get_absolute_url(self):
return u"/users/%s/" % self.username
def is_anonymous(self):
"Always returns False. This is a way of comparing User objects to anonymous users."
return False
def is_authenticated(self):
"""Always return True. This is a way to tell if the user has been authenticated in templates.
"""
return True
def get_full_name(self):
"Returns the first_name plus the last_name, with a space in between."
if self.title :
full_name = u'%s %s %s' % (self.title, self.first_name, self.last_name)
else :
full_name = u'%s %s' % (self.first_name, self.last_name)
return full_name.strip()
def get_standard_name(self):
"Returns the last_name, first_name"
if self.title :
full_name = u'%s, %s %s' % (self.last_name, self.title, self.first_name)
else :
full_name = u'%s, %s' % (self.last_name, self.first_name)
return full_name.strip()
def set_password(self, raw_password):
#import sha, random
#algo = 'sha1'
#salt = sha.new(str(random.random())).hexdigest()[:5]
#hsh = sha.new(salt+raw_password).hexdigest()
#self.password = '%s$%s$%s' % (algo, salt, hsh)
self.password = pw_encrypt(raw_password)
def check_password(self, raw_password):
"""
Returns a boolean of whether the raw_password was correct. Handles
encryption formats behind the scenes.
"""
return check_password(raw_password, self.password)
def has_perm(self, perm):
"Returns True if the user has the specified permission."
if not self.is_active:
return False
if self.is_superuser:
return True
return perm in self.get_all_permissions()
def has_module_perms(self, app_label):
"Returns True if the user has any permissions in the given app label."
if not self.is_active:
return False
if self.is_superuser:
return True
return bool(len([p for p in self.get_all_permissions() if p[:p.index('.')] == app_label]))
def get_and_delete_messages(self):
messages = []
for m in self.message_set.all():
messages.append(m.message)
m.delete()
return messages
def email_user(self, subject, message, from_email=None):
"Sends an e-mail to this User."
from django.core.mail import send_mail
send_mail(subject, message, from_email, [self.email])
def get_profile(self):
"""
Returns site-specific profile for this user. Raises
SiteProfileNotAvailable if this site does not allow profiles.
"""
if not hasattr(self, '_profile_cache'):
from django.conf import settings
if not settings.AUTH_PROFILE_MODULE:
raise SiteProfileNotAvailable
try:
app_label, model_name = settings.AUTH_PROFILE_MODULE.split('.')
model = models.get_model(app_label, model_name)
self._profile_cache = model._default_manager.get(user__id__exact=self.id)
except (ImportError, ImproperlyConfigured):
raise SiteProfileNotAvailable
return self._profile_cache
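# Illustrative use of the helper methods defined above (values are examples only):
#   u = User(username='alice', first_name='Alice', last_name='Smith')
#   u.get_full_name()      # -> u'Alice Smith'
#   u.get_standard_name()  # -> u'Smith, Alice'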
class AnonymousUser(object):
id = None
username = ''
def __init__(self):
pass
def __unicode__(self):
return 'AnonymousUser'
def __eq__(self, other):
return isinstance(other, self.__class__)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return 1 # instances always return the same hash value
def save(self):
raise NotImplementedError
def delete(self):
raise NotImplementedError
def set_password(self, raw_password):
raise NotImplementedError
def check_password(self, raw_password):
raise NotImplementedError
def get_and_delete_messages(self):
return []
def is_anonymous(self):
return True
def is_authenticated(self):
return False
|
StarcoderdataPython
|
3300663
|
#
"""Demonstrate WeakValueDictionary.
"""
# end_pymotw_header
import gc
from pprint import pprint
import weakref
gc.set_debug(gc.DEBUG_UNCOLLECTABLE)
class ExpensiveObject:
def __init__(self, name):
self.name = name
def __repr__(self):
return "ExpensiveObject({})".format(self.name)
def __del__(self):
print(" (Deleting {})".format(self))
def demo(cache_factory):
# hold objects so any weak references
# are not removed immediately
all_refs = {}
# create the cache using the factory
print("CACHE TYPE:", cache_factory)
cache = cache_factory()
for name in ["one", "two", "three"]:
o = ExpensiveObject(name)
cache[name] = o
all_refs[name] = o
del o # decref
print(" all_refs =", end=" ")
pprint(all_refs)
print("\n Before, cache contains:", list(cache.keys()))
for name, value in cache.items():
print(" {} = {}".format(name, value))
del value # decref
# remove all references to the objects except the cache
print("\n Cleanup:")
del all_refs
gc.collect()
print("\n After, cache contains:", list(cache.keys()))
for name, value in cache.items():
print(" {} = {}".format(name, value))
print(" demo returning")
return
demo(dict)
print()
demo(weakref.WeakValueDictionary)
|
StarcoderdataPython
|
1611454
|
<reponame>davidkhala/oci-designer-toolk
# Copyright (c) 2020, 2021, Oracle and/or its affiliates.
# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl.
"""Provide Module Description
"""
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
__author__ = ["<NAME> (Oracle Cloud Solutions A-Team)"]
__version__ = "1.0.0"
__module__ = "okitWebDesigner"
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~#
import configparser
import functools
import oci
import os
import shutil
import tempfile
import time
import urllib
import giturlparse
import glob
import ast
from git import Repo
from flask import Blueprint
from flask import current_app
from flask import jsonify
from flask import redirect
from flask import render_template
from flask import request
from flask import send_from_directory
from flask import session
from flask import url_for
import json
from common.okitCommon import jsonToFormattedString
from common.okitCommon import logJson
from common.okitCommon import readJsonFile
from common.okitCommon import standardiseIds
from common.okitCommon import writeJsonFile
from common.okitLogging import getLogger
from model.okitValidation import OCIJsonValidator
from generators.okitAnsibleGenerator import OCIAnsibleGenerator
from generators.okitTerraform11Generator import OCITerraform11Generator
from generators.okitTerraformGenerator import OCITerraformGenerator
from generators.okitResourceManagerGenerator import OCIResourceManagerGenerator
from generators.okitMarkdownGenerator import OkitMarkdownGenerator
# Configure logging
logger = getLogger()
bp = Blueprint('okit', __name__, url_prefix='/okit', static_folder='static/okit')
debug_mode = str(os.getenv('DEBUG_MODE', 'False')).title() == 'True'
template_root = '/okit/visualiser/templates'
def standardiseJson(json_data={}, **kwargs):
logJson(json_data)
json_data = standardiseIds(json_data)
logJson(json_data)
return json_data
def readConfigFileSections(config_file='~/.oci/config'):
if os.getenv('OCI_CLI_AUTH', 'config') != 'instance_principal':
logger.debug('Config File {0!s:s}'.format(config_file))
abs_config_file = os.path.expanduser(config_file)
logger.debug('Config File {0!s:s}'.format(abs_config_file))
config = configparser.ConfigParser()
config.read(abs_config_file)
config_sections = []
if 'DEFAULT' in config:
config_sections = ['DEFAULT']
config_sections.extend(config.sections())
logger.info('Config Sections {0!s:s}'.format(config_sections))
else:
config_sections = ['Instance Principal']
return config_sections
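# For reference, a minimal ~/.oci/config that would yield ['DEFAULT'] above
# (all values are illustrative placeholders):
#   [DEFAULT]
#   user=ocid1.user.oc1..aaaa
#   fingerprint=11:22:33:44
#   key_file=~/.oci/oci_api_key.pem
#   tenancy=ocid1.tenancy.oc1..aaaa
#   region=us-ashburn-1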
def readGitConfigFile(config_file='~/.oci/git_repositories'):
logger.debug('Setting File {0!s:s}'.format(config_file))
abs_config_file = os.path.expanduser(config_file)
logger.debug('Setting File {0!s:s}'.format(abs_config_file))
config = configparser.ConfigParser()
config.read(abs_config_file)
repo_list = []
for each_git_section in config.sections():
repo_list.append({'label': each_git_section, 'branch': config[each_git_section]['branch'], 'url': config[each_git_section]['url']})
logger.info(repo_list)
return repo_list
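# Example ~/.oci/git_repositories entry (illustrative), using the keys read above:
#   [my-templates]
#   branch=main
#   url=git@github.com:example/okit-templates.git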
def getConfigFileValue(section, key, config_file='~/.oci/config'):
value = ''
if os.getenv('OCI_CLI_AUTH', 'config') != 'instance_principal':
logger.debug('Config File {0!s:s}'.format(config_file))
abs_config_file = os.path.expanduser(config_file)
logger.debug('Config File {0!s:s}'.format(abs_config_file))
config = configparser.ConfigParser()
config.read(abs_config_file)
value = config[section][key]
return value
def validateConfigFile(config_file='~/.oci/config'):
results = []
if os.getenv('OCI_CLI_AUTH', 'config') != 'instance_principal':
logger.debug('Config File {0!s:s}'.format(config_file))
abs_config_file = os.path.expanduser(config_file)
logger.debug('Config File {0!s:s}'.format(abs_config_file))
config = configparser.ConfigParser()
config.read(abs_config_file)
if len(config.sections()) == 0 and 'DEFAULT' not in config:
results.append('OCI Connect Config file is either missing or empty.')
else:
for section in config:
key_file = config[section]['key_file']
if not os.path.exists(os.path.expanduser(key_file)):
results.append('[{0!s:s}] Key File {1!s:s} does not exist.'.format(section, key_file))
logger.info(results)
return results
#
# Define Error Handlers
#
@bp.errorhandler(Exception)
def handle_exception(error):
message = [str(x) for x in error.args]
status_code = 500
success = False
response = {
'success': success,
'error': {
'type': error.__class__.__name__,
'message': message
}
}
logger.exception(error)
logJson(response)
return jsonify(response), status_code
#
# Define Endpoints
#
@bp.route('/designer', methods=(['GET']))
def designer():
local = current_app.config.get('LOCAL', False)
if not local and session.get('username', None) is None:
logger.info('<<<<<<<<<<<<<<<<<<<<<<<<< Redirect to Login >>>>>>>>>>>>>>>>>>>>>>>>>')
return redirect(url_for('okit.login'), code=302)
# Test if developer mode
developer_mode = (request.args.get('developer', default='false') == 'true')
if developer_mode:
logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<< Developer Mode >>>>>>>>>>>>>>>>>>>>>>>>>>")
# Test if experimental mode
experimental_mode = (request.args.get('experimental', default='false') == 'true')
if experimental_mode:
logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<< Experimental Mode >>>>>>>>>>>>>>>>>>>>>>>>>>")
# Test if cd3 mode
cd3_mode = (request.args.get('cd3', default='false') == 'true')
if cd3_mode:
logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<< CD3 Mode >>>>>>>>>>>>>>>>>>>>>>>>>>")
# Test if PCA mode
pca_mode = (request.args.get('pca', default='false') == 'true')
if pca_mode:
logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<< PCA Mode >>>>>>>>>>>>>>>>>>>>>>>>>>")
# Test if A2C mode
a2c_mode = (request.args.get('a2c', default='false') == 'true')
if a2c_mode:
logger.info("<<<<<<<<<<<<<<<<<<<<<<<<<< A2C Mode >>>>>>>>>>>>>>>>>>>>>>>>>>")
# Test if Ansible mode
ansible_mode = (request.args.get('ansible', default='false') == 'true')
if ansible_mode:
logger.info("<<<<<<<<<<<<<<<<<<<<<<<< Ansible Mode >>>>>>>>>>>>>>>>>>>>>>>>")
# Read Artifact Model Specific JavaScript Files
artefact_model_js_files = sorted(os.listdir(os.path.join(bp.static_folder, 'model', 'js', 'artefacts')))
# Read Artifact View Specific JavaScript Files
if os.path.exists(os.path.join(bp.static_folder, 'view', 'js', 'artefacts')) and os.path.isdir(os.path.join(bp.static_folder, 'view', 'js', 'artefacts')):
artefact_view_js_files = sorted(os.listdir(os.path.join(bp.static_folder, 'view', 'js', 'artefacts')))
else:
artefact_view_js_files = []
artefact_view_js_files.extend(sorted(os.listdir(os.path.join(bp.static_folder, 'view', 'designer', 'js', 'artefacts'))))
# Read Pallete Json
palette_json = readJsonFile(os.path.join(bp.static_folder, 'palette', 'palette.json'))
# # Get Palette Icon Groups / Icons
# svg_files = []
# svg_icon_groups = {}
# # Read Files
# for (dirpath, dirnames, filenames) in os.walk(os.path.join(bp.static_folder, 'palette')):
# logger.debug('dirpath : {0!s:s}'.format(dirpath))
# logger.debug('dirnames : {0!s:s}'.format(dirnames))
# logger.debug('filenames : {0!s:s}'.format(filenames))
# if os.path.basename(dirpath) != 'palette':
# svg_files.extend([os.path.join(os.path.basename(dirpath), f) for f in filenames if f.endswith(".svg")])
# svg_icon_groups[os.path.basename(dirpath)] = [f for f in filenames if f.endswith(".svg")]
# else:
# svg_files.extend([f for f in filenames if f.endswith(".svg")])
# logger.debug('Files Walk : {0!s:s}'.format(svg_files))
# logger.debug('SVG Icon Groups {0!s:s}'.format(svg_icon_groups))
# palette_icon_groups = []
# for key in sorted(svg_icon_groups.keys()):
# palette_icon_group = {'name': str(key).title(), 'icons': []}
# for palette_svg in sorted(svg_icon_groups[key]):
# palette_icon = {'svg': os.path.join(key, palette_svg), 'title': os.path.basename(palette_svg).split('.')[0].replace('_', ' ')}
# palette_icon_group['icons'].append(palette_icon)
# palette_icon_groups.append(palette_icon_group)
# logger.debug('Palette Icon Groups : {0!s:s}'.format(palette_icon_groups))
# logJson(palette_icon_groups)
config_sections = {"sections": readConfigFileSections()}
#Render The Template
return render_template('okit/okit_designer.html',
artefact_model_js_files=artefact_model_js_files,
artefact_view_js_files=artefact_view_js_files,
palette_json=palette_json,
# palette_icon_groups=palette_icon_groups,
# fragment_icons=fragment_icons,
# okit_templates_groups=template_groups,
# okit_template_categories=template_categories,
local_okit=local,
developer_mode=developer_mode, experimental_mode=experimental_mode, cd3_mode=cd3_mode, a2c_mode=a2c_mode, pca_mode=pca_mode, ansible_mode=ansible_mode)
# Template Processing
@bp.route('/panel/templates', methods=(['GET']))
def templates_panel():
# ref_arch_root = os.path.join(bp.static_folder, 'templates', 'reference_architecture')
ref_arch_root = os.path.join(current_app.instance_path, 'templates', 'reference_architecture')
ref_arch_templates = dir_to_json(ref_arch_root, current_app.instance_path, 'children', 'templates')
# ref_arch_templates = dir_to_json(ref_arch_root, ref_arch_root, 'children', 'templates')
ref_arch_category = {'name': 'Reference Architectures', 'path': 'reference_architecture', 'children': [], 'templates': []}
ref_arch_category = hierarchy_category(ref_arch_category, ref_arch_templates, current_app.instance_path)
# user_root = os.path.join('okit', 'templates', 'user')
user_root = os.path.join(current_app.instance_path, 'templates', 'user')
user_templates = dir_to_json(user_root, current_app.instance_path, 'children', 'templates')
# user_templates = dir_to_json(user_root, user_root, 'children', 'templates')
user_category = {'name': 'User', 'path': 'user', 'children': [], 'templates': []}
user_category = hierarchy_category(user_category, user_templates, current_app.instance_path)
template_categories = [ref_arch_category, user_category]
logger.debug(f'Template Categories : {jsonToFormattedString(template_categories)}')
#Render The Template
return render_template('okit/templates_panel.html', template_categories=template_categories)
def dir_to_json(rootdir, reltodir=None, dkey='dirs', fkey='files'):
# logger.info(f'Root Path: {rootdir}')
# logger.info(f'Relative to Path: {reltodir}')
# logger.info(f'Relative Path: {os.path.relpath(rootdir, reltodir)}')
hierarchy = {
'id': os.path.relpath(rootdir, reltodir).replace('/','_'),
'name': os.path.basename(rootdir),
'path': rootdir
}
hierarchy[dkey] = []
hierarchy[fkey] = []
if reltodir is not None:
hierarchy['path'] = os.path.relpath(rootdir, reltodir)
with os.scandir(rootdir) as it:
for entry in it:
if not entry.name.startswith('.'):
if entry.name.endswith('.json') and entry.is_file():
# hierarchy[fkey].append(entry.name)
hierarchy[fkey].append({'id': entry.name.replace('.','_'), 'name': entry.name, 'json': entry.name, 'path': hierarchy['path']})
elif entry.is_dir():
hierarchy[dkey].append(dir_to_json(os.path.join(rootdir, entry.name), reltodir, dkey, fkey))
logger.debug(f'Directory Hierarchy : {jsonToFormattedString(hierarchy)}')
return hierarchy
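# Shape of the value returned above, shown with the 'children'/'templates' key names the
# callers in this module pass in (paths and names are illustrative):
#   {'id': 'templates_user', 'name': 'user', 'path': 'templates/user',
#    'children': [ ...nested dicts of the same shape... ],
#    'templates': [{'id': 'a_json', 'name': 'a.json', 'json': 'a.json', 'path': 'templates/user'}]}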
def hierarchy_category(category, hierarchy, root=''):
logger.debug(f'Category : {jsonToFormattedString(category)}')
logger.debug(f'Hierarchy : {jsonToFormattedString(hierarchy)}')
logger.debug(f'Root : {root}')
for template in hierarchy['templates']:
path = hierarchy['path'] if hierarchy['path'] != '.' else ''
category['templates'].append(get_template_entry(root, path, template['json']))
for child in hierarchy['children']:
category['children'].append(hierarchy_category({"name": os.path.basename(child["path"]).replace("_", " ").title(), "path": child["path"], "id": child["id"], "children": [], "templates": []}, child, root))
return category
def get_template_entry(root, path, json_file):
# json_file = os.path.join(path, template_file)
okit_template = {'path': path, 'json': json_file, 'id': json_file.replace('.', '_').replace('/', '_')}
try:
filename = os.path.join(root, okit_template['path'], okit_template['json'])
template_json = readJsonFile(filename)
logger.debug('Template Json : {0!s:s}'.format(template_json))
okit_template['name'] = template_json['title']
okit_template['description'] = template_json.get('description', template_json['title'])
except Exception as e:
logger.debug(e)
return okit_template
@bp.route('/templates/load', methods=(['GET']))
def templates():
if request.method == 'GET':
query_string = request.query_string
parsed_query_string = urllib.parse.unquote(query_string.decode())
query_json = json.loads(parsed_query_string)
templates_root = os.path.join(current_app.instance_path, query_json['root_dir'].strip('/'))
templates = dir_to_json(templates_root, current_app.instance_path)
logger.debug(f'Templates : {jsonToFormattedString(templates)}')
return templates
@bp.route('/template/load', methods=(['GET']))
def template_load():
if request.method == 'GET':
query_string = request.query_string
parsed_query_string = urllib.parse.unquote(query_string.decode())
query_json = json.loads(parsed_query_string)
template_file = query_json['template_file']
return send_from_directory(current_app.instance_path, template_file, mimetype='application/json', as_attachment=False)
@bp.route('/template/save', methods=(['POST']))
def template_save():
if request.method == 'POST':
instance_path = current_app.instance_path
root_dir = request.json["root_dir"].strip('/')
template_filename = request.json["template_file"].strip('/')
okit_json = request.json["okit_json"]
git = request.json.get('git', False)
git_commit_msg = request.json.get('git_commit_msg', '')
logger.info(f'Save Template : {root_dir}')
template_dir = os.path.dirname(template_filename)
full_dir = os.path.join(instance_path, root_dir, template_dir)
full_filename = os.path.join(full_dir, os.path.basename(template_filename))
full_filename = os.path.join(instance_path, root_dir, template_filename)
if not os.path.exists(full_dir):
os.makedirs(full_dir, exist_ok=True)
writeJsonFile(okit_json, full_filename)
if git:
top_dir = os.path.normpath(os.path.dirname(template_filename)).split(os.sep)
git_repo_dir = os.path.join(instance_path, root_dir, top_dir[0], top_dir[1])
# while top_dir != '':
# git_repo_dir = os.path.join(instance_path, root_dir, top_dir)
# logger.info(f'Top Dir : {top_dir}')
# top_dir = os.path.dirname(top_dir)
logger.info(f'Git Root Dir : {git_repo_dir}')
repo = Repo(git_repo_dir)
repo.index.add(full_filename)
repo.index.commit("commit changes from okit:" + git_commit_msg)
repo.remotes.origin.pull()
repo.remotes.origin.push()
return template_filename
# Git Processing
@bp.route('/panel/git', methods=(['GET']))
def git_panel():
if request.method == 'GET':
repositories = readGitConfigFile()
git_resources = {}
for repo in repositories:
logger.debug(f'Repo: {jsonToFormattedString(repo)}')
label = repo['label']
branch = repo['branch']
url = repo['url']
parsed_url = giturlparse.parse(url)
logger.debug(f'Parsed Url: {parsed_url}')
git_resource_dir = os.path.join(current_app.instance_path, 'git', parsed_url.resource)
git_repo_dir = os.path.join(git_resource_dir, parsed_url.name)
try:
if os.path.exists(git_repo_dir):
repo = Repo(git_repo_dir)
repo.remotes.origin.pull()
else:
repo = Repo.clone_from(url, git_repo_dir, branch=branch, no_single_branch=True)
repo.remotes.origin.pull()
except Exception as e:
logger.exception(e)
git_resources[parsed_url.resource] = git_resource_dir
git_repositories = []
for git_resource, git_resource_dir in git_resources.items():
repo_templates = dir_to_json(git_resource_dir, current_app.instance_path, 'children', 'templates')
repository = {'name': git_resource, 'path': git_resource_dir, 'children': [], 'templates': []}
git_repositories.append(repo_templates)
#Render The Template
logger.debug(f'Repository: {jsonToFormattedString(git_repositories)}')
return render_template('okit/git_repositories_panel.html', git_repositories=git_repositories)
# Local Filesystem Processing
@bp.route('/panel/local', methods=(['GET']))
def local_panel():
if request.method == 'GET':
local_filesystem_dir = os.path.join(current_app.instance_path, 'local')
local_filesystem = [dir_to_json(local_filesystem_dir, current_app.instance_path, 'children', 'templates')]
#Render The Template
logger.debug(f'Local Filesystem: {jsonToFormattedString(local_filesystem)}')
return render_template('okit/local_panel.html', local_filesystem=local_filesystem)
@bp.route('/propertysheets/<string:sheet>', methods=(['GET']))
def propertysheets(sheet):
return render_template('okit/propertysheets/{0:s}'.format(sheet))
@bp.route('/valueproposition/<string:sheet>', methods=(['GET']))
def valueproposition(sheet):
return render_template('okit/valueproposition/{0:s}'.format(sheet))
@bp.route('/generate/<string:language>/<string:destination>', methods=(['GET', 'POST']))
def generate(language, destination):
logger.info('Language : {0:s} - {1:s}'.format(str(language), str(request.method)))
logger.info('Destination : {0:s} - {1:s}'.format(str(destination), str(request.method)))
logger.debug('JSON : {0:s}'.format(str(request.json)))
if request.method == 'POST':
use_vars = request.json.get("use_variables", True)
try:
if destination == 'git':
git_url, git_branch = request.json['git_repository'].split('*')
parsed_git_url = giturlparse.parse(git_url)
generate_git_dir = os.path.abspath(os.path.join(bp.static_folder, 'git'))
logger.info(generate_git_dir)
if not os.path.exists(generate_git_dir):
os.makedirs(generate_git_dir, exist_ok=True)
git_repo_dir = os.path.abspath(os.path.join(generate_git_dir, parsed_git_url.name))
if os.path.exists(git_repo_dir):
repo = Repo(git_repo_dir)
repo.remotes.origin.pull()
else:
repo = Repo.clone_from(git_url, git_repo_dir, branch=git_branch, no_single_branch=True)
repo.remotes.origin.pull()
destination_dir = os.path.abspath(os.path.join(git_repo_dir, request.json['git_repository_filename']))
else:
destination_dir = tempfile.mkdtemp();
if language == 'terraform':
generator = OCITerraformGenerator(template_root, destination_dir, request.json, use_vars=use_vars)
elif language == 'ansible':
generator = OCIAnsibleGenerator(template_root, destination_dir, request.json, use_vars=use_vars)
elif language == 'terraform11':
generator = OCITerraform11Generator(template_root, destination_dir, request.json)
elif language == 'resource-manager':
generator = OCIResourceManagerGenerator(template_root, destination_dir, request.json)
elif language == 'markdown':
generator = OkitMarkdownGenerator(template_root, destination_dir, request.json)
generator.generate()
generator.writeFiles()
if destination == 'git':
git_commit_msg = request.json['git_repository_commitmsg']
repo.index.add(destination_dir)
repo.index.commit("commit changes from okit:" + git_commit_msg)
repo.remotes.origin.push(git_branch)
return language.capitalize()+" files successfully uploaded to GIT Repository"
else:
zipname = generator.createZipArchive(os.path.join(destination_dir, language), "/tmp/okit-{0:s}".format(str(language)))
logger.info('Zipfile : {0:s}'.format(str(zipname)))
shutil.rmtree(destination_dir)
filename = os.path.split(zipname)
logger.info('Split Zipfile : {0:s}'.format(str(filename)))
return zipname
except Exception as e:
logger.exception(e)
return str(e), 500
else:
return send_from_directory('/tmp', "okit-{0:s}.zip".format(str(language)), mimetype='application/zip', as_attachment=True)
# TODO: Delete
@bp.route('/saveas/<string:savetype>', methods=(['POST']))
def saveas(savetype):
logger.info('Save Type : {0:s} - {1:s}'.format(str(savetype), str(request.method)))
logger.debug('JSON : {0:s}'.format(str(request.json)))
if request.method == 'POST':
try:
filename = '{0!s:s}.json'.format(request.json['title'].replace(' ', '_').lower())
if savetype == 'template':
template_type = request.json['template_type']
if len(template_type.strip()) == 0:
fullpath = os.path.abspath(os.path.join(bp.static_folder, 'templates', 'uncategorised', filename))
else:
typedir = os.path.abspath(os.path.join(bp.static_folder, 'templates', template_type.strip().replace(' ', '_').lower()))
if not os.path.exists(typedir):
os.makedirs(typedir, exist_ok=True)
fullpath = os.path.abspath(os.path.join(typedir, filename))
logger.info('Template File Name : {0!s:s}'.format(filename))
logger.info('>>>>>> Path to file {0!s:s}'.format(fullpath))
writeJsonFile(request.json, fullpath)
return filename
elif savetype == 'git':
git_url, git_branch = request.json['git_repository'].split('*')
git_commit_msg = request.json['git_repository_commitmsg']
if request.json['git_repository_filename'] != '':
filename = request.json['git_repository_filename'].replace(' ', '_').lower()
if not filename.endswith('.json'):
filename = '{0!s:s}.json'.format(filename)
if request.json['git_repository_directory'] != '':
filename = os.path.join(request.json['git_repository_directory'], filename)
parsed_git_url = giturlparse.parse(git_url)
template_git_dir = os.path.abspath(os.path.join(bp.static_folder, 'templates', 'git'))
if not os.path.exists(template_git_dir):
os.makedirs(template_git_dir, exist_ok=True)
git_repo_dir = os.path.abspath(os.path.join(template_git_dir, parsed_git_url.name))
if os.path.exists(git_repo_dir):
repo = Repo(git_repo_dir)
repo.remotes.origin.pull()
else:
repo = Repo.clone_from(git_url, git_repo_dir, branch=git_branch, no_single_branch=True)
repo.remotes.origin.pull()
fullpath = os.path.abspath(os.path.join(git_repo_dir, filename))
# Remove git info
del request.json['git_repository']
del request.json['git_repository_directory']
del request.json['git_repository_filename']
del request.json['git_repository_commitmsg']
writeJsonFile(request.json, fullpath)
repo.index.add(fullpath)
repo.index.commit("commit changes from okit:" + git_commit_msg)
repo.remotes.origin.push(git_branch)
return filename
except Exception as e:
logger.exception(e)
return str(e), 500
@bp.route('/dropdown/data/<string:profile>', methods=(['GET', 'POST']))
def dropdownData(profile):
dropdown_dir = os.path.abspath(os.path.join(bp.static_folder, 'json', 'dropdown'))
shipped_dropdown_file = os.path.abspath(os.path.join(dropdown_dir, 'dropdown.json'))
# shipped_dropdown_file = os.path.abspath(os.path.join(bp.static_folder, 'json', 'dropdown', 'dropdown.json'))
profile_dropdown_dir = os.path.abspath(os.path.join(dropdown_dir, 'profiles'))
profile_dropdown_file = os.path.abspath(os.path.join(profile_dropdown_dir, f'{profile}.json'))
# Check if profile specific dropdown file exists if not use the default
if request.method == 'GET':
if os.path.exists(profile_dropdown_file):
dropdown_file = profile_dropdown_file
logger.info(f'Loading Dropdown file {dropdown_file}')
dropdown_json = readJsonFile(dropdown_file)
else:
dropdown_file = shipped_dropdown_file
logger.info(f'Loading Dropdown file {dropdown_file}')
dropdown_json = readJsonFile(dropdown_file)
dropdown_json["shipped"] = True
dropdown_json["default"] = True
return dropdown_json
elif request.method == 'POST':
logger.info(f'Saving Dropdown file {profile_dropdown_file}')
writeJsonFile(request.json, profile_dropdown_file)
return request.json
else:
return 'Unknown Method', 500
@bp.route('config/sections', methods=(['GET']))
def configSections():
if request.method == 'GET':
config_sections = {"sections": readConfigFileSections()}
logger.info('Config Sections {0!s:s}'.format(config_sections))
return config_sections
else:
return 'Unknown Method', 500
@bp.route('config/appsettings', methods=(['GET']))
def appSettings():
if request.method == 'GET':
config_settings = {"gitsections": readGitConfigFile()}
logger.info('Config Settings {0!s:s}'.format(config_settings))
return config_settings
else:
return 'Unknown Method', 500
@bp.route('config/region/<string:section>', methods=(['GET']))
def configRegion(section):
if request.method == 'GET':
response = {"name": getConfigFileValue(section, 'region')}
return response
else:
return 'Unknown Method', 500
@bp.route('config/validate', methods=(['GET']))
def configValidate():
if request.method == 'GET':
response = {"results": validateConfigFile()}
return response
else:
return 'Unknown Method', 500
@bp.route('validate', methods=(['POST']))
def validateJson():
logger.debug('JSON : {0:s}'.format(str(request.json)))
if request.method == 'POST':
logJson(request.json)
# Validate input json
validator = OCIJsonValidator(request.json)
result = {"valid": validator.validate(), "results": validator.getResults()}
return json.dumps(result, sort_keys=False, indent=2, separators=(',', ': '))
else:
return '404'
# TODO: Delete
@bp.route('loadfromgit', methods=(['POST']))
def loadfromgit():
logger.debug('JSON : {0:s}'.format(str(request.json)))
if request.method == 'POST':
try:
git_url, git_branch = request.json['git_repository'].split('*')
parsed_git_url = giturlparse.parse(git_url)
template_git_dir = os.path.abspath(os.path.join(bp.static_folder, 'templates', 'git'))
if not os.path.exists(template_git_dir):
os.makedirs(template_git_dir, exist_ok=True)
git_repo_dir = os.path.abspath(os.path.join(template_git_dir, parsed_git_url.name))
if os.path.exists(git_repo_dir):
repo = Repo(git_repo_dir)
repo.remotes.origin.pull()
else:
repo = Repo.clone_from(git_url, git_repo_dir, branch=git_branch, no_single_branch=True)
repo.remotes.origin.pull()
files = list(glob.iglob(os.path.join(git_repo_dir, "*.json")))
logger.info(files)
files_list = [f.replace("okit/okitweb/", "") for f in files]
logger.debug('JSON : {0:s}'.format(str(request.json)))
logger.debug('Files Walk : {0!s:s}'.format(files_list))
result = {"fileslist": files_list}
logger.info(result)
return json.dumps(result)
except Exception as e:
logger.exception(e)
return str(e), 500
|
StarcoderdataPython
|
1707623
|
<reponame>tulth/diy-acura-bluetooth<filename>target_desktop/py_lib/mbus/__init__.py
#!/bin/env python
from __future__ import print_function
import sys
import os
import ctypes
from . import nibbles
BIT_TIME = 3100
BIT_ZERO_LOW_TIME = 675
BIT_ONE_LOW_TIME = 1890
BIT_ONEVAL_THRESH_TIME = (BIT_ZERO_LOW_TIME + BIT_ONE_LOW_TIME) >> 1
BIT_LOW_TOO_LONG_TIME = (BIT_TIME + BIT_ONE_LOW_TIME) >> 1
NIBBLE_END_GAP_TIME = (BIT_TIME)
INTERBIT_TIMEOUT_TIME = (BIT_TIME)
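# Derived values for reference (same microsecond units as above):
#   BIT_ONEVAL_THRESH_TIME = (675 + 1890) >> 1  = 1282  (midpoint between the '0' and '1' low times)
#   BIT_LOW_TOO_LONG_TIME  = (3100 + 1890) >> 1 = 2495  (midpoint between the '1' low time and a full bit)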
def nibbleSeq2Str(nibbleSeq):
return "0x" + "".join(["{:x}".format(nibble) for nibble in nibbleSeq])
class fifo(ctypes.Structure):
_fields_ = [
("buffer", ctypes.c_void_p),
("bufferEnd", ctypes.c_void_p),
("capacity", ctypes.c_size_t),
("count", ctypes.c_size_t),
("elementSize", ctypes.c_size_t),
("head", ctypes.c_void_p),
("tail", ctypes.c_void_p),
]
def __str__(self):
return "fifo: count {}".format(self.count)
class MbusPhyTxRxStruct(ctypes.Structure):
_fields_ = [
("state", ctypes.c_uint8),
("bitShifter", ctypes.c_uint8),
("microSecTimeStamp", ctypes.c_ulong),
("byteFifo", fifo),
]
def __str__(self):
return "state{} bitshftr{} usecTS{} {}".format(self.state, self.bitShifter,
self.microSecTimeStamp, self.byteFifo)
class MbusPhyStruct(ctypes.Structure):
_fields_ = [
("tx", MbusPhyTxRxStruct),
("rx", MbusPhyTxRxStruct),
]
def __str__(self):
return "tx {} / rx {}".format(self.tx, self.rx)
class MbusLinkStruct(ctypes.Structure):
_fields_ = [
("rxMsgFifo", fifo),
("txMsgFifo", fifo),
("nibbles", nibbles.MbusRawNibbleListStruct),
("rxNotTxMode", ctypes.c_bool),
("phyTxNibbleFifo", ctypes.POINTER(fifo)),
]
def __str__(self):
return "tx {} / rx {} / nibbles {}".format(self.txMsgFifo, self.rxMsgFifo, self.nibbles)
# ERROR IDS
ERR_ID_SIGNAL = 0x1
ERR_ID_LENGTH = 0x2
ERR_ID_CHECKSUM = 0x3
ERR_ID_DIRECTION = 0x4
ERR_ID_UNKNOWN_TYPE = 0x5
# MSG IDS
MSGTYPE_ping = 0x10
MSGTYPE_setPlayState = 0x11
MSGTYPE_headPowerOn = 0x12
MSGTYPE_setDiskTrack = 0x13
MSGTYPE_setMode = 0x14
MSGTYPE_pong = 0x20
MSGTYPE_ackWait = 0x21
MSGTYPE_cdPowerOn = 0x22
MSGTYPE_changing = 0x23
MSGTYPE_playState = 0x24
MSGTYPE_diskInfo = 0x25
MSGTYPE_unknownStatus = 0x26
# HEAD2CD MSG BODY
class MbusMsg_setPlayState_BodyStruct(ctypes.Structure):
_fields_ = [
("resume", ctypes.c_bool),
("stop", ctypes.c_bool),
("scanStop", ctypes.c_bool),
("fastReverse", ctypes.c_bool),
("fastForward", ctypes.c_bool),
("pause", ctypes.c_bool),
("play", ctypes.c_bool),
]
def __str__(self):
argTuple = (self.resume, self.stop, self.scanStop,
self.fastReverse, self.fastForward, self.pause, self.play)
fmtStr = "resume{:d} stop{:d} scanStop{:d} fastReverse{:d} fastForward{:d} pause{:d} play{:d}"
return fmtStr.format(*argTuple)
class MbusMsg_setDiskTrack_BodyStruct(ctypes.Structure):
_fields_ = [
("disk", ctypes.c_uint8),
("track", ctypes.c_uint8),
("pause", ctypes.c_bool),
("play", ctypes.c_bool),
("random", ctypes.c_bool),
]
def __str__(self):
argTuple = self.disk, self.track, self.pause, self.play, self.random
fmtStr = "disk#{} track:{:02d} pause{:d} play{:d} random{:d}"
return fmtStr.format(*argTuple)
class MbusMsg_setMode_BodyStruct(ctypes.Structure):
_fields_ = [
("", ctypes.c_bool),
("repeatAll", ctypes.c_bool),
("repeatOne", ctypes.c_bool),
("introScan", ctypes.c_bool),
("random", ctypes.c_bool),
]
def __str__(self):
argTuple = (self.repeatAll, self.repeatOne, self.introScan, self.random, )
fmtStr = "repeatAll{:d} repeatOne{:d} introScan{:d} random{:d}"
return fmtStr.format(*argTuple)
# CD2HEAD MSG BODY
class MbusMsg_changing_BodyStruct(ctypes.Structure):
_fields_ = [
("disk", ctypes.c_uint8),
("track", ctypes.c_uint8),
("eject", ctypes.c_bool),
("noshuttle", ctypes.c_bool),
("busy", ctypes.c_bool),
("repeatAll", ctypes.c_bool),
("repeatOne", ctypes.c_bool),
("random", ctypes.c_bool),
("done", ctypes.c_bool),
]
def __str__(self):
argTuple = (self.disk, self.track, self.eject, self.noshuttle, self.busy, self.repeatAll,
self.repeatOne, self.random, self.done, )
fmtStr = "disk#{} Trk{:02d} eject{:d} noshuttle{:d} busy{:d} repeatAll{:d} repeatOne{:d} random{:d} done{:d}"
return fmtStr.format(*argTuple)
class MbusMsg_playState_BodyStruct(ctypes.Structure):
_fields_ = [
("track", ctypes.c_uint8),
("index", ctypes.c_uint8),
("minute", ctypes.c_uint8),
("second", ctypes.c_uint8),
("repeatAll", ctypes.c_bool),
("repeatOne", ctypes.c_bool),
("introScan", ctypes.c_bool),
("random", ctypes.c_bool),
("stopped", ctypes.c_bool),
("paused", ctypes.c_bool),
("play", ctypes.c_bool),
]
def __str__(self):
argTuple = (self.track, self.index, self.minute, self.second, self.repeatAll,
self.repeatOne, self.introScan, self.random, self.stopped, self.paused, self.play, )
fmtStr = ("Trk:Idx {:02}:{:02} min:sec {:02}:{:02} repeatAll{:d} " +
"repeatOne{:d} introScan{:d} random{:d} stop{:d} pause{:d} play{:d}")
return fmtStr.format(*argTuple)
class MbusMsg_diskInfo_BodyStruct(ctypes.Structure):
_fields_ = [
("disk", ctypes.c_uint8),
("tracks", ctypes.c_uint8),
("minutes", ctypes.c_uint8),
("seconds", ctypes.c_uint8),
("flags", ctypes.c_uint8),
]
def __str__(self):
argTuple = (self.disk, self.tracks, self.minutes, self.seconds, self.flags)
fmtStr = "disk #{} tracks {:02d} min:sec {:02d}:{:02d} flags {:x}"
return fmtStr.format(*argTuple)
#
class MbusMsgBodyUnion(ctypes.Union):
_fields_ = [
("setPlayState", MbusMsg_setPlayState_BodyStruct),
("setDiskTrack", MbusMsg_setDiskTrack_BodyStruct),
("setMode", MbusMsg_setMode_BodyStruct),
("changing", MbusMsg_changing_BodyStruct),
("playState", MbusMsg_playState_BodyStruct),
("diskInfo", MbusMsg_diskInfo_BodyStruct),
]
class MbusMsgParsedStruct(ctypes.Structure):
_fields_ = [
("directionH2C", ctypes.c_bool),
("msgType", ctypes.c_uint8), # contains msg type or err type
("body", MbusMsgBodyUnion),
]
def __str__(self):
if self.directionH2C:
dirStr = "H2C"
else:
dirStr = "C2H"
bodyStr = ""
if self.msgType == MSGTYPE_ping:
# FUTURE: bodyStr = str(self.body.ping)
msgTypeStr = "ping"
elif self.msgType == MSGTYPE_setPlayState:
bodyStr = str(self.body.setPlayState)
msgTypeStr = "setPlayState"
elif self.msgType == MSGTYPE_setDiskTrack:
bodyStr = str(self.body.setDiskTrack)
msgTypeStr = "setDiskTrack"
elif self.msgType == MSGTYPE_setMode:
bodyStr = str(self.body.setMode)
msgTypeStr = "setMode"
elif self.msgType == MSGTYPE_headPowerOn:
# FUTURE: bodyStr = str(self.body.headPowerOn)
msgTypeStr = "headPowerOn"
elif self.msgType == MSGTYPE_pong:
# FUTURE? bodyStr = str(self.body.pong)
msgTypeStr = "pong"
elif self.msgType == MSGTYPE_ackWait:
# FUTURE: bodyStr = str(self.body.ackWait)
msgTypeStr = "ackWait"
elif self.msgType == MSGTYPE_cdPowerOn:
# FUTURE: bodyStr = str(self.body.cdPowerOn)
msgTypeStr = "cdPowerOn"
elif self.msgType == MSGTYPE_changing:
bodyStr = str(self.body.changing)
msgTypeStr = "changing"
elif self.msgType == MSGTYPE_playState:
bodyStr = str(self.body.playState)
msgTypeStr = "playState"
elif self.msgType == MSGTYPE_diskInfo:
bodyStr = str(self.body.diskInfo)
msgTypeStr = "diskInfo"
elif self.msgType == MSGTYPE_unknownStatus:
# FUTURE: bodyStr = str(self.body.unknownStatus)
msgTypeStr = "unknownStatus"
else:
msgTypeStr = "STRIFY ERR"
return "{} {:15} {}".format(dirStr, msgTypeStr, bodyStr)
class MbusTxMsgStruct(ctypes.Structure):
_fields_ = [
("nibbles", nibbles.MbusRawNibbleListStruct),
]
def __str__(self):
return str(self.nibbles)
class MbusRxMsgStruct(ctypes.Structure):
_fields_ = [
("errId", ctypes.c_uint8), # 0 = no error
("parsed", MbusMsgParsedStruct), # if not an error, parsed contents here
("rawNibbles", nibbles.MbusRawNibbleListStruct),
]
def __str__(self):
if self.errId == ERR_ID_SIGNAL:
content = "ERROR: Bad bus signaling"
elif self.errId == ERR_ID_LENGTH:
content = "ERROR: Bad message length"
elif self.errId == ERR_ID_CHECKSUM:
content = "ERROR: CheckSum"
elif self.errId == ERR_ID_DIRECTION:
content = "ERROR: Unexpected Direction"
elif self.errId == ERR_ID_UNKNOWN_TYPE:
content = "ERROR: Unknown message type"
else:
content = str(self.parsed)
return "{:18} {}".format(str(self.rawNibbles), content)
if __name__ == '__main__':
print("Library only.")
|
StarcoderdataPython
|
3357873
|
<filename>eternity_backend_server/blueprints/admin/admin.py
# -*- coding: utf-8 -*-
from flask import (
Blueprint,
current_app,
flash,
redirect,
render_template,
request,
url_for,
abort
)
from flask_login import login_required, login_user, logout_user, current_user
from eternity_backend_server.utils import flash_errors, redirect_back
from eternity_backend_server.extensions import db
admin_bp = Blueprint("admin", __name__)
|
StarcoderdataPython
|
1624952
|
import numpy
import matplotlib.pyplot as plot
def relu(arr):
return numpy.maximum(0, arr)
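# Example: relu(numpy.array([-3.0, 0.0, 2.5])) -> array([0., 0., 2.5])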
x = numpy.arange(-10, 10, 0.1)
y = relu(x)
plot.plot(x, y, label="Sigmoid function")
plot.xlabel('x')
plot.ylabel('y')
plot.show()
|
StarcoderdataPython
|
3381310
|
<filename>setup.py
#!/usr/bin/env python
from setuptools import setup, find_packages
# Get version string
with open('gdx2py/version.py') as f:
exec(f.read())
with open("README.md", "r") as f:
readme = f.read()
setup(
name='GDX2py',
version=__version__, # pylint: disable=undefined-variable
author='<NAME>',
author_email='<EMAIL>',
description='Read and write GAMS Data eXchange (GDX) files using Python',
long_description=readme,
long_description_content_type="text/markdown",
python_requires='>=3.6',
install_requires=['gdxcc>=7'],
setup_requires=['pytest-runner'],
tests_require=['pytest', 'pytest-datadir', 'pandas'],
url='https://github.com/ererkka/GDX2py',
packages=find_packages(exclude=['tests']),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering",
"Intended Audience :: Developers",
"Development Status :: 5 - Production/Stable",
],
license="MIT",
)
|
StarcoderdataPython
|
1652612
|
from django.shortcuts import render
from django.http import HttpResponse
def index(request):
return HttpResponse("Testing basic Http Response in form views.py")
# Create your views here.
|
StarcoderdataPython
|
25142
|
#!/usr/bin/env python
# Lint as: python3
"""E2E tests for the timeline flow."""
import csv
import io
from typing import Sequence
from typing import Text
from absl.testing import absltest
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.util import temp
from grr_response_proto.api import timeline_pb2
from grr_response_test.end_to_end_tests import test_base
class TestTimelineLinux(test_base.EndToEndTest):
"""A class with Linux-specific timeline tests."""
platforms = [test_base.EndToEndTest.Platform.LINUX]
def testUsrBin(self):
args = self.grr_api.types.CreateFlowArgs("TimelineFlow")
args.root = "/bin/".encode("utf-8")
flow = self.RunFlowAndWait("TimelineFlow", args=args)
with temp.AutoTempFilePath(suffix=".body") as temp_filepath:
timeline_format = timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
body = flow.GetCollectedTimeline(timeline_format)
body.WriteToFile(temp_filepath)
with io.open(temp_filepath, mode="r", encoding="utf-8") as temp_filedesc:
entries = list(csv.reader(temp_filedesc, delimiter="|"))
paths = [entry[1] for entry in entries]
self.assertIn("/bin/bash", paths)
self.assertIn("/bin/cat", paths)
self.assertIn("/bin/chmod", paths)
self.assertIn("/bin/cp", paths)
self.assertIn("/bin/rm", paths)
self.assertIn("/bin/sleep", paths)
for entry in entries:
assertBodyEntrySanity(self, entry)
class TestTimelineWindows(test_base.EndToEndTest):
"""A class with Windows-specific timeline tests."""
platforms = [test_base.EndToEndTest.Platform.WINDOWS]
def testWindows(self):
args = self.grr_api.types.CreateFlowArgs("TimelineFlow")
args.root = "C:\\Windows".encode("utf-8")
flow = self.RunFlowAndWait("TimelineFlow", args=args)
with temp.AutoTempFilePath(suffix=".body") as temp_filepath:
timeline_format = timeline_pb2.ApiGetCollectedTimelineArgs.Format.BODY
body = flow.GetCollectedTimeline(timeline_format)
body.WriteToFile(temp_filepath)
with io.open(temp_filepath, mode="r", encoding="utf-8") as temp_filedesc:
entries = list(csv.reader(temp_filedesc, delimiter="|"))
paths = [entry[1].lower() for entry in entries]
self.assertIn("C:\\Windows\\explorer.exe".lower(), paths)
self.assertIn("C:\\Windows\\notepad.exe".lower(), paths)
self.assertIn("C:\\Windows\\regedit.exe".lower(), paths)
self.assertIn("C:\\Windows\\System32\\dwm.exe".lower(), paths)
for entry in entries:
assertBodyEntrySanity(self, entry)
def testWindowsBackslashEscape(self):
args = self.grr_api.types.CreateFlowArgs("TimelineFlow")
args.root = "C:\\Windows".encode("utf-8")
flow = self.RunFlowAndWait("TimelineFlow", args=args)
with temp.AutoTempFilePath(suffix=".body") as temp_filepath:
body = flow.GetCollectedTimelineBody(backslash_escape=True)
body.WriteToFile(temp_filepath)
with io.open(temp_filepath, mode="r", encoding="utf-8") as temp_filedesc:
content = temp_filedesc.read().lower()
self.assertIn("|C:\\\\Windows\\\\explorer.exe|".lower(), content)
self.assertIn("|C:\\\\Windows\\\\notepad.exe|".lower(), content)
self.assertIn("|C:\\\\Windows\\\\regedit.exe|".lower(), content)
self.assertIn("|C:\\\\Windows\\\\System32\\\\dwm.exe|".lower(), content)
def assertBodyEntrySanity( # pylint: disable=invalid-name
test: absltest.TestCase,
entry: Sequence[Text],
) -> None:
"""Asserts that given row of a body file is sane."""
# Size should be non-negative (some files might be empty, though).
test.assertGreaterEqual(int(entry[6]), 0)
# All timestamps should be positive.
test.assertGreater(int(entry[7]), 0)
test.assertGreater(int(entry[8]), 0)
test.assertGreater(int(entry[9]), 0)
# All timestamps should be older than now.
now = rdfvalue.RDFDatetime.Now()
test.assertLessEqual(int(entry[7]), now.AsSecondsSinceEpoch())
test.assertLessEqual(int(entry[8]), now.AsSecondsSinceEpoch())
test.assertLessEqual(int(entry[9]), now.AsSecondsSinceEpoch())
|
StarcoderdataPython
|
1652371
|
from PIL import Image
import os
basepath = r'fashionshop\static\img\product\Fragrance'
# os.chdir(basepath)
def save_img(img):
i = Image.open(os.path.join(basepath,img))
t, f_ext = os.path.splitext(i.filename)
text = t.replace("-"," ")
f = text + f_ext
print('infor:',img, i.format, i.size, i.mode)
if i.mode == 'RGBA':
i = i.convert('RGB')
output = (264,363)
i.thumbnail(output, Image.ANTIALIAS)
# i = i.resize(output, Image.ANTIALIAS)
i.save(f, "JPEG")
print('infor changed:',img, i.format, i.size, i.mode)
pass
for img in os.listdir(basepath):
save_img(img)
|
StarcoderdataPython
|
162045
|
import os
from subaligner.predictor import Predictor
from subaligner.subtitle import Subtitle
if __name__ == "__main__":
examples_dir = os.path.dirname(os.path.abspath(__file__))
output_dir = os.path.join(examples_dir, "tmp")
os.makedirs(output_dir, exist_ok=True)
video_file_path = os.path.join(examples_dir, "..", "tests/subaligner/resource/test.mp4")
srt_file_path = os.path.join(examples_dir, "..", "tests/subaligner/resource/test.srt")
predictor = Predictor()
subs, audio_file_path, voice_probabilities, frame_rate = predictor.predict_single_pass(video_file_path, srt_file_path)
aligned_subtitle_path = os.path.join(output_dir, "test_aligned_1.srt")
Subtitle.export_subtitle(srt_file_path, subs, aligned_subtitle_path)
print("Aligned subtitle saved to: {}".format(aligned_subtitle_path))
log_loss = predictor.get_log_loss(voice_probabilities, subs)
print("Alignment finished with overall loss: {}".format(log_loss))
subs_list, subs, voice_probabilities, frame_rate = predictor.predict_dual_pass(video_file_path, srt_file_path, stretch=False)
aligned_subtitle_path = os.path.join(output_dir, "test_aligned_2.srt")
Subtitle.export_subtitle(srt_file_path, subs_list, aligned_subtitle_path)
print("Aligned subtitle saved to: {}".format(aligned_subtitle_path))
log_loss = predictor.get_log_loss(voice_probabilities, subs)
print("Alignment finished with overall loss: {}".format(log_loss))
|
StarcoderdataPython
|
3221604
|
<gh_stars>0
# Parameter:
# config-file: path to cfg file
# weight_path: path to the pretrained weight
# dataset_path: path to a directory of images
# This script predicts bboxes for every image in the dataset path and
# writes the predicted boxes out as YOLO-format .txt files
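# Example invocation (script name and paths are placeholders, not actual files):
#   python predict_folder.py \
#       --config-file configs/quick_schedules/e2e_mask_rcnn_R_50_FPN_inference_acc_test.yaml \
#       --dataset_folder /path/to/images \
#       --confidence-threshold 0.5 \
#       --opts MODEL.WEIGHTS /path/to/model_final.pth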
import argparse
import glob
import multiprocessing as mp
import os
import time
import cv2
import tqdm
from detectron2.config import get_cfg
from detectron2.data.detection_utils import read_image
from detectron2.utils.logger import setup_logger
from predictor import VisualizationDemo
def setup_cfg(args):
# load config from file and command-line arguments
cfg = get_cfg()
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
# Set score_threshold for builtin models
cfg.MODEL.RETINANET.SCORE_THRESH_TEST = args.confidence_threshold
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = args.confidence_threshold
cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = args.confidence_threshold
cfg.freeze()
return cfg
def get_parser():
parser = argparse.ArgumentParser(description="Detectron2 predict whole folder")
parser.add_argument(
"--config-file",
default="configs/quick_schedules/e2e_mask_rcnn_R_50_FPN_inference_acc_test.yaml",
metavar="FILE",
help="path to config file",
)
parser.add_argument("--dataset_folder", help="A folder of images to predict")
parser.add_argument(
"--output",
help="A file or directory to save output visualizations. "
"If not given, will show output in an OpenCV window.",
)
parser.add_argument(
"--confidence-threshold",
type=float,
default=0.5,
help="Minimum score for instance predictions to be shown",
)
parser.add_argument(
"--opts",
help="Modify model config options using the command-line",
default=[],
nargs=argparse.REMAINDER,
)
return parser
if __name__ == "__main__":
mp.set_start_method("spawn", force=True)
args = get_parser().parse_args()
logger = setup_logger()
logger.info("Arguments: " + str(args))
cfg = setup_cfg(args)
demo = VisualizationDemo(cfg)
# `interested_cls` is used below to filter detections but is not defined anywhere in this
# script; declare it here as a placeholder -- fill in the class ids that should be kept.
interested_cls = []
while True:
#for each_img in glob.glob(os.path.join(args.dataset_folder, "*.jpg")):
# use PIL, to be consistent with evaluation
each_img = "/home/cambricon/Cambricon-MLU100/datasets_old/Tsinghua_traffic_sign/test_imgs/41372.jpg"
each_img = "/home/cambricon/Cambricon-MLU100/datasets_old/COCO/interested_val/000000011197.jpg"
img = read_image(each_img, format="BGR")
start_time = time.time()
predictions, visualized_output = demo.run_on_image(img)
logger.info(
"{}: detected {} instances in {:.2f}s".format(
each_img, len(predictions["instances"]), time.time() - start_time
)
)
pred_classes = predictions["instances"].pred_classes.cpu().numpy()
pred_boxes = predictions["instances"].pred_boxes.tensor.cpu().numpy()
records = []
print(pred_boxes)
for each_cls, each_box in zip(pred_classes, pred_boxes):
if int(each_cls) in interested_cls:
cls_id = interested_cls.index(int(each_cls))
[x1, y1, x2, y2] = each_box
w, h = x2 - x1, y2 - y1
(img_h, img_w) = predictions["instances"].image_size
x_center = (x1 + w/2)/img_w
y_center = (y1 + h/2)/img_h
w /= img_w
h /= img_h
records.append(" ".join([str(x) for x in [cls_id, x_center, y_center, w, h]]))
break
#each_txt = each_img.replace(".jpg", ".txt")
#txt_writer = open(each_txt, "a+")
#txt_writer.write("\n".join(records) + "\n")
|
StarcoderdataPython
|
1760424
|
#!/usr/bin/env python3
# Mikhail (myke) Kolodin
# testing redis and redis-queue (rq)
# from http://python-rq.org/ etc
# 2016-02-04 2018-05-05 1.4
import redis
import requests
from redis import Redis
from rq import Queue
# test redis itfself
r = redis.StrictRedis(host='localhost', port=6379, db=0)
r.set('foo', 'bar')
x = r.get('foo')
print (x)
q = Queue(connection=Redis())
from rq_my_module import count_words_at_url
result = q.enqueue (count_words_at_url, 'http://nvie.com')
print (result)
# my_module.py is:
#~ #!/usr/bin/env python3
#~ import requests
#~ from redis import Redis
#~ from rq import Queue
#~ # test redis queue (rq))
#~ def count_words_at_url(url):
#~ resp = requests.get(url)
#~ return len(resp.text.split())
# in parallel, the worker runs:
#~ $ rq worker
#~ *** Listening for work on default
#~ Got count_words_at_url('http://nvie.com') from default
#~ Job result = 818
#~ *** Listening for work on default
|
StarcoderdataPython
|
3397105
|
import pandas as pd
from sqlalchemy import create_engine

# read_csv needs the raw file, so use the raw-content form of the GitHub blob URL
df = pd.read_csv('https://raw.githubusercontent.com/techthumb1/DS-Unit-3-Sprint-2-SQL-and-Databases/master/module1-introduction-to-sql/buddymove_holidayiq.csv')
con = create_engine('sqlite:///buddymove.sqlite3')  # assumed local SQLite target; adjust as needed
df.to_sql('Buddy Move', con)
|
StarcoderdataPython
|
4836765
|
<gh_stars>0
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API over the neutron LBaaS v2 service.
"""
from six.moves import _thread as thread
from time import sleep
from django.views import generic
from horizon import conf
from openstack_dashboard.api import neutron
from openstack_dashboard.api.rest import urls
from openstack_dashboard.api.rest import utils as rest_utils
neutronclient = neutron.neutronclient
def poll_loadbalancer_status(request, loadbalancer_id, callback,
from_state='PENDING_UPDATE', to_state='ACTIVE',
callback_kwargs=None):
"""Poll for the status of the load balancer.
Polls for the status of the load balancer and calls a function when the
status changes to a specified state.
:param request: django request object
:param loadbalancer_id: id of the load balancer to poll
:param callback: function to call when polling is complete
:param from_state: initial expected state of the load balancer
:param to_state: state to check for
:param callback_kwargs: kwargs to pass into the callback function
"""
interval = conf.HORIZON_CONFIG['ajax_poll_interval'] / 1000.0
status = from_state
while status == from_state:
sleep(interval)
lb = neutronclient(request).show_loadbalancer(
loadbalancer_id).get('loadbalancer')
status = lb['provisioning_status']
if status == to_state:
kwargs = {'loadbalancer_id': loadbalancer_id}
if callback_kwargs:
kwargs.update(callback_kwargs)
callback(request, **kwargs)
def create_loadbalancer(request):
data = request.DATA
spec = {
'vip_subnet_id': data['loadbalancer']['subnet']
}
if data['loadbalancer'].get('name'):
spec['name'] = data['loadbalancer']['name']
if data['loadbalancer'].get('description'):
spec['description'] = data['loadbalancer']['description']
if data['loadbalancer'].get('ip'):
spec['vip_address'] = data['loadbalancer']['ip']
loadbalancer = neutronclient(request).create_loadbalancer(
{'loadbalancer': spec}).get('loadbalancer')
if data.get('listener'):
# There is work underway to add a new API to LBaaS v2 that will
# allow us to pass in all information at once. Until that is
# available we use a separate thread to poll for the load
# balancer status and create the other resources when it becomes
# active.
args = (request, loadbalancer['id'], create_listener)
kwargs = {'from_state': 'PENDING_CREATE'}
thread.start_new_thread(poll_loadbalancer_status, args, kwargs)
return loadbalancer
def create_listener(request, **kwargs):
"""Create a new listener.
"""
data = request.DATA
listenerSpec = {
'protocol': data['listener']['protocol'],
'protocol_port': data['listener']['port'],
'loadbalancer_id': kwargs['loadbalancer_id']
}
if data['listener'].get('name'):
listenerSpec['name'] = data['listener']['name']
if data['listener'].get('description'):
listenerSpec['description'] = data['listener']['description']
if data.get('certificates'):
listenerSpec['default_tls_container_ref'] = data['certificates'][0]
listener = neutronclient(request).create_listener(
{'listener': listenerSpec}).get('listener')
if data.get('pool'):
args = (request, kwargs['loadbalancer_id'], create_pool)
kwargs = {'callback_kwargs': {'listener_id': listener['id']}}
thread.start_new_thread(poll_loadbalancer_status, args, kwargs)
return listener
def create_pool(request, **kwargs):
"""Create a new pool.
"""
data = request.DATA
poolSpec = {
'protocol': data['pool']['protocol'],
'lb_algorithm': data['pool']['method'],
'listener_id': kwargs['listener_id']
}
if data['pool'].get('name'):
poolSpec['name'] = data['pool']['name']
if data['pool'].get('description'):
poolSpec['description'] = data['pool']['description']
pool = neutronclient(request).create_lbaas_pool(
{'pool': poolSpec}).get('pool')
if data.get('members'):
args = (request, kwargs['loadbalancer_id'], add_member)
kwargs = {'callback_kwargs': {'pool_id': pool['id'],
'index': 0}}
thread.start_new_thread(poll_loadbalancer_status, args, kwargs)
elif data.get('monitor'):
args = (request, kwargs['loadbalancer_id'], create_health_monitor)
kwargs = {'callback_kwargs': {'pool_id': pool['id']}}
thread.start_new_thread(poll_loadbalancer_status, args, kwargs)
return pool
def create_health_monitor(request, **kwargs):
"""Create a new health monitor for a pool.
"""
data = request.DATA
monitorSpec = {
'type': data['monitor']['type'],
'delay': data['monitor']['interval'],
'timeout': data['monitor']['timeout'],
'max_retries': data['monitor']['retry'],
'pool_id': kwargs['pool_id']
}
if data['monitor'].get('method'):
monitorSpec['http_method'] = data['monitor']['method']
if data['monitor'].get('path'):
monitorSpec['url_path'] = data['monitor']['path']
if data['monitor'].get('status'):
monitorSpec['expected_codes'] = data['monitor']['status']
return neutronclient(request).create_lbaas_healthmonitor(
{'healthmonitor': monitorSpec}).get('healthmonitor')
def add_member(request, **kwargs):
"""Add a member to a pool.
"""
data = request.DATA
members = data.get('members')
pool_id = kwargs.get('pool_id')
if kwargs.get('members_to_add'):
members_to_add = kwargs['members_to_add']
index = [members.index(member) for member in members
if member['id'] == members_to_add[0]][0]
loadbalancer_id = data.get('loadbalancer_id')
else:
index = kwargs.get('index')
loadbalancer_id = kwargs.get('loadbalancer_id')
member = members[index]
memberSpec = {
'address': member['address'],
'protocol_port': member['port'],
'subnet_id': member['subnet']
}
if member.get('weight'):
memberSpec['weight'] = member['weight']
member = neutronclient(request).create_lbaas_member(
pool_id, {'member': memberSpec}).get('member')
index += 1
if kwargs.get('members_to_add'):
args = (request, loadbalancer_id, update_member_list)
members_to_add = kwargs['members_to_add']
members_to_add.pop(0)
kwargs = {'callback_kwargs': {
'existing_members': kwargs.get('existing_members'),
'members_to_add': members_to_add,
'members_to_delete': kwargs.get('members_to_delete'),
'pool_id': pool_id}}
thread.start_new_thread(poll_loadbalancer_status, args, kwargs)
elif len(members) > index:
args = (request, loadbalancer_id, add_member)
kwargs = {'callback_kwargs': {'pool_id': pool_id,
'index': index}}
thread.start_new_thread(poll_loadbalancer_status, args, kwargs)
elif data.get('monitor'):
args = (request, loadbalancer_id, create_health_monitor)
kwargs = {'callback_kwargs': {'pool_id': pool_id}}
thread.start_new_thread(poll_loadbalancer_status, args, kwargs)
return member
def remove_member(request, **kwargs):
"""Remove a member from the pool.
"""
data = request.DATA
loadbalancer_id = data.get('loadbalancer_id')
pool_id = kwargs.get('pool_id')
if kwargs.get('members_to_delete'):
members_to_delete = kwargs['members_to_delete']
member_id = members_to_delete.pop(0)
neutronclient(request).delete_lbaas_member(member_id, pool_id)
args = (request, loadbalancer_id, update_member_list)
kwargs = {'callback_kwargs': {
'existing_members': kwargs.get('existing_members'),
'members_to_add': kwargs.get('members_to_add'),
'members_to_delete': members_to_delete}}
thread.start_new_thread(poll_loadbalancer_status, args, kwargs)
def update_loadbalancer(request, **kwargs):
"""Update a load balancer.
"""
data = request.DATA
spec = {}
loadbalancer_id = kwargs.get('loadbalancer_id')
if data['loadbalancer'].get('name'):
spec['name'] = data['loadbalancer']['name']
if data['loadbalancer'].get('description'):
spec['description'] = data['loadbalancer']['description']
return neutronclient(request).update_loadbalancer(
loadbalancer_id, {'loadbalancer': spec}).get('loadbalancer')
def update_listener(request, **kwargs):
"""Update a listener.
"""
data = request.DATA
listener_spec = {}
listener_id = data['listener'].get('id')
loadbalancer_id = data.get('loadbalancer_id')
if data['listener'].get('name'):
listener_spec['name'] = data['listener']['name']
if data['listener'].get('description'):
listener_spec['description'] = data['listener']['description']
if data.get('certificates'):
listener_spec['default_tls_container_ref'] = data['certificates'][0]
listener = neutronclient(request).update_listener(
listener_id, {'listener': listener_spec}).get('listener')
if data.get('pool'):
args = (request, loadbalancer_id, update_pool)
thread.start_new_thread(poll_loadbalancer_status, args)
return listener
def update_pool(request, **kwargs):
"""Update a pool.
"""
data = request.DATA
pool_spec = {}
pool_id = data['pool'].get('id')
loadbalancer_id = data.get('loadbalancer_id')
if data['pool'].get('name'):
pool_spec['name'] = data['pool']['name']
if data['pool'].get('description'):
pool_spec['description'] = data['pool']['description']
pools = neutronclient(request).update_lbaas_pool(
pool_id, {'pool': pool_spec}).get('pools')
# Assemble the lists of member id's to add and remove, if any exist
tenant_id = request.user.project_id
request_member_data = data.get('members', [])
existing_members = neutronclient(request).list_lbaas_members(
pool_id, tenant_id=tenant_id).get('members')
(members_to_add, members_to_delete) = get_members_to_add_remove(
request_member_data, existing_members)
if members_to_add or members_to_delete:
args = (request, loadbalancer_id, update_member_list)
kwargs = {'callback_kwargs': {'existing_members': existing_members,
'members_to_add': members_to_add,
'members_to_delete': members_to_delete,
'pool_id': pool_id}}
thread.start_new_thread(poll_loadbalancer_status, args, kwargs)
elif data.get('monitor'):
args = (request, loadbalancer_id, update_monitor)
thread.start_new_thread(poll_loadbalancer_status, args)
return pools
def update_monitor(request, **kwargs):
"""Update a health monitor.
"""
data = request.DATA
monitor_spec = {}
monitor_id = data['monitor']['id']
if data['monitor'].get('interval'):
monitor_spec['delay'] = data['monitor']['interval']
if data['monitor'].get('timeout'):
monitor_spec['timeout'] = data['monitor']['timeout']
if data['monitor'].get('retry'):
monitor_spec['max_retries'] = data['monitor']['retry']
if data['monitor'].get('method'):
monitor_spec['http_method'] = data['monitor']['method']
if data['monitor'].get('path'):
monitor_spec['url_path'] = data['monitor']['path']
if data['monitor'].get('status'):
monitor_spec['expected_codes'] = data['monitor']['status']
healthmonitor = neutronclient(request).update_lbaas_healthmonitor(
monitor_id, {'healthmonitor': monitor_spec}).get('healthmonitor')
return healthmonitor
def update_member_list(request, **kwargs):
"""Update the list of members by adding or removing the necessary members.
"""
data = request.DATA
loadbalancer_id = data.get('loadbalancer_id')
pool_id = kwargs.get('pool_id')
existing_members = kwargs.get('existing_members')
members_to_add = kwargs.get('members_to_add')
members_to_delete = kwargs.get('members_to_delete')
if members_to_delete:
kwargs = {'existing_members': existing_members,
'members_to_add': members_to_add,
'members_to_delete': members_to_delete,
'pool_id': pool_id}
remove_member(request, **kwargs)
elif members_to_add:
kwargs = {'existing_members': existing_members,
'members_to_add': members_to_add,
'members_to_delete': members_to_delete,
'pool_id': pool_id}
add_member(request, **kwargs)
elif data.get('monitor'):
args = (request, loadbalancer_id, update_monitor)
thread.start_new_thread(poll_loadbalancer_status, args)
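# Compare the member list in the request against the members that already exist
# on the pool and return the ids that need to be added and removed.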
def get_members_to_add_remove(request_member_data, existing_members):
new_member_ids = [member['id'] for member in request_member_data]
existing_member_ids = [member['id'] for member in existing_members]
members_to_add = [member_id for member_id in new_member_ids
if member_id not in existing_member_ids]
members_to_delete = [member_id for member_id in existing_member_ids
if member_id not in new_member_ids]
return members_to_add, members_to_delete
def add_floating_ip_info(request, loadbalancers):
"""Add floating IP address info to each load balancer.
"""
floating_ips = neutron.tenant_floating_ip_list(request)
for lb in loadbalancers:
floating_ip = {}
associated_ip = next((fip for fip in floating_ips
if fip['fixed_ip'] == lb['vip_address']), None)
if associated_ip is not None:
floating_ip['id'] = associated_ip['id']
floating_ip['ip'] = associated_ip['ip']
lb['floating_ip'] = floating_ip
@urls.register
class LoadBalancers(generic.View):
"""API for load balancers.
"""
url_regex = r'lbaas/loadbalancers/$'
@rest_utils.ajax()
def get(self, request):
"""List load balancers for current project.
The listing result is an object with property "items".
"""
tenant_id = request.user.project_id
loadbalancers = neutronclient(request).list_loadbalancers(
tenant_id=tenant_id).get('loadbalancers')
if request.GET.get('full') and neutron.floating_ip_supported(request):
add_floating_ip_info(request, loadbalancers)
return {'items': loadbalancers}
@rest_utils.ajax()
def post(self, request):
"""Create a new load balancer.
Creates a new load balancer as well as other optional resources such as
a listener, pool, monitor, etc.
"""
return create_loadbalancer(request)
@urls.register
class LoadBalancerStatusTree(generic.View):
"""API for retrieving the resource status tree for a single load balancer.
"""
url_regex = r'lbaas/loadbalancers/(?P<loadbalancer_id>[^/]+)/statuses/$'
@rest_utils.ajax()
def get(self, request, loadbalancer_id):
"""Get the status tree for a specific load balancer.
http://localhost/api/lbaas/loadbalancers/cc758c90-3d98-4ea1-af44-aab405c9c915/statuses
"""
return neutronclient(request).retrieve_loadbalancer_status(
loadbalancer_id)
@urls.register
class LoadBalancer(generic.View):
"""API for retrieving, updating, and deleting a single load balancer.
"""
url_regex = r'lbaas/loadbalancers/(?P<loadbalancer_id>[^/]+)/$'
@rest_utils.ajax()
def get(self, request, loadbalancer_id):
"""Get a specific load balancer.
http://localhost/api/lbaas/loadbalancers/cc758c90-3d98-4ea1-af44-aab405c9c915
"""
loadbalancer = neutronclient(request).show_loadbalancer(
loadbalancer_id).get('loadbalancer')
if request.GET.get('full') and neutron.floating_ip_supported(request):
add_floating_ip_info(request, [loadbalancer])
return loadbalancer
@rest_utils.ajax()
def put(self, request, loadbalancer_id):
"""Edit a load balancer.
"""
kwargs = {'loadbalancer_id': loadbalancer_id}
update_loadbalancer(request, **kwargs)
@rest_utils.ajax()
def delete(self, request, loadbalancer_id):
"""Delete a specific load balancer.
http://localhost/api/lbaas/loadbalancers/cc758c90-3d98-4ea1-af44-aab405c9c915
"""
neutronclient(request).delete_loadbalancer(loadbalancer_id)
@urls.register
class Listeners(generic.View):
"""API for load balancer listeners.
"""
url_regex = r'lbaas/listeners/$'
@rest_utils.ajax()
def get(self, request):
"""List of listeners for the current project.
The listing result is an object with property "items".
"""
loadbalancer_id = request.GET.get('loadbalancerId')
tenant_id = request.user.project_id
result = neutronclient(request).list_listeners(tenant_id=tenant_id)
listener_list = result.get('listeners')
if loadbalancer_id:
listener_list = self._filter_listeners(listener_list,
loadbalancer_id)
return {'items': listener_list}
@rest_utils.ajax()
def post(self, request):
"""Create a new listener.
Creates a new listener as well as other optional resources such as
a pool, members, and health monitor.
"""
kwargs = {'loadbalancer_id': request.DATA.get('loadbalancer_id')}
return create_listener(request, **kwargs)
def _filter_listeners(self, listener_list, loadbalancer_id):
filtered_listeners = []
for listener in listener_list:
if listener['loadbalancers'][0]['id'] == loadbalancer_id:
filtered_listeners.append(listener)
return filtered_listeners
@urls.register
class Listener(generic.View):
"""API for retrieving, updating, and deleting a single listener.
"""
url_regex = r'lbaas/listeners/(?P<listener_id>[^/]+)/$'
@rest_utils.ajax()
def get(self, request, listener_id):
"""Get a specific listener.
If the param 'includeChildResources' is passed in as a truthy value,
the details of all resources that exist under the listener will be
returned along with the listener details.
http://localhost/api/lbaas/listeners/cc758c90-3d98-4ea1-af44-aab405c9c915
"""
listener = neutronclient(request).show_listener(
listener_id).get('listener')
if request.GET.get('includeChildResources'):
resources = {}
resources['listener'] = listener
if listener.get('default_pool_id'):
pool_id = listener['default_pool_id']
pool = neutronclient(request).show_lbaas_pool(
pool_id).get('pool')
resources['pool'] = pool
if pool.get('members'):
tenant_id = request.user.project_id
members = neutronclient(request).list_lbaas_members(
pool_id, tenant_id=tenant_id).get('members')
resources['members'] = members
if pool.get('healthmonitor_id'):
monitor_id = pool['healthmonitor_id']
monitor = neutronclient(request).show_lbaas_healthmonitor(
monitor_id).get('healthmonitor')
resources['monitor'] = monitor
return resources
else:
return listener
@rest_utils.ajax()
def put(self, request, listener_id):
"""Edit a listener as well as any resources below it.
"""
kwargs = {'listener_id': listener_id}
update_listener(request, **kwargs)
@rest_utils.ajax()
def delete(self, request, listener_id):
"""Delete a specific listener.
http://localhost/api/lbaas/listeners/cc758c90-3d98-4ea1-af44-aab405c9c915
"""
neutronclient(request).delete_listener(listener_id)
@urls.register
class Pools(generic.View):
"""API for load balancer pools.
"""
url_regex = r'lbaas/pools/$'
@rest_utils.ajax()
def post(self, request):
"""Create a new pool.
Creates a new pool as well as other optional resources such as
members and health monitor.
"""
kwargs = {'loadbalancer_id': request.DATA.get('loadbalancer_id'),
'listener_id': request.DATA.get('parentResourceId')}
return create_pool(request, **kwargs)
@urls.register
class Pool(generic.View):
"""API for retrieving a single pool.
"""
url_regex = r'lbaas/pools/(?P<pool_id>[^/]+)/$'
@rest_utils.ajax()
def get(self, request, pool_id):
"""Get a specific pool.
If the param 'includeChildResources' is passed in as a truthy value,
the details of all resources that exist under the pool will be
returned along with the pool details.
http://localhost/api/lbaas/pools/cc758c90-3d98-4ea1-af44-aab405c9c915
"""
pool = neutronclient(request).show_lbaas_pool(pool_id).get('pool')
if request.GET.get('includeChildResources'):
resources = {}
resources['pool'] = pool
if pool.get('members'):
tenant_id = request.user.project_id
members = neutronclient(request).list_lbaas_members(
pool_id, tenant_id=tenant_id).get('members')
resources['members'] = members
if pool.get('healthmonitor_id'):
monitor_id = pool['healthmonitor_id']
monitor = neutronclient(request).show_lbaas_healthmonitor(
monitor_id).get('healthmonitor')
resources['monitor'] = monitor
return resources
else:
return pool
@rest_utils.ajax()
def put(self, request, pool_id):
"""Edit a listener as well as any resources below it.
"""
kwargs = {'pool_id': pool_id}
update_pool(request, **kwargs)
@rest_utils.ajax()
def delete(self, request, pool_id):
"""Delete a specific pool.
http://localhost/api/lbaas/pools/cc758c90-3d98-4ea1-af44-aab405c9c915
"""
neutronclient(request).delete_lbaas_pool(pool_id)
@urls.register
class Members(generic.View):
"""API for load balancer members.
"""
url_regex = r'lbaas/pools/(?P<pool_id>[^/]+)/members/$'
@rest_utils.ajax()
def get(self, request, pool_id):
"""List of members for the current project.
The listing result is an object with property "items".
"""
tenant_id = request.user.project_id
result = neutronclient(request).list_lbaas_members(pool_id,
tenant_id=tenant_id)
return {'items': result.get('members')}
@rest_utils.ajax()
def put(self, request, pool_id):
"""Update the list of members for the current project.
"""
# Assemble the lists of member id's to add and remove, if any exist
tenant_id = request.user.project_id
request_member_data = request.DATA.get('members', [])
existing_members = neutronclient(request).list_lbaas_members(
pool_id, tenant_id=tenant_id).get('members')
(members_to_add, members_to_delete) = get_members_to_add_remove(
request_member_data, existing_members)
if members_to_add or members_to_delete:
kwargs = {'existing_members': existing_members,
'members_to_add': members_to_add,
'members_to_delete': members_to_delete,
'pool_id': pool_id}
update_member_list(request, **kwargs)
@urls.register
class Member(generic.View):
"""API for retrieving a single member.
"""
url_regex = r'lbaas/pools/(?P<pool_id>[^/]+)' + \
'/members/(?P<member_id>[^/]+)/$'
@rest_utils.ajax()
def get(self, request, member_id, pool_id):
"""Get a specific member belonging to a specific pool.
"""
return neutronclient(request).show_lbaas_member(
member_id, pool_id).get('member')
@rest_utils.ajax()
def put(self, request, member_id, pool_id):
"""Edit a pool member.
"""
data = request.DATA
spec = {
'weight': data['weight']
}
return neutronclient(request).update_lbaas_member(
member_id, pool_id, {'member': spec})
@urls.register
class HealthMonitors(generic.View):
"""API for load balancer pool health monitors.
"""
url_regex = r'lbaas/healthmonitors/$'
@rest_utils.ajax()
def post(self, request):
"""Create a new health monitor.
"""
kwargs = {'loadbalancer_id': request.DATA.get('loadbalancer_id'),
'pool_id': request.DATA.get('parentResourceId')}
return create_health_monitor(request, **kwargs)
@urls.register
class HealthMonitor(generic.View):
"""API for retrieving a single health monitor.
"""
url_regex = r'lbaas/healthmonitors/(?P<healthmonitor_id>[^/]+)/$'
@rest_utils.ajax()
def get(self, request, healthmonitor_id):
"""Get a specific health monitor.
"""
return neutronclient(request).show_lbaas_healthmonitor(
healthmonitor_id).get('healthmonitor')
@rest_utils.ajax()
def delete(self, request, healthmonitor_id):
"""Delete a specific health monitor.
http://localhost/api/lbaas/healthmonitors/cc758c90-3d98-4ea1-af44-aab405c9c915
"""
neutronclient(request).delete_lbaas_healthmonitor(healthmonitor_id)
@rest_utils.ajax()
def put(self, request, healthmonitor_id):
"""Edit a health monitor.
"""
update_monitor(request)
# Repository: IssacCyj/RepDistiller
from __future__ import print_function
import os
import argparse
import socket
import time
import tensorboard_logger as tb_logger
import torch
import torch.optim as optim
import torch.nn as nn
import torch.backends.cudnn as cudnn
import numpy as np
from models import model_dict
from dataset.cifar100 import get_cifar100_dataloaders
from dataset.cifar10 import get_cifar10_dataloaders
from helper.util import adjust_learning_rate, accuracy, AverageMeter
from helper.loops import train_vanilla as train, validate
def parse_option():
hostname = socket.gethostname()
parser = argparse.ArgumentParser("argument for training")
parser.add_argument("--print_freq", type=int, default=100, help="print frequency")
parser.add_argument("--tb_freq", type=int, default=500, help="tb frequency")
parser.add_argument("--save_freq", type=int, default=40, help="save frequency")
parser.add_argument("--batch_size", type=int, default=64, help="batch_size")
parser.add_argument(
"--num_workers", type=int, default=8, help="num of workers to use"
)
parser.add_argument(
"--epochs", type=int, default=240, help="number of training epochs"
)
# optimization
parser.add_argument(
"--learning_rate", type=float, default=0.05, help="learning rate"
)
parser.add_argument(
"--lr_decay_epochs",
type=str,
default="150,180,210",
help="where to decay lr, can be a list",
)
parser.add_argument(
"--lr_decay_rate", type=float, default=0.1, help="decay rate for learning rate"
)
parser.add_argument("--weight_decay", type=float, default=5e-4, help="weight decay")
parser.add_argument("--momentum", type=float, default=0.9, help="momentum")
# dataset
parser.add_argument(
"--model",
type=str,
default="resnet110",
choices=[
"resnet8",
"resnet14",
"resnet20",
"resnet32",
"resnet44",
"resnet56",
"resnet110",
"resnet8x4",
"resnet32x4",
"wrn_16_1",
"wrn_16_2",
"wrn_40_1",
"wrn_40_2",
"vgg8",
"vgg11",
"vgg13",
"vgg16",
"vgg19",
"MobileNetV2",
"ShuffleV1",
"ShuffleV2",
],
)
parser.add_argument(
"--dataset",
type=str,
default="cifar10",
choices=["cifar100", "cifar10"],
help="dataset",
)
parser.add_argument(
"-t", "--trial", type=str, default="first", help="the experiment id"
)
parser.add_argument(
"--train_rule",
default="None",
type=str,
choices=["None", "Resample", "Reweight", "DRW"],
)
opt = parser.parse_args()
    # set a different learning rate for these models
if opt.model in ["MobileNetV2", "ShuffleV1", "ShuffleV2"]:
opt.learning_rate = 0.01
# set the path according to the environment
if hostname.startswith("visiongpu"):
opt.model_path = "/path/to/my/model"
opt.tb_path = "/path/to/my/tensorboard"
else:
opt.model_path = "./save/models"
opt.tb_path = "./save/tensorboard"
iterations = opt.lr_decay_epochs.split(",")
opt.lr_decay_epochs = list([])
for it in iterations:
opt.lr_decay_epochs.append(int(it))
opt.model_name = "{}_{}_lr_{}_decay_{}_trial_{}".format(
opt.model, opt.dataset, opt.learning_rate, opt.weight_decay, opt.trial
)
opt.tb_folder = os.path.join(opt.tb_path, opt.model_name)
if not os.path.isdir(opt.tb_folder):
os.makedirs(opt.tb_folder)
opt.save_folder = os.path.join(opt.model_path, opt.model_name)
if not os.path.isdir(opt.save_folder):
os.makedirs(opt.save_folder)
return opt
def main():
best_acc = 0
best_cls_acc = []
opt = parse_option()
# dataloader
if opt.dataset == "cifar100":
train_loader, val_loader = get_cifar100_dataloaders(
batch_size=opt.batch_size,
num_workers=opt.num_workers,
train_rule=opt.train_rule,
)
n_cls = 100
elif opt.dataset == "cifar10":
train_loader, val_loader = get_cifar10_dataloaders(
batch_size=opt.batch_size,
num_workers=opt.num_workers,
train_rule=opt.train_rule,
)
n_cls = 10
else:
raise NotImplementedError(opt.dataset)
# model
model = model_dict[opt.model](num_classes=n_cls)
# optimizer
optimizer = optim.SGD(
model.parameters(),
lr=opt.learning_rate,
momentum=opt.momentum,
weight_decay=opt.weight_decay,
)
cls_num_list = train_loader.dataset.get_cls_num_list()
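    # "Reweight" uses the class-balanced weighting of Cui et al. (2019): the
    # effective number of samples per class is (1 - beta^n) / (1 - beta), each
    # class weight is proportional to its inverse, and the weights are rescaled
    # to sum to the number of classes.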
if opt.train_rule == "Reweight":
beta = 0.9999
effective_num = 1.0 - np.power(beta, cls_num_list)
per_cls_weights = (1.0 - beta) / np.array(effective_num)
per_cls_weights = per_cls_weights / np.sum(per_cls_weights) * len(cls_num_list)
ce_weight = torch.FloatTensor(per_cls_weights).cuda()
print(f"Re-weighting {ce_weight}")
criterion = nn.CrossEntropyLoss()
if torch.cuda.is_available():
model = model.cuda()
criterion = criterion.cuda()
cudnn.benchmark = True
# tensorboard
logger = tb_logger.Logger(logdir=opt.tb_folder, flush_secs=2)
# routine
iteration = 0
for epoch in range(1, opt.epochs + 1):
adjust_learning_rate(epoch, opt, optimizer)
print("==> training...")
time1 = time.time()
train_acc, train_loss, iteration = train(
epoch, train_loader, model, criterion, optimizer, opt, logger, iteration
)
time2 = time.time()
print("epoch {}, total time {:.2f}".format(epoch, time2 - time1))
logger.log_value("train_acc", train_acc, epoch)
logger.log_value("train_loss", train_loss, epoch)
test_acc, test_acc_top5, test_loss, cls_acc = validate(
val_loader, model, criterion, opt, logger, epoch, cls_num_list
)
# save the best model
if test_acc > best_acc:
best_acc = test_acc
best_cls_acc = cls_acc
state = {
"epoch": epoch,
"model": model.state_dict(),
"best_acc": best_acc,
"optimizer": optimizer.state_dict(),
}
save_file = os.path.join(opt.save_folder, "{}_best.pth".format(opt.model))
print("saving the best model!")
torch.save(state, save_file)
# regular saving
if epoch % opt.save_freq == 0:
print("==> Saving...")
state = {
"epoch": epoch,
"model": model.state_dict(),
"accuracy": test_acc,
"optimizer": optimizer.state_dict(),
}
save_file = os.path.join(
opt.save_folder, "ckpt_epoch_{epoch}.pth".format(epoch=epoch)
)
torch.save(state, save_file)
    # This best accuracy is only for printing purposes.
# The results reported in the paper/README is from the last epoch.
print("best accuracy:", best_acc, best_cls_acc)
# save model
state = {
"opt": opt,
"model": model.state_dict(),
"optimizer": optimizer.state_dict(),
}
save_file = os.path.join(opt.save_folder, "{}_last.pth".format(opt.model))
torch.save(state, save_file)
if __name__ == "__main__":
main()
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 19 08:20:26 2021
@author: shane
"""
import dash
import dash_bootstrap_components as dbc
# Meta tags make the app mobile friendly
app = dash.Dash(__name__, suppress_callback_exceptions=True,
external_stylesheets=[dbc.themes.BOOTSTRAP],
meta_tags=[{'name': 'viewport',
'content':'width=device-width, initial-scale=1.0'}])
server = app.server
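# Typical entry point (hypothetical placement; in multi-page apps it usually
# lives in the index module that imports `app`):
#
#     if __name__ == '__main__':
#         app.run_server(debug=True)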
# -*- coding: utf-8 -*-
import argparse
import logging
import os
import re
from .. import __version__
from ..config import ALLOWED_IMAGE_REGEXPS
from ..config import ALLOWED_PORT_MAPPINGS
from ..config import CAPS_ADD
from ..config import CAPS_DROP
from ..config import ENV_VARS
from ..config import ENV_VARS_EXT
from ..config import NV_ALLOW_OWN_GPU_REUSE
from ..config import NV_ALLOWED_GPUS
from ..config import NV_DEFAULT_GPU_COUNT_RESERVATION
from ..config import NV_MAX_GPU_COUNT_RESERVATION
from ..config import PROBE_USED_MOUNTS
from ..config import RUN_PULL
from ..config import USER_IN_CONTAINER
from ..config import VOLUME_MOUNTS_ALWAYS
from ..config import VOLUME_MOUNTS_AVAILABLE
from ..config import VOLUME_MOUNTS_DEFAULT
from ..config import gid
from ..config import gids
from ..config import uid
from ..config import user_name
from ..helpers.cmd import init_cmd
from ..helpers.exceptions import UserDockerException
from ..helpers.execute import exec_cmd
from ..helpers.execute import exit_exec_cmd
from ..helpers.logger import logger
from ..helpers.nvidia import nvidia_get_available_gpus
from ..helpers.parser import init_subcommand_parser
def parser_run(parser):
sub_parser = init_subcommand_parser(parser, 'run')
sub_parser.add_argument(
"--no-default-mounts",
help="does not automatically add default mounts",
action="store_true",
)
mounts_help = []
if VOLUME_MOUNTS_ALWAYS:
mounts_help += ['Admin enforced: %s.' % ', '.join(VOLUME_MOUNTS_ALWAYS)]
if VOLUME_MOUNTS_DEFAULT:
mounts_help += ['Default: %s.' % ', '.join(VOLUME_MOUNTS_DEFAULT)]
if VOLUME_MOUNTS_AVAILABLE:
mounts_help += ['Available: %s.' % ', '.join(VOLUME_MOUNTS_AVAILABLE)]
if mounts_help:
sub_parser.add_argument(
"-v", "--volume",
help="user specified volume mounts (can be given multiple times). "
"%s" % " ".join(mounts_help),
action="append",
dest="volumes",
default=[],
)
sub_parser.add_argument(
"--entrypoint",
help="Overwrite the default ENTRYPOINT of the image",
)
sub_parser.add_argument(
"-w", "--workdir",
help="Working directory inside the container",
)
if ALLOWED_PORT_MAPPINGS:
sub_parser.add_argument(
"-p", "--publish",
help="Publish a container's ports to the host (see docker help). "
"Allowed: " + ', '.join(ALLOWED_PORT_MAPPINGS),
action="append",
dest="port_mappings",
default=[],
)
sub_parser.add_argument(
"image",
help="the image to run. Allowed: " + ', '.join(ALLOWED_IMAGE_REGEXPS),
)
sub_parser.add_argument(
"image_args",
help="arguments passed to the image",
nargs=argparse.REMAINDER
)
def prepare_nvidia_docker_run(args):
# mainly handles GPU arbitration via ENV var for nvidia-docker
# note that these are ENV vars for the command, not the container
if os.getenv('NV_HOST'):
raise UserDockerException('ERROR: NV_HOST env var not supported yet')
# check if allowed
if not NV_ALLOWED_GPUS:
raise UserDockerException(
"ERROR: No GPUs available due to admin setting."
)
nv_gpus = os.getenv('NV_GPU', '')
if nv_gpus:
# the user has set NV_GPU, just check if it's ok
nv_gpus = [g.strip() for g in nv_gpus.split(',')]
try:
nv_gpus = [int(gpu) for gpu in nv_gpus]
except ValueError as e:
raise UserDockerException(
"ERROR: Can't parse NV_GPU, use index notation: %s" % e
)
if not (
NV_ALLOWED_GPUS == 'ALL'
or all(gpu in NV_ALLOWED_GPUS for gpu in nv_gpus)):
raise UserDockerException(
"ERROR: Access to at least one specified NV_GPU denied by "
"admin. Available GPUs: %r" % (NV_ALLOWED_GPUS,)
)
# check if in bounds (and MAX >= 0)
if 0 <= NV_MAX_GPU_COUNT_RESERVATION < len(nv_gpus):
raise UserDockerException(
"ERROR: Number of requested GPUs > %d (admin limit)" % (
NV_MAX_GPU_COUNT_RESERVATION,)
)
# check if available
gpus_available, own_gpus = nvidia_get_available_gpus(args.executor_path)
if NV_ALLOW_OWN_GPU_REUSE:
gpus_available.extend(own_gpus)
for g in nv_gpus:
if g not in gpus_available:
msg = (
'ERROR: GPU %d is currently not available!\nUse:\n'
'"sudo userdocker ps --gpu-free" to find available GPUs.\n'
'"sudo userdocker ps --gpu-used" and "nvidia-smi" to see '
'status.' % g
)
if NV_ALLOW_OWN_GPU_REUSE and own_gpus:
                    msg += '\n"sudo userdocker ps --gpu-used-mine" to show own (reusable) GPUs.'
raise UserDockerException(msg)
else:
# NV_GPU wasn't set, use admin defaults, tell user
gpu_default = NV_DEFAULT_GPU_COUNT_RESERVATION
logger.info(
"NV_GPU environment variable not set, trying to acquire admin "
"default of %d GPUs" % gpu_default
)
gpus_available, own_gpus = nvidia_get_available_gpus(args.executor_path)
gpus = gpus_available[:gpu_default]
if len(gpus) < gpu_default:
msg = (
'Could not find %d available GPU(s)!\nUse:\n'
'"sudo userdocker ps --gpu-used" and "nvidia-smi" to see '
'status.' % gpu_default
)
if NV_ALLOW_OWN_GPU_REUSE and own_gpus:
msg += '\n You can set NV_GPU to reuse a GPU you have already' \
' reserved.'
raise UserDockerException(msg)
gpu_env = ",".join([str(g) for g in gpus])
logger.info("Setting NV_GPU=%s" % gpu_env)
os.environ['NV_GPU'] = gpu_env
def exec_cmd_run(args):
cmd = init_cmd(args)
# check port mappings
for pm in getattr(args, 'port_mappings', []):
for pm_pattern in ALLOWED_PORT_MAPPINGS:
if re.match(pm_pattern, pm):
cmd += ['-p', pm]
break
else:
raise UserDockerException(
"ERROR: given port mapping not allowed: %s" % pm
)
# check mounts
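    # A user mount spec looks like "host:container" with an optional ":ro"
    # suffix; admins may also whitelist a bare "host:" prefix to allow an
    # arbitrary container path (the values here are illustrative only).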
mounts = []
mounts_available = \
VOLUME_MOUNTS_ALWAYS + VOLUME_MOUNTS_DEFAULT + VOLUME_MOUNTS_AVAILABLE
mounts += VOLUME_MOUNTS_ALWAYS
if not args.no_default_mounts:
mounts += VOLUME_MOUNTS_DEFAULT
for user_mount in getattr(args, 'volumes', []):
if user_mount in mounts:
continue
if user_mount in mounts_available:
mounts += [user_mount]
continue
# literal matches didn't work, check if the user appended a 'ro' flag
if len(user_mount.split(':')) == 3:
host_path, container_path, flag = user_mount.split(':')
if flag == 'ro':
st = ':'.join([host_path, container_path])
if st in mounts:
# upgrade mount to include ro flag
idx = mounts.index(st)
mounts[idx] = user_mount
continue
if st in mounts_available:
mounts += [user_mount]
continue
# allow potential unspecified container_path mounts
host_path = user_mount.split(':')[0] + ':'
if host_path in mounts_available:
mounts += [user_mount]
continue
raise UserDockerException(
"ERROR: given mount not allowed: %s" % user_mount
)
mount_host_paths = [m.split(':')[0] for m in mounts]
for ms in mount_host_paths:
if not os.path.exists(ms):
raise UserDockerException(
"ERROR: mount can't be found: %s" % ms
)
if PROBE_USED_MOUNTS and os.path.isdir(ms):
os.listdir(ms)
for mount in mounts:
if ':' not in mount:
raise UserDockerException(
"ERROR: anonymous mounts currently not supported: %s" % mount
)
cmd += ["-v", mount]
if args.executor == 'nvidia-docker':
prepare_nvidia_docker_run(args)
env_vars = ENV_VARS + ENV_VARS_EXT.get(args.executor, [])
env_vars += [
"USERDOCKER=%s" % __version__,
"USERDOCKER_USER=%s" % user_name,
"USERDOCKER_UID=%d" % uid,
]
if args.executor == 'nvidia-docker':
# remember which GPU was assigned to the container for ps --gpu-used
env_vars += [
"USERDOCKER_NV_GPU=%s" % os.environ['NV_GPU']
]
for env_var in env_vars:
cmd += ['-e', env_var]
if USER_IN_CONTAINER:
cmd += ["-u", "%d:%d" % (uid, gid)]
for _g in gids:
if _g < 1000 or _g == gid:
continue
cmd += ["--group-add", "%d" % (_g)]
for cap_drop in CAPS_DROP:
cmd += ["--cap-drop=%s" % cap_drop]
for cap_add in CAPS_ADD:
cmd += ["--cap-add=%s" % cap_add]
if args.workdir:
cmd += ["-w", args.workdir]
if args.entrypoint:
cmd += ["--entrypoint", args.entrypoint]
    # additional injection protection, deactivated for now due to nvidia-docker's
    # inability to handle this
# cmd.append("--")
img = args.image
if ":" not in img and "@" not in img:
# user didn't explicitly set a tag or digest, append ":latest"
img += ":latest"
if ALLOWED_IMAGE_REGEXPS:
for air in ALLOWED_IMAGE_REGEXPS:
if re.match(air, img):
break
else:
raise UserDockerException(
"ERROR: image %s not in allowed image regexps: %s" % (
img, ALLOWED_IMAGE_REGEXPS))
# pull image?
if RUN_PULL == "default":
# just let `docker run` do its thing
pass
elif RUN_PULL == "always":
# pull image
exec_cmd(
[args.executor_path, 'pull', img],
dry_run=args.dry_run,
loglvl=logging.DEBUG,
)
elif RUN_PULL == "never":
# check if image is available locally
tmp = exec_cmd(
[args.executor_path, 'images', '-q', img],
return_status=False,
loglvl=logging.DEBUG,
)
if not tmp:
raise UserDockerException(
"ERROR: you can only use locally available images, but %s could"
" not be found locally" % img
)
else:
raise UserDockerException(
"ERROR: RUN_PULL config variable not expected range, contact admin"
)
cmd.append(img)
cmd.extend(args.image_args)
exit_exec_cmd(cmd, dry_run=args.dry_run)
# ----------------------------------------------------------------------------
# Title: Scientific Visualisation - Python & Matplotlib
# Author: <NAME>
# License: BSD
# ----------------------------------------------------------------------------
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.figure import Figure
from matplotlib.backends.backend_agg import FigureCanvasAgg
from matplotlib.patches import Rectangle
def pixelated_text(dpi=100):
fig = Figure(figsize=(1, 1), dpi=dpi)
canvas, ax = FigureCanvasAgg(fig), fig.gca()
ax.text(0.5, 0.5, "a", fontsize=75, ha="center", va="center")
ax.axis("off")
canvas.draw()
image = np.frombuffer(canvas.tostring_argb(), dtype="uint8")
image = image.reshape(dpi, dpi, 4)
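    # tostring_argb() returns ARGB byte order; rolling the channel axis by 3
    # moves alpha to the end so the array is interpreted as RGBA by imshow().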
image = np.roll(image, 3, axis=2)
return image
def square(position, size, edgecolor, facecolor, zorder):
rect = Rectangle(
position,
size,
size,
transform=ax.transAxes,
clip_on=False,
zorder=zorder,
linewidth=0.5,
edgecolor=edgecolor,
facecolor=facecolor,
)
ax.add_artist(rect)
image = pixelated_text(75)
fig = plt.figure(figsize=(4.25, 2), dpi=100)
# Left (raster)
ax = plt.subplot(
1, 2, 1, frameon=False, aspect=1, xticks=[], yticks=[], xlim=[0, 1], ylim=[0, 1]
)
ax.imshow(image, extent=[0.1, 1.0, 0.1, 1.0], zorder=10, interpolation="nearest")
square((0.1, 0.1), 0.9, "black", "None", 20)
square((0.0, 0.0), 0.2, "black", "white", 20)
ax.imshow(image, extent=[0.0, 0.2, 0.0, 0.2], zorder=30, interpolation="nearest")
square((0.0, 0.0), 0.2, "black", "None", 40)
ax.text(0.55, 1.025, "Raster rendering", fontsize="small", ha="center", va="bottom")
ax.text(
0.6, 0.1 - 0.025, ".PNG / .JPG / .TIFF", fontsize="x-small", ha="center", va="top"
)
# Right (vector)
ax = plt.subplot(
1, 2, 2, frameon=False, aspect=1, xticks=[], yticks=[], xlim=[0, 1], ylim=[0, 1]
)
ax.text(0.55, 0.55, "a", fontsize=100, ha="center", va="center", color="#000099")
square((0.1, 0.1), 0.9, "#000099", "None", 20)
square((0.0, 0.0), 0.2, "#000099", "white", 20)
ax.text(
0.1,
0.1,
"a",
fontsize=22,
ha="center",
va="center",
clip_on=False,
zorder=30,
color="#000099",
)
square((0.0, 0.0), 0.2, "#000099", "None", 40)
ax.text(
0.55,
1.025,
"Vector rendering",
fontsize="small",
ha="center",
va="bottom",
color="#000099",
)
ax.text(
0.6,
0.1 - 0.025,
".PDF / .SVG / .PS",
fontsize="x-small",
ha="center",
va="top",
color="#000099",
)
plt.savefig("../../figures/anatomy/raster-vector.pdf", dpi=600)
plt.show()
# ==============================================================================
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root
# for full license information.
# ==============================================================================
# CNTK models collection
# Layers and building blocks that are not generic but represent more specific model types go here.
from .attention import *
import pytest
import mock
import pwd
import grp
import subprocess
from collections import namedtuple
from lib.user_management import UserManagement
@mock.patch('pwd.getpwnam')
@pytest.mark.parametrize("login", [("elaine")])
def test_user_exist(mock_pwd, login):
UserManagement.user_exist(login)
pwd.getpwnam.assert_called_with('elaine')
@mock.patch('grp.getgrnam')
@pytest.mark.parametrize("github_team", [("soup")])
def test_group_exist(mock_grp, github_team):
um = UserManagement()
um.group_exist(github_team)
grp.getgrnam.assert_called_with('soup')
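# NOTE: stacked mock.patch decorators inject mocks bottom-up, so the parameter
# names in the tests below do not line up with the patches they receive; the
# assertions still hold because they go through the patched attributes directly.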
@mock.patch('subprocess.run')
@mock.patch('os.mkdir')
@mock.patch('builtins.open')
@mock.patch('lib.user_management.UserManagement.add_ssh_pub_key')
@pytest.mark.parametrize("login, github_team, key", [("george", "soup", 'ssh-rsa key that is public')])
def test_add_user(mock_subprocess, mock_os, mock_open, mock_add_key, login, github_team, key):
um = UserManagement()
um.add_user(login, github_team, key)
subprocess.run.assert_called_with(['useradd', '-m', '-G', 'soup', 'george'], check=True)
um.add_ssh_pub_key.assert_called_with(login, key)
@mock.patch('subprocess.run')
@pytest.mark.parametrize("github_team", [("soup")])
def test_add_group(mock_subprocess, github_team):
um = UserManagement()
um.add_group(github_team)
subprocess.run.assert_called_with(['groupadd', 'soup'], check=True)
@mock.patch('subprocess.run')
@mock.patch('os.mkdir')
@pytest.mark.parametrize("login", [("george")])
def test_purge_user(mock_subprocess, mock_os, login):
um = UserManagement()
um.purge_user(login)
subprocess.run.assert_called_with(['userdel', '-r', 'george'], check=True)
@pytest.fixture
def create_pwd():
data = []
struct_passwd = namedtuple('struct_passwd', 'pw_name pw_passwd pw_uid pw_gid pw_gecos pw_dir pw_shell')
data.append(struct_passwd(pw_name='root', pw_passwd='x', pw_uid=0, pw_gid=0, pw_gecos='root', pw_dir='/root', pw_shell='/bin/bash'))
data.append(struct_passwd(pw_name='daemon', pw_passwd='x', pw_uid=1, pw_gid=1, pw_gecos='daemon', pw_dir='/usr/sbin', pw_shell='/usr/sbin/nologin'))
data.append(struct_passwd(pw_name='soupnazi', pw_passwd='x', pw_uid=1000, pw_gid=1000, pw_gecos='soupnazi', pw_dir='/home/soup', pw_shell='/bin/bash'))
data.append(struct_passwd(pw_name='elaine', pw_passwd='x', pw_uid=3001, pw_gid=3000, pw_gecos='elaine', pw_dir='/home/elaine', pw_shell='/bin/bash'))
data.append(struct_passwd(pw_name='george', pw_passwd='x', pw_uid=3021, pw_gid=3000, pw_gecos='george', pw_dir='/home/george', pw_shell='/bin/bash'))
return data
@mock.patch('pwd.getpwall')
@pytest.mark.parametrize("uid", [(999)])
def test_get_ids(our_pwd, uid):
our_pwd.return_value = create_pwd()
um = UserManagement()
for id in um.get_ids(uid):
assert id.pw_uid >= 1000
@mock.patch('pwd.getpwall')
@pytest.mark.parametrize("uid", [(0)])
def test_list_uids(our_pwd, uid):
our_pwd.return_value = create_pwd()
um = UserManagement()
ids = []
for id in um.get_ids(uid):
ids.append(id.pw_uid)
assert len(ids) == 5
ids = []
for id in um.get_ids(1000):
ids.append(id.pw_uid)
assert len(ids) == 3
@mock.patch('pwd.getpwall')
@pytest.mark.parametrize("uid", [(1000)])
def test_list_logins(our_pwd, uid):
our_pwd.return_value = create_pwd()
um = UserManagement()
ids = []
for id in um.get_ids(uid):
ids.append(id.pw_name)
assert ids == ['soupnazi', 'elaine', 'george']
import torch.nn as nn
from utils import *
from torch.nn.utils.rnn import pad_packed_sequence, pack_sequence, pack_padded_sequence
import Constants
try:
from apex import amp
APEX_AVAILABLE = True
except ModuleNotFoundError:
APEX_AVAILABLE = False
class Trainer(object):
def __init__(self, args, encoder, decoder, optimizer, criterion, device, disable_prog_bar):
super(Trainer, self).__init__()
self.args = args
self.encoder = encoder
self.decoder = decoder
self.optimizer = optimizer
self.device = device
self.criterion = criterion
self.epoch = 0
self.softmax = nn.Softmax(dim=1)
self.disable_prog_bar = disable_prog_bar
self.non_blocking = args.pin_memory
self.use_attention = args.attention
self.accum_grad = args.accum_grad
self.use_amp = APEX_AVAILABLE
@staticmethod
def create_inputs(packed_encoded_path, packed_label, init_token, remove_eos=True):
'''
:param packed_encoded_path: PackedSequence
:param packed_label: PackedSequence
:param init_token: of shape (1, h_dim)
:param encoded_ctx: of shape (batch, h_dim)
:param remove_eos: boolean
:return: labels_list: PackedSequence
'''
# encoded_path: (batch_size, length_of_longest_seq, h_dim)
encoded_path, encoded_path_lengths = pad_packed_sequence(packed_encoded_path, batch_first=True)
if init_token is not None:
encoded_path = encoded_path.type(init_token.type())
batch_size, _, h_dim = encoded_path.size()
labels_list = list()
if packed_label is None:
for i in range(batch_size):
# (1 , h_dim)
cur_labels = init_token
labels_list.append(cur_labels)
else:
remove = 1 if remove_eos is True else 0
# labels: (batch_size, length_of_longest_seq, 1)
labels, labels_lengths = pad_packed_sequence(packed_label, batch_first=True)
for i in range(batch_size):
# (labels, h_dim)
cur_labels = encoded_path[i, labels[i][:labels_lengths[i]-remove]] # -1 is for removing EOS token from input
if init_token is not None:
# (1 + labels, h_dim)
cur_labels = torch.cat([init_token, cur_labels], dim=0)
labels_list.append(cur_labels)
return pack_sequence(labels_list, enforce_sorted=False)
def calculate_acc_and_loss(self, padded_label, padded_weights, res, batch_predictions=None):
'''
:param padded_label: of shape (batch_size, output_len)
:param padded_weights: of shape (batch_size, encoded_outputs_len, output_len)
:param batch_predictions: of shape (batch_size, self.args.max_seq_len + 1)
:return:
'''
batch_size, output_len = padded_label.shape
# (batch_size * output_len)
padded_label_flat = padded_label.view(-1)
# (batch_size, output_len, encoded_outputs_len)
padded_weights = padded_weights.transpose(1, 2).contiguous()
# (batch_size * output_len, encoded_outputs_len)
padded_weights_flat = padded_weights.reshape(batch_size * output_len, -1).contiguous()
# (batch_size * output_len)
pad_mask = padded_label_flat != Constants.PAD
if batch_predictions is None:
# (batch_size * out_len, encoded_outputs_len)
scores = self.softmax(padded_weights_flat).cpu()
            # with torch.backends.cudnn.benchmark = True, topk on a cuda tensor gives indices out of range when there is NaN
# for example: tensor = [[0.0001],[0.0001],[nan]]
# (batch_size * out_len, 1)
_, predicted_labels = scores.topk(1, dim=1)
predicted_labels = predicted_labels.to(self.device)
full_predicted_labels = predicted_labels.reshape(batch_size, output_len)
else:
full_predicted_labels = batch_predictions
# (batch_size, output_len)
predicted_labels = torch.zeros(size=(batch_size, output_len), device=self.device).long()
predicted_labels[:, :batch_predictions.shape[1]] = batch_predictions[:, :output_len]
res['predictions'] += full_predicted_labels.tolist()
res['targets'] += padded_label.tolist()
# (batch_size * output_len)
predicted_labels = predicted_labels.reshape((batch_size * output_len))
predicted_labels = predicted_labels[pad_mask]
true_labels_masked = padded_label_flat[pad_mask]
total_correct = (true_labels_masked == predicted_labels).sum().item()
total_targets = len(true_labels_masked)
loss = self.criterion(padded_weights_flat, padded_label_flat)
return total_correct, total_targets, loss
@staticmethod
def calculate_metrics(res):
predictions = res['predictions']
targets = res['targets']
path_ops = res['path_ops']
res['predicted_ops'] = list()
total_samples = len(predictions)
total_correct = 0
true_positive = 0
false_positive = 0
false_negative = 0
precision = 0
recall = 0
f1 = 0
for i in range(total_samples):
num_of_paths = len(path_ops[i])
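            # Predicted token ids are laid out as control tokens first, then one
            # block of num_of_paths ids per edit type (MOV, UPD, INS); subtracting
            # the matching offset recovers the original path index.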
pred_tokens = list(filter(lambda t: t != Constants.PAD and t != Constants.EOS, predictions[i]))
r = list()
for t in pred_tokens:
if t < num_of_paths + Constants.NUM_OF_CTRL_TOKENS:
r.append(path_ops[i][t-2])
elif t < 2 * num_of_paths + Constants.NUM_OF_CTRL_TOKENS:
r.append(path_ops[i][t - num_of_paths - Constants.NUM_OF_CTRL_TOKENS].replace("MOV", "UPD"))
else:
r.append(path_ops[i][t - 2 * num_of_paths - Constants.NUM_OF_CTRL_TOKENS].replace("MOV", "INS"))
res['predicted_ops'].append(r)
target_tokens = list(filter(lambda t: t != Constants.PAD and t != Constants.EOS, targets[i]))
total_correct += 1 if pred_tokens == target_tokens else 0
for token in pred_tokens:
if token in target_tokens:
true_positive += 1
else:
false_positive += 1
for token in target_tokens:
if token not in pred_tokens:
false_negative += 1
if true_positive + false_positive != 0:
precision = true_positive / (true_positive + false_positive)
if true_positive + false_negative != 0:
recall = true_positive / (true_positive + false_negative)
if (precision + recall) != 0:
f1 = 2 * precision * recall / (precision + recall)
res['precision'] = precision
res['recall'] = recall
res['f1'] = f1
res['acc'] = total_correct / total_samples
del res['path_ops']
    def packed_to_device(self, entry):
entry['packed_paths'] = entry['packed_paths'].to(device=self.device, non_blocking=self.non_blocking)
entry['packed_paths_positions'] = entry['packed_paths_positions'].to(device=self.device, non_blocking=self.non_blocking)
entry['packed_srcs'] = entry['packed_srcs'].to(device=self.device, non_blocking=self.non_blocking)
entry['packed_srcs_positions'] = entry['packed_srcs_positions'].to(device=self.device, non_blocking=self.non_blocking)
entry['packed_tgts'] = entry['packed_tgts'].to(device=self.device, non_blocking=self.non_blocking)
entry['packed_tgts_positions'] = entry['packed_tgts_positions'].to(device=self.device, non_blocking=self.non_blocking)
if 'packed_ctx' in entry.keys():
entry['packed_ctx'] = entry['packed_ctx'].to(device=self.device, non_blocking=self.non_blocking)
def train(self, data_generator):
self.encoder.train()
self.decoder.train()
self.optimizer.zero_grad()
total_loss = 0.0
num_of_batches = 0
total_targets = 0
total_correct = 0
res = {'predictions': list(), 'targets': list(), 'ids': list(), 'path_ops': list()}
pbar = tqdm(data_generator, desc='Training epoch ' + str(self.epoch + 1) + '', disable=self.disable_prog_bar, dynamic_ncols=True)
for i, (samples_dict, packed_label, path_ops, ids) in enumerate(pbar):
            self.packed_to_device(samples_dict)
packed_label = packed_label.to(device=self.device, non_blocking=self.non_blocking)
num_of_paths = samples_dict['focus_num_of_paths']
batch_size = len(num_of_paths)
res['ids'] += ids
res['path_ops'] += path_ops
# encoded_path: (batch_1, h_dim)
# encoded_ctx: (batch, num_of_paths, 2 * h_dim)
packed_encoded_path, packed_encoded_ctx, h = self.encoder(**samples_dict)
inputs = self.create_inputs(packed_encoded_path, packed_label, self.decoder.get_init_token())
c = self.decoder.create_h_or_c(batch_size)
hc = (h, c)
batch_size = len(num_of_paths)
# attention_weights: PadSequence of shape (batch_size, output_len, query_len)
packed_attention_scores, hc = self.decoder(packed_encoded_path, packed_encoded_ctx, batch_size, inputs, hc)
# (batch_size, output_len)
padded_label, labels_lengths = pad_packed_sequence(packed_label, batch_first=True)
# (batch_size, encoded_outputs_len, output_len)
padded_weights, weights_lengths = pad_packed_sequence(packed_attention_scores, batch_first=True, padding_value=Constants.NEG_INF)
correct, total, loss = self.calculate_acc_and_loss(padded_label, padded_weights, res)
total_correct += correct
total_targets += total
loss = loss / self.accum_grad
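            # Scale the loss so gradients accumulated over `accum_grad` micro-batches
            # match a single optimizer step on the combined batch.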
if self.use_amp:
if (i + 1) % self.accum_grad == 0:
with amp.scale_loss(loss, self.optimizer) as scaled_loss:
scaled_loss.backward()
nn.utils.clip_grad_norm_(amp.master_params(self.optimizer), 5)
self.optimizer.step()
self.optimizer.zero_grad()
else:
with amp.scale_loss(loss, self.optimizer, delay_unscale=True) as scaled_loss:
scaled_loss.backward()
nn.utils.clip_grad_norm_(amp.master_params(self.optimizer), 5)
else:
loss.backward()
nn.utils.clip_grad_norm_(self.encoder.parameters(), 5)
nn.utils.clip_grad_norm_(self.decoder.parameters(), 5)
if (i + 1) % self.accum_grad == 0:
self.optimizer.step()
self.optimizer.zero_grad()
total_loss += loss.item()
num_of_batches += 1
pbar.set_postfix_str("Loss: {}".format(total_loss / num_of_batches))
self.optimizer.step()
self.optimizer.zero_grad()
self.calculate_metrics(res)
loss = total_loss / num_of_batches
self.epoch += 1
return loss, res
def test(self, data_generator):
self.encoder.eval()
self.decoder.eval()
with torch.no_grad():
total_loss = 0.0
num_of_batches = 0
total_targets = 0
total_correct = 0
res = {'predictions': list(), 'targets': list(), 'ids': list(), 'path_ops': list()}
pbar = tqdm(data_generator, desc='Testing epoch ' + str(self.epoch) + '', disable=self.disable_prog_bar, dynamic_ncols=True)
for samples_dict, packed_label, path_ops, ids in pbar:
                self.packed_to_device(samples_dict)
packed_label = packed_label.to(device=self.device, non_blocking=self.non_blocking)
num_of_paths = samples_dict['focus_num_of_paths']
batch_size = len(num_of_paths)
res['ids'] += ids
res['path_ops'] += path_ops
# encoded_path: (batch_1, h_dim)
# encoded_ctx: (batch, h_dim)
packed_encoded_path, packed_encoded_ctx, h = self.encoder(**samples_dict)
inputs = self.create_inputs(packed_encoded_path, None, self.decoder.get_init_token())
encoded_outputs_len = Constants.NUM_OF_CTRL_TOKENS + Constants.NUM_OF_OPS * max(num_of_paths)
c = self.decoder.create_h_or_c(batch_size)
hc = (h, c)
batch_predictions = torch.zeros(size=(batch_size, self.args.max_seq_len + 1)).long()
# (batch_size, output_len)
padded_label, labels_lengths = pad_packed_sequence(packed_label, batch_first=True)
# (batch_size, output_len, encoded_outputs_len)
batch_scores = torch.zeros(size=(batch_size, padded_label.shape[1], encoded_outputs_len), device=self.device)
# False (batch_size, 1)
pad_mask = torch.zeros(size=(batch_size, 1), device=self.device) == 1
for t in range(self.args.max_seq_len + 1):
# attention_weights: PadSequence of shape (batch_size, 1, query_len)
packed_attention_scores, hc = self.decoder(packed_encoded_path, packed_encoded_ctx, batch_size, inputs, hc)
# (batch_size, encoded_outputs_len, 1)
padded_weights, weights_lengths = pad_packed_sequence(packed_attention_scores, batch_first=True, padding_value=Constants.NEG_INF)
# (batch_size, 1, encoded_outputs_len)
padded_weights = padded_weights.transpose(1, 2).contiguous()
if t < batch_scores.shape[1]:
batch_scores[:, t] = padded_weights.squeeze(dim=1)
# (batch_size, 1)
predicted_labels = self.calculate_predicted_labels(padded_weights, batch_predictions[:, :t], force_non_repetition=True)
# (batch_size, 1)
predicted_labels[pad_mask] = Constants.PAD
batch_predictions[:, t] = predicted_labels.squeeze()
# (batch_size, 1)
ended = predicted_labels == Constants.EOS
pad_mask |= ended
predicted_labels_packed = pack_padded_sequence(predicted_labels, batch_size * [1], batch_first=True, enforce_sorted=False)
inputs = self.create_inputs(packed_encoded_path, predicted_labels_packed, None, remove_eos=False)
# (batch_size, encoded_outputs_len, output_len)
batch_scores = batch_scores.transpose(1, 2).contiguous()
correct, total, loss = self.calculate_acc_and_loss(padded_label, batch_scores, res, batch_predictions)
total_correct += correct
total_targets += total
total_loss += loss.item()
num_of_batches += 1
pbar.set_postfix_str("Loss: {}".format(total_loss / num_of_batches))
self.calculate_metrics(res)
loss = total_loss / num_of_batches
return loss, res
def calculate_predicted_labels(self, padded_weights, batch_predictions, force_non_repetition=True):
# (batch_size, encoded_outputs_len)
scores = self.softmax(padded_weights.squeeze(dim=1)).cpu()
if force_non_repetition is False:
# (batch_size, 1)
_, predicted_labels = scores.topk(1, dim=1)
predicted_labels = predicted_labels.to(self.device)
return predicted_labels
# (batch_size, max_seq_len)
_, predicted_labels = scores.topk(self.args.max_seq_len+1, dim=1, sorted=True)
predicted_labels = predicted_labels.to(self.device)
batch_size = predicted_labels.shape[0]
calc_predicted_labels = torch.zeros(size=(batch_size, 1), device=self.device).long()
for i in range(batch_size):
already_predicted = batch_predictions[i].tolist()
calc_predicted_labels[i] = list(filter(lambda x: x not in already_predicted, predicted_labels[i].tolist()))[0]
return calc_predicted_labels
from typing import List, Tuple
import matplotlib.pyplot as plt
import random
from sys import stderr
from parameters import *
"""
Represents an individual in our population (in our case, a path going through
each city exactly once)
"""
class Individual:
# used to assign an individual id for each individual
__nb_individuals = 0
def __init__(self, route, generation_id):
# list of city indexes, sorted by visit order
self.route: List[int] = route
self.generation_id = generation_id
self.id = Individual.__nb_individuals
self._fitness: float = 0
Individual.__nb_individuals += 1
# get or compute the individual's fitness
def getFitness(self) -> float:
if self._fitness > 0: # fitness already computed
return self._fitness
else: # fitness not computed yet
raise Exception("Tried to get the fitness of an individual whose fitness has not been computed yet.")
# computes the fitness value for the given individual
# note: the higher its fitness, the better the individual is
# -> fitness and path length must have opposite variations
def computeFitness(self, dist_mat):
length = 0
for i in range(nb_cities - 1):
city1_index = self.route[i]
city2_index = self.route[i + 1]
length += dist_mat[city1_index][city2_index]
# don't forget to come back to the starting point! (I did)
length += dist_mat[self.route[nb_cities - 1]][self.route[0]]
self._fitness = 1000000 / length # arbitrary factor to keep fitness readable
return self._fitness
# plots an individual on a 2D plot
def plot(self, cities) -> None:
x_values = [cities[point][0] for point in self.route]
y_values = [cities[point][1] for point in self.route]
# don't forget to come back to the starting point! (I did)
x_values.append(cities[self.route[0]][0])
y_values.append(cities[self.route[0]][1])
plt.title(f"Gen. {self.generation_id} no {self.id}, fitness: {self.getFitness()}")
plt.plot(x_values, y_values)
plt.show()
# applies the individual a random mutation according to the mutation rate
def mutate(self) -> None:
# swap two cities with a chance of mutation_rate
if random.random() < mutation_rate:
c1 = random.randrange(nb_cities)
c2 = random.randrange(nb_cities)
self.route[c1], self.route[c2] = self.route[c2], self.route[c1]
# convenient for printing
def __str__(self):
return f"Individual no {self.id} (gen. {self.generation_id}), fitness: {self._fitness}"
# printing stuff too
def __repr__(self):
return self.__str__()
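# Minimal usage sketch (nb_cities comes from parameters.py; `dist_mat` is
# assumed to be built by the caller):
#
#     route = list(range(nb_cities))
#     random.shuffle(route)
#     ind = Individual(route, generation_id=0)
#     ind.computeFitness(dist_mat)
#     ind.mutate()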
import logging
from botocore.exceptions import ClientError
from library.aws.utility import convert_tags
class EBSOperations:
@staticmethod
def snapshot_make_private(ec2_client, snapshot_id):
"""
Remove public permissions on EBS snapshot
:param ec2_client: EC2 boto3 client
:param snapshot_id: the ID of the snapshot
:return: nothing
"""
ec2_client.modify_snapshot_attribute(
Attribute="createVolumePermission",
CreateVolumePermission={
"Remove": [
{
"Group": "all"
},
]
},
GroupNames=["all"],
OperationType="remove",
SnapshotId=snapshot_id
)
class EBSVolume(object):
"""
Basic class for EBS volume.
Encapsulates `VolumeId`/`State`/`Encrypted` and list of `Attachments`.
"""
def __init__(self, account, source):
"""
:param account: `Account` instance where EBS volume is present
:param source: single `Volumes` element as AWS returns
"""
self.source = source
self.account = account
self.id = source["VolumeId"]
self.state = source["State"]
self.encrypted = source["Encrypted"]
attachments = source.get('Attachments', [])
self.attachments = { attach['InstanceId']: attach['State'] for attach in attachments } if attachments else {}
self.tags = convert_tags(source.get('Tags', []))
@property
def name(self):
""" :return: EBS volume name from tags """
return self.tags.get("Name", None) if self.tags else None
def __str__(self):
name = "" if self.name is None else f"Name={self.name}, "
return f"{self.__class__.__name__}({name}Id={self.id}, Encrypted={self.encrypted}, State={self.state}, Attachments={len(self.attachments)})"
class EBSUnencryptedVolumesChecker(object):
"""
Basic class for checking EBS volumes in account/region.
Encapsulates discovered EBS volumes.
"""
def __init__(self, account):
"""
:param account: `Account` instance with EBS volumes to check
"""
self.account = account
self.volumes = []
def get_volume(self, id):
"""
:return: `EBSVolume` by id
"""
for volume in self.volumes:
if volume.id == id:
return volume
return None
def check(self, ids=None, tags=None):
"""
        Walk through unencrypted EBS volumes in the account/region and put them into `self.volumes`.
:param ids: list with EBS volume ids to check, if it is not supplied - all EBS volumes must be checked
:return: boolean. True - if check was successful,
False - otherwise
"""
args = {'DryRun': False}
if ids:
# if ids is set - check given ids regardless of encrypted status
args['VolumeIds'] = ids
else:
# else get only unencrypted volumes
args['Filters'] = [{
'Name': 'encrypted',
'Values': ["false"]
}]
if tags:
for key, value in tags.items():
args['Filters'].append(
{'Name': f"tag:{key}", 'Values': value if isinstance(value, list) else [value]},
)
try:
volume_details = self.account.client("ec2").describe_volumes(**args)["Volumes"]
except ClientError as err:
if err.response['Error']['Code'] in ["AccessDenied", "UnauthorizedOperation"]:
logging.error(f"Access denied in {self.account} "
f"(ec2:{err.operation_name})")
else:
logging.exception(f"Failed to describe volumes in {self.account}")
return False
for volume_detail in volume_details:
volume = EBSVolume(self.account, volume_detail)
self.volumes.append(volume)
return True
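# Minimal usage sketch (assumes an `Account` wrapper exposing .client("ec2"),
# as used throughout this module):
#
#     checker = EBSUnencryptedVolumesChecker(account)
#     if checker.check():
#         for volume in checker.volumes:
#             print(volume)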
class EBSSnapshot(object):
"""
Basic class for EBS snapshot.
Encapsulates `SnapshotId`/`VolumeId`/`Encrypted` and list of permissions.
"""
def __init__(self, account, source, permissions):
"""
:param account: `Account` instance where EBS snapshot is present
:param source: single `Snapshots` element as AWS returns
:param permissions: result of `describe_snapshot_attribute` API call for snapshot
"""
self.source = source
self.permissions = permissions
self.account = account
self.id = source["SnapshotId"]
self.volume_id = source["VolumeId"]
self.tags = convert_tags(source.get('Tags', []))
def __str__(self):
return f"{self.__class__.__name__}(Id={self.id}, VolumeId={self.volume_id}, Public={self.public})"
@property
def public(self):
"""
:return: boolean, True - if snapshot has `all` group permissions for `CreateVolumePermissions`
False - otherwise
"""
for permission in self.permissions["CreateVolumePermissions"]:
if "Group" in permission and permission["Group"] == "all":
return True
return False
def make_private(self):
"""
Remove public permissions on snapshot
:return: nothing
"""
EBSOperations.snapshot_make_private(self.account.client("ec2"), self.id)
class EBSPublicSnapshotsChecker(object):
"""
Basic class for checking EBS snapshots in account/region.
Encapsulates discovered EBS snapshots.
"""
def __init__(self, account):
"""
:param account: `Account` instance with EBS snapshots to check
"""
self.account = account
self.snapshots = []
def get_snapshot(self, id):
"""
:return: `EBSSnapshot` by id
"""
for snapshot in self.snapshots:
if snapshot.id == id:
return snapshot
return None
def check(self, ids=None, tags=None):
"""
        Walk through public EBS snapshots in the account/region and put them into `self.snapshots`.
:param ids: list with EBS snapshot ids to check, if it is not supplied - all EBS snapshots must be checked
:return: boolean. True - if check was successful,
False - otherwise
"""
args = {
'DryRun': False,
# You can specify AWS account IDs (if you own the snapshots),
# 'self' for snapshots for which you own or have explicit permissions,
# or 'all' for public snapshots.
'RestorableByUserIds': ['all'],
# The results can include the AWS account IDs of the specified owners,
# 'amazon' for snapshots owned by Amazon,
# or 'self' for snapshots that you own.
'OwnerIds': ['self']
}
if ids:
            # if ids is set - check given ids regardless of public status
args['SnapshotIds'] = ids
del args['RestorableByUserIds']
if tags:
args['Filters'] = []
for key, value in tags.items():
args['Filters'].append(
{'Name': f"tag:{key}", 'Values': value if isinstance(value, list) else [value]},
)
try:
snapshot_details = self.account.client("ec2").describe_snapshots(**args)["Snapshots"]
except ClientError as err:
if err.response['Error']['Code'] in ["AccessDenied", "UnauthorizedOperation"]:
logging.error(f"Access denied in {self.account} "
f"(ec2:{err.operation_name})")
else:
logging.exception(f"Failed to describe snapshots in {self.account}")
return False
for snapshot_detail in snapshot_details:
try:
                # Need to check each snapshot's attributes despite the fact
                # that we ask AWS to return only snapshots restorable by all, because:
                # * if 'ids' is set - we remove RestorableByUserIds and AWS returns both public and private snapshots
                # * moto does not support RestorableByUserIds and returns all snapshots
snapshot_permissions = self.account.client("ec2").describe_snapshot_attribute(
Attribute="createVolumePermission",
SnapshotId=snapshot_detail['SnapshotId']
)
except ClientError as err:
if err.response['Error']['Code'] in ["AccessDenied", "UnauthorizedOperation"]:
logging.error(f"Access denied in {self.account} "
f"(ec2:{err.operation_name})")
else:
logging.exception(f"Failed to describe '{snapshot_detail['SnapshotId']}' snapshot attribute "
f"in {self.account}")
return False
snapshot = EBSSnapshot(self.account, snapshot_detail, snapshot_permissions)
self.snapshots.append(snapshot)
return True
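# ---------------------------------------------------------------------------
# Illustrative usage sketch (added for clarity; not part of the original file).
# It assumes an `Account`-like object exposing `client("ec2")`, which is what
# the checkers above rely on; the variable names and tag values are hypothetical:
#
#     checker = EBSPublicSnapshotsChecker(account)
#     if checker.check(tags={"env": "prod"}):
#         for snapshot in checker.snapshots:
#             if snapshot.public:
#                 snapshot.make_private()
# ---------------------------------------------------------------------------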
|
StarcoderdataPython
|
3343697
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Uniform sampling method.
Samples in batches.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from .sampling_def import SamplingMethod
class UniformSampling(SamplingMethod):
def __init__(self, X, y, seed):
self.X = X
self.y = y
self.name = 'uniform'
np.random.seed(seed)
def select_batch_(self, already_selected, N, **kwargs):
"""Returns batch of randomly sampled datapoints.
Assumes that data has already been shuffled.
Args:
already_selected: index of datapoints already selected
N: batch size
Returns:
indices of points selected to label
"""
# This is uniform given the remaining pool but biased wrt the entire pool.
sample = [i for i in range(self.X.shape[0]) if i not in already_selected]
return sample[0:N]
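# ---------------------------------------------------------------------------
# Illustrative usage sketch (added; not part of the original file). Whether
# UniformSampling can be instantiated directly depends on SamplingMethod,
# which is not shown here, so treat this only as an assumption:
#
#     X = np.arange(20).reshape(10, 2)
#     y = np.zeros(10)
#     sampler = UniformSampling(X, y, seed=0)
#     batch = sampler.select_batch_(already_selected=[0, 1], N=3)  # -> [2, 3, 4]
# ---------------------------------------------------------------------------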
|
StarcoderdataPython
|
4803420
|
import re
import json
from collections import Counter
from transition_amr_parser.amr import JAMR_CorpusReader
import ast
import xml.etree.ElementTree as ET
def read_frame(xml_file):
'''
    Read PropBank frame XML
'''
root = ET.parse(xml_file).getroot()
propbank = {}
for predicate in root.findall('predicate'):
lemma = predicate.attrib['lemma']
for roleset_data in predicate.findall('roleset'):
# ID of the role e.g. run.01
pred_id = roleset_data.attrib['id']
# basic meta-data
propbank[pred_id] = {
'lemma': lemma,
'description': roleset_data.attrib['name']
}
# alias
propbank[pred_id]['aliases'] = []
for aliases in roleset_data.findall('aliases'):
for alias in aliases:
propbank[pred_id]['aliases'].append(alias.text)
# roles
propbank[pred_id]['roles'] = {}
for roles in roleset_data.findall('roles'):
for role in roles:
if role.tag == 'note':
continue
number = role.attrib['n']
propbank[pred_id]['roles'][f'ARG{number}'] = role.attrib
# examples
propbank[pred_id]['examples'] = []
for examples in roleset_data.findall('example'):
sentence = examples.findall('text')
assert len(sentence) == 1
sentence = sentence[0].text
tokens = [x.text for x in examples.findall('rel')]
args = []
for x in examples.findall('arg'):
args.append(x.attrib)
args[-1].update({'text': x.text})
propbank[pred_id]['examples'].append({
'sentence': sentence,
'tokens': tokens,
'args': args
})
return propbank
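# Shape of the dictionary returned by read_frame (derived from the parsing code
# above; the concrete keys and values shown here are hypothetical examples):
#
#     {
#         "run.01": {
#             "lemma": "run",
#             "description": "...",
#             "aliases": ["run", ...],
#             "roles": {"ARG0": {...role attribs...}, "ARG1": {...}},
#             "examples": [{"sentence": "...", "tokens": [...], "args": [...]}],
#         },
#         ...
#     }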
def read_action_scores(file_path):
"""
    Reads scores used to judge the optimality of an action set. Each line comprises
    sentence id (position in the original corpus)        1 int
    unnormalized scores                                  3 int
    sequence-normalized score, e.g. Smatch               1 float
    action sequence length                               1 int
    saved because of {score, length, None (original)}    1 str
    action sequence (tab separated)                      1 str (tab separated)
TODO: Probability
"""
action_scores = []
with open(file_path) as fid:
for line in fid:
line = line.strip()
items = list(map(int, line.split()[:4]))
items.append(float(line.split()[4]))
items.append(int(line.split()[5]))
items.append(
None if line.split()[6] == 'None' else line.split()[6]
)
if line.split()[7][0] == '[':
# backwards compatibility fix
items.append(ast.literal_eval(" ".join(line.split()[7:])))
else:
items.append(line.split()[7:])
action_scores.append(items)
return action_scores
def write_action_scores(file_path, action_scores):
"""
    Writes scores used to judge the optimality of an action set. Each line comprises
    sentence id (position in the original corpus)        1 int
    unnormalized scores                                  3 int
    sequence-normalized score, e.g. Smatch               1 float
    action sequence length                               1 int
    saved because of {score, length, None (original)}    1 str
    action sequence (tab separated)                      1 str (tab separated)
TODO: Probability
"""
with open(file_path, 'w') as fid:
for items in action_scores:
sid = items[0]
score = items[1:4]
smatch = items[4]
length = items[5]
            reason = items[6]
actions = items[7]
if actions is not None:
actions = '\t'.join(actions)
fid.write(
f'{sid} {score[0]} {score[1]} {score[2]} {smatch} {length} {reason} {actions}\n'
)
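# Example of a single line produced by write_action_scores above (values are
# hypothetical, including the action names): sentence id, three unnormalized
# scores, the Smatch score, the sequence length, the save reason, and finally
# the tab-separated action sequence:
#
#     7 12 10 11 0.83 9 score SHIFT<TAB>PRED(run-01)<TAB>...<TAB>CLOSE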
def read_amr(in_amr, unicode_fixes=False):
corpus = JAMR_CorpusReader()
corpus.load_amrs(in_amr)
if unicode_fixes:
        # Replacement rules for unicode characters
replacement_rules = {
'ˈtʃærɪti': 'charity',
'\x96': '_',
'⊙': 'O'
}
        # FIXME: normalization should be more robust. Right now we use the tokens
        # of the AMR inside the oracle. This is why we need to normalize them.
for idx, amr in enumerate(corpus.amrs):
new_tokens = []
for token in amr.tokens:
forbidden = [x for x in replacement_rules.keys() if x in token]
if forbidden:
token = token.replace(
forbidden[0],
replacement_rules[forbidden[0]]
)
new_tokens.append(token)
amr.tokens = new_tokens
return corpus
def read_rule_stats(rule_stats_json):
with open(rule_stats_json) as fid:
rule_stats = json.loads(fid.read())
# convert to counters
rule_stats['possible_predicates'] = Counter(rule_stats['possible_predicates'])
rule_stats['action_vocabulary'] = Counter(rule_stats['action_vocabulary'])
return rule_stats
def write_rule_stats(rule_stats_json, content):
with open(rule_stats_json, 'w') as fid:
fid.write(json.dumps(content))
def read_propbank(propbank_file):
# Read frame argument description
arguments_by_sense = {}
with open(propbank_file) as fid:
for line in fid:
line = line.rstrip()
sense = line.split()[0]
arguments = [
re.match('^(ARG.+):$', x).groups()[0]
for x in line.split()[1:] if re.match('^(ARG.+):$', x)
]
arguments_by_sense[sense] = arguments
return arguments_by_sense
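# Expected line format for the propbank file read above (hypothetical example):
# a sense id followed by whitespace-separated tokens, where only tokens of the
# exact form "ARGx:" are collected as arguments:
#
#     run.01 ARG0: runner ARG1: thing run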
def writer(file_path, add_return=False):
"""
    Returns a writer that writes to file_path if it is not None and does nothing
    otherwise.
    Calling the writer without arguments will close the file.
"""
if file_path:
# Erase file
fid = open(file_path, 'w+')
fid.close()
# open for appending
fid = open(file_path, 'a+', encoding='utf8')
else:
fid = None
def append_data(content=None):
"""writes to open file"""
if fid:
if content is None:
fid.close()
else:
if add_return:
fid.write(content + '\n')
else:
fid.write(content)
return append_data
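# Illustrative usage of writer() (added; the file name and content here are
# hypothetical):
#
#     append = writer('oracle_actions.txt', add_return=True)
#     append('SHIFT')   # writes "SHIFT\n"
#     append()          # closes the file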
def tokenized_sentences_egenerator(file_path):
with open(file_path) as fid:
for line in fid:
yield line.rstrip().split()
def read_tokenized_sentences(file_path, separator=' '):
sentences = []
with open(file_path) as fid:
for line in fid:
sentences.append(line.rstrip().split(separator))
return sentences
def write_tokenized_sentences(file_path, content, separator=' '):
with open(file_path, 'w') as fid:
for line in content:
line = [str(x) for x in line]
fid.write(f'{separator.join(line)}\n')
def read_sentences(file_path, add_root_token=False):
sentences = []
with open(file_path) as fid:
for line in fid:
line = line.rstrip()
if add_root_token:
line = line + " <ROOT>"
sentences.append(line)
return sentences
|
StarcoderdataPython
|
163812
|
<reponame>PerimeterX/perimeterx-python-ws<filename>setup-gae.py
#!/usr/bin/env python
from setuptools import setup, find_packages
version = 'v3.2.1'
setup(name='perimeterx-python-wsgi-gae',
version=version,
license='MIT',
      description='PerimeterX WSGI middleware for Google App Engine',
author='<NAME>',
author_email='<EMAIL>',
url='https://github.com/PerimeterX/perimeterx-python-wsgi',
download_url='https://github.com/PerimeterX/perimeterx-python-wsgi/tarball/' + version,
packages=find_packages(exclude=['dev', 'test*']),
package_data={'perimeterx': ['templates/*']},
install_requires=['pystache>=0.5.1,<=0.5.4', 'requests>=2.18.4,<=2.20.1', 'requests-toolbelt'],
classifiers=['Intended Audience :: Developers',
'Programming Language :: Python :: 2.7'])
|
StarcoderdataPython
|
3251446
|
<reponame>Zor-X-L/redis-cluster-manager
#!/usr/bin/env python3
import subprocess
exec(open('common.py').read())
args = ['./redis-trib.rb', 'create', '--replicas', str(config['replicas'])]
for host in config['hosts']:
for port in range(port_range_start, port_range_end):
args.append(host + ':' + str(port))
subprocess.call(args)
# vim: sw=4 et
|
StarcoderdataPython
|
1696193
|
<gh_stars>0
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from .services.controller2 import Controller2Client
from .services.controller2 import Controller2AsyncClient
from .services.debugger2 import Debugger2Client
from .services.debugger2 import Debugger2AsyncClient
from .types.controller import ListActiveBreakpointsRequest
from .types.controller import ListActiveBreakpointsResponse
from .types.controller import RegisterDebuggeeRequest
from .types.controller import RegisterDebuggeeResponse
from .types.controller import UpdateActiveBreakpointRequest
from .types.controller import UpdateActiveBreakpointResponse
from .types.data import Breakpoint
from .types.data import Debuggee
from .types.data import FormatMessage
from .types.data import SourceLocation
from .types.data import StackFrame
from .types.data import StatusMessage
from .types.data import Variable
from .types.debugger import DeleteBreakpointRequest
from .types.debugger import GetBreakpointRequest
from .types.debugger import GetBreakpointResponse
from .types.debugger import ListBreakpointsRequest
from .types.debugger import ListBreakpointsResponse
from .types.debugger import ListDebuggeesRequest
from .types.debugger import ListDebuggeesResponse
from .types.debugger import SetBreakpointRequest
from .types.debugger import SetBreakpointResponse
__all__ = (
"Controller2AsyncClient",
"Debugger2AsyncClient",
"Breakpoint",
"Controller2Client",
"Debuggee",
"Debugger2Client",
"DeleteBreakpointRequest",
"FormatMessage",
"GetBreakpointRequest",
"GetBreakpointResponse",
"ListActiveBreakpointsRequest",
"ListActiveBreakpointsResponse",
"ListBreakpointsRequest",
"ListBreakpointsResponse",
"ListDebuggeesRequest",
"ListDebuggeesResponse",
"RegisterDebuggeeRequest",
"RegisterDebuggeeResponse",
"SetBreakpointRequest",
"SetBreakpointResponse",
"SourceLocation",
"StackFrame",
"StatusMessage",
"UpdateActiveBreakpointRequest",
"UpdateActiveBreakpointResponse",
"Variable",
)
|
StarcoderdataPython
|
1718525
|
<filename>My Own projects/Playing Songs/main.pyw
import os
from tkinter import *
from tkinter import filedialog
try:
from mutagen.mp3 import MP3
except:
os.system('pip install mutagen')
from mutagen.mp3 import MP3
try:
import pygame
except:
os.system('pip install pygame')
import pygame
import time
file_dir = os.getcwd().replace('\\' , '/')
assets = f'{file_dir}/assets'
music_folder = assets.__add__('/music')
global pause_button
print(assets) #TODO Remove after release
print(music_folder) #TODO Remove after release
icon = f'{assets}/icon2.ico'
root = Tk()
root.title('Music Player')
root.iconbitmap(icon)
root.geometry('500x350')
# pygame setup
pygame.mixer.init()
#Grab Song length time info
def play_time():
current_time = pygame.mixer.music.get_pos() / 1000
converted_current_time = time.strftime("%M:%S" , time.gmtime(current_time))
status_bar.config(text=converted_current_time)
status_bar.after(1000 , play_time)
    # Get the currently active song
song = song_box.get(ACTIVE)
song = f'{music_folder}/{song}.mp3'
# Use mutagen
#Load the song
song_mut = MP3(song)
    # get song length
song_length = song_mut.info.length
converted_song_length = time.strftime("%M:%S" , time.gmtime(song_length))
#output the result to the status bar
status_bar.config(text=f'Time Elapsed {converted_current_time} of {converted_song_length} ')
def add_song():
song = filedialog.askopenfilename(initialdir=music_folder , title="Choose a song" , filetypes=((".mp3 songs" , "*.mp3") , ))
song = song.split(music_folder)
# lst = song.split(music_folder)
song_box.insert(END , song[1].replace('/','').replace('.mp3',''))
def add_many_song():
songs = filedialog.askopenfilenames(initialdir=music_folder , title="Choose at least 2 songs" , filetypes=((".mp3 songs" , "*.mp3") , ))
for song in songs:
song = song.split(music_folder)
# lst = song.split(music_folder)
song_box.insert(END , song[1].replace('/','').replace('.mp3',''))
#play selected song
def play():
song = song_box.get(ACTIVE)
print('Now playing:',song)
root.title(f"Now Playing: {song}")
song = f'{music_folder}/{song}.mp3'
pygame.mixer.music.load(song)
pygame.mixer.music.play(loops = 0)
#Call the play_time function
play_time()
#stop all songs
def stop():
pygame.mixer.music.stop()
song_box.selection_clear(ACTIVE)
status_bar.config(text="|"*10)
#create global pause variable
global paused
paused = False
#pause the current song
def pause(is_paused):
global paused
paused = is_paused
#pause
if paused:
pygame.mixer.music.unpause()
paused = False
pause_button = Button(controls_frame , image=pause_btn , borderwidth=0,command=lambda: pause(paused))
pause_button.grid(row=0,column=3 , padx=10)
else:
pygame.mixer.music.pause()
paused = True
pause_button = Button(controls_frame , image=pause_btn2 , borderwidth=0,command=lambda: pause(paused))
pause_button.grid(row=0,column=3 , padx=10)
#Play the next song
def next_song():
next_one = song_box.curselection()
# print(next_one)
    # Add one to the current song number
next_one = next_one[0]+1
song = song_box.get(next_one)
print('Now playing:',song)
root.title(f"Now Playing: {song}")
song = f'{music_folder}/{song}.mp3'
pygame.mixer.music.load(song)
pygame.mixer.music.play(loops = 0)
    #clear the current selection
song_box.selection_clear(0 ,END)
#activate song bar
song_box.activate(next_one)
# Set the active bar to Next song
song_box.selection_set(next_one , last=None)
#play previous song in playlist
def prev_song():
next_one = song_box.curselection()
# print(next_one)
    # Subtract one from the current song number
next_one = next_one[0]-1
song = song_box.get(next_one)
print("Now playing:",song)
root.title(f"Now Playing: {song}")
song = f'{music_folder}/{song}.mp3'
pygame.mixer.music.load(song)
pygame.mixer.music.play(loops = 0)
    #clear the current selection
song_box.selection_clear(0 ,END)
#activate song bar
song_box.activate(next_one)
# Set the active bar to Next song
song_box.selection_set(next_one , last=None)
def delete_song():
song_box.delete(ANCHOR)
pygame.mixer.music.stop()
def delete_all_song():
#Delete a song
song_box.delete(0,END)
#Stop music(if any)
pygame.mixer.music.stop()
#create playlist box
song_box = Listbox(root , bg="black" , fg='green' , width = '70',selectbackground="#3d82d1" ,selectforeground='yellow')
song_box.pack(padx=1,pady=6)
#define player control button images
back_btn = PhotoImage(file=f'{assets}/back.png')
forward_btn = PhotoImage(file=f'{assets}/forward.png')
play_btn = PhotoImage(file=f'{assets}/reload.png')
pause_btn = PhotoImage(file=f'{assets}/pause2.png')
pause_btn2 = PhotoImage(file=f'{assets}/play.png')
stop_btn = PhotoImage(file=f'{assets}/stop.png')
#Create player control frame
controls_frame = Frame(root)
controls_frame.pack()
#create play control images
back_button = Button(controls_frame , image=back_btn, borderwidth=0 , command=prev_song)
forward_button = Button(controls_frame , image=forward_btn, borderwidth=0 ,command=next_song)
play_button = Button(controls_frame , image=play_btn , borderwidth=0 ,command=play)
pause_button = Button(controls_frame , image=pause_btn , borderwidth=0,command=lambda: pause(paused))
stop_button = Button(controls_frame , image=stop_btn , borderwidth=0 , command=stop)
back_button.grid(row=0,column=0 , padx=10)
forward_button.grid(row=0,column=1 , padx=10)
play_button.grid(row=0,column=2 , padx=10)
pause_button.grid(row=0,column=3 , padx=10)
stop_button.grid(row=0,column=4 , padx=10)
# Create menu
my_menu = Menu(root)
root.config(menu=my_menu)
# Add 'Add Song' menu
add_song_menu = Menu(my_menu)
my_menu.add_cascade(label = "Add Songs" ,menu = add_song_menu)
add_song_menu.add_command(label = "Add one song to play-list" , command= add_song)
add_song_menu.add_command(label = "Add many songs to play-list" , command= add_many_song)
#Add delete Song menu
remove_song_menu = Menu(my_menu)
my_menu.add_cascade(label="Remove Songs" , menu=remove_song_menu)
remove_song_menu.add_command(label="Delete the selected song from playlist." , command=delete_song)
remove_song_menu.add_command(label="Delete all songs from playlist.",command=delete_all_song )
#create a statusbar
status_bar = Label(root , text= '' , bd=1 , relief=GROOVE , anchor=E)
status_bar.pack(fill=X , side=BOTTOM, ipady=2)
# my_slider = ttk.Scale(root , from_ = 0 , to_ = 100, orient = HORIZONTAL , value = 0 ,command = None).pack(pady = 20)
root.mainloop()
|
StarcoderdataPython
|
49329
|
from flask_wtf import FlaskForm
from wtforms import *
from wtforms.validators import InputRequired
class LoginForm(FlaskForm):
username = TextField('username',validators=[InputRequired()])
password = PasswordField('password',validators=[InputRequired()])
|
StarcoderdataPython
|
1768178
|
<filename>pythons_app/views.py
from django.forms.widgets import Media
from django.shortcuts import render, redirect
from .forms import PythonCreateForm
from .models import Python
# Create your views here.
def index(request):
pythons = Python.objects.all()
return render(request, 'index.html', {'pythons': pythons})
def create(request):
if request.method == 'GET':
form = PythonCreateForm()
return render(request, 'create.html', {'form': form})
else:
form = PythonCreateForm(request.POST, request.FILES)
print(form)
if form.is_valid():
python = form.save()
python.save()
return redirect('index')
return render(request, 'create.html', {'form': form})
def delete(request, pk):
item = Python.objects.get(pk=pk)
item.delete()
return redirect('index')
|
StarcoderdataPython
|
96677
|
<filename>test/unit/test_jinja.py
import unittest
from dbt.clients.jinja import get_template
class TestJinja(unittest.TestCase):
def test_do(self):
s = '{% set my_dict = {} %}\n{% do my_dict.update(a=1) %}'
template = get_template(s, {})
mod = template.make_module()
self.assertEqual(mod.my_dict, {'a': 1})
|
StarcoderdataPython
|
4815611
|
<gh_stars>1-10
import pytest
import cv2
import numpy as np
from plantcv.plantcv import roi_objects
@pytest.mark.parametrize("mode,exp", [["largest", 221], ["cutto", 152], ["partial", 221]])
def test_roi_objects(mode, exp, test_data):
"""Test for PlantCV."""
# Read in test data
img = cv2.imread(test_data.small_rgb_img)
cnt, cnt_str = test_data.load_contours(test_data.small_contours_file)
roi = [np.array([[[150, 150]], [[150, 174]], [[249, 174]], [[249, 150]]], dtype=np.int32)]
roi_str = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
_, _, _, area = roi_objects(img=img, roi_contour=roi, roi_hierarchy=roi_str, object_contour=cnt,
obj_hierarchy=cnt_str, roi_type=mode)
# Assert that the contours were filtered as expected
assert area == exp
def test_roi_objects_bad_input(test_data):
"""Test for PlantCV."""
# Read in test data
img = cv2.imread(test_data.small_rgb_img)
cnt, cnt_str = test_data.load_contours(test_data.small_contours_file)
roi = [np.array([[[150, 150]], [[150, 174]], [[249, 174]], [[249, 150]]], dtype=np.int32)]
roi_str = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
with pytest.raises(RuntimeError):
_ = roi_objects(img=img, roi_type="cut", roi_contour=roi, roi_hierarchy=roi_str,
object_contour=cnt, obj_hierarchy=cnt_str)
def test_roi_objects_grayscale_input(test_data):
"""Test for PlantCV."""
# Read in test data
img = cv2.imread(test_data.small_gray_img, -1)
cnt, cnt_str = test_data.load_contours(test_data.small_contours_file)
roi = [np.array([[[150, 150]], [[150, 174]], [[249, 174]], [[249, 150]]], dtype=np.int32)]
roi_str = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
_, _, _, area = roi_objects(img=img, roi_type="partial", roi_contour=roi, roi_hierarchy=roi_str,
object_contour=cnt, obj_hierarchy=cnt_str)
# Assert that the contours were filtered as expected
assert area == 221
def test_roi_objects_no_overlap(test_data):
"""Test for PlantCV."""
# Read in test data
img = cv2.imread(test_data.small_rgb_img)
cnt, cnt_str = test_data.load_contours(test_data.small_contours_file)
roi = [np.array([[[0, 0]], [[0, 24]], [[24, 24]], [[24, 0]]], dtype=np.int32)]
roi_str = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
_, _, _, area = roi_objects(img=img, roi_contour=roi, roi_hierarchy=roi_str, object_contour=cnt,
obj_hierarchy=cnt_str, roi_type="partial")
# Assert that the contours were filtered as expected
assert area == 0
def test_roi_objects_nested():
"""Test for PlantCV."""
# Create test data
img = np.zeros((100, 100), dtype=np.uint8)
cnt = [np.array([[[25, 25]], [[25, 49]], [[49, 49]], [[49, 25]]], dtype=np.int32),
np.array([[[34, 35]], [[35, 34]], [[39, 34]], [[40, 35]], [[40, 39]], [[39, 40]], [[35, 40]], [[34, 39]]],
dtype=np.int32)]
cnt_str = np.array([[[-1, -1, 1, -1], [-1, -1, -1, 0]]], dtype=np.int32)
roi = [np.array([[[0, 0]], [[0, 99]], [[99, 99]], [[99, 0]]], dtype=np.int32)]
roi_str = np.array([[[-1, -1, -1, -1]]], dtype=np.int32)
_, _, _, area = roi_objects(img=img, roi_contour=roi, roi_hierarchy=roi_str, object_contour=cnt,
obj_hierarchy=cnt_str, roi_type="largest")
assert area == 580
|
StarcoderdataPython
|
199359
|
"""Package setup entrypoint."""
from typing import IO, Sequence
from setuptools import find_packages as __find_packages, setup as __compose_package
from uurl import (
__author__ as __author,
__doc__ as __full_doc,
__email__ as __email,
__license__ as __license,
__name__ as __name,
__version__ as __version,
)
def __readme() -> str:
"""Returns project description."""
with open("README.md") as readme: # type: IO[str]
return readme.read()
def __requirements() -> Sequence[str]:
"""Returns requirements sequence."""
with open("requirements.txt") as requirements: # type: IO[str]
return tuple(map(str.strip, requirements.readlines()))
def __first_line(string: str, delimiter: str = "\n") -> str:
"""Returns only first line up to next delimiter item occurred.
Args:
string (str): given string item
delimiter (str): separator string
"""
return string.split(delimiter)[0]
if __name__ == "__main__":
__compose_package(
name=__name,
version=__version,
author=__author,
author_email=__email,
description=__first_line(__full_doc),
long_description=__readme(),
long_description_content_type="text/markdown",
url=f"https://github.com/vyahello/{__name}",
packages=__find_packages(exclude=("*.tests", "*.tests.*", "tests.*", "tests")),
include_package_data=True,
install_requires=__requirements(),
classifiers=(
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
f"License :: OSI Approved :: {__license} License",
"Operating System :: OS Independent",
),
python_requires=">=3.6",
)
|
StarcoderdataPython
|
1732749
|
<gh_stars>1-10
"""OTE-API OPTIMADE-specific Python exceptions."""
class BaseOteapiOptimadeException(Exception):
"""Base OTE-API OPTIMADE exception."""
class ConfigurationError(BaseOteapiOptimadeException):
"""An error occurred when dealing with strategy configurations."""
class RequestError(BaseOteapiOptimadeException):
"""A general error occured when performing a URL request."""
class OPTIMADEResponseError(RequestError):
"""An OPTIMADE error was returned from a URL request."""
class OPTIMADEParseError(BaseOteapiOptimadeException):
"""Could not use OPTIMADE Python tools to parse an OPTIMADE API response."""
|
StarcoderdataPython
|
3355520
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# --- Rot Motion
freq1= 0.8
freq2= 0.4
tMax = 10
dt = 0.1
omega1 = 2*np.pi*freq1
T1 = 1/freq1
omega2 = 2*np.pi*freq2
T2 = 1/freq2
time = np.arange(0,tMax+dt/2,dt)
pos = np.zeros((len(time), 6)) # positions: x,y,z, theta_x, theta_y, theta_z
vel = np.zeros((len(time), 6)) # velocities: xdot, ydot, zdot, and omega
acc = np.zeros((len(time), 6)) # accelerations: xddot, omegadot
# -- First period we do a vertical motion
I1 = time <= T1
pos[I1,2] = 2*np.sin(omega1*time[I1])
vel[I1,2] = omega1 *2*np.cos(omega1*time[I1])
acc[I1,2] =-omega1**2*2*np.sin(omega1*time[I1])
# -- Second period we do nothing
I2 = time > T1
# -- Third period we do x rotations
I3 = time > 2*T1
pos[I3,3] = 0.10 *np.sin(omega2*time[I3])
vel[I3,3] = omega2 * 0.10 *np.cos(omega2*time[I3])
acc[I3,3] =-omega2**2* 0.10 *np.sin(omega2*time[I3])
cols = ['time_[s]', 'x_[m]', 'y_[m]', 'z_[m]' , 'theta_x_[rad]', 'theta_y_[rad]', 'theta_z_[rad]']
cols +=['xdot_[m/s]', 'ydot_[m/s]', 'zdot_[m/s]', 'omega_x_g_[rad/s]', 'omega_y_g_[rad/s]', 'omega_z_g_[rad/s]']
cols +=['xddot_[m^2/s]', 'yddot_[m^2/s]' , 'zddot_[m^2/s]', 'alpha_x_g_[rad/s]', 'alpha_y_g_[rad/s]', 'alpha_z_g_[rad/s]']
data=np.column_stack((time, pos, vel, acc))
df = pd.DataFrame( data=data, columns=cols)
df.to_csv('KiteMotionSimple.csv', index=False, sep=',', float_format='%10.6f')
print(df.shape)
# Time_[s] , x_[m], y_[m], z_[m] , theta_x_[rad], theta_y_[rad], theta_z_[rad-], xdot_[m/s], ydot_[m/s], zdot_[m/s], omega_x_g_[rad/s], omega_y_g_[rad/s], omega_z_g_[rad/s], xddot_[m^2/s], yddot_[m^2/s] , zddot_[m^2/s], alpha_x_g_[rad/s], alpha_y_g_[rad/s], alpha_z_g_[rad/s]
if __name__ == '__main__':
pass
|
StarcoderdataPython
|
52867
|
# Top-3 + Winning numbers of the last draw
def test_top_3_winning_numbers_last_draw(app):
app.ResultAndPrizes.open_page_results_and_prizes()
app.ResultAndPrizes.click_game_top_3()
app.ResultAndPrizes.button_get_report_winners()
assert "ВЫИГРЫШНЫЕ НОМЕРА" in app.ResultAndPrizes.parser_report_text_winners()
app.ResultAndPrizes.message_id_33_top_3_last_draw()
app.ResultAndPrizes.message_id_33_top_3_winning_numbers_last_draw()
app.ResultAndPrizes.comeback_main_page()
|
StarcoderdataPython
|
23501
|
#--- Exercise 2 - Variables
#--- Create a menu for an employee registration system
#--- The menu must be printed with the format() function
#--- The options must be integer variables
#--- The option descriptions will be:
#--- Register employee
#--- List employees
#--- Edit employee
#--- Delete employee
#--- Exit
#--- Besides the options, the menu must contain a header and a footer
#--- There must be a spacing of 3 lines between the header and the menu and between the menu and the footer
#--- The special newline and tab characters must be used
opcao = int(input("""
SISTEMA DE CADASTRO DE FUNCIONARIO\n\n\n
{} - Cadastrar Funcionário
{} - Listar Funcinários
{} - Editar Funcionário
{} - Deletar Funcionário
{} - Sair\n\n\n
Escolha uma opção: """.format(1,2,3,4,5)))
if opcao == 1:
print("A opção escolhida foi 'Cadastrar funcionário'")
elif opcao == 2:
print("A opção escolhida foi 'Listar funcionários'")
elif opcao == 3:
print("A opção escolhida foi 'Editar funcionário'")
elif opcao == 4:
print("A opção escolhida foi 'Deletar funcionário'")
elif opcao == 5:
print("A opção escolhida foi 'Sair'")
else:
pass
|
StarcoderdataPython
|
3298557
|
<gh_stars>0
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# pylint: disable=invalid-name,too-few-public-methods
"""
EM Slack Tableflip module: slack_tableflip.storage.
- Sets database schema for storing user data
- Initializes database structure
Copyright (c) 2015-2016 <NAME>
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
"""
from datetime import datetime
from slack_tableflip import APP
from flask_sqlalchemy import SQLAlchemy
from sqlalchemy.exc import SQLAlchemyError
# Create database
DB = SQLAlchemy(APP)
class Users(DB.Model):
"""Table for storing authenticated user data."""
__tablename__ = 'flip_users'
id = DB.Column(DB.String(16), primary_key=True)
team = DB.Column(DB.String(16))
token = DB.Column(DB.String(255))
added = DB.Column(DB.DateTime, default=datetime.now)
def __init__(self, user_id, team_id, token):
"""Initialize new User in db."""
self.id = user_id
self.team = team_id
self.token = token
def __repr__(self):
"""Friendly representation of User for debugging."""
return '<User id={user} team={team}>'.format(
user=self.id,
team=self.team
)
class Teams(DB.Model):
"""Table for storing authenticated user data."""
__tablename__ = 'flip_teams'
id = DB.Column(DB.String(16), primary_key=True)
token = DB.Column(DB.String(255))
added = DB.Column(DB.DateTime, default=datetime.now)
def __init__(self, team_id, token):
"""Initialize new Team in db."""
self.id = team_id
self.token = token
def __repr__(self):
"""Friendly representation of Team for debugging."""
return '<Team id={team}>'.format(team=self.id)
try:
# Attempt to initialize database
DB.create_all()
except SQLAlchemyError:
    # Otherwise, roll back the session
DB.session.rollback()
|
StarcoderdataPython
|
61259
|
#!/usr/bin/env python3
from setuptools import find_packages, setup
setup(
name="lean_proof_recording",
version="0.0.1",
packages=find_packages(),
package_data={},
install_requires=[
"mpmath",
"pandas",
"jsonlines",
"tqdm",
],
)
|
StarcoderdataPython
|
6040
|
import json
from os import path
from tweepy import OAuthHandler, Stream
from tweepy.streaming import StreamListener
from sqlalchemy.orm.exc import NoResultFound
from database import session, Tweet, Hashtag, User
consumer_key = "0qFf4T2xPWVIycLmAwk3rDQ55"
consumer_secret = "<KEY>"
access_token = "<KEY>"
acces_token_secret = "<KEY>"
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, acces_token_secret)
def save_tweets():
directory = _get_dir_absolute_path()
filepath = path.join(directory, "tweets.json")
listener = DatabaseListener(number_tweets_to_save = 1000, filepath=filepath)
stream = Stream(auth, listener)
languages = ("en",)
try:
stream.sample(languages = languages)
except KeyboardInterrupt:
listener.file.close()
class DatabaseListener(StreamListener):
def __init__(self, number_tweets_to_save, filepath = None):
self._final_count = number_tweets_to_save
self._current_count = 0
if filepath is None:
filepath = "tweets.txt"
self.file = open(filepath,"w")
    # Slightly dangerous due to circular references
def __del__(self):
self.file.close()
def on_data(self, raw_data):
data = json.loads(raw_data)
json.dump(raw_data, self.file)
self.file.write("\n")
if "in_reply_to_status_id" in data:
return self.on_status(data)
def on_status(self, data):
        #this method is defined in this file
save_to_database(data)
self._current_count += 1
print("status count: {}".format(self._current_count))
if self._current_count >= self._final_count:
return False
def create_user_helper(user_data):
#alias to shorten calls
u = user_data
    user = User(uid = u["id_str"],
name = u["name"],
screen_name = u["screen_name"],
created_at = u["created_at"],
description = u.get("description"),
followers_count = u["followers_count"],
statuses_count = u["statuses_count"],
favourites_count = u["favourites_count"],
listed_count = u["listed_count"],
geo_enabled = u["geo_enabled"],
lang = u.get("lang"))
return user
def create_tweet_helper(tweet_data, user):
    #alias to shorten calls
t = tweet_data
retweet = True if t["text"][:3] == "RT " else False
coordinates = json.dumps(t["coordinates"])
tweet = Tweet(tid=t["id_str"],
tweet=t["text"],
user=user,
coordinates=coordinates,
created_at = t["created_at"],
favorite_count = t["favorite_count"],
in_reply_to_screen_name = t["in_reply_to_screen_name"],
in_reply_to_status_id = t["in_reply_to_status_id"],
in_reply_to_user_id = t["in_reply_to_user_id"],
lang = t.get("lang"),
quoted_status_id = t.get("quoted_status_id"),
retweet_count = t["retweet_count"],
source = t["source"],
is_retweet = retweet)
return tweet
def save_to_database(data):
try:
user = session.query(User).filter_by(id=str(data["user"]["id"])).one()
except NoResultFound:
user = create_user_helper(data["user"])
session.add(user)
hashtag_results = []
hashtags = data["entities"]["hashtags"]
for hashtag in hashtags:
hashtag = hashtag["text"].lower()
try:
            hashtag_obj = session.query(Hashtag).filter_by(text = hashtag).one()
        except NoResultFound:
hashtag_obj = Hashtag(text = hashtag)
session.add(hashtag_obj)
hashtag_results.append(hashtag_obj)
tweet = create_tweet_helper(data, user)
for hashtag in hashtag_results:
tweet.hashtags.append(hashtag)
session.add(tweet)
session.commit()
|
StarcoderdataPython
|
53358
|
from django_datatables_view.base_datatable_view import BaseDatatableView
from django.db.models import Q
from django.contrib.postgres.aggregates.general import ArrayAgg
from website.models import Genome
class GenomeTableAjax(BaseDatatableView):
# The model we're going to show
model = Genome
    # set max limit of records returned; this is used to protect our site if someone tries to attack it
    # and make it return a huge amount of data
max_display_length = 2000
def render_column(self, row: Genome, column: str):
if column == 'genome_tags':
html = [F'<span data-tag="{tag}">{tag}</span>' for tag in row.genome_tags if tag]
return ' '.join(html)
if column == 'organism_tags':
html = [F'<span data-tag="{tag}">{tag}</span>' for tag in row.organism_tags if tag]
return ' '.join(html)
if column == 'representative':
return 'True' if row.is_representative else 'False'
if column == 'literature_references':
return " ".join(row.literature_references)
elif column.startswith("env_"):
return " ".join(row.__getattribute__(column))
else:
return super(GenomeTableAjax, self).render_column(row, column)
def get_initial_queryset(self):
if not self.model:
raise NotImplementedError("Need to provide a model or implement get_initial_queryset!")
qs = self.model.objects.annotate(
genome_tags=ArrayAgg('tags__tag', distinct=True),
organism_tags=ArrayAgg('organism__tags__tag', distinct=True)
) \
.all() \
.prefetch_related('organism') \
.prefetch_related('organism__taxid')
return qs
def filter_queryset(self, qs):
""" If search['value'] is provided then filter all searchable columns using filter_method (istartswith
by default).
Automatic filtering only works for Datatables 1.10+. For older versions override this method
"""
columns = self._columns
if not self.pre_camel_case_notation:
# get global search value
search = self._querydict.get('search[value]', None)
q = Q()
filter_method = self.get_filter_method()
for col_no, col in enumerate(self.columns_data):
# apply global search to all searchable columns
if search and col['searchable']:
# cannot search binary fields or tags
if not columns[col_no] in ['representative', 'contaminated', 'organism.restricted']:
q |= Q(**{F"{columns[col_no].replace('.', '__')}__{filter_method}": search})
# column specific filter
if col['search.value']:
colname = col['name']
## CUSTOM FILTERS
if colname == 'representative':
if col['search.value'] == "True":
qs = qs.filter(representative__isnull=False)
else:
qs = qs.filter(representative__isnull=True)
elif colname == "genome_tags":
qs = qs.filter(tags__tag__in=col['search.value'].split("|"))
elif colname == "organism_tags":
qs = qs.filter(organism__tags__tag__in=col['search.value'].split("|"))
elif colname.endswith("_date"):
if col['search.value'].startswith("-yadcf_delim"):
range = ["0001-01-01", col['search.value'][-10:]]
elif col['search.value'].endswith("yadcf_delim-"):
range = [col['search.value'][:10], "9000-12-30"]
else:
range = [col['search.value'][:10], col['search.value'][-10:]]
qs = qs.filter(**{'{0}__{1}'.format(columns[col_no].replace('.', '__'), 'range'): range})
else:
# DEFAULT BEHAVIOUR
qs = qs.filter(**{
'{0}__{1}'.format(columns[col_no].replace('.', '__'), filter_method): col['search.value']})
qs = qs.filter(q)
return qs
|
StarcoderdataPython
|
1772798
|
<filename>100-200q/129.py
'''
Given a binary tree containing digits from 0-9 only, each root-to-leaf path could represent a number.
An example is the root-to-leaf path 1->2->3 which represents the number 123.
Find the total sum of all root-to-leaf numbers.
Note: A leaf is a node with no children.
Example:
Input: [1,2,3]
1
/ \
2 3
Output: 25
Explanation:
The root-to-leaf path 1->2 represents the number 12.
The root-to-leaf path 1->3 represents the number 13.
Therefore, sum = 12 + 13 = 25.
'''
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def sumNumbers(self, root):
"""
:type root: TreeNode
:rtype: int
"""
if not root:
return 0
def dfs(root, num, total):
if not root:
return total
num = num*10 + root.val
if not root.left and not root.right:
total += num
return total
            return dfs(root.left, num, total) + dfs(root.right, num, total)
return dfs(root, 0, 0)
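# Added illustrative check (not part of the original solution): it builds the
# [1,2,3] tree from the example in the problem statement above and verifies the
# expected sum of 25. TreeNode mirrors the commented-out definition above.
if __name__ == "__main__":
    class TreeNode(object):
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None

    root = TreeNode(1)
    root.left = TreeNode(2)
    root.right = TreeNode(3)
    assert Solution().sumNumbers(root) == 25  # 12 + 13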
|
StarcoderdataPython
|
173816
|
"This module holds the mixer state of the X-Air device"
# part of xair-remote.py
# Copyright (c) 2018, 2021 <NAME>
# Additions Copyright (c) 2021 <NAME>
# Some rights reserved. See LICENSE.
import time
import subprocess
import struct
import json
from collections import deque
from lib.xair import XAirClient, find_mixer
from lib.midicontroller import MidiController, TempoDetector
class Channel:
"""
Represents a single channel or bus
"""
def __init__(self, addr):
# LR then 6 aux bus followed by the 4 effects
self.sends = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
self.enables = [1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]
self.osc_base_addr = addr
def get_m_addr(self, bus):
if bus == 0:
if self.osc_base_addr.startswith('/config'):
return(self.osc_base_addr)
else:
return(self.osc_base_addr + "/on")
else:
return(self.osc_base_addr + '/{:0>2d}/level'.format(bus))
def get_l_addr(self, bus):
if bus == 0:
if self.osc_base_addr.startswith('/head'):
return(self.osc_base_addr + "/gain")
else:
return(self.osc_base_addr + "/fader")
else:
return(self.osc_base_addr + '/{:0>2d}/level'.format(bus))
def toggle_mute(self, bus):
"""Toggle a mute on or off."""
# print("toggle mute of %s bus %s from %s" % (self.get_m_addr(bus), bus, self.enables[bus]))
if self.enables[bus] == 1:
self.enables[bus] = 0
param=0.0
else:
self.enables[bus] = 1
param=self.sends[bus]
if bus == 0:
return(self.get_m_addr(bus), self.enables[bus], self.enables[bus])
else:
return(self.get_m_addr(bus), param, self.enables[bus])
def set_mute(self, bus, value):
"""Set the state of the channel mute."""
self.enables[bus] = value
return(self.get_m_addr(bus), self.enables[bus], self.enables[bus])
def get_mute(self, bus):
return(self.enables[bus])
def change_level(self, bus, delta):
"""Change the level of a fader, mic pre, or bus send."""
# print("changing %s bus %s by %s" % (self.osc_base_addr, bus, delta))
if bus == "gain":
bus = 0
self.sends[bus] = min(max(0.0, self.sends[bus] + (delta / 200)), 1.0)
return(self.get_l_addr(bus), self.sends[bus], self.sends[bus])
def set_level(self, bus, value):
"""Set the level of a fader, mic pre, or bus send."""
if bus == "gain":
bus = 0
self.sends[bus] = value
return(self.get_l_addr(bus),
self.sends[bus], self.sends[bus])
def get_level(self, bus):
"""Return the current level of a fader, mic pre, or bus send."""
if bus == "gain":
bus = 0
return(self.sends[bus])
class SubProc:
"""
Manages a subprocess
"""
def __init__(self, call_type, proc_name, args) -> None:
self.call_type = call_type
self.proc_name = proc_name
self.args = args
self.max = len(args)
self.current = 0
def toggle(self):
if self.call_type == "subprocess":
try:
subprocess.call([self.proc_name, self.args[self.current]])
except OSError:
pass
self.current += (self.current + 1) % self.max
# the config json file specifies a number of layers, each identified by a name
# within a layer there are currently three sections: encoders, buttons and a fader
# there are 8 encoders per layer with three parts: channel, send/fader/gain, button
#   fader, channel, [button]
#   gain, channel
#   level, channel, bus, [button]
# there are 18 buttons per layer; the button types and their expected lengths are:
button_def = {'quit': 2, 'none': 2, 'layer': 4, 'clip': 2, 'mute': 2, 'tap': 2, "send": 2}
button_types = set(button_def.keys())
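# A minimal, hypothetical config layer (added for illustration; the channel
# addresses, layer name and bus numbers are examples only), matching the shapes
# the Layer parser below expects:
#
#     {
#         "main": {
#             "encoders": [["/ch/01/mix", ["reset", "0.75"]], ...],   # 8 entries
#             "buttons":  [["mute", "/ch/01/mix"], ["layer", "main", 6, 0],
#                          ["quit", 0], ...],                         # 18 entries
#             "fader":    [["/lr/mix", "0"]]
#         }
#     }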
class Layer:
"""
Represents a logical layer as defined by the config file
"""
encoders = []
buttons = []
faders = []
channels = {}
proc_list = {}
active_bus = 0
def __init__(self, layer_name, config_layer, channels, layer_names, proc_list) -> None:
"Initialize a Layer"
        # first check for gross configuration errors
number = len(config_layer.keys())
if number > 3:
print("Warning: expected exactly three types of controls %d found" % number)
# Process the encoders
self.encoders = config_layer["encoders"]
if len(self.encoders) != 8:
print("Layer %s does not contain 8 'encoder' definitions." % layer_name)
for encoder in self.encoders:
if len(encoder) != 2:
print("Error: Encoder %s of layer %s does not contain 2 elements, exiting." % \
(encoder[0], layer_name))
exit()
if encoder[0] != "none" and encoder[0] not in channels.keys():
channels[encoder[0]] = Channel(encoder[0])
if encoder[1][0] == "mute" and encoder[1][1] not in channels.keys():
channels[encoder[1][1]] = Channel(encoder[1][1])
elif encoder[1][0] == "subprocess" and encoder[1][1] not in proc_list.keys():
proc_list[encoder[1][1]] = SubProc(encoder[1][0], encoder[1][1], encoder[1][2])
# Process the buttons
self.buttons = config_layer["buttons"]
if len(self.buttons) != 18:
print("Layer %s does not contain 18 'button' definitions." % layer_name)
for button in self.buttons:
if button[0] not in button_types:
print("Layer %s contains an unknown 'button' %s." % \
(layer_name, button[0]))
exit()
if len(button) != button_def[button[0]]:
print("Error: Button %s of layer %s does not contain %d elements" % \
(button[0], layer_name, button_def[button[0]]))
if button[0] == "layer" and button[1] not in layer_names:
print("Error: Layer %s has a change to undefined layer %s." % \
(layer_name, button[1]))
if button[0] == "mute" and button[1] not in channels.keys():
channels[button[1]] = Channel(button[1])
# Process the fader
self.faders = config_layer["fader"]
if len(self.faders) != 1:
print("Layer %s does not contain 1 'fader' definitions." % layer_name)
for fader in self.faders:
if len(fader) != 2:
print("Error: Fader %s of layer %s does not contain 2 elements, exiting." % \
(fader[0], layer_name))
exit()
if fader[0] != "none" and fader[0] != "quit" and fader[0] not in channels.keys():
channels[fader[0]] = Channel(fader[0])
self.channels = channels
self.proc_list = proc_list
def encoder_turn(self, number, value):
encoder = self.encoders[number]
if encoder[0] == "none":
return(None, None, 0.0)
return(self.channels[encoder[0]].change_level(self.active_bus, value))
def encoder_press(self, number):
encoder = self.encoders[number]
if encoder[1][0] == "reset":
return(self.channels[encoder[0]].set_level(self.active_bus, float(encoder[1][1])))
elif encoder[1][0] == "mute":
(address, param, LED) = self.channels[encoder[1][1]].toggle_mute(int(encoder[1][2]))
return(address, param, self.channels[encoder[0]].get_level(self.active_bus))
elif encoder[1][0] == "subprocess":
self.proc_list[encoder[1][1]].toggle()
return(None, None, self.channels[encoder[0]].get_level(self.active_bus))
if encoder[0] == "none":
return(None, None, 0.0)
return(None, None, self.channels[encoder[0]].get_level(self.active_bus))
def encoder_state(self, number):
encoder = self.encoders[number]
if encoder[0] == "none":
return 0.0
return(self.channels[encoder[0]].get_level(self.active_bus))
def toggle_button(self, number):
button = self.buttons[number]
if button[0] == "mute":
return (self.channels[button[1]].toggle_mute(self.active_bus))
if button[0] == "layer":
max_bus = int(button[2])
return ("layer", button[1], max_bus if self.active_bus > max_bus else self.active_bus)
if button[0] == "send":
if self.active_bus == int(button[1]):
self.active_bus = 0
return ("send", None, "Off")
else:
self.active_bus = int(button[1])
return ("send", None, "On")
# return the relevant action for the non mixer based button
return (button[0], button[1], button[-1])
def button_state(self, number):
button = self.buttons[number]
if button[0] == "mute":
return(self.channels[button[1]].get_mute(self.active_bus))
elif button[0] == "send":
return("On" if self.active_bus == int(button[1]) else "Off")
else:
return(button[-1])
def fader_move(self, value):
if self.faders[0][0] == "quit":
return("quit", "none", "none")
if self.faders[0][0] == "none":
return("none", "none", "none")
else:
return(self.channels[self.faders[0][0]].set_level(int(self.faders[0][1]), value))
def encoder_number(self, name):
number = 0
for encoder in self.encoders:
if name == encoder[0]:
return number
number += 1
return -1
def button_number(self, name):
number = 0
for button in self.buttons:
if len(button) > 1 and name == button[1]:
return number
number += 1
return -1
class Meter:
"""
Calculates the .2 second running average of the value of a channel meter
"""
values = 4
def __init__(self):
self.levels = deque(maxlen=self.values)
for _ in range(self.values):
self.levels.append(-102400)
self.mean = -102400 * self.values
def insert_level(self, value):
        'push a value into the fixed-length FIFO and update the mean'
self.mean = self.mean - self.levels.popleft() + value
self.levels.append(value)
return self.mean
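# Illustrative use of Meter (added for clarity): received_meters() below divides
# the returned sum by 1024 (4 samples * 256 counts/dB) to obtain a smoothed dB
# value; the raw readings here are hypothetical 16-bit meter values:
#
#     m = Meter()
#     for raw in (-2560, -2560, -2560, -2560):
#         smooth_db = m.insert_level(raw) / 1024   # -> -10.0 dB once the FIFO is full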
class MixerState:
"""
This stores the mixer state in the application. It also keeps
track of the current selected fader bank on the midi controller to
decide whether state changes from the X-Air device need to be
sent to the midi controller.
"""
quit_called = False
layers = {}
channels = {}
proc_list = {}
current_layer = None
# ID numbers for all available delay effects
_DELAY_FX_IDS = [10, 11, 12, 21, 24, 25, 26]
fx_slots = [0, 0, 0, 0]
lr = Channel('/lr/mix')
mpd_playing = True
midi_controller = None
xair_client = None
tempo_detector = None
meters = []
for i in range(16):
meters.append(Meter())
def __init__(self, args) -> None:
# split the arguments out to useful values
self.debug = args.debug
self.xair_address = args.xair_address
self.monitor = args.monitor
self.clip = args.clip
self.mac = False # args.mac
self.levels = args.levels
# initialize internal data structures
config_json = "peterdikant.json"
if args.config_file is not None:
config_json = args.config_file[0]
with open(config_json) as config_file:
config = json.load(config_file)
layer_names = config.keys()
for layer_name in layer_names:
if self.current_layer == None:
self.current_layer = layer_name
self.layers[layer_name] = Layer(layer_name, config[layer_name],
self.channels, layer_names, self.proc_list)
def initialize_state(self):
self.quit_called = False
# determine the mixer address
if self.xair_address is None:
self.xair_address = find_mixer()
if self.xair_address is None:
print('Error: Could not find any mixers in network.',
'Using default ip address.')
self.xair_address = "192.168.50.146"
# setup other modules
self.midi_controller = MidiController(self)
if self.quit_called:
self.midi_controller = None
return False
self.xair_client = XAirClient(self.xair_address, self)
self.xair_client.validate_connection()
if self.quit_called:
self.midi_controller = None
self.xair_client = None
return False
self.tempo_detector = TempoDetector(self)
self.read_initial_state()
self.midi_controller.activate_bus()
self.tempo_detector.number = self.layers[self.current_layer].button_number("tap")
return True
    def shutdown(self):
        "safely shut down all threads"
        self.quit_called = True
if self.xair_client is not None:
self.xair_client.stop_server()
self.xair_client = None
if self.midi_controller is not None:
self.midi_controller.cleanup_controller()
self.midi_controller = None
# if self.screen_obj is not None:
# self.screen_obj.quit()
def button_press(self, number):
"""Handle a button press."""
if self.debug:
print('Button %d pressed' % number)
(address, param, LED) = self.layers[self.current_layer].toggle_button(number)
if address == 'layer':
self.current_layer = param
self.layers[self.current_layer].active_bus = LED
self.midi_controller.activate_bus()
self.tempo_detector.number = self.layers[self.current_layer].button_number("tap")
return self.get_button(number)
elif address == 'send':
self.midi_controller.activate_bus()
return LED
elif address == 'clip':
self.clip = not self.clip
return "On" if self.clip else "Off"
elif address == 'quit':
self.shutdown()
exit()
elif address == 'record':
pass
return "Off"
elif address == 'tap':
self.tempo_detector.tap()
return "none"
if address != None:
if address.startswith('/config/mute'):
param = 1 if param == 0 else 0
self.xair_client.send(address=address, param=param)
return LED
def get_button(self, number):
if self.debug:
print('Getting state of button number %d' % number)
return(self.layers[self.current_layer].button_state(number))
# def mac_button(self, button):
# "call a function for transport buttons on mac"
# if button == 10:
# os.system("""osascript -e 'tell application "music" to previous track'""")
# elif button == 11:
# os.system("""osascript -e 'tell application "music" to next track'""")
# elif button == 12:
# self.state.shutdown()
# self.cleanup_controller()
# exit()
# elif button == 13:
# os.system("""osascript -e 'tell application "music" to pause'""")
# elif button == 14:
# os.system("""osascript -e 'tell application "music" to play'""")
def encoder_turn(self, number, delta):
"""Change the level of an encoder."""
(address, param, LED) = self.layers[self.current_layer].encoder_turn(number, delta)
if address != None:
self.xair_client.send(address=address, param=param)
return LED
def encoder_press(self, number):
(address, param, LED) = self.layers[self.current_layer].encoder_press(number)
if address != None:
print("sending %s %s" % (address, param))
if address.startswith('/config/mute'):
invert = 1 if param == 0 else 0
self.xair_client.send(address=address, param=invert)
number = self.layers[self.current_layer].button_number(address)
if number != -1:
self.midi_controller.set_channel_mute(number, param)
else:
self.xair_client.send(address=address, param=param)
return LED
def get_encoder(self, number):
if self.debug:
print('Getting state of encoder number %d' % number)
return(self.layers[self.current_layer].encoder_state(number))
def fader_move(self, msg):
value = (msg.pitch + 8192) / 16384
if self.debug:
print('Wheel set to {}'.format(msg))
(address, param, LED) = self.layers[self.current_layer].fader_move(value)
if address == "quit":
if value > .98:
self.shutdown()
exit()
elif address != 'none':
self.xair_client.send(address=address, param=param)
def received_osc(self, addr, value):
"""Process an OSC input."""
prefix = None
three_element = '/'.join(addr.split('/',4)[:-1]) # first three parts of the OSC path
two_element = '/'.join(addr.split('/',3)[:-1]) # first two parts of the OSC path
if self.debug:
print('processing OSC message with %s and %s value.' % (addr, value))
if three_element in self.channels.keys():
prefix = three_element
elif two_element in self.channels.keys():
prefix = two_element
elif addr in self.channels.keys():
prefix = addr
if prefix is not None:
if addr.startswith('/config/mute'):
invert = 1 if value == 0 else 0
if self.debug:
print (" received mute channel %s" % prefix)
self.channels[prefix].set_mute(0, invert)
number = self.layers[self.current_layer].button_number(prefix)
if number != -1:
self.midi_controller.set_channel_mute(number, invert)
elif addr.endswith('/fader'): # chanel fader level
if self.debug:
print(' Channel %s level %f' % (addr, value))
self.channels[prefix].set_level(0, value)
number = self.layers[self.current_layer].encoder_number(prefix)
if number != -1:
self.midi_controller.set_ring(number, value)
elif addr.endswith('/on'): # channel enable
if self.debug:
print(' %s unMute %d' % (addr, value))
self.channels[prefix].set_mute(0, value)
number = self.layers[self.current_layer].button_number(prefix)
if number != -1:
self.midi_controller.set_channel_mute(number, value)
elif addr.endswith('/level'):
if self.debug:
print(' %s level %f' % (addr, value))
bus = int(addr[-8:-6])
self.channels[prefix].set_level(bus, value)
number = self.layers[self.current_layer].encoder_number(prefix)
if number != -1:
self.midi_controller.set_ring(number, value)
elif addr.endswith('/gain'):
if self.debug:
print(' %s Gain level %f' % (addr, value))
self.channels[prefix].set_level("gain", value)
number = self.layers[self.current_layer].encoder_number(prefix)
if number != -1:
self.midi_controller.set_ring(number, value)
elif addr.startswith('/fx') and (addr.endswith('/par/01') or addr.endswith('/par/02')):
if self.fx_slots[int(addr[4:5]) - 1] in self._DELAY_FX_IDS:
self.tempo_detector.current_tempo = value * 3
elif addr.startswith('/fx/') and addr.endswith('/type'):
self.fx_slots[int(addr[4:5]) - 1] = value
if value in self._DELAY_FX_IDS:
# slot contains a delay, get current time value
param_id = '01'
if value == 10:
param_id = '02'
self.xair_client.send(address = '/fx/%s/par/%s' % (addr[4:5], param_id))
elif self.debug:
print('processing unknown OSC message with %s and %s value.' % (addr, value))
def read_initial_state(self):
""" Refresh state for all faders and mutes."""
for channel in self.channels.values():
addr = channel.osc_base_addr
if addr.startswith('/head'):
self.xair_client.send(address=addr + '/gain')
time.sleep(self.xair_client._WAIT_TIME)
elif addr.startswith('/config'):
self.xair_client.send(address=addr)
time.sleep(self.xair_client._WAIT_TIME)
else:
self.xair_client.send(address=addr + '/fader')
time.sleep(self.xair_client._WAIT_TIME)
self.xair_client.send(address=addr + '/on')
time.sleep(self.xair_client._WAIT_TIME)
if channel.sends is not None:
for k in range(len(channel.sends)):
self.xair_client.send(address=addr +
'/{:0>2d}/level'.format(k + 1))
time.sleep(self.xair_client._WAIT_TIME)
# get all fx types
for i in range(1, 5):
self.xair_client.send(address = '/fx/%d/type' % i)
time.sleep(self.xair_client._WAIT_TIME)
def update_tempo(self, tempo):
for i in range(0, 4):
if self.fx_slots[i] in self._DELAY_FX_IDS:
param_id = '01'
if self.fx_slots[i] == 10:
# only delay where time is set as parameter 02
param_id = '02'
self.xair_client.send(address = '/fx/%d/par/%s' % (i + 1, param_id), param = tempo / 3)
# the meter subscription is setup in the xair_client in the refresh method that runs every 5s
# a subscription sends values every 50ms for 10s
#
# meters 1 provide data per channel and bus
# meters 2, input levels, as these will match the headamps even if mapped to other channels
# layout below assumes meters 2
def received_meters(self, addr, data):
"receive an OSC Meters packet"
data_size = struct.unpack("<L", data[0][0:4])[0]
values = []
short = []
med = []
for i in range(data_size):
if i > 15:
break
# get the current meter as a 16bit signed int mapped to -128db to 128db
# 1/256 db resolution, aka .004 dB
# realistic values max at 0db
value = struct.unpack("<h", data[0][(4+(i*2)):4+((i+1)*2)])[0]
# push the value into the fixed length fifo and get the smoothed value
smooth = self.meters[i].insert_level(value)/1024
if self.debug:
values.append("%0.2f" % smooth)
short.append(value)
med.append(value/256)
if self.clip and smooth > -3 and (i < 8 or i > 11):
# if clip protection is enabled and not a drum and above -3 db
active_bank = self.active_bank
fader = i
if fader < 8:
self.active_bank = 2
else:
self.active_bank = 3
fader = fader - 8
self.change_level(fader, -1) ## needs FIXME
if self.debug:
print("Clipping Detected headamp changed to %s" %
self.banks[self.active_bank][fader].fader)
self.active_bank = active_bank
if self.debug:
print('Meters %s ch 8 %s %s %s' % (addr, values[7], short[7], med[7]))
# if self.screen_obj is not None:
# self.screen_obj.screen_loop()
|
StarcoderdataPython
|