id (string, 1-7 chars) | text (string, 6-1.03M chars) | dataset_id (string, 1 class) |
---|---|---|
1770687
|
<reponame>remram44/d3m-primitives
import numpy as np
from rpi_d3m_primitives.featSelect.helperFunctions import normalize_array, joint
from rpi_d3m_primitives.featSelect.mutualInformation import mi, joint_probability
"""---------------------------- CONDITIONAL MUTUAL INFORMATION ----------------------------"""
def mergeArrays(firstVector, secondVector, length):
if length == 0:
length = firstVector.size
results = normalize_array(firstVector, 0)
firstNumStates = results[0]
firstNormalisedVector = results[1]
results = normalize_array(secondVector, 0)
secondNumStates = results[0]
secondNormalisedVector = results[1]
    # No manual state map is needed here: np.unique with return_inverse below both
    # enumerates the distinct joint states and maps every sample onto its state index.
joint_states = np.column_stack((firstVector,secondVector))
uniques,merge = np.unique(joint_states,axis=0,return_inverse=True)
stateCount = len(uniques)
results = []
results.append(stateCount)
results.append(merge)
return results
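# Illustrative example (added for clarity, not part of the original module):
# merging firstVector = [0, 1, 0] with secondVector = [1, 1, 0] produces the joint
# samples (0,1), (1,1), (0,0); np.unique sorts the distinct states to
# [(0,0), (0,1), (1,1)], so stateCount == 3 and merge == [1, 2, 0].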
def conditional_entropy(dataVector, conditionVector, length):
condEntropy = 0
jointValue = 0
condValue = 0
if length == 0:
length = dataVector.size
results = joint_probability(dataVector, conditionVector, 0)
jointProbabilityVector = results[0]
numJointStates = results[1]
numFirstStates = results[3]
secondProbabilityVector = results[4]
for i in range(0, numJointStates):
jointValue = jointProbabilityVector[i]
condValue = secondProbabilityVector[int(i / numFirstStates)]
if jointValue > 0 and condValue > 0:
condEntropy -= jointValue * np.log2(jointValue / condValue);
return condEntropy
def cmi(dataVector, targetVector, conditionVector, length = 0):
if (conditionVector.size == 0):
return mi(dataVector,targetVector,0)
if (len(conditionVector.shape)>1 and conditionVector.shape[1]>1):
conditionVector = joint(conditionVector)
cmi = 0;
firstCondition = 0
secondCondition = 0
if length == 0:
length = dataVector.size
results = mergeArrays(targetVector, conditionVector, length)
mergedVector = results[1]
firstCondition = conditional_entropy(dataVector, conditionVector, length)
secondCondition = conditional_entropy(dataVector, mergedVector, length)
cmi = firstCondition - secondCondition
return cmi
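# Minimal usage sketch (added for illustration, not part of the original module).
# It assumes the rpi_d3m_primitives helpers imported above are installed and follow
# the standard information-theoretic definitions.
if __name__ == "__main__":
    x = np.array([0, 0, 1, 1, 0, 1, 0, 1])
    y = np.array([0, 1, 0, 1, 0, 1, 0, 1])
    z = np.array([0, 0, 0, 0, 1, 1, 1, 1])
    print("I(x;y|z) =", cmi(x, y, z))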
|
StarcoderdataPython
|
4811282
|
import json
import discord
import logging
from random_message import *
from util.decorator import only_owner
logger = logging.getLogger("Link")
def load_link_file():
try:
with open("private/link.data", 'r') as fd:
return json.loads(fd.read())
except:
logger.error("IMPOSSBILE DE LOAD LE FICHIER private/link.data !")
return {}
def save_link_file(data):
with open("private/link.data", 'w') as fd:
fd.write(json.dumps(data))
linked = load_link_file() #format = {channel_id : [channel_id, ...], ...}
async def show(message):
txt = "Salons liés:\n"
for c, l in linked.items():
if l: txt += "<#{}> -> {}\n".format(c, ", ".join(["<#{}>".format(i) for i in l]))
await message.channel.send(txt)
async def add(message, args):
channel_id = int(args[1])
    if str(channel_id) not in linked:  # keys are stored as strings
linked[str(channel_id)] = []
linked[str(channel_id)].append(message.channel.id)
if len(args) <= 2 or args[2] != "uni":
        if str(message.channel.id) not in linked:  # keys are stored as strings
linked[str(message.channel.id)] = []
linked[str(message.channel.id)].append(channel_id)
save_link_file(linked)
async def delete(message, args):
deleted = []
if len(args) == 1:
for i in linked[str(message.channel.id)]:
try:
linked[str(i)].remove(message.channel.id)
deleted.append((i, message.channel.id))
if not linked[str(i)]: del linked[str(i)]
except:
pass
await message.channel.send("Link détruit : {}".format(
"\n".join(["<#{}> -> <#{}>".format(i, j) for i,j in deleted] +
["<#{}> -> <#{}>".format(
message.channel.id, i) for i in linked[str(message.channel.id)]])))
del linked[str(message.channel.id)]
else:
try:
linked[str(message.channel.id)].remove(int(args[1]))
await message.channel.send("<#{}> -> <#{}>".format(message.channel.id, args[1]))
except:
pass
try:
linked[args[1]].remove(message.channel.id)
await message.channel.send("<#{}> -> <#{}>".format(args[1], message.channel.id))
except:
pass
await message.channel.send("")
save_link_file(linked)
async def send_to_linked(client, message):
if str(message.channel.id) in linked.keys() and message.author != client.user:
em = discord.Embed(description=message.content,
colour=message.author.colour,
timestamp=message.created_at
)
try:
if message.attachments:
                em.set_image(url=message.attachments[0].url)
except:
pass
em.set_author(name=message.author.name,
icon_url=message.author.avatar_url,
url=message.jump_url
)
em.set_footer(icon_url=message.guild.icon_url,
text= message.guild.name + " | #" +
message.channel.name)
for channel in linked[str(message.channel.id)]:
try: await client.get_channel(channel).send(None,embed=em,)
except : pass
class CmdLink:
@only_owner
async def cmd_link(self, *args, message, member, **_):
if args[0] == "show" : await show(message)
if args[0] == "add" : await add(message, args)
if args[0] == "delete": await delete(message, args)
|
StarcoderdataPython
|
177951
|
<gh_stars>0
"""
Complete the function that accepts a string parameter,
and reverses each word in the string. All spaces in
the string should be retained.
Examples
"This is an example!" ==> "sihT si na !elpmaxe"
"double spaces" ==> "elbuod secaps"
"""
def reverse_words(text):
str_list = []
for word in text.split(' '):
str_list.append(word[::-1])
return ' '.join(str_list)
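# Small sanity check against the docstring examples (added for illustration,
# not part of the original kata solution).
if __name__ == "__main__":
    assert reverse_words("This is an example!") == "sihT si na !elpmaxe"
    assert reverse_words("double spaces") == "elbuod secaps"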
|
StarcoderdataPython
|
3381051
|
import pandas as pd
import numpy as np
df = pd.read_csv('csv/rest.csv')
df = df[['names', 'category', 'rating', 'reviews', 'cost', 'cuisine', 'featured', 'location','urls']]
# print(type(df['featured'][1])) # its a string, not a list. convert to list by split.
df['names'] = df['names'].apply(lambda x : x.strip())
df['rating'] = df['rating'].str.extract(r'(^\d\.\d)')  # escape the dot so it matches the decimal point
df['rating'] = df['rating'].apply(lambda x : float(x))
df['reviews'] = df['reviews'].str.extract(r'(\d*,?\d+)')
df['reviews'] = df['reviews'].apply(lambda x : float(x.replace(',','') if isinstance(x, str) else np.nan))
# df['reviews'] = df['reviews'].astype('Int32')
df['cost'] = df['cost'].str.extract(r'(\d*,?\d+)')
df['cost'] = df['cost'].apply(lambda x : float(x.replace(',','') if isinstance(x, str) else np.nan))
print(df.info())
# print(df['location'].value_counts())
urls_re = df['urls'].to_frame()
urls_re.to_csv('csv/urls.csv', index=False, header=False)
df.to_csv('csv/rest_cleaned.csv',index=False)
|
StarcoderdataPython
|
3346653
|
<filename>scraper/storage_spiders/chihienvn.py
# Auto generated by generator.py. Delete this line if you make modification.
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
XPATH = {
'name' : "//div[@class='box-content']/div[@class='box-header']/h1",
'price' : "//div[@class='product-detail-table']/div[@class='product-field']/table//tr[7]/td[2]",
'category' : "//div[@id='breadcrumbs']/a",
'description' : "//div[@id='content_description']/p",
'images' : "//div[@class='product-detail-thumb']/a/img/@src",
'canonical' : "",
'base_url' : "",
'brand' : ""
}
name = 'chihien.vn'
allowed_domains = ['chihien.vn']
start_urls = ['http://chihien.vn']
tracking_url = ''
sitemap_urls = ['']
sitemap_rules = [('', 'parse_item')]
sitemap_follow = []
rules = [
Rule(LinkExtractor(allow=['/\d+-sp+\.html']), 'parse_item'),
Rule(LinkExtractor(allow=['/danhmuc.html']), 'parse'),
#Rule(LinkExtractor(), 'parse_item_and_links'),
]
|
StarcoderdataPython
|
1795274
|
<gh_stars>10-100
from lookup import files as lookup_files
from ui.read import x as ui_read
from core.read import read as core_read
from ui.read import plasma_selection
from api.plasma import open as opener
def goto_selection(view):
read_view = ui_read.all(view)
plasmas = core_read.plasmas(read_view.ptext)
plasma = plasma_selection.plasma(view, plasmas)
if plasma != None:
_goto_module(view, plasma)
def _goto_module(view, plasma):
read_view = ui_read.all(view)
candidates = lookup_files.file_by_dep(read_view.base, plasma, read_view.nests)
opener.file_list_open(view, candidates)
|
StarcoderdataPython
|
178588
|
import numpy as np
from scipy import ndimage
'''
See paper: Sensors 2018, 18(4), 1055; https://doi.org/10.3390/s18041055
"Divide and Conquer-Based 1D CNN Human Activity Recognition Using Test Data Sharpening"
by <NAME> & <NAME>
This code loads and sharpens UCI HAR Dataset data.
UCI HAR Dataset data can be downloaded from:
https://archive.ics.uci.edu/ml/datasets/human+activity+recognition+using+smartphones
Unzipped dataset should be placed inside the '../data/UCI HAR Dataset/' folder.
'''
dir_path = '../data/UCI HAR Dataset/'
def load_x(train_or_test):
global dir_path
    if train_or_test == "train":
        x_path = dir_path + 'train/X_train.txt'
    elif train_or_test == "test":
        x_path = dir_path + 'test/X_test.txt'
with open(x_path) as f:
container = f.readlines()
result = []
for line in container:
tmp1 = line.strip()
        tmp2 = tmp1.replace('  ', ' ') # removes inconsistent (double) blank spaces
        tmp_ary = list(map(float, tmp2.split(' ')))  # list() so np.array gets numbers, not a map object
result.append(tmp_ary)
return np.array(result)
def load_y(train_or_test):
global dir_path
    if train_or_test == "train":
        y_path = dir_path + 'train/y_train.txt'
    elif train_or_test == "test":
        y_path = dir_path + 'test/y_test.txt'
with open(y_path) as f:
container = f.readlines()
result = []
for line in container:
num_str = line.strip()
result.append(int(num_str))
return np.array(result)
def sharpen(x_test, sigma, alpha):
r = x_test.shape[0]
c = x_test.shape[1]
container = np.empty((r, c))
i = 0
for row in x_test:
test = np.array([row])
blurred = ndimage.gaussian_filter(test, sigma)
sharpened = test + alpha * (test - blurred)
container[i] = sharpened
i = i + 1
return container
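# Illustrative usage sketch (added, not part of the original script): load the test
# split and apply the paper's test-data sharpening. It assumes the UCI HAR Dataset
# has been unzipped into '../data/UCI HAR Dataset/' as noted in the module docstring;
# the sigma and alpha values below are placeholders, not the paper's tuned settings.
if __name__ == "__main__":
    X_test = load_x("test")
    y_test = load_y("test")
    X_sharp = sharpen(X_test, sigma=1.0, alpha=0.5)
    print(X_test.shape, X_sharp.shape, y_test.shape)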
|
StarcoderdataPython
|
3352768
|
<gh_stars>0
# Copyright (c) 2006-2010 <NAME> http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Unit tests for subscribing SQS queues to SNS topics.
"""
import hashlib
import time
import json
from tests.unit import unittest
from boto.sqs.connection import SQSConnection
from boto.sns.connection import SNSConnection
class SNSSubcribeSQSTest(unittest.TestCase):
sqs = True
sns = True
def setUp(self):
self.sqsc = SQSConnection()
self.snsc = SNSConnection()
def get_policy_statements(self, queue):
attrs = queue.get_attributes('Policy')
policy = json.loads(attrs.get('Policy', "{}"))
return policy.get('Statement', {})
def test_correct_sid(self):
now = time.time()
topic_name = queue_name = "test_correct_sid%d" % (now)
timeout = 60
queue = self.sqsc.create_queue(queue_name, timeout)
self.addCleanup(self.sqsc.delete_queue, queue, True)
queue_arn = queue.arn
topic = self.snsc.create_topic(topic_name)
topic_arn = topic['CreateTopicResponse']['CreateTopicResult']\
['TopicArn']
self.addCleanup(self.snsc.delete_topic, topic_arn)
expected_sid = hashlib.md5(topic_arn + queue_arn).hexdigest()
resp = self.snsc.subscribe_sqs_queue(topic_arn, queue)
found_expected_sid = False
statements = self.get_policy_statements(queue)
for statement in statements:
if statement['Sid'] == expected_sid:
found_expected_sid = True
break
self.assertTrue(found_expected_sid)
def test_idempotent_subscribe(self):
now = time.time()
topic_name = queue_name = "test_idempotent_subscribe%d" % (now)
timeout = 60
queue = self.sqsc.create_queue(queue_name, timeout)
self.addCleanup(self.sqsc.delete_queue, queue, True)
initial_statements = self.get_policy_statements(queue)
queue_arn = queue.arn
topic = self.snsc.create_topic(topic_name)
topic_arn = topic['CreateTopicResponse']['CreateTopicResult']\
['TopicArn']
self.addCleanup(self.snsc.delete_topic, topic_arn)
resp = self.snsc.subscribe_sqs_queue(topic_arn, queue)
time.sleep(3)
first_subscribe_statements = self.get_policy_statements(queue)
self.assertEqual(len(first_subscribe_statements),
len(initial_statements) + 1)
resp2 = self.snsc.subscribe_sqs_queue(topic_arn, queue)
time.sleep(3)
second_subscribe_statements = self.get_policy_statements(queue)
self.assertEqual(len(second_subscribe_statements),
len(first_subscribe_statements))
|
StarcoderdataPython
|
78130
|
import os
import argparse
import matplotlib.pyplot as plt
from datetime import datetime, timedelta, date
import nottingham_covid_modelling.lib.priors as priors
import numpy as np
import pints
from nottingham_covid_modelling import MODULE_DIR
# Load project modules
from nottingham_covid_modelling.lib._command_line_args import NOISE_MODEL_MAPPING
from nottingham_covid_modelling.lib.equations import get_model_SIUR_solution, get_model_solution, get_model_SIR_solution, get_model_SEIUR_solution
from nottingham_covid_modelling.lib.settings import Params, get_file_name_suffix
MODEL_FUNCTIONS ={'SItD':get_model_solution, 'SIR': get_model_SIR_solution, 'SIRDeltaD': get_model_SIR_solution, 'SIUR':get_model_SIUR_solution, 'SEIUR':get_model_SEIUR_solution}
# Functions
def parameter_to_optimise_list(FitFull, FitStep, model_name):
# Valid model_names: 'SIR', 'SIRDeltaD', 'SItD', 'SIUR'
assert model_name in ['SIR', 'SIRDeltaD', 'SItD', 'SIUR', 'SEIUR'], "Unknown model"
parameters_to_optimise = ['rho', 'Iinit1']
if FitFull:
if model_name != 'SItD':
parameters_to_optimise.extend(['theta'])
if model_name == 'SIUR':
parameters_to_optimise.extend(['xi'])
if model_name == 'SEIUR':
parameters_to_optimise.extend(['eta'])
parameters_to_optimise.extend(['xi'])
if model_name == 'SIRDeltaD':
parameters_to_optimise.extend(['DeltaD'])
# parameters_to_optimise.extend(['negative_binomial_phi']) <- this one is added in the likelihood class
if FitStep:
parameters_to_optimise.extend(['lockdown_baseline', 'lockdown_offset'])
return parameters_to_optimise
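# Example (illustrative): parameter_to_optimise_list(True, True, 'SEIUR') returns
# ['rho', 'Iinit1', 'theta', 'eta', 'xi', 'lockdown_baseline', 'lockdown_offset'];
# 'negative_binomial_phi' is appended later by the likelihood class.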
def run_optimise():
parser = argparse.ArgumentParser()
parser.add_argument("-r", "--repeats", type=int, help="number of CMA-ES repeats", default=5)
parser.add_argument("-d", "--detailed_output", action='store_true',
help="whether to output detailed information (CMA-ES logs and all repeat parameters) or not",
default=False)
parser.add_argument("--cmaes_fits", type=str, help="folder to store cmaes fits files in, default: ./cmaes_fits_SIR",
default=os.path.join(MODULE_DIR, 'cmaes_fits_SIR'))
parser.add_argument("--limit_pints_iterations", type=int, default=None,
help=("limit pints to a maximum number of iterations. NOTE: this is mostly for debug and "
"testing purposes, you probably don't want to use this to get meaningful results!"))
parser.add_argument("--model_name", type=str, help="which model to use", choices=MODEL_FUNCTIONS.keys(), default='SIR')
parser.add_argument("-full", "--fit_full", action='store_false', help='Whether to fit all the model parameters, or only [rho, I0, NB_phi], ', default=True)
parser.add_argument("-fitstep", "--fit_step", action='store_false', help='Whether to fit step parameters', default=True)
parser.add_argument("--syndata_num", type=int, help="Give the number of the synthetic data set you want to fit, default 1", default=1)
    # At the moment, synthetic data sets 2-9 have travel and step options only. There is only one data set without step and one with neither travel nor step.
args = parser.parse_args()
repeats = args.repeats
FitFull = args.fit_full
FitStep = args.fit_step
ModelName = args.model_name
SyntDataNum_file = args.syndata_num
max_iterations = args.limit_pints_iterations
# For reproducibility:
np.random.seed(100)
# Number of days to fit
maxtime_fit = 150
# Get parameters, p
p = Params()
# Fixed for the synth data, based on UK google and ONS data:
p.N = 59.1e6
p.numeric_max_age = 35
p.extra_days_to_simulate = 10
p.IFR = 0.00724 # UK time
p.square_lockdown = True
p.alpha = np.ones(p.maxtime)
p.lockdown_baseline = 0.2814 #0.2042884852266899
p.lockdown_offset = 31.57 #34.450147247864166
# For saving file names:
rho_label = '_rho_0-2'
Noise_label = 'NBphi_2e-3_'
# Storing values in the models so there is no error after (since there is no store_params for the simple models)
if ModelName != 'SItD':
p.beta = 1
p.theta = 1 / p.beta_mean
p.eta = 1 / p.beta_mean
p.DeltaD = 0
p.xi = 1 / (p.death_mean - p.beta_mean)
# define the params to optimize
parameters_to_optimise = parameter_to_optimise_list(FitFull, FitStep, ModelName)
# Get noise model
noise_model = NOISE_MODEL_MAPPING['NegBinom']
# Get simulated Age data from file
print('Getting simulated data...')
# folder to load data
    if SyntDataNum_file == 1: # Original default synthetic data
folder_path = os.path.join(MODULE_DIR, 'out_SIRvsAGEfits')
full_fit_data_file = 'SItRDmodel_ONSparams_noise_NB_NO-R_travel_TRUE_step_TRUE.npy'
else:
folder_path = os.path.join(MODULE_DIR, 'out_SIRvsAGE_SuplementaryFig')
full_fit_data_file = 'SynteticSItD_default_params_travel_TRUE_step_TRUE_' + str(SyntDataNum_file) + '.npy'
data_filename = full_fit_data_file
# Load data
data = np.load(os.path.join(folder_path, data_filename ))
data_S = data[:,0]
data_Itot = data[:,1]
data_R = data[:,2]
data_Dreal = data[:,3]
data_D = data[:,4] # noise data
data_I = data[:,5:].T # transpose to get exactly the same shape as other code
if len(data_R) < maxtime_fit:
p.maxtime = len(data_R) -1
maxtime_fit = len(data_R) -1
else:
p.maxtime = maxtime_fit
    # cut the data to the maxtime length:
data_D = data_D[:p.maxtime+1]
data_Dreal = data_Dreal[:p.maxtime+1]
data_S_long = data_S
data_S = data_S[:p.maxtime+1]
data_Itot = data_Itot[:p.maxtime+1]
data_R = data_R[:p.maxtime+1]
data_I = data_I[:,:p.maxtime+1]
    # to get the same data and fit lengths as in Data_loader
p.maxtime = p.maxtime + p.numeric_max_age + p.extra_days_to_simulate #D[p.day_1st_death_after_150220: -(p.numeric_max_age + p.extra_days_to_simulate)]
p.day_1st_death_after_150220 = 22
# OPTIMISATION:
print('Starting optimization...')
# Set up optimisation
folder = args.cmaes_fits
os.makedirs(folder, exist_ok=True) # Create CMA-ES output destination folder
filename = os.path.join(folder, get_file_name_suffix(p, 'SimSItD-' + str(SyntDataNum_file) + rho_label, Noise_label + 'model-' + ModelName + '_full-fit-' + str(FitFull), parameters_to_optimise))
print('Selected data source: ' + data_filename)
print('Selected noise model: Negative Binomial')
print('Storing results to: ' + filename + '.txt')
# Get likelihood function
model_func = MODEL_FUNCTIONS[ModelName]
LL = noise_model(p, data_D[p.day_1st_death_after_150220:] , parameters_to_optimise, model_func = model_func)
upper_sigma = np.max(data_D)
log_prior = priors.LogPrior(LL, upper_sigma, model_name = ModelName)
parameters, scores = [], []
# Tell CMA-ES about the bounds of this optimisation problem (helps it work out sensible sigma)
bounds = pints.RectangularBoundaries(log_prior.lower, log_prior.upper)
# Repeat optimisation multiple times from different initial guesses and pick best
for i in range(repeats):
print('Repeat: ' + str(i + 1))
# Random initial guesses from uniform priors
x0 = priors.get_good_starting_point(log_prior, LL, niterations=1000)
# Create optimiser
opt = pints.OptimisationController(LL, x0, boundaries=bounds, method=pints.CMAES)
opt.set_max_iterations(max_iterations)
opt.set_parallel(True)
# Run optimisation
with np.errstate(all='ignore'): # Tell numpy not to issue warnings
xbest, fbest = opt.run()
parameters.append(xbest)
scores.append(-fbest)
# Sort according to smallest function score
order = np.argsort(scores)
scores = np.asarray(scores)[order]
parameters = np.asarray(parameters)[order]
print('---- Summary ...')
print('Best parameters: ')
print(parameters[0])
print('Best score:')
print(-scores[0])
# Extract best
obtained_parameters = parameters[0]
# Store results
print('Storing best result...')
with open(filename + '.txt', 'w') as f:
for x in obtained_parameters:
f.write(pints.strfloat(x) + '\n')
print('Storing all errors...')
with open(filename + '-errors.txt', 'w') as f:
for score in scores:
f.write(pints.strfloat(-score) + '\n')
if args.detailed_output:
print('Storing all parameters...')
for i, param in enumerate(parameters):
with open(filename + '-parameters-' + str(1 + i) + '.txt', 'w') as f:
for x in param:
f.write(pints.strfloat(x) + '\n')
|
StarcoderdataPython
|
3396472
|
import pygame
pygame.init()
screen = pygame.display.set_mode((640, 480))
background = pygame.Surface(screen.get_size())
background.fill((255, 255, 255))
sprite = pygame.image.load("mario.png")
x=150
y=150
clock = pygame.time.Clock()
while 1:
clock.tick(40)
pygame.event.pump()
keyinput = pygame.key.get_pressed()
if keyinput[pygame.K_UP]:
y = y - 20
elif keyinput[pygame.K_DOWN]:
y = y + 20
elif keyinput[pygame.K_ESCAPE]:
raise SystemExit
screen.blit(background, (0,0))
screen.blit(sprite, (x, y))
pygame.display.flip()
|
StarcoderdataPython
|
192253
|
<filename>caffe-dslt/examples/test.py
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 1 22:05:35 2016
@author: luxiankai
"""
import numpy as np
import matplotlib.pyplot as plt
#%matplotlib inline
# Make sure that caffe is on the python path:
caffe_root = '../' # this file is expected to be in {caffe_root}/examples
import sys
sys.path.insert(0, caffe_root + 'python')
import caffe
plt.rcParams['figure.figsize'] = (10, 10)
plt.rcParams['image.interpolation'] = 'nearest'
plt.rcParams['image.cmap'] = 'gray'
import os
MEAN_FILE=caffe_root+'examples/ResNet/ResNet_mean.binaryproto'
mean_blob = caffe.proto.caffe_pb2.BlobProto()
mean_blob.ParseFromString(open(MEAN_FILE, 'rb').read())
# Convert the mean blob to a numpy.array
mean_npy = caffe.io.blobproto_to_array(mean_blob)
print(mean_npy.shape)
|
StarcoderdataPython
|
17167
|
<filename>wxtbx/wx4_compatibility.py
from __future__ import absolute_import, division, print_function
'''
Author : Lyubimov, A.Y.
Created : 04/14/2014
Last Changed: 11/05/2018
Description : wxPython 3-4 compatibility tools
The context managers, classes, and other tools below can be used to make the
GUI code compatible with wxPython 3 and 4. Mostly, the tools convert the
functions, enumerations, and classes which have been renamed in wxPython 4;
the name mismatches result in exceptions.
Use case 1: subclassing wx.PyControl or wx.Control:
from wxtbx import wx4_compatibility as wx4c
WxCtrl = wx4c.get_wx_mod(wx, wx.Control)
class MyCustomControl(WxCtrl): ...
Use case 2: brush style (NOTE: you can do that with fonts as well, but it
doesn't seem to be necessary):
from wxtbx import wx4_compatibility as wx4c
bkgrd = self.GetBackgroundColour()
with wx4c.set_brush_style(wx.BRUSHSTYLE_SOLID) as bstyle:
brush = wx.Brush(bkgrd, bstyle)
Use case 3: Toolbars
from wxtbx import wx4_compatibility as wx4c, bitmaps
class MyFrame(wx.Frame):
def __init__(self, parent, id, title, *args, **kwargs):
wx.Frame.__init__(self, parent, id, title, *args, **kwargs)
self.toolbar = wx4c.ToolBar(self, style=wx.TB_TEXT)
self.quit_button = self.toolbar.AddTool(toolId=wx.ID_ANY,
label='Quit',
kind=wx.ITEM_NORMAL,
                                            bitmap=bitmaps.fetch_icon_bitmap('actions', 'exit'),
                                            shortHelp='Exit program')
...
self.SetToolBar(self.toolbar)
self.toolbar.Realize()
'''
import wx
from contextlib import contextmanager
import importlib
wx4 = wx.__version__[0] == '4'
modnames = [
('PyControl', 'Control'),
('PyDataObjectSimple', 'DataObjectSimple'),
('PyDropTarget', 'DropTarget'),
('PyEvtHandler', 'EvtHandler'),
('PyImageHandler', 'ImageHandler'),
('PyLocale', 'Locale'),
('PyLog', 'Log'),
('PyPanel', 'Panel'),
('PyPickerBase', 'PickerBase'),
('PyPreviewControlBar', 'PreviewControlBar'),
('PyPreviewFrame', 'PreviewFrame'),
('PyPrintPreview', 'PrintPreview'),
('PyScrolledWindow', 'ScrolledWindow'),
('PySimpleApp', 'App'),
('PyTextDataObject', 'TextDataObject'),
('PyTimer', 'Timer'),
('PyTipProvider', 'adv.TipProvider'),
('PyValidator', 'Validator'),
  ('PyWindow', 'Window')
]
font_families = [
(wx.DEFAULT, wx.FONTFAMILY_DEFAULT),
(wx.DECORATIVE, wx.FONTFAMILY_DECORATIVE),
(wx.ROMAN, wx.FONTFAMILY_ROMAN),
(wx.SCRIPT, wx.FONTFAMILY_SCRIPT),
(wx.SWISS, wx.FONTFAMILY_SWISS),
(wx.MODERN, wx.FONTFAMILY_MODERN),
(wx.TELETYPE, wx.FONTFAMILY_TELETYPE)
]
font_weights = [
(wx.NORMAL, wx.FONTWEIGHT_NORMAL),
(wx.LIGHT, wx.FONTWEIGHT_LIGHT),
(wx.BOLD, wx.FONTWEIGHT_BOLD)
]
font_styles = [
(wx.NORMAL, wx.FONTSTYLE_NORMAL),
(wx.ITALIC, wx.FONTSTYLE_ITALIC),
(wx.SLANT, wx.FONTSTYLE_SLANT)
]
pen_styles = [
(wx.SOLID, wx.PENSTYLE_SOLID),
(wx.DOT, wx.PENSTYLE_DOT),
(wx.LONG_DASH, wx.PENSTYLE_LONG_DASH),
(wx.SHORT_DASH, wx.PENSTYLE_SHORT_DASH),
(wx.DOT_DASH, wx.PENSTYLE_DOT_DASH),
(wx.USER_DASH, wx.PENSTYLE_USER_DASH),
(wx.TRANSPARENT, wx.PENSTYLE_TRANSPARENT)
]
brush_styles = [
(wx.SOLID, wx.BRUSHSTYLE_SOLID),
(wx.TRANSPARENT, wx.BRUSHSTYLE_TRANSPARENT),
(wx.STIPPLE_MASK_OPAQUE, wx.BRUSHSTYLE_STIPPLE_MASK_OPAQUE),
(wx.STIPPLE_MASK, wx.BRUSHSTYLE_STIPPLE_MASK),
(wx.STIPPLE, wx.BRUSHSTYLE_STIPPLE),
(wx.BDIAGONAL_HATCH, wx.BRUSHSTYLE_BDIAGONAL_HATCH),
(wx.CROSSDIAG_HATCH, wx.BRUSHSTYLE_CROSSDIAG_HATCH),
(wx.FDIAGONAL_HATCH, wx.BRUSHSTYLE_FDIAGONAL_HATCH),
(wx.CROSS_HATCH, wx.BRUSHSTYLE_CROSS_HATCH),
(wx.HORIZONTAL_HATCH, wx.BRUSHSTYLE_HORIZONTAL_HATCH),
(wx.VERTICAL_HATCH, wx.BRUSHSTYLE_VERTICAL_HATCH),
]
def find_module(module):
for m in modnames:
if module.__name__ in m:
return m
def find_enum(enums, item):
for en in enums:
if item in en:
value = en[1] if wx4 else en[0]
return value
def get_wx_mod(base, module):
mname = find_module(module)[1] if wx4 else find_module(module)[0]
bname = base.__name__
if '.' in mname:
spl = [i for i in mname.split('.') if i != bname]
modname = '.'.join(spl[:-1])
mod = importlib.import_module('{}.{}'.format(bname, modname))
return getattr(mod, spl[-1])
else:
return getattr(base, mname)
@contextmanager
def wx_mod(base, module):
''' Identify and import the appropriate wxPython module '''
yield get_wx_mod(base, module)
@contextmanager
def set_font_style(style):
yield find_enum(font_styles, style)
@contextmanager
def set_font_weight(weight):
yield find_enum(font_weights, weight)
@contextmanager
def set_font_family(family):
yield find_enum(font_families, family)
@contextmanager
def set_pen_style(style):
yield find_enum(pen_styles, style)
@contextmanager
def set_brush_style(style):
yield find_enum(brush_styles, style)
@contextmanager
def create_measuring_context():
dc = wx.GraphicsContext.Create() if wx4 else \
wx.GraphicsContext.CreateMeasuringContext()
yield dc
class Wx3ToolBar(wx.ToolBar):
''' Special toolbar class that accepts wxPython 4-style AddTool command and
converts it to a wxPython 3-style AddLabelTool command '''
def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition,
size=wx.DefaultSize, style=wx.TB_HORIZONTAL, name='toolbar'):
wx.ToolBar.__init__(self, parent, id, pos, size, style, name)
def AddTool(self, toolId, label, bitmap, bmpDisabled=wx.NullBitmap,
kind=wx.ITEM_NORMAL, shortHelp='', longHelp='',
clientData=None):
''' Override to make this a very thin wrapper for AddLabelTool, which in
wxPython 3 is the same as AddTool in wxPython 4 '''
return self.AddLabelTool(id=toolId, label=label, bitmap=bitmap,
bmpDisabled=bmpDisabled, kind=kind,
shortHelp=shortHelp, longHelp=longHelp,
clientData=clientData)
class Wx4ToolBar(wx.ToolBar):
''' Special toolbar class that accepts wxPython 3-style AddLabelTool command
and converts it to a wxPython 4-style AddTool command '''
def __init__(self, parent, id=wx.ID_ANY, pos=wx.DefaultPosition,
size=wx.DefaultSize, style=wx.TB_HORIZONTAL, name='toolbar'):
wx.ToolBar.__init__(self, parent, id, pos, size, style, name)
def AddLabelTool(self, id, label, bitmap, bmpDisabled=wx.NullBitmap,
kind=wx.ITEM_NORMAL, shortHelp='', longHelp='',
clientData=None):
''' Override to make this a very thin wrapper for AddTool, which in
wxPython 4 is the same as AddLabelTool in wxPython 3 '''
return self.AddTool(toolId=id, label=label, bitmap=bitmap,
bmpDisabled=bmpDisabled, kind=kind,
shortHelp=shortHelp, longHelp=longHelp,
clientData=clientData)
# Use this ToolBar class to create toolbars in frames
ToolBar = Wx4ToolBar if wx4 else Wx3ToolBar
|
StarcoderdataPython
|
1680123
|
from octopus.engine.explorer import Explorer
from requests.exceptions import ConnectionError as RequestsConnectionError
import json
EOS_DEFAULT_RPC_PORT = 8888
EOS_WALLET_RPC_PORT = 8889
class EosExplorer(Explorer):
"""
EOS REST RPC client class
doc: https://eosio.github.io/eos/group__eosiorpc.html
cleos source code: https://github.com/EOSIO/eos/blob/master/programs/cleos/main.cpp
"""
def __init__(self, host='localhost', port=EOS_DEFAULT_RPC_PORT, tls=False, max_retries=3):
Explorer.__init__(self, host=host, port=port, tls=tls, max_retries=max_retries)
def call(self, method, params={}, version='v1', api_type='chain'):
current_url = '{}/{}/{}/{}'.format(self.url, version, api_type, method)
try:
r = self.session.post(current_url, headers=self.headers, data=json.dumps(params))
except RequestsConnectionError:
raise Exception('RPC connection Error')
if not 200 <= r.status_code < 300:
raise Exception('RPC connection failure: ' + str(r.status_code) + ' ' + r.reason + ' ' + r.text)
try:
response = r.json()
except ValueError:
raise Exception('JSON response parsing error: ' + str(r.text))
try:
return response
except KeyError:
raise Exception('\"result\" field in JSON response error: ' + str(response))
##########################
# Chain API #
##########################
def get_info(self):
'''Get latest information related to a node
TESTED
'''
return self.call('get_info')
def get_block(self, block_num_or_id):
'''Get information related to a block.
TESTED
'''
data = {'block_num_or_id': block_num_or_id}
return self.call('get_block', data)
def get_raw_code_and_abi(self, account_name):
'''Fetch smart contract code.
TESTED
'''
data = {'account_name': account_name}
return self.call('get_raw_code_and_abi', data)
def get_account(self, account_name):
'''Get information related to an account.
TESTED
'''
data = {'account_name': account_name}
return self.call('get_account', data)
def get_code(self, account_name):
'''Fetch smart contract code.
TESTED
'''
data = {'account_name': account_name}
return self.call('get_code', data)
def get_table_rows(self, scope, code, table, json=False, lower_bound=None, upper_bound=None, limit=None):
'''Fetch smart contract data from an account.
NOT TESTED
'''
data = {'scope': scope,
'code': code,
'table': table,
'json': json}
if lower_bound:
data['lower_bound'] = lower_bound
if upper_bound:
data['upper_bound'] = upper_bound
if limit:
data['limit'] = limit
return self.call('get_table_rows', data)
def abi_json_to_bin(self, code, action, args):
'''Serialize json to binary hex. The resulting binary hex is usually used for the data field in push_transaction.
NOT TESTED
'''
data = {'code': code,
'action': action,
'args': args}
print(data)
return self.call('abi_json_to_bin', data)
def abi_bin_to_json(self, code, action, binargs):
'''Serialize back binary hex to json.
NOT TESTED
'''
data = {'code': code,
'action': action,
'binargs': binargs}
return self.call('abi_bin_to_json', data)
def push_transaction(self, tx_json):
'''This method expects a transaction in JSON format and will attempt to apply it to the blockchain,
NOT TESTED
'''
return self.call('push_transaction', tx_json)
def push_transactions(self, list_tx_json):
'''This method push multiple transactions at once.
NOT TESTED
'''
return self.call('push_transactions', list_tx_json)
def get_required_keys(self, transaction):
'''Get required keys to sign a transaction from list of your keys.
NOT TESTED
'''
data = {'transaction': transaction}
return self.call('get_required_keys', data)
##########################
# Wallet API #
##########################
def wallet_create(self, name):
'''Create a new wallet with the given name
TESTED
'''
return self.call('create', name, api_type='wallet')
def wallet_open(self, name):
'''Open an existing wallet of the given name
TESTED
'''
return self.call('open', name, api_type='wallet')
def wallet_lock(self, name):
'''Lock a wallet of the given name
TESTED
'''
return self.call('lock', name, api_type='wallet')
def wallet_lock_all(self):
'''Lock all wallets
TESTED
'''
return self.call('lock_all', api_type='wallet')
def wallet_unlock(self, name, password):
'''Unlock a wallet with the given name and password
TESTED
'''
return self.call('unlock', [name, password], api_type='wallet')
def wallet_import_key(self, name, priv_key):
'''Import a private key to the wallet of the given name
TESTED
'''
return self.call('import_key', [name, priv_key], api_type='wallet')
def wallet_list(self):
'''List all wallets
TESTED
'''
return self.call('list_wallets', api_type='wallet')
def wallet_list_keys(self):
'''List all key pairs across all wallets
TESTED
'''
return self.call('list_keys', api_type='wallet')
def wallet_get_public_keys(self):
'''List all public keys across all wallets
TESTED
'''
return self.call('get_public_keys', api_type='wallet')
def wallet_set_timeout(self, timeout_s):
'''Set wallet auto lock timeout (in seconds)
TESTED
'''
return self.call('set_timeout', timeout_s, api_type='wallet')
def wallet_sign_trx(self, tx_json):
'''Sign transaction given an array of transaction, require public keys, and chain id
NOT TESTED
'''
return self.call('sign_transaction', tx_json)
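# Minimal usage sketch (added for illustration, not part of the original module).
# It assumes a local nodeos instance is serving the chain API on the default port;
# the contract account name in the commented line is only an example.
if __name__ == "__main__":
    explorer = EosExplorer(host='localhost', port=EOS_DEFAULT_RPC_PORT)
    info = explorer.get_info()
    print(info.get('head_block_num'))
    # abi = explorer.get_code('eosio.token')  # fetch a deployed contract's code/ABI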
|
StarcoderdataPython
|
1674552
|
#
# The Python Imaging Library.
# $Id$
#
# package placeholder
#
# Copyright (c) 1999 by Secret Labs AB.
#
# See the README file for information on usage and redistribution.
#
# ;-)
|
StarcoderdataPython
|
4826917
|
# EMACS settings: -*- tab-width: 2; indent-tabs-mode: t; python-indent-offset: 2 -*-
# vim: tabstop=2:shiftwidth=2:noexpandtab
# kate: tab-width 2; replace-tabs off; indent-width 2;
# ==============================================================================
# Authors: <NAME>
#
# Python functions: A streaming VHDL parser
#
# Description:
# ------------------------------------
# TODO:
#
# License:
# ==============================================================================
# Copyright 2007-2017 <NAME> - Dresden, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# load dependencies
from pyVHDLParser.Token import SpaceToken, LinebreakToken, CommentToken, CharacterToken, IndentationToken, MultiLineCommentToken
from pyVHDLParser.Token.Keywords import StringToken, BoundaryToken, IdentifierToken, GenericKeyword, ParameterKeyword, ProcedureKeyword, EndKeyword, \
ImpureKeyword, PureKeyword
from pyVHDLParser.Token.Keywords import UseKeyword, ConstantKeyword, VariableKeyword, IsKeyword, EndToken, BeginKeyword, FunctionKeyword, ReportKeyword
from pyVHDLParser.Blocks import Block, TokenParserException, CommentBlock, ParserState
from pyVHDLParser.Blocks.Common import LinebreakBlock, IndentationBlock, WhitespaceBlock
# from pyVHDLParser.Blocks.ControlStructure import If, Case, ForLoop, WhileLoop, Return
from pyVHDLParser.Blocks.Generic import SequentialBeginBlock, SequentialDeclarativeRegion
from pyVHDLParser.Blocks.Generic1 import EndBlock as EndBlockBase
from pyVHDLParser.Blocks.List import GenericList, ParameterList
class EndBlock(EndBlockBase):
KEYWORD = ProcedureKeyword
KEYWORD_IS_OPTIONAL = True
EXPECTED_NAME = KEYWORD.__KEYWORD__
class BeginBlock(SequentialBeginBlock):
END_BLOCK = EndBlock
class DeclarativeRegion(SequentialDeclarativeRegion):
BEGIN_BLOCK = BeginBlock
END_BLOCK = EndBlock
class NameBlock(Block):
@classmethod
def stateProcedureKeyword(cls, parserState: ParserState):
token = parserState.Token
if isinstance(token, SpaceToken):
parserState.NewToken = BoundaryToken(token)
parserState.NextState = cls.stateWhitespace1
return
elif isinstance(token, (LinebreakToken, CommentToken)):
block = LinebreakBlock if isinstance(token, LinebreakToken) else CommentBlock
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=token.PreviousToken, multiPart=True)
_ = block(parserState.NewBlock, token)
parserState.TokenMarker = None
parserState.NextState = cls.stateWhitespace1
return
raise TokenParserException("Expected whitespace after keyword PROCEDURE.", token)
@classmethod
def stateWhitespace1(cls, parserState: ParserState):
token = parserState.Token
if isinstance(token, StringToken):
parserState.NewToken = IdentifierToken(token)
parserState.NextState = cls.stateProcedureName
return
elif isinstance(token, LinebreakToken):
if (not (isinstance(parserState.LastBlock, CommentBlock) and isinstance(parserState.LastBlock.StartToken, MultiLineCommentToken))):
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=token.PreviousToken, multiPart=True)
_ = LinebreakBlock(parserState.NewBlock, token)
else:
parserState.NewBlock = LinebreakBlock(parserState.LastBlock, token)
parserState.TokenMarker = None
return
elif isinstance(token, CommentToken):
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=token.PreviousToken, multiPart=True)
_ = CommentBlock(parserState.NewBlock, token)
parserState.TokenMarker = None
return
elif (isinstance(token, SpaceToken) and (isinstance(parserState.LastBlock, CommentBlock) and isinstance(parserState.LastBlock.StartToken, MultiLineCommentToken))):
parserState.NewToken = BoundaryToken(token)
parserState.NewBlock = WhitespaceBlock(parserState.LastBlock, parserState.NewToken)
parserState.TokenMarker = None
return
raise TokenParserException("Expected procedure name (designator).", token)
@classmethod
def stateProcedureName(cls, parserState: ParserState):
token = parserState.Token
if (isinstance(token, CharacterToken) and (token == "(")):
parserState.NewToken = BoundaryToken(token)
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=parserState.NewToken.PreviousToken)
_ = ParameterList.OpenBlock(parserState.NewBlock, parserState.NewToken)
parserState.TokenMarker = None
parserState.NextState = VoidBlock.stateAfterParameterList
parserState.PushState = ParameterList.OpenBlock.stateOpeningParenthesis
parserState.Counter = 1
return
elif isinstance(token, SpaceToken):
parserState.NewToken = BoundaryToken(token)
parserState.NextState = cls.stateWhitespace2
return
elif isinstance(token, (LinebreakToken, CommentToken)):
block = LinebreakBlock if isinstance(token, LinebreakToken) else CommentBlock
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=token.PreviousToken, multiPart=True)
_ = block(parserState.NewBlock, token)
parserState.TokenMarker = None
parserState.NextState = cls.stateWhitespace2
return
raise TokenParserException("Expected '(' or whitespace after procedure name.", token)
@classmethod
def stateWhitespace2(cls, parserState: ParserState):
token = parserState.Token
if (isinstance(token, CharacterToken) and (token == "(")):
parserState.NewToken = BoundaryToken(token)
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=parserState.NewToken.PreviousToken)
_ = ParameterList.OpenBlock(parserState.NewBlock, parserState.NewToken)
parserState.TokenMarker = None
parserState.NextState = VoidBlock.stateAfterParameterList
parserState.PushState = ParameterList.OpenBlock.stateOpeningParenthesis
parserState.Counter = 1
return
elif isinstance(token, StringToken):
keyword = token.Value.lower()
if (keyword == "is"):
parserState.NewToken = IsKeyword(token)
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=parserState.NewToken.PreviousToken)
_ = VoidBlock(parserState.NewBlock, parserState.NewToken)
parserState.TokenMarker = parserState.NewToken
parserState.NextState = VoidBlock.stateDeclarativeRegion
return
elif (keyword == "generic"):
parserState.NewToken = GenericKeyword(token)
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=parserState.NewToken.PreviousToken)
parserState.NextState = GenericList.OpenBlock.stateGenericKeyword
parserState.TokenMarker = parserState.NewToken
return
elif (keyword == "parameter"):
parserState.NewToken = ParameterKeyword(token)
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=parserState.NewToken.PreviousToken)
parserState.NextState = ParameterList.OpenBlock.stateParameterKeyword
parserState.TokenMarker = parserState.NewToken
return
elif isinstance(token, LinebreakToken):
if (not (isinstance(parserState.LastBlock, CommentBlock) and isinstance(parserState.LastBlock.StartToken, MultiLineCommentToken))):
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=token.PreviousToken, multiPart=True)
_ = LinebreakBlock(parserState.NewBlock, token)
else:
parserState.NewBlock = LinebreakBlock(parserState.LastBlock, token)
parserState.TokenMarker = None
return
elif isinstance(token, CommentToken):
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=token.PreviousToken, multiPart=True)
_ = CommentBlock(parserState.NewBlock, token)
parserState.TokenMarker = None
return
elif (isinstance(token, SpaceToken) and (isinstance(parserState.LastBlock, CommentBlock) and isinstance(parserState.LastBlock.StartToken, MultiLineCommentToken))):
parserState.NewToken = BoundaryToken(token)
parserState.NewBlock = WhitespaceBlock(parserState.LastBlock, parserState.NewToken)
parserState.TokenMarker = None
return
raise TokenParserException("Expected '(' or keywords GENERIC, PARAMETER or RETURN after procedure name.", token)
class VoidBlock(Block):
@classmethod
def stateAfterParameterList(cls, parserState: ParserState):
token = parserState.Token
if (isinstance(token, CharacterToken) and (token == ";")):
parserState.NewToken = EndToken(token)
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=parserState.NewToken)
parserState.Pop()
return
elif isinstance(token, StringToken):
if (token <= "is"):
parserState.NewToken = IsKeyword(token)
parserState.NewBlock = VoidBlock(parserState.LastBlock, parserState.TokenMarker, parserState.NewToken)
parserState.NextState = DeclarativeRegion.stateDeclarativeRegion
return
elif isinstance(token, SpaceToken):
parserState.NewToken = BoundaryToken(token)
parserState.NextState = cls.stateWhitespace1
return
elif isinstance(token, (LinebreakToken, CommentToken)):
block = LinebreakBlock if isinstance(token, LinebreakToken) else CommentBlock
parserState.NewBlock = block(parserState.LastBlock, token)
parserState.TokenMarker = None
parserState.NextState = cls.stateWhitespace1
return
raise TokenParserException("Expected keyword RETURN.", token)
@classmethod
def stateWhitespace1(cls, parserState: ParserState):
token = parserState.Token
if (isinstance(token, StringToken) and (token <= "is")):
parserState.NewToken = IsKeyword(token)
parserState.NextState = DeclarativeRegion.stateDeclarativeRegion
return
elif isinstance(token, LinebreakToken):
if (not (isinstance(parserState.LastBlock, CommentBlock) and isinstance(parserState.LastBlock.StartToken, MultiLineCommentToken))):
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=token.PreviousToken, multiPart=True)
_ = LinebreakBlock(parserState.NewBlock, token)
else:
parserState.NewBlock = LinebreakBlock(parserState.LastBlock, token)
parserState.TokenMarker = None
return
elif isinstance(token, CommentToken):
parserState.NewBlock = cls(parserState.LastBlock, parserState.TokenMarker, endToken=token.PreviousToken, multiPart=True)
_ = CommentBlock(parserState.NewBlock, token)
parserState.TokenMarker = None
return
elif (isinstance(token, SpaceToken) and (isinstance(parserState.LastBlock, CommentBlock) and isinstance(parserState.LastBlock.StartToken, MultiLineCommentToken))):
parserState.NewToken = BoundaryToken(token)
parserState.NewBlock = WhitespaceBlock(parserState.LastBlock, parserState.NewToken)
parserState.TokenMarker = None
return
raise TokenParserException("Expected procedure name (designator).", token)
|
StarcoderdataPython
|
4820377
|
<reponame>webdev188/tytus<gh_stars>10-100
usuarios = [
{"name":"Admin", "password":"<PASSWORD>"}
]
|
StarcoderdataPython
|
3216836
|
<reponame>Erick0212/thedefender
import random
import json
from pygame.locals import *
import os
import pygame
import pygameMenu
from pygameMenu.locals import *
WIDTH = 900
HEIGHT = 700
FPS = 60
pygame.init()
os.environ['SDL_VIDEO_CENTERED'] = '1'
pygame.mixer.init()
screen = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("The Defender")
clock = pygame.time.Clock()
font_name = pygame.font.match_font('arial')
def pontuacao(surf, text,size,x,y):
font = pygame.font.Font(font_name,size)
text_surface = font.render(text,True, (255,0,0))
text_rect = text_surface.get_rect()
text_rect.midtop = (x,y)
surf.blit(text_surface,text_rect)
class Score(pygame.sprite.Sprite):
def __init__(self):
pygame.sprite.Sprite.__init__(self)
self.score = 0
self.dificuldade =1
def update(self):
self.score += 1
if self.score ==10:
self.dificuldade = 4
print(self.dificuldade)
if self.score ==30:
self.dificuldade = 6
print(self.dificuldade)
if self.score == 70:
self.dificuldade = 8
print(self.dificuldade)
if self.score == 100:
self.dificuldade = 10
print(self.dificuldade)
if self.score == 130:
self.dificuldade = 14
print(self.dificuldade)
if self.score == 200:
self.dificuldade = 18
print(self.dificuldade)
class Player(pygame.sprite.Sprite):
def __init__(self):
pygame.sprite.Sprite.__init__(self)
self.image = pygame.image.load('oldplayer.PNG')
self.rect = self.image.get_rect()
self.rect.centerx = WIDTH / 2
self.rect.bottom = HEIGHT - 10
self.speedx = 0
self.col0 = 0
def update(self):
self.speedx = 0
keystate = pygame.key.get_pressed()
if keystate[pygame.K_LEFT]:
self.speedx = -8
if keystate[pygame.K_RIGHT]:
self.speedx = 8
self.rect.x += self.speedx
if self.rect.right > WIDTH:
self.rect.right = WIDTH
if self.rect.left < 0:
self.rect.left = 0
def shoot(self):
pts = Score()
if pts.score == 200:
if self.col0 == 0:
bullet = Bullet(self.rect.centerx, self.rect.top)
all_sprites.add(bullet)
bullets.add(bullet)
self.col0 = 100
else:
bullet = Bullet(self.rect.centerx, self.rect.top)
all_sprites.add(bullet)
bullets.add(bullet)
if self.col0 > 0 :
self.col0 -=1
class Mob(pygame.sprite.Sprite):
def __init__(self):
pygame.sprite.Sprite.__init__(self)
self.image = pygame.image.load('inimigo.PNG')
self.image= pygame .transform.scale(self.image,(120,80))
self.rect = self.image.get_rect()
self.rect.x = random.randrange(100 , 800)
self.rect.y = random.randrange(-100, -40)
self.speedy = random.randrange(1, 4)
if self.rect.x < WIDTH/2:
self.speedx = random.randrange(0, 1)
else:
self.speedx = random.randrange(-1, 0)
def update(self):
self.rect.x += self.speedx
self.rect.y += self.speedy
if self.rect.top > HEIGHT - 10 or self.rect.left < -25 or self.rect.right > WIDTH + 20:
self.rect.y = random.randrange(-100, -40)
self.speedy = random.randrange(1, 2)
class Bullet(pygame.sprite.Sprite):
def __init__(self, x, y):
pygame.sprite.Sprite.__init__(self)
self.image = pygame.image.load('tiro.PNG')
self.rect = self.image.get_rect()
self.rect.bottom = y
self.rect.centerx = x
self.speedy = -10
def update(self):
self.rect.y += self.speedy
if self.rect.bottom < 0:
self.kill()
audio=pygame.mixer.Sound('boom.wav')
audio_tiro= pygame.mixer.Sound('disparo.wav')
audio_jogo = pygame.mixer.Sound('tema.wav')
audio_gameOver = pygame.mixer.Sound('gameOver.wav')
font = pygame.font.get_default_font()
font2= pygame.font.SysFont(font,70)
pygame.font.init()
try:
abre=open('pontuação.json','r')
Mpts = json.load(abre)
abre.close()
except:
Mpts = 0
coracao = pygame.image.load('vida.PNG')
coracao = pygame.transform.scale(coracao,(30,20))
bg = pygame.image.load('fundo.PNG')
bg = pygame.transform.scale(bg,(WIDTH,HEIGHT))
all_sprites = pygame.sprite.Group()
mobs = pygame.sprite.Group()
bullets = pygame.sprite.Group()
player = Player()
all_sprites.add(player)
def jogo():
vida =3
score = 0
pts = Score()
running = True
col =0
col2 =0
audio_jogo.play(3)
while running:
if col2 == 0:
for i in range(pts.dificuldade):
m = Mob()
all_sprites.add(m)
mobs.add(m)
mobs.draw(screen)
col2 = 100
if pts.dificuldade == 8:
col2 = 150
if pts.dificuldade == 14:
col2 = 200
clock.tick(FPS)
for event in pygame.event.get():
if event.type == pygame.QUIT:
running = False
if pts.score == 200:
pygame.key.set_repeat(10,50)
press = pygame.key.get_pressed()
if col == 0:
if press[pygame.K_SPACE]:
audio_tiro.play()
player.shoot()
col = 1000
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_SPACE:
audio_tiro.play()
player.shoot()
all_sprites.update()
hits = pygame.sprite.groupcollide(mobs, bullets, True, True)
for hit in hits:
pts.update()
score = pts.score
hits = pygame.sprite.spritecollide(player, mobs, True)
for hit in hits:
audio.play()
if hits:
vida -= 1
if vida == 0:
if score > Mpts:
abre = open('pontuação.json','w')
abre.write(str(score))
abre.close()
running = False
audio_jogo.stop()
audio_gameOver.play(3)
show_go_screen(score,Mpts,)
if col >0:
col -=0.1
if col2 >0:
col2 -= 1
screen.blit(bg,(0,0))
screen.blit(coracao,(WIDTH/2 -400, 10))
all_sprites.draw(screen)
pontuacao(screen,"PONTUAÇÃO: ", 18, WIDTH/2 -100, 10)
pontuacao(screen, str(score),18,WIDTH/2 -30,10 )
pontuacao(screen,str(vida), 18, WIDTH/2 -360, 10)
pontuacao(screen,"MELHOR PONTUAÇÃO:", 18, WIDTH/2 +250, 10)
pontuacao(screen,str(Mpts), 18, WIDTH/2 +380, 10)
pygame.display.flip()
pygame.quit()
def draw_text(surf, text, size, x, y):
font = pygame.font.Font(font_name, size)
text_surface = font.render(text, True, (255,255,255))
text_rect = text_surface.get_rect()
text_rect.midtop = (x, y)
surf.blit(text_surface, text_rect)
def draw_gameOver(surf, text, size, x, y):
font = pygame.font.Font(font_name, size)
text_surface = font.render(text, True, (255,0,0))
text_rect = text_surface.get_rect()
text_rect.midtop = (x, y)
surf.blit(text_surface, text_rect)
def show_go_screen(score,Mpts):
screen.fill((0,0,0))
draw_gameOver(screen, 'Game Over',64,WIDTH/2,HEIGHT/4)
draw_text(screen, 'Pontuação',25,WIDTH/2 -100,HEIGHT/4 + 100)
draw_text(screen, str(score),25,WIDTH/2 -100,HEIGHT/4 + 150)
if score > Mpts:
draw_text(screen, 'Nova Melhor Pontuação',25,WIDTH/2 +100,HEIGHT/4 + 100)
draw_text(screen, str(score),25,WIDTH/2 +100,HEIGHT/4 + 150)
else:
draw_text(screen, 'Melhor Pontuação',25,WIDTH/2 +100,HEIGHT/4 + 100)
draw_text(screen, str(Mpts),25,WIDTH/2 +100,HEIGHT/4 + 150)
draw_text(screen, 'Precione Esc para sair!',18,WIDTH/2,HEIGHT/4 + 450)
draw_text(screen, 'Desenvolvido por: <NAME> e <NAME>',18,WIDTH/2 + 250,HEIGHT/4 + 500)
pygame.display.flip()
true = True
while true:
clock.tick(FPS)
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
pygame.quit()
COLOR_BACKGROUND = (0, 0, 0)
COLOR_BLACK = (0, 0, 0)
COLOR_WHITE = (255, 255, 255)
FPS = 60.0
MENU_BACKGROUND_COLOR = (3, 64, 137)
WINDOW_SIZE = (WIDTH, HEIGHT)
# -----------------------------------------------------------------------------
# Init pygame
pygame.init()
os.environ['SDL_VIDEO_CENTERED'] = '1'
# Create pygame screen and objects
surface = pygame.display.set_mode(WINDOW_SIZE)
pygame.display.set_caption('MENU INICIAL')
clock = pygame.time.Clock()
dt = 1 / FPS
def play_function():
jogo()
main_menu.disable()
main_menu.reset(1)
while True:
clock.tick(60)
# Application events
events = pygame.event.get()
for e in events:
if e.type == QUIT:
exit()
elif e.type == KEYDOWN:
if e.key == K_ESCAPE and main_menu.is_disabled():
main_menu.enable()
return
main_menu.mainloop(events)
pygame.display.flip()
def main_background():
surface.fill(COLOR_BACKGROUND)
# PLAY MENU
play_menu= pygameMenu.Menu(surface,
bgfun=main_background,
color_selected=COLOR_WHITE,
font=pygameMenu.fonts.FONT_BEBAS,
font_color=COLOR_BLACK,
font_size=30,
menu_alpha=100,
menu_color=MENU_BACKGROUND_COLOR,
menu_height=int(WINDOW_SIZE[1]*1),
menu_width=int(WINDOW_SIZE[0] *1),
onclose=PYGAME_MENU_DISABLE_CLOSE,
option_shadow=False,
title='The defender',
window_height=WINDOW_SIZE[1],
window_width=WINDOW_SIZE[0]
)
play_menu.add_option('Iniciar', play_function)
play_menu.add_option('Retornar ao menu principal', PYGAME_MENU_BACK)
# MAIN MENU
main_menu = pygameMenu.Menu(surface,
bgfun=main_background,
color_selected=COLOR_WHITE,
font=pygameMenu.fonts.FONT_BEBAS,
font_color=COLOR_BLACK,
font_size=30,
menu_alpha=100,
menu_color=MENU_BACKGROUND_COLOR,
menu_height=int(WINDOW_SIZE[1] * 1),
menu_width=int(WINDOW_SIZE[0] * 1),
option_shadow= False,
title='The defender',
window_height=WINDOW_SIZE[1],
window_width=WINDOW_SIZE[0]
)
main_menu.add_option('Jogar', play_menu)
main_menu.add_option('Sair', PYGAME_MENU_EXIT)
# -----------------------------------------------------------------------------
# Main loop
def menu():
while True:
# Tick
clock.tick(60)
# Application events
events = pygame.event.get()
for event in events:
if event.type == QUIT:
exit()
# Main menu
main_menu.mainloop(events)
# Flip surface
pygame.display.flip()
menu()
|
StarcoderdataPython
|
82206
|
#!/usr/bin/python3 -S
# -*- coding: utf-8 -*-
import unittest
import pickle
import copy
from cargo import aliased, Model
from cargo.fields import Field
from unit_tests import configure
class Tc(object):
def __init__(self, field):
self.field = field
class FieldModel(Model):
field = Field()
class TestField(unittest.TestCase):
orm = FieldModel()
def setUp(self):
self.orm.clear()
@property
def base(self):
return self.orm.field
@property
def base_array(self):
return getattr(self.orm, 'array_' + self.base.field_name)
def test_init(self, *args, **kwargs):
base = self.base.__class__()
self.assertEqual(base.value, base.empty)
base = self.base.__class__(primary=True)
self.assertEqual(base.primary, True)
base = self.base.__class__(unique=True)
self.assertEqual(base.unique, True)
base = self.base.__class__(index=True)
self.assertEqual(base.index, True)
base = self.base.__class__(default='field')
self.assertEqual(base.default, 'field')
base = self.base.__class__(not_null=True)
self.assertEqual(base.not_null, True)
base = self.base.__class__(validator=Tc)
self.assertIsInstance(base.validator, Tc)
def test_slots(self):
self.assertFalse(hasattr(self.base, '__dict__'))
def test_copy(self):
fielda = self.base
fieldb = self.base.copy()
self.assertIsNot(fielda, fieldb)
for k in list(fielda.__slots__):
if k == 'validator':
self.assertEqual(fielda.validator.__class__,
fieldb.validator.__class__)
else:
self.assertEqual(getattr(fielda, k), getattr(fieldb, k))
self.assertEqual(fielda.table, fieldb.table)
fielda = self.base
fieldb = copy.copy(self.base)
self.assertIsNot(fielda, fieldb)
for k in list(fielda.__slots__):
if k == 'validator':
self.assertEqual(fielda.validator.__class__,
fieldb.validator.__class__)
else:
self.assertEqual(getattr(fielda, k), getattr(fieldb, k))
self.assertEqual(fielda.table, fieldb.table)
def test_deepcopy(self):
fielda = self.base
fieldb = copy.deepcopy(self.base)
self.assertIsNot(fielda, fieldb)
for k in list(fielda.__slots__):
if isinstance(
getattr(fielda, k), (str, list, tuple, dict, int, float)):
self.assertEqual(getattr(fielda, k), getattr(fieldb, k))
else:
self.assertTrue(
getattr(fielda, k).__class__, getattr(fieldb, k).__class__)
self.assertEqual(fielda.table, fieldb.table)
def test_pickle(self):
b = pickle.loads(pickle.dumps(self.base))
for k in list(self.base.__slots__):
if isinstance(
getattr(self.base, k), (str, list, tuple, dict, int, float)):
self.assertEqual(getattr(self.base, k), getattr(b, k))
else:
self.assertTrue(
getattr(self.base, k).__class__ == getattr(b, k).__class__)
def test_set_alias(self):
field = self.base.copy()
field.table = 'foo'
field.field_name = 'bar'
field.set_alias(table="foo_b")
self.assertEqual(str(aliased(field)), "foo_b.bar")
field.set_alias(name="foo_b")
self.assertEqual(str(aliased(field)), "foo_b")
field.set_alias("foo_b", "bar_b")
self.assertEqual(str(aliased(field)), "foo_b.bar_b")
def test___call__(self):
self.base('foobar')
self.assertEqual(self.base.value, 'foobar')
self.assertEqual(self.base(), 'foobar')
self.assertEqual(self.base(None), None)
if __name__ == '__main__':
# Unit test
configure.run_tests(TestField)
|
StarcoderdataPython
|
1611507
|
<reponame>vumichien/hummingbird
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""
Converters for ONNX-ML tree-ensemble models.
"""
import numpy as np
from onnxconverter_common.registration import register_converter
from .. import constants
from .._gbdt_commons import convert_gbdt_classifier_common, convert_gbdt_common
from .._tree_commons import TreeParameters, convert_decision_ensemble_tree_common, get_parameters_for_tree_trav_common
def _get_tree_infos_from_onnx_ml_operator(model):
"""
Function used to extract the parameters from a ONNXML TreeEnsemble model.
"""
tree_infos = []
left = right = features = values = threshold = None
tree_ids = target_node_ids = target_tree_ids = modes = None
classes = post_transform = None
# The list of attributes is a merge between the classifier and regression operators.
# The operators descriptions can be found here
# https://github.com/onnx/onnx/blob/master/docs/Operators-ml.md#aionnxmltreeensembleclassifier and
# here https://github.com/onnx/onnx/blob/master/docs/Operators-ml.md#aionnxmltreeensembleregressor
for attr in model.origin.attribute:
if attr.name == "nodes_falsenodeids":
right = attr.ints
elif attr.name == "nodes_truenodeids":
left = attr.ints
elif attr.name == "nodes_featureids":
features = attr.ints
elif attr.name == "nodes_values":
threshold = attr.floats
elif attr.name == "class_weights" or attr.name == "target_weights":
values = attr.floats
if len(values) == 0:
raise TypeError("Type mismatch with attribute {}.".format(attr))
elif attr.name == "class_nodeids" or attr.name == "target_nodeids":
target_node_ids = attr.ints
elif attr.name == "class_treeids" or attr.name == "target_treeids":
target_tree_ids = attr.ints
elif attr.name == "nodes_treeids":
tree_ids = attr.ints
elif attr.name == "classlabels_int64s":
classes = list(attr.ints)
elif attr.name == "classlabels_strings ":
if len(attr.strings) > 0:
raise AssertionError("String class labels not supported yet.")
elif attr.name == "post_transform":
post_transform = attr.s.decode("utf-8")
if post_transform not in ["NONE", "LOGISTIC", "SOFTMAX"]:
raise AssertionError("Post transform {} not supported".format(post_transform))
elif attr.name == "nodes_modes":
modes = attr.strings
for mode in modes:
if (not mode == b"BRANCH_LEQ") and (not mode == b"LEAF"):
raise AssertionError("Modality {} not supported".format(mode))
is_decision_tree = post_transform == "NONE"
# Order values based on target node and tree ids.
new_values = []
n_classes = 1 if classes is None or not is_decision_tree else len(classes)
j = 0
for i in range(max(target_tree_ids) + 1):
k = j
while k < len(target_tree_ids) and target_tree_ids[k] == i:
k += 1
target_ids = target_node_ids[j:k]
target_ids_zipped = dict(zip(target_ids, range(len(target_ids))))
for key in sorted(target_ids_zipped):
if is_decision_tree and n_classes > 2: # For multiclass we have 2d arrays.
tmp_values = []
for c in range(n_classes):
tmp_values.append(values[j + c + (target_ids_zipped[key] - (n_classes - 1))])
new_values.append(tmp_values)
else:
new_values.append(values[j + target_ids_zipped[key]])
j = k
values = new_values
i = 0
prev_id = 0
count = 0
l_count = 0
for n, id in enumerate(tree_ids):
if id == i:
if modes[n] == b"LEAF":
left[n] = -1
right[n] = -1
threshold[n] = -1
else:
t_left = left[prev_id:count]
t_right = right[prev_id:count]
t_features = features[prev_id:count]
t_threshold = threshold[prev_id:count]
t_values = np.zeros((len(t_left), n_classes)) if is_decision_tree else np.zeros(len(t_left))
if len(t_left) == 1:
# Model creating trees with just a single leaf node. We transform it
# to a model with one internal node.
t_left = [1, -1, -1]
t_right = [2, -1, -1]
t_features = [0, 0, 0]
t_threshold = [0, -1, -1]
if l_count < len(values):
t_values[0] = values[l_count]
l_count += 1
else:
for j in range(len(t_left)):
if t_threshold[j] == -1 and l_count < len(values):
t_values[j] = values[l_count]
l_count += 1
if t_values.shape[0] == 1:
# Model creating trees with just a single leaf node. We fix the values here.
n_classes = t_values.shape[1] if len(t_values.shape) > 1 else 1
t_values = np.array([np.array([0.0]), t_values[0], t_values[0]])
t_values.reshape(3, n_classes)
if is_decision_tree and n_classes == 2: # We need to fix the probabilities in this case.
max_tree_ids = max(tree_ids) + 1
for k in range(len(t_left)):
prob = (1 / max_tree_ids) - t_values[k][1]
t_values[k][0] = prob
tree_infos.append(
TreeParameters(t_left, t_right, t_features, t_threshold, np.array(t_values).reshape(-1, n_classes))
)
prev_id = count
i += 1
count += 1
t_left = left[prev_id:count]
t_right = right[prev_id:count]
t_features = features[prev_id:count]
t_threshold = threshold[prev_id:count]
t_values = np.zeros((len(t_left), n_classes)) if is_decision_tree else np.zeros(len(t_left))
if len(t_left) == 1:
# Model creating trees with just a single leaf node. We transform it
# to a model with one internal node.
t_left = [1, -1, -1]
t_right = [2, -1, -1]
t_features = [0, 0, 0]
t_threshold = [0, -1, -1]
if l_count < len(values):
t_values[0] = values[l_count]
l_count += 1
else:
for j in range(len(t_left)):
if t_threshold[j] == -1 and l_count < len(values):
t_values[j] = values[l_count]
l_count += 1
if t_values.shape[0] == 1:
# Model creating trees with just a single leaf node. We fix the values here.
n_classes = t_values.shape[1] if len(t_values.shape) > 1 else 1
t_values = np.array([np.array([0.0]), t_values[0], t_values[0]])
t_values.reshape(3, n_classes)
if is_decision_tree and n_classes == 2: # We need to fix the probabilities in this case.
for k in range(len(t_left)):
prob = (1 / (max(tree_ids) + 1)) - t_values[k][1]
t_values[k][0] = prob
tree_infos.append(TreeParameters(t_left, t_right, t_features, t_threshold, np.array(t_values).reshape(-1, n_classes)))
return tree_infos, classes, post_transform
def _dummy_get_parameter(tree_info, extra_config):
"""
Dummy function used to return parameters (TreeEnsemble converters already have parameters in the right format)
"""
return tree_info
def _get_tree_infos_from_tree_ensemble(operator, device=None, extra_config={}):
"""
Base method for extracting parameters from `ai.onnx.ml.TreeEnsemble`s.
"""
assert (
constants.N_FEATURES in extra_config
), "Cannot retrive the number of features. Please fill an issue at https://github.com/microsoft/hummingbird."
# Get the number of features.
n_features = extra_config[constants.N_FEATURES]
    # Get tree information from the operator.
    tree_infos, classes, post_transform = _get_tree_infos_from_onnx_ml_operator(operator)
return n_features, tree_infos, classes, post_transform
def convert_onnx_tree_ensemble_classifier(operator, device=None, extra_config={}):
"""
Converter for `ai.onnx.ml.TreeEnsembleClassifier`.
Args:
operator: An operator wrapping a `ai.onnx.ml.TreeEnsembleClassifier` model
device: String defining the type of device the converted operator should be run on
extra_config: Extra configuration used to select the best conversion strategy
Returns:
A PyTorch model
"""
assert operator is not None, "Cannot convert None operator"
    # Get tree information from the operator.
n_features, tree_infos, classes, post_transform = _get_tree_infos_from_tree_ensemble(
operator.raw_operator, device, extra_config
)
# Generate the model.
if post_transform == "NONE":
return convert_decision_ensemble_tree_common(
operator, tree_infos, _dummy_get_parameter, get_parameters_for_tree_trav_common, n_features, classes, extra_config
)
extra_config[constants.POST_TRANSFORM] = post_transform
return convert_gbdt_classifier_common(
operator, tree_infos, _dummy_get_parameter, n_features, len(classes), classes, extra_config
)
def convert_onnx_tree_ensemble_regressor(operator, device=None, extra_config={}):
"""
Converter for `ai.onnx.ml.TreeEnsembleRegressor`.
Args:
operator: An operator wrapping a `ai.onnx.ml.TreeEnsembleRegressor` model
device: String defining the type of device the converted operator should be run on
extra_config: Extra configuration used to select the best conversion strategy
Returns:
A PyTorch model
"""
assert operator is not None, "Cannot convert None operator"
    # Get tree information from the operator.
n_features, tree_infos, _, _ = _get_tree_infos_from_tree_ensemble(operator.raw_operator, device, extra_config)
# Generate the model.
return convert_gbdt_common(operator, tree_infos, _dummy_get_parameter, n_features, extra_config=extra_config)
register_converter("ONNXMLTreeEnsembleClassifier", convert_onnx_tree_ensemble_classifier)
register_converter("ONNXMLTreeEnsembleRegressor", convert_onnx_tree_ensemble_regressor)
|
StarcoderdataPython
|
70373
|
from django.views.decorators.http import require_http_methods
from graphene_django.views import GraphQLView
@require_http_methods(['POST'])
def graphql_view(request):
from graph_wrap.tastypie import schema
schema = schema()
view = GraphQLView.as_view(schema=schema)
return view(request)
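# A minimal URL-wiring sketch for the view above (assuming a standard Django urls.py; the
# "graphql/" route name is illustrative and not taken from this project):
#
#     from django.urls import path
#     from .views import graphql_view
#
#     urlpatterns = [
#         path('graphql/', graphql_view),
#     ]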
|
StarcoderdataPython
|
93529
|
<filename>pipeline2/modules/preprocess.py
import json
import os
from typing import List, Any, Union, Iterator, Tuple
from natsort import natsorted
# noinspection PyTypeChecker
from pandas import DataFrame, Index, Series
from pandas.core.arrays import ExtensionArray
from pandas.core.generic import NDFrame
from pandas.io.json import json_normalize
import statistics
# noinspection PyTypeChecker
def load_json_file(num_file: object, folder_path):
"""
:type folder_path: object
:param folder_path:
:type num_file: object
"""
#print('Loading the json file......please wait it may take some time depending on size of file')
json_arr = [] # type: List[Any]
for i in range(num_file):
try:
with open(folder_path + str(i) + '.json') as f:
d = json.load(f)
json_arr.append(d)
except IOError:
            print('file ' + str(i) + '.json not found')
#print('LOaded')
return json_arr
def sort_json_file(json_path):
#print('sorting the json files in natural way!')
# This is the path where all the files are stored.
# Open one of the files,
lst = [] # type: List[Union[bytes, str]]
for data_file in os.listdir(json_path): # type: Union[bytes, str]
lst.append(data_file)
json_file = natsorted(lst) # type: list
return [json_file, lst]
# noinspection PyTypeChecker
def map_json_to_pose(json_file: object, peeps: object) -> object:
#print('mapping the json file and no of pose!')
# lets connect the number of peeps,json file name and arr[] i.e keypoints
mapped = zip(json_file, peeps) # type: Iterator[Tuple[Any, Any]]
# converting values to print as set
mapped = set(mapped)
json_to_peeps = list(mapped)
json_to_peeps = natsorted(json_to_peeps) # type: list
return json_to_peeps
# noinspection PyTypeChecker
def get_pose_list(json_arr2: object) -> object:
"""
:type json_arr2: object
"""
arr = [] # type: List[Union[Union[Series, ExtensionArray, None, Index, NDFrame, DataFrame], Any]]
for j in range(0, len(json_arr2)): # type: int
try:
keypt = json_normalize(json_arr2[j]['people']) # type: DataFrame
for i in range(len(keypt['pose_keypoints_2d'])):
arr.append(keypt['pose_keypoints_2d'][i])
print(j)
print(keypt['pose_keypoints_2d'])
except KeyError as e:
print('I got a KeyError - reason "%s"' % str(e))
return arr
# noinspection PyTypeChecker
def remove_confidence_map(arr: object):
"""
:type arr: object
"""
point = [] # type: List[Any]
for j in range(len(arr)):
for i in range(0, 53):
            # Keep only the (x, y) coordinates and drop every third value, which is the
            # per-keypoint confidence score.
            if i % 3 != 2:
                point.append(arr[j][i])
return point
# noinspection PyTypeChecker
def convert(lst: object, var_lst: object) -> object:
"""
:type lst: object
:type var_lst: object
"""
idx = 0 # type: int
for var_len in var_lst:
yield lst[idx: idx + var_len]
idx += var_len
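# For example (illustrative values), slicing a flat list into variable-length chunks:
# list(convert([1, 2, 3, 4, 5, 6], [2, 1, 3])) yields [[1, 2], [3], [4, 5, 6]].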
# noinspection PyTypeChecker
def divide_chunks(l, n: object):
"""
:type n: object
:param l:
:param n:
"""
for i in range(0, len(l), n):
yield l[i:i + n]
def remove_zero(n, boolean):
"""
:param n:
"""
    if boolean:
for j in range(n):
for i in range(36):
if points36[j][i] == 0:
points36[j][i] = statistics.stdev(points36[j])
def file_number(fn):
num = fn.split('.')[0]
min_val = sum(ppl[:int(num)])
max_value = sum(ppl[int(num) + 1:int(num) + 2]) + sum(ppl[:int(num)]) # type: Union[int, Any]
return min_val, max_value
def makefolder(jsonfile):
try:
assert isinstance(jsonfile, object)
os.makedirs(jsonfile)
except OSError:
pass
def select_json(j, points36):
"""
:param j:
:return:
:type points36: object
"""
global x_data
x_data = []
y_data = []
for i in range(36):
if i % 2 == 0:
x_data.append(points36[j][i])
else:
y_data.append(points36[j][i])
X = x_data
Y = y_data # type: List[Any]
return X, Y
|
StarcoderdataPython
|
41508
|
<filename>test_bot/cogs/misc.py
import io
from base64 import b64decode
import disnake
from disnake.ext import commands
class Misc(commands.Cog):
def __init__(self, bot):
self.bot: commands.Bot = bot
def _get_file(self, description: str) -> disnake.File:
# just a white 100x100 png
data = b64decode(
"<KEY>"
)
return disnake.File(io.BytesIO(data), "image.png", description=description)
@commands.slash_command()
async def attachment_desc(self, inter: disnake.AppCmdInter, desc: str = "test") -> None:
"""
Send an attachment with the given description (or the default)
Parameters
----------
desc: The attachment description
"""
await inter.response.send_message(file=self._get_file(desc))
@commands.slash_command()
async def attachment_desc_edit(self, inter: disnake.AppCmdInter, desc: str = "test") -> None:
"""
Send a message with a button, which sends an attachment with the given description (or the default)
Parameters
----------
desc: The attachment description
"""
button = disnake.ui.Button(label="edit")
button.callback = lambda interaction: interaction.response.edit_message(
file=self._get_file(desc)
)
view = disnake.ui.View()
view.add_item(button)
await inter.response.send_message(".", view=view)
def setup(bot):
bot.add_cog(Misc(bot))
print(f"> Extension {__name__} is ready\n")
|
StarcoderdataPython
|
1716455
|
import unittest2 as unittest
import os
import datetime
class HouseKeeping(unittest.TestCase):
def test_license_year(self):
self.assertTrue(os.path.exists('LICENSE.txt'))
now = datetime.datetime.now()
current_year = datetime.datetime.strftime(now, '%Y')
license_text = open('LICENSE.txt').read()
expected_text = 'Copyright %s <NAME> <<EMAIL>>' \
% current_year
self.assertIn(expected_text, license_text)
def test_pip_install(self):
x = os.popen("pip uninstall graphitesend -y")
print(x.read())
y = os.popen("pip install -e .")
print(y.read())
pip_freeze_stdout = os.popen("pip freeze").read()
self.assertIn("graphitesend", pip_freeze_stdout)
|
StarcoderdataPython
|
148267
|
<reponame>chandojo/climbbeta<filename>video/management/commands/loadvideos.py
from django.core import management
from django.core.management.base import BaseCommand, CommandError
from django.core.management.commands import loaddata
from datetime import date
class Command(BaseCommand):
help = "Uploads video fixtures to database"
def handle(self, *args, **options):
today = str(date.today())
return management.call_command('loaddata', today + '.json')
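# Usage sketch: running `python manage.py loadvideos` loads the fixture named after today's
# date, e.g. "2021-06-01.json" (the date shown is only an illustration).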
|
StarcoderdataPython
|
3228580
|
RALI_LIB_NAME = 'librali.so'
from enum import Enum
from enum import IntEnum
class ColorFormat(Enum):
IMAGE_RGB24 = 0
IMAGE_BGR24 = 1
IMAGE_U8 = 2
class Affinity(Enum):
PROCESS_GPU = 0
PROCESS_CPU = 1
class TensorLayout(Enum):
NHWC = 0
NCHW = 1
class TensorDataType(IntEnum):
FLOAT32 = 0
FLOAT16 = 1
|
StarcoderdataPython
|
96043
|
from collections import Counter, namedtuple
from itertools import product
from operator import attrgetter
from random import randint
init_possible_codes = set(product([1, 2, 3, 4, 5, 6], repeat=4))
Feedback = namedtuple('Feedback', ['blacks', 'whites'])
ScoreData = namedtuple('ScoreData', ['guess', 'score', 'is_possible_code'])
def feedback(code, guess):
"""
Return a namedtuple Feedback(blacks, whites) where
blacks is the number of pegs from the guess that
are correct in both color and position and
whites is the number of pegs of the right color but wrong position.
"""
blacks = sum(g == c for g, c in zip(guess, code))
whites = sum((Counter(guess) & Counter(code)).values()) - blacks
return Feedback(blacks, whites)
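# Worked example (illustrative values): for code (1, 2, 3, 4) and guess (1, 3, 5, 4), the
# pegs at positions 0 and 3 match exactly (2 blacks) and the 3 is the right colour in the
# wrong position (1 white), so feedback((1, 2, 3, 4), (1, 3, 5, 4)) == Feedback(2, 1).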
def turn(guess, turn_num):
"""Input feedback of the turn."""
def input_pegs(message):
while True:
try:
user_input = int(input(message))
except ValueError:
print("Enter an integer from 0 to 4 inclusively. Try again.")
else:
if user_input >= 0 and user_input <= 4:
return user_input
else:
print("Enter an integer from 0 to 4. Try again.")
print()
print("Turn", turn_num)
print("Guess:", guess)
blacks = input_pegs("Blacks: ")
whites = input_pegs("Whites: ")
return Feedback(blacks, whites)
def reduce_possible_codes(possible_codes, guess, fb):
"""Return a set with all elements from possible_codes that would receive
the same feedback as the actual feedback from guess, fb,
if guess was the secret code."""
return {code for code in possible_codes if feedback(code, guess) == fb}
def next_guess(possible_codes, past_guesses):
"""
Return the next guess.
A score is calculated for each possible guess
(any unguessed code in the original 1296 set).
The score is the minimum number of possibilites it might
eliminate from possible_guesses.
The minimum eliminated is the count of elements in possible_codes
minus the highest hit count (the count of the most frequent black/white peg
feedback when passed through possible_codes)
The next guess is the guess with the highest score and is in possible_set
whenever possible.
"""
def score(guess):
fbs = [feedback(code, guess) for code in possible_codes]
return len(possible_codes) - max(Counter(fbs).values())
scores = [ScoreData(guess, score(guess), guess in possible_codes)
for guess in sorted(init_possible_codes - past_guesses)]
return max(scores, key=attrgetter('score', 'is_possible_code')).guess
def game():
"""
Five-guess algorithm steps are directly from the Mastermind wikipedia page:
https://en.wikipedia.org/wiki/Mastermind_(board_game)#Five-guess_algorithm
"""
# 1. Create the set S of 1296 possible codes
# (1111, 1112 ... 6665, 6666)
possible_codes = init_possible_codes.copy()
guesses = set()
# 2. Start with initial guess 1122
turn_num = 1
guess = (1, 1, 2, 2)
while True:
# 3. Play the guess to get a response of coloured and white pegs.
guesses.add(guess)
fb = turn(guess, turn_num)
# 4. If the response is four colored pegs,
# the game is won, the algorithm terminates.
if fb.blacks == 4:
print()
break
# 5. Otherwise, remove from S any code that would not give the same
# response if it (the guess) were the code.
possible_codes = reduce_possible_codes(possible_codes, guess, fb)
# 6. Apply minimax technique to find a next guess.
guess = next_guess(possible_codes, guesses)
# 7. Repeat from step 3
turn_num += 1
def main():
while True:
game()
again = ""
while again not in ['y', 'n']:
answer = input("Play again? (y/n) ")
if answer:
again = answer[0].lower()
if again == 'n':
break
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
98022
|
<gh_stars>1-10
from .AggregateMatrix import AggregateMatrix as aggregate_matrix
|
StarcoderdataPython
|
4801935
|
from model.contact import Contact
from random import randrange
import random
def test_edit_contact(app,db, check_ui):
    if app.contact.count_contact() == 0:  # in case there is no contact yet
app.contact.create_contact(Contact(firstname_of_contact="test_firstname", lastname_of_contact="lastname_changed"))
#contactnickname="contactnickname_changed",
old_contacts = db.get_contact_list()
    #index = randrange(len(old_contacts))  # a random contact index is generated
contact = random.choice(old_contacts)
old_contacts.remove(contact)
edit_contact = Contact(firstname_of_contact="jbbk",
lastname_of_contact="iuhon")
#contactnickname="kb",
#contactcompany="lk"
edit_contact.id = contact.id
    #contact.id = old_contacts[index].id  # the id must be preserved after modification; if it is not set explicitly,
    # the modified contact would be assigned a new id
old_contacts.append(edit_contact)
app.contact.edit_contact_by_id(contact.id, edit_contact)
new_contacts = db.get_contact_list()
#assert len(old_contacts) == len(new_contacts)
#old_contacts[index] = contact
    assert sorted(old_contacts, key=Contact.id_or_max) == sorted(new_contacts, key=Contact.id_or_max)  # compare by the id key
if check_ui:
assert sorted(new_contacts, key=Contact.id_or_max) == sorted(app.contact.get_contact_list(),
key=Contact.id_or_max)
'''def test_edit_empty_contact(app):
    if app.contact.count_contact() == 0:  # in case there is no contact yet
app.contact.create_contact(
Contact(firstname_of_contact="", lastname_of_contact="",
contactnickname="", ))
old_contacts = app.contact.get_contact_list()
contact = Contact(firstname_of_contact="",
lastname_of_contact="",
contactnickname="",
contactcompany="", )
contact.id = old_contacts[0].id
app.contact.edit_first_contact(contact)
new_contacts = app.contact.get_contact_list()
assert len(old_contacts) == len(new_contacts)
old_contacts[0] = contact
assert sorted(old_contacts, key=Contact.id_or_max) == sorted(new_contacts, key=Contact.id_or_max)
    old_groups = app.group.get_group_list()  # list of groups before adding a new group
group = Group(name="New_group")
    group.id = old_groups[0].id  # we keep the id of the old group
app.group.edit_first_group(group)
new_groups = app.group.get_group_list()
assert len(old_groups) == len(new_groups)
old_groups[0] = group
assert sorted(old_groups, key=Group.id_or_max) == sorted(new_groups, key=Group.id_or_max)
# lastname was not changed
def test_edit_firstname_of_contact(app):
    if app.contact.count_contact() == 0:  # in case there is no contact yet
        app.contact.create_contact(Contact(firstname_of_contact="test_firstname", mobilenumber="test_mo"))  # we create a contact
app.contact.edit_first_contact(Contact(firstname_of_contact="firstname_changed"))
def test_edit_middlename_of_contact(app):
app.contact.edit_first_contact(
Contact(middlename_of_contact="middlename_changed"))
def test_edit_contactnickname(app):
app.contact.edit_first_contact(
Contact(contactnickname="contactnickname_changed"))
def test_edit_contacttittle(app):
app.contact.edit_first_contact(
Contact(contacttittle="contacttittle_changed"))
def test_edit_contactcompany(app):
app.contact.edit_first_contact(
Contact(contactcompany="contactcompany_changed"))
def test_edit_contactaddress(app):
app.contact.edit_first_contact(
Contact(contactaddress="contactaddress_changed"))
def test_edit_homenumber(app):
app.contact.edit_first_contact(
Contact(homenumber="homenumber_changed"))
def test_edit_mobilenumber(app):
app.contact.edit_first_contact(
Contact(mobilenumber="mobilenumber_changed"))
def test_edit_worknumber(app):
app.contact.edit_first_contact(
Contact(worknumber="worknumber_changed"))
def test_edit_contact_email(app):
app.contact.edit_first_contact(
Contact(contact_email="contact_email_changed"))
def test_contact_notes(app):
app.contact.edit_first_contact(
Contact(contact_notes="contact_notes_changed"))
def test_contact_email2(app):
app.contact.edit_first_contact(
Contact(contact_email2="contact_email2_changed"))
def test_contact_email3(app):
app.contact.edit_first_contact(
Contact(contact_email3="contact_email3_changed"))
def test_contact_homepage(app):
app.contact.edit_first_contact(
Contact(contact_homepage="contact_homepage_changed"))
def test_contact_address2(app):
app.contact.edit_first_contact(
Contact(contact_address2="contact_address2_changed"))
def test_contact_phone2(app):
app.contact.edit_first_contact(
Contact(contact_phone2="contact_phone2_changed"))
'''
|
StarcoderdataPython
|
197908
|
<filename>onlinejudge/implementation/command/test.py
# Python Version: 3.x
import onlinejudge
import onlinejudge.implementation.utils as utils
import onlinejudge.implementation.logging as log
import onlinejudge.implementation.command.utils as cutils
import sys
import os
import os.path
import re
import glob
import colorama
import collections
import time
import math
from typing import *
if TYPE_CHECKING:
import argparse
def compare_as_floats(xs_: str, ys_: str, error: float) -> bool:
def f(x):
try:
y = float(x)
if not math.isfinite(y):
                log.warning('not a real number found: %f', y)
return y
except ValueError:
return x
xs = list(map(f, xs_.split()))
ys = list(map(f, ys_.split()))
if len(xs) != len(ys):
return False
for x, y in zip(xs, ys):
if isinstance(x, float) and isinstance(y, float):
if not math.isclose(x, y, rel_tol=error, abs_tol=error):
return False
else:
if x != y:
return False
return True
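# A quick illustration (made-up strings): with error=1e-4,
# compare_as_floats("1.0 2.0 abc", "1.00001 2.0 abc", 1e-4) is True, because numeric tokens
# are compared with math.isclose while non-numeric tokens must match exactly.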
def test(args: 'argparse.Namespace') -> None:
# prepare
if not args.test:
args.test = cutils.glob_with_format(args.directory, args.format) # by default
if args.ignore_backup:
args.test = cutils.drop_backup_or_hidden_files(args.test)
tests = cutils.construct_relationship_of_files(args.test, args.directory, args.format)
if args.error: # float mode
match = lambda a, b: compare_as_floats(a, b, args.error)
else:
def match(a, b):
if a == b:
return True
if args.rstrip and a.rstrip(rstrip_targets) == b.rstrip(rstrip_targets):
log.warning('WA if no rstrip')
return True
return False
rstrip_targets = ' \t\r\n\f\v\0' # ruby's one, follow AnarchyGolf
slowest: Union[int, float] = -1
slowest_name = ''
ac_count = 0
for name, it in sorted(tests.items()):
is_printed_input = not args.print_input
def print_input():
nonlocal is_printed_input
if not is_printed_input:
is_printed_input = True
with open(it['in']) as inf:
log.emit('input:\n%s', log.bold(inf.read()))
log.emit('')
log.info('%s', name)
# run the binary
with open(it['in']) as inf:
begin = time.perf_counter()
answer_byte, proc = utils.exec_command(args.command, shell=True, stdin=inf, timeout=args.tle)
end = time.perf_counter()
answer = answer_byte.decode()
if slowest < end - begin:
slowest = end - begin
slowest_name = name
log.status('time: %f sec', end - begin)
proc.terminate()
# check TLE, RE or not
result = 'AC'
if proc.returncode is None:
log.failure(log.red('TLE'))
result = 'TLE'
print_input()
elif proc.returncode != 0:
log.failure(log.red('RE') + ': return code %d', proc.returncode)
result = 'RE'
print_input()
# check WA or not
if 'out' in it:
with open(it['out']) as outf:
correct = outf.read()
# compare
if args.mode == 'all':
if not match(answer, correct):
log.failure(log.red('WA'))
print_input()
if not args.silent:
log.emit('output:\n%s', log.bold(answer))
log.emit('expected:\n%s', log.bold(correct))
result = 'WA'
elif args.mode == 'line':
answer_words = answer .splitlines()
correct_words = correct.splitlines()
for i, (x, y) in enumerate(zip(answer_words + [ None ] * len(correct_words), correct_words + [ None ] * len(answer_words))): # type: ignore
if x is None and y is None:
break
elif x is None:
print_input()
log.failure(log.red('WA') + ': line %d: line is nothing: expected "%s"', i + 1, log.bold(y))
result = 'WA'
elif y is None:
print_input()
log.failure(log.red('WA') + ': line %d: unexpected line: output "%s"', i + 1, log.bold(x))
result = 'WA'
elif not match(x, y):
print_input()
log.failure(log.red('WA') + ': line %d: output "%s": expected "%s"', i + 1, log.bold(x), log.bold(y))
result = 'WA'
else:
assert False
else:
if not args.silent:
log.emit(log.bold(answer))
if result == 'AC':
log.success(log.green('AC'))
ac_count += 1
# summarize
log.emit('')
log.status('slowest: %f sec (for %s)', slowest, slowest_name)
if ac_count == len(tests):
log.success('test ' + log.green('success') + ': %d cases', len(tests))
else:
log.failure('test ' + log.red('failed') + ': %d AC / %d cases', ac_count, len(tests))
sys.exit(1)
|
StarcoderdataPython
|
3367396
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=unused-argument
from azure.cli.core.util import sdk_no_wait
from azure.mgmt.synapse.models import SqlPool, SqlPoolPatchInfo, Sku
from .._client_factory import cf_synapse_client_workspace_factory
from ..constant import SynapseSqlCreateMode
# Synapse sqlpool
def create_sql_pool(cmd, client, resource_group_name, workspace_name, sql_pool_name, performance_level, tags=None,
no_wait=False):
workspace_client = cf_synapse_client_workspace_factory(cmd.cli_ctx)
workspace_object = workspace_client.get(resource_group_name, workspace_name)
location = workspace_object.location
sku = Sku(name=performance_level)
sql_pool_info = SqlPool(sku=sku, location=location, create_mode=SynapseSqlCreateMode.Default, tags=tags)
return sdk_no_wait(no_wait, client.create, resource_group_name, workspace_name, sql_pool_name, sql_pool_info)
def update_sql_pool(cmd, client, resource_group_name, workspace_name, sql_pool_name, sku_name=None, tags=None):
sku = Sku(name=sku_name)
sql_pool_patch_info = SqlPoolPatchInfo(sku=sku, tags=tags)
return client.update(resource_group_name, workspace_name, sql_pool_name, sql_pool_patch_info)
|
StarcoderdataPython
|
3207642
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# __author__ = 'kute'
# __mtime__ = '2016/12/24 20:45'
"""
Multi-threaded / coroutine task executor
"""
import os
import attr
import gevent
from gevent import monkey
from gevent.pool import Pool
monkey.patch_all()
def valide_func(instance, attribute, value):
if not callable(value):
raise TypeError("{} is not callable")
@attr.s
class Eventor(object):
func = attr.ib(validator=valide_func)
taskunitcount = attr.ib(default=100, convert=int)
threadcount = attr.ib(default=os.cpu_count() * 5, convert=int)
interval = attr.ib(default=0, convert=int)
def _slice_list_by_size(self, tasklist, slicesize):
"""按指定大小分隔集合
"""
size = len(tasklist)
if size <= slicesize:
yield tasklist
else:
for i in list(range(0, size // slicesize + 1)):
posi = i * slicesize
templist = tasklist[posi: posi + slicesize]
if len(templist) > 0:
yield templist
    # "async" became a reserved keyword in Python 3.7, so the flag is named use_async here.
    def _run(self, pool, tasklist, use_async=False):
        if use_async:
            return pool.map_async(self.func, tasklist)
        else:
            return pool.map(self.func, tasklist)
    def run_with_tasklist(self, tasklist=None, use_async=False, timeout=None):
if not tasklist or len(tasklist) == 0:
raise ValueError("parameters tasklist null value")
if not isinstance(tasklist, list):
raise ValueError("parameters tasklist wrong type, should be list, not {}".format(tasklist.__class__.__name__))
if not callable(self.func):
raise ValueError("func is illegal function")
        if use_async and timeout is None:
            raise ValueError("timeout must be set when use_async=True")
threadcount = self.threadcount or os.cpu_count() * 5
taskunitcount = self.taskunitcount or 100
pool = Pool(threadcount)
size = len(tasklist)
total = 0
resultlist = []
if size <= taskunitcount:
            result = self._run(pool, tasklist, use_async)
            resultlist.extend(result.get(timeout) if use_async else result)
print("finished {} total tasks".format(size))
else:
for slicelist in self._slice_list_by_size(tasklist, taskunitcount):
                result = self._run(pool, slicelist, use_async)
                resultlist.extend(result.get(timeout) if use_async else result)
total += len(slicelist)
gevent.sleep(self.interval)
print("finished {} total tasks".format(total))
pool.join()
return resultlist
    def run_with_file(self, file=None, use_async=False, timeout=None):
if not os.path.exists(file) or not os.path.isfile(file):
raise ValueError("wrong file or not exists")
if not callable(self.func):
raise ValueError("func is illegal function")
        if use_async and timeout is None:
            raise ValueError("timeout must be set when use_async=True")
threadcount = self.threadcount or os.cpu_count() * 5
taskunitcount = self.taskunitcount or 100
pool = Pool(threadcount)
plist = []
total = 0
resultlist = []
with open(file, "r") as f:
for line in f:
plist.append(line.strip())
if len(plist) >= taskunitcount:
                    result = self._run(pool, plist, use_async)
                    resultlist.extend(result.get(timeout) if use_async else result)
total += len(plist)
plist.clear()
gevent.sleep(self.interval)
if len(plist) > 0:
            result = self._run(pool, plist, use_async)
            resultlist.extend(result.get(timeout) if use_async else result)
total += len(plist)
plist.clear()
print("finished {} total tasks".format(total))
pool.join()
return resultlist
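# A minimal usage sketch (the worker function and task values below are illustrative):
#
#     def work(item):
#         return item * 2
#
#     ev = Eventor(func=work, taskunitcount=50, threadcount=10, interval=0)
#     results = ev.run_with_tasklist(list(range(200)))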
|
StarcoderdataPython
|
1776048
|
#
# Copyright 2016 Pixar
#
# Licensed under the Apache License, Version 2.0 (the "Apache License")
# with the following modification; you may not use this file except in
# compliance with the Apache License and the following modification to it:
# Section 6. Trademarks. is deleted and replaced with:
#
# 6. Trademarks. This License does not grant permission to use the trade
# names, trademarks, service marks, or product names of the Licensor
# and its affiliates, except as required to comply with Section 4(c) of
# the License and to reproduce the content of the NOTICE file.
#
# You may obtain a copy of the Apache License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the Apache License with the above modification is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the Apache License for the specific
# language governing permissions and limitations under the Apache License.
#
"""
Contains UsdQt-specific Qt user roles, as well as some hint types for custom
editor widgets.
"""
from __future__ import absolute_import
from ._Qt import QtCore
# The editor hint role is used to provide additional information for UI
# instantiation that the value of the edit role alone may not be sufficient to
# provide. For example, we may need to differentiate between a GfVec3f that
# represents a 3-tuple and a GfVec3f that represents a color.
# All UsdQt EditorHints are defined below and are prefixed with EditorHint.
EditorHintRole = QtCore.Qt.UserRole + 2
# Used to retrieve the prim object in hierarchy models.
HierarchyPrimRole = QtCore.Qt.UserRole + 3
# Specializations that leverage UsdQt at its core can use UsdQtUserRole as the
# first safe index for additional user roles
UsdQtUserRole = QtCore.Qt.UserRole + 16
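# A hypothetical sketch of how a model might serve these roles from its data() method (the
# method body and attribute names below are illustrative, not part of this module):
#
#     def data(self, index, role):
#         if role == EditorHintRole:
#             return EditorHintColorValue(valueTfType)
#         ...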
class EditorHintBasicValue(object):
"""Used for values whose editor can be inferred soley from the TfType"""
__slots__ = ('__type',)
def __init__(self, tfType):
self.__type = tfType
@property
def type(self):
return self.__type
class EditorHintColorValue(object):
"""Hint for when a color editor needs to be instantiated"""
__slots__ = ('__type',)
def __init__(self, tfType):
self.__type = tfType
@property
def type(self):
return self.__type
class EditorHintTextCombo(object):
"""Used for a string/token editor restricted by a list of allowed values"""
__slots__ = ('__allowedValues',)
def __init__(self, allowedValues):
self.__allowedValues = allowedValues
@property
def allowedValues(self):
return self.__allowedValues
class EditorHintTab(object):
"""Used when an item should be drawn as a tab"""
__slots__ = ()
def __init__(self):
pass
|
StarcoderdataPython
|
1730333
|
import json
import pytest
from rest_framework import status
QUERY_URL = "/api/v3/genotype_browser/query"
pytestmark = pytest.mark.usefixtures(
"wdae_gpf_instance", "dae_calc_gene_sets")
def test_simple_query_variants_preview(db, admin_client, remote_settings):
data = {
"datasetId": "TEST_REMOTE_iossifov_2014",
"sources": [{"source": "location"}]
}
response = admin_client.post(
QUERY_URL, json.dumps(data), content_type="application/json"
)
assert status.HTTP_200_OK == response.status_code
res = json.loads(
"".join(map(lambda x: x.decode("utf-8"), response.streaming_content))
)
assert len(res) == 1001
|
StarcoderdataPython
|
12393
|
<reponame>Ublimjo/nwt
def task_clean_junk():
"""Remove junk file"""
return {
'actions': ['rm -rdf $(find . | grep pycache)'],
'clean': True,
}
|
StarcoderdataPython
|
1731865
|
import os
import sipconfig
from PyQt4 import pyqtconfig
from distutils import sysconfig
vcs_so = '%s/vcs/_vcs.so' % sysconfig.get_python_lib()
vcs_inc = '%s/vcs/Include' % sysconfig.get_python_lib()
## vcs_so = '/Users/hvo/src/uvcdat/cdatBuild/lib/python2.7/site-packages/vcs/_vcs.so'
## vcs_inc = '/Users/hvo/src/uvcdat/cdat/Packages/vcs/Include'
# The name of the SIP build file generated by SIP and used by the build
# system.
build_file = "pyqtscripting.sbf"
# Get the PyQt configuration information.
config = pyqtconfig.Configuration()
# Get the extra SIP flags needed by the imported qt module. Note that
# this normally only includes those flags (-x and -t) that relate to SIP's
# versioning system.
qt_sip_flags = config.pyqt_sip_flags
os.system("rm -rf cdatwrap")
os.mkdir("cdatwrap")
os.system("touch cdatwrap/__init__.py")
# Run SIP to generate the code. Note that we tell SIP where to find the qt
# module's specification files using the -I flag.
os.system(" ".join([ \
config.sip_bin, \
"-c", "cdatwrap", \
"-b", build_file, \
"-I", config.pyqt_sip_dir, \
qt_sip_flags, \
"cdat.sip" \
]))
# Create the Makefile. The QtModuleMakefile class provided by the
# pyqtconfig module takes care of all the extra preprocessor, compiler and
# linker flags needed by the Qt library.
makefile = pyqtconfig.QtGuiModuleMakefile(
dir="cdatwrap",
configuration=config,
build_file='../' + build_file
)
# Add the library we are wrapping. The name doesn't include any platform
# specific prefixes or extensions (e.g. the "lib" prefix on UNIX, or the
# ".dll" extension on Windows).
#makefile.extra_libs = ["vcs"]
import cdat_info
makefile.CFLAGS.append("-I%s/include" % cdat_info.externals)
makefile.CFLAGS.append("-I%s" % vcs_inc)
makefile.CFLAGS.append("-I%s/.." % sysconfig.get_python_inc())
makefile.CXXFLAGS.append("-I%s/include" % cdat_info.externals)
makefile.CXXFLAGS.append("-I%s" % vcs_inc)
makefile.CXXFLAGS.append("-I%s/.." % sysconfig.get_python_inc())
cwd = os.getcwd()
makefile.LFLAGS.append("-Wl,-rpath,%s/cdatwrap" % cwd)
# Generate the Makefile itself.
makefile.generate()
os.chdir("cdatwrap")
os.system("make clean")
os.system("MACOSX_DEPLOYMENT_TARGET=10.6 make -j")
os.system("make install")
|
StarcoderdataPython
|
4829623
|
<filename>examples/launch_moasha_instance_tuning.py
# Copyright 2021 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
"""
Example showing how to tune instance types and hyperparameters with a Sagemaker Framework.
"""
import logging
from pathlib import Path
from sagemaker.huggingface import HuggingFace
from syne_tune.backend.sagemaker_backend.instance_info import select_instance_type
from syne_tune.backend import SageMakerBackend
from syne_tune.backend.sagemaker_backend.sagemaker_utils import get_execution_role
from syne_tune.constants import ST_WORKER_TIME, ST_WORKER_COST
from syne_tune.optimizer.schedulers.multiobjective import MOASHA
from syne_tune.remote.remote_launcher import RemoteLauncher
from syne_tune import StoppingCriterion, Tuner
from syne_tune.config_space import loguniform, choice
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
n_workers = 2
epochs = 4
# Select the instance types that are searched.
# Alternatively, you can define the instance list explicitly: `instance_types = ['ml.c5.xlarge', 'ml.m5.2xlarge']`
instance_types = select_instance_type(min_gpu=1, max_cost_per_hour=5.0)
print(f"tuning over hyperparameters and instance types: {instance_types}")
# define a search space that contains hyperparameters (learning-rate, weight-decay) and instance-type.
config_space = {
'st_instance_type': choice(instance_types),
'learning_rate': loguniform(1e-6, 1e-4),
'weight_decay': loguniform(1e-5, 1e-2),
'epochs': epochs,
'dataset_path': './',
}
entry_point = Path(__file__).parent / "training_scripts" / "distilbert_on_imdb" / "distilbert_on_imdb.py"
metric = "accuracy"
# Define a MOASHA scheduler that searches over the config space to maximise accuracy and minimize cost and time.
scheduler = MOASHA(
max_t=epochs,
time_attr="step",
metrics=[metric, ST_WORKER_COST, ST_WORKER_TIME],
mode=['max', 'min', 'min'],
config_space=config_space,
)
# Define the training function to be tuned, use the Sagemaker backend to execute trials as separate training job
# (since they are quite expensive).
trial_backend = SageMakerBackend(
sm_estimator=HuggingFace(
entry_point=str(entry_point),
base_job_name='hpo-transformer',
            # the instance type given here is overridden by Syne Tune with values sampled from `st_instance_type`.
instance_type='ml.m5.large',
instance_count=1,
transformers_version='4.4',
pytorch_version='1.6',
py_version='py36',
max_run=3600,
role=get_execution_role(),
dependencies=[str(Path(__file__).parent.parent / "benchmarking")],
),
)
remote_launcher = RemoteLauncher(
tuner=Tuner(
trial_backend=trial_backend,
scheduler=scheduler,
stop_criterion=StoppingCriterion(max_wallclock_time=3600, max_cost=10.0),
n_workers=n_workers,
sleep_time=5.0,
),
dependencies=[str(Path(__file__).parent.parent / "benchmarking")],
)
remote_launcher.run(wait=False)
|
StarcoderdataPython
|
36238
|
<gh_stars>0
class Solution:
def threeSum(self, nums: List[int]) -> List[List[int]]:
if len(nums) < 3:
return []
ans = []
nums.sort()
for i in range(0, len(nums)-2):
if nums[i] > 0:
break
if i > 0 and nums[i-1] == nums[i]:
continue
left, right = i+1, len(nums)-1
while right > left:
s = nums[left] + nums[right] + nums[i]
if s == 0:
ans.append([nums[i], nums[left], nums[right]])
left += 1
right -= 1
while right > left and nums[left] == nums[left-1]:
left += 1
while right > left and nums[right] == nums[right+1]:
right -= 1
elif s < 0:
left += 1
else:
right -= 1
return ans
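# Example (a standard illustration): for nums = [-1, 0, 1, 2, -1, -4] the method returns
# [[-1, -1, 2], [-1, 0, 1]]; the array is sorted first, then a two-pointer scan runs for
# each fixed first element while duplicate values are skipped.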
|
StarcoderdataPython
|
1779114
|
# -*- coding: utf-8 -*-
from openprocurement.api.utils import (
json_view,
opresource,
APIResource,
ROUTE_PREFIX,
context_unpack
)
from openprocurement.tender.core.utils import save_tender, optendersresource
from openprocurement.relocation.core.utils import change_ownership
from openprocurement.relocation.core.validation import validate_ownership_data
from openprocurement.relocation.tenders.validation import validate_tender_accreditation_level
@optendersresource(name='Tender ownership',
path='/tenders/{tender_id}/ownership',
description="Tenders Ownership")
class TenderResource(APIResource):
@json_view(permission='create_tender',
validators=(validate_tender_accreditation_level,
validate_ownership_data,))
def post(self):
tender = self.request.validated['tender']
location = self.request.route_path('{}:Tender'.format(tender['procurementMethodType']), tender_id=tender.id)
location = location[len(ROUTE_PREFIX):] # strips /api/<version>
if change_ownership(self.request, location) and save_tender(self.request):
self.LOGGER.info('Updated ownership of tender {}'.format(tender.id),
extra=context_unpack(self.request, {'MESSAGE_ID': 'tender_ownership_update'}))
return {'data': tender.serialize('view')}
|
StarcoderdataPython
|
4838788
|
try:
from workers_defaults import sink_instance, sender_instance
except ImportError:
sink_instance = {
'ami': 'ami-915394eb',
'first_ip': '10.0.17.0',
'security': ['sg-21d88f5c'], # nosec
'net_subnet': 'subnet-3557de19',
'instance_type': 't2.micro',
'key_name': 'gate',
'table_dir': 'workers',
'storage': 'sinks.json',
}
sender_instance = {
'ami': 'ami-ad5b9cd7',
'first_ip': '10.0.18.0',
'security': ['sg-21d88f5c'], # nosec
'net_subnet': 'subnet-3557de19',
'instance_type': 't2.micro',
'key_name': 'gate',
'table_dir': 'workers',
'storage': 'senders.json',
}
|
StarcoderdataPython
|
64475
|
from abc import ABC, abstractmethod
import asyncio
from typing import (
AsyncIterator,
Tuple,
)
from cancel_token import (
CancelToken,
OperationCancelled,
)
from eth.constants import GENESIS_BLOCK_NUMBER
from eth.exceptions import (
HeaderNotFound,
)
from eth_typing import (
BlockNumber,
Hash32,
)
from eth_utils import (
encode_hex,
ValidationError,
)
from eth.abc import (
BlockAPI,
BlockHeaderAPI,
SignedTransactionAPI,
)
from eth2.beacon.types.blocks import BaseBeaconBlock
from p2p.constants import (
MAX_REORG_DEPTH,
SEAL_CHECK_RANDOM_SAMPLE_RATE,
)
from p2p.disconnect import DisconnectReason
from p2p.service import BaseService
from trinity._utils.headers import (
skip_complete_headers,
)
from trinity._utils.humanize import (
humanize_integer_sequence,
)
from trinity.chains.base import AsyncChainAPI
from trinity.db.eth1.header import BaseAsyncHeaderDB
from trinity.protocol.common.peer import (
BaseChainPeer,
)
from eth2.beacon.chains.base import (
BaseBeaconChain
)
from .types import SyncProgress
class PeerHeaderSyncer(BaseService):
"""
Sync as many headers as possible with a given peer.
Here, the run() method will execute the sync loop until our local head is the same as the one
with the highest TD announced by any of our peers.
"""
_seal_check_random_sample_rate = SEAL_CHECK_RANDOM_SAMPLE_RATE
def __init__(self,
chain: AsyncChainAPI,
db: BaseAsyncHeaderDB,
peer: BaseChainPeer,
token: CancelToken = None) -> None:
super().__init__(token)
self.chain = chain
self.db = db
self.sync_progress: SyncProgress = None
self._peer = peer
self._target_header_hash = peer.head_info.head_hash
def get_target_header_hash(self) -> Hash32:
if self._target_header_hash is None:
raise ValidationError("Cannot check the target hash when there is no active sync")
else:
return self._target_header_hash
async def _run(self) -> None:
await self.events.cancelled.wait()
async def next_header_batch(self) -> AsyncIterator[Tuple[BlockHeaderAPI, ...]]:
"""Try to fetch headers until the given peer's head_hash.
Returns when the peer's head_hash is available in our ChainDB, or if any error occurs
during the sync.
"""
peer = self._peer
head = await self.wait(self.db.coro_get_canonical_head())
head_td = await self.wait(self.db.coro_get_score(head.hash))
if peer.head_info.head_td <= head_td:
self.logger.info(
"Head TD (%d) announced by %s not higher than ours (%d), not syncing",
peer.head_info.head_td, peer, head_td)
return
else:
self.logger.debug(
"%s announced Head TD %d, which is higher than ours (%d), starting sync",
peer, peer.head_info.head_td, head_td)
self.sync_progress = SyncProgress(
head.block_number,
head.block_number,
peer.head_info.head_number,
)
self.logger.info("Starting sync with %s", peer)
last_received_header: BlockHeaderAPI = None
# When we start the sync with a peer, we always request up to MAX_REORG_DEPTH extra
# headers before our current head's number, in case there were chain reorgs since the last
# time _sync() was called. All of the extra headers that are already present in our DB
# will be discarded by skip_complete_headers() so we don't unnecessarily process them
# again.
start_at = BlockNumber(max(GENESIS_BLOCK_NUMBER + 1, head.block_number - MAX_REORG_DEPTH))
while self.is_operational:
if not peer.is_operational:
self.logger.info("%s disconnected, aborting sync", peer)
break
try:
all_headers = await self.wait(self._request_headers(peer, start_at))
if last_received_header is None:
# Skip over existing headers on the first run-through
completed_headers, new_headers = await self.wait(
skip_complete_headers(all_headers, self.db.coro_header_exists)
)
if len(new_headers) == 0 and len(completed_headers) > 0:
head = await self.wait(self.db.coro_get_canonical_head())
start_at = BlockNumber(max(
all_headers[-1].block_number + 1,
head.block_number - MAX_REORG_DEPTH
))
self.logger.debug(
"All %d headers redundant, head at %s, fetching from #%d",
len(completed_headers),
head,
start_at,
)
continue
elif completed_headers:
self.logger.debug(
"Header sync skipping over (%d) already stored headers %s: %s..%s",
len(completed_headers),
humanize_integer_sequence(h.block_number for h in completed_headers),
completed_headers[0],
completed_headers[-1],
)
else:
new_headers = all_headers
self.logger.debug2('sync received new headers: %s', new_headers)
except OperationCancelled:
self.logger.info("Sync with %s completed", peer)
break
except asyncio.TimeoutError:
self.logger.warning("Timeout waiting for header batch from %s, aborting sync", peer)
await peer.disconnect(DisconnectReason.TIMEOUT)
break
except ValidationError as err:
self.logger.warning(
"Invalid header response sent by peer %s disconnecting: %s",
peer, err,
)
await peer.disconnect(DisconnectReason.USELESS_PEER)
break
if not new_headers:
if last_received_header is None:
request_parent = head
else:
request_parent = last_received_header
if head_td < peer.head_info.head_td:
# peer claims to have a better header, but didn't return it. Boot peer
# TODO ... also blacklist, because it keeps trying to reconnect
self.logger.warning(
"%s announced difficulty %s, but didn't return any headers after %r@%s",
peer,
peer.head_info.head_td,
request_parent,
head_td,
)
await peer.disconnect(DisconnectReason.SUBPROTOCOL_ERROR)
else:
self.logger.info("Got no new headers from %s, aborting sync", peer)
break
first = new_headers[0]
first_parent = None
if last_received_header is None:
# on the first request, make sure that the earliest ancestor has a parent in our db
try:
first_parent = await self.wait(
self.db.coro_get_block_header_by_hash(first.parent_hash)
)
except HeaderNotFound:
self.logger.warning(
"Unable to find common ancestor betwen our chain and %s",
peer,
)
break
elif last_received_header.hash != first.parent_hash:
# on follow-ups, require the first header in this batch to be next in succession
self.logger.warning(
"Header batch starts with %r, with parent %s, but last header was %r",
first,
encode_hex(first.parent_hash[:4]),
last_received_header,
)
break
self.logger.debug(
"Got new header chain from %s: %s..%s",
peer,
first,
new_headers[-1],
)
try:
await self.chain.coro_validate_chain(
last_received_header or first_parent,
new_headers,
self._seal_check_random_sample_rate,
)
except ValidationError as e:
self.logger.warning("Received invalid headers from %s, disconnecting: %s", peer, e)
await peer.disconnect(DisconnectReason.SUBPROTOCOL_ERROR)
break
for header in new_headers:
head_td += header.difficulty
# Setting the latest header hash for the peer, before queuing header processing tasks
self._target_header_hash = peer.head_info.head_hash
yield new_headers
last_received_header = new_headers[-1]
self.sync_progress = self.sync_progress.update_current_block(
last_received_header.block_number,
)
start_at = BlockNumber(last_received_header.block_number + 1)
async def _request_headers(
self, peer: BaseChainPeer, start_at: BlockNumber) -> Tuple[BlockHeaderAPI, ...]:
"""Fetch a batch of headers starting at start_at and return the ones we're missing."""
self.logger.debug("Requsting chain of headers from %s starting at #%d", peer, start_at)
return await peer.chain_api.get_block_headers(
start_at,
peer.max_headers_fetch,
skip=0,
reverse=False,
)
class BaseBlockImporter(ABC):
@abstractmethod
async def import_block(
self,
block: BlockAPI) -> Tuple[BlockAPI, Tuple[BlockAPI, ...], Tuple[BlockAPI, ...]]:
...
async def preview_transactions(
self,
header: BlockHeaderAPI,
transactions: Tuple[SignedTransactionAPI, ...],
parent_state_root: Hash32,
lagging: bool = True) -> None:
"""
Give the importer a chance to preview upcoming blocks. This can improve performance
:param header: The header of the upcoming block
:param transactions: The transactions in the upcoming block
:param parent_state_root: The state root hash at the beginning of the upcoming block
(the end of the previous block)
:param lagging: Is the upcoming block *very* far ahead of the current block?
The lagging parameter is used to take actions that may be resource-intensive and slow,
but will accelerate the block once we catch up to it. A slow preparation is a waste of
resources unless the upcoming block is far enough in the future.
"""
# default action: none
pass
class SimpleBlockImporter(BaseBlockImporter):
def __init__(self, chain: AsyncChainAPI) -> None:
self._chain = chain
async def import_block(
self,
block: BlockAPI) -> Tuple[BlockAPI, Tuple[BlockAPI, ...], Tuple[BlockAPI, ...]]:
return await self._chain.coro_import_block(block, perform_validation=True)
class BaseSyncBlockImporter(ABC):
@abstractmethod
def import_block(
self,
block: BlockAPI) -> Tuple[BlockAPI, Tuple[BlockAPI, ...], Tuple[BlockAPI, ...]]:
...
class SyncBlockImporter(BaseSyncBlockImporter):
def __init__(self, chain: BaseBeaconChain) -> None:
self._chain = chain
def import_block(
self,
block: BaseBeaconBlock
) -> Tuple[BaseBeaconBlock, Tuple[BaseBeaconBlock, ...], Tuple[BaseBeaconBlock, ...]]:
return self._chain.import_block(block, perform_validation=True)
|
StarcoderdataPython
|
1754132
|
import math
import sys
def main():
a, b, k = map(int, sys.stdin.readline().split())
query_range = range(a, b + 1)
query_range = list(query_range)
print("なんだこのバグ")
exit()
if a == b:
res = query_range
elif k >= math.ceil((b - a + 1) / 2):
res = query_range
else:
res = query_range[:k] + query_range[-k:]
for i in res:
print(i)
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
1666791
|
<gh_stars>0
import pprint
import scipy
import scipy.linalg # SciPy Linear Algebra Library
import numpy as np
a = np.matrix([ [1, 2, 3], [2, 3, 4], [1, 2, 5] ])
k = np.array([ [8, 2, 9], [4, 9, 4], [6, 7, 9] ])
#print("Secret key is ")
#print(k)
P, L, U = scipy.linalg.lu(k)
#print ("A:")
#pprint.pprint(k)
#print( "P:")
#pprint.pprint(P)
#print ("L:")
#pprint.pprint(L)
#print ("U:")
#pprint.pprint(U)
|
StarcoderdataPython
|
1711273
|
<reponame>Catalyst9k-SLA/Cat9k
# Importing the variable file in the dir Variable
import sys
import os
import inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
dirParent = os.path.dirname(currentdir)
dirVariable = dirParent + "/Variables"
sys.path.insert(0, dirVariable)
from argparse import ArgumentParser
from SparkVariables import *
from SparkFunctions import *
def kittyHelp():
post_message_markdown("# Kitty Help \n"
"Usage : @" + botName_Kitty + " instruction1 instruction2 ...\n"
"Here is what I can do : \n"
"* **help** : print those inscructions\n"
"* **Salut** : greetings (in French !)\n"
"* **save config** : backup the configturation on the TFTP server\n"
"* **last config** : information about the last config changes", roomID_SoftwareProject, bearer_Bot)
return "OK"
|
StarcoderdataPython
|
195611
|
<gh_stars>10-100
# encoding: utf-8
import xadmin
from xadmin.views import BaseAdminPlugin, CreateAdminView, ModelFormAdminView, UpdateAdminView
from DjangoUeditor.models import UEditorField
from DjangoUeditor.widgets import UEditorWidget
from django.conf import settings
class XadminUEditorWidget(UEditorWidget):
def __init__(self,**kwargs):
self.ueditor_options=kwargs
self.Media.js = None
super(XadminUEditorWidget,self).__init__(kwargs)
class UeditorPlugin(BaseAdminPlugin):
def get_field_style(self, attrs, db_field, style, **kwargs):
if style == 'ueditor':
if isinstance(db_field, UEditorField):
widget = db_field.formfield().widget
param = {}
param.update(widget.ueditor_settings)
param.update(widget.attrs)
return {'widget': XadminUEditorWidget(**param)}
return attrs
    # inject our own js files into the generated page
def block_extrahead(self, context, nodes):
        # our own static files directory
js = '<script type="text/javascript" src="%s"></script>' % (settings.STATIC_URL + "ueditor/ueditor.config.js")
        # our own static files directory
js += '<script type="text/javascript" src="%s"></script>' % (settings.STATIC_URL + "ueditor/ueditor.all.min.js")
nodes.append(js)
# register the plugin for the edit (update) page
xadmin.site.register_plugin(UeditorPlugin, UpdateAdminView)
# register the plugin for the create (add) page
xadmin.site.register_plugin(UeditorPlugin, CreateAdminView)
|
StarcoderdataPython
|
3353409
|
<reponame>velovix/debutizer<gh_stars>1-10
import re
from pathlib import Path
from typing import List
def find_binary_packages(path: Path, recursive: bool = False) -> List[Path]:
return _glob_search(path, BINARY_PACKAGE_GLOB, recursive)
def find_debian_source_files(path: Path, recursive: bool = False) -> List[Path]:
return _glob_search(path, DEBIAN_SOURCE_FILE_GLOB, recursive)
def find_source_archives(path: Path, recursive: bool = False) -> List[Path]:
results = _glob_search(path, SOURCE_ARCHIVE_GLOB, recursive)
return [r for r in results if _SOURCE_ARCHIVE_REGEX.match(r.name)]
def find_debian_archives(path: Path, recursive: bool = False) -> List[Path]:
return _glob_search(path, DEBIAN_ARCHIVE_GLOB, recursive)
def find_changes_files(path: Path, recursive: bool = False) -> List[Path]:
return _glob_search(path, CHANGES_GLOB, recursive)
def find_artifacts(path: Path, recursive: bool = False) -> List[Path]:
return (
find_binary_packages(path, recursive)
+ find_debian_source_files(path, recursive)
+ find_source_archives(path, recursive)
+ find_debian_archives(path, recursive)
)
def _glob_search(path: Path, glob: str, recursive: bool) -> List[Path]:
if recursive:
output = path.rglob(glob)
else:
output = path.glob(glob)
return list(output)
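# A minimal usage sketch (hypothetical directory, not part of the original module):
#
#     for artifact in find_artifacts(Path("dist"), recursive=True):
#         print(artifact)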
BINARY_PACKAGE_GLOB = "*.deb"
DEBIAN_SOURCE_FILE_GLOB = "*.dsc"
SOURCE_ARCHIVE_GLOB = "*.orig.tar.*"
DEBIAN_ARCHIVE_GLOB = "*.debian.tar.*"
CHANGES_GLOB = "*.changes"
_SOURCE_ARCHIVE_REGEX = re.compile(r"^.*\.orig\.tar\.[a-zA-Z]+$")
"""A more specific version of the source archive glob that filters out files that aren't
actually archives, like mypackage.orig.tar.gz.asc.
"""
|
StarcoderdataPython
|
3284153
|
def solvepart1():
freq = 0
with open('inputs/day1.txt') as f:
for i in f:
freq += int(i)
return freq
def solvepart2():
freq = 0
seen_freqs = set([freq])
while True:
with open('inputs/day1.txt') as f:
for i in f:
freq += int(i)
if freq in seen_freqs:
return freq
seen_freqs.add(freq)
return freq
if __name__=='__main__':
    print(solvepart1())
    print(solvepart2())
|
StarcoderdataPython
|
3228228
|
from tensorflow.keras.layers import Conv2D
def RPN(inputs, k):
x = Conv2D(256, kernel_size=(3, 3), activation='relu')(inputs)
cls = Conv2D(2 * k, kernel_size=(1, 1))(x)
reg = Conv2D(4 * k, kernel_size=(1, 1))(x)
return [cls, reg]
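# A minimal usage sketch (the feature-map shape and k are illustrative assumptions):
# build the RPN head on a dummy backbone output with k = 9 anchors per location and
# inspect the objectness (cls) and box-regression (reg) output shapes.
if __name__ == "__main__":
    from tensorflow.keras.layers import Input
    from tensorflow.keras.models import Model
    feature_map = Input(shape=(32, 32, 512))  # assumed backbone feature map
    cls, reg = RPN(feature_map, k=9)
    model = Model(inputs=feature_map, outputs=[cls, reg])
    model.summary()  # cls: (None, 30, 30, 18), reg: (None, 30, 30, 36)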
|
StarcoderdataPython
|
149907
|
<filename>practice/src/decorator/class/cached_property.py
# -*- coding: utf-8 -*-
#
# © 2011 <NAME>, MIT License
#
# A class decorator that takes arguments
import time
import random
class cached_property(object):
"""Decorator for read-only properties evaluated only once within TTL period.
    It can be used to create a cached property like this::
import random
# the class containing the property must be a new-style class
class MyClass(object):
# create property whose value is cached for ten minutes
@cached_property(ttl=600)
def randint(self):
# will only be evaluated every 10 min. at maximum.
return random.randint(0, 100)
The value is cached in the '_cache' attribute of the object instance that
has the property getter method wrapped by this decorator. The '_cache'
attribute value is a dictionary which has a key for every property of the
object which is wrapped by this decorator. Each entry in the cache is
created only when the property is accessed for the first time and is a
two-element tuple with the last computed property value and the last time
it was updated in seconds since the epoch.
The default time-to-live (TTL) is 300 seconds (5 minutes). Set the TTL to
zero for the cached value to never expire.
To expire a cached property value manually just do::
del instance._cache[<property name>]
"""
def __init__(self, ttl=300):
print "__init__"
self.ttl = ttl
def __call__(self, fget, doc=None):
print "__call__"
#print "fget"
#print fget #function randint
self.fget = fget
self.__doc__ = doc or fget.__doc__
self.__name__ = fget.__name__
self.__module__ = fget.__module__
return self
def __get__(self, inst, owner):
print "__get__"
now = time.time()
try:
value, last_update = inst._cache[self.__name__]
if self.ttl > 0 and now - last_update > self.ttl:
raise AttributeError
except (KeyError, AttributeError):
#print "inst"
#print inst #MyClass object
#print self #cached_property object
value = self.fget(inst)
try:
cache = inst._cache
except AttributeError:
cache = inst._cache = {}
cache[self.__name__] = (value, now)
return value
class MyClass(object):
# create property whose value is cached for ten minutes
#__init__
#__call__
@cached_property(ttl=600)
def randint(self):
print "randint"
# will only be evaluated every 10 min. at maximum.
return random.randint(0, 100)
print random.randint(0, 100)
my = MyClass()
#__get__
print my.randint
print my.randint
|
StarcoderdataPython
|
6903
|
<reponame>RomanMahar/personalsite
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2018-02-06 16:25
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0028_merge'),
('home', '0009_remove_homepagesection_sectiontitle'),
]
operations = [
migrations.CreateModel(
name='SnippetClass',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('text', models.CharField(max_length=255)),
('page', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='snippy', to='wagtailcore.Page')),
],
),
migrations.AlterField(
model_name='homepagesection',
name='sectionClassName',
field=models.SlugField(default='homepage-section', help_text='no spaces', max_length=100),
),
migrations.AddField(
model_name='homepagesection',
name='advert',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='+', to='home.SnippetClass'),
),
]
|
StarcoderdataPython
|
1779410
|
<gh_stars>0
"""Bazel rule for loading external repository deps for J2CL."""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
_IO_BAZEL_RULES_CLOSURE_VERSION = "master"
def load_j2cl_repo_deps():
http_archive(
name = "io_bazel_rules_closure",
strip_prefix = "rules_closure-%s" % _IO_BAZEL_RULES_CLOSURE_VERSION,
url = "https://github.com/bazelbuild/rules_closure/archive/%s.zip"% _IO_BAZEL_RULES_CLOSURE_VERSION,
)
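# A minimal WORKSPACE usage sketch (the load label below is an assumption for
# illustration; the actual package path of this .bzl file may differ):
#
#     load("//build_defs:repo.bzl", "load_j2cl_repo_deps")
#     load_j2cl_repo_deps()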
|
StarcoderdataPython
|
3277285
|
<filename>bugtests/test373.py
"""
Test for bug [608628] long(java.math.BigInteger) does not work.
"""
import support
# local name bugtests/test381.py
ns = '10000000000'
import java
ns2 = str(long(java.math.BigInteger(ns)))
assert ns == ns2, ns2
|
StarcoderdataPython
|
1673712
|
<reponame>kundajelab/chip-nexus-pipeline
#!/usr/bin/env python
# ENCODE DCC filter wrapper
# Author: <NAME> (<EMAIL>)
import sys
import os
import argparse
import multiprocessing
from encode_common_genomic import *
def parse_arguments():
parser = argparse.ArgumentParser(prog='ENCODE DCC filter.',
description='')
parser.add_argument('bam', type=str,
help='Path for raw BAM file.')
parser.add_argument('--mapq-thresh', default=30, type=int,
help='Threshold for low MAPQ reads removal.')
parser.add_argument('--no-dup-removal', action="store_true",
help='No dupe reads removal when filtering BAM.')
parser.add_argument('--paired-end', action="store_true",
help='Paired-end BAM.')
parser.add_argument('--multimapping', default=0, type=int,
help='Multimapping reads.')
parser.add_argument('--mito-chr-name', default='chrM',
help='Mito chromosome name.')
parser.add_argument('--nth', type=int, default=1,
help='Number of threads to parallelize.')
parser.add_argument('--out-dir', default='', type=str,
help='Output directory.')
parser.add_argument('--log-level', default='INFO',
choices=['NOTSET','DEBUG','INFO',
                                 'WARNING','ERROR','CRITICAL'],
help='Log level')
args = parser.parse_args()
log.setLevel(args.log_level)
log.info(sys.argv)
return args
def rm_unmapped_lowq_reads_se(bam, multimapping, mapq_thresh, nth, out_dir):
prefix = os.path.join(out_dir,
os.path.basename(strip_ext_bam(bam)))
filt_bam = '{}.filt.bam'.format(prefix)
if multimapping:
# qname_sort_bam = samtools_name_sort(bam, nth, out_dir)
qname_sort_bam = sambamba_name_sort(bam, nth, out_dir)
cmd2 = 'samtools view -h {} | '
cmd2 += '$(which assign_multimappers.py) -k {} | '
cmd2 += 'samtools view -F 1804 -Su /dev/stdin | '
# cmd2 += 'samtools sort /dev/stdin -o {} -T {} -@ {}'
# cmd2 = cmd2.format(
# qname_sort_bam,
# multimapping,
# filt_bam,
# prefix,
# nth)
cmd2 += 'sambamba sort /dev/stdin -o {} -t {}'
cmd2 = cmd2.format(
qname_sort_bam,
multimapping,
filt_bam,
nth)
run_shell_cmd(cmd2)
rm_f(qname_sort_bam) # remove temporary files
else:
cmd = 'samtools view -F 1804 -q {} -u {} | '
cmd += 'samtools sort /dev/stdin -o {} -T {} -@ {}'
cmd = cmd.format(
mapq_thresh,
bam,
filt_bam,
prefix,
nth)
run_shell_cmd(cmd)
return filt_bam
def rm_unmapped_lowq_reads_pe(bam, multimapping, mapq_thresh, nth, out_dir):
raise NotImplementedError
def rm_dup_se(filt_bam, nth, out_dir):
prefix = os.path.join(out_dir,
os.path.basename(strip_ext_bam(filt_bam)))
# strip extension appended in the previous step
prefix = strip_ext(prefix,'filt')
nodup_bam = '{}.nodup.bam'.format(prefix)
cmd1 = 'nimnexus dedup -t {} {} | samtools view - -b > {}'
cmd1 = cmd1.format(
nth,
filt_bam,
nodup_bam)
run_shell_cmd(cmd1)
return nodup_bam
def rm_dup_pe(filt_bam, nth, out_dir):
raise NotImplementedError
def pbc_qc_se(filt_bam, nodup_bam, mito_chr_name, out_dir):
prefix = os.path.join(out_dir,
os.path.basename(strip_ext_bam(filt_bam)))
pbc_qc = '{}.pbc.qc'.format(prefix)
mt = int(run_shell_cmd('samtools view {} | grep -v "\\b{}\\b" | wc -l'.format(filt_bam, mito_chr_name)))
m0 = int(run_shell_cmd('samtools view {} | grep -v "\\b{}\\b" | wc -l'.format(nodup_bam, mito_chr_name)))
m0_mt = 0 if mt==0.0 else m0/float(mt)
with open(pbc_qc, 'w') as fp:
fp.write("{}\t{}\tN/A\tN/A\t{}\tN/A\tN/A\n".format(mt, m0, m0_mt))
# cmd2 = 'bedtools bamtobed -i {} | '
# cmd2 += 'awk \'BEGIN{{OFS="\\t"}}{{print $1,$2,$3,$6}}\' | '
# cmd2 += 'grep -v "^{}\\b" | sort | uniq -c | '
# cmd2 += 'awk \'BEGIN{{mt=0;m0=0;m1=0;m2=0}} ($1==1){{m1=m1+1}} '
# cmd2 += '($1==2){{m2=m2+1}} {{m0=m0+1}} {{mt=mt+$1}} END{{m1_m2=-1.0; '
# cmd2 += 'if(m2>0) m1_m2=m1/m2; m0_mt=0; if (mt>0) m0_mt=m0/mt; m1_m0=0; if (m0>0) m1_m0=m1/m0; '
# cmd2 += 'printf "%d\\t%d\\t%s\\t%s\\t%f\\t%s\\t%s\\n",'
# cmd2 += 'mt,m0,"N/A","N/A",m0_mt,"N/A","N/A"}}\' > {}'
# cmd2 = cmd2.format(
# bam,
# mito_chr_name,
# pbc_qc)
# run_shell_cmd(cmd2)
return pbc_qc
def pbc_qc_pe(filt_bam, nodup_bam, mito_chr_name, nth, out_dir):
raise NotImplementedError
# if --no-dup-removal is on,
# Cromwell/WDL wants to have an empty file
# for output { File pbc_qc, File dup_qc }
def main():
# filt_bam - dupmark_bam - nodup_bam
# \ dup_qc \ pbc_qc
# read params
args = parse_arguments()
log.info('Initializing and making output directory...')
mkdir_p(args.out_dir)
# declare temp arrays
temp_files = [] # files to deleted later at the end
log.info('Removing unmapped/low-quality reads...')
if args.paired_end:
filt_bam = rm_unmapped_lowq_reads_pe(
args.bam, args.multimapping, args.mapq_thresh,
args.nth, args.out_dir)
else:
filt_bam = rm_unmapped_lowq_reads_se(
args.bam, args.multimapping, args.mapq_thresh,
args.nth, args.out_dir)
if args.no_dup_removal:
nodup_bam = filt_bam
else:
log.info('Removing dupes...')
if args.paired_end:
nodup_bam = rm_dup_pe(
filt_bam, args.nth, args.out_dir)
else:
nodup_bam = rm_dup_se(
filt_bam, args.nth, args.out_dir)
temp_files.append(filt_bam)
# initialize multithreading
log.info('Initializing multi-threading...')
num_process = min(3,args.nth)
log.info('Number of threads={}.'.format(num_process))
pool = multiprocessing.Pool(num_process)
# log.info('samtools index...')
# ret_val_1 = pool.apply_async(samtools_index,
# (nodup_bam, args.out_dir))
# log.info('samtools flagstat...')
# ret_val_2 = pool.apply_async(samtools_flagstat,
# (nodup_bam, args.out_dir))
log.info('sambamba index...')
ret_val_1 = pool.apply_async(sambamba_index,
(nodup_bam, args.nth, args.out_dir))
log.info('sambamba flagstat...')
ret_val_2 = pool.apply_async(sambamba_flagstat,
(nodup_bam, args.nth, args.out_dir))
log.info('Generating PBC QC log...')
if not args.no_dup_removal:
if args.paired_end:
ret_val_3 = pool.apply_async(pbc_qc_pe,
(filt_bam, nodup_bam, args.mito_chr_name,
max(1,args.nth-2),
args.out_dir))
else:
ret_val_3 = pool.apply_async(pbc_qc_se,
(filt_bam, nodup_bam, args.mito_chr_name, args.out_dir))
# gather
nodup_bai = ret_val_1.get(BIG_INT)
nodup_flagstat_qc = ret_val_2.get(BIG_INT)
if not args.no_dup_removal:
pbc_qc = ret_val_3.get(BIG_INT)
log.info('Closing multi-threading...')
pool.close()
pool.join()
log.info('Removing temporary files...')
rm_f(temp_files)
log.info('List all files in output directory...')
ls_l(args.out_dir)
log.info('All done.')
if __name__=='__main__':
main()
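# Example invocation (illustrative file names and values, not from the original
# repository documentation):
#
#   python filter.py sample.bam --mapq-thresh 30 --nth 4 --out-dir out/
#
# For single-end data this removes unmapped/low-MAPQ reads, deduplicates with
# nimnexus, and writes the nodup BAM plus flagstat and PBC QC files into out/.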
|
StarcoderdataPython
|
3352855
|
<reponame>uktrade/enav-alpha<gh_stars>0
from django.contrib import admin
from .models import Market, Article
admin.site.register(Market)
admin.site.register(Article)
|
StarcoderdataPython
|
3349828
|
<filename>checkov/kubernetes/parser/parser.py<gh_stars>0
import logging
from yaml import YAMLError
from checkov.kubernetes.parser import k8_yaml, k8_json
try:
from json.decoder import JSONDecodeError
except ImportError:
JSONDecodeError = ValueError
logger = logging.getLogger(__name__)
def parse(filename):
template = None
template_lines = None
try:
if filename.endswith(".yaml") or filename.endswith(".yml"):
(template, template_lines) = k8_yaml.load(filename)
if filename.endswith(".json"):
(template, template_lines) = k8_json.load(filename)
if template:
if isinstance(template, list):
for i in range(len(template)):
if isinstance(template[i], dict):
if not ('apiVersion' in template[i].keys() and 'kind' in template[i].keys()):
return
else:
return
else:
return
else:
return
except IOError as e:
if e.errno == 2:
logger.error('Template file not found: %s', filename)
return
elif e.errno == 21:
logger.error('Template references a directory, not a file: %s',
filename)
return
elif e.errno == 13:
logger.error('Permission denied when accessing template file: %s',
filename)
return
except UnicodeDecodeError as err:
logger.error('Cannot read file contents: %s', filename)
return
except YAMLError as err:
return
return template, template_lines
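# A minimal usage sketch (hypothetical manifest path, not part of the original module):
if __name__ == "__main__":
    result = parse("deployment.yaml")
    if result:
        template, template_lines = result
        print("Parsed {} document(s)".format(len(template)))
    else:
        print("File was skipped or could not be parsed")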
|
StarcoderdataPython
|
20359
|
<gh_stars>0
import os
import warnings
from dotenv import find_dotenv, load_dotenv
from yacs.config import CfgNode as ConfigurationNode
from pathlib import Path
# Please configure your own settings here #
# YACS overwrites these settings using YAML files
__C = ConfigurationNode()
### EXAMPLE ###
"""
# data augmentation parameters with albumentations library
__C.DATASET.AUGMENTATION = ConfigurationNode()
__C.DATASET.AUGMENTATION.BLURRING_PROB = 0.25
__C.DATASET.AUGMENTATION.GAUSS_NOISE_PROB = 0.25
__C.DATASET.AUGMENTATION.GAUSS_VAR_LIMIT =(10.0, 40.0)
__C.DATASET.AUGMENTATION.BLUR_LIMIT = 7
...
# model backbone configs
__C.MODEL.BACKBONE = ConfigurationNode()
__C.MODEL.BACKBONE.NAME = 'mobilenet_v2'
__C.MODEL.BACKBONE.RGB = True
__C.MODEL.BACKBONE.PRETRAINED_PATH = 'C:/data-science/kaggle/bengali.ai/models/mobilenet_v2-b0353104.pth'
# model head configs
__C.MODEL.HEAD = ConfigurationNode()
__C.MODEL.HEAD.NAME = 'simple_head_module'
__C.MODEL.HEAD.ACTIVATION = 'leaky_relu'
__C.MODEL.HEAD.OUTPUT_DIMS = [168, 11, 7]
__C.MODEL.HEAD.INPUT_DIM = 1280 # mobilenet_v2
__C.MODEL.HEAD.HIDDEN_DIMS = [512, 256]
__C.MODEL.HEAD.BATCH_NORM = True
__C.MODEL.HEAD.DROPOUT = 0.4
"""
def get_cfg_defaults():
"""
Get a yacs CfgNode object with default values for my_project.
"""
# Return a clone so that the defaults will not be altered
# This is for the "local variable" use pattern recommended by the YACS repo.
# It will be subsequently overwritten with local YAML.
return __C.clone()
def combine_cfgs(path_cfg_data: Path=None, path_cfg_override: Path=None):
"""
    An internal-facing routine that combines CFGs in the order provided.
    :param path_cfg_data: path to the data config YAML file
    :param path_cfg_override: path to the overriding config YAML file
    :return: cfg_base incorporating the overwrites.
"""
if path_cfg_data is not None:
path_cfg_data=Path(path_cfg_data)
if path_cfg_override is not None:
path_cfg_override=Path(path_cfg_override)
# Path order of precedence is:
# Priority 1, 2, 3, 4 respectively
# .env > other CFG YAML > data.yaml > default.yaml
# Load default lowest tier one:
# Priority 4:
cfg_base = get_cfg_defaults()
# Merge from the path_data
# Priority 3:
if path_cfg_data is not None and path_cfg_data.exists():
cfg_base.merge_from_file(path_cfg_data.absolute())
# Merge from other cfg_path files to further reduce effort
# Priority 2:
if path_cfg_override is not None and path_cfg_override.exists():
cfg_base.merge_from_file(path_cfg_override.absolute())
# Merge from .env
# Priority 1:
list_cfg = update_cfg_using_dotenv()
    if list_cfg:
cfg_base.merge_from_list(list_cfg)
return cfg_base
def update_cfg_using_dotenv() -> list:
"""
    If a .env file is present, return a flat list of the key/value overrides found there.
    # It returns a list of hard overwrites.
    :return: empty list or a flat [key, value, ...] list of overrides
"""
# If .env not found, bail
if find_dotenv() == '':
warnings.warn(".env files not found. YACS config file merging aborted.")
return []
# Load env.
load_dotenv(find_dotenv(), verbose=True)
# Load variables
list_key_env = {
"DATASET.TRAIN_DATA_PATH",
"DATASET.VAL_DATA_PATH",
"MODEL.BACKBONE.PRETRAINED_PATH",
"MODEL.SOLVER.LOSS.LABELS_WEIGHTS_PATH"
}
# Instantiate return list.
path_overwrite_keys = []
# Go through the list of key to be overwritten.
for key in list_key_env:
# Get value from the env.
        value = os.getenv(key)
# If it is none, skip. As some keys are only needed during training and others during the prediction stage.
if value is None:
continue
# Otherwise, adding the key and the value to the dictionary.
path_overwrite_keys.append(key)
path_overwrite_keys.append(value)
return path_overwrite_keys
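# A minimal usage sketch (hypothetical YAML paths, not part of the original module):
# merge the defaults with a data config and an experiment override, then freeze.
if __name__ == "__main__":
    cfg = combine_cfgs(
        path_cfg_data=Path("configs/data.yaml"),            # assumed path
        path_cfg_override=Path("configs/experiment.yaml"),  # assumed path
    )
    cfg.freeze()
    print(cfg)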
|
StarcoderdataPython
|
1742873
|
<filename>pydm/widgets/baseplot.py
import functools
from qtpy.QtGui import QColor, QBrush
from qtpy.QtCore import Signal, Slot, Property, QTimer, Qt
from .. import utilities
from pyqtgraph import PlotWidget, PlotDataItem, mkPen, ViewBox, InfiniteLine, SignalProxy, CurvePoint, TextItem
from collections import OrderedDict
from .base import PyDMPrimitiveWidget, widget_destroyed
class NoDataError(Exception):
"""NoDataError is raised when a curve tries to perform an operation,
but does not yet have any data."""
pass
class BasePlotCurveItem(PlotDataItem):
"""
BasePlotCurveItem represents a single curve in a plot.
    In addition to the parameters listed below, BasePlotCurveItem accepts
    keyword arguments for all plot options that pyqtgraph.PlotDataItem accepts.
Each subclass of ``BasePlotCurveItem`` should have a class attribute
`_channels` that lets us know the attribute names where we can find
PyDMChannel objects. This allows us to connect and disconnect these
connections when appropriate
Parameters
----------
color : QColor, optional
The color used to draw the curve line and the symbols.
lineStyle: int, optional
Style of the line connecting the data points.
Must be a value from the Qt::PenStyle enum
(see http://doc.qt.io/qt-5/qt.html#PenStyle-enum).
lineWidth: int, optional
Width of the line connecting the data points.
**kargs: optional
PlotDataItem keyword arguments, such as symbol and symbolSize.
"""
REDRAW_ON_X, REDRAW_ON_Y, REDRAW_ON_EITHER, REDRAW_ON_BOTH = range(4)
symbols = OrderedDict([('None', None),
('Circle', 'o'),
('Square', 's'),
('Triangle', 't'),
('Star', 'star'),
('Pentagon', 'p'),
('Hexagon', 'h'),
('X', 'x'),
('Diamond', 'd'),
('Plus', '+')])
lines = OrderedDict([('NoLine', Qt.NoPen),
('Solid', Qt.SolidLine),
('Dash', Qt.DashLine),
('Dot', Qt.DotLine),
('DashDot', Qt.DashDotLine),
('DashDotDot', Qt.DashDotDotLine)])
data_changed = Signal()
def __init__(self, color=None, lineStyle=None, lineWidth=None, **kws):
self._color = QColor('white')
self._pen = mkPen(self._color)
if lineWidth is not None:
self._pen.setWidth(lineWidth)
if lineStyle is not None:
self._pen.setStyle(lineStyle)
kws['pen'] = self._pen
super(BasePlotCurveItem, self).__init__(**kws)
self.setSymbolBrush(None)
if color is not None:
self.color = color
if hasattr(self, "channels"):
self.destroyed.connect(functools.partial(widget_destroyed,
self.channels))
@property
def color_string(self):
"""
A string representation of the color used for the curve. This string
will be a hex color code, like #FF00FF, or an SVG spec color name, if
a name exists for the color.
Returns
-------
str
"""
return str(utilities.colors.svg_color_from_hex(self.color.name(),
hex_on_fail=True))
@color_string.setter
def color_string(self, new_color_string):
"""
A string representation of the color used for the curve. This string
will be a hex color code, like #FF00FF, or an SVG spec color name, if
a name exists for the color.
Parameters
-------
        new_color_string: str
The new string to use for the curve color.
"""
self.color = QColor(str(new_color_string))
@property
def color(self):
"""
The color used for the curve.
Returns
-------
QColor
"""
return self._color
@color.setter
def color(self, new_color):
"""
The color used for the curve.
Parameters
-------
new_color: QColor or str
The new color to use for the curve.
            Strings are passed to BasePlotCurveItem.color_string.
"""
if isinstance(new_color, str):
self.color_string = new_color
return
self._color = new_color
self._pen.setColor(self._color)
self.setPen(self._pen)
self.setSymbolPen(self._color)
@property
def lineStyle(self):
"""
Return the style of the line connecting the data points.
Must be a value from the Qt::PenStyle enum
(see http://doc.qt.io/qt-5/qt.html#PenStyle-enum).
Returns
-------
int
"""
return self._pen.style()
@lineStyle.setter
def lineStyle(self, new_style):
"""
Set the style of the line connecting the data points.
Must be a value from the Qt::PenStyle enum
(see http://doc.qt.io/qt-5/qt.html#PenStyle-enum).
Parameters
-------
new_style: int
"""
if new_style in self.lines.values():
self._pen.setStyle(new_style)
self.setPen(self._pen)
@property
def lineWidth(self):
"""
Return the width of the line connecting the data points.
Returns
-------
int
"""
return self._pen.width()
@lineWidth.setter
def lineWidth(self, new_width):
"""
Set the width of the line connecting the data points.
Parameters
-------
new_width: int
"""
self._pen.setWidth(int(new_width))
self.setPen(self._pen)
@property
def symbol(self):
"""
The single-character code for the symbol drawn at each datapoint.
See the documentation for pyqtgraph.PlotDataItem for possible values.
Returns
-------
str or None
"""
return self.opts['symbol']
@symbol.setter
def symbol(self, new_symbol):
"""
The single-character code for the symbol drawn at each datapoint.
See the documentation for pyqtgraph.PlotDataItem for possible values.
Parameters
-------
new_symbol: str or None
"""
if new_symbol in self.symbols.values():
self.setSymbol(new_symbol)
self.setSymbolPen(self._color)
@property
def symbolSize(self):
"""
Return the size of the symbol to represent the data.
Returns
-------
int
"""
return self.opts['symbolSize']
@symbolSize.setter
def symbolSize(self, new_size):
"""
Set the size of the symbol to represent the data.
Parameters
-------
new_size: int
"""
self.setSymbolSize(int(new_size))
def to_dict(self):
"""
Returns an OrderedDict representation with values for all properties
needed to recreate this curve.
Returns
-------
OrderedDict
"""
return OrderedDict([("name", self.name()),
("color", self.color_string),
("lineStyle", self.lineStyle),
("lineWidth", self.lineWidth),
("symbol", self.symbol),
("symbolSize", self.symbolSize)])
def close(self):
pass
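# A minimal usage sketch (illustrative values, not part of this module):
#
#     curve = BasePlotCurveItem(color=QColor("red"), lineStyle=Qt.DashLine, lineWidth=2)
#     curve.symbol = 'o'
#     curve.symbolSize = 8
#     print(curve.to_dict())
#
# A QApplication must exist before Qt pens and brushes are used in a real program.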
class BasePlot(PlotWidget, PyDMPrimitiveWidget):
crosshair_position_updated = Signal(float, float)
def __init__(self, parent=None, background='default', axisItems=None):
PlotWidget.__init__(self, parent=parent, background=background,
axisItems=axisItems)
PyDMPrimitiveWidget.__init__(self)
self.plotItem = self.getPlotItem()
self.plotItem.hideButtons()
self._auto_range_x = None
self.setAutoRangeX(True)
self._auto_range_y = None
self.setAutoRangeY(True)
self._min_x = 0.0
self._max_x = 1.0
self._min_y = 0.0
self._max_y = 1.0
self._show_x_grid = None
self.setShowXGrid(False)
self._show_y_grid = None
self.setShowYGrid(False)
self._show_right_axis = False
self.redraw_timer = QTimer(self)
self.redraw_timer.timeout.connect(self.redrawPlot)
self._redraw_rate = 30 # Redraw at 30 Hz by default.
self.maxRedrawRate = self._redraw_rate
self._curves = []
self._title = None
self._show_legend = False
self._legend = self.addLegend()
self._legend.hide()
# Drawing crosshair on the ViewBox
self.vertical_crosshair_line = None
self.horizontal_crosshair_line = None
self.crosshair_movement_proxy = None
def addCurve(self, plot_item, curve_color=None):
if curve_color is None:
curve_color = utilities.colors.default_colors[
len(self._curves) % len(utilities.colors.default_colors)]
plot_item.color_string = curve_color
self._curves.append(plot_item)
self.addItem(plot_item)
self.redraw_timer.start()
# Connect channels
for chan in plot_item.channels():
if chan:
chan.connect()
# self._legend.addItem(plot_item, plot_item.curve_name)
def removeCurve(self, plot_item):
self.removeItem(plot_item)
self._curves.remove(plot_item)
if len(self._curves) < 1:
self.redraw_timer.stop()
# Disconnect channels
for chan in plot_item.channels():
if chan:
chan.disconnect()
def removeCurveWithName(self, name):
for curve in self._curves:
if curve.name() == name:
self.removeCurve(curve)
def removeCurveAtIndex(self, index):
curve_to_remove = self._curves[index]
self.removeCurve(curve_to_remove)
def setCurveAtIndex(self, index, new_curve):
old_curve = self._curves[index]
self._curves[index] = new_curve
# self._legend.addItem(new_curve, new_curve.name())
self.removeCurve(old_curve)
def curveAtIndex(self, index):
return self._curves[index]
def curves(self):
return self._curves
def clear(self):
legend_items = [label.text for (sample, label) in self._legend.items]
for item in legend_items:
self._legend.removeItem(item)
self.plotItem.clear()
self._curves = []
@Slot()
def redrawPlot(self):
pass
def getShowXGrid(self):
return self._show_x_grid
def setShowXGrid(self, value, alpha=None):
self._show_x_grid = value
self.showGrid(x=self._show_x_grid, alpha=alpha)
def resetShowXGrid(self):
self.setShowXGrid(False)
showXGrid = Property("bool", getShowXGrid, setShowXGrid, resetShowXGrid)
def getShowYGrid(self):
return self._show_y_grid
def setShowYGrid(self, value, alpha=None):
self._show_y_grid = value
self.showGrid(y=self._show_y_grid, alpha=alpha)
def resetShowYGrid(self):
self.setShowYGrid(False)
showYGrid = Property("bool", getShowYGrid, setShowYGrid, resetShowYGrid)
def getBackgroundColor(self):
return self.backgroundBrush().color()
def setBackgroundColor(self, color):
if self.backgroundBrush().color() != color:
self.setBackgroundBrush(QBrush(color))
backgroundColor = Property(QColor, getBackgroundColor, setBackgroundColor)
def getAxisColor(self):
return self.getAxis('bottom')._pen.color()
def setAxisColor(self, color):
if self.getAxis('bottom')._pen.color() != color:
self.getAxis('bottom').setPen(color)
self.getAxis('left').setPen(color)
self.getAxis('top').setPen(color)
self.getAxis('right').setPen(color)
axisColor = Property(QColor, getAxisColor, setAxisColor)
def getBottomAxisLabel(self):
return self.getAxis('bottom').labelText
def getShowRightAxis(self):
"""
Provide whether the right y-axis is being shown.
        Returns
        -------
        bool
            True if the graph shows the right y-axis; False if not.
"""
return self._show_right_axis
def setShowRightAxis(self, show):
"""
Set whether the graph should show the right y-axis.
Parameters
----------
show : bool
            True to show the right axis; False to hide it.
"""
if show:
self.showAxis("right")
else:
self.hideAxis("right")
self._show_right_axis = show
showRightAxis = Property("bool", getShowRightAxis, setShowRightAxis)
def getPlotTitle(self):
if self._title is None:
return ""
return str(self._title)
def setPlotTitle(self, value):
self._title = str(value)
if len(self._title) < 1:
self._title = None
self.setTitle(self._title)
def resetPlotTitle(self):
self._title = None
self.setTitle(self._title)
title = Property(str, getPlotTitle, setPlotTitle, resetPlotTitle)
def getShowLegend(self):
"""
Check if the legend is being shown.
        Returns
        -------
        bool
            True if the legend is displayed on the graph; False if not.
"""
return self._show_legend
def setShowLegend(self, value):
"""
Set to display the legend on the graph.
Parameters
----------
value : bool
            True to display the legend; False to hide it.
"""
self._show_legend = value
if self._show_legend:
if self._legend is None:
self._legend = self.addLegend()
else:
self._legend.show()
else:
if self._legend is not None:
self._legend.hide()
def resetShowLegend(self):
"""
Reset the legend display status to hidden.
"""
self.setShowLegend(False)
showLegend = Property(bool, getShowLegend, setShowLegend, resetShowLegend)
def getAutoRangeX(self):
return self._auto_range_x
def setAutoRangeX(self, value):
self._auto_range_x = value
if self._auto_range_x:
self.plotItem.enableAutoRange(ViewBox.XAxis, enable=self._auto_range_x)
def resetAutoRangeX(self):
self.setAutoRangeX(True)
def getAutoRangeY(self):
return self._auto_range_y
def setAutoRangeY(self, value):
self._auto_range_y = value
if self._auto_range_y:
self.plotItem.enableAutoRange(ViewBox.YAxis, enable=self._auto_range_y)
def resetAutoRangeY(self):
self.setAutoRangeY(True)
def getMinXRange(self):
"""
Minimum X-axis value visible on the plot.
Returns
-------
float
"""
return self.plotItem.viewRange()[0][0]
def setMinXRange(self, new_min_x_range):
"""
Set the minimum X-axis value visible on the plot.
Parameters
-------
new_min_x_range : float
"""
if self._auto_range_x:
return
self._min_x = new_min_x_range
self.plotItem.setXRange(self._min_x, self._max_x, padding=0)
def getMaxXRange(self):
"""
Maximum X-axis value visible on the plot.
Returns
-------
float
"""
return self.plotItem.viewRange()[0][1]
def setMaxXRange(self, new_max_x_range):
"""
Set the Maximum X-axis value visible on the plot.
Parameters
-------
new_max_x_range : float
"""
if self._auto_range_x:
return
self._max_x = new_max_x_range
self.plotItem.setXRange(self._min_x, self._max_x, padding=0)
def getMinYRange(self):
"""
Minimum Y-axis value visible on the plot.
Returns
-------
float
"""
return self.plotItem.viewRange()[1][0]
def setMinYRange(self, new_min_y_range):
"""
Set the minimum Y-axis value visible on the plot.
Parameters
-------
new_min_y_range : float
"""
if self._auto_range_y:
return
self._min_y = new_min_y_range
self.plotItem.setYRange(self._min_y, self._max_y, padding=0)
def getMaxYRange(self):
"""
Maximum Y-axis value visible on the plot.
Returns
-------
float
"""
return self.plotItem.viewRange()[1][1]
def setMaxYRange(self, new_max_y_range):
"""
Set the maximum Y-axis value visible on the plot.
Parameters
-------
new_max_y_range : float
"""
if self._auto_range_y:
return
self._max_y = new_max_y_range
self.plotItem.setYRange(self._min_y, self._max_y, padding=0)
@Property(bool)
def mouseEnabledX(self):
"""
Whether or not mouse interactions are enabled for the X-axis.
Returns
-------
bool
"""
return self.plotItem.getViewBox().state['mouseEnabled'][0]
@mouseEnabledX.setter
def mouseEnabledX(self, x_enabled):
"""
Whether or not mouse interactions are enabled for the X-axis.
Parameters
-------
x_enabled : bool
"""
self.plotItem.setMouseEnabled(x=x_enabled)
@Property(bool)
def mouseEnabledY(self):
"""
Whether or not mouse interactions are enabled for the Y-axis.
Returns
-------
bool
"""
return self.plotItem.getViewBox().state['mouseEnabled'][1]
@mouseEnabledY.setter
def mouseEnabledY(self, y_enabled):
"""
Whether or not mouse interactions are enabled for the Y-axis.
Parameters
-------
y_enabled : bool
"""
self.plotItem.setMouseEnabled(y=y_enabled)
@Property(int)
def maxRedrawRate(self):
"""
The maximum rate (in Hz) at which the plot will be redrawn.
The plot will not be redrawn if there is not new data to draw.
Returns
-------
int
"""
return self._redraw_rate
@maxRedrawRate.setter
def maxRedrawRate(self, redraw_rate):
"""
The maximum rate (in Hz) at which the plot will be redrawn.
The plot will not be redrawn if there is not new data to draw.
Parameters
-------
redraw_rate : int
"""
self._redraw_rate = redraw_rate
self.redraw_timer.setInterval(int((1.0/self._redraw_rate)*1000))
def pausePlotting(self):
self.redraw_timer.stop() if self.redraw_timer.isActive() else self.redraw_timer.start()
return self.redraw_timer.isActive()
def mouseMoved(self, evt):
"""
        A handler for the crosshair feature. Every time the mouse moves, the mouse coordinates are updated, and the
horizontal and vertical hairlines will be redrawn at the new coordinate. If a PyDMDisplay object is available,
that display will also have the x- and y- values to update on the UI.
Parameters
-------
evt: MouseEvent
The mouse event type, from which the mouse coordinates are obtained.
"""
pos = evt[0]
if self.sceneBoundingRect().contains(pos):
mouse_point = self.getViewBox().mapSceneToView(pos)
self.vertical_crosshair_line.setPos(mouse_point.x())
self.horizontal_crosshair_line.setPos(mouse_point.y())
self.crosshair_position_updated.emit(mouse_point.x(), mouse_point.y())
def enableCrosshair(self, is_enabled, starting_x_pos, starting_y_pos, vertical_angle=90, horizontal_angle=0,
vertical_movable=False, horizontal_movable=False):
"""
Enable the crosshair to be drawn on the ViewBox.
Parameters
----------
is_enabled : bool
True is to draw the crosshair, False is to not draw.
starting_x_pos : float
The x coordinate where to start the vertical crosshair line.
starting_y_pos : float
The y coordinate where to start the horizontal crosshair line.
vertical_angle : float
The angle to tilt the vertical crosshair line. Default at 90 degrees.
        horizontal_angle : float
            The angle to tilt the horizontal crosshair line. Default at 0 degrees.
        vertical_movable : bool
            True if the vertical line can be moved by the user; False if not.
        horizontal_movable : bool
            True if the horizontal line can be moved by the user; False if not.
"""
if is_enabled:
self.vertical_crosshair_line = InfiniteLine(pos=starting_x_pos, angle=vertical_angle,
movable=vertical_movable)
self.horizontal_crosshair_line = InfiniteLine(pos=starting_y_pos, angle=horizontal_angle,
movable=horizontal_movable)
self.plotItem.addItem(self.vertical_crosshair_line)
self.plotItem.addItem(self.horizontal_crosshair_line)
self.crosshair_movement_proxy = SignalProxy(self.plotItem.scene().sigMouseMoved, rateLimit=60,
slot=self.mouseMoved)
else:
if self.vertical_crosshair_line:
self.plotItem.removeItem(self.vertical_crosshair_line)
if self.horizontal_crosshair_line:
self.plotItem.removeItem(self.horizontal_crosshair_line)
if self.crosshair_movement_proxy:
self.crosshair_movement_proxy.disconnect()
|
StarcoderdataPython
|
3303865
|
# Generated by Django 3.1.5 on 2021-03-07 16:32
from django.db import migrations, models
import django_gotolong.uploaddoc.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='UploadDocModel',
fields=[
('uploaddoc_id', models.AutoField(primary_key=True, serialize=False)),
('uploaddoc_scope',
models.CharField(choices=[('input-global-data', '---GLOBAL---'), ('input-user-data', '---USER---')],
max_length=255)),
('uploaddoc_type', models.CharField(
choices=[('global', '---GLOBAL---'), ('amfi', 'AMFI'), ('bhav', 'BHAV'), ('ftwhl', 'FTWHL'),
('isin', 'ISIN'), ('nach', 'NACH'), ('screener', 'SCREENER'), ('trendlyne', 'TRNDLYNE'),
('gweight', 'WEIGHT'), ('user', '---USER---'), ('dematsum', 'DEMATSUM'),
('dematxn', 'DEMATTXN'), ('dividend', 'DIVIDEND')], max_length=255)),
('uploaddoc_year', models.CharField(blank=True, choices=[('all', 'ALL'), ('latest', 'LATEST'),
('FY 2011-2012', 'FY 2011-2012'),
('FY 2012-2013', 'FY 2012-2013'),
('FY 2013-2014', 'FY 2013-2014'),
('FY 2014-2015', 'FY 2014-2015'),
('FY 2015-2016', 'FY 2015-2016'),
('FY 2016-2017', 'FY 2016-2017'),
('FY 2017-2018', 'FY 2017-2018'),
('FY 2018-2019', 'FY 2018-2019'),
('FY 2019-2020', 'FY 2019-2020'),
('FY 2020-2021', 'FY 2020-2021'),
('FY 2021-2022', 'FY 2021-2022')],
max_length=255)),
('uploaddoc_fpath',
models.FileField(upload_to=django_gotolong.uploaddoc.models.UploadDocModel.file_rename)),
],
options={
'db_table': 'user_doc',
},
),
]
|
StarcoderdataPython
|
3348453
|
<reponame>smnarayanan/slimbootloader
## @file
# Create makefile for MS nmake and GNU make
#
# Copyright (c) 2019, Intel Corporation. All rights reserved.<BR>
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
## Import Modules
#
from __future__ import print_function
from __future__ import absolute_import
import os.path as path
import copy
from collections import defaultdict
from .BuildEngine import BuildRule,gDefaultBuildRuleFile,AutoGenReqBuildRuleVerNum
from .GenVar import VariableMgr, var_info
from . import GenMake
from AutoGen.DataPipe import MemoryDataPipe
from AutoGen.ModuleAutoGen import ModuleAutoGen
from AutoGen.AutoGen import AutoGen
from AutoGen.AutoGen import CalculatePriorityValue
from Workspace.WorkspaceCommon import GetModuleLibInstances
from CommonDataClass.CommonClass import SkuInfoClass
from Common.caching import cached_class_function
from Common.Expression import ValueExpressionEx
from Common.StringUtils import StringToArray,NormPath
from Common.BuildToolError import *
from Common.DataType import *
from Common.Misc import *
import Common.VpdInfoFile as VpdInfoFile
## Split command line option string to list
#
# subprocess.Popen needs the args to be a sequence. Otherwise there is a problem
# launching the command on non-Windows platforms
#
def _SplitOption(OptionString):
OptionList = []
LastChar = " "
OptionStart = 0
QuotationMark = ""
for Index in range(0, len(OptionString)):
CurrentChar = OptionString[Index]
if CurrentChar in ['"', "'"]:
if QuotationMark == CurrentChar:
QuotationMark = ""
elif QuotationMark == "":
QuotationMark = CurrentChar
continue
elif QuotationMark:
continue
if CurrentChar in ["/", "-"] and LastChar in [" ", "\t", "\r", "\n"]:
if Index > OptionStart:
OptionList.append(OptionString[OptionStart:Index - 1])
OptionStart = Index
LastChar = CurrentChar
OptionList.append(OptionString[OptionStart:])
return OptionList
## AutoGen class for platform
#
# PlatformAutoGen class will process the original information in platform
# file in order to generate makefile for platform.
#
class PlatformAutoGen(AutoGen):
# call super().__init__ then call the worker function with different parameter count
def __init__(self, Workspace, MetaFile, Target, Toolchain, Arch, *args, **kwargs):
if not hasattr(self, "_Init"):
self._InitWorker(Workspace, MetaFile, Target, Toolchain, Arch)
self._Init = True
#
# Used to store all PCDs for both PEI and DXE phase, in order to generate
# correct PCD database
#
_DynaPcdList_ = []
_NonDynaPcdList_ = []
_PlatformPcds = {}
## Initialize PlatformAutoGen
#
#
# @param Workspace WorkspaceAutoGen object
# @param PlatformFile Platform file (DSC file)
# @param Target Build target (DEBUG, RELEASE)
# @param Toolchain Name of tool chain
# @param Arch arch of the platform supports
#
def _InitWorker(self, Workspace, PlatformFile, Target, Toolchain, Arch):
EdkLogger.debug(EdkLogger.DEBUG_9, "AutoGen platform [%s] [%s]" % (PlatformFile, Arch))
GlobalData.gProcessingFile = "%s [%s, %s, %s]" % (PlatformFile, Arch, Toolchain, Target)
self.MetaFile = PlatformFile
self.Workspace = Workspace
self.WorkspaceDir = Workspace.WorkspaceDir
self.ToolChain = Toolchain
self.BuildTarget = Target
self.Arch = Arch
self.SourceDir = PlatformFile.SubDir
self.FdTargetList = self.Workspace.FdTargetList
self.FvTargetList = self.Workspace.FvTargetList
# get the original module/package/platform objects
self.BuildDatabase = Workspace.BuildDatabase
self.DscBuildDataObj = Workspace.Platform
# flag indicating if the makefile/C-code file has been created or not
self.IsMakeFileCreated = False
self._DynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
self._NonDynamicPcdList = None # [(TokenCName1, TokenSpaceGuidCName1), (TokenCName2, TokenSpaceGuidCName2), ...]
self._AsBuildInfList = []
self._AsBuildModuleList = []
self.VariableInfo = None
if GlobalData.gFdfParser is not None:
self._AsBuildInfList = GlobalData.gFdfParser.Profile.InfList
for Inf in self._AsBuildInfList:
InfClass = PathClass(NormPath(Inf), GlobalData.gWorkspace, self.Arch)
M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
if not M.IsBinaryModule:
continue
self._AsBuildModuleList.append(InfClass)
# get library/modules for build
self.LibraryBuildDirectoryList = []
self.ModuleBuildDirectoryList = []
self.DataPipe = MemoryDataPipe(self.BuildDir)
self.DataPipe.FillData(self)
return True
def FillData_LibConstPcd(self):
libConstPcd = {}
for LibAuto in self.LibraryAutoGenList:
if LibAuto.ConstPcd:
libConstPcd[(LibAuto.MetaFile.File,LibAuto.MetaFile.Root,LibAuto.Arch,LibAuto.MetaFile.Path)] = LibAuto.ConstPcd
self.DataPipe.DataContainer = {"LibConstPcd":libConstPcd}
## hash() operator of PlatformAutoGen
#
# The platform file path and arch string will be used to represent
# hash value of this object
#
# @retval int Hash value of the platform file path and arch
#
@cached_class_function
def __hash__(self):
return hash((self.MetaFile, self.Arch))
@cached_class_function
def __repr__(self):
return "%s [%s]" % (self.MetaFile, self.Arch)
## Create autogen code for platform and modules
#
# Since there's no autogen code for platform, this method will do nothing
# if CreateModuleCodeFile is set to False.
#
# @param CreateModuleCodeFile Flag indicating if creating module's
# autogen code file or not
#
@cached_class_function
def CreateCodeFile(self, CreateModuleCodeFile=False):
# only module has code to be created, so do nothing if CreateModuleCodeFile is False
if not CreateModuleCodeFile:
return
for Ma in self.ModuleAutoGenList:
Ma.CreateCodeFile(CreateModuleCodeFile)
## Generate Fds Command
@cached_property
def GenFdsCommand(self):
return self.Workspace.GenFdsCommand
## Create makefile for the platform and modules in it
#
# @param CreateModuleMakeFile Flag indicating if the makefile for
# modules will be created as well
#
def CreateMakeFile(self, CreateModuleMakeFile=False, FfsCommand = {}):
if CreateModuleMakeFile:
for Ma in self._MaList:
key = (Ma.MetaFile.File, self.Arch)
if key in FfsCommand:
Ma.CreateMakeFile(CreateModuleMakeFile, FfsCommand[key])
else:
Ma.CreateMakeFile(CreateModuleMakeFile)
self.CreateLibModuelDirs()
def CreateLibModuelDirs(self):
# no need to create makefile for the platform more than once
if self.IsMakeFileCreated:
return
# create library/module build dirs for platform
Makefile = GenMake.PlatformMakefile(self)
self.LibraryBuildDirectoryList = Makefile.GetLibraryBuildDirectoryList()
self.ModuleBuildDirectoryList = Makefile.GetModuleBuildDirectoryList()
self.IsMakeFileCreated = True
@property
def AllPcdList(self):
return self.DynamicPcdList + self.NonDynamicPcdList
## Deal with Shared FixedAtBuild Pcds
#
def CollectFixedAtBuildPcds(self):
for LibAuto in self.LibraryAutoGenList:
FixedAtBuildPcds = {}
ShareFixedAtBuildPcdsSameValue = {}
for Module in LibAuto.ReferenceModules:
for Pcd in set(Module.FixedAtBuildPcds + LibAuto.FixedAtBuildPcds):
DefaultValue = Pcd.DefaultValue
# Cover the case: DSC component override the Pcd value and the Pcd only used in one Lib
if Pcd in Module.LibraryPcdList:
Index = Module.LibraryPcdList.index(Pcd)
DefaultValue = Module.LibraryPcdList[Index].DefaultValue
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
if key not in FixedAtBuildPcds:
ShareFixedAtBuildPcdsSameValue[key] = True
FixedAtBuildPcds[key] = DefaultValue
else:
if FixedAtBuildPcds[key] != DefaultValue:
ShareFixedAtBuildPcdsSameValue[key] = False
for Pcd in LibAuto.FixedAtBuildPcds:
key = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
if (Pcd.TokenCName, Pcd.TokenSpaceGuidCName) not in self.NonDynamicPcdDict:
continue
else:
DscPcd = self.NonDynamicPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)]
if DscPcd.Type != TAB_PCDS_FIXED_AT_BUILD:
continue
if key in ShareFixedAtBuildPcdsSameValue and ShareFixedAtBuildPcdsSameValue[key]:
LibAuto.ConstPcd[key] = FixedAtBuildPcds[key]
def CollectVariables(self, DynamicPcdSet):
VpdRegionSize = 0
VpdRegionBase = 0
if self.Workspace.FdfFile:
FdDict = self.Workspace.FdfProfile.FdDict[GlobalData.gFdfParser.CurrentFdName]
for FdRegion in FdDict.RegionList:
for item in FdRegion.RegionDataList:
if self.Platform.VpdToolGuid.strip() and self.Platform.VpdToolGuid in item:
VpdRegionSize = FdRegion.Size
VpdRegionBase = FdRegion.Offset
break
VariableInfo = VariableMgr(self.DscBuildDataObj._GetDefaultStores(), self.DscBuildDataObj.SkuIds)
VariableInfo.SetVpdRegionMaxSize(VpdRegionSize)
VariableInfo.SetVpdRegionOffset(VpdRegionBase)
Index = 0
for Pcd in DynamicPcdSet:
pcdname = ".".join((Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
for SkuName in Pcd.SkuInfoList:
Sku = Pcd.SkuInfoList[SkuName]
SkuId = Sku.SkuId
if SkuId is None or SkuId == '':
continue
if len(Sku.VariableName) > 0:
if Sku.VariableAttribute and 'NV' not in Sku.VariableAttribute:
continue
VariableGuidStructure = Sku.VariableGuidValue
VariableGuid = GuidStructureStringToGuidString(VariableGuidStructure)
for StorageName in Sku.DefaultStoreDict:
VariableInfo.append_variable(var_info(Index, pcdname, StorageName, SkuName, StringToArray(Sku.VariableName), VariableGuid, Sku.VariableOffset, Sku.VariableAttribute, Sku.HiiDefaultValue, Sku.DefaultStoreDict[StorageName] if Pcd.DatumType in TAB_PCD_NUMERIC_TYPES else StringToArray(Sku.DefaultStoreDict[StorageName]), Pcd.DatumType, Pcd.CustomAttribute['DscPosition'], Pcd.CustomAttribute.get('IsStru',False)))
Index += 1
return VariableInfo
def UpdateNVStoreMaxSize(self, OrgVpdFile):
if self.VariableInfo:
VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)
PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]
if PcdNvStoreDfBuffer:
try:
OrgVpdFile.Read(VpdMapFilePath)
PcdItems = OrgVpdFile.GetOffset(PcdNvStoreDfBuffer[0])
NvStoreOffset = list(PcdItems.values())[0].strip() if PcdItems else '0'
except:
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
NvStoreOffset = int(NvStoreOffset, 16) if NvStoreOffset.upper().startswith("0X") else int(NvStoreOffset)
default_skuobj = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)
maxsize = self.VariableInfo.VpdRegionSize - NvStoreOffset if self.VariableInfo.VpdRegionSize else len(default_skuobj.DefaultValue.split(","))
var_data = self.VariableInfo.PatchNVStoreDefaultMaxSize(maxsize)
if var_data and default_skuobj:
default_skuobj.DefaultValue = var_data
PcdNvStoreDfBuffer[0].DefaultValue = var_data
PcdNvStoreDfBuffer[0].SkuInfoList.clear()
PcdNvStoreDfBuffer[0].SkuInfoList[TAB_DEFAULT] = default_skuobj
PcdNvStoreDfBuffer[0].MaxDatumSize = str(len(default_skuobj.DefaultValue.split(",")))
return OrgVpdFile
## Collect dynamic PCDs
#
# Gather dynamic PCDs list from each module and their settings from platform
# This interface should be invoked explicitly when platform action is created.
#
def CollectPlatformDynamicPcds(self):
self.CategoryPcds()
self.SortDynamicPcd()
def CategoryPcds(self):
# Category Pcds into DynamicPcds and NonDynamicPcds
# for gathering error information
NoDatumTypePcdList = set()
FdfModuleList = []
for InfName in self._AsBuildInfList:
InfName = mws.join(self.WorkspaceDir, InfName)
FdfModuleList.append(os.path.normpath(InfName))
for M in self._MbList:
# F is the Module for which M is the module autogen
ModPcdList = self.ApplyPcdSetting(M, M.ModulePcdList)
LibPcdList = []
for lib in M.LibraryPcdList:
LibPcdList.extend(self.ApplyPcdSetting(M, M.LibraryPcdList[lib], lib))
for PcdFromModule in ModPcdList + LibPcdList:
# make sure that the "VOID*" kind of datum has MaxDatumSize set
if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, M.MetaFile))
# Check the PCD from Binary INF or Source INF
if M.IsBinaryModule == True:
PcdFromModule.IsFromBinaryInf = True
# Check the PCD from DSC or not
PcdFromModule.IsFromDsc = (PcdFromModule.TokenCName, PcdFromModule.TokenSpaceGuidCName) in self.Platform.Pcds
if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET or PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
if M.MetaFile.Path not in FdfModuleList:
# If one of the Source built modules listed in the DSC is not listed
# in FDF modules, and the INF lists a PCD can only use the PcdsDynamic
# access method (it is only listed in the DEC file that declares the
# PCD as PcdsDynamic), then build tool will report warning message
# notify the PI that they are attempting to build a module that must
# be included in a flash image in order to be functional. These Dynamic
# PCD will not be added into the Database unless it is used by other
# modules that are included in the FDF file.
if PcdFromModule.Type in PCD_DYNAMIC_TYPE_SET and \
PcdFromModule.IsFromBinaryInf == False:
                            # Print a warning message to let the developer decide.
continue
# If one of the Source built modules listed in the DSC is not listed in
# FDF modules, and the INF lists a PCD can only use the PcdsDynamicEx
# access method (it is only listed in the DEC file that declares the
# PCD as PcdsDynamicEx), then DO NOT break the build; DO NOT add the
# PCD to the Platform's PCD Database.
if PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
continue
#
                    # If a dynamic PCD is used by a PEIM/PEI module and a DXE module,
                    # it should be stored in the PEI PCD database; if a dynamic PCD is only
                    # used by DXE modules, it should be stored in the DXE PCD database.
# The default Phase is DXE
#
if M.ModuleType in SUP_MODULE_SET_PEI:
PcdFromModule.Phase = "PEI"
if PcdFromModule not in self._DynaPcdList_:
self._DynaPcdList_.append(PcdFromModule)
elif PcdFromModule.Phase == 'PEI':
# overwrite any the same PCD existing, if Phase is PEI
Index = self._DynaPcdList_.index(PcdFromModule)
self._DynaPcdList_[Index] = PcdFromModule
elif PcdFromModule not in self._NonDynaPcdList_:
self._NonDynaPcdList_.append(PcdFromModule)
elif PcdFromModule in self._NonDynaPcdList_ and PcdFromModule.IsFromBinaryInf == True:
Index = self._NonDynaPcdList_.index(PcdFromModule)
if self._NonDynaPcdList_[Index].IsFromBinaryInf == False:
#The PCD from Binary INF will override the same one from source INF
self._NonDynaPcdList_.remove (self._NonDynaPcdList_[Index])
PcdFromModule.Pending = False
self._NonDynaPcdList_.append (PcdFromModule)
DscModuleSet = {os.path.normpath(ModuleInf.Path) for ModuleInf in self.Platform.Modules}
# add the PCD from modules that listed in FDF but not in DSC to Database
for InfName in FdfModuleList:
if InfName not in DscModuleSet:
InfClass = PathClass(InfName)
M = self.BuildDatabase[InfClass, self.Arch, self.BuildTarget, self.ToolChain]
# If a module INF in FDF but not in current arch's DSC module list, it must be module (either binary or source)
# for different Arch. PCDs in source module for different Arch is already added before, so skip the source module here.
# For binary module, if in current arch, we need to list the PCDs into database.
if not M.IsBinaryModule:
continue
# Override the module PCD setting by platform setting
ModulePcdList = self.ApplyPcdSetting(M, M.Pcds)
for PcdFromModule in ModulePcdList:
PcdFromModule.IsFromBinaryInf = True
PcdFromModule.IsFromDsc = False
# Only allow the DynamicEx and Patchable PCD in AsBuild INF
if PcdFromModule.Type not in PCD_DYNAMIC_EX_TYPE_SET and PcdFromModule.Type not in TAB_PCDS_PATCHABLE_IN_MODULE:
EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
File=self.MetaFile,
ExtraData="\n\tExisted %s PCD %s in:\n\t\t%s\n"
% (PcdFromModule.Type, PcdFromModule.TokenCName, InfName))
# make sure that the "VOID*" kind of datum has MaxDatumSize set
if PcdFromModule.DatumType == TAB_VOID and not PcdFromModule.MaxDatumSize:
NoDatumTypePcdList.add("%s.%s [%s]" % (PcdFromModule.TokenSpaceGuidCName, PcdFromModule.TokenCName, InfName))
if M.ModuleType in SUP_MODULE_SET_PEI:
PcdFromModule.Phase = "PEI"
if PcdFromModule not in self._DynaPcdList_ and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
self._DynaPcdList_.append(PcdFromModule)
elif PcdFromModule not in self._NonDynaPcdList_ and PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE:
self._NonDynaPcdList_.append(PcdFromModule)
if PcdFromModule in self._DynaPcdList_ and PcdFromModule.Phase == 'PEI' and PcdFromModule.Type in PCD_DYNAMIC_EX_TYPE_SET:
                        # Overwrite the phase of any existing identical PCD, if the Phase is PEI.
                        # It is to solve the case where a dynamic PCD is used by a PEIM/PEI
                        # module and a DXE module at the same time.
# Overwrite the type of the PCDs in source INF by the type of AsBuild
# INF file as DynamicEx.
Index = self._DynaPcdList_.index(PcdFromModule)
self._DynaPcdList_[Index].Phase = PcdFromModule.Phase
self._DynaPcdList_[Index].Type = PcdFromModule.Type
for PcdFromModule in self._NonDynaPcdList_:
# If a PCD is not listed in the DSC file, but binary INF files used by
# this platform all (that use this PCD) list the PCD in a [PatchPcds]
# section, AND all source INF files used by this platform the build
# that use the PCD list the PCD in either a [Pcds] or [PatchPcds]
# section, then the tools must NOT add the PCD to the Platform's PCD
# Database; the build must assign the access method for this PCD as
# PcdsPatchableInModule.
if PcdFromModule not in self._DynaPcdList_:
continue
Index = self._DynaPcdList_.index(PcdFromModule)
if PcdFromModule.IsFromDsc == False and \
PcdFromModule.Type in TAB_PCDS_PATCHABLE_IN_MODULE and \
PcdFromModule.IsFromBinaryInf == True and \
self._DynaPcdList_[Index].IsFromBinaryInf == False:
Index = self._DynaPcdList_.index(PcdFromModule)
self._DynaPcdList_.remove (self._DynaPcdList_[Index])
# print out error information and break the build, if error found
if len(NoDatumTypePcdList) > 0:
NoDatumTypePcdListString = "\n\t\t".join(NoDatumTypePcdList)
EdkLogger.error("build", AUTOGEN_ERROR, "PCD setting error",
File=self.MetaFile,
ExtraData="\n\tPCD(s) without MaxDatumSize:\n\t\t%s\n"
% NoDatumTypePcdListString)
self._NonDynamicPcdList = sorted(self._NonDynaPcdList_)
self._DynamicPcdList = self._DynaPcdList_
def SortDynamicPcd(self):
#
# Sort dynamic PCD list to:
        # 1) If the PCD's datum type is VOID* and its value is a unicode string starting with L,
        #    the PCD item should be placed at the head of the dynamic list
        # 2) If the PCD is HII type, the PCD item should be put after the unicode-type PCDs
        #
        # The reason for sorting is to make sure the unicode strings are double-byte aligned in the string table.
#
UnicodePcdArray = set()
HiiPcdArray = set()
OtherPcdArray = set()
VpdPcdDict = {}
VpdFile = VpdInfoFile.VpdInfoFile()
NeedProcessVpdMapFile = False
for pcd in self.Platform.Pcds:
if pcd not in self._PlatformPcds:
self._PlatformPcds[pcd] = self.Platform.Pcds[pcd]
for item in self._PlatformPcds:
if self._PlatformPcds[item].DatumType and self._PlatformPcds[item].DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
self._PlatformPcds[item].DatumType = TAB_VOID
if (self.Workspace.ArchList[-1] == self.Arch):
for Pcd in self._DynamicPcdList:
                # just pick a value to determine whether it is a unicode-string type
Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)
Sku.VpdOffset = Sku.VpdOffset.strip()
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
Pcd.DatumType = TAB_VOID
# if found PCD which datum value is unicode string the insert to left size of UnicodeIndex
# if found HII type PCD then insert to right of UnicodeIndex
if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
VpdPcdDict[(Pcd.TokenCName, Pcd.TokenSpaceGuidCName)] = Pcd
#Collect DynamicHii PCD values and assign it to DynamicExVpd PCD gEfiMdeModulePkgTokenSpaceGuid.PcdNvStoreDefaultValueBuffer
PcdNvStoreDfBuffer = VpdPcdDict.get(("PcdNvStoreDefaultValueBuffer", "gEfiMdeModulePkgTokenSpaceGuid"))
if PcdNvStoreDfBuffer:
self.VariableInfo = self.CollectVariables(self._DynamicPcdList)
vardump = self.VariableInfo.dump()
if vardump:
#
#According to PCD_DATABASE_INIT in edk2\MdeModulePkg\Include\Guid\PcdDataBaseSignatureGuid.h,
#the max size for string PCD should not exceed USHRT_MAX 65535(0xffff).
#typedef UINT16 SIZE_INFO;
#//SIZE_INFO SizeTable[];
if len(vardump.split(",")) > 0xffff:
EdkLogger.error("build", RESOURCE_OVERFLOW, 'The current length of PCD %s value is %d, it exceeds to the max size of String PCD.' %(".".join([PcdNvStoreDfBuffer.TokenSpaceGuidCName,PcdNvStoreDfBuffer.TokenCName]) ,len(vardump.split(","))))
PcdNvStoreDfBuffer.DefaultValue = vardump
for skuname in PcdNvStoreDfBuffer.SkuInfoList:
PcdNvStoreDfBuffer.SkuInfoList[skuname].DefaultValue = vardump
PcdNvStoreDfBuffer.MaxDatumSize = str(len(vardump.split(",")))
else:
                # If the end user defines [DefaultStores] and [XXX.Manufacturing] in the DSC but forgets to configure PcdNvStoreDefaultValueBuffer as PcdsDynamicVpd
if [Pcd for Pcd in self._DynamicPcdList if Pcd.UserDefinedDefaultStoresFlag]:
EdkLogger.warn("build", "PcdNvStoreDefaultValueBuffer should be defined as PcdsDynamicExVpd in dsc file since the DefaultStores is enabled for this platform.\n%s" %self.Platform.MetaFile.Path)
PlatformPcds = sorted(self._PlatformPcds.keys())
#
# Add VPD type PCDs into VpdFile and determine whether each VPD PCD needs to be fixed up.
#
VpdSkuMap = {}
for PcdKey in PlatformPcds:
Pcd = self._PlatformPcds[PcdKey]
if Pcd.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD] and \
PcdKey in VpdPcdDict:
Pcd = VpdPcdDict[PcdKey]
SkuValueMap = {}
DefaultSku = Pcd.SkuInfoList.get(TAB_DEFAULT)
if DefaultSku:
PcdValue = DefaultSku.DefaultValue
if PcdValue not in SkuValueMap:
SkuValueMap[PcdValue] = []
VpdFile.Add(Pcd, TAB_DEFAULT, DefaultSku.VpdOffset)
SkuValueMap[PcdValue].append(DefaultSku)
for (SkuName, Sku) in Pcd.SkuInfoList.items():
Sku.VpdOffset = Sku.VpdOffset.strip()
PcdValue = Sku.DefaultValue
if PcdValue == "":
PcdValue = Pcd.DefaultValue
if Sku.VpdOffset != TAB_STAR:
if PcdValue.startswith("{"):
Alignment = 8
elif PcdValue.startswith("L"):
Alignment = 2
else:
Alignment = 1
try:
VpdOffset = int(Sku.VpdOffset)
except:
try:
VpdOffset = int(Sku.VpdOffset, 16)
except:
EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, Pcd.TokenSpaceGuidCName, Pcd.TokenCName))
if VpdOffset % Alignment != 0:
if PcdValue.startswith("{"):
EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(Pcd.TokenSpaceGuidCName, Pcd.TokenCName), File=self.MetaFile)
else:
EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, Alignment))
if PcdValue not in SkuValueMap:
SkuValueMap[PcdValue] = []
VpdFile.Add(Pcd, SkuName, Sku.VpdOffset)
SkuValueMap[PcdValue].append(Sku)
# if the offset of a VPD is *, then it needs to be fixed up by a third-party tool.
if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:
NeedProcessVpdMapFile = True
if self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == '':
EdkLogger.error("Build", FILE_NOT_FOUND, \
"Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
VpdSkuMap[PcdKey] = SkuValueMap
#
# Fix the PCDs defined in the VPD PCD section that are never referenced by any module.
# An example is a PCD for signature usage.
#
for DscPcd in PlatformPcds:
DscPcdEntry = self._PlatformPcds[DscPcd]
if DscPcdEntry.Type in [TAB_PCDS_DYNAMIC_VPD, TAB_PCDS_DYNAMIC_EX_VPD]:
if not (self.Platform.VpdToolGuid is None or self.Platform.VpdToolGuid == ''):
FoundFlag = False
for VpdPcd in VpdFile._VpdArray:
# This PCD has been referenced by a module
if (VpdPcd.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
(VpdPcd.TokenCName == DscPcdEntry.TokenCName):
FoundFlag = True
# Not found; it should be a signature PCD
if not FoundFlag :
# just pick a value to determine whether this is a unicode string type
SkuValueMap = {}
SkuObjList = list(DscPcdEntry.SkuInfoList.items())
DefaultSku = DscPcdEntry.SkuInfoList.get(TAB_DEFAULT)
if DefaultSku:
defaultindex = SkuObjList.index((TAB_DEFAULT, DefaultSku))
SkuObjList[0], SkuObjList[defaultindex] = SkuObjList[defaultindex], SkuObjList[0]
for (SkuName, Sku) in SkuObjList:
Sku.VpdOffset = Sku.VpdOffset.strip()
# Need to iterate DEC pcd information to get the value & datumtype
for eachDec in self.PackageList:
for DecPcd in eachDec.Pcds:
DecPcdEntry = eachDec.Pcds[DecPcd]
if (DecPcdEntry.TokenSpaceGuidCName == DscPcdEntry.TokenSpaceGuidCName) and \
(DecPcdEntry.TokenCName == DscPcdEntry.TokenCName):
# Print a warning message to let the developer make a decision.
EdkLogger.warn("build", "Unreferenced vpd pcd used!",
File=self.MetaFile, \
ExtraData = "PCD: %s.%s used in the DSC file %s is unreferenced." \
%(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, self.Platform.MetaFile.Path))
DscPcdEntry.DatumType = DecPcdEntry.DatumType
DscPcdEntry.DefaultValue = DecPcdEntry.DefaultValue
DscPcdEntry.TokenValue = DecPcdEntry.TokenValue
DscPcdEntry.TokenSpaceGuidValue = eachDec.Guids[DecPcdEntry.TokenSpaceGuidCName]
# Only fix the value when no value is provided in the DSC file.
if not Sku.DefaultValue:
DscPcdEntry.SkuInfoList[list(DscPcdEntry.SkuInfoList.keys())[0]].DefaultValue = DecPcdEntry.DefaultValue
if DscPcdEntry not in self._DynamicPcdList:
self._DynamicPcdList.append(DscPcdEntry)
Sku.VpdOffset = Sku.VpdOffset.strip()
PcdValue = Sku.DefaultValue
if PcdValue == "":
PcdValue = DscPcdEntry.DefaultValue
if Sku.VpdOffset != TAB_STAR:
if PcdValue.startswith("{"):
Alignment = 8
elif PcdValue.startswith("L"):
Alignment = 2
else:
Alignment = 1
try:
VpdOffset = int(Sku.VpdOffset)
except:
try:
VpdOffset = int(Sku.VpdOffset, 16)
except:
EdkLogger.error("build", FORMAT_INVALID, "Invalid offset value %s for PCD %s.%s." % (Sku.VpdOffset, DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName))
if VpdOffset % Alignment != 0:
if PcdValue.startswith("{"):
EdkLogger.warn("build", "The offset value of PCD %s.%s is not 8-byte aligned!" %(DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName), File=self.MetaFile)
else:
EdkLogger.error("build", FORMAT_INVALID, 'The offset value of PCD %s.%s should be %s-byte aligned.' % (DscPcdEntry.TokenSpaceGuidCName, DscPcdEntry.TokenCName, Alignment))
if PcdValue not in SkuValueMap:
SkuValueMap[PcdValue] = []
VpdFile.Add(DscPcdEntry, SkuName, Sku.VpdOffset)
SkuValueMap[PcdValue].append(Sku)
if not NeedProcessVpdMapFile and Sku.VpdOffset == TAB_STAR:
NeedProcessVpdMapFile = True
if DscPcdEntry.DatumType == TAB_VOID and PcdValue.startswith("L"):
UnicodePcdArray.add(DscPcdEntry)
elif len(Sku.VariableName) > 0:
HiiPcdArray.add(DscPcdEntry)
else:
OtherPcdArray.add(DscPcdEntry)
# if the offset of a VPD is *, then it needs to be fixed up by a third-party tool.
VpdSkuMap[DscPcd] = SkuValueMap
if (self.Platform.FlashDefinition is None or self.Platform.FlashDefinition == '') and \
VpdFile.GetCount() != 0:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE,
"Fail to get FLASH_DEFINITION definition in DSC file %s which is required when DSC contains VPD PCD." % str(self.Platform.MetaFile))
if VpdFile.GetCount() != 0:
self.FixVpdOffset(VpdFile)
self.FixVpdOffset(self.UpdateNVStoreMaxSize(VpdFile))
PcdNvStoreDfBuffer = [item for item in self._DynamicPcdList if item.TokenCName == "PcdNvStoreDefaultValueBuffer" and item.TokenSpaceGuidCName == "gEfiMdeModulePkgTokenSpaceGuid"]
if PcdNvStoreDfBuffer:
PcdName,PcdGuid = PcdNvStoreDfBuffer[0].TokenCName, PcdNvStoreDfBuffer[0].TokenSpaceGuidCName
if (PcdName,PcdGuid) in VpdSkuMap:
DefaultSku = PcdNvStoreDfBuffer[0].SkuInfoList.get(TAB_DEFAULT)
VpdSkuMap[(PcdName,PcdGuid)] = {DefaultSku.DefaultValue:[SkuObj for SkuObj in PcdNvStoreDfBuffer[0].SkuInfoList.values() ]}
# Process VPD map file generated by third party BPDG tool
if NeedProcessVpdMapFile:
VpdMapFilePath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY, "%s.map" % self.Platform.VpdToolGuid)
try:
VpdFile.Read(VpdMapFilePath)
# Fixup TAB_STAR offset
for pcd in VpdSkuMap:
vpdinfo = VpdFile.GetVpdInfo(pcd)
if vpdinfo is None:
# no VPD info for this PCD in the map file, skip it
continue
for pcdvalue in VpdSkuMap[pcd]:
for sku in VpdSkuMap[pcd][pcdvalue]:
for item in vpdinfo:
if item[2] == pcdvalue:
sku.VpdOffset = item[1]
except:
EdkLogger.error("build", FILE_READ_FAILURE, "Can not find VPD map file %s to fix up VPD offset." % VpdMapFilePath)
# Delete and rebuild the DynamicPcdList the last time this function is entered
for Pcd in self._DynamicPcdList:
# just pick a value to determine whether this is a unicode string type
Sku = Pcd.SkuInfoList.get(TAB_DEFAULT)
Sku.VpdOffset = Sku.VpdOffset.strip()
if Pcd.DatumType not in [TAB_UINT8, TAB_UINT16, TAB_UINT32, TAB_UINT64, TAB_VOID, "BOOLEAN"]:
Pcd.DatumType = TAB_VOID
PcdValue = Sku.DefaultValue
if Pcd.DatumType == TAB_VOID and PcdValue.startswith("L"):
# if a PCD whose datum value is a unicode string is found, insert it to the left side of UnicodeIndex
UnicodePcdArray.add(Pcd)
elif len(Sku.VariableName) > 0:
# if a HII type PCD is found, insert it to the right of UnicodeIndex
HiiPcdArray.add(Pcd)
else:
OtherPcdArray.add(Pcd)
del self._DynamicPcdList[:]
self._DynamicPcdList.extend(list(UnicodePcdArray))
self._DynamicPcdList.extend(list(HiiPcdArray))
self._DynamicPcdList.extend(list(OtherPcdArray))
self._DynamicPcdList.sort()
allskuset = [(SkuName, Sku.SkuId) for pcd in self._DynamicPcdList for (SkuName, Sku) in pcd.SkuInfoList.items()]
for pcd in self._DynamicPcdList:
if len(pcd.SkuInfoList) == 1:
for (SkuName, SkuId) in allskuset:
if isinstance(SkuId, str) and eval(SkuId) == 0 or SkuId == 0:
continue
pcd.SkuInfoList[SkuName] = copy.deepcopy(pcd.SkuInfoList[TAB_DEFAULT])
pcd.SkuInfoList[SkuName].SkuId = SkuId
pcd.SkuInfoList[SkuName].SkuIdName = SkuName
def FixVpdOffset(self, VpdFile ):
FvPath = os.path.join(self.BuildDir, TAB_FV_DIRECTORY)
if not os.path.exists(FvPath):
try:
os.makedirs(FvPath)
except:
EdkLogger.error("build", FILE_WRITE_FAILURE, "Fail to create FV folder under %s" % self.BuildDir)
VpdFilePath = os.path.join(FvPath, "%s.txt" % self.Platform.VpdToolGuid)
if VpdFile.Write(VpdFilePath):
# retrieve BPDG tool's path from tool_def.txt according to VPD_TOOL_GUID defined in DSC file.
BPDGToolName = None
for ToolDef in self.ToolDefinition.values():
if TAB_GUID in ToolDef and ToolDef[TAB_GUID] == self.Platform.VpdToolGuid:
if "PATH" not in ToolDef:
EdkLogger.error("build", ATTRIBUTE_NOT_AVAILABLE, "PATH attribute was not provided for BPDG guid tool %s in tools_def.txt" % self.Platform.VpdToolGuid)
BPDGToolName = ToolDef["PATH"]
break
# Call third party GUID BPDG tool.
if BPDGToolName is not None:
VpdInfoFile.CallExtenalBPDGTool(BPDGToolName, VpdFilePath)
else:
EdkLogger.error("Build", FILE_NOT_FOUND, "Fail to find third-party BPDG tool to process VPD PCDs. BPDG Guid tool need to be defined in tools_def.txt and VPD_TOOL_GUID need to be provided in DSC file.")
## Return the platform build data object
@cached_property
def Platform(self):
return self.BuildDatabase[self.MetaFile, self.Arch, self.BuildTarget, self.ToolChain]
## Return platform name
@cached_property
def Name(self):
return self.Platform.PlatformName
## Return the meta file GUID
@cached_property
def Guid(self):
return self.Platform.Guid
## Return the platform version
@cached_property
def Version(self):
return self.Platform.Version
## Return the FDF file name
@cached_property
def FdfFile(self):
if self.Workspace.FdfFile:
RetVal= mws.join(self.WorkspaceDir, self.Workspace.FdfFile)
else:
RetVal = ''
return RetVal
## Return the build output directory platform specifies
@cached_property
def OutputDir(self):
return self.Platform.OutputDirectory
## Return the directory to store all intermediate and final files built
@cached_property
def BuildDir(self):
if os.path.isabs(self.OutputDir):
GlobalData.gBuildDirectory = RetVal = path.join(
path.abspath(self.OutputDir),
self.BuildTarget + "_" + self.ToolChain,
)
else:
GlobalData.gBuildDirectory = RetVal = path.join(
self.WorkspaceDir,
self.OutputDir,
self.BuildTarget + "_" + self.ToolChain,
)
return RetVal
## Return directory of platform makefile
#
# @retval string Makefile directory
#
@cached_property
def MakeFileDir(self):
return path.join(self.BuildDir, self.Arch)
## Return build command string
#
# @retval string Build command string
#
@cached_property
def BuildCommand(self):
RetVal = []
if "MAKE" in self.ToolDefinition and "PATH" in self.ToolDefinition["MAKE"]:
RetVal += _SplitOption(self.ToolDefinition["MAKE"]["PATH"])
if "FLAGS" in self.ToolDefinition["MAKE"]:
NewOption = self.ToolDefinition["MAKE"]["FLAGS"].strip()
if NewOption != '':
RetVal += _SplitOption(NewOption)
if "MAKE" in self.EdkIIBuildOption:
if "FLAGS" in self.EdkIIBuildOption["MAKE"]:
Flags = self.EdkIIBuildOption["MAKE"]["FLAGS"]
if Flags.startswith('='):
RetVal = [RetVal[0]] + [Flags[1:]]
else:
RetVal.append(Flags)
return RetVal
## Get tool chain definition
#
# Get each tool definition for given tool chain from tools_def.txt and platform
#
@cached_property
def ToolDefinition(self):
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDictionary
if TAB_TOD_DEFINES_COMMAND_TYPE not in self.Workspace.ToolDef.ToolsDefTxtDatabase:
EdkLogger.error('build', RESOURCE_NOT_AVAILABLE, "No tools found in configuration",
ExtraData="[%s]" % self.MetaFile)
RetVal = OrderedDict()
DllPathList = set()
for Def in ToolDefinition:
Target, Tag, Arch, Tool, Attr = Def.split("_")
if Target != self.BuildTarget or Tag != self.ToolChain or Arch != self.Arch:
continue
Value = ToolDefinition[Def]
# don't record the DLL
if Attr == "DLL":
DllPathList.add(Value)
continue
if Tool not in RetVal:
RetVal[Tool] = OrderedDict()
RetVal[Tool][Attr] = Value
ToolsDef = ''
if GlobalData.gOptions.SilentMode and "MAKE" in RetVal:
if "FLAGS" not in RetVal["MAKE"]:
RetVal["MAKE"]["FLAGS"] = ""
RetVal["MAKE"]["FLAGS"] += " -s"
MakeFlags = ''
for Tool in RetVal:
for Attr in RetVal[Tool]:
Value = RetVal[Tool][Attr]
if Tool in self._BuildOptionWithToolDef(RetVal) and Attr in self._BuildOptionWithToolDef(RetVal)[Tool]:
# check if override is indicated
if self._BuildOptionWithToolDef(RetVal)[Tool][Attr].startswith('='):
Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr][1:]
else:
if Attr != 'PATH':
Value += " " + self._BuildOptionWithToolDef(RetVal)[Tool][Attr]
else:
Value = self._BuildOptionWithToolDef(RetVal)[Tool][Attr]
if Attr == "PATH":
# Don't put MAKE definition in the file
if Tool != "MAKE":
ToolsDef += "%s = %s\n" % (Tool, Value)
elif Attr != "DLL":
# Don't put MAKE definition in the file
if Tool == "MAKE":
if Attr == "FLAGS":
MakeFlags = Value
else:
ToolsDef += "%s_%s = %s\n" % (Tool, Attr, Value)
ToolsDef += "\n"
tool_def_file = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
SaveFileOnChange(tool_def_file, ToolsDef, False)
for DllPath in DllPathList:
os.environ["PATH"] = DllPath + os.pathsep + os.environ["PATH"]
os.environ["MAKE_FLAGS"] = MakeFlags
return RetVal
## Return the paths of tools
@cached_property
def ToolDefinitionFile(self):
tool_def_file = os.path.join(self.MakeFileDir, "TOOLS_DEF." + self.Arch)
if not os.path.exists(tool_def_file):
self.ToolDefinition
return tool_def_file
## Retrieve the toolchain family of given toolchain tag. Default to 'MSFT'.
@cached_property
def ToolChainFamily(self):
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
if TAB_TOD_DEFINES_FAMILY not in ToolDefinition \
or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_FAMILY] \
or not ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]:
EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
% self.ToolChain)
RetVal = TAB_COMPILER_MSFT
else:
RetVal = ToolDefinition[TAB_TOD_DEFINES_FAMILY][self.ToolChain]
return RetVal
@cached_property
def BuildRuleFamily(self):
ToolDefinition = self.Workspace.ToolDef.ToolsDefTxtDatabase
if TAB_TOD_DEFINES_BUILDRULEFAMILY not in ToolDefinition \
or self.ToolChain not in ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY] \
or not ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]:
EdkLogger.verbose("No tool chain family found in configuration for %s. Default to MSFT." \
% self.ToolChain)
return TAB_COMPILER_MSFT
return ToolDefinition[TAB_TOD_DEFINES_BUILDRULEFAMILY][self.ToolChain]
## Return the build options specific for all modules in this platform
@cached_property
def BuildOption(self):
return self._ExpandBuildOption(self.Platform.BuildOptions)
def _BuildOptionWithToolDef(self, ToolDef):
return self._ExpandBuildOption(self.Platform.BuildOptions, ToolDef=ToolDef)
## Return the build options specific for EDK modules in this platform
@cached_property
def EdkBuildOption(self):
return self._ExpandBuildOption(self.Platform.BuildOptions, EDK_NAME)
## Return the build options specific for EDKII modules in this platform
@cached_property
def EdkIIBuildOption(self):
return self._ExpandBuildOption(self.Platform.BuildOptions, EDKII_NAME)
## Parse build_rule.txt in Conf Directory.
#
# @retval BuildRule object
#
@cached_property
def BuildRule(self):
BuildRuleFile = None
if TAB_TAT_DEFINES_BUILD_RULE_CONF in self.Workspace.TargetTxt.TargetTxtDictionary:
BuildRuleFile = self.Workspace.TargetTxt.TargetTxtDictionary[TAB_TAT_DEFINES_BUILD_RULE_CONF]
if not BuildRuleFile:
BuildRuleFile = gDefaultBuildRuleFile
RetVal = BuildRule(BuildRuleFile)
if RetVal._FileVersion == "":
RetVal._FileVersion = AutoGenReqBuildRuleVerNum
else:
if RetVal._FileVersion < AutoGenReqBuildRuleVerNum :
# If the Build Rule's version is less than the version number required by the tools, halt the build.
EdkLogger.error("build", AUTOGEN_ERROR,
ExtraData="The version number [%s] of build_rule.txt is less than the version number required by the AutoGen.(the minimum required version number is [%s])"\
% (RetVal._FileVersion, AutoGenReqBuildRuleVerNum))
return RetVal
## Summarize the packages used by modules in this platform
@cached_property
def PackageList(self):
RetVal = set()
for Mb in self._MbList:
RetVal.update(Mb.Packages)
for lb in Mb.LibInstances:
RetVal.update(lb.Packages)
#Collect package set information from INF of FDF
for ModuleFile in self._AsBuildModuleList:
if ModuleFile in self.Platform.Modules:
continue
ModuleData = self.BuildDatabase[ModuleFile, self.Arch, self.BuildTarget, self.ToolChain]
RetVal.update(ModuleData.Packages)
RetVal.update(self.Platform.Packages)
return list(RetVal)
@cached_property
def NonDynamicPcdDict(self):
return {(Pcd.TokenCName, Pcd.TokenSpaceGuidCName):Pcd for Pcd in self.NonDynamicPcdList}
## Get list of non-dynamic PCDs
@property
def NonDynamicPcdList(self):
if not self._NonDynamicPcdList:
self.CollectPlatformDynamicPcds()
return self._NonDynamicPcdList
## Get list of dynamic PCDs
@property
def DynamicPcdList(self):
if not self._DynamicPcdList:
self.CollectPlatformDynamicPcds()
return self._DynamicPcdList
## Generate Token Number for all PCD
@cached_property
def PcdTokenNumber(self):
RetVal = OrderedDict()
TokenNumber = 1
#
# Make the Dynamic and DynamicEx PCDs use different TokenNumber areas.
# Such as:
#
# Dynamic PCD:
# TokenNumber 0 ~ 10
# DynamicEx PCD:
# TokenNumber 11 ~ 20
#
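# Assignment order implemented below (the token counter keeps increasing across groups):
#   PEI Dynamic -> PEI DynamicEx -> DXE Dynamic -> DXE DynamicEx -> non-dynamic PCDs.
# The 0~10 / 11~20 ranges above are only an illustrative split, not fixed bounds.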
for Pcd in self.DynamicPcdList:
if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
TokenNumber += 1
for Pcd in self.DynamicPcdList:
if Pcd.Phase == "PEI" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
TokenNumber += 1
for Pcd in self.DynamicPcdList:
if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_TYPE_SET:
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
TokenNumber += 1
for Pcd in self.DynamicPcdList:
if Pcd.Phase == "DXE" and Pcd.Type in PCD_DYNAMIC_EX_TYPE_SET:
EdkLogger.debug(EdkLogger.DEBUG_5, "%s %s (%s) -> %d" % (Pcd.TokenCName, Pcd.TokenSpaceGuidCName, Pcd.Phase, TokenNumber))
RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
TokenNumber += 1
for Pcd in self.NonDynamicPcdList:
RetVal[Pcd.TokenCName, Pcd.TokenSpaceGuidCName] = TokenNumber
TokenNumber += 1
return RetVal
@cached_property
def _MbList(self):
return [self.BuildDatabase[m, self.Arch, self.BuildTarget, self.ToolChain] for m in self.Platform.Modules]
@cached_property
def _MaList(self):
for ModuleFile in self.Platform.Modules:
Ma = ModuleAutoGen(
self.Workspace,
ModuleFile,
self.BuildTarget,
self.ToolChain,
self.Arch,
self.MetaFile,
self.DataPipe
)
self.Platform.Modules[ModuleFile].M = Ma
return [x.M for x in self.Platform.Modules.values()]
## Summarize ModuleAutoGen objects of all modules to be built for this platform
@cached_property
def ModuleAutoGenList(self):
RetVal = []
for Ma in self._MaList:
if Ma not in RetVal:
RetVal.append(Ma)
return RetVal
## Summarize ModuleAutoGen objects of all libraries to be built for this platform
@cached_property
def LibraryAutoGenList(self):
RetVal = []
for Ma in self._MaList:
for La in Ma.LibraryAutoGenList:
if La not in RetVal:
RetVal.append(La)
if Ma not in La.ReferenceModules:
La.ReferenceModules.append(Ma)
return RetVal
## Test if a module is supported by the platform
#
# An error will be raised directly if the module or its arch is not supported
# by the platform or current configuration
#
def ValidModule(self, Module):
return Module in self.Platform.Modules or Module in self.Platform.LibraryInstances \
or Module in self._AsBuildModuleList
@cached_property
def GetAllModuleInfo(self,WithoutPcd=True):
ModuleLibs = set()
for m in self.Platform.Modules:
module_obj = self.BuildDatabase[m,self.Arch,self.BuildTarget,self.ToolChain]
if not bool(module_obj.LibraryClass):
Libs = GetModuleLibInstances(module_obj, self.Platform, self.BuildDatabase, self.Arch,self.BuildTarget,self.ToolChain,self.MetaFile,EdkLogger)
else:
Libs = []
ModuleLibs.update( set([(l.MetaFile.File,l.MetaFile.Root,l.MetaFile.Path,l.MetaFile.BaseName,l.MetaFile.OriginalPath,l.Arch,True) for l in Libs]))
if WithoutPcd and module_obj.PcdIsDriver:
continue
ModuleLibs.add((m.File,m.Root,m.Path,m.BaseName,m.OriginalPath,module_obj.Arch,bool(module_obj.LibraryClass)))
return ModuleLibs
## Resolve the library classes in a module to library instances
#
# This method will not only resolve library classes but also sort the library
# instances according to their dependency relationship.
#
# @param Module The module from which the library classes will be resolved
#
# @retval library_list List of library instances sorted
#
def ApplyLibraryInstance(self, Module):
# Cover the case where the binary INF file is listed in the FDF file but not in the DSC file; return an empty list directly
if str(Module) not in self.Platform.Modules:
return []
return GetModuleLibInstances(Module,
self.Platform,
self.BuildDatabase,
self.Arch,
self.BuildTarget,
self.ToolChain,
self.MetaFile,
EdkLogger)
## Override PCD setting (type, value, ...)
#
# @param ToPcd The PCD to be overridden
# @param FromPcd The PCD overriding from
#
def _OverridePcd(self, ToPcd, FromPcd, Module="", Msg="", Library=""):
#
# in case there are PCDs coming from the FDF file, which have no type given.
# at this point, ToPcd.Type has the type found from dependent
# package
#
TokenCName = ToPcd.TokenCName
for PcdItem in GlobalData.MixedPcd:
if (ToPcd.TokenCName, ToPcd.TokenSpaceGuidCName) in GlobalData.MixedPcd[PcdItem]:
TokenCName = PcdItem[0]
break
if FromPcd is not None:
if ToPcd.Pending and FromPcd.Type:
ToPcd.Type = FromPcd.Type
elif ToPcd.Type and FromPcd.Type\
and ToPcd.Type != FromPcd.Type and ToPcd.Type in FromPcd.Type:
if ToPcd.Type.strip() == TAB_PCDS_DYNAMIC_EX:
ToPcd.Type = FromPcd.Type
elif ToPcd.Type and FromPcd.Type \
and ToPcd.Type != FromPcd.Type:
if Library:
Module = str(Module) + " 's library file (" + str(Library) + ")"
EdkLogger.error("build", OPTION_CONFLICT, "Mismatched PCD type",
ExtraData="%s.%s is used as [%s] in module %s, but as [%s] in %s."\
% (ToPcd.TokenSpaceGuidCName, TokenCName,
ToPcd.Type, Module, FromPcd.Type, Msg),
File=self.MetaFile)
if FromPcd.MaxDatumSize:
ToPcd.MaxDatumSize = FromPcd.MaxDatumSize
ToPcd.MaxSizeUserSet = FromPcd.MaxDatumSize
if FromPcd.DefaultValue:
ToPcd.DefaultValue = FromPcd.DefaultValue
if FromPcd.TokenValue:
ToPcd.TokenValue = FromPcd.TokenValue
if FromPcd.DatumType:
ToPcd.DatumType = FromPcd.DatumType
if FromPcd.SkuInfoList:
ToPcd.SkuInfoList = FromPcd.SkuInfoList
if FromPcd.UserDefinedDefaultStoresFlag:
ToPcd.UserDefinedDefaultStoresFlag = FromPcd.UserDefinedDefaultStoresFlag
# Add Flexible PCD format parse
if ToPcd.DefaultValue:
try:
ToPcd.DefaultValue = ValueExpressionEx(ToPcd.DefaultValue, ToPcd.DatumType, self.Platform._GuidDict)(True)
except BadExpression as Value:
EdkLogger.error('Parser', FORMAT_INVALID, 'PCD [%s.%s] Value "%s", %s' %(ToPcd.TokenSpaceGuidCName, ToPcd.TokenCName, ToPcd.DefaultValue, Value),
File=self.MetaFile)
# check the validation of datum
IsValid, Cause = CheckPcdDatum(ToPcd.DatumType, ToPcd.DefaultValue)
if not IsValid:
EdkLogger.error('build', FORMAT_INVALID, Cause, File=self.MetaFile,
ExtraData="%s.%s" % (ToPcd.TokenSpaceGuidCName, TokenCName))
ToPcd.validateranges = FromPcd.validateranges
ToPcd.validlists = FromPcd.validlists
ToPcd.expressions = FromPcd.expressions
ToPcd.CustomAttribute = FromPcd.CustomAttribute
if FromPcd is not None and ToPcd.DatumType == TAB_VOID and not ToPcd.MaxDatumSize:
EdkLogger.debug(EdkLogger.DEBUG_9, "No MaxDatumSize specified for PCD %s.%s" \
% (ToPcd.TokenSpaceGuidCName, TokenCName))
Value = ToPcd.DefaultValue
if not Value:
ToPcd.MaxDatumSize = '1'
elif Value[0] == 'L':
ToPcd.MaxDatumSize = str((len(Value) - 2) * 2)
elif Value[0] == '{':
ToPcd.MaxDatumSize = str(len(Value.split(',')))
else:
ToPcd.MaxDatumSize = str(len(Value) - 1)
# apply default SKU for dynamic PCDS if specified one is not available
if (ToPcd.Type in PCD_DYNAMIC_TYPE_SET or ToPcd.Type in PCD_DYNAMIC_EX_TYPE_SET) \
and not ToPcd.SkuInfoList:
if self.Platform.SkuName in self.Platform.SkuIds:
SkuName = self.Platform.SkuName
else:
SkuName = TAB_DEFAULT
ToPcd.SkuInfoList = {
SkuName : SkuInfoClass(SkuName, self.Platform.SkuIds[SkuName][0], '', '', '', '', '', ToPcd.DefaultValue)
}
## Apply PCD settings defined in the platform to a module
#
# @param Module The module from which the PCD setting will be overridden
#
# @retval PCD_list The list of PCDs with settings from the platform
#
def ApplyPcdSetting(self, Module, Pcds, Library=""):
# for each PCD in module
for Name, Guid in Pcds:
PcdInModule = Pcds[Name, Guid]
# find out the PCD setting in platform
if (Name, Guid) in self.Platform.Pcds:
PcdInPlatform = self.Platform.Pcds[Name, Guid]
else:
PcdInPlatform = None
# then override the settings if any
self._OverridePcd(PcdInModule, PcdInPlatform, Module, Msg="DSC PCD sections", Library=Library)
# resolve the VariableGuid value
for SkuId in PcdInModule.SkuInfoList:
Sku = PcdInModule.SkuInfoList[SkuId]
if Sku.VariableGuid == '': continue
Sku.VariableGuidValue = GuidValue(Sku.VariableGuid, self.PackageList, self.MetaFile.Path)
if Sku.VariableGuidValue is None:
PackageList = "\n\t".join(str(P) for P in self.PackageList)
EdkLogger.error(
'build',
RESOURCE_NOT_AVAILABLE,
"Value of GUID [%s] is not found in" % Sku.VariableGuid,
ExtraData=PackageList + "\n\t(used with %s.%s from module %s)" \
% (Guid, Name, str(Module)),
File=self.MetaFile
)
# override PCD settings with module specific setting
if Module in self.Platform.Modules:
PlatformModule = self.Platform.Modules[str(Module)]
for Key in PlatformModule.Pcds:
if GlobalData.BuildOptionPcd:
for pcd in GlobalData.BuildOptionPcd:
(TokenSpaceGuidCName, TokenCName, FieldName, pcdvalue, _) = pcd
if (TokenCName, TokenSpaceGuidCName) == Key and FieldName =="":
PlatformModule.Pcds[Key].DefaultValue = pcdvalue
PlatformModule.Pcds[Key].PcdValueFromComm = pcdvalue
break
Flag = False
if Key in Pcds:
ToPcd = Pcds[Key]
Flag = True
elif Key in GlobalData.MixedPcd:
for PcdItem in GlobalData.MixedPcd[Key]:
if PcdItem in Pcds:
ToPcd = Pcds[PcdItem]
Flag = True
break
if Flag:
self._OverridePcd(ToPcd, PlatformModule.Pcds[Key], Module, Msg="DSC Components Module scoped PCD section", Library=Library)
# use PCD value to calculate the MaxDatumSize when it is not specified
for Name, Guid in Pcds:
Pcd = Pcds[Name, Guid]
if Pcd.DatumType == TAB_VOID and not Pcd.MaxDatumSize:
Pcd.MaxSizeUserSet = None
Value = Pcd.DefaultValue
if not Value:
Pcd.MaxDatumSize = '1'
elif Value[0] == 'L':
Pcd.MaxDatumSize = str((len(Value) - 2) * 2)
elif Value[0] == '{':
Pcd.MaxDatumSize = str(len(Value.split(',')))
else:
Pcd.MaxDatumSize = str(len(Value) - 1)
return list(Pcds.values())
## Append build options in platform to a module
#
# @param Module The module to which the build options will be appended
#
# @retval options The options appended with build options in platform
#
def ApplyBuildOption(self, Module):
# Get the different options for the different style module
PlatformOptions = self.EdkIIBuildOption
ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)
ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)
ModuleOptions = self._ExpandBuildOption(Module.BuildOptions)
if Module in self.Platform.Modules:
PlatformModule = self.Platform.Modules[str(Module)]
PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)
else:
PlatformModuleOptions = {}
BuildRuleOrder = None
for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
for Tool in Options:
for Attr in Options[Tool]:
if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
BuildRuleOrder = Options[Tool][Attr]
AllTools = set(list(ModuleOptions.keys()) + list(PlatformOptions.keys()) +
list(PlatformModuleOptions.keys()) + list(ModuleTypeOptions.keys()) +
list(self.ToolDefinition.keys()))
BuildOptions = defaultdict(lambda: defaultdict(str))
for Tool in AllTools:
for Options in [self.ToolDefinition, ModuleOptions, PlatformOptions, ModuleTypeOptions, PlatformModuleOptions]:
if Tool not in Options:
continue
for Attr in Options[Tool]:
#
# Do not generate it in Makefile
#
if Attr == TAB_TOD_DEFINES_BUILDRULEORDER:
continue
Value = Options[Tool][Attr]
# check if override is indicated
if Value.startswith('='):
BuildOptions[Tool][Attr] = mws.handleWsMacro(Value[1:])
else:
if Attr != 'PATH':
BuildOptions[Tool][Attr] += " " + mws.handleWsMacro(Value)
else:
BuildOptions[Tool][Attr] = mws.handleWsMacro(Value)
return BuildOptions, BuildRuleOrder
def GetGlobalBuildOptions(self,Module):
ModuleTypeOptions = self.Platform.GetBuildOptionsByModuleType(EDKII_NAME, Module.ModuleType)
ModuleTypeOptions = self._ExpandBuildOption(ModuleTypeOptions)
if Module in self.Platform.Modules:
PlatformModule = self.Platform.Modules[str(Module)]
PlatformModuleOptions = self._ExpandBuildOption(PlatformModule.BuildOptions)
else:
PlatformModuleOptions = {}
return ModuleTypeOptions,PlatformModuleOptions
def ModuleGuid(self,Module):
if os.path.basename(Module.MetaFile.File) != os.path.basename(Module.MetaFile.Path):
#
# Length of GUID is 36
#
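# Editor's note (hypothetical example, not from the source): a module file named
#   "7f3b5a10-1234-5678-9abc-def012345678_MyDriver.inf"
# yields basename(...)[:36] == "7f3b5a10-1234-5678-9abc-def012345678",
# i.e. the registry-format GUID string (8-4-4-4-12 digits plus 4 hyphens = 36 chars).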
return os.path.basename(Module.MetaFile.Path)[:36]
return Module.Guid
@cached_property
def UniqueBaseName(self):
retVal ={}
ModuleNameDict = {}
UniqueName = {}
for Module in self._MbList:
unique_base_name = '%s_%s' % (Module.BaseName,self.ModuleGuid(Module))
if unique_base_name not in ModuleNameDict:
ModuleNameDict[unique_base_name] = []
ModuleNameDict[unique_base_name].append(Module.MetaFile)
if Module.BaseName not in UniqueName:
UniqueName[Module.BaseName] = set()
UniqueName[Module.BaseName].add((self.ModuleGuid(Module),Module.MetaFile))
for module_paths in ModuleNameDict.values():
if len(set(module_paths))>1:
samemodules = list(set(module_paths))
EdkLogger.error("build", FILE_DUPLICATED, 'Modules have same BaseName and FILE_GUID:\n'
' %s\n %s' % (samemodules[0], samemodules[1]))
for name in UniqueName:
Guid_Path = UniqueName[name]
if len(Guid_Path) > 1:
for guid,mpath in Guid_Path:
retVal[(name,mpath)] = '%s_%s' % (name,guid)
return retVal
## Expand * in build option key
#
# @param Options Options to be expanded
# @param ToolDef Use specified ToolDef instead of full version.
# This is needed during initialization to prevent
# infinite recursion between BuildOptions,
# ToolDefinition, and this function.
#
# @retval options Options expanded
#
def _ExpandBuildOption(self, Options, ModuleStyle=None, ToolDef=None):
if not ToolDef:
ToolDef = self.ToolDefinition
BuildOptions = {}
FamilyMatch = False
FamilyIsNull = True
OverrideList = {}
#
# Construct a list containing the build options which need to be overridden.
#
for Key in Options:
#
# Key[0] -- tool family
# Key[1] -- TARGET_TOOLCHAIN_ARCH_COMMANDTYPE_ATTRIBUTE
#
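# Illustrative key (hypothetical toolchain name, not taken from the source):
#   ("MSFT", "DEBUG_VS2015x86_IA32_CC_FLAGS")
# i.e. family "MSFT" with TARGET=DEBUG, TOOLCHAIN=VS2015x86, ARCH=IA32,
# COMMANDTYPE=CC, ATTRIBUTE=FLAGS.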
if (Key[0] == self.BuildRuleFamily and
(ModuleStyle is None or len(Key) < 3 or (len(Key) > 2 and Key[2] == ModuleStyle))):
Target, ToolChain, Arch, CommandType, Attr = Key[1].split('_')
if (Target == self.BuildTarget or Target == TAB_STAR) and\
(ToolChain == self.ToolChain or ToolChain == TAB_STAR) and\
(Arch == self.Arch or Arch == TAB_STAR) and\
Options[Key].startswith("="):
if OverrideList.get(Key[1]) is not None:
OverrideList.pop(Key[1])
OverrideList[Key[1]] = Options[Key]
#
# Use the highest priority value.
#
if (len(OverrideList) >= 2):
KeyList = list(OverrideList.keys())
for Index in range(len(KeyList)):
NowKey = KeyList[Index]
Target1, ToolChain1, Arch1, CommandType1, Attr1 = NowKey.split("_")
for Index1 in range(len(KeyList) - Index - 1):
NextKey = KeyList[Index1 + Index + 1]
#
# Compare two Keys; if one is included by another, choose the higher priority one
#
Target2, ToolChain2, Arch2, CommandType2, Attr2 = NextKey.split("_")
if (Target1 == Target2 or Target1 == TAB_STAR or Target2 == TAB_STAR) and\
(ToolChain1 == ToolChain2 or ToolChain1 == TAB_STAR or ToolChain2 == TAB_STAR) and\
(Arch1 == Arch2 or Arch1 == TAB_STAR or Arch2 == TAB_STAR) and\
(CommandType1 == CommandType2 or CommandType1 == TAB_STAR or CommandType2 == TAB_STAR) and\
(Attr1 == Attr2 or Attr1 == TAB_STAR or Attr2 == TAB_STAR):
if CalculatePriorityValue(NowKey) > CalculatePriorityValue(NextKey):
if Options.get((self.BuildRuleFamily, NextKey)) is not None:
Options.pop((self.BuildRuleFamily, NextKey))
else:
if Options.get((self.BuildRuleFamily, NowKey)) is not None:
Options.pop((self.BuildRuleFamily, NowKey))
for Key in Options:
if ModuleStyle is not None and len (Key) > 2:
# Check Module style is EDK or EDKII.
# Only append build option for the matched style module.
if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
continue
elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
continue
Family = Key[0]
Target, Tag, Arch, Tool, Attr = Key[1].split("_")
# if tool chain family doesn't match, skip it
if Tool in ToolDef and Family != "":
FamilyIsNull = False
if ToolDef[Tool].get(TAB_TOD_DEFINES_BUILDRULEFAMILY, "") != "":
if Family != ToolDef[Tool][TAB_TOD_DEFINES_BUILDRULEFAMILY]:
continue
elif Family != ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
continue
FamilyMatch = True
# expand any wildcard
if Target == TAB_STAR or Target == self.BuildTarget:
if Tag == TAB_STAR or Tag == self.ToolChain:
if Arch == TAB_STAR or Arch == self.Arch:
if Tool not in BuildOptions:
BuildOptions[Tool] = {}
if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
BuildOptions[Tool][Attr] = Options[Key]
else:
# append options for the same tool except PATH
if Attr != 'PATH':
BuildOptions[Tool][Attr] += " " + Options[Key]
else:
BuildOptions[Tool][Attr] = Options[Key]
# The Build Option Family has already been checked, so it needn't be checked again.
if FamilyMatch or FamilyIsNull:
return BuildOptions
for Key in Options:
if ModuleStyle is not None and len (Key) > 2:
# Check Module style is EDK or EDKII.
# Only append build option for the matched style module.
if ModuleStyle == EDK_NAME and Key[2] != EDK_NAME:
continue
elif ModuleStyle == EDKII_NAME and Key[2] != EDKII_NAME:
continue
Family = Key[0]
Target, Tag, Arch, Tool, Attr = Key[1].split("_")
# if tool chain family doesn't match, skip it
if Tool not in ToolDef or Family == "":
continue
# option has been added before
if Family != ToolDef[Tool][TAB_TOD_DEFINES_FAMILY]:
continue
# expand any wildcard
if Target == TAB_STAR or Target == self.BuildTarget:
if Tag == TAB_STAR or Tag == self.ToolChain:
if Arch == TAB_STAR or Arch == self.Arch:
if Tool not in BuildOptions:
BuildOptions[Tool] = {}
if Attr != "FLAGS" or Attr not in BuildOptions[Tool] or Options[Key].startswith('='):
BuildOptions[Tool][Attr] = Options[Key]
else:
# append options for the same tool except PATH
if Attr != 'PATH':
BuildOptions[Tool][Attr] += " " + Options[Key]
else:
BuildOptions[Tool][Attr] = Options[Key]
return BuildOptions
|
StarcoderdataPython
|
45764
|
#!/usr/bin/python
import simple_test
simple_test.test("test29", ["-h", ])
|
StarcoderdataPython
|
3315538
|
# !/usr/bin/env python
from playhouse.migrate import migrate
from fsb import logger
from fsb.db import base_migrator
from fsb.db.models import Chat
from fsb.db.models import Member
from fsb.db.models import MemberRole
from fsb.db.models import QueryEvent
from fsb.db.models import Rating
from fsb.db.models import RatingMember
from fsb.db.models import Role
from fsb.db.models import User
class BaseMigration:
def up(self):
logger.info(f"Migrate {self.__class__.__name__} ...")
def down(self):
logger.info(f"Rollback {self.__class__.__name__} ...")
@staticmethod
def migrate_decorator(callback: callable):
def migration(self):
migrate(callback(self))
return migration
class CreateMainTables(BaseMigration):
_tables = [
User,
Chat,
Member,
Role,
MemberRole,
Rating,
RatingMember,
]
def up(self):
super().up()
for table in self._tables:
if table.table_exists():
raise RuntimeError(f"Table `{table.TABLE_NAME}` already exist")
base_migrator.database.create_tables(self._tables)
logger.info("Creating tables is done")
def down(self):
super().down()
tables = self._tables.copy()
for table in self._tables:
if not table.table_exists():
tables.remove(table)
if not tables:
raise RuntimeError("All tables already dropped")
base_migrator.database.drop_tables(tables)
logger.info(f"Dropped tables: {', '.join([table.TABLE_NAME for table in tables])}")
class CreateEventsTable(BaseMigration):
def up(self):
super().up()
if QueryEvent.table_exists():
raise RuntimeError(f"Table `{QueryEvent.TABLE_NAME}` already exist")
QueryEvent.create_table()
logger.info(f"Creating `{QueryEvent.TABLE_NAME}` is done")
def down(self):
super().down()
if not QueryEvent.table_exists():
raise RuntimeError(f"Table `{QueryEvent.TABLE_NAME}` already dropped")
QueryEvent.drop_table()
logger.info(f"Table `{QueryEvent.TABLE_NAME}` is dropped")
class CreateTablesForRatings(BaseMigration):
_tables = [
Rating,
RatingMember,
]
def up(self):
super().up()
for table in self._tables:
if table.table_exists():
raise RuntimeError(f"Table `{table.TABLE_NAME}` already exist")
base_migrator.database.create_tables(self._tables)
Rating._schema.create_foreign_key(Rating.last_winner)
logger.info("Creating tables is done")
def down(self):
super().down()
tables = self._tables.copy()
for table in self._tables:
if not table.table_exists():
tables.remove(table)
if not tables:
raise RuntimeError("All tables already dropped")
base_migrator.database.drop_tables(tables)
logger.info(f"Dropped tables: {', '.join([table.TABLE_NAME for table in tables])}")
|
StarcoderdataPython
|
1764026
|
<gh_stars>0
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from tracker import views
urlpatterns = patterns('tracker.views',
url(regex=r'^new_char/$',
view=views.CharacterCreateView.as_view(),
name='new_char'),
url(regex=r'^chars/$',
view=views.CharacterListView.as_view(),
name='my_chars'),
url(regex=r'^del_char/(?P<pk>\d+)/$',
view=views.CharacterDeleteView.as_view(),
name='rm_char'),
url(regex=r'^my_items/$',
view=views.TrackedItemsListView.as_view(),
name='character_items_list'),
url(
regex=r'^my_items/(?P<pk>\d+)/$',
view=views.TrackItemDetailView.as_view(),
name='character_item'
),
)
|
StarcoderdataPython
|
1721792
|
<reponame>Sithlord-dev/Dog_vision<gh_stars>0
from __future__ import division, print_function
# coding=utf-8
import os
# Keras
from keras.models import model_from_json
# Flask utils
from flask import Flask, request, render_template
from werkzeug.utils import secure_filename
from model_files.ml_model import make_prediction, get_labels
# Define a flask app
app = Flask(__name__, static_folder='static', static_url_path='/static')
## Model reconstruction
WEIGHTS_PATH = 'model_files/cnn_dogvision_weights.h5'
ARCH_PATH = 'model_files/model.json'
# load json and create model
json_file = open(ARCH_PATH, 'r')
loaded_model_json = json_file.read()
json_file.close()
cnn_model = model_from_json(loaded_model_json)
# load weights into new model
cnn_model.load_weights(WEIGHTS_PATH)
# load models for feature extraction
from keras.applications.inception_resnet_v2 import InceptionResNetV2
InceptionResNetV2(input_shape=(331,331,3), include_top=False, weights='imagenet')
from keras.applications.xception import Xception
Xception(input_shape=(331,331,3), include_top=False, weights='imagenet')
from keras.applications.inception_v3 import InceptionV3
InceptionV3(input_shape=(331,331,3), include_top=False, weights='imagenet')
from keras.applications.nasnet import NASNetLarge
NASNetLarge(input_shape=(331,331,3), include_top=False, weights='imagenet')
print("Model successfully loaded")
@app.route('/', methods=['GET'])
def index():
# Main page
return render_template('index.html')
@app.route('/predict', methods=['GET', 'POST'])
def upload():
if request.method == 'POST':
# Get the file from post request
f = request.files['file']
# Save the file to ./uploads
basepath = os.path.dirname(__file__)
file_path = os.path.join(
basepath, 'uploads', secure_filename(f.filename))
f.save(file_path)
# Make prediction
preds = make_prediction(cnn_model, file_path)
# Process the result for humans
result = get_labels(preds) # Convert to string
return result
return None
if __name__ == '__main__':
app.run(debug=True)
|
StarcoderdataPython
|
3267063
|
<gh_stars>0
import torch.utils.data as data
#import h5py
import numpy as np
import os
from glob import glob
from pyntcloud import PyntCloud
import numpy as np
from sklearn.neighbors import KDTree
from utils import hand
from utils.config import config
from utils.database import *
import torch
class ModelNetDataset(data.Dataset):
def __init__(self, train=True):
if train:
data_file = 'data/modelnet40_ply_hdf5_2048/train_files.txt'
else:
data_file = 'data/modelnet40_ply_hdf5_2048/test_files.txt'
file_list = [line.rstrip() for line in open(data_file, 'r')]
all_data = np.zeros([0, 2048, 3], np.float32)
all_label = np.zeros([0, 1], np.int64)
for filename in file_list:
f = h5py.File(filename)
data = f['data'][:]
label = f['label'][:]
all_data = np.concatenate([all_data, data], 0)
all_label = np.concatenate([all_label, label], 0)
self.pointcloud = all_data
self.label = all_label
def __len__(self):
return self.label.shape[0]
def __getitem__(self, index):
return self.pointcloud[index], self.label[index]
def pc_normalize(pc):
centroid = np.mean(pc, axis=0)
pc = pc - centroid
m = np.max(np.sqrt(np.sum(pc**2, axis=1)))
pc = pc / m
return pc
class TensorBodyDataset():
def __init__(self, data_dir, normalize=True, train=True):
self.normalize = normalize
self.pointcloud_files = []
self.label_files = []
file_list = os.path.join(data_dir, 'data_list.txt')
with open(file_list, 'r') as file:
for line in file:
if line:
pointcloud_file, label_file = line.rstrip().split(' ')
self.pointcloud_files.append(os.path.join(data_dir, pointcloud_file))
self.label_files.append(os.path.join(data_dir, label_file))
if train:
self.idxs = np.arange(len(self.pointcloud_files))[:30000]
else:
self.idxs = np.arange(len(self.pointcloud_files))[30000:]
def __len__(self):
return len(self.idxs)
def __getitem__(self, index):
pointcloud = np.load(self.pointcloud_files[self.idxs[index]]).astype(np.float32)
label = np.load(self.label_files[self.idxs[index]]).astype(np.int64)
if self.normalize:
pointcloud = pc_normalize(pointcloud)
return pointcloud, label
class SMPLDataset():
def __init__(self, data_dir, normalize=True, train=True):
self.normalize = normalize
self.pointcloud_files = glob(os.path.join(data_dir, 'pointclouds', '*/*.npy'))
self.label_files = glob(os.path.join(data_dir, 'labels', '*/*.npy'))
N = len(self.pointcloud_files)
indices = np.random.choice(N, N, replace=False)
part = int(N * 0.8)
if train:
self.idxs = indices[:part]
else:
self.idxs = indices[part:]
def __len__(self):
return len(self.idxs)
def __getitem__(self, index):
pointcloud = np.load(self.pointcloud_files[self.idxs[index]]).astype(np.float32)
label = np.load(self.label_files[self.idxs[index]]).astype(np.int64)
if self.normalize:
pointcloud = pc_normalize(pointcloud)
return pointcloud, label
def sample_pt_cld(scale, grasp_rescale, abs_model_path):
m = PyntCloud.from_file(abs_model_path)
pt_cld = m.get_sample("mesh_random", n=10000, rgb=False, normals=False).values
pt_cld *= grasp_rescale
pt_cld *= scale
return pt_cld
def get_joint_locations(grasp_joints, grasp_position):
start = np.array(grasp_position)[np.newaxis, 1:4].T
quat = np.array(grasp_position)[4:8]
h = hand.makeHumanHand(start, quat)
h.updateDofs(np.array(grasp_joints)[1:])
hand_pts = h.getJointPositions()
hand_pts = np.concatenate(hand_pts, axis=1).T[1:]
return hand_pts
def get_contact_probs(preds, pt_cld, hand_pts, contact_dist=10):
#preds 1*10000*20
#contact_dist number of mm for joint to be considered contact point
#returns contact joints (where joint 0 is from finger not hand root) and probs of those contact points
tree = KDTree(pt_cld)
dist, ind = tree.query(hand_pts[1:], k=1)
joints = np.arange(len(hand_pts[1:]))[:, np.newaxis]
ind = ind[dist < contact_dist]
joints = joints[dist < contact_dist]
contact_pts = pt_cld[ind]
probs = preds[0, ind, joints].data.cpu().numpy()
return joints, probs
def get_joint_probs(preds, pt_cld, hand_pts):
return get_contact_probs(preds, pt_cld, hand_pts, contact_dist=np.inf)
class ColumbiaGraspDataset():
def __init__(self, normalize=True):
self.normalize = normalize
self.params = config(section='data')
self.mr = ModelReader()
self.data = [self.mr.prepare_sample(grasp) for grasp in self.mr.getAll()]
#self.models = [self.mr.getModelInfo(grasp["scaled_model_id"]) for grasp in self.data]
self.models = []
for grasp in self.data:
try:
self.models.append(self.mr.getModelInfo(grasp["scaled_model_id"]))
except Exception as e:
self.models.append((None, None, None))
#Some of the scaled_model_id aren't listed in the database
def __len__(self):
return len(self.data)
def __getitem__(self, index):
grasp = self.data[index]
#scale, grasp_rescale, model_path = self.mr.getModelInfo(grasp["scaled_model_id"])
scale, grasp_rescale, model_path = self.models[index]
if scale is None:
return None, None
# m = PyntCloud.from_file(self.params['model_dir'] + model_path)
# pt_cld = m.get_sample("mesh_random", n=10000, rgb=False, normals=False).values
# pt_cld *= grasp_rescale
# pt_cld *= scale
pt_cld = sample_pt_cld(scale, grasp_rescale, self.params['model_dir'] + model_path)
# start = np.array(grasp['grasp_grasp_position'])[np.newaxis, 1:4].T
# quat = np.array(grasp['grasp_grasp_position'])[4:8]
# h = hand.makeHumanHand(start, quat)
# h.updateDofs(np.array(grasp['grasp_grasp_joints'])[1:])
# hand_pts = h.getJointPositions()
# hand_pts = np.concatenate(hand_pts, axis=1).T[1:]
hand_pts = get_joint_locations(grasp['grasp_grasp_joints'], grasp['grasp_grasp_position'])
tree = KDTree(pt_cld)
dist, ind = tree.query(hand_pts[1:], k=1)
joints = np.arange(len(hand_pts[1:]))[:, np.newaxis]
contact_dist = 10 #number of mm for joint to be considered contact point
ind = ind[dist < contact_dist]
joints = joints[dist < contact_dist]
contact_pts = pt_cld[ind]
label = np.zeros((len(pt_cld), 20))
if len(contact_pts) == 0:
return None, None
tau = 20.8936034 #value such that a point 20mm away has value 0.4
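# Editor's sanity check (illustrative): np.exp(-(20.0 ** 2) / 20.8936034 ** 2) is approximately 0.400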
contact_dists, contact_inds = tree.query(contact_pts, k=len(pt_cld))
#approximating geodesic distance with Euclidean distance as described in
#"Distance Functions and Geodesics on Point Clouds" by Memoli et al.
for contact_dist, contact_ind, joint in zip(contact_dists, contact_inds, joints):
label[contact_ind, joint] = np.exp(-np.power(contact_dist, 2) / tau**2)
#pointcloud = torch.tensor(pc_normalize(pt_cld)).cuda().unsqueeze(0).permute(0, 2, 1)
#label = torch.tensor(label, dtype=torch.float32).cuda().unsqueeze(0)
pt_cld = pc_normalize(pt_cld) if self.normalize else pt_cld
return pt_cld, label
def collate_fn(data):
pointclouds = []
labels = []
for pointcloud, label in data:
if not pointcloud is None:
pointclouds.append(torch.tensor(pointcloud).unsqueeze(0).permute(0, 2, 1))
labels.append(torch.tensor(label, dtype=torch.float32).unsqueeze(0))
pointclouds = torch.cat(pointclouds)
labels = torch.cat(labels)
return pointclouds, labels
if __name__ == '__main__':
#dataset = ModelNetDataset()
#dataset = TensorBodyDataset('data/seg1024')
dataset = SMPLDataset('D:\\Data\\CMUPointclouds')
print(len(dataset))
|
StarcoderdataPython
|
3224987
|
# Test data is contained in goldens.json. This is an array of objects, with
# keys:
#
# - original_code
# - new_code
# - original_tests
# - new_tests
#
# The tests load this file, and then verify that calling fixup_* functions
# on the original_* data returns the same values as in the new_* data.
#
# To generate goldens.json, run "main.py --golden_file goldens.json"
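# Illustrative entry (editor's hypothetical values, not taken from the real goldens.json):
# [
#   {
#     "original_code": "def add(a,b):\n  return a+b",
#     "new_code": "def add(a, b):\n    return a + b",
#     "original_tests": "assert add(1,2)==3",
#     "new_tests": "assert add(1, 2) == 3"
#   }
# ]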
import json
from main import fixup_function
from main import fixup_tests
with open('goldens.json') as golden_file:
goldens = json.load(golden_file)
def test_everything():
for golden in goldens:
assert fixup_function(golden['original_code']) == golden['new_code']
assert fixup_tests(golden['original_tests']) == golden['new_tests']
|
StarcoderdataPython
|
1747295
|
#!/usr/bin/env python
#
# Copyright 2015 Airbus
# Copyright 2017 Fraunhofer Institute for Manufacturing Engineering and Automation (IPA)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import rospy
from python_qt_binding.QtGui import *
from python_qt_binding.QtCore import *
from airbus_pyqt_extend.QtAgiCore import loadRsc
rsc = loadRsc("airbus_pyqt_extend")
## @package: message_box
##
## @version 1.0
## @author <NAME>
## @date Last modified 22/03/2016
## @class QPopup
## @brief Popup object.
## @class MessageBox
## @brief Class that redefines QMessageBox.
class QAgiMessageBox(QMessageBox):
STYLE = "QLabel{font-size: 18pt; font-weight:40; color: #000000;} \
QPushButton{ background-color:qlineargradient(x1: 0, \
y1: 0, \
x2: 0, \
y2: 1, \
stop: 0 #2ca1cf, \
stop: 1 #0482bb); \
border: 2px #616763; border-radius: 5px; \
font-size: 16pt; font-weight:40; color: #000000;\
width:100px; \
height:40px}"
INFO = QMessageBox.Information
WARN = QMessageBox.Warning
CRITICAL = QMessageBox.Critical
QUESTION = QMessageBox.Question
def __init__(self, type=None, msg=None):
"""! The constructor."""
QMessageBox.__init__(self)
self.setWindowFlags(Qt.FramelessWindowHint)
self.setMinimumSize(QSize(600,300))
style_base = ""
if type == QMessageBox.Information:
self.setIcon(QMessageBox.Information)
style_base = "QMessageBox{ border: 2px solid bleu;}"
elif type == QMessageBox.Warning:
self.setIcon(QMessageBox.Warning)
style_base = "QMessageBox{ border: 2px solid yellow;}"
elif type == QMessageBox.Critical:
self.setIcon(QMessageBox.Critical)
style_base = "QMessageBox{ border: 2px solid red;}"
elif type == QMessageBox.Question:
self.setIcon(QMessageBox.Question)
style_base = "QMessageBox{ border: 2px solid blue;}"
else:
self.setIcon(QMessageBox.NoIcon)
self.setStyleSheet(style_base+self.STYLE)
if msg is not None:
self.setText(msg)
def setStyle(self, style):
self.STYLE = style
#End of file
|
StarcoderdataPython
|
1714828
|
s1 = "<KEY>"
s2 = "<KEY>"
# Take 2 strings s1 and s2 including only letters from a to z.
# Return a new sorted string, the longest possible, containing distinct letters.
# This version uses bitwise operations.
def longest(s1, s2):
visited = 0
s = s1 + s2
result = ""
for i in range(len(s)):
shift = ord(s[i]) - ord('a')
if visited & (1 << shift) == 0:
visited = visited | (1 << shift)
for shift in range(26):
if ((visited >> shift) & 1 == 1):
result += chr(shift + ord('a'))
return result
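# Illustrative usage (editor's examples, results computed by hand):
#   longest("xyaabbbccccdefww", "xxxxyyyyabklmopq")  -> "abcdefklmopqwxy"
#   longest("aretheyhere", "yestheyarehere")         -> "aehrsty"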
|
StarcoderdataPython
|
3394923
|
import os
import sys
import subprocess
import pickle
import time
import pandas as pd
import numpy as np
from datetime import datetime
from scipy.sparse import csr_matrix
from implicit.als import AlternatingLeastSquares
class Recommender:
def __init__(self, **args):
self.TRAINING_THREADS = int(args.get("training_threads", os.cpu_count()))
self.ALS_FACTORS = args.get("als_factors", 128)
self.ALS_REGULARIZATION = args.get("als_regularization", 1e-2)
self.ALS_ITERATIONS = args.get("als_iterations", 15)
self.MIN_POST_FAVS = args.get("min_post_favs", 5)
self.MIN_USER_FAVS = args.get("min_user_favs", 50)
self.MAX_FAVS = args.get("max_favs", 1e12)
self.FAVS_PATH = args.get("favs_path", "data/favs.csv")
self.MODEL_PATH = args.get("model_path", "data/recommender.pickle")
self.DATABASE_URL = args.get("database_url", "postgresql://localhost/danbooru2")
@staticmethod
def create(**args):
env = { name.lower(): value for name, value in os.environ.items() }
args = { **env, **args }
recommender = Recommender(**args)
recommender.dump_favorites()
recommender.load_favorites()
recommender.train()
recommender.save(recommender.MODEL_PATH)
return recommender
@staticmethod
def load(model_path):
with open(model_path, "rb") as file:
return pickle.load(file)
def dump_favorites(self):
query = f"""
SELECT
post_id,
user_id
FROM favorites
WHERE
post_id IN (SELECT id FROM posts WHERE fav_count > {self.MIN_POST_FAVS})
AND user_id IN (SELECT id FROM users WHERE favorite_count > {self.MIN_USER_FAVS})
ORDER BY post_id DESC
LIMIT {self.MAX_FAVS}
"""
self.shell(f"psql --no-psqlrc -c '\copy ({query}) TO STDOUT WITH (FORMAT CSV)' {self.DATABASE_URL} > {self.FAVS_PATH}")
def load_favorites(self):
favs_df = pd.read_csv(self.FAVS_PATH, dtype=np.int32, names=["post_id", "user_id"])
favs_df = favs_df.astype("category")
self.favorites = csr_matrix((np.ones(favs_df.shape[0]), (favs_df["post_id"].cat.codes.copy(), favs_df["user_id"].cat.codes.copy())), dtype=np.int32)
self.users_to_id = { k: v for v, k in enumerate(favs_df["user_id"].cat.categories) }
self.posts_to_id = { k: v for v, k in enumerate(favs_df["post_id"].cat.categories) }
self.ids_to_post = { k: v for v, k in self.posts_to_id.items() }
self.empty = csr_matrix(self.favorites.shape)
def train(self):
self.model = AlternatingLeastSquares(
calculate_training_loss=True,
dtype=np.float32,
num_threads=self.TRAINING_THREADS,
factors=self.ALS_FACTORS,
regularization=self.ALS_REGULARIZATION,
iterations=self.ALS_ITERATIONS
)
start = time.monotonic()
self.model.fit(self.favorites)
end = time.monotonic()
dur = int(end - start)
self.favorites = None
self.trained_at = datetime.utcnow().isoformat()
self.training_time = "{:02d}:{:02d}:{:02d}".format(dur // 3600, (dur % 3600 // 60), dur % 60)
def recommend_for_user(self, user_id, limit=50):
if not user_id in self.users_to_id:
return []
uid = self.users_to_id[user_id]
recommendations = self.model.recommend(uid, self.empty, N=limit)
recommendations = [(self.ids_to_post[id], float(score)) for id, score in recommendations]
return recommendations
def recommend_for_post(self, post_id, limit=50):
if not post_id in self.posts_to_id:
return []
pid = self.posts_to_id[post_id]
recommendations = self.model.similar_items(pid, N=limit)
recommendations = [(self.ids_to_post[id], float(score)) for id, score in recommendations]
return recommendations
def metrics(self):
return {
"user_count": len(self.users_to_id),
"post_count": len(self.posts_to_id),
"factors": self.model.factors,
"model_size": 4 * self.model.factors * (len(self.users_to_id) + len(self.posts_to_id)),
"trained_at": self.trained_at,
"training_time": self.training_time,
}
def save(self, model_path):
with open(model_path, "wb") as file:
pickle.dump(self, file)
def shell(self, cmd):
subprocess.run(cmd, stdout=sys.stdout, stderr=sys.stderr, shell=True, check=True)
|
StarcoderdataPython
|
73264
|
import asyncio
import random
import pytest
import uuid
from collections import defaultdict
import aiotask_context as context
@asyncio.coroutine
def dummy3():
yield from asyncio.sleep(random.uniform(0, 2))
return context.get("key")
@asyncio.coroutine
def dummy2(a, b):
yield from asyncio.sleep(random.uniform(0, 2))
res = context.get("key")
yield from asyncio.sleep(random.uniform(0, 2))
res1 = yield from dummy3()
assert res == res1
return a, b, res
@asyncio.coroutine
def dummy1(n_tasks):
context.set("key", str(uuid.uuid4()))
tasks = [
asyncio.ensure_future(
dummy2(id(context.asyncio_current_task()), n)) for n in range(n_tasks)]
results = yield from asyncio.gather(*tasks)
info = defaultdict(list)
for taskid, n, key in results:
info[key].append([taskid, n])
return info
@pytest.mark.asyncio
@asyncio.coroutine
def test_ensure_future_concurrent():
n_tasks = 10
results = yield from asyncio.gather(*[dummy1(n_tasks=n_tasks) for x in range(1000)])
for r in results:
assert len(r) == 1
for key, value in r.items():
assert len(value) == n_tasks
@pytest.mark.asyncio
@asyncio.coroutine
def test_ensurefuture_context_propagation():
context.set("key", "value")
@asyncio.coroutine
def change_context():
assert context.get("key") == "value"
context.set("key", "what")
context.set("other", "data")
yield from asyncio.ensure_future(change_context())
assert context.get("key") == "what"
assert context.get("other") == "data"
@pytest.mark.asyncio
@asyncio.coroutine
def test_waitfor_context_propagation():
context.set("key", "value")
@asyncio.coroutine
def change_context():
assert context.get("key") == "value"
context.set("key", "what")
context.set("other", "data")
yield from asyncio.wait_for(change_context(), 1)
assert context.get("key") == "what"
assert context.get("other") == "data"
@pytest.mark.asyncio
@asyncio.coroutine
def test_gather_context_propagation():
context.set("key", "value")
@asyncio.coroutine
def change_context():
assert context.get("key") == "value"
context.set("key", "what")
context.set("other", "data")
yield from asyncio.gather(change_context())
assert context.get("key") == "what"
assert context.get("other") == "data"
|
StarcoderdataPython
|
1786476
|
<filename>src_2/server/batch_server.py<gh_stars>0
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter server."""
# Python version 3.7
# install grpc dependencies
# https://grpc.io/docs/quickstart/python/
# python batch_server.py
from concurrent import futures
import logging
import json
import math
import grpc
import batch_request_pb2
import batch_request_pb2_grpc
def file_selector(benchmark_type_val):
NDBench_testing_json = "workload_data_json/NDBench-testing.json"
NDBench_training_json = "workload_data_json/NDBench-training.json"
DVD_testing_json = "workload_data_json/DVD-testing.json"
DVD_training_json = "workload_data_json/DVD-training.json"
switcher = {
1: NDBench_testing_json,
2: NDBench_training_json,
3: DVD_testing_json,
4: DVD_training_json
}
return switcher.get(benchmark_type_val, "null")
def workloadMetric_selector(workloadMetric_val):
switcher = {
1: "CPU",
2: "NetworkIn",
3: "NetworkOut",
4: "Memory"
}
return switcher.get(workloadMetric_val, "null")
def generate_batch_data_response(user_request):
# Client Request data
rfwId = user_request['rfwId'] # 1. RFWID
# Benchmark -> \n1 - NDBench_Test, \n2 - NDBench_Train, \n3 - DVD_Test, \n4 - DVD_Train (matches file_selector above)
benchmarkType = user_request['benchmarkType']
# Workload Metric -> \n1 - CPU, \n2 - NetworkIn, \n3 - NetworkOut, \n4 - Memory
workloadMetric = user_request['workloadMetric'] # 3. CPU / NetworkIn
batchUnit = user_request['batchUnit'] # number of samples
batchId = user_request['batchId'] # 5th batch or 6th batch etc
batchSize = user_request['batchSize'] # how many batches to return
data = -1
response_start_line = -1
response_end_line = -1
response_lines_size = -1
try:
with open(file_selector(benchmarkType), 'r') as f:
data = json.load(f)
total_number_of_batches = math.floor(len(data) / batchUnit)
length_of_json_data_files = len(data)
response_start_line = (batchUnit * (batchId - 1))
response_end_line = (batchUnit * ((batchId - 1) + batchSize) - 1)
response_lines_size = (batchUnit * batchSize)
last_batch_id = (batchId - 1) + batchSize
# calculation_tester(data, batchId, last_batch_id, length_of_json_data_files, response_end_line,
# response_lines_size, response_start_line, total_number_of_batches)
f.close()
# try:
# with open("response_data/clientID_response.json", 'w') as file:
# response_data = data[response_start_line:response_end_line + 1]
# json.dump(response_data, file)
#
# except:
# print(f"Error writing response to file")
# building response packet
response_data = [data[i][workloadMetric_selector(workloadMetric)] for i in
range(response_start_line, (response_end_line + 1))]
# response_data = data[response_start_line:response_end_line + 1]
response_data_str = json.dumps(response_data)
response = [rfwId, last_batch_id, str(response_data_str)]
# send response packet to client
return response
except Exception as exc:
print(f"Error reading json file: {exc}")
class Batch(batch_request_pb2_grpc.batchServicer):
def getBatch(self, request, context):
request_json = {
"rfwId": request.rfwId,
"benchmarkType": request.benchmarkType,
"workloadMetric": request.workloadMetric,
"batchUnit": request.batchUnit,
"batchId": request.batchId,
"batchSize": request.batchSize
}
# print(generate_batch_data_response(request_json))
# Compute the response once instead of re-reading the workload file three times.
response = generate_batch_data_response(request_json)
return batch_request_pb2.batch_response(rfwId=response[0],
last_batch_id=response[1],
response_batch=response[2])
def serve():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
batch_request_pb2_grpc.add_batchServicer_to_server(Batch(), server)
server.add_insecure_port('[::]:50051')
server.start()
server.wait_for_termination()
if __name__ == '__main__':
logging.basicConfig()
serve()
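# --- Hypothetical client sketch (added illustration, not part of this file) ---
# Field names mirror the servicer above; the request message name `batch_request`
# and the stub name `batchStub` are assumptions about the generated .proto code,
# adjust them to the actual definitions.
#
# import grpc
# import batch_request_pb2
# import batch_request_pb2_grpc
#
# with grpc.insecure_channel('localhost:50051') as channel:
#     stub = batch_request_pb2_grpc.batchStub(channel)
#     reply = stub.getBatch(batch_request_pb2.batch_request(
#         rfwId=1, benchmarkType=1, workloadMetric=1,
#         batchUnit=100, batchId=1, batchSize=2))
#     print(reply.rfwId, reply.last_batch_id, reply.response_batch)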
|
StarcoderdataPython
|
3265738
|
<reponame>osmante/freeCodeCamp_Courses
import tensorflow as tf
import numpy as np
def predict_with_model(model, imgpath):
"""
Predict an image to which class it belongs
Parameters:
model: tensorflow model
imgpath: to be predicted image path (str)
Returns:
prediction: predicted class (int)
"""
image = tf.io.read_file(imgpath)
image = tf.image.decode_png(image, channels = 3)
image = tf.image.convert_image_dtype(image, tf.float32) # scales [0 - 1]
image = tf.image.resize(image, [60, 60]) # (60, 60, 3)
image = tf.expand_dims(image, axis = 0) # (1, 60, 60, 3)
predictions = model.predict(image)
prediction = np.argmax(predictions) # max probability index
# Because the data generator reads the class names as strings,
# the predicted index does not match the numeric class order in the
# predictions array. For example, prediction index 2 does not mean
# class 2; it means class 10.
# To resolve this, a prediction dictionary is created below.
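# Illustration (hypothetical 12-class case): sorted lexicographically, the class
# names become ['0', '1', '10', '11', '2', ...], so argmax index 2 actually
# corresponds to class 10; the dictionary below undoes that ordering.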
classes_int = range(len(np.squeeze(predictions)))
classes_str = [str(i) for i in classes_int]
classes_str.sort()
prediction_dict = {}
for i, j in zip(classes_int, classes_str):
prediction_dict[i] = int(j)
prediction = prediction_dict[prediction]
return prediction
if __name__ == '__main__':
img_path = "D:\\Osman\\Datasets\\" \
"TrafficSignDataset\\data\\test\\2\\00409.png"
img_path = "D:\\Osman\\Datasets\\" \
"TrafficSignDataset\\data\\test\\0\\00807.png"
model = tf.keras.models.load_model('./TrafficSignModels')
prediction = predict_with_model(model, img_path)
print(f"prediction: {prediction}")
|
StarcoderdataPython
|
1638858
|
<gh_stars>1-10
class RoutingRulesAdditionalFields:
USERS = "users"
CASE_TYPES = "case_types"
FLAGS = "flags"
COUNTRY = "country"
choices = [
(USERS, "Users"),
(CASE_TYPES, "Case Types"),
(FLAGS, "flags"),
(COUNTRY, "Country"),
]
class StatusAction:
DEACTIVATE = "deactivate"
REACTIVATE = "reactivate"
|
StarcoderdataPython
|
3218586
|
<filename>demos/kitchen_sink/main.py
# -*- coding: utf-8 -*-
import os
import sys
sys.path.append(os.path.abspath(__file__).split('demos')[0])
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.lang import Builder
from kivy.properties import ObjectProperty
from kivy.uix.image import Image
from kivy.uix.modalview import ModalView
from kivy.utils import get_hex_from_color
from kivymd.bottomsheet import MDListBottomSheet, MDGridBottomSheet
from kivymd.button import MDIconButton
from kivymd.date_picker import MDDatePicker
from kivymd.dialog import MDInputDialog, MDDialog
from kivymd.list import ILeftBody, ILeftBodyTouch, IRightBodyTouch
from kivymd.material_resources import DEVICE_TYPE
from kivymd.selectioncontrols import MDCheckbox
from kivymd.snackbar import Snackbar
from kivymd.theme_picker import MDThemePicker
from kivymd.theming import ThemeManager
from kivymd.time_picker import MDTimePicker
from kivymd.card import MDCardPost
from kivymd.filemanager import MDFileManager
from kivymd.progressloader import MDProgressLoader
from kivymd.stackfloatingbuttons import MDStackFloatingButtons
from kivymd.useranimationcard import MDUserAnimationCard
from kivymd.accordionlistitem import MDAccordionListItem
# FIXME: crashes with Python 3.
try:
from kivymd.toast import toast
except TypeError:
from kivymd.toast.kivytoast import toast
main_widget_kv = """
#:import Clock kivy.clock.Clock
#:import get_hex_from_color kivy.utils.get_hex_from_color
#:import get_color_from_hex kivy.utils.get_color_from_hex
#:import images_path kivymd.images_path
#:import Toolbar kivymd.toolbar.Toolbar
#:import ThemeManager kivymd.theming.ThemeManager
#:import MDNavigationDrawer kivymd.navigationdrawer.MDNavigationDrawer
#:import NavigationLayout kivymd.navigationdrawer.NavigationLayout
#:import NavigationDrawerToolbar kivymd.navigationdrawer.NavigationDrawerToolbar
#:import NavigationDrawerSubheader kivymd.navigationdrawer.NavigationDrawerSubheader
#:import MDCheckbox kivymd.selectioncontrols.MDCheckbox
#:import MDSwitch kivymd.selectioncontrols.MDSwitch
#:import MDList kivymd.list.MDList
#:import OneLineListItem kivymd.list.OneLineListItem
#:import TwoLineListItem kivymd.list.TwoLineListItem
#:import ThreeLineListItem kivymd.list.ThreeLineListItem
#:import OneLineAvatarListItem kivymd.list.OneLineAvatarListItem
#:import OneLineIconListItem kivymd.list.OneLineIconListItem
#:import OneLineAvatarIconListItem kivymd.list.OneLineAvatarIconListItem
#:import MDTextField kivymd.textfields.MDTextField
#:import MDTextFieldRect kivymd.textfields.MDTextFieldRect
#:import MDTextFieldClear kivymd.textfields.MDTextFieldClear
#:import MDSpinner kivymd.spinner.MDSpinner
#:import MDCard kivymd.card.MDCard
#:import MDRectangleFlatButton kivymd.button.MDRectangleFlatButton
#:import MDRoundFlatButton kivymd.button.MDRoundFlatButton
#:import MDRoundFlatIconButton kivymd.button.MDRoundFlatIconButton
#:import MDRectangleFlatIconButton kivymd.button.MDRectangleFlatIconButton
#:import MDTextButton kivymd.button.MDTextButton
#:import MDSeparator kivymd.card.MDSeparator
#:import MDDropdownMenu kivymd.menu.MDDropdownMenu
#:import colors kivymd.color_definitions.colors
#:import SmartTile kivymd.grid.SmartTile
#:import MDSlider kivymd.slider.MDSlider
#:import MDTabbedPanel kivymd.tabs.MDTabbedPanel
#:import MDTab kivymd.tabs.MDTab
#:import MDProgressBar kivymd.progressbar.MDProgressBar
#:import MDAccordion kivymd.accordion.MDAccordion
#:import MDAccordionItem kivymd.accordion.MDAccordionItem
#:import MDAccordionSubItem kivymd.accordion.MDAccordionSubItem
#:import MDBottomNavigation kivymd.tabs.MDBottomNavigation
#:import MDBottomNavigationItem kivymd.tabs.MDBottomNavigationItem
#:import MDUpdateSpinner kivymd.updatespinner.MDUpdateSpinner
<ContentForAnimCard>:
orientation: 'vertical'
padding: dp(10)
spacing: dp(10)
size_hint_y: None
height: self.minimum_height
BoxLayout:
size_hint_y: None
height: self.minimum_height
Widget:
MDRoundFlatButton:
text: "Free call"
on_press: root.callback(self.text)
Widget:
MDRoundFlatButton:
text: "Free message"
on_press: root.callback(self.text)
Widget:
OneLineIconListItem:
text: "Video call"
on_press: root.callback(self.text)
IconLeftSampleWidget:
icon: 'camera-front-variant'
TwoLineIconListItem:
text: "Call Viber Out"
on_press: root.callback(self.text)
secondary_text:
"[color=%s]Advantageous rates for calls[/color]" \
% get_hex_from_color(app.theme_cls.primary_color)
# FIXME: Don't work "secondary_text_color" parameter
# secondary_text_color: app.theme_cls.primary_color
IconLeftSampleWidget:
icon: 'phone'
TwoLineIconListItem:
text: "Call over mobile network"
on_press: root.callback(self.text)
secondary_text:
"[color=%s]Operator's tariffs apply[/color]" \
% get_hex_from_color(app.theme_cls.primary_color)
IconLeftSampleWidget:
icon: 'remote'
<ContentNavigationDrawer@MDNavigationDrawer>:
drawer_logo: './assets/drawer_logo.png'
NavigationDrawerSubheader:
text: "Menu of Examples:"
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Accordion"
on_release: app.root.ids.scr_mngr.current = 'accordion'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Accordion List"
on_release: app.root.ids.scr_mngr.current = 'accordion list'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Bottom Navigation"
on_release: app.root.ids.scr_mngr.current = 'bottom_navigation'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Bottom Sheets"
on_release: app.root.ids.scr_mngr.current = 'bottomsheet'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Buttons"
on_release: app.root.ids.scr_mngr.current = 'button'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Cards"
on_release: app.root.ids.scr_mngr.current = 'card'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Dialogs"
on_release: app.root.ids.scr_mngr.current = 'dialog'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Download File"
on_release: app.root.ids.scr_mngr.current = 'download file'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Files Manager"
on_release: app.root.ids.scr_mngr.current = 'files manager'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Grid lists"
on_release: app.root.ids.scr_mngr.current = 'grid'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Labels"
on_release: app.root.ids.scr_mngr.current = 'labels'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Lists"
on_release: app.root.ids.scr_mngr.current = 'list'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Menus"
on_release: app.root.ids.scr_mngr.current = 'menu'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Pickers"
on_release: app.root.ids.scr_mngr.current = 'pickers'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Progress & activity"
on_release: app.root.ids.scr_mngr.current = 'progress'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Progress bars"
on_release: app.root.ids.scr_mngr.current = 'progressbars'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Selection controls"
on_release: app.root.ids.scr_mngr.current = 'selectioncontrols'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Sliders"
on_release: app.root.ids.scr_mngr.current = 'slider'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Stack Floating Buttons"
on_release: app.root.ids.scr_mngr.current = 'stack buttons'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Snackbars"
on_release: app.root.ids.scr_mngr.current = 'snackbar'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Tabs"
on_release: app.root.ids.scr_mngr.current = 'tabs'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Text fields"
on_release: app.root.ids.scr_mngr.current = 'textfields'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Themes"
on_release: app.root.ids.scr_mngr.current = 'theming'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Toolbars"
on_release: app.root.ids.scr_mngr.current = 'toolbar'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "Update Screen Widget"
on_release: app.root.ids.scr_mngr.current = 'update spinner'
NavigationDrawerIconButton:
icon: 'checkbox-blank-circle'
text: "User Animation Card"
on_release: app.root.ids.scr_mngr.current = 'user animation card'
NavigationLayout:
id: nav_layout
ContentNavigationDrawer:
id: nav_drawer
BoxLayout:
orientation: 'vertical'
Toolbar:
id: toolbar
title: app.title
md_bg_color: app.theme_cls.primary_color
background_palette: 'Primary'
background_hue: '500'
elevation: 10
left_action_items:
[['menu', lambda x: app.root.toggle_nav_drawer()]]
right_action_items:
[['dots-vertical', lambda x: app.root.toggle_nav_drawer()]]
ScreenManager:
id: scr_mngr
Screen:
name: 'previous'
FloatLayout:
Image:
source: '{}kivymd_logo.png'.format(images_path)
opacity: .3
MDLabel:
text: app.previous_text
size_hint_y: None
font_style: 'Subhead'
theme_text_color: 'Primary'
markup: True
halign: 'center'
text_size: self.width - 20, None
pos_hint: {'center_x': .5, 'center_y': .6}
###################################################################
#
# BOTTOM SHEET
#
###################################################################
Screen:
name: 'bottomsheet'
MDRaisedButton:
text: "Open list bottom sheet"
opposite_colors: True
size_hint: None, None
size: 4 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.6}
on_release: app.show_example_bottom_sheet()
MDRaisedButton:
text: "Open grid bottom sheet"
opposite_colors: True
size_hint: None, None
size: 4 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.3}
on_release: app.show_example_grid_bottom_sheet()
###################################################################
#
# BUTTONS
#
###################################################################
Screen:
name: 'button'
BoxLayout:
size_hint_y: None
height: '56'
spacing: '10dp'
pos_hint: {'center_y': .9}
Widget:
MDIconButton:
icon: 'sd'
MDFloatingActionButton:
icon: 'plus'
opposite_colors: True
elevation_normal: 8
MDFloatingActionButton:
icon: 'check'
opposite_colors: True
elevation_normal: 8
md_bg_color: app.theme_cls.primary_color
MDIconButton:
icon: 'sd'
theme_text_color: 'Custom'
text_color: app.theme_cls.primary_color
Widget:
MDFlatButton:
text: 'MDFlatButton'
pos_hint: {'center_x': 0.5, 'center_y': .75}
MDRaisedButton:
text: "MDRaisedButton"
elevation_normal: 2
opposite_colors: True
pos_hint: {'center_x': 0.5, 'center_y': .65}
MDRectangleFlatButton:
text: "MDRectangleFlatButton"
pos_hint: {'center_x': 0.5, 'center_y': .55}
MDRectangleFlatIconButton:
text: "MDRectangleFlatIconButton"
icon: "language-python"
pos_hint: {'center_x': 0.5, 'center_y': .45}
width: dp(230)
MDRoundFlatButton:
text: "MDROUNDFLATBUTTON"
pos_hint: {'center_x': 0.5, 'center_y': .35}
MDRoundFlatIconButton:
text: "MDRoundFlatIconButton"
icon: "language-python"
pos_hint: {'center_x': 0.5, 'center_y': .25}
width: dp(200)
MDFillRoundFlatButton:
text: "MDFillRoundFlatButton"
pos_hint: {'center_x': 0.5, 'center_y': .15}
MDTextButton:
text: "MDTextButton"
pos_hint: {'center_x': 0.5, 'center_y': .05}
###################################################################
#
# CARDS
#
###################################################################
Screen:
name: 'card'
on_enter: app.add_cards(grid_card)
ScrollView:
id: scroll
size_hint: 1, 1
do_scroll_x: False
GridLayout:
id: grid_card
cols: 1
spacing: dp(5)
padding: dp(5)
size_hint_y: None
height: self.minimum_height
# See how to add a card with the menu and others
# in the add_cards function.
###################################################################
#
# DOWNLOAD FILE
#
###################################################################
Screen:
name: 'download file'
FloatLayout:
id: box_flt
MDRaisedButton:
text: "Download file"
size_hint: None, None
size: 3 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
opposite_colors: True
on_release:
Clock.schedule_once(\
app.show_example_download_file, .1)
###################################################################
#
# DIALOGS
#
###################################################################
Screen:
name: 'dialog'
MDRaisedButton:
text: "Open lengthy dialog"
size_hint: None, None
size: 3 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.8}
opposite_colors: True
on_release: app.show_example_long_dialog()
MDRaisedButton:
text: "Open input dialog"
size_hint: None, None
size: 3 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.6}
opposite_colors: True
on_release: app.show_example_input_dialog()
MDRaisedButton:
text: "Open Alert Dialog"
size_hint: None, None
size: 3 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.4}
opposite_colors: True
on_release: app.show_example_alert_dialog()
MDRaisedButton:
text: "Open Ok Cancel Dialog"
size_hint: None, None
size: 3 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.2}
opposite_colors: True
on_release: app.show_example_ok_cancel_dialog()
###################################################################
#
# GRID
#
###################################################################
Screen:
name: 'grid'
ScrollView:
do_scroll_x: False
GridLayout:
cols: 3
row_default_height:
(self.width - self.cols*self.spacing[0])/self.cols
row_force_default: True
size_hint_y: None
height: self.minimum_height
padding: dp(4), dp(4)
spacing: dp(4)
SmartTileWithLabel:
mipmap: True
source: './assets/african-lion-951778_1280.jpg'
text: "African Lion"
SmartTile:
mipmap: True
source: './assets/beautiful-931152_1280.jpg'
SmartTile:
mipmap: True
source: './assets/african-lion-951778_1280.jpg'
SmartTile:
mipmap: True
source: './assets/guitar-1139397_1280.jpg'
SmartTile:
mipmap: True
source: './assets/robin-944887_1280.jpg'
SmartTile:
mipmap: True
source: './assets/kitten-1049129_1280.jpg'
SmartTile:
mipmap: True
source: './assets/light-bulb-1042480_1280.jpg'
SmartTile:
mipmap: True
source: './assets/tangerines-1111529_1280.jpg'
###################################################################
#
# LABELS
#
###################################################################
Screen:
name: 'labels'
ScrollView:
do_scroll_x: False
BoxLayout:
orientation: 'vertical'
size_hint_y: None
height: dp(1000)
BoxLayout:
MDLabel:
font_style: 'Body1'
theme_text_color: 'Primary'
text: "Body1 label"
halign: 'center'
MDLabel:
font_style: 'Body2'
theme_text_color: 'Primary'
text: "Body2 label"
halign: 'center'
BoxLayout:
MDLabel:
font_style: 'Caption'
theme_text_color: 'Primary'
text: "Caption label"
halign: 'center'
MDLabel:
font_style: 'Subhead'
theme_text_color: 'Primary'
text: "Subhead label"
halign: 'center'
BoxLayout:
MDLabel:
font_style: 'Title'
theme_text_color: 'Primary'
text: "Title label"
halign: 'center'
MDLabel:
font_style: 'Headline'
theme_text_color: 'Primary'
text: "Headline label"
halign: 'center'
MDLabel:
font_style: 'Display1'
theme_text_color: 'Primary'
text: "Display1 label"
halign: 'center'
size_hint_y: None
height: self.texture_size[1] + dp(4)
MDLabel:
font_style: 'Display2'
theme_text_color: 'Primary'
text: "Display2 label"
halign: 'center'
size_hint_y: None
height: self.texture_size[1] + dp(4)
MDLabel:
font_style: 'Display3'
theme_text_color: 'Primary'
text: "Display3 label"
halign: 'center'
size_hint_y: None
height: self.texture_size[1] + dp(4)
MDLabel:
font_style: 'Display4'
theme_text_color: 'Primary'
text: "Display4 label"
halign: 'center'
size_hint_y: None
height: self.texture_size[1] + dp(4)
BoxLayout:
MDLabel:
font_style: 'Body1'
theme_text_color: 'Primary'
text: "Primary color"
halign: 'center'
MDLabel:
font_style: 'Body1'
theme_text_color: 'Secondary'
text: "Secondary color"
halign: 'center'
BoxLayout:
MDLabel:
font_style: 'Body1'
theme_text_color: 'Hint'
text: "Hint color"
halign: 'center'
MDLabel:
font_style: 'Body1'
theme_text_color: 'Error'
text: "Error color"
halign: 'center'
MDLabel:
font_style: 'Body1'
theme_text_color: 'Custom'
text_color: (0,1,0,.4)
text: "Custom"
halign: 'center'
###################################################################
#
# LISTS
#
###################################################################
Screen:
name: 'list'
ScrollView:
do_scroll_x: False
MDList:
id: ml
OneLineListItem:
text: "One-line item"
TwoLineListItem:
text: "Two-line item"
secondary_text: "Secondary text here"
ThreeLineListItem:
text: "Three-line item"
secondary_text:
"This is a multi-line label where you can " \
"fit more text than usual"
OneLineAvatarListItem:
text: "Single-line item with avatar"
AvatarSampleWidget:
source: './assets/avatar.png'
TwoLineAvatarListItem:
type: "two-line"
text: "Two-line item..."
secondary_text: "with avatar"
AvatarSampleWidget:
source: './assets/avatar.png'
ThreeLineAvatarListItem:
type: "three-line"
text: "Three-line item..."
secondary_text:
"...with avatar..." + '\\n' + "and third line!"
AvatarSampleWidget:
source: './assets/avatar.png'
OneLineIconListItem:
text: "Single-line item with left icon"
IconLeftSampleWidget:
id: li_icon_1
icon: 'star-circle'
TwoLineIconListItem:
text: "Two-line item..."
secondary_text: "...with left icon"
IconLeftSampleWidget:
id: li_icon_2
icon: 'comment-text'
ThreeLineIconListItem:
text: "Three-line item..."
secondary_text:
"...with left icon..." + '\\n' + "and " \
"third line!"
IconLeftSampleWidget:
id: li_icon_3
icon: 'sd'
OneLineAvatarIconListItem:
text: "Single-line + avatar&icon"
AvatarSampleWidget:
source: './assets/avatar.png'
IconRightSampleWidget:
TwoLineAvatarIconListItem:
text: "Two-line item..."
secondary_text: "...with avatar&icon"
AvatarSampleWidget:
source: './assets/avatar.png'
IconRightSampleWidget:
ThreeLineAvatarIconListItem:
text: "Three-line item..."
secondary_text:
"...with avatar&icon..." + '\\n' + "and " \
"third line!"
AvatarSampleWidget:
source: './assets/avatar.png'
IconRightSampleWidget:
###################################################################
#
# ACCORDION LIST
#
###################################################################
Screen:
name: 'accordion list'
on_enter: app.set_accordion_list()
on_leave: anim_list.clear_widgets()
ScrollView:
GridLayout:
id: anim_list
cols: 1
size_hint_y: None
height: self.minimum_height
###################################################################
#
# FILES MANAGER
#
# See the help on using the file in the file filemanager.py
#
###################################################################
Screen:
name: 'files manager'
MDRaisedButton:
size_hint: None, None
size: 3 * dp(48), dp(48)
text: 'Open files manager'
opposite_colors: True
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
on_release: app.file_manager_open()
###################################################################
#
# MENUS
#
###################################################################
Screen:
name: 'menu'
MDRaisedButton:
size_hint: None, None
size: 3 * dp(48), dp(48)
text: 'Open menu'
opposite_colors: True
pos_hint: {'center_x': 0.2, 'center_y': 0.9}
on_release:
MDDropdownMenu(\
items=app.menu_items, width_mult=3).open(self)
MDRaisedButton:
size_hint: None, None
size: 3 * dp(48), dp(48)
text: 'Open menu'
opposite_colors: True
pos_hint: {'center_x': 0.2, 'center_y': 0.1}
on_release:
MDDropdownMenu(\
items=app.menu_items, width_mult=3).open(self)
MDRaisedButton:
size_hint: None, None
size: 3 * dp(48), dp(48)
text: 'Open menu'
opposite_colors: True
pos_hint: {'center_x': 0.8, 'center_y': 0.1}
on_release:
MDDropdownMenu(\
items=app.menu_items, width_mult=3).open(self)
MDRaisedButton:
size_hint: None, None
size: 3 * dp(48), dp(48)
text: 'Open menu'
opposite_colors: True
pos_hint: {'center_x': 0.8, 'center_y': 0.9}
on_release:
MDDropdownMenu(\
items=app.menu_items, width_mult=3).open(self)
MDRaisedButton:
size_hint: None, None
size: 3 * dp(48), dp(48)
text: 'Open menu'
opposite_colors: True
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
on_release:
MDDropdownMenu(\
items=app.menu_items, width_mult=4).open(self)
###################################################################
#
# CHECKBOX
#
###################################################################
Screen:
name: 'progress'
MDCheckbox:
id: chkbox
size_hint: None, None
size: dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.4}
active: True
MDSpinner:
id: spinner
size_hint: None, None
size: dp(46), dp(46)
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
active: True if chkbox.active else False
###################################################################
#
# PROGRESS BAR
#
###################################################################
Screen:
name: 'progressbars'
BoxLayout:
orientation:'vertical'
padding: '8dp'
MDSlider:
id:progress_slider
min:0
max:100
value: 40
MDProgressBar:
value: progress_slider.value
MDProgressBar:
reversed: True
value: progress_slider.value
BoxLayout:
MDProgressBar:
orientation:"vertical"
reversed: True
value: progress_slider.value
MDProgressBar:
orientation:"vertical"
value: progress_slider.value
###################################################################
#
# UPDATE SPINNER
#
###################################################################
Screen:
name: 'update spinner'
on_enter: upd_lbl.text = "Pull down to update"
on_leave: upd_lbl.text = ""
MDLabel:
id: upd_lbl
font_style: 'Display2'
theme_text_color: 'Primary'
halign: 'center'
pos_hint: {'center_x': .5, 'center_y': .6}
size_hint_y: None
height: self.texture_size[1] + dp(4)
MDUpdateSpinner:
event_update: lambda x: app.update_screen(self)
###################################################################
#
# STACK FLOATING BUTTONS
#
###################################################################
Screen:
name: 'stack buttons'
on_enter: app.example_add_stack_floating_buttons()
###################################################################
#
# SLIDER
#
###################################################################
Screen:
name: 'slider'
BoxLayout:
MDSlider:
id: hslider
min:0
max:100
value: 10
MDSlider:
id: vslider
orientation:'vertical'
min:0
max:100
value: hslider.value
###################################################################
#
# USER ANIMATION CARD
#
###################################################################
Screen:
name: 'user animation card'
MDRaisedButton:
size_hint: None, None
size: 3 * dp(48), dp(48)
text: 'Open card'
opposite_colors: True
pos_hint: {'center_x': 0.5, 'center_y': 0.6}
on_release: app.show_user_example_animation_card()
###################################################################
#
# SELECTION CONTROLS
#
###################################################################
Screen:
name: 'selectioncontrols'
MDCheckbox:
id: grp_chkbox_1
group: 'test'
size_hint: None, None
size: dp(48), dp(48)
pos_hint: {'center_x': 0.25, 'center_y': 0.5}
MDCheckbox:
id: grp_chkbox_2
group: 'test'
size_hint: None, None
size: dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
MDSwitch:
size_hint: None, None
size: dp(36), dp(48)
pos_hint: {'center_x': 0.75, 'center_y': 0.5}
_active: False
###################################################################
#
# SNACKBAR
#
###################################################################
Screen:
name: 'snackbar'
MDRaisedButton:
text: "Create simple snackbar"
size_hint: None, None
size: 4 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.75}
opposite_colors: True
on_release: app.show_example_snackbar('simple')
MDRaisedButton:
text: "Create snackbar with button"
size_hint: None, None
size: 4 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
opposite_colors: True
on_release: app.show_example_snackbar('button')
MDRaisedButton:
text: "Create snackbar with a lot of text"
size_hint: None, None
size: 5 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.25}
opposite_colors: True
on_release: app.show_example_snackbar('verylong')
###################################################################
#
# TEXTFIELDS
#
###################################################################
Screen:
name: 'textfields'
ScrollView:
BoxLayout:
orientation: 'vertical'
size_hint_y: None
height: self.minimum_height
padding: dp(48)
spacing: 10
MDTextField:
hint_text: "No helper text"
MDTextField:
hint_text: "Helper text on focus"
helper_text:
"This will disappear when you click off"
helper_text_mode: "on_focus"
MDTextField:
hint_text: "Persistent helper text"
helper_text: "Text is always here"
helper_text_mode: "persistent"
MDTextField:
id: text_field_error
hint_text:
"Helper text on error (Hit Enter with " \
"two characters here)"
helper_text: "Two is my least favorite number"
helper_text_mode: "on_error"
MDTextField:
hint_text: "Max text length = 10"
max_text_length: 10
MDTextField:
hint_text: "required = True"
required: True
helper_text_mode: "on_error"
MDTextField:
multiline: True
hint_text: "Multi-line text"
helper_text: "Messages are also supported here"
helper_text_mode: "persistent"
MDTextField:
hint_text: "color_mode = \'accent\'"
color_mode: 'accent'
MDTextField:
hint_text: "color_mode = \'custom\'"
color_mode: 'custom'
helper_text_mode: "on_focus"
helper_text:
"Color is defined by \'line_color_focus\' " \
"property"
line_color_focus:
# This is the color used by the textfield
self.theme_cls.opposite_bg_normal
MDTextField:
hint_text: "disabled = True"
disabled: True
MDTextFieldRect:
size_hint: None, None
size: app.Window.width - dp(40), dp(30)
pos_hint: {'center_y': .5, 'center_x': .5}
MDTextFieldClear:
hint_text: "Text field with clearing type"
###################################################################
#
# THEMING
#
###################################################################
Screen:
name: 'theming'
BoxLayout:
orientation: 'vertical'
size_hint_y: None
height: dp(80)
center_y: self.parent.center_y
MDRaisedButton:
size_hint: None, None
size: 3 * dp(48), dp(48)
center_x: self.parent.center_x
text: 'Change theme'
on_release: app.theme_picker_open()
opposite_colors: True
pos_hint: {'center_x': 0.5}
MDLabel:
text:
"Current: " + app.theme_cls.theme_style + \
", " + app.theme_cls.primary_palette
theme_text_color: 'Primary'
pos_hint: {'center_x': 0.5}
halign: 'center'
###################################################################
#
# TOOLBARS
#
###################################################################
Screen:
name: 'toolbar'
Toolbar:
title: "Simple toolbar"
pos_hint: {'center_x': 0.5, 'center_y': 0.75}
md_bg_color: get_color_from_hex(colors['Teal']['500'])
background_palette: 'Teal'
background_hue: '500'
Toolbar:
title: "Toolbar with right buttons"
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
md_bg_color: get_color_from_hex(colors['Amber']['700'])
background_palette: 'Amber'
background_hue: '700'
right_action_items: [['content-copy', lambda x: None]]
Toolbar:
title: "Toolbar with left and right buttons"
pos_hint: {'center_x': 0.5, 'center_y': 0.25}
md_bg_color:
get_color_from_hex(colors['DeepPurple']['A400'])
background_palette: 'DeepPurple'
background_hue: 'A400'
left_action_items: [['arrow-left', lambda x: None]]
right_action_items: [['lock', lambda x: None], \
['camera', lambda x: None], \
['play', lambda x: None]]
###################################################################
#
# TABS
#
###################################################################
Screen:
name: 'tabs'
MDTabbedPanel:
id: tab_panel
tab_display_mode:'text'
MDTab:
name: 'music'
text: "Music" # Why are these not set!!!
icon: "playlist-play"
MDLabel:
font_style: 'Body1'
theme_text_color: 'Primary'
text: "Here is my music list :)"
halign: 'center'
MDTab:
name: 'movies'
text: 'Movies'
icon: "movie"
MDLabel:
font_style: 'Body1'
theme_text_color: 'Primary'
text: "Show movies here :)"
halign: 'center'
BoxLayout:
size_hint_y:None
height: '48dp'
padding: '12dp'
MDLabel:
font_style: 'Body1'
theme_text_color: 'Primary'
text: "Use icons"
size_hint_x:None
width: '64dp'
MDCheckbox:
on_state:
tab_panel.tab_display_mode = 'icons' \
if tab_panel.tab_display_mode=='text' else 'text'
###################################################################
#
# ACCORDION
#
###################################################################
Screen:
name: 'accordion'
BoxLayout:
MDAccordion:
orientation: 'vertical'
size_hint_x: None
width: '240dp'
MDAccordionItem:
title:'Item 1'
icon: 'home'
MDAccordionSubItem:
text: "Subitem 1"
MDAccordionSubItem:
text: "Subitem 2"
MDAccordionSubItem:
text: "Subitem 3"
MDAccordionItem:
title:'Item 2'
icon: 'earth'
MDAccordionSubItem:
text: "Subitem 4"
MDAccordionSubItem:
text: "Subitem 5"
MDAccordionSubItem:
text: "Subitem 6"
MDAccordionItem:
title:'Item 3'
icon: 'account'
MDAccordionSubItem:
text: "Subitem 7"
MDAccordionSubItem:
text: "Subitem 8"
MDAccordionSubItem:
text: "Subitem 9"
MDLabel:
text: 'Content'
halign: 'center'
theme_text_color: 'Primary'
###################################################################
#
# PICKERS
#
###################################################################
Screen:
name: 'pickers'
BoxLayout:
spacing: dp(40)
orientation: 'vertical'
size_hint_x: None
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
BoxLayout:
orientation: 'vertical'
# size_hint: (None, None)
MDRaisedButton:
text: "Open time picker"
size_hint: None, None
size: 3 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
opposite_colors: True
on_release: app.show_example_time_picker()
MDLabel:
id: time_picker_label
theme_text_color: 'Primary'
size_hint: None, None
size: dp(48)*3, dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
BoxLayout:
size: dp(48)*3, dp(48)
size_hint: (None, None)
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
MDLabel:
theme_text_color: 'Primary'
text: "Start on previous time"
size_hint: None, None
size: dp(130), dp(48)
MDCheckbox:
id: time_picker_use_previous_time
size_hint: None, None
size: dp(48), dp(48)
BoxLayout:
orientation: 'vertical'
MDRaisedButton:
text: "Open date picker"
size_hint: None, None
size: 3 * dp(48), dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
opposite_colors: True
on_release: app.show_example_date_picker()
MDLabel:
id: date_picker_label
theme_text_color: 'Primary'
size_hint: None, None
size: dp(48)*3, dp(48)
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
BoxLayout:
size: dp(48)*3, dp(48)
size_hint: (None, None)
pos_hint: {'center_x': 0.5, 'center_y': 0.5}
MDLabel:
theme_text_color: 'Primary'
text: "Start on previous date"
size_hint: None, None
size: dp(130), dp(48)
MDCheckbox:
id: date_picker_use_previous_date
size_hint: None, None
size: dp(48), dp(48)
###################################################################
#
# BOTTOM NAVIGATION
#
###################################################################
Screen:
name: 'bottom_navigation'
MDBottomNavigation:
id: bottom_navigation_demo
MDBottomNavigationItem:
name: 'octagon'
text: "Warning"
icon: "alert-octagon"
MDLabel:
font_style: 'Body1'
theme_text_color: 'Primary'
text: "Warning!"
halign: 'center'
MDBottomNavigationItem:
name: 'banking'
text: "Bank"
icon: 'bank'
BoxLayout:
orientation: 'vertical'
size_hint_y: None
padding: dp(48)
spacing: 10
MDTextField:
hint_text: "You can put any widgets here"
helper_text: "Hello :)"
helper_text_mode: "on_focus"
MDBottomNavigationItem:
name: 'bottom_navigation_desktop_1'
text: "Hello"
icon: 'alert'
id: bottom_navigation_desktop_1
BoxLayout:
orientation: 'vertical'
size_hint_y: None
padding: dp(48)
spacing: 10
MDTextField:
hint_text: "Hello again"
MDBottomNavigationItem:
name: 'bottom_navigation_desktop_2'
text: "Food"
icon: 'food'
id: bottom_navigation_desktop_2
MDLabel:
font_style: 'Body1'
theme_text_color: 'Primary'
text: "Cheese!"
halign: 'center'
"""
class KitchenSink(App):
theme_cls = ThemeManager()
theme_cls.primary_palette = 'Blue'
previous_date = ObjectProperty()
title = "Kitchen Sink"
def __init__(self, **kwargs):
super(KitchenSink, self).__init__(**kwargs)
self.menu_items = [
{'viewclass': 'MDMenuItem',
'text': 'Example item %d' % i,
'callback': self.callback_for_menu_items}
for i in range(15)
]
self.Window = Window
self.manager = False
self.md_theme_picker = None
self.long_dialog = None
self.input_dialog = None
self.alert_dialog = None
self.ok_cancel_dialog = None
self.long_dialog = None
self.dialog = None
self.user_animation_card = None
self.manager_open = False
self.cards_created = False
self.file_manager = None
self.bs_menu_1 = None
self.bs_menu_2 = None
self.tick = 0
self.create_stack_floating_buttons = False
self.previous_text = \
"Welcome to the application [b][color={COLOR}]Kitchen Sink" \
"[/color][/b].\nTo see [b][color={COLOR}]KivyMD[/color][/b] " \
"examples, open the menu and select from the list the desired " \
"example\n\n" \
"" \
"" \
"Author - [b][color={COLOR}]<NAME>[/color][/b]\n" \
"[u][b][color={COLOR}]<EMAIL>[/color]" \
"[/b][/u]\n\n" \
"Author this Fork - [b][color={COLOR}]Ivanov Yuri[/color][/b]\n" \
"[u][b][color={COLOR}]<EMAIL>[/color]" \
"[/b][u]".format(COLOR=get_hex_from_color(
self.theme_cls.primary_color))
self.names_contacts = (
'<NAME>', '<NAME>', '<NAME>', '<NAME>',
'<NAME>', '<NAME>', '<NAME>', '<NAME>',
'<NAME>', '<NAME>'
)
Window.bind(on_keyboard=self.events)
def theme_picker_open(self):
if not self.md_theme_picker:
self.md_theme_picker = MDThemePicker()
self.md_theme_picker.open()
def example_add_stack_floating_buttons(self):
def set_my_language(instance_button):
toast(instance_button.icon)
if not self.create_stack_floating_buttons:
screen = self.main_widget.ids.scr_mngr.get_screen('stack buttons')
screen.add_widget(MDStackFloatingButtons(
icon='lead-pencil',
floating_data={
'Python': 'language-python',
'Php': 'language-php',
'C++': 'language-cpp'},
callback=set_my_language))
self.create_stack_floating_buttons = True
def set_accordion_list(self):
def callback(text):
toast('{} to {}'.format(text, content.name_item))
content = ContentForAnimCard(callback=callback)
for name_contact in self.names_contacts:
self.main_widget.ids.anim_list.add_widget(
MDAccordionListItem(content=content,
icon='assets/kivymd_logo.png',
title=name_contact))
def set_chevron_back_screen(self):
"""Sets the return chevron to the previous screen in ToolBar."""
self.main_widget.ids.toolbar.right_action_items = [
['dots-vertical', lambda x: self.root.toggle_nav_drawer()]]
def download_progress_hide(self, instance_progress, value):
"""Hides progress progress."""
self.main_widget.ids.toolbar.right_action_items = \
[['download',
lambda x: self.download_progress_show(instance_progress)]]
def download_progress_show(self, instance_progress):
self.set_chevron_back_screen()
instance_progress.open()
instance_progress.animation_progress_from_fade()
def show_example_download_file(self, interval):
def get_connect(host="8.8.8.8", port=53, timeout=3):
import socket
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect(
(host, port))
return True
except Exception:
return False
if get_connect():
link = 'https://www.python.org/ftp/python/3.5.1/' \
'python-3.5.1-embed-win32.zip'
progress = MDProgressLoader(
url_on_image=link,
path_to_file=os.path.join(self.directory, 'python-3.5.1.zip'),
download_complete=self.download_complete,
download_hide=self.download_progress_hide
)
progress.start(self.main_widget.ids.box_flt)
else:
toast('Connect error!')
def download_complete(self):
self.set_chevron_back_screen()
toast('Done')
def file_manager_open(self):
def file_manager_open(text_item):
previous = False if text_item == 'List' else True
self.manager = ModalView(size_hint=(1, 1), auto_dismiss=False)
self.file_manager = MDFileManager(exit_manager=self.exit_manager,
select_path=self.select_path,
previous=previous)
self.manager.add_widget(self.file_manager)
self.file_manager.show('/') # output manager to the screen
self.manager_open = True
self.manager.open()
MDDialog(
title='Title', size_hint=(.8, .4), text_button_ok='List',
text="Open manager with 'list' or 'previous' mode?",
text_button_cancel='Previous',
events_callback=file_manager_open).open()
def select_path(self, path):
"""It will be called when you click on the file name
or the catalog selection button.
:type path: str;
:param path: path to the selected directory or file;
"""
self.exit_manager()
toast(path)
def exit_manager(self, *args):
"""Called when the user reaches the root of the directory tree."""
self.manager.dismiss()
self.manager_open = False
self.set_chevron_menu()
def set_chevron_menu(self):
self.main_widget.ids.toolbar.left_action_items = [
['menu', lambda x: self.root.toggle_nav_drawer()]]
def events(self, instance, keyboard, keycode, text, modifiers):
"""Called when buttons are pressed on the mobile device.."""
if keyboard in (1001, 27):
if self.manager_open:
self.file_manager.back()
return True
def callback_for_menu_items(self, text_item):
toast(text_item)
def add_cards(self, instance_grid_card):
def callback(instance, value):
if value and isinstance(value, int):
toast('Set like in %d stars' % value)
elif value and isinstance(value, str):
toast('Repost with %s ' % value)
elif value and isinstance(value, list):
toast(value[1])
else:
toast('Delete post %s' % str(instance))
if not self.cards_created:
self.cards_created = True
menu_items = [
{'viewclass': 'MDMenuItem',
'text': 'Example item %d' % i,
'callback': self.callback_for_menu_items}
for i in range(2)
]
buttons = ['facebook', 'vk', 'twitter']
instance_grid_card.add_widget(
MDCardPost(text_post='Card with text',
swipe=True, callback=callback))
instance_grid_card.add_widget(
MDCardPost(
right_menu=menu_items, swipe=True,
text_post='Card with a button to open the menu MDDropDown',
callback=callback))
instance_grid_card.add_widget(
MDCardPost(
likes_stars=True, callback=callback, swipe=True,
text_post='Card with asterisks for voting.'))
instance_grid_card.add_widget(
MDCardPost(
source="./assets/kitten-1049129_1280.jpg",
tile_text="Little Baby",
tile_font_style="Headline",
text_post="This is my favorite cat. He's only six months "
"old. He loves milk and steals sausages :) "
"And he likes to play in the garden.",
with_image=True, swipe=True, callback=callback,
buttons=buttons))
def update_screen(self, instance):
def update_screen(interval):
self.tick += 1
if self.tick > 2:
instance.update = True
self.tick = 0
self.main_widget.ids.upd_lbl.text = "New string"
Clock.unschedule(update_screen)
Clock.schedule_interval(update_screen, 1)
def build(self):
self.main_widget = Builder.load_string(main_widget_kv)
# self.theme_cls.theme_style = 'Dark'
self.main_widget.ids.text_field_error.bind(
on_text_validate=self.set_error_message,
on_focus=self.set_error_message)
self.bottom_navigation_remove_mobile(self.main_widget)
return self.main_widget
def bottom_navigation_remove_mobile(self, widget):
# Removes some items from bottom-navigation demo when on mobile
if DEVICE_TYPE == 'mobile':
widget.ids.bottom_navigation_demo.remove_widget(
widget.ids.bottom_navigation_desktop_2)
if DEVICE_TYPE == 'mobile' or DEVICE_TYPE == 'tablet':
widget.ids.bottom_navigation_demo.remove_widget(
widget.ids.bottom_navigation_desktop_1)
def show_user_example_animation_card(self):
def main_back_callback():
toast('Close card')
if not self.user_animation_card:
self.user_animation_card = MDUserAnimationCard(
user_name="Lion Lion",
path_to_avatar="./assets/guitar-1139397_1280.jpg",
callback=main_back_callback)
self.user_animation_card.box_content.add_widget(
ContentForAnimCard())
self.user_animation_card.open()
def show_example_snackbar(self, snack_type):
if snack_type == 'simple':
Snackbar(text="This is a snackbar!").show()
elif snack_type == 'button':
Snackbar(text="This is a snackbar", button_text="with a button!",
button_callback=lambda *args: 2).show()
elif snack_type == 'verylong':
Snackbar(text="This is a very very very very very very very "
"long snackbar!").show()
def show_example_input_dialog(self):
if not self.input_dialog:
self.input_dialog = MDInputDialog(
title='Title', hint_text='Hint text', size_hint=(.8, .4),
text_button_ok='Ok', events_callback=lambda x: None)
self.input_dialog.open()
def show_example_alert_dialog(self):
if not self.alert_dialog:
self.alert_dialog = MDDialog(
title='Title', size_hint=(.8, .4), text_button_ok='Ok',
text="This is Alert dialog",
events_callback=self.callback_for_menu_items)
self.alert_dialog.open()
def show_example_ok_cancel_dialog(self):
if not self.ok_cancel_dialog:
self.ok_cancel_dialog = MDDialog(
title='Title', size_hint=(.8, .4), text_button_ok='Ok',
text="This is Ok Cancel dialog", text_button_cancel='Cancel',
events_callback=self.callback_for_menu_items)
self.ok_cancel_dialog.open()
def show_example_long_dialog(self):
if not self.long_dialog:
self.long_dialog = MDDialog(
text="Lorem ipsum dolor sit amet, consectetur adipiscing "
"elit, sed do eiusmod tempor incididunt ut labore et "
"dolore magna aliqua. Ut enim ad minim veniam, quis "
"nostrud exercitation ullamco laboris nisi ut aliquip "
"ex ea commodo consequat. Duis aute irure dolor in "
"reprehenderit in voluptate velit esse cillum dolore eu "
"fugiat nulla pariatur. Excepteur sint occaecat "
"cupidatat non proident, sunt in culpa qui officia "
"deserunt mollit anim id est laborum.",
title='Title', size_hint=(.8, .4), text_button_ok='Yes',
events_callback=self.callback_for_menu_items)
self.long_dialog.open()
def get_time_picker_data(self, instance, time):
self.root.ids.time_picker_label.text = str(time)
self.previous_time = time
def show_example_time_picker(self):
time_dialog = MDTimePicker()
time_dialog.bind(time=self.get_time_picker_data)
if self.root.ids.time_picker_use_previous_time.active:
try:
time_dialog.set_time(self.previous_time)
except AttributeError:
pass
time_dialog.open()
def set_previous_date(self, date_obj):
self.previous_date = date_obj
self.root.ids.date_picker_label.text = str(date_obj)
def show_example_date_picker(self):
if self.root.ids.date_picker_use_previous_date.active:
pd = self.previous_date
try:
MDDatePicker(self.set_previous_date,
pd.year, pd.month, pd.day).open()
except AttributeError:
MDDatePicker(self.set_previous_date).open()
else:
MDDatePicker(self.set_previous_date).open()
def show_example_bottom_sheet(self):
if not self.bs_menu_1:
self.bs_menu_1 = MDListBottomSheet()
self.bs_menu_1.add_item(
"Here's an item with text only",
lambda x: self.callback_for_menu_items(
"Here's an item with text only"))
self.bs_menu_1.add_item(
"Here's an item with an icon",
lambda x: self.callback_for_menu_items(
"Here's an item with an icon"),
icon='clipboard-account')
self.bs_menu_1.add_item(
"Here's another!",
lambda x: self.callback_for_menu_items(
"Here's another!"),
icon='nfc')
self.bs_menu_1.open()
def show_example_grid_bottom_sheet(self):
if not self.bs_menu_2:
self.bs_menu_2 = MDGridBottomSheet()
self.bs_menu_2.add_item(
"Facebook",
lambda x: self.callback_for_menu_items("Facebook"),
icon_src='./assets/facebook-box.png')
self.bs_menu_2.add_item(
"YouTube",
lambda x: self.callback_for_menu_items("YouTube"),
icon_src='./assets/youtube-play.png')
self.bs_menu_2.add_item(
"Twitter",
lambda x: self.callback_for_menu_items("Twitter"),
icon_src='./assets/twitter.png')
self.bs_menu_2.add_item(
"Da Cloud",
lambda x: self.callback_for_menu_items("Da Cloud"),
icon_src='./assets/cloud-upload.png')
self.bs_menu_2.add_item(
"Camera",
lambda x: self.callback_for_menu_items("Camera"),
icon_src='./assets/camera.png')
self.bs_menu_2.open()
def set_error_message(self, *args):
if len(self.root.ids.text_field_error.text) == 2:
self.root.ids.text_field_error.error = True
else:
self.root.ids.text_field_error.error = False
def on_pause(self):
return True
def on_stop(self):
pass
def open_settings(self, *args):
return False
class ContentForAnimCard(BoxLayout):
callback = ObjectProperty(lambda x: None)
class AvatarSampleWidget(ILeftBody, Image):
pass
class IconLeftSampleWidget(ILeftBodyTouch, MDIconButton):
pass
class IconRightSampleWidget(IRightBodyTouch, MDCheckbox):
pass
if __name__ == '__main__':
KitchenSink().run()
|
StarcoderdataPython
|
3370382
|
import json
from pathlib import Path
import re
import questionary
from questionary import Choice
from rich.console import Console
from dbt_coves.tasks.base import BaseConfiguredTask
from dbt_coves.utils.jinja import render_template, render_template_file
console = Console()
NESTED_FIELD_TYPES = {
"SnowflakeAdapter": "VARIANT",
"BigQueryAdapter": "STRUCT",
"RedshiftAdapter": "SUPER",
}
class GenerateSourcesTask(BaseConfiguredTask):
"""
Task that generates sources, models and model properties automatically
"""
@classmethod
def register_parser(cls, sub_parsers, base_subparser):
subparser = sub_parsers.add_parser(
"sources",
parents=[base_subparser],
help="Generate source dbt models by inspecting the database schemas and relations.",
)
subparser.add_argument(
"--database",
type=str,
help="Database where source relations live, if different than target",
)
subparser.add_argument(
"--schemas",
type=str,
help="Comma separated list of schemas where raw data resides, "
"i.e. 'RAW_SALESFORCE,RAW_HUBSPOT'",
)
subparser.add_argument(
"--relations",
type=str,
help="Comma separated list of relations where raw data resides, "
"i.e. 'RAW_HUBSPOT_PRODUCTS,RAW_SALESFORCE_USERS'",
)
subparser.add_argument(
"--destination",
type=str,
help="Where models sql files will be generated, i.e. "
"'models/{schema_name}/{relation_name}.sql'",
)
subparser.add_argument(
"--model_props_strategy",
type=str,
help="Strategy for model properties files generation,"
" i.e. 'one_file_per_model'",
)
subparser.add_argument(
"--templates_folder",
type=str,
help="Folder with jinja templates that override default "
"sources generation templates, i.e. 'templates'",
)
subparser.set_defaults(cls=cls, which="sources")
return subparser
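# Example invocation (sketch): assumes the package exposes a `dbt-coves` CLI and that
# this task is reached via `dbt-coves generate sources`, matching the "generate" /
# "sources" config keys read in get_config_value below.
#
#   dbt-coves generate sources \
#       --schemas 'RAW_SALESFORCE,RAW_HUBSPOT' \
#       --relations 'RAW_*' \
#       --destination 'models/staging/{schema}/{relation}.sql'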
def run(self):
config_database = self.get_config_value("database")
db = config_database or self.config.credentials.database
schema_name_selectors = [
schema.upper() for schema in self.get_config_value("schemas")
]
schema_wildcard_selectors = []
for schema_name in schema_name_selectors:
if "*" in schema_name:
schema_wildcard_selectors.append(schema_name.replace("*", ".*"))
with self.adapter.connection_named("master"):
schemas = [
schema.upper()
for schema in self.adapter.list_schemas(db)
# TODO: fix this for different adapters
if schema != "INFORMATION_SCHEMA"
]
for schema in schemas:
for selector in schema_wildcard_selectors:
if re.search(selector, schema):
schema_name_selectors.append(schema)
break
filtered_schemas = list(set(schemas).intersection(schema_name_selectors))
if not filtered_schemas:
schema_nlg = f"schema{'s' if len(schema_name_selectors) > 1 else ''}"
console.print(
f"Provided {schema_nlg} [u]{', '.join(schema_name_selectors)}[/u] not found in Database.\n"
)
selected_schemas = questionary.checkbox(
"Which schemas would you like to inspect?",
choices=[
Choice(schema, checked=True)
if "RAW" in schema
else Choice(schema)
for schema in schemas
],
).ask()
if selected_schemas:
filtered_schemas = selected_schemas
else:
return 0
rel_name_selectors = [
relation.upper() for relation in self.get_config_value("relations")
]
rel_wildcard_selectors = []
for rel_name in rel_name_selectors:
if "*" in rel_name:
rel_wildcard_selectors.append(rel_name.replace("*", ".*"))
listed_relations = []
for schema in filtered_schemas:
listed_relations += self.adapter.list_relations(db, schema)
for rel in listed_relations:
for selector in rel_wildcard_selectors:
if re.search(selector, rel.name):
rel_name_selectors.append(rel.name)
break
intersected_rels = [
relation
for relation in listed_relations
if relation.name in rel_name_selectors
]
rels = (
intersected_rels
if rel_name_selectors and rel_name_selectors[0]
else listed_relations
)
if rels:
selected_rels = questionary.checkbox(
"Which sources would you like to generate?",
choices=[
Choice(f"[{rel.schema}] {rel.name}", checked=True, value=rel)
for rel in rels
],
).ask()
if selected_rels:
self.generate_sources(selected_rels)
else:
return 0
else:
schema_nlg = f"schema{'s' if len(filtered_schemas) > 1 else ''}"
console.print(
f"No tables/views found in [u]{', '.join(filtered_schemas)}[/u] {schema_nlg}."
)
return 0
def get_config_value(self, key):
return self.coves_config.integrated["generate"]["sources"][key]
def generate_sources(self, rels):
dest = self.get_config_value("destination")
options = {"override_all": None, "flatten_all": None}
for rel in rels:
model_dest = render_template(
dest, {"schema": rel.schema.lower(), "relation": rel.name.lower()}
)
model_sql = Path().joinpath(model_dest)
if not options["override_all"]:
if model_sql.exists():
overwrite = questionary.select(
f"{model_dest} already exists. Would you like to overwrite it?",
choices=["No", "Yes", "No for all", "Yes for all"],
default="No",
).ask()
if overwrite == "Yes":
self.generate_model(rel, model_sql, options)
elif overwrite == "No for all":
options["override_all"] = "No"
elif overwrite == "Yes for all":
options["override_all"] = "Yes"
self.generate_model(rel, model_sql, options)
else:
self.generate_model(rel, model_sql, options)
elif options["override_all"] == "Yes":
self.generate_model(rel, model_sql, options)
else:
if not model_sql.exists():
self.generate_model(rel, model_sql, options)
def generate_model(self, relation, destination, options):
destination.parent.mkdir(parents=True, exist_ok=True)
columns = self.adapter.get_columns_in_relation(relation)
nested_field_type = NESTED_FIELD_TYPES.get(self.adapter.__class__.__name__)
nested = [col.name.lower() for col in columns if col.dtype == nested_field_type]
if not options["flatten_all"]:
if nested:
field_nlg = "field"
flatten_nlg = "flatten it"
if len(nested) > 1:
field_nlg = "fields"
flatten_nlg = "flatten them"
flatten = questionary.select(
f"{relation.name.lower()} contains the JSON {field_nlg} {', '.join(nested)}."
f" Would you like to {flatten_nlg}?",
choices=["No", "Yes", "No for all", "Yes for all"],
default="Yes",
).ask()
if flatten == "Yes":
self.render_templates(relation, columns, destination, nested=nested)
elif flatten == "No":
self.render_templates(relation, columns, destination)
elif flatten == "No for all":
options["flatten_all"] = "No"
self.render_templates(relation, columns, destination)
elif flatten == "Yes for all":
options["flatten_all"] = "Yes"
self.render_templates(relation, columns, destination, nested=nested)
else:
self.render_templates(relation, columns, destination)
elif options["flatten_all"] == "Yes":
if nested:
self.render_templates(relation, columns, destination, nested=nested)
else:
self.render_templates(relation, columns, destination)
def get_nested_keys(self, columns, schema, relation):
config_db = self.get_config_value("database")
if config_db:
config_db += "."
else:
config_db = ""
_, data = self.adapter.execute(
f"SELECT {', '.join(columns)} FROM {config_db}{schema}.{relation} limit 1",
fetch=True,
)
result = dict()
if len(data.rows) > 0:
for idx, col in enumerate(columns):
value = data.columns[idx]
try:
result[col] = list(json.loads(value[0]).keys())
except TypeError:
console.print(
f"Column {col} in relation {relation} contains invalid JSON.\n"
)
return result
def render_templates(self, relation, columns, destination, nested=None):
context = {
"relation": relation,
"columns": columns,
"nested": {},
"adapter_name": self.adapter.__class__.__name__,
}
if nested:
context["nested"] = self.get_nested_keys(
nested, relation.schema, relation.name
)
# Removing original column with JSON data
new_cols = []
for col in columns:
if col.name.lower() not in context["nested"]:
new_cols.append(col)
context["columns"] = new_cols
config_db = self.get_config_value("database")
if config_db:
context["source_database"] = config_db
templates_folder = self.get_config_value("templates_folder")
render_template_file(
"source_model.sql", context, destination, templates_folder=templates_folder
)
context["model"] = destination.name.lower().replace(".sql", "")
render_template_file(
"source_model_props.yml",
context,
str(destination).replace(".sql", ".yml"),
templates_folder=templates_folder,
)
|
StarcoderdataPython
|
3292574
|
<filename>language/bert/sequene_parallel/loss_func/cross_entropy.py
from colossalai.context.parallel_mode import ParallelMode
import torch
from torch.cuda.amp import custom_bwd, custom_fwd
class _VocabCrossEntropy(torch.autograd.Function):
@staticmethod
@custom_fwd
def forward(ctx, vocab_parallel_logits, target):
# Maximum value along vocab dimension across all GPUs.
logits_max = torch.max(vocab_parallel_logits, dim=-1)[0]
# Subtract the maximum value.
vocab_parallel_logits.sub_(logits_max.unsqueeze(dim=-1))
        # Mask out invalid target positions (negative target ids mean the position is ignored).
target_mask = target < 0
masked_target = target.clone()
masked_target[target_mask] = 0
# Get predicted-logits = logits[target].
# For Simplicity, we convert logits to a 2-D tensor with size
# [*, partition-vocab-size] and target to a 1-D tensor of size [*].
logits_2d = vocab_parallel_logits.view(-1, vocab_parallel_logits.size(-1))
masked_target_1d = masked_target.view(-1)
arange_1d = torch.arange(start=0, end=logits_2d.size()[0],
device=logits_2d.device)
predicted_logits_1d = logits_2d[arange_1d, masked_target_1d]
predicted_logits_1d = predicted_logits_1d.clone().contiguous()
predicted_logits = predicted_logits_1d.view_as(target)
predicted_logits[target_mask] = 0.0
# Sum of exponential of logits along vocab dimension across all GPUs.
exp_logits = vocab_parallel_logits
torch.exp(vocab_parallel_logits, out=exp_logits)
sum_exp_logits = exp_logits.sum(dim=-1)
# Loss = log(sum(exp(logits))) - predicted-logit.
loss = torch.log(sum_exp_logits) - predicted_logits
# Store softmax, target-mask and masked-target for backward pass.
exp_logits.div_(sum_exp_logits.unsqueeze(dim=-1))
ctx.save_for_backward(exp_logits, target_mask, masked_target_1d)
return loss
@staticmethod
@custom_bwd
def backward(ctx, grad_output):
        # Retrieve tensors from the forward pass.
softmax, target_mask, masked_target_1d = ctx.saved_tensors
        # All the inputs have softmax as their gradient.
grad_input = softmax
# For simplicity, work with the 2D gradient.
partition_vocab_size = softmax.size()[-1]
grad_2d = grad_input.view(-1, partition_vocab_size)
# Add the gradient from matching classes.
arange_1d = torch.arange(start=0, end=grad_2d.size()[0],
device=grad_2d.device)
grad_2d[arange_1d, masked_target_1d] -= (
1.0 - target_mask.view(-1).float())
# Finally elementwise multiplication with the output gradients.
grad_input.mul_(grad_output.unsqueeze(dim=-1))
return grad_input, None
def vocab_cross_entropy(vocab_logits, target):
"""helper function for the cross entropy."""
return _VocabCrossEntropy.apply(vocab_logits, target)
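# Hedged usage sketch (not part of the original file): raw logits of shape
# (batch, seq, vocab) and integer targets; negative target ids would be masked out.
if __name__ == '__main__':
    logits = torch.randn(2, 4, 10, requires_grad=True)
    targets = torch.randint(0, 10, (2, 4))
    loss = vocab_cross_entropy(logits.clone(), targets)  # per-token loss, shape (2, 4)
    loss.mean().backward()
    print(loss.shape, logits.grad.shape)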
|
StarcoderdataPython
|
1771653
|
<reponame>sdpython/code_beatrix<gh_stars>1-10
"""
@brief test log(time=1s)
"""
import os
import unittest
from pyquickhelper.loghelper import fLOG
from code_beatrix.ipythonhelper.magic_scratch import MagicScratch
from code_beatrix.jsscripts.nbsnap import RenderSnap
class TestMagicSnap(unittest.TestCase):
def test_magic_snap(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
this = os.path.abspath(__file__)
mg = MagicScratch()
cmd = "-W 500"
fLOG("**", cmd)
mg.add_context({"this": this})
res = mg.snap(cmd)
fLOG(res)
self.assertTrue(res)
def test_snap(self):
obj = RenderSnap()
s = obj._repr_html_()
self.assertTrue(s)
if __name__ == "__main__":
unittest.main()
|
StarcoderdataPython
|
4835043
|
<reponame>CopenhagenCityArchives/CorrectOCR<filename>CorrectOCR/setup.py
from setuptools import setup
setup(
name='CorrectOCR',
version='',
packages=['', 'tokens', ''],
package_dir={'': 'CorrectOCR'},
url='https://github.com/CopenhagenCityArchives/CorrectOCR',
license='CC-BY-4.0',
author='<NAME>',
author_email='<EMAIL>',
description='Machine Learning-assisted correction of OCR errors in Danish corpora'
)
|
StarcoderdataPython
|
1737398
|
# Generated by Django 3.1.5 on 2021-02-14 11:35
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('api', '0006_auto_20210214_0842'),
]
operations = [
migrations.CreateModel(
name='BankInfo',
fields=[
('tax_code', models.IntegerField(primary_key=True, serialize=False)),
('bank_name', models.CharField(default=None, max_length=200, null=True)),
('support_number_1', models.CharField(default=None, max_length=13, null=True)),
('support_number_2', models.CharField(default=None, max_length=13, null=True)),
('support_number_3', models.CharField(default=None, max_length=13, null=True)),
('email', models.CharField(default=None, max_length=20, null=True)),
('website', models.CharField(default=None, max_length=20, null=True)),
('info', models.TextField(default=None, max_length=500, null=True)),
('signature_person', models.TextField(default=None, max_length=100, null=True)),
('sign', models.ImageField(blank=True, null=True, upload_to='')),
('logo', models.ImageField(blank=True, null=True, upload_to='')),
],
options={
'ordering': ['tax_code', 'bank_name'],
},
),
migrations.DeleteModel(
name='Receipt',
),
migrations.RemoveField(
model_name='rbaresponse',
name='id',
),
migrations.AlterField(
model_name='rbaresponse',
name='link_code',
field=models.UUIDField(default='ae6fabcd466f4fda94e20d89209a988c', editable=False),
),
migrations.AlterField(
model_name='rbaresponse',
name='reciept_id',
field=models.IntegerField(primary_key=True, serialize=False, unique=True),
),
migrations.AddField(
model_name='rbaresponse',
name='sender_bank_tax_code',
field=models.ForeignKey(default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='sender_bank_tax_code', to='api.bankinfo'),
),
]
|
StarcoderdataPython
|
194409
|
from __future__ import absolute_import
__all__ = ('Device', )
from sentry.interfaces.base import Interface, InterfaceValidationError
from sentry.utils.safe import trim, trim_dict
class Device(Interface):
"""
An interface which describes the device.
>>> {
>>> "name": "Windows",
>>> "version": "95",
>>> "build": "95.0.134.1651",
>>> "arbitrary": "data"
>>> }
"""
@classmethod
def to_python(cls, data):
data = data.copy()
extra_data = data.pop('data', data)
if not isinstance(extra_data, dict):
extra_data = {}
try:
name = trim(data.pop('name'), 64)
except KeyError:
raise InterfaceValidationError("Missing or invalid value for 'name'")
try:
version = trim(data.pop('version'), 64)
except KeyError:
raise InterfaceValidationError("Missing or invalid value for 'version'")
build = trim(data.pop('build', None), 64)
kwargs = {
'name': name,
'version': version,
'build': build,
'data': trim_dict(extra_data),
}
return cls(**kwargs)
def get_api_context(self, is_public=False):
return {
'name': self.name,
'version': self.version,
'build': self.build,
'data': self.data,
}
def get_path(self):
return 'device'
def get_hash(self):
return []
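# Hedged usage sketch (not part of the original module): converting raw event data into a
# Device interface, mirroring the payload shown in the class docstring.
#
# device = Device.to_python({'name': 'Windows', 'version': '95', 'build': '95.0.134.1651'})
# api_context = device.get_api_context()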
|
StarcoderdataPython
|
1756926
|
import discord
from discord.ext import commands
from utils.configManager import BotConfig
class HelpCommand(commands.HelpCommand):
def __init__(self):
super().__init__()
self.botConfig = BotConfig()
async def send_bot_help(self, mapping):
helpMessage = discord.Embed(
title="WingBot Help\t|\tModules", color=discord.Color.dark_blue()
)
for i in mapping.keys():
if i is None:
continue
helpMessage.add_field(
name=i.qualified_name, value=i.description, inline=False
)
helpMessage.set_footer(
text=f"Type `{self.botConfig.commandPrefix}help <module>` to learn more about that module."
)
await self.get_destination().send(embed=helpMessage)
async def send_cog_help(self, cog):
options = ""
l = []
for i in cog.walk_commands():
if i not in l:
options += (
f"`{self.botConfig.commandPrefix}{i.qualified_name}` | {i.help}\n"
)
l.append(i)
helpMessage = discord.Embed(
title=f"WingBot Help\t|\t{cog.qualified_name}",
color=discord.Color.dark_blue(),
)
helpMessage.add_field(name="Description", value=cog.description)
helpMessage.add_field(name="Options", value=options, inline=False)
helpMessage.set_footer(
text=f"Type `{self.botConfig.commandPrefix}help <command>` to learn more about a command."
)
await self.get_destination().send(embed=helpMessage)
async def send_group_help(self, group):
options = ""
l = []
for i in group.walk_commands():
if i not in l:
options += (
f"`{self.botConfig.commandPrefix}{i.qualified_name}` | {i.help}\n"
)
l.append(i)
helpMessage = discord.Embed(
title=f"WingBot Help\t|\t`{self.botConfig.commandPrefix}{group.qualified_name}`",
color=discord.Color.dark_blue(),
)
helpMessage.add_field(name="Description", value=group.short_doc)
helpMessage.add_field(name="Options", value=options, inline=False)
helpMessage.set_footer(
text=f"Type `{self.botConfig.commandPrefix}help <command>` to learn more about a command."
)
await self.get_destination().send(embed=helpMessage)
async def send_command_help(self, command):
helpMessage = discord.Embed(
title=f"WingBot Help\t|\t`{self.botConfig.commandPrefix}{command.qualified_name}`",
color=discord.Color.dark_blue(),
)
helpMessage.add_field(name="Description", value=command.help)
helpMessage.add_field(
name="Usage",
value=f"`{self.botConfig.commandPrefix}{command.qualified_name} {command.signature}`",
inline=False,
)
if command.aliases:
aliasString = ""
for i in command.aliases:
aliasString += (
f"`{self.botConfig.commandPrefix}{command.full_parent_name} {i}`\n"
)
helpMessage.add_field(name="Aliases", value=aliasString, inline=False)
await self.get_destination().send(embed=helpMessage)
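# Hedged usage sketch (not part of the original file): wiring this help command into a bot;
# the prefix is illustrative and normally comes from BotConfig.
#
# bot = commands.Bot(command_prefix="!", help_command=HelpCommand())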
|
StarcoderdataPython
|
4814731
|
from os import name, system
from variables import *
# define global variables
inventory = {
"health": 10,
"money": 10,
"social": 10,
"fame": 10,
}
scenarios = [
scenario01,
scenario02,
scenario03,
scenario04,
scenario05,
scenario06,
scenario07,
scenario08,
]
def clear_console(): system('cls' if name == 'nt' else 'clear')
def choose():
decision = 'null'
while(decision != 'false' and decision != 'true'):
decision = input((f'\n\tVocê aceita (true) ou recusa (false)? '))
decision = decision.lower().strip()
if decision == 'true':
return True
elif decision == 'false':
return False
else:
print('\n\t⚠️ Por favor, digite apenas "true" ou "false" ⚠️\n')
def showMenu():
print("""\n
*********************
** DEVELOPER, INC. **
*********************
---------------------
| 0 - Iniciar jogo |
| 1 - Créditos |
| 2 - Sair |
---------------------
\n""")
def is_player_alive(inventory):
flag = True
for item_value in inventory.values():
if item_value <= 0:
flag = False
return flag
def show_inventory():
global inventory
print(f"""\n
STATUS ATUAL:
Saúde: {inventory['health']}
Dinheiro: {inventory['money']}
Vida social: {inventory['social']}
Fama: {inventory['fame']}
""")
def start_game():
global scenarios
global inventory
reset_inventory(inventory)
for scenario in scenarios:
if is_player_alive(inventory) == False:
game_over()
break
print(scenario)
boolean_answer = choose()
if scenario == scenario01:
if boolean_answer:
inventory['health'] -= 2
inventory['money'] += 2
inventory['social'] -= 3
inventory['fame'] += 1
else:
inventory['health'] += 1
inventory['money'] -= 4
inventory['social'] += 1
inventory['fame'] -= 1
clear_console()
show_inventory()
elif scenario == scenario02:
if boolean_answer:
inventory['health'] += 2
inventory['money'] -= 4
inventory['social'] -= 0
inventory['fame'] += 2
else:
inventory['health'] -= 2
inventory['money'] += 4
inventory['social'] += 0
inventory['fame'] -= 2
clear_console()
show_inventory()
elif scenario == scenario03:
if boolean_answer:
inventory['health'] += 1
inventory['money'] -= 5
inventory['social'] += 2
inventory['fame'] += 2
else:
inventory['health'] -= 3
inventory['money'] += 4
inventory['social'] -= 4
inventory['fame'] -= 3
clear_console()
show_inventory()
elif scenario == scenario04:
if boolean_answer:
inventory['health'] -= 9
inventory['money'] -= 6
inventory['social'] += 8
inventory['fame'] += 5
else:
inventory['health'] += 4
inventory['money'] += 8
inventory['social'] -= 4
inventory['fame'] -= 3
clear_console()
show_inventory()
elif scenario == scenario05:
if boolean_answer:
inventory['health'] += 9
inventory['money'] -= 8
inventory['social'] -= 0
inventory['fame'] += 0
else:
inventory['health'] -= 9
inventory['money'] += 8
inventory['social'] += 0
inventory['fame'] -= 0
clear_console()
show_inventory()
elif scenario == scenario06:
if boolean_answer:
inventory['health'] += 5
inventory['money'] += 2
inventory['social'] += 8
inventory['fame'] -= 8
else:
inventory['health'] -= 3
inventory['money'] -= 2
inventory['social'] -= 4
inventory['fame'] += 3
clear_console()
show_inventory()
elif scenario == scenario07:
if boolean_answer:
inventory['health'] -= 10
inventory['money'] += 8
inventory['social'] -= 11
inventory['fame'] += 10
else:
inventory['health'] += 5
inventory['money'] -= 7
inventory['social'] += 2
inventory['fame'] -= 5
clear_console()
show_inventory()
elif scenario == scenario08:
if boolean_answer:
inventory['health'] -= 2
inventory['money'] -= 10
inventory['social'] -= 3
inventory['fame'] += 5
else:
inventory['health'] += 1
inventory['money'] += 4
inventory['social'] += 1
inventory['fame'] -= 8
clear_console()
show_inventory()
# clear_console()
pass
clear_console()
end_game()
def reset_inventory(inventory):
    for key in inventory:
        inventory[key] = 10
def main():
while True:
showMenu()
menu_option = input("\n\tEscolha uma opção: ")
if menu_option == '0':
clear_console()
presentation()
clear_console()
start_game()
elif menu_option == '1':
clear_console()
show_credits()
elif menu_option == '2':
clear_console()
bye()
break
else:
print('\n\tFavor inserir uma opção válida!\n')
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
3340579
|
import wikipedia
import cPickle as pickle
import string
from csv import DictReader, DictWriter
import re
import nltk
"""
This class is used to fetch articles from wiki and dump it locally so that the runtime for the program is reduced.
"""
class ArticleDump:
def web_lookup(self):
"""
We made use of the wikipedia package available in python to fetch the articles from Wikipedia. The guesses present in the training data correspond to
articles in wikipedia and they are fetched and dumped locally.
"""
guess_words = []
train = DictReader(open("train_final.csv", 'r'))
for ii in train:
guesses = ii['IR_Wiki Scores'] + ii['QANTA Scores']
words = re.split(',[^_]', guesses)
for word in words:
guess = word.split(":")[0].rstrip().lstrip()
if '_' in guess:
guess_words.append(guess.replace('_', ' '))
else:
guess_words.append(guess)
counter = 0
        #Unique guess articles are fetched. Below is some code to handle discrepancy in the training data
for guesses in list(set(guess_words)):
if '&' in guesses:
guesses = guesses.replace('&', '&')
elif '"' in guesses:
guesses = guesses.replace('"', '"')
guesses = re.sub('[%s]' % re.escape('!"#$%\*+,./:;<=>?@[\\]^_`{|}~'), '', guesses)
guess_count_score = {}
try:
#For each guess, we fetch the wikipedia page. The below line shows the code for the same.
wiki_content = wikipedia.page(guesses).content.lower().encode("utf8")
except wikipedia.exceptions.DisambiguationError as e:
wiki_content = wikipedia.page(e.options[0]).content.lower().encode("utf8")
except wikipedia.exceptions.PageError as pe:
print "Exception for guess: ", guesses
guesses = re.sub('[%s]' % re.escape(string.punctuation), '', guesses)
guess_filename = "dumps_final1/"+guesses+".txt"
#Used cPickle to dump wikipedia articles.
with open(guess_filename, 'wb') as fp:
pickle.dump(wiki_content, fp)
fp.close()
counter+=1
#Counter maintained for debugging purposes only.
print "Done ", counter, " Out of ", len(list(set(guess_words)))
# Same holds good for test data also
guess_words = []
train = DictReader(open("test.csv", 'r'))
for ii in train:
guesses = ii['IR_Wiki Scores'] + ii['QANTA Scores']
words = re.split(',[^_]', guesses)
for word in words:
guess = word.split(":")[0].rstrip().lstrip()
if '_' in guess:
guess_words.append(guess.replace('_', ' '))
else:
guess_words.append(guess)
counter = 0
for guesses in list(set(guess_words)):
if '&' in guesses:
guesses = guesses.replace('&', '&')
elif '"' in guesses:
guesses = guesses.replace('"', '"')
guesses = re.sub('[%s]' % re.escape('!"#$%\*+,./:;<=>?@[\\]^_`{|}~'), '', guesses)
guess_count_score = {}
try:
wiki_content = wikipedia.page(guesses).content.lower().encode("utf8")
except wikipedia.exceptions.DisambiguationError as e:
wiki_content = wikipedia.page(e.options[0]).content.lower().encode("utf8")
except wikipedia.exceptions.PageError as pe:
print "Exception for guess: ", guesses
guesses = re.sub('[%s]' % re.escape(string.punctuation), '', guesses)
guess_filename = "dumps_final1/"+guesses+".txt"
with open(guess_filename, 'wb') as fp:
pickle.dump(wiki_content, fp)
fp.close()
            # `filename` is never defined in the original script; a guess->dump mapping file
            # path is assumed here so the append below has somewhere to write.
            filename = "dumps_final1/guess_mapping.txt"
            with open(filename, 'a') as fp:
pickle.dump(guesses+"-->"+guess_filename, fp)
fp.close()
counter+=1
#Counter maintained for debugging purposes only.
print "Done ", counter, " Out of ", len(list(set(guess_words)))
if __name__=="__main__":
ad = ArticleDump()
ad.web_lookup()
|
StarcoderdataPython
|
3265371
|
from meltano.core.plugin import PluginInstall, PluginType
class ModelPlugin(PluginInstall):
__plugin_type__ = PluginType.MODELS
def __init__(self, *args, **kwargs):
super().__init__(self.__class__.__plugin_type__, *args, **kwargs)
|
StarcoderdataPython
|
188570
|
import pytest
from werkzeug.exceptions import BadRequest, InternalServerError
# Link Controller
from abitly.services.link.controller import (validate_request_body,
get_generated_url,
get_original_url)
def test_validate_request_body_should_return_original_url():
"""Should return the original url when it is in the request body
and it is of type str
"""
request_data = {'originalUrl': 'https://realpython.com/'}
expected_original_url = 'https://realpython.com/'
original_url = validate_request_body(request_data)
assert original_url == expected_original_url
def test_validate_request_body_should_raise_bad_request():
"""Should raise BadRequest exception when the originalUrl is not in the
    request body or it is not of type str
"""
request_data_empty = {}
request_data_wrong_type = {'originalUrl': 10}
with pytest.raises(BadRequest):
validate_request_body(request_data_empty)
with pytest.raises(BadRequest):
validate_request_body(request_data_wrong_type)
def test_get_generated_url_should_raise_internal_server_error():
"""Should raise InternalServerError when the
    original_url is not of type str
"""
original_url = 10
with pytest.raises(InternalServerError):
get_generated_url(original_url)
def test_get_original_url_should_rise_bad_request():
"""Should rise BadRequest when the generated_url have an invalid format"""
with pytest.raises(BadRequest):
get_original_url('4RjLzNF5')
|
StarcoderdataPython
|
1680326
|
<filename>nmtg/modules/loss.py
import torch
from torch import nn, Tensor
class NMTLoss(nn.Module):
def __init__(self, output_size, padding_idx, label_smoothing=0.0):
super().__init__()
self.output_size = output_size
self.padding_idx = padding_idx
self.label_smoothing = label_smoothing
weight = torch.ones(output_size)
weight[padding_idx] = 0
self.loss = nn.NLLLoss(weight, reduction='sum')
def forward(self, lprobs: Tensor, targets: Tensor):
gtruth = targets.view(-1) # (batch * time,)
lprobs = lprobs.view(-1, lprobs.size(-1)) # (batch * time, vocab_size)
if self.label_smoothing > 0: # label smoothing
non_pad_mask = gtruth.ne(self.padding_idx)
nll_loss = -lprobs.gather(1, gtruth.unsqueeze(1))[non_pad_mask]
nll_loss = nll_loss.sum()
smooth_loss = -lprobs.sum(dim=-1, keepdim=True)[non_pad_mask]
smooth_loss = smooth_loss.sum()
eps_i = self.label_smoothing / (self.output_size - 1)
loss = (1 - self.label_smoothing) * nll_loss + eps_i * smooth_loss
loss_data = nll_loss.item()
else:
loss = self.loss(lprobs.float(), gtruth)
loss_data = loss.item()
return loss, loss_data
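# Hedged usage sketch (not part of the original module): computing the label-smoothed loss
# from log-probabilities; vocabulary size, padding index and shapes below are made up.
if __name__ == '__main__':
    criterion = NMTLoss(output_size=100, padding_idx=0, label_smoothing=0.1)
    lprobs = torch.log_softmax(torch.randn(2, 5, 100), dim=-1)  # (batch, time, vocab)
    targets = torch.randint(1, 100, (2, 5))                     # index 0 would be padding
    loss, nll_value = criterion(lprobs, targets)
    print(loss.item(), nll_value)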
|
StarcoderdataPython
|
1660002
|
"""Credit to <NAME>:
https://github.com/MilesCranmer/easy_normalizing_flow/blob/master/flow.py
"""
import torch
from torch import nn, optim
from torch.functional import F
import numpy as np
####
# From Karpathy's MADE implementation
####
DEBUG = False
class MaskedLinear(nn.Linear):
""" same as Linear except has a configurable mask on the weights """
def __init__(self, in_features, out_features, bias=True):
super().__init__(in_features, out_features, bias)
self.register_buffer('mask', torch.ones(out_features, in_features))
def set_mask(self, mask):
self.mask.data.copy_(torch.from_numpy(mask.astype(np.uint8).T))
def forward(self, input):
if DEBUG:
print("masked linear: ", torch.any(torch.isnan(input)), input.mean())
return F.linear(input, self.mask * self.weight, self.bias)
class MADE(nn.Module):
def __init__(self, nin, hidden_sizes,
nout, num_masks=1, natural_ordering=False):
"""
nin: integer; number of inputs
hidden sizes: a list of integers; number of units in hidden layers
nout: integer; number of outputs, which usually collectively parameterize some kind of 1D distribution
note: if nout is e.g. 2x larger than nin (perhaps the mean and std), then the first nin
will be all the means and the second nin will be stds. i.e. output dimensions depend on the
same input dimensions in "chunks" and should be carefully decoded downstream appropriately.
the output of running the tests for this file makes this a bit more clear with examples.
num_masks: can be used to train ensemble over orderings/connections
natural_ordering: force natural ordering of dimensions, don't use random permutations
"""
super().__init__()
self.nin = nin
self.nout = nout
self.hidden_sizes = hidden_sizes
assert self.nout % self.nin == 0, "nout must be integer multiple of nin"
# define a simple MLP neural net
self.net = []
hs = [nin] + hidden_sizes + [nout]
for h0, h1 in zip(hs, hs[1:]):
self.net.extend([
MaskedLinear(h0, h1),
nn.ReLU(),
])
self.net.pop() # pop the last ReLU for the output layer
self.net = nn.Sequential(*self.net)
# seeds for orders/connectivities of the model ensemble
self.natural_ordering = natural_ordering
self.num_masks = num_masks
self.seed = 0 # for cycling through num_masks orderings
self.m = {}
self.update_masks() # builds the initial self.m connectivity
# note, we could also precompute the masks and cache them, but this
# could get memory expensive for large number of masks.
def update_masks(self):
if self.m and self.num_masks == 1: return # only a single seed, skip for efficiency
L = len(self.hidden_sizes)
# fetch the next seed and construct a random stream
rng = np.random.RandomState(self.seed)
self.seed = (self.seed + 1) % self.num_masks
# sample the order of the inputs and the connectivity of all neurons
self.m[-1] = np.arange(self.nin) if self.natural_ordering else rng.permutation(self.nin)
for l in range(L):
self.m[l] = rng.randint(self.m[l-1].min(), self.nin-1, size=self.hidden_sizes[l])
# construct the mask matrices
masks = [self.m[l-1][:,None] <= self.m[l][None,:] for l in range(L)]
masks.append(self.m[L-1][:,None] < self.m[-1][None,:])
# handle the case where nout = nin * k, for integer k > 1
if self.nout > self.nin:
k = int(self.nout / self.nin)
# replicate the mask across the other outputs
masks[-1] = np.concatenate([masks[-1]]*k, axis=1)
# set the masks in all MaskedLinear layers
layers = [l for l in self.net.modules() if isinstance(l, MaskedLinear)]
for l,m in zip(layers, masks):
l.set_mask(m)
def forward(self, x):
return self.net(x)
####
# End Karpathy's code
####
class MAF(nn.Module):
"""x0 only depends on x0, etc"""
def __init__(self, features, context, hidden=100, nlayers=1):
super(self.__class__, self).__init__()
self._fmualpha = MADE(features+context,
[hidden]*nlayers, 2*(features+context),
natural_ordering=True)
self.context_map = nn.Linear(context, context)
self.context = context
self.features = features
def fmualpha(self, x):
# Only return the data parts: (conditioned on whole context vector)
out = self._fmualpha(x)
mu = out[:, self.context:self.context+self.features]
alpha = out[:, 2*self.context+self.features:]
return mu, alpha
def load_context(self, x, context):
return torch.cat((self.context_map(context), x), dim=1)
def invert(self, u, context):
_x = self.load_context(u, context)
mu, alpha = self.fmualpha(_x)
x = u * torch.exp(alpha) + mu
return x
def forward(self, x, context):
# Invert the flow
_x = self.load_context(x, context)
if DEBUG:
print("_x is nan:", torch.any(torch.isnan(_x)), _x.mean())
mu, alpha = self.fmualpha(_x)
if DEBUG:
print("mu is nan:", torch.any(torch.isnan(mu)), mu.mean())
print("alpha is nan:", torch.any(torch.isnan(alpha)), alpha.mean())
u = (x - mu) * torch.exp(-alpha)
log_det = - torch.sum(alpha, dim=1)
return u, log_det
class Perm(nn.Module):
def __init__(self, nvars, perm=None):
super(self.__class__, self).__init__()
# If perm is none, chose some random permutation that gets fixed at initialization
if perm is None:
perm = torch.randperm(nvars)
self.perm = perm
self.reverse_perm = torch.argsort(perm)
def forward(self, x, context):
idx = self.perm.to(x.device)
return x[:, idx], 0
def invert(self, x, context):
rev_idx = self.reverse_perm.to(x.device)
return x[:, rev_idx]
class Flow(nn.Module):
def __init__(self, *layers):
super(self.__class__, self).__init__()
self.layers = nn.ModuleList(layers)
def forward(self, x, context):
log_det = None
for layer in self.layers:
x, _log_det = layer(x, context)
log_det = (log_det if log_det is not None else 0) + _log_det
# Same ordering as input:
for layer in self.layers[::-1]:
if 'Perm' not in str(layer):
continue
x = x[:, layer.reverse_perm]
return x, log_det
def invert(self, u, context):
for layer in self.layers:
if 'Perm' not in str(layer):
continue
u = u[:, layer.perm]
for layer in self.layers[::-1]:
u = layer.invert(u, context)
return u
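# Hedged usage sketch (not part of the original file): a small conditional flow built from
# the classes above; feature/context sizes and batch size are arbitrary.
if __name__ == '__main__':
    features, context = 3, 2
    flow = Flow(MAF(features, context), Perm(features), MAF(features, context))
    x = torch.randn(8, features)
    cond = torch.randn(8, context)
    u, log_det = flow(x, cond)  # map data to the base distribution
    samples = flow.invert(torch.randn(8, features), cond)  # single-pass (approximate) inversion
    print(u.shape, log_det.shape, samples.shape)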
|
StarcoderdataPython
|
152549
|
# Time: O(n^2 * 2^n)
# Space: O(1)
# brute force, bitmask
class Solution(object):
def maximumGood(self, statements):
"""
:type statements: List[List[int]]
:rtype: int
"""
def check(mask):
return all(((mask>>j)&1) == statements[i][j]
for i in xrange(len(statements)) if (mask>>i)&1
for j in xrange(len(statements[i])) if statements[i][j] != 2)
def popcount(x):
result = 0
while x:
x &= x-1
result += 1
return result
result = 0
for mask in xrange(1<<len(statements)):
if check(mask):
result = max(result, popcount(mask))
return result
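# Hedged usage sketch (not part of the original file): the statements matrix below is the
# first example from the corresponding LeetCode problem; the expected result is 2.
# print Solution().maximumGood([[2, 1, 2], [1, 2, 2], [2, 0, 2]])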
|
StarcoderdataPython
|
182076
|
<gh_stars>0
n = int(input('Informe um número entre 0 e 9999: ').strip())
u = n // 1 % 10
d = n // 10 % 10
c = n // 100 % 10
m = n // 1000 % 10
print(f'Analisando o número {n}...')
print('Unidade: {}'.format(u))
print('Dezena: {}'.format(d))
print('Centena: {}'.format(c))
print('Milhar: {}'.format(m))
|
StarcoderdataPython
|
1700882
|
class UserProfile:
pass
# trailing comment
#leading comment
#noinspection PyUnusedLocal
def foo(sender):
pass
|
StarcoderdataPython
|
4809727
|
import argparse
from picamera import PiCamera
from aiy.vision.inference import CameraInference
from aiy.vision.models import face_detection
from aiy.vision.annotator import Annotator
import os
from sendgrid import SendGridAPIClient
from sendgrid.helpers.mail import Mail
from environs import Env
import urllib.request, json
def avg_joy_score(faces):
if faces:
return sum(face.joy_score for face in faces) / len(faces)
return 0.0
def main():
env = Env()
env.read_env()
parser = argparse.ArgumentParser()
parser.add_argument('--num_frames', '-n', type=int, dest='num_frames', default=None,
help='Sets the number of frames to run for, otherwise runs forever.')
args = parser.parse_args()
# Forced sensor mode, 1640x1232, full FoV. See:
# https://picamera.readthedocs.io/en/release-1.13/fov.html#sensor-modes
# This is the resolution inference run on.
with PiCamera(sensor_mode=4, resolution=(1640, 1232), framerate=30) as camera:
camera.start_preview()
# Annotator renders in software so use a smaller size and scale results
        # for increased performance.
annotator = Annotator(camera, dimensions=(320, 240))
scale_x = 320 / 1640
scale_y = 240 / 1232
# Incoming boxes are of the form (x, y, width, height). Scale and
# transform to the form (x1, y1, x2, y2).
def transform(bounding_box):
x, y, width, height = bounding_box
return (scale_x * x, scale_y * y, scale_x * (x + width),
scale_y * (y + height))
joy_counter = 0
with CameraInference(face_detection.model()) as inference:
for result in inference.run(args.num_frames):
faces = face_detection.get_faces(result)
annotator.clear()
for face in faces:
annotator.bounding_box(transform(face.bounding_box), fill=0)
annotator.update()
if len(faces) > 0:
if avg_joy_score(faces) > 0.8:
if joy_counter < 0:
joy_counter = 0
else:
joy_counter += 1
if avg_joy_score(faces) < 0.1:
if joy_counter > 0:
joy_counter = 0
else:
joy_counter -= 1
if joy_counter > 20:
print("Happy")
joy_counter = 0
if joy_counter < -20:
print("Sad")
request_url = urllib.request.urlopen("https://www.reddit.com/r/dogpictures/random.json")
result = json.loads(request_url.read().decode())[0]["data"]["children"][0]["data"]["url"]
message = Mail(
from_email='<EMAIL>',
to_emails='<EMAIL>',
subject='Sending with Twilio SendGrid is Fun',
html_content='<img src='+result+'>')
try:
sg = SendGridAPIClient(env.str('SENDGRID_API_KEY'))
response = sg.send(message)
print(response.status_code)
# print(response.body)
# print(response.headers)
except Exception as e:
                            print(e)  # Exception objects in Python 3 have no .message attribute
joy_counter = 0
else:
joy_counter = 0
# print('#%05d (%5.2f fps): num_faces=%d, avg_joy_score=%.2f' %
# (inference.count, inference.rate, len(faces), avg_joy_score(faces)))
camera.stop_preview()
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
19544
|
#!/usr/bin/python
# Copyright (c) 2018, 2019, Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_api_key
short_description: Upload and delete API signing key of a user in OCI
description:
- This module allows the user upload and delete API signing keys of a user in OCI. A PEM-format RSA credential for
securing requests to the Oracle Cloud Infrastructure REST API. Also known as an API signing key. Specifically,
this is the public key from the key pair. The private key remains with the user calling the API. For information
about generating a key pair in the required PEM format, see Required Keys and OCIDs.
Note that this is not the SSH key for accessing compute instances.
Each user can have a maximum of three API signing keys.
For more information about user credentials, see
U(https://docs.us-phoenix-1.oraclecloud.com/Content/API/Concepts/apisigningkey.htm).
version_added: "2.5"
options:
user_id:
description: The OCID of the user whose API signing key needs to be created or deleted.
required: true
api_signing_key:
description: The public key. Must be an RSA key in PEM format. Required when the API signing key is
uploaded with I(state=present)
required: false
aliases: ['key']
api_key_id:
description: The API signing key's id. The Id must be of the format TENANCY_OCID/USER_OCID/KEY_FINGERPRINT.
required: false
aliases: ['id']
state:
description: The state of the api signing key that must be asserted to. When I(state=present), and the
api key doesn't exist, the api key is created with the provided C(api_signing_key).
When I(state=absent), the api signing key corresponding to the provided C(fingerprint) is deleted.
required: false
default: "present"
choices: ['present', 'absent']
author: "<NAME> (@sivakumart)"
extends_documentation_fragment: [ oracle, oracle_creatable_resource, oracle_wait_options ]
"""
EXAMPLES = """
- name: Upload a new api signing key for the specified user
oci_api_key:
user_id: "ocid1.user.oc1..xxxxxEXAMPLExxxxx"
key: "-----BEGIN PUBLIC KEY-----cmdnMIIBIjANBgkqhkiG9w0BAQEFA......mwIDAQAB-----END PUBLIC KEY-----"
- name: Delete an API signing key for the specified user
oci_api_key:
user_id: "ocid1.user.oc1..xxxxxEXAMPLExxxxx"
"id": "ocid1.tenancy.oc1..xxxxxEXAMPLExxxxx/ocid1.user.oc1..xxxxxEXAMPLExxxxx/08:07:fc00:db20:35b:7399::5:da"
state: "absent"
"""
RETURN = """
oci_api_key:
description: Details of the API signing key
returned: On success
type: dict
sample: {
"fingerprint": "08:07:a6:7d:06:b4:73:91:e9:2c:da:42:c8:cb:df:02",
"inactive_status": null,
"key_id": "ocid1.tenancy.oc1..xxxxxEXAMPLExxxxx/ocid1.user.oc1..xxxxxEXAMPLExxxxx/08:07:a6:7d:06:b4:<KEY>",
"key_value": "-----BEGIN PUBLIC KEY-----...urt/fN8jNz2nZwIDAQAB-----END PUBLIC KEY-----",
"lifecycle_state": "ACTIVE",
"time_created": "2018-01-08T09:33:59.705000+00:00",
"user_id": "ocid1.user.oc1..xxxxxEXAMPLExxxxx"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.oracle import oci_utils
try:
import oci
from oci.identity.identity_client import IdentityClient
from oci.identity.models import CreateApiKeyDetails
from oci.util import to_dict
from oci.exceptions import ServiceError, MaximumWaitTimeExceeded
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
logger = None
RESOURCE_NAME = "api_key"
def set_logger(provided_logger):
global logger
logger = provided_logger
def get_logger():
return logger
def _get_api_key_from_id(identity_client, user_id, api_key_id, module):
try:
resp = oci_utils.call_with_backoff(
identity_client.list_api_keys, user_id=user_id
)
if resp is not None:
for api_key in resp.data:
if api_key.key_id == api_key_id:
return api_key
return None
except ServiceError as ex:
module.fail_json(msg=ex.message)
def delete_api_key(identity_client, user_id, id, module):
result = {}
changed = False
try:
api_key = _get_api_key_from_id(identity_client, user_id, id, module)
oci_utils.call_with_backoff(
identity_client.delete_api_key,
user_id=user_id,
fingerprint=api_key.fingerprint,
)
        get_logger().info("Deleted API signing key %s", id)
changed = True
        # The API key is not returned by list_api_keys after it is deleted, so we
        # reuse the earlier API key object and mark its lifecycle state as DELETED.
# Note: This current approach has problems around idempotency.
# We also don't wait, as there is no state transition that we need to wait for.
api_key.lifecycle_state = "DELETED"
result[RESOURCE_NAME] = to_dict(api_key)
except ServiceError as ex:
module.fail_json(msg=ex.message)
result["changed"] = changed
return result
def _is_api_key_active(api_keys, api_key_id):
result = [
api_key
for api_key in api_keys
if api_key.key_id == api_key_id and api_key.lifecycle_state == "ACTIVE"
]
return len(result) == 1
def create_api_key(identity_client, user_id, key, module):
try:
cakd = CreateApiKeyDetails()
cakd.key = key
result = oci_utils.create_resource(
resource_type=RESOURCE_NAME,
create_fn=identity_client.upload_api_key,
kwargs_create={"user_id": user_id, "create_api_key_details": cakd},
module=module,
)
resource = result[RESOURCE_NAME]
api_key_id = resource["key_id"]
get_logger().info("Created API signing key %s", to_dict(resource))
# API keys don't have a get<resource> and so we can't use oci_utils.create_and_wait
# The following logic manually checks if the API key in `list_api_keys` has reached the desired ACTIVE state
response = identity_client.list_api_keys(user_id)
# wait until the created API Key reaches Active state
oci.wait_until(
identity_client,
response,
evaluate_response=lambda resp: _is_api_key_active(resp.data, api_key_id),
)
result[RESOURCE_NAME] = to_dict(
_get_api_key_from_id(identity_client, user_id, api_key_id, module)
)
return result
except ServiceError as ex:
module.fail_json(msg=ex.message)
except MaximumWaitTimeExceeded as mwte:
module.fail_json(msg=str(mwte))
def main():
set_logger(oci_utils.get_logger("oci_api_key"))
module_args = oci_utils.get_common_arg_spec(
supports_create=True, supports_wait=True
)
module_args.update(
dict(
user_id=dict(type="str", required=True),
api_key_id=dict(type="str", required=False, aliases=["id"]),
api_signing_key=dict(type="str", required=False, aliases=["key"]),
state=dict(
type="str",
required=False,
default="present",
choices=["present", "absent"],
),
)
)
module = AnsibleModule(
argument_spec=module_args,
supports_check_mode=False,
required_if=[("state", "absent", ["api_key_id"])],
)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
identity_client = oci_utils.create_service_client(module, IdentityClient)
state = module.params["state"]
result = dict(changed=False)
user_id = module.params.get("user_id", None)
public_key = module.params.get("api_signing_key", None)
api_key_id = module.params.get("api_key_id", None)
if api_key_id is not None:
api_key = _get_api_key_from_id(identity_client, user_id, api_key_id, module)
if state == "absent":
            get_logger().debug(
                "Delete API signing key %s for user %s requested", api_key_id, user_id
            )
if api_key is not None:
get_logger().debug("Deleting %s", api_key.key_id)
result = delete_api_key(identity_client, user_id, api_key_id, module)
else:
get_logger().debug("API Signing Key %s already deleted.", api_key_id)
elif state == "present":
module.fail_json(msg="API signing key cannot be updated.")
else:
result = oci_utils.check_and_create_resource(
resource_type=RESOURCE_NAME,
create_fn=create_api_key,
kwargs_create={
"identity_client": identity_client,
"user_id": user_id,
"key": public_key,
"module": module,
},
list_fn=identity_client.list_api_keys,
kwargs_list={"user_id": user_id},
module=module,
model=CreateApiKeyDetails(),
create_model_attr_to_get_model_mapping={"key": "key_value"},
)
module.exit_json(**result)
if __name__ == "__main__":
main()
|
StarcoderdataPython
|
192892
|
<gh_stars>10-100
#!/usr/bin/python3
from botbase import *
_traunstein_cc = re.compile(r"([0-9.]+) Neuinfektionen")
_traunstein_c = re.compile(r"insgesamt ([0-9.]+) bestätigte Fälle")
_traunstein_g = re.compile(r"genesen gelten mindestens ([0-9.]+) Personen \(([0-9.]+) Personen mehr")
_traunstein_d = re.compile(r"insgesamt ([0-9.]+) Todesfälle")
_traunstein_dd = re.compile(r"seit (?:\w+ )*([0-9.]+|\w+) Todesmeldungen")
#_traunstein_si = re.compile(r"([0-9.]+) auf der Normalstation und ([0-9.]+) auf der Intensivstation.")
def traunstein(sheets):
soup = get_soup("https://www.traunstein.com/aktuelles")
li = next(x for x in soup.findAll("article") if "Covid-Patienten" in x.get_text())
check_date(li.find(class_="presse_datum").get_text(), "Traunstein")
link = li.find(href=True)["href"] if li else None
link = urljoin("https://www.traunstein.com/aktuelles", link)
print("Getting", link)
soup = get_soup(link)
text = soup.find(id="block-system-main").get_text(" ").strip()
#print(text)
cc = force_int(_traunstein_cc.search(text).group(1))
c = force_int(_traunstein_c.search(text).group(1))
g, gg = map(force_int, _traunstein_g.search(text).groups()) if _traunstein_g.search(text) else (None, None)
d = force_int(_traunstein_d.search(text).group(1))
dd = force_int(_traunstein_dd.search(text).group(1)) if _traunstein_dd.search(text) else None
#s, i = map(force_int, _traunstein_si.search(text).groups())
#s += i
update(sheets, 9189, c=c, cc=cc, d=d, dd=dd, g=g, gg=gg, comment="Bot ohne SI", ignore_delta="mon")
return True
schedule.append(Task(14, 0, 16, 35, 360, traunstein, 9189))
if __name__ == '__main__': traunstein(googlesheets())
|
StarcoderdataPython
|
3299959
|
<gh_stars>1-10
"""Abstract base classes and default types"""
import copy
from abc import ABC, abstractmethod
from enum import Enum
from threading import Event
from typing import List, Dict
from PyQt5 import QtWidgets, QtGui, QtCore
from PyQt5.QtWidgets import QWidget
from solarviewer.config import content_ctrl_name
from solarviewer.config.ioc import RequiredFeature
from solarviewer.ui.dialog import Ui_Dialog
from solarviewer.util import classproperty
V_ID = 0
def generateVId():
"""Viewer ID generation function."""
global V_ID
V_ID += 1
return V_ID
class DataType(Enum):
"""Default data types."""
MAP = "SunPy Map"
MAP_COMPOSITE = "SunPy Composite Map"
SERIES = "SunPy Series"
PLAIN_2D = "2D FITS"
SPECTROGRAM = "SunPy Spectrogram"
NDCUBE = "NDCube"
ANY = "Any"
NONE = ""
class ViewerType(Enum):
"""Default viewer types."""
MPL = "Matplotlib"
GINGA = "Ginga"
ANY = "Any"
class FileType(Enum):
"""Predefined file types."""
FITS = {"FITS": ["fits", "fit", "fts"]}
class ItemConfig:
"""Configuration class for menu items."""
def __init__(self):
self.menu_path = ""
self.title = ""
self.supported_data_types = []
self.supported_viewer_types = []
self.shortcut = None
self.orientation = QtCore.Qt.LeftDockWidgetArea
def setMenuPath(self, path: str) -> "ItemConfig":
"""
Set the menu path.
:param path: The path for the action in the menu. Separated by '/'.
:type path: str
:return: self
:rtype: ItemConfig
"""
self.menu_path = path
return self
def setTitle(self, title: str) -> "ItemConfig":
"""
Set the text in the window title.
:param title: The title text.
:type title: str
:return: self
:rtype: ItemConfig
"""
self.title = title
return self
def setOrientation(self, orientation: QtCore.Qt.DockWidgetArea) -> "ItemConfig":
"""
Set the alignment in the main window.
:param orientation: The Qt dock position
:type orientation: DockWidgetArea
:return: self
:rtype: ItemConfig
"""
self.orientation = orientation
return self
def setSupportedData(self, data_types: List[DataType]) -> "ItemConfig":
"""
Set the list of supported data types.
:param data_types: List of data types.
:type data_types: List[DataType]
:return: self
:rtype: ItemConfig
"""
self.supported_data_types = data_types
return self
def addSupportedData(self, data_type: DataType) -> "ItemConfig":
"""
Add data type to the supported data types.
:param data_type: The data type to add.
:type data_type: DataType
:return: self
:rtype: ItemConfig
"""
self.supported_data_types.append(data_type)
return self
def setSupportedViewer(self, viewer_types: List[ViewerType]) -> "ItemConfig":
"""
Set the list of supported viewer types.
:param viewer_types: List of viewer types.
:type viewer_types: List[ViewerType]
:return: self
:rtype: ItemConfig
"""
        self.supported_viewer_types = viewer_types
return self
def addSupportedViewer(self, viewer_type: ViewerType) -> "ItemConfig":
"""
Add viewer type to the supported viewer types.
:param viewer_type: The viewer type to add.
:type viewer_type: ViewerType
:return: self
:rtype: ItemConfig
"""
self.supported_viewer_types.append(viewer_type)
return self
def setShortcut(self, key_sequence: QtGui.QKeySequence) -> "ItemConfig":
"""
Set a shortcut to execute the associated action within the main window.
:param key_sequence: The Qt key sequence.
:type key_sequence: QKeySequence
:return: self
:rtype: ItemConfig
"""
self.shortcut = key_sequence
return self
class ToolbarConfig:
"""Configuration class for toolbars."""
def __init__(self):
self.menu_path = ""
self.supported_data_types = []
self.supported_viewer_types = []
self.orientation = QtCore.Qt.RightToolBarArea
def setMenuPath(self, path: str) -> "ToolbarConfig":
"""
Set the menu path.
:param path: The path for the action in the menu. Separated by '/'.
:type path: str
:return: self
:rtype: ToolbarConfig
"""
self.menu_path = path
return self
def setSupportedData(self, data_types: List[DataType]) -> "ToolbarConfig":
"""
Set the list of supported data types.
:param data_types: List of data types.
:type data_types: List[DataType]
:return: self
:rtype: ToolbarConfig
"""
self.supported_data_types = data_types
return self
def addSupportedData(self, data_type: DataType) -> "ToolbarConfig":
"""
Add data type to the supported data types.
:param data_type: The data type to add.
:type data_type: DataType
:return: self
:rtype: ToolbarConfig
"""
self.supported_data_types.append(data_type)
return self
def setSupportedViewer(self, viewer_types: List[ViewerType]) -> "ToolbarConfig":
"""
Set the list of supported viewer types.
:param viewer_types: List of viewer types.
:type viewer_types: List[ViewerType]
:return: self
:rtype: ToolbarConfig
"""
        self.supported_viewer_types = viewer_types
return self
def addSupportedViewer(self, viewer_type: ViewerType) -> "ToolbarConfig":
"""
Add viewer type to the supported viewer types.
:param viewer_type: The viewer type to add.
:type viewer_type: ViewerType
:return: self
:rtype: ToolbarConfig
"""
self.supported_viewer_types.append(viewer_type)
return self
def setOrientation(self, orientation: QtCore.Qt.ToolBarArea) -> "ToolbarConfig":
"""
Set the alignment in the main window.
:param orientation: The Qt dock position
:type orientation: ToolBarArea
:return: self
:rtype: ItemConfig
"""
self.orientation = orientation
return self
class ViewerConfig:
"""Configuration class for viewers."""
def __init__(self):
self.menu_path = ""
self.multi_file = False
self.file_types = FileType.FITS.value
self.required_pkg = []
def setMenuPath(self, path: str) -> "ViewerConfig":
"""
Set the menu path.
:param path: The path for the action in the menu. Separated by '/'.
:type path: str
:return: self
:rtype: ViewerConfig
"""
self.menu_path = path
return self
    def setMultiFile(self, value: bool) -> "ViewerConfig":
"""
Set the open method of the viewer.
:param value: True for selection of multiple files to create a single viewer. False if the viewer is associated with a single file.
:type value: bool
:return: self
:rtype: ViewerConfig
"""
self.multi_file = value
return self
def setFileType(self, value: Dict[str, List[str]]) -> "ViewerConfig":
"""
Set the supported file types. Use FileType for predefined types.
:param value: Dictionary of the supported files. Use {'TYPE NAME' : ['ext1', 'ext2']}
:type value: Dict[str, List[str]]
:return: self
:rtype: ViewerConfig
"""
self.file_types = value
return self
def addRequiredPackage(self, package: str) -> "ViewerConfig":
"""
Add required python packages for this viewer. The user will be asked to install these packages when the viewer opens.
:param package: The name of the python module.
:type package: str
:return: self
:rtype: ViewerConfig
"""
self.required_pkg.append(package)
return self
class DataModel(ABC):
"""Base class for viewer controller models."""
path = None
class Viewer(QtWidgets.QWidget):
"""Base class for the displayed widget."""
rendered = Event()
@abstractmethod
def updateModel(self, model: DataModel) -> None:
"""
Triggered function on data refresh.
:param model: the data to visualize
:type model: DataModel
"""
raise NotImplementedError
class Controller(ABC):
"""Base class for registering controllers."""
@classproperty
def name(cls) -> str:
"""Unique identifier for registering the controller."""
return cls.__name__
class ViewerController(ABC):
"""Base class for viewer controllers."""
_v_id: int = None
def __init__(self):
self._v_id = generateVId()
@classmethod
@abstractmethod
def fromFile(cls, file: str) -> 'ViewerController':
"""
Create new ViewerController from file.
:param file: file path to the data
:type file: str
:return: the initialized ViewerController
:rtype: ViewerController
"""
raise NotImplementedError
@classmethod
@abstractmethod
def fromModel(cls, model: DataModel) -> 'ViewerController':
"""
Create new ViewerController from existing model.
:param model: data model
:type model: DataModel
:return: the initialized ViewerController
:rtype: ViewerController
"""
raise NotImplementedError
@classproperty
@abstractmethod
def viewer_config(self) -> ViewerConfig:
"""
Create the Configuration.
Responsible for the representation in the application.
:return: the viewer configuration
:rtype: ViewerConfig
"""
raise NotImplementedError
@property
@abstractmethod
def model(self) -> DataModel:
"""
Returns the central data model of the viewer controller.
:return: The data model of the viewer controller.
:rtype: DataModel
"""
raise NotImplementedError
@property
@abstractmethod
def view(self) -> Viewer:
"""
Returns the GUI component of the viewer controller.
:return: The GUI viewer of the controller.
:rtype: Viewer
"""
raise NotImplementedError
@abstractmethod
def updateModel(self, model):
"""
Triggered action on data change. Refresh internal data model.
Use ContentController to trigger this action.
:param model: The new data model.
:type model: DataModel
"""
raise NotImplementedError
@abstractmethod
def getTitle(self) -> str:
"""
Generate title depending on the data model.
:return: The title of the viewer.
"""
raise NotImplementedError
@property
def v_id(self) -> int:
"""Unique identifier of the viewer controller"""
return self._v_id
@property
@abstractmethod
def data_type(self) -> str:
"""The data type of the viewer controller"""
raise NotImplementedError
@property
@abstractmethod
def viewer_type(self) -> str:
"""The viewer type of the viewer controller"""
raise NotImplementedError
def close(self):
"""Closes the view of the controller."""
self.view.deleteLater()
class ActionController(Controller):
"""Base class for action items."""
@property
@abstractmethod
def item_config(self) -> ItemConfig:
"""
Create the Configuration.
Responsible for the representation in the application.
:return: the menu item configuration
:rtype: ItemConfig
"""
raise NotImplementedError
@abstractmethod
def onAction(self):
"""Triggered action on selection of the menu item."""
raise NotImplementedError
class DialogController(Controller):
"""Base class for dialog items."""
content_ctrl = RequiredFeature(content_ctrl_name)
def __init__(self):
self._dlg_view = QtWidgets.QDialog()
self._dlg_view.setWindowTitle(self.item_config.title)
self._dlg_ui = Ui_Dialog()
self._dlg_ui.setupUi(self._dlg_view)
self.setupContent(self._dlg_ui.content)
self._dlg_ui.button_box.accepted.connect(self._onOk)
self._dlg_ui.button_box.rejected.connect(self._onCancel)
@property
@abstractmethod
def item_config(self) -> ItemConfig:
"""
Create the Configuration.
Responsible for the representation in the application.
:return: the menu item configuration
:rtype: ItemConfig
"""
raise NotImplementedError
@abstractmethod
def setupContent(self, content_widget):
"""
Internal basic UI setup function. Use the UI setup file here.
:param content_widget: The Qt parent widget.
:type: QWidget
"""
raise NotImplementedError
@abstractmethod
def onDataChanged(self, viewer_ctrl: ViewerController):
"""
Triggered when a new viewer is selected. Only supported data/viewer types need to be handled.
:param viewer_ctrl: The new viewer controller.
:type viewer_ctrl: ViewerController
"""
raise NotImplementedError
@abstractmethod
def modifyData(self, data_model: DataModel) -> DataModel:
"""
Triggered action on dialog accept. Execute action on data model.
:param data_model: The data model to modify.
:type data_model: DataModel
:return: The modified data model.
:rtype: DataModel
"""
raise NotImplementedError
@property
def view(self) -> QtWidgets.QDialog:
"""
Returns the dialog view.
:return: The dialog widget.
:rtype: QDialog
"""
viewer_ctrl = self.content_ctrl.getViewerController()
self.onDataChanged(viewer_ctrl)
return self._dlg_view
def _onOk(self):
"""Triggered ok action"""
viewer_ctrl = self.content_ctrl.getViewerController()
model = self.modifyData(copy.deepcopy(viewer_ctrl.model))
self.content_ctrl.setDataModel(model)
def _onCancel(self):
"""Triggered cancel action"""
pass
class ToolController(Controller):
"""Base class for tool items."""
@property
@abstractmethod
def item_config(self) -> ItemConfig:
"""
Create the Configuration.
Responsible for the representation in the application.
:return: the menu item configuration
:rtype: ItemConfig
"""
raise NotImplementedError
@property
@abstractmethod
def view(self) -> QWidget:
"""
Create the UI component of the tool controller.
:return: The central widget of the tool.
:rtype: QWidget
"""
raise NotImplementedError
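# Hedged usage sketch (not part of the original module): a hypothetical menu action plugin
# built from the base classes above; class, menu path and supported types are made up.
#
# class ExampleAction(ActionController):
#     @property
#     def item_config(self) -> ItemConfig:
#         return (ItemConfig()
#                 .setMenuPath("Tools/Example")
#                 .setTitle("Example")
#                 .addSupportedData(DataType.MAP)
#                 .addSupportedViewer(ViewerType.MPL))
#
#     def onAction(self):
#         print("example action triggered")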
|
StarcoderdataPython
|
1662647
|
<gh_stars>1-10
import os
import uuid
from typing import Dict
import pandas as pd
import matplotlib.pyplot as plt
COLUMN_OF_INTEREST = 'g:send' # to be changed to 'roundtrip' once issue is fixed
def clean_dataframe(file_path: str = 'routes.parquet') -> pd.DataFrame:
routes_df = pd.read_parquet(file_path)
# Dropping the last batch as end_time is defaulting
routes_df.drop(routes_df.index[-1], inplace=True)
columns_to_be_combined = {}
for c in routes_df.columns:
if c.endswith('-head') or c.endswith('-tail'):
new_c_name = c.replace('-head', '').replace('-tail', '')
c_combined = [_ for _ in routes_df.columns
if new_c_name in _ and not _.endswith('-head') and not _.endswith('-tail')]
try:
columns_to_be_combined[new_c_name].extend(c_combined)
except KeyError:
columns_to_be_combined[new_c_name] = []
for k, v in columns_to_be_combined.items():
routes_df[k] = routes_df[v[0]]
del routes_df[v[0]]
for i in range(1, len(v)):
routes_df[k] = routes_df[k].fillna(routes_df[v[i]])
del routes_df[v[i]]
for c in routes_df.columns:
routes_df[[f'{c}_start_time', f'{c}_end_time']] = \
pd.DataFrame(routes_df[c].tolist(), index= routes_df.index)
routes_df.drop(columns=c, inplace=True)
return routes_df
def evaluate_times(routes_df, num_docs, pod_names):
""" Evaluates different timestamps from the dataframe """
if 'gateway' in pod_names: pod_names.remove('gateway')
existing_cols = routes_df.columns
for i in range(len(pod_names) + 1):
if i == 0:
# print(f'gateway->{pod_names[i]}:send = {pod_names[i]}_start_time - gateway_start_time')
routes_df[f'gateway->{pod_names[i]}:send'] = routes_df[f'{pod_names[i]}_start_time'] - routes_df['gateway_start_time']
elif i == len(pod_names):
## This needs fix as routes_df['gateway_end_time'] are None (hence defaulting)
# print(f'{pod_names[i-1]}->gateway:send = gateway_end_time - {pod_names[i-1]}_start_time')
# routes_df[f'{pod_names[i-1]}->gateway:send'] = routes_df['gateway_end_time'] - routes_df[f'{pod_names[i-1]}_start_time']
continue
else:
# print(f'{pod_names[i-1]}->{pod_names[i]}:send = {pod_names[i]}_start_time - {pod_names[i-1]}_start_time')
routes_df[f'{pod_names[i-1]}->{pod_names[i]}:send'] = routes_df[f'{pod_names[i]}_start_time'] - \
routes_df[f'{pod_names[i-1]}_start_time']
## This needs fix as routes_df['gateway_end_time'] & routes_df['pod1_end_time'] are None (hence defaulting)
# routes_df['roundtrip'] = routes_df['gateway_end_time'] - routes_df['gateway_start_time']
columns_for_send = [c + '_start_time' for c in reversed(pod_names)] + ['gateway_start_time']
for i in range(len(columns_for_send)-1):
current_send = routes_df[columns_for_send[i]] - routes_df[columns_for_send[i+1]]
if i == 0:
total_send = current_send
else:
total_send += current_send
routes_df['g:send'] = total_send
columns_for_recv = [c + '_end_time' for c in reversed(pod_names)] # + ['gateway_end_time']
for i in range(len(columns_for_recv)-1):
current_recv = routes_df[columns_for_recv[i]] - routes_df[columns_for_recv[i+1]]
if i == 0:
total_recv = current_recv
else:
total_recv += current_recv
## This needs fix as routes_df['gateway_end_time'] is None (hence defaulting)
routes_df['g:recv'] = total_recv
## This needs fix as routes_df['gateway_end_time'] is None (hence defaulting)
# routes_df['docs/s'] = num_docs / (routes_df['roundtrip'].seconds)
columns_of_interest = list(set(routes_df.columns) - set(existing_cols))
return routes_df, columns_of_interest
def get_summary(routes_df, columns_of_interest) -> Dict:
""" Returns Stats summary of the timestamps """
summary = {}
for _ in columns_of_interest:
summary[_] = {}
summary[_]['mean'] = routes_df[_].mean().total_seconds()
summary[_]['median'] = routes_df[_].median().total_seconds()
summary[_]['std'] = routes_df[_].std().total_seconds()
summary[_]['max'] = routes_df[_].max().total_seconds()
summary[_]['min'] = routes_df[_].min().total_seconds()
summary[_]['sum'] = routes_df[_].sum().total_seconds()
return summary
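# Hedged usage sketch (not part of the original script): assuming a 'routes.parquet'
# produced by a benchmark run and hypothetical pod names ['pod0', 'pod1'], the helpers
# above would typically be chained like this before writing the report:
#
#     df = clean_dataframe('routes.parquet')
#     df, cols = evaluate_times(df, num_docs=10000, pod_names=['pod0', 'pod1'])
#     print(get_summary(df, cols))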
def write_benchmark_to_markdown(overall_summary, click_help_msg):
with open('README.template.MD') as template_md:
template_text = template_md.readlines()
print(overall_summary)
html_table_text = html_table(overall_summary_dict=overall_summary)
uuid_gen = uuid.uuid4().hex.lower()[0:10]
image_filename = plot_num_docs_vs_time(overall_summary_dict=overall_summary,
column_of_interest=COLUMN_OF_INTEREST,
uid=uuid_gen,
file_dir=os.environ['FILE_DIR']) # to be changed to gh hash
with open('README.MD', 'w') as readme_f:
readme_f.writelines(template_text)
readme_f.write('<h3> Usage </h3>\n\n')
readme_f.write(f'```\n{click_help_msg}\n```')
readme_f.write('\n\n<h3>Results</h3>\n')
readme_f.writelines(f'\n\n{html_table_text}')
        readme_f.write('\n\n\n<h3>Num docs vs Time</h3>\n\n')
        # Write the generated plot into the README (markdown image; alt text is an assumption).
        readme_f.write(f'![num-docs-vs-time]({image_filename})')
def html_table(overall_summary_dict) -> str:
table_html = ''
for num_docs, summary in overall_summary_dict.items():
table_html += pd.DataFrame(summary).loc['mean'].to_frame().rename(columns={'mean': num_docs}).T.round(3).to_html()
return table_html
def plot_num_docs_vs_time(overall_summary_dict, column_of_interest, uid, file_dir) -> str:
""" Plots num_docs (log scale) vs total time"""
x, y = [], []
for num_docs, summary in overall_summary_dict.items():
x.append(num_docs)
y.append(summary[column_of_interest]['sum'])
plt.figure(figsize=(16, 4))
plt.plot(x, y, linestyle='--', marker='o')
plt.xlabel('Number of docs indexed')
plt.ylabel(f'{column_of_interest} total time (secs)')
plt.xscale('log', base=2)
plt.tight_layout()
image_filename = f'{file_dir}/num-docs-vs-time-{uid}.svg'
plt.savefig(image_filename)
return image_filename
|
StarcoderdataPython
|
4833254
|
import pandas as pd
from random import randint
import os
import time
transactions = pd.read_csv('stream.csv', header='infer', sep=',')
output_directory = 'output'
i = 0
while True:
# number of simultaneous transactions
simultaneous = randint(1, 150)
subset = transactions.iloc[i:i+simultaneous]
file_name = 'transactions_{}_{}.csv'.format(simultaneous, str(time.time()).split('.')[0])
path = os.path.join(output_directory, file_name)
print('Exporting:', path)
subset.to_csv(path, header=False, index=False)
# waiting for more transactions
time.sleep(2)
i = i + simultaneous if len(transactions) > i + simultaneous else 0
|
StarcoderdataPython
|
3209983
|
t = int(input())
while t > 0:
    companies = input().split(' ')
    x, y = input().split(' ')
    selx = 0
    sely = 0
    for i in range(3):
        if x == companies[i]:
            selx = i
        if y == companies[i]:
            sely = i
    if selx < sely:
        print(companies[selx])
    else:
        print(companies[sely])
    t = t - 1
|
StarcoderdataPython
|
3296836
|
<filename>language/threading/13_lazy_connection.py<gh_stars>1-10
import threading
from socket import socket, AF_INET, SOCK_STREAM
from functools import partial
class LazyConnection:
def __init__(self, address, family=AF_INET, type_=SOCK_STREAM):
self.address = address
self.family = family
self.type = type_
self.local = threading.local()
def __enter__(self):
if hasattr(self.local, 'sock'):
raise RuntimeError('Already connected')
self.local.sock = socket(self.family, self.type)
self.local.sock.connect(self.address)
return self.local.sock
def __exit__(self, exc_ty, exc_val, tb):
self.local.sock.close()
del self.local.sock
def test(connection):
assert isinstance(connection, LazyConnection)
with connection as s:
print('{} Socket id: {}'.format(threading.current_thread().getName(), s))
s.send(b'GET /index.html HTTP/1.0\r\n')
s.send(b'Host: www.python.org\r\n')
s.send(b'\r\n')
resp = b''.join(iter(partial(s.recv, 8192), b''))
print('{} Got {} bytes'.format(threading.current_thread().getName(), len(resp)))
if __name__ == '__main__':
lazy_connection = LazyConnection(('www.python.org', 80),)
t1 = threading.Thread(target=test, args=(lazy_connection,))
t2 = threading.Thread(target=test, args=(lazy_connection,))
t1.start()
t2.start()
t1.join()
t2.join()
|
StarcoderdataPython
|
3383969
|
from posixpath import basename, join
from copy import copy, deepcopy
from io import BytesIO
import sys
import numpy as np
from dataflow.lib.uncertainty import Uncertainty
# Action names
__all__ = [] # type: List[str]
# Action methods
ALL_ACTIONS = [] # type: List[Callable[Any, Any]]
IS_PY3 = sys.version_info[0] >= 3
def _b(s):
if IS_PY3:
return s.encode('utf-8')
else:
return s
def _s(b):
if IS_PY3:
return b.decode('utf-8') if hasattr(b, 'decode') else b
else:
return b
def cache(action):
"""
Decorator which adds the *cached* attribute to the function.
Use *@cache* to force caching to always occur (for example, when
the function references remote resources, vastly reduces memory, or is
expensive to compute. Use *@nocache* when debugging a function
so that it will be recomputed each time regardless of whether or not it
is seen again.
"""
action.cached = True
return action
def nocache(action):
"""
    Decorator which adds the *cached* attribute (set to False) to the function.
    Use *@nocache* when debugging a function so that it will be recomputed
    each time regardless of whether or not it is seen again.
"""
action.cached = False
return action
def module(action):
"""
Decorator which records the action in *ALL_ACTIONS*.
This just collects the action, it does not otherwise modify it.
"""
ALL_ACTIONS.append(action)
__all__.append(action.__name__)
# Sort modules alphabetically
ALL_ACTIONS.sort(key=lambda action: action.__name__)
__all__.sort()
# This is a decorator, so return the original function
return action
def hidden(action):
"""
Decorator which indicates method is not to be shown in GUI
"""
action.visible = False
return action
@cache
@module
@hidden
def _LoadVSANS(filelist=None, check_timestamps=True):
"""
loads a data file into a VSansData obj and returns that.
**Inputs**
filelist (fileinfo[]): Files to open.
check_timestamps (bool): verify that timestamps on file match request
**Returns**
output (raw[]): all the entries loaded.
| 2018-04-29 <NAME>
| 2020-10-01 <NAME> adding fileinfo to metadata
"""
from dataflow.fetch import url_get
from .loader import readVSANSNexuz
if filelist is None:
filelist = []
data = []
for fileinfo in filelist:
path, mtime, entries = fileinfo['path'], fileinfo.get('mtime', None), fileinfo.get('entries', None)
name = basename(path)
fid = BytesIO(url_get(fileinfo, mtime_check=check_timestamps))
entries = readVSANSNexuz(name, fid)
for entry in entries:
if fileinfo['path'].endswith("DIV.h5"):
print('div file...')
entry.metadata['analysis.filepurpose'] = "Sensitivity"
entry.metadata['analysis.intent'] = "DIV"
entry.metadata['sample.description'] = entry.metadata['run.filename']
fi = fileinfo.copy()
fi['entries'] = [entry.metadata['entry']]
entry.metadata['fileinfo'] = fi
data.extend(entries)
return data
@nocache
@module
def LoadVSANS(filelist=None, check_timestamps=True, load_data=True):
"""
loads a data file into a VSansData obj and returns that. (uses cached values)
**Inputs**
filelist (fileinfo[]): Files to open.
check_timestamps (bool): verify that timestamps on file match request
load_data (bool): include the data in the load
**Returns**
output (raw[]): all the entries loaded.
| 2018-10-30 <NAME>
| 2020-09-30 <NAME> adding option to not load data
"""
from dataflow.calc import process_template
from dataflow.core import Template
template_def = {
"name": "loader_template",
"description": "VSANS remote loader",
"modules": [
{"module": "ncnr.vsans._LoadVSANS", "version": "0.1", "config": {}}
],
"wires": [],
"instrument": "ncnr.vsans",
"version": "0.0"
}
template = Template(**template_def)
output = []
for fi in filelist:
config = {"0": {"filelist": [fi], "check_timestamps": check_timestamps, "load_data": load_data}}
nodenum = 0
terminal_id = "output"
retval = process_template(template, config, target=(nodenum, terminal_id))
output.extend(retval.values)
return output
def addSimple(data):
"""
Naive addition of counts and monitor from different datasets,
assuming all datasets were taken under identical conditions
(except for count time)
Just adds together count time, counts and monitor.
Use metadata from first dataset for output.
**Inputs**
data (realspace[]): measurements to be added together
**Returns**
sum (realspace): sum of inputs
2019-09-22 <NAME>
"""
output = data[0].copy()
for d in data[1:]:
for detname in output.detectors:
if detname in d.detectors:
output.detectors[detname]['data'] += d.detectors[detname]['data']
output.metadata['run.moncnt'] += d.metadata['run.moncnt']
output.metadata['run.rtime'] += d.metadata['run.rtime']
#output.metadata['run.detcnt'] += d.metadata['run.detcnt']
return output
@cache
@module
def LoadVSANSHe3(filelist=None, check_timestamps=True):
"""
loads a data file into a VSansData obj and returns that.
**Inputs**
filelist (fileinfo[]): Files to open.
check_timestamps (bool): verify that timestamps on file match request
**Returns**
output (raw[]): all the entries loaded.
2018-04-29 <NAME>
"""
from dataflow.fetch import url_get
from .loader import readVSANSNexuz, he3_metadata_lookup
if filelist is None:
filelist = []
data = []
for fileinfo in filelist:
path, mtime, entries = fileinfo['path'], fileinfo.get('mtime', None), fileinfo.get('entries', None)
name = basename(path)
fid = BytesIO(url_get(fileinfo, mtime_check=check_timestamps))
entries = readVSANSNexuz(name, fid, metadata_lookup=he3_metadata_lookup)
data.extend(entries)
return data
@nocache
@module
def LoadVSANSHe3Parallel(filelist=None, check_timestamps=True):
"""
loads a data file into a VSansData obj and returns that.
**Inputs**
filelist (fileinfo[]): Files to open.
check_timestamps (bool): verify that timestamps on file match request
**Returns**
output (raw[]): all the entries loaded.
| 2018-04-29 <NAME>
| 2019-11-20 <NAME> changed metadata list
"""
from dataflow.calc import process_template
from dataflow.core import Template
template_def = {
"name": "loader_template",
"description": "VSANS remote loader",
"modules": [
{"module": "ncnr.vsans.LoadVSANSHe3", "version": "0.1", "config": {}}
],
"wires": [],
"instrument": "ncnr.vsans",
"version": "0.0"
}
template = Template(**template_def)
output = []
for fi in filelist:
#config = {"0": {"filelist": [{"path": fi["path"], "source": fi["source"], "mtime": fi["mtime"]}]}}
config = {"0": {"filelist": [fi]}}
nodenum = 0
terminal_id = "output"
retval = process_template(template, config, target=(nodenum, terminal_id))
output.extend(retval.values)
return output
@nocache
@module
def LoadVSANSDIV(filelist=None, check_timestamps=True):
"""
loads a DIV file into a VSansData obj and returns that.
**Inputs**
filelist (fileinfo[]): Files to open.
check_timestamps (bool): verify that timestamps on file match request
**Returns**
output (realspace[]): all the entries loaded.
2019-10-30 <NAME>
"""
from dataflow.fetch import url_get
from .loader import readVSANSNexuz
if filelist is None:
filelist = []
data = []
for fileinfo in filelist:
path, mtime, entries = fileinfo['path'], fileinfo.get('mtime', None), fileinfo.get('entries', None)
name = basename(path)
fid = BytesIO(url_get(fileinfo, mtime_check=check_timestamps))
entries = readVSANSNexuz(name, fid) # metadata_lookup=div_metadata_lookup)
for entry in entries:
div_entries = _loadDivData(entry)
data.extend(div_entries)
return data
def _loadDivData(entry):
from collections import OrderedDict
from .vsansdata import VSansDataRealSpace, short_detectors
div_entries = []
for sn in short_detectors:
new_detectors = OrderedDict()
new_metadata = deepcopy(entry.metadata)
detname = 'detector_{short_name}'.format(short_name=sn)
if not detname in entry.detectors:
continue
det = deepcopy(entry.detectors[detname])
data = det['data']['value']
if 'linear_data_error' in det and 'value' in det['linear_data_error']:
data_variance = np.sqrt(det['linear_data_error']['value'])
else:
data_variance = data
udata = Uncertainty(data, data_variance)
det['data'] = udata
det['norm'] = 1.0
xDim, yDim = data.shape[:2]
det['X'] = np.arange(xDim)
det['Y'] = np.arange(yDim)
det['dX'] = det['dY'] = 1
new_metadata['sample.labl'] = detname
new_detectors[detname] = det
div_entries.append(VSansDataRealSpace(metadata=new_metadata, detectors=new_detectors))
return div_entries
@module
def SortDataAutomatic(data):
"""
Sorting with algorithms to categorize all files and auto-associate
**Inputs**
data (raw[]): data files to sort, typically all of them
**Returns**
sorting_info (params): associations and metadata, by filenumber
2020-05-06 <NAME>
"""
from .categorize import SortDataAutomatic
from .vsansdata import Parameters
return Parameters(SortDataAutomatic(data))
@cache
@module
def He3_transmission(he3data, trans_panel="auto"):
"""
Calculate transmissions
**Inputs**
he3data (raw[]): datafiles with he3 transmissions
trans_panel (opt:auto|MB|MT|ML|MR|FT|FB|FL|FR): panel to use for transmissions
**Returns**
annotated (raw[]): datafiles grouped by cell
transmissions (v1d[]): 1d transmissions per cell
atomic_pols (v1d[]): 1d atomic polarizations per cell
mappings (params[]): cell parameters
| 2018-05-01 <NAME>
| 2020-07-30 <NAME> update cell name
| 2020-10-01 <NAME> add atomic_pol
"""
from .vsansdata import short_detectors, Parameters, VSans1dData, _toDictItem
import dateutil.parser
import datetime
from collections import OrderedDict
he3data.sort(key=lambda d: d.metadata.get("run.instrumentScanID", None))
BlockedBeams = OrderedDict()
for d in he3data:
filename = d.metadata.get("run.filename", "unknown_file")
if _s(d.metadata.get('analysis.intent', '')).lower().startswith('bl'):
m_det_dis_desired = int(d.metadata.get("m_det.dis_des", 0))
f_det_dis_desired = int(d.metadata.get("f_det.dis_des", 0))
num_attenuators = int(d.metadata.get("run.atten", 0))
#t_key = "{:d}_{:d}_{:d}".format(m_det_dis_desired, f_det_dis_desired, num_attenuators)
count_time = d.metadata['run.rtime']
if count_time == 0: count_time = 1
trans_counts = get_transmission_sum(d.detectors, panel_name=trans_panel)
BlockedBeams[(m_det_dis_desired, f_det_dis_desired, num_attenuators)] = OrderedDict([
("filename", filename),
("counts_per_second", trans_counts / count_time),
("middle_detector_distance", m_det_dis_desired),
("front_detector_distance", f_det_dis_desired),
("attenuators", num_attenuators),
])
mappings = OrderedDict()
previous_transmission = {}
previous_scan_id = 0
for d in he3data:
scan_id = d.metadata.get("run.instrumentScanID", 0)
cellstart = d.metadata.get("he3_back.starttime", None)
if cellstart is None:
cellstart = 0
cellstart = int(cellstart) # coerce strings
cellstartstr = "{ts:d}".format(ts=cellstart)
tend = dateutil.parser.parse(d.metadata.get("end_time", "1969")).timestamp()
count_time = d.metadata['run.rtime']
monitor_counts = d.metadata['run.moncnt']
detector_counts = get_transmission_sum(d.detectors, panel_name=trans_panel)
filename = d.metadata.get("run.filename", "unknown_file")
m_det_dis_desired = d.metadata.get("m_det.dis_des", 0)
f_det_dis_desired = d.metadata.get("f_det.dis_des", 0)
num_attenuators = d.metadata.get("run.atten", 0)
middle_timestamp = (tend - (count_time / 2.0)) # in seconds
opacity = d.metadata.get("he3_back.opacity", 0.0)
wavelength = d.metadata.get("resolution.lmda")
Te = d.metadata.get("he3_back.te", 1.0)
Mu = opacity*wavelength
mappings.setdefault(cellstartstr, {
"Insert_time": cellstart,
"Insert_datetime": datetime.datetime.fromtimestamp(cellstart/1000).ctime(),
"Cell_name": _s(d.metadata.get("he3_back.name", "unknown")),
"Te": Te,
"Mu": Mu,
"P0": None,
"Gamma": None,
"Transmissions": []
})
# assume that He3 OUT is measured before He3 IN
mapping_trans = mappings[cellstartstr]["Transmissions"]
t_key = (m_det_dis_desired, f_det_dis_desired, num_attenuators)
direction = _s(d.metadata.get("he3_back.direction", "UNPOLARIZED"))
if direction != "UNPOLARIZED" and (scan_id - previous_scan_id) == 1:
p = previous_transmission
#print('previous transmission: ', p)
#print(p.get("CellTimeIdentifier", None), tstart,
# p.get("m_det_dis_desired", None), m_det_dis_desired,
# p.get("f_det_dis_desired", None), f_det_dis_desired,
# p.get("num_attenuators", None), num_attenuators)
if p.get("CellTimeIdentifier", None) == cellstart and \
p.get("m_det_dis_desired", None) == m_det_dis_desired and \
p.get("f_det_dis_desired", None) == f_det_dis_desired and \
p.get("num_attenuators", None) == num_attenuators:
p["HE3_IN_file"] = filename
p["HE3_IN_counts"] = detector_counts
p["HE3_IN_count_time"] = count_time
p["HE3_IN_mon"] = monitor_counts
p["HE3_IN_timestamp"] = middle_timestamp
if t_key in BlockedBeams:
bb = BlockedBeams[t_key]
BlockBeamRate = bb['counts_per_second']
BlockBeam_filename = bb['filename']
else:
BlockBeamRate = 0
BlockBeam_filename = "missing"
p["BlockedBeam_filename"] = BlockBeam_filename
HE3_transmission_IN = (p["HE3_IN_counts"] - BlockBeamRate*p["HE3_IN_count_time"])/p["HE3_IN_mon"]
HE3_transmission_OUT = (p["HE3_OUT_counts"] - BlockBeamRate*p["HE3_OUT_count_time"])/p["HE3_OUT_mon"]
HE3_transmission = HE3_transmission_IN / HE3_transmission_OUT
p['transmission'] = HE3_transmission
p['atomic_pol'] = np.arccosh(HE3_transmission / (Te * np.exp(-Mu))) / Mu
mapping_trans.append(deepcopy(p))
else:
previous_transmission = {
"CellTimeIdentifier": cellstart,
"HE3_OUT_file": filename,
"HE3_OUT_counts": detector_counts,
"HE3_OUT_count_time": count_time,
"HE3_OUT_mon": monitor_counts,
"m_det_dis_desired": m_det_dis_desired,
"f_det_dis_desired": f_det_dis_desired,
"num_attenuators": num_attenuators
}
previous_scan_id = scan_id
# catch back-to-back
bb_out = _toDictItem(list(BlockedBeams.values()))
trans_1d = []
atomic_pol_1d = []
for m in mappings.values():
transmissions = []
atomic_pols = []
timestamps = []
for c in m["Transmissions"]:
t = c['transmission']
ap = c['atomic_pol']
if t > 0:
transmissions.append(t)
atomic_pols.append(ap)
timestamps.append(c['HE3_IN_timestamp'])
x = np.array(timestamps)
x0 = m['Insert_time']/1000.0
xa = (x-x0)/(3600)
dx = np.zeros_like(x)
v = np.array(transmissions)
dv = np.zeros_like(v)
va = np.array(atomic_pols)
dva = np.zeros_like(va)
if (len(timestamps) > 1):
ginv, logP = np.polyfit(xa, np.log(va), 1)
m['P0'] = np.exp(logP)
m['Gamma'] = -1/ginv
else:
m['P0'] = va[0]
ordering = np.argsort(x)
trans_1d.append(VSans1dData(x[ordering] - x0, v[ordering], dx=dx, dv=dv, xlabel="timestamp (s)", vlabel="Transmission", metadata={"title": _s(m["Cell_name"])}))
atomic_pol_1d.append(VSans1dData(xa[ordering], va[ordering], dx=dx, dv=dva, xlabel="timestamp (h)", vlabel="Atomic Polarization", metadata={"title": _s(m["Cell_name"])}))
return he3data, trans_1d, atomic_pol_1d, [Parameters({"cells": mappings, "blocked_beams": bb_out})]
def get_transmission_sum(detectors, panel_name="auto"):
from .vsansdata import short_detectors
total_counts = -np.inf
if panel_name == 'auto':
for sn in short_detectors:
detname = "detector_{sn}".format(sn=sn)
if 'data' in detectors[detname]:
counts = detectors[detname]['data']['value'].sum()
if counts > total_counts:
total_counts = counts
else:
detname = "detector_{sn}".format(sn=panel_name)
total_counts = detectors[detname]['data']['value'].sum()
return total_counts
@nocache
@module
def patch(data, patches=None):
"""
    Applies metadata patches (keyed by run.filename) to the loaded datafiles.
**Inputs**
data (raw): datafiles with metadata to patch
patches (patch_metadata[]:run.filename): patches to be applied, with run.filename used as unique key
**Returns**
patched (raw): datafiles with patched metadata
2019-07-26 <NAME>
"""
if patches is None:
return data
from jsonpatch import JsonPatch
from collections import OrderedDict
# make a master dict of metadata from provided key:
key="run.filename"
master = OrderedDict([(_s(d.metadata[key]), d.metadata) for d in data])
to_apply = JsonPatch(patches)
to_apply.apply(master, in_place=True)
return data
@nocache
@module
def sort_sample(raw_data):
"""
categorize data files
**Inputs**
raw_data (raw[]): datafiles in
**Returns**
blocked_beam (raw[]): datafiles with "blocked beam" intent
2018-04-27 <NAME>
"""
blocked_beam = [f for f in raw_data if _s(f.metadata.get('analysis.intent', '')).lower().startswith('bl')]
return blocked_beam
@nocache
@module
def calculate_XY(raw_data, solid_angle_correction=True):
"""
from embedded detector metadata, calculates the x,y,z values for each detector.
**Inputs**
raw_data (raw): raw datafiles
solid_angle_correction (bool): Divide by solid angle
**Returns**
realspace_data (realspace): datafiles with realspace information
| 2018-04-28 <NAME>
| 2019-09-19 Added monitor normalization
| 2019-09-22 Separated monitor and dOmega norm
| 2020-10-02 <NAME> ignore back detector when data missing
"""
from .vsansdata import VSansDataRealSpace, short_detectors
from collections import OrderedDict
metadata = deepcopy(raw_data.metadata)
monitor_counts = metadata['run.moncnt']
new_detectors = OrderedDict()
for sn in short_detectors:
detname = 'detector_{short_name}'.format(short_name=sn)
det = deepcopy(raw_data.detectors[detname])
dimX = int(det['pixel_num_x']['value'][0])
dimY = int(det['pixel_num_y']['value'][0])
z_offset = det.get('setback', {"value": [0.0]})['value'][0]
z = det['distance']['value'][0] + z_offset
if sn == "B":
# special handling for back detector
total = det['integrated_count']['value'][0] if 'integrated_count' in det else 0
if total < 1:
# don't load the back detector if it has no counts (turned off)
continue
beam_center_x_pixels = det['beam_center_x']['value'][0] # in pixels
beam_center_y_pixels = det['beam_center_y']['value'][0]
cal_x = det['cal_x']['value'] # in cm
cal_y = det['cal_y']['value']
x_pixel_size = cal_x[0] # cm
y_pixel_size = cal_y[0] # cm
beam_center_x = x_pixel_size * beam_center_x_pixels
beam_center_y = y_pixel_size * beam_center_y_pixels
# lateral_offset = det['lateral_offset']['value'][0] # # already cm
realDistX = 0.5 * x_pixel_size
realDistY = 0.5 * y_pixel_size
data = det['data']['value']
if 'linear_data_error' in det and 'value' in det['linear_data_error']:
data_variance = np.sqrt(det['linear_data_error']['value'])
else:
data_variance = data
udata = Uncertainty(data, data_variance)
else:
orientation = det['tube_orientation']['value'][0].decode().upper()
coeffs = det['spatial_calibration']['value']
lateral_offset = 0
vertical_offset = 0
beam_center_x = det['beam_center_x']['value'][0]
beam_center_y = det['beam_center_y']['value'][0]
panel_gap = det['panel_gap']['value'][0]/10.0 # mm to cm
if (orientation == "VERTICAL"):
x_pixel_size = det['x_pixel_size']['value'][0] / 10.0 # mm to cm
y_pixel_size = coeffs[1][0] / 10.0 # mm to cm
lateral_offset = det['lateral_offset']['value'][0] # # already cm
else:
x_pixel_size = coeffs[1][0] / 10.0
y_pixel_size = det['y_pixel_size']['value'][0] / 10.0 # mm to cm
vertical_offset = det['vertical_offset']['value'][0] # already cm
#solid_angle_correction = z*z / 1e6
data = det['data']['value']
if 'linear_data_error' in det and 'value' in det['linear_data_error']:
data_variance = np.sqrt(det['linear_data_error']['value'])
else:
data_variance = data
udata = Uncertainty(data, data_variance)
position_key = sn[-1]
if position_key == 'T':
# FROM IGOR: (q,p = 0 for lower-left pixel)
# if(cmpstr("T",detStr[1]) == 0)
# data_realDistY[][] = tube_width*(q+1/2) + offset + gap/2
# data_realDistX[][] = coefW[0][q] + coefW[1][q]*p + coefW[2][q]*p*p
realDistX = coeffs[0][0]/10.0 # to cm
realDistY = 0.5 * y_pixel_size + vertical_offset + panel_gap/2.0
elif position_key == 'B':
# FROM IGOR: (q,p = 0 for lower-left pixel)
# if(cmpstr("B",detStr[1]) == 0)
# data_realDistY[][] = offset - (dimY - q - 1/2)*tube_width - gap/2
# data_realDistX[][] = coefW[0][q] + coefW[1][q]*p + coefW[2][q]*p*p
realDistX = coeffs[0][0]/10.0
realDistY = vertical_offset - (dimY - 0.5)*y_pixel_size - panel_gap/2.0
elif position_key == 'L':
# FROM IGOR: (q,p = 0 for lower-left pixel)
# if(cmpstr("L",detStr[1]) == 0)
# data_realDistY[][] = coefW[0][p] + coefW[1][p]*q + coefW[2][p]*q*q
# data_realDistX[][] = offset - (dimX - p - 1/2)*tube_width - gap/2
realDistX = lateral_offset - (dimX - 0.5)*x_pixel_size - panel_gap/2.0
realDistY = coeffs[0][0]/10.0
elif position_key == 'R':
# FROM IGOR: (q,p = 0 for lower-left pixel)
# data_realDistY[][] = coefW[0][p] + coefW[1][p]*q + coefW[2][p]*q*q
# data_realDistX[][] = tube_width*(p+1/2) + offset + gap/2
realDistX = x_pixel_size*(0.5) + lateral_offset + panel_gap/2.0
realDistY = coeffs[0][0]/10.0
#x_pos = size_x/2.0 # place panel with lower-right corner at center of view
#y_pos = size_y/2.0 #
x0_pos = realDistX - beam_center_x # then move it the 'real' distance away from the origin,
y0_pos = realDistY - beam_center_y # which is the beam center
#metadata['det_' + short_name + '_x0_pos'] = x0_pos
#metadata['det_' + short_name + '_y0_pos'] = y0_pos
X,Y = np.indices((dimX, dimY))
X = X * x_pixel_size + x0_pos
Y = Y * y_pixel_size + y0_pos
det['data'] = udata
det['X'] = X
det['dX'] = x_pixel_size
det['Y'] = Y
det['dY'] = y_pixel_size
det['Z'] = z
det['dOmega'] = x_pixel_size * y_pixel_size / z**2
if solid_angle_correction:
det['data'] /= det['dOmega']
new_detectors[detname] = det
output = VSansDataRealSpace(metadata=metadata, detectors=new_detectors)
return output
@cache
@module
def oversample_XY(realspace_data, oversampling=3, exclude_back_detector=True):
"""
Split each pixel into subpixels in realspace
**Inputs**
realspace_data (realspace): data in XY coordinates
oversampling (int): how many subpixels to create along x and y
(e.g. oversampling=3 results in 9 subpixels per input pixel)
exclude_back_detector {exclude back detector} (bool): Skip oversampling for the back detector when true
**Returns**
oversampled (realspace): datasets with oversampled pixels
| 2019-10-29 <NAME>
"""
from .vsansdata import short_detectors
rd = realspace_data.copy()
for sn in short_detectors:
detname = 'detector_{short_name}'.format(short_name=sn)
if detname == 'detector_B' and exclude_back_detector:
continue
if not detname in rd.detectors:
continue
det = rd.detectors[detname]
X = det['X']
Y = det['Y']
dX = det['dX']
dY = det['dY']
x_min = X.min() - dX/2.0
y_min = Y.min() - dY/2.0
data = det['data']
dimX, dimY = data.shape
dimX *= oversampling
dimY *= oversampling
dX /= oversampling
dY /= oversampling
X,Y = np.indices((dimX, dimY))
X = X * dX + x_min + dX/2.0
Y = Y * dY + y_min + dY/2.0
det['data'] = np.repeat(np.repeat(data, oversampling, 0), oversampling, 1) / oversampling**2
det['X'] = X
det['dX'] = dX
det['Y'] = Y
det['dY'] = dY
det['dOmega'] /= oversampling**2
det['oversampling'] = det.get('oversampling', 1.0) * oversampling
return rd
@module
def monitor_normalize(qdata, mon0=1e8):
    """
    Given a SansData object, normalize the data to the provided monitor
**Inputs**
qdata (qspace): data in
mon0 (float): provided monitor
**Returns**
output (qspace): corrected for monitor counts
2019-09-19 <NAME>
"""
output = qdata.copy()
monitor = output.metadata['run.moncnt']
umon = Uncertainty(monitor, monitor)
for d in output.detectors:
output.detectors[d]['data'] *= mon0/umon
return output
@cache
@module
def correct_detector_sensitivity(data, sensitivity, exclude_back_detector=True):
"""
Divide by detector sensitivity
**Inputs**
data (realspace): datafile in realspace X,Y coordinates
sensitivity (realspace): DIV file
exclude_back_detector {exclude back detector} (bool): Skip correcting the back detector when true
**Returns**
div_corrected (realspace): datafiles where output is divided by sensitivity
2019-10-30 <NAME>
"""
from .vsansdata import VSansDataQSpace, short_detectors
from collections import OrderedDict
new_data = data.copy()
for detname in data.detectors:
det = new_data.detectors[detname]
div_det = sensitivity.detectors.get(detname, None)
if detname.endswith("_B") and exclude_back_detector:
continue
if div_det is not None:
det['data'] /= div_det['data']
return new_data
@nocache
@module
def calculate_Q(realspace_data):
"""
Calculates Q values (Qx, Qy) from realspace coordinates and wavelength
**Inputs**
realspace_data (realspace): datafiles in realspace X,Y coordinates
**Returns**
QxQy_data (qspace): datafiles with Q information
2018-04-27 <NAME>
"""
from .vsansdata import VSansDataQSpace, short_detectors
from collections import OrderedDict
metadata = deepcopy(realspace_data.metadata)
wavelength = metadata['resolution.lmda']
delta_wavelength = metadata['resolution.dlmda']
new_detectors = OrderedDict()
#print(r.detectors)
for sn in short_detectors:
detname = 'detector_{short_name}'.format(short_name=sn)
if not detname in realspace_data.detectors:
continue
det = deepcopy(realspace_data.detectors[detname])
X = det['X']
Y = det['Y']
z = det['Z']
r = np.sqrt(X**2+Y**2)
theta = np.arctan2(r, z)/2 #remember to convert L2 to cm from meters
q = (4*np.pi/wavelength)*np.sin(theta)
phi = np.arctan2(Y, X)
# need to add qz... and qx and qy are really e.g. q*cos(theta)*sin(alpha)...
# qz = q * sin(theta)
qx = q * np.cos(theta) * np.cos(phi)
qy = q * np.cos(theta) * np.sin(phi)
qz = q * np.sin(theta)
det['Qx'] = qx
det['Qy'] = qy
det['Qz'] = qz
det['Q'] = q
new_detectors[detname] = det
output = VSansDataQSpace(metadata=metadata, detectors=new_detectors)
return output
@cache
@module
def circular_av_new(qspace_data, q_min=None, q_max=None, q_step=None):
"""
    Calculates I vs Q from qspace coordinate data
**Inputs**
qspace_data (qspace): datafiles in qspace X,Y coordinates
q_min (float): minimum Q value for binning (defaults to q_step)
    q_max (float): maximum Q value for binning (defaults to max of q values in data)
q_step (float): step size for Q bins (defaults to minimum qx step)
**Returns**
I_Q (v1d[]): VSANS 1d data
| 2019-10-29 <NAME>
"""
from .vsansdata import short_detectors, VSans1dData
output = []
for sn in short_detectors:
detname = 'detector_{short_name}'.format(short_name=sn)
if not detname in qspace_data.detectors:
continue
det = deepcopy(qspace_data.detectors[detname])
my_q_step = (det['Qx'][1, 0] - det['Qx'][0, 0]) * det.get('oversampling', 1.0) if q_step is None else q_step
my_q_min = my_q_step if q_min is None else q_min
my_q_max = det['Q'].max() if q_max is None else q_max
q_bins = np.arange(my_q_min, my_q_max+my_q_step, my_q_step)
Q = (q_bins[:-1] + q_bins[1:])/2.0
dx = np.zeros_like(Q)
mask = det.get('shadow_mask', np.ones_like(det['Q'], dtype=np.bool))
# dq = data.dq_para if hasattr(data, 'dqpara') else np.ones_like(data.q) * q_step
I, _bins_used = np.histogram(det['Q'][mask], bins=q_bins, weights=(det['data'].x)[mask])
I_norm, _ = np.histogram(det['Q'][mask], bins=q_bins, weights=np.ones_like(det['data'].x[mask]))
I_var, _ = np.histogram(det['Q'][mask], bins=q_bins, weights=det['data'].variance[mask])
#Q_ave, _ = np.histogram(data.q, bins=q_bins, weights=data.q)
#Q_var, _ = np.histogram(data.q, bins=q_bins, weights=data.dq_para**2)
#Q_mean, _ = np.histogram(data.meanQ[mask], bins=q_bins, weights=data.meanQ[mask])
#Q_mean_lookup = np.digitize(data.meanQ[mask], bins=q_bins)
#Q_mean_norm, _ = np.histogram(data.meanQ[mask], bins=q_bins, weights=np.ones_like(data.data.x[mask]))
#ShadowFactor, _ = np.histogram(data.meanQ[mask], bins=q_bins, weights=data.shadow_factor[mask])
nonzero_mask = I_norm > 0
I[nonzero_mask] /= I_norm[nonzero_mask]
I_var[nonzero_mask] /= (I_norm[nonzero_mask]**2)
#Q_mean[Q_mean_norm > 0] /= Q_mean_norm[Q_mean_norm > 0]
#ShadowFactor[Q_mean_norm > 0] /= Q_mean_norm[Q_mean_norm > 0]
# calculate Q_var...
# remarkably, the variance of a sum of normalized gaussians
# with variances v_i, displaced from the mean center by xc_i
# is the sum of (xc_i**2 + v_i). Gaussians are weird.
# exclude Q_mean_lookups that overflow the length of the calculated Q_mean:
#Q_var_mask = (Q_mean_lookup < len(Q_mean))
#Q_mean_center = Q_mean[Q_mean_lookup[Q_var_mask]]
#Q_var_contrib = (data.meanQ[mask][Q_var_mask] - Q_mean_center)**2 + (data.dq_para[mask][Q_var_mask])**2
#Q_var, _ = np.histogram(data.meanQ[mask][Q_var_mask], bins=q_bins, weights=Q_var_contrib)
#Q_var[Q_mean_norm > 0] /= Q_mean_norm[Q_mean_norm > 0]
canonical_output = VSans1dData(Q, I, dx, np.sqrt(I_var), xlabel="Q", vlabel="I", xunits="1/Ang", vunits="arb.", xscale="log", vscale="log", metadata={"title": sn})
output.append(canonical_output)
return output
def circular_average(qspace_data):
"""
    Calculates I vs Q from qspace coordinate data
**Inputs**
qspace_data (qspace[]): datafiles in qspace X,Y coordinates
**Returns**
I_Q (v1d[]): VSANS 1d data
2018-04-27 <NAME>
"""
    from sansred.sansdata import Sans1dData
    from collections import OrderedDict
    # NOTE: the body of this function is missing here; it currently returns None.
def calculate_IQ(realspace_data):
"""
Calculates I vs Q from realspace coordinate data
**Inputs**
realspace_data (realspace[]): datafiles in qspace X,Y coordinates
**Returns**
I_Q (iqdata[]): datafiles with Q information
2018-04-27 <NAME>
"""
    from sansred.sansdata import Sans1dData
    from collections import OrderedDict
    # NOTE: the body of this function is missing here; it currently returns None.
@cache
@module
def geometric_shadow(realspace_data, border_width=4.0, inplace=False):
"""
Calculate the overlap shadow from upstream panels on VSANS detectors
Outputs will still be realspace data, but with shadow_mask updated to
include these overlap regions
**Inputs**
    realspace_data (realspace): datafiles in realspace X,Y coordinates
border_width (float): extra width (in pixels on original detector) to exclude
as a margin. Note that if the data has been oversampled, this number
still refers to original pixel widths (oversampling is divided out)
inplace (bool): do the calculation in-place, modifying the input dataset
**Returns**
    shadowed (realspace): datafiles in realspace X,Y coordinates with updated
shadow mask
2019-11-01 <NAME>
"""
detector_angles = calculate_angles(realspace_data)
if not inplace:
realspace_data = realspace_data.copy()
# assume that detectors are in decreasing Z-order
for dnum, (detname, det) in enumerate(detector_angles.items()):
rdet = realspace_data.detectors[detname]
shadow_mask = rdet.get('shadow_mask', np.ones_like(rdet['data'].x, dtype=np.bool))
for udet in list(detector_angles.values())[dnum+1:]:
#final check: is detector in the same plane?
if udet['Z'] < det['Z'] - 1:
x_min_index = int(round((udet['theta_x_min'] - det['theta_x_min'])/det['theta_x_step'] - border_width))
x_max_index = int(round((udet['theta_x_max'] - det['theta_x_min'])/det['theta_x_step'] + border_width))
y_min_index = int(round((udet['theta_y_min'] - det['theta_y_min'])/det['theta_y_step'] - border_width))
y_max_index = int(round((udet['theta_y_max'] - det['theta_y_min'])/det['theta_y_step'] + border_width))
dimX = rdet['data'].shape[0]
dimY = rdet['data'].shape[1]
x_applies = (x_min_index < dimX and x_max_index >= 0)
y_applies = (y_min_index < dimY and y_max_index >= 0)
if x_applies and y_applies:
x_min_index = max(x_min_index, 0)
x_max_index = min(x_max_index, dimX)
y_min_index = max(y_min_index, 0)
y_max_index = min(y_max_index, dimY)
shadow_mask[x_min_index:x_max_index, y_min_index:y_max_index] = False
rdet['shadow_mask'] = shadow_mask
return realspace_data
def calculate_angles(rd):
from collections import OrderedDict
from .vsansdata import short_detectors
detector_angles = OrderedDict()
for sn in short_detectors:
detname = 'detector_{short_name}'.format(short_name=sn)
if not detname in rd.detectors:
continue
det = rd.detectors[detname]
X = det['X']
dX = det['dX']
Y = det['Y']
dY = det['dY']
z = det['Z']
dobj = OrderedDict()
# small angle approximation
dobj['theta_x_min'] = X.min() / z
dobj['theta_x_max'] = X.max() / z
dobj['theta_x_step'] = dX / z
dobj['theta_y_min'] = Y.min() / z
dobj['theta_y_max'] = Y.max() / z
dobj['theta_y_step'] = dY / z
dobj['Z'] = det['Z']
detector_angles[detname] = dobj
return detector_angles
@cache
@module
def sector_cut(qspace_data, sector=[0.0, 90.0], mirror=True):
"""
Calculate an additional shadow mask for defining a sector cut
**Inputs**
qspace_data (qspace): input datafile in q-space coordinates
sector (range:sector_centered): angle and opening of sector cut (degrees)
mirror (bool): extend sector cut on both sides of origin
**Returns**
sector_masked (qspace): datafile with mask updated with angular sector cut
| 2020-11-02 <NAME>
"""
angle_offset, opening = sector
if angle_offset is None:
angle_offset = 0.0
if opening is None:
opening = 90.0
x_offset = np.cos(np.radians(angle_offset))
y_offset = np.sin(np.radians(angle_offset))
cos_theta_min = np.cos(np.radians(opening/2.0))
for detname in qspace_data.detectors:
det = qspace_data.detectors[detname]
# theta is the distance in angle from the offset_vector to the datapoints
Q_normsq = det['Qx']**2 + det['Qy']**2
nonzero = Q_normsq > 0
Q_normsq[Q_normsq == 0] = 1.0
cos_theta = (det['Qx'] * x_offset + det['Qy'] * y_offset) / np.sqrt(Q_normsq)
shadow_mask = det.get('shadow_mask', np.ones_like(det['data'].x, dtype=np.bool))
sector_mask = np.zeros_like(det['data'].x, dtype=np.bool)
sector_mask[np.logical_and(nonzero, cos_theta >= cos_theta_min)] = True
if mirror:
sector_mask[np.logical_and(nonzero, cos_theta <= -cos_theta_min)] = True
det['shadow_mask'] = np.logical_and(shadow_mask, sector_mask)
return qspace_data
@cache
@module
def top_bottom_shadow(realspace_data, width=3, inplace=True):
"""
Calculate the overlap shadow from upstream panels on VSANS detectors
Outputs will still be realspace data, but with shadow_mask updated to
include these overlap regions
**Inputs**
    realspace_data (realspace): datafiles in realspace X,Y coordinates
width (float): width to mask on the top of the L,R detectors
(middle and front). Note that if the data has been oversampled, this number
still refers to original pixel widths (oversampling is divided out)
inplace (bool): do the calculation in-place, modifying the input dataset
**Returns**
    shadowed (realspace): datafiles in realspace X,Y coordinates with updated
shadow mask
2019-11-01 <NAME>
"""
from .vsansdata import short_detectors
rd = realspace_data if inplace else realspace_data.copy()
for det in rd.detectors.values():
orientation = det.get(
'tube_orientation', {}
).get(
'value', [b"NONE"]
)[0].decode().upper()
if orientation == 'VERTICAL':
oversampling = det.get('oversampling', 1)
shadow_mask = det.get('shadow_mask', np.ones_like(det['data'].x, dtype=np.bool))
effective_width = int(width * oversampling)
shadow_mask[:,0:effective_width] = False
shadow_mask[:,-effective_width:] = False
det['shadow_mask'] = shadow_mask
return rd
|
StarcoderdataPython
|
1778894
|
#!/usr/bin/env python
from gimpfu import *
from math import pow, sqrt
from gimpcolor import RGB
def euclidean_distance(point_one, point_two):
""" Calculate the euclidean distance.
Args:
point_one (tuple)
point_two (tuple)
Returns:
float: the distance between the two points
"""
return sqrt(pow(point_two[0] - point_one[0], 2) + \
pow(point_two[1] - point_one[1], 2))
def get_maximum_distance(ref_list, dev_list):
""" Calculate the distance between two list of pixels
Args:
ref_list (list)
dev_list (list)
Returns:
float: the distance between the two list
tuple: the pixel of the dev_list
tuple: the pixel of the ref_list
"""
gimp.progress_init("Calculating distance...")
ref_pixel = (0, 0)
dev_pixel = (0, 0)
maximum_distance = float("-inf")
for index, pixel_ref_list in enumerate(ref_list):
# Update the progress bar
gimp.progress_update(float(index) / float(len(ref_list)))
minimum_distance = float("inf")
for pixel_dev_list in dev_list:
distance = euclidean_distance(pixel_ref_list, pixel_dev_list)
# Update the minimum distance
if distance < minimum_distance:
minimum_distance = distance
dev_pixel = pixel_dev_list
# Update the maximum distance
if minimum_distance > maximum_distance:
maximum_distance = minimum_distance
ref_pixel = pixel_ref_list
return maximum_distance, dev_pixel, ref_pixel
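# Hedged illustration (not part of the original plug-in): run inside GIMP, since the
# function updates the progress bar. With the toy inputs below, the farthest
# reference pixel from the dev outline is (0, 3), at distance 3.0:
#
#     d, dev_px, ref_px = get_maximum_distance([(0, 0), (0, 3)], [(0, 0)])
#     # d == 3.0, dev_px == (0, 0), ref_px == (0, 3)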
def search_pixel(layer, color, pixel, outline_pixels):
""" Search the outline pixels with a DFS
Args:
layer (gimp.Drawable): the layer over do the search
color (gimpcolor.RGB): the outline pixel's color
pixel (tuple): the pixel to control and from start a new search
outline_pixels (list): the list of the outline pixels
Returns:
list: the list of the outline pixels
"""
    # I use a `try except` to avoid exceptions that can arise
    # if the method goes through an illegal position in the
    # image (e.g. a pixel that does not exist)
try:
# Goes on in the search if the color that it has met is the target color
if RGB(*layer.get_pixel(pixel)) == color:
outline_pixels.append(pixel)
target_pixels = [
(pixel[0], pixel[1] + 1), # Up
(pixel[0] + 1, pixel[1]), # Right
(pixel[0], pixel[1] - 1), # Down
                (pixel[0] - 1, pixel[1]) # Left
]
# Searching
for target_pixel in target_pixels:
if target_pixel not in outline_pixels:
outline_pixels = search_pixel(layer, color, target_pixel, \
outline_pixels)
except Exception as e:
gimp.message("Raised exception while saving the outline pixels: " + \
str(e.message))
finally:
return outline_pixels
def get_outline_pixels_positions(image, layer, color, fill_color):
""" Create the outline and search the pixels of the outline.
Args:
image (gimp.Image): the image over we make the transformation
layer (gimp.Drawable): the layer we transformate
color (gimpcolor.RGB): the outline's color
fill_color (gimpcolor.tuple): the other color
Returns:
list: the list of the outline pixels
"""
gimp.progress_init("Searching the outline pixels for the layer...")
# Clear an eventually selection
pdb.gimp_selection_clear(image)
    # Initially I search for the first pixel colored with the target color
target_pixel = (0, 0)
found_pixel = False
for x in range(layer.width):
gimp.progress_update(float(x) / float(layer.width))
for y in range(layer.height):
if RGB(*layer.get_pixel(x, y)) == color:
target_pixel = (x, y)
found_pixel = True
# If the target color is found, then stop the search
if found_pixel:
break
# Selecting the target area
pdb.gimp_image_select_contiguous_color(image, 0, layer, \
target_pixel[0], target_pixel[1])
# Shrink the selection
pdb.gimp_selection_shrink(image, 1)
# Set the target color in the palette
pdb.gimp_context_set_foreground(RGB(
fill_color.r if fill_color.r < 1.0 else fill_color.r / 255.0,
fill_color.g if fill_color.g < 1.0 else fill_color.g / 255.0,
fill_color.b if fill_color.b < 1.0 else fill_color.b / 255.0,
fill_color.a if fill_color.a < 1.0 else fill_color.a / 255.0
))
# Fill the selection with the target color
pdb.gimp_edit_bucket_fill(layer, 0, 0, 100, 0, False, 0, 0)
gimp.progress_init("Saving the outline pixels...")
return search_pixel(layer, color, target_pixel, [])
def hausdorff_distance(image, color, fill_color):
    """ Calculate the Hausdorff distance between the outlines of the first two layers.
    Args:
        image (Image): the image to analyse
        color (RGB): the outline's color
        fill_color (RGB): the filling color
    Returns:
        float: the calculated distance, or -inf if it could not be computed
    """
# Indicates the start of the process
gimp.progress_init("Initializing Hausdorff distance...")
try:
# Outline the first layer
ref_layer = image.layers[0]
ref_layer_outline_pixels_positions_list = \
get_outline_pixels_positions(image, ref_layer, color, fill_color)
# Outline the second layer
dev_layer = image.layers[1]
dev_layer_outline_pixels_positions_list = \
get_outline_pixels_positions(image, dev_layer, color, fill_color)
# Retrieve the maxmin distance of first layer, with the two points...
ref_layer_distance, ref_pixel_one, ref_pixel_two = \
get_maximum_distance(ref_layer_outline_pixels_positions_list, \
dev_layer_outline_pixels_positions_list)
# ...and the maxmin distance and the points of the second layer.
dev_layer_distance, dev_pixel_one, dev_pixel_two = \
get_maximum_distance(dev_layer_outline_pixels_positions_list, \
ref_layer_outline_pixels_positions_list)
        # The lines pointing out the two distances are not drawn in this version;
        # only the larger of the two directed distances is reported below.
        red = (255.0, 0.0, 0.0, 255.0)  # placeholder colour for the (unimplemented) drawing step
distance = max(ref_layer_distance, dev_layer_distance)
gimp.message("The distance is: " + str(distance))
return distance
except Exception as e:
gimp.message("Unexpected error: " + str(e.message))
gimp.message("It was not possible to calculate the distance.")
return float("-inf")
register(
"python-fu-hausdorff-distance",
"AISP Hausdorff distance",
"Calculate the Hausdorff distance between two layers.",
"<NAME>",
"<NAME>",
"2017",
"Hausdorff distance",
"",
[
        (PF_IMAGE, "image", "The image on which we calculate the Hausdorff distance.", None),
(PF_COLOR, "color", "The outline's color.", (255.0, 255.0, 255.0, 255.0)),
(PF_COLOR, "fill_color", "The filling color", (0.0, 0.0, 0.0, 255.0))
],
[
(PF_FLOAT, "distance", "The calculated distance."),
],
hausdorff_distance,
menu="<Image>/Filters/",
)
if "__main__" == __name__:
main()
|
StarcoderdataPython
|
119956
|
<filename>api/artifacts.py<gh_stars>0
from flask import request
from hurry.filesize import size
from ...shared.utils.restApi import RestResource
from ...shared.connectors.minio import MinioClient
from ...shared.utils.api_utils import build_req_parser, upload_file
class Artifacts(RestResource):
delete_rules = (
dict(name="fname[]", type=str, action="append", location="args"),
)
def __init__(self):
super().__init__()
self.__init_req_parsers()
def __init_req_parsers(self):
self._parser_delete = build_req_parser(rules=self.delete_rules)
def get(self, project_id: int, bucket: str):
project = self.rpc.project_get_or_404(project_id=project_id)
c = MinioClient(project)
files = c.list_files(bucket)
for each in files:
each["size"] = size(each["size"])
return {"total": len(files), "rows": files}
def post(self, project_id: int, bucket: str):
project = self.rpc.project_get_or_404(project_id=project_id)
if "file" in request.files:
upload_file(bucket, request.files["file"], project)
return {"message": "Done", "code": 200}
def delete(self, project_id: int, bucket: str):
args = self._parser_delete.parse_args(strict=False)
project = self.rpc.project_get_or_404(project_id=project_id)
if not args.get("fname[]"):
MinioClient(project=project).remove_bucket(bucket)
else:
for filename in args.get("fname[]", ()) or ():
MinioClient(project=project).remove_file(bucket, filename)
return {"message": "Deleted", "code": 200}
|
StarcoderdataPython
|
106052
|
import requests
from urllib.parse import urljoin, urlencode
from .utils import flatten
class APIKeyMissingError(Exception):
pass
class CFLApi(object):
def __init__(self, apiKey, baseUri='http://api.cfl.ca'):
if apiKey == None:
raise APIKeyMissingError(
"An API Key is required. Request one at the following URL: "
"http://api.cfl.ca/key-request "
)
self._baseUri = baseUri
self._session = requests.Session()
self._session.params = {}
self._session.params['key'] = apiKey
def __buildParams(self, kwargs):
params = []
if 'include' in kwargs:
params.append(self.__parseIncludeParams(kwargs['include']))
if 'sort' in kwargs:
params.append(self.__parseSortParams(kwargs['sort']))
if 'filter' in kwargs:
params.append(self.__parseFilterParams(kwargs['filter']))
if 'pageNumber' in kwargs:
params.append('page[number]={num}'.format(num=kwargs['pageNumber']))
if 'pageSize' in kwargs:
params.append('page[size]={size}'.format(size=kwargs['pageSize']))
return '?' + '&'.join(params)
def __parseIncludeParams(self, args):
params = 'include='
if type(args) == list:
params += ','.join(args)
else:
params += args
return params
def __parseSortParams(self, args):
params = 'sort='
if type(args) == list:
params += ','.join(args)
else:
params += args
return params
def __parseFilterParams(self, args):
filters = []
if type(args) == list:
for f in args:
filters.append(flatten(f, 'filter'))
return '&'.join(filters)
else:
return flatten(args, 'filter')
def getGames(self, **kwargs):
url = urljoin(self._baseUri, '/v1/games')
response = self._session.get(url + self.__buildParams(kwargs))
return response.json()
def getGamesBySeason(self, season, **kwargs):
url = urljoin(self._baseUri, '/v1/games/{season}'
.format(season=season))
response = self._session.get(url + self.__buildParams(kwargs))
return response.json()
def getGame(self, season, gameId, **kwargs):
url = urljoin(self._baseUri, '/v1/games/{season}/game/{gameId}'
.format(season=season, gameId=gameId))
response = self._session.get(url + self.__buildParams(kwargs))
return response.json()
def getPlayers(self, **kwargs):
url = urljoin(self._baseUri, '/v1/players')
response = self._session.get(url + self.__buildParams(kwargs))
return response.json()
def getPlayer(self, playerId, **kwargs):
url = urljoin(self._baseUri, '/v1/players/{playerId}'
.format(playerId=playerId))
response = self._session.get(url + self.__buildParams(kwargs))
return response.json()
def getLeaders(self, season, category, **kwargs):
url = urljoin(self._baseUri, '/v1/leaders/{season}/category/{category}'
.format(season=season, category=category))
response = self._session.get(url + self.__buildParams(kwargs))
return response.json()
def getTeamLeaders(self, season, category, **kwargs):
url = urljoin(self._baseUri, '/v1/team_leaders/{season}/category/{category}'
.format(season=season, category=category))
response = self._session.get(url + self.__buildParams(kwargs))
return response.json()
def getStandings(self, season):
url = urljoin(self._baseUri, '/v1/standings/{season}'
.format(season=season))
response = self._session.get(url)
return response.json()
def getCrossoverStandings(self, season):
url = urljoin(self._baseUri, '/v1/standings/crossover/{season}'
.format(season=season))
response = self._session.get(url)
return response.json()
def getTeams(self):
url = urljoin(self._baseUri, '/v1/teams')
response = self._session.get(url)
return response.json()
|
StarcoderdataPython
|