# === ebrelsford/django-phillydata :: phillydata/opa/migrations/0001_initial.py (license: bsd-3-clause) ===
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('owners', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='AccountOwner',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(unique=True, max_length=256, verbose_name='name')),
('owner', models.ForeignKey(verbose_name='owner', blank=True, to='owners.Owner', null=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='BillingAccount',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('external_id', models.CharField(help_text='The OPA account number (also called "BRT number")', unique=True, max_length=50, verbose_name='external id')),
('property_address', models.CharField(help_text='The address of the property this account is associated with', max_length=300, null=True, verbose_name='property address', blank=True)),
('improvement_description', models.CharField(help_text='The improvement description according to OPA', max_length=300, null=True, verbose_name='improvement description', blank=True)),
('sale_date', models.DateField(help_text='The date of the last sale of this property according to the OPA', null=True, verbose_name='sale date', blank=True)),
('land_area', models.DecimalField(decimal_places=3, max_digits=20, blank=True, help_text='The land area of the property according to the OPA in square feet', null=True, verbose_name='land area (sq ft)')),
('improvement_area', models.IntegerField(help_text='The improvement area of the property according to the OPA', null=True, verbose_name='improvement area', blank=True)),
('assessment', models.DecimalField(decimal_places=2, max_digits=20, blank=True, help_text='The assessment of the property according to the OPA', null=True, verbose_name='assessment')),
('mailing_name', models.CharField(help_text='The name on the mailing address for this account.', max_length=300, null=True, verbose_name='mailing name', blank=True)),
('mailing_address', models.CharField(help_text='The mailing address for this account.', max_length=300, null=True, verbose_name='mailing address', blank=True)),
('mailing_postal_code', models.CharField(max_length=10, null=True, verbose_name='mailing postal code', blank=True)),
('mailing_city', models.CharField(max_length=50, null=True, verbose_name='mailing city', blank=True)),
('mailing_state_province', models.CharField(max_length=40, null=True, verbose_name='mailing state/province', blank=True)),
('mailing_country', models.CharField(default=b'USA', max_length=40, null=True, verbose_name='mailing country', blank=True)),
('last_updated', models.DateTimeField(auto_now=True, verbose_name='last updated')),
('account_owner', models.ForeignKey(verbose_name='account owner', blank=True, to='opa.AccountOwner', null=True)),
],
options={
},
bases=(models.Model,),
),
]
# === ncullen93/pyBN :: pyBN/inference/marginal_exact/exact_bp.py (license: mit) ===
__author__ = """N. Cullen <[email protected]>"""
from pyBN.classes.factor import Factor
from pyBN.classes.factorization import Factorization
from pyBN.utils.graph import *
from copy import deepcopy, copy
import numpy as np
import json
def exact_bp(bn, target=None, evidence=None, downward_pass=False):
"""
Perform Belief Propagation (Message Passing) over a Clique Tree. This
is sometimes referred to as the "Junction Tree Algorithm" or
the "Hugin Algorithm".
It involves an Upward Pass (see [1] pg. 353) along with a
Downward Pass (Calibration) ([1] pg. 357) if the target involves
multiple random variables - i.e. is a list.
Steps Involved:
1. Build a Clique Tree from a Bayesian Network
a. Moralize the BN
b. Triangulate the graph
c. Find maximal cliques and collapse into nodes
d. Create complete graph and make edge weights = sepset cardinality
e. Using Max Spanning Tree to create a tree of cliques
2. Assign each factor to only one clique
3. Compute the initial potentials of each clique
- multiply all of the clique's factors together
4. Perform belief propagation based on message passing protocol.
Arguments
---------
*bn* : a BayesNet object
Returns
-------
*marginal_target* : the marginal factor over *target*, read from the
calibrated root clique.
Notes
-----
"""
# 1: Moralize the graph
# 2: Triangluate
# 3: Build a clique tree using max spanning
# 4: Propagation of probabilities using message passing
# creates clique tree and assigns factors, thus satisfying steps 1-3
ctree = CliqueTree(bn) # might not be initialized?
#G = ctree.G
#cliques = copy.copy(ctree.V)
# select a clique as root where target is in scope of root
root = ctree.V[0]
if target is not None:
for v in ctree.V:
if target in ctree[v].scope:
root = v
break
clique_ordering = ctree.dfs_postorder(root=root)
# UPWARD PASS
# send messages up the tree from the leaves to the single root
for i in clique_ordering:
#clique = ctree[i]
for j in ctree.parents(i):
ctree[i] >> ctree[j]
#clique.send_message(ctree[j])
# if root node, collect its beliefs
#if len(ctree.parents(i)) == 0:
#ctree[root].collect_beliefs()
ctree[root].collect_beliefs()
marginal_target = ctree[root].marginalize_over(target)
# DOWNWARD PASS
if downward_pass == True:
# send messages down the tree from the root to the leaves
# (not needed unless *target* involves more than one variable)
new_ordering = list(reversed(clique_ordering))
for j in new_ordering:
for i in ctree.children(j):
ctree[j] >> ctree[i]
# if leaf node, collect its beliefs
if len(ctree.children(j)) == 0:
ctree[j].collect_beliefs()
return marginal_target
# beliefs hold the answers
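# --- Hedged usage sketch (not part of the original module) ---
# A minimal illustration of how exact_bp might be invoked; the loader
# function and the network/variable names below are assumptions for
# illustration, not documented pyBN facts:
#
#   bn = read_bn('earthquake.bif')              # hypothetical loader returning a BayesNet
#   marginal = exact_bp(bn, target='Burglary')  # marginal factor over 'Burglary'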
# === clody23/MToolBox :: MToolBox/mt-classifier.py (license: gpl-3.0) ===
#!/usr/bin/env python
import getopt, sys, re, os, glob, csv
from classifier import tree, NGclassify, consts, datatypes, parse_mhcs
from bioinf.seqs import SeqList
import io_modules.csv
import io_modules.old_table
import io_modules.serialize
import os.path
# folder where to find data for haplogroup classification and functional annotation
data_file = os.path.dirname(sys.argv[0])
def usage_old():
print """\nAssigns haplogroup to contigs and performs functional annotation
Options:
-i Contig file [mtDNAassembly-Contigs.fasta]
-g GMAP executable PATH [/usr/local/bin/gmap]
-D GMAP mt sequences database location [/usr/local/share/gmapdb]
-m GMAP mt sequences database [mt_mhcss]
-t GMAP threads [2]
-b basename for output files
"""
def usage():
print """\nAssigns haplogroup to contigs and performs functional annotation
Options:
-i Contig file [mtDNAassembly-Contigs.fasta]
-m MUSCLE executable PATH [/usr/local/bin/muscle]
-b basename for output files
-s file with most reliable haplogroup prediction
"""
def pickle_csv(csvfile, pickle_fname=None):
tree_file = csv.reader(open(csvfile, 'rb'))
if pickle_fname is None:
pickle_fname = csvfile + '.pickle'
aplo_list = io_modules.csv.parse_csv(tree_file)
htree = tree.HaplogroupTree(aplo_list=aplo_list)
pickle_file = open(pickle_fname, 'wb')
pickle_file.write(htree.serialize())
def write_old_table(pickle_fname, out_fname):
htree = tree.HaplogroupTree(pickle_data=open(pickle_fname, 'rb').read())
fh = csv.writer(open(out_fname, 'wb'))
for haplo_name in htree:
io_modules.old_table.write_haplogroup(fh, '', htree[haplo_name])
def parse_gmapf9_line(line):
parsed = line.split('\t')
last_field = re.findall(r"[\w']+", parsed[2])
seq_nuc = parsed[1].partition(' ')[2]
seq_index = parsed[1].partition(' ')[0]
ref_pos = int(last_field[1])
ref_nuc = parsed[2][-1]
return ref_pos, ref_nuc, seq_nuc, seq_index
def parse_gmapf9_file(inhandle):
contigs_mappings = [[]]
h = inhandle.readlines()
c = 0
mutations = []
while c < len(h):
# end coordinate of last contig
if c == len(h)-1:
contigs_mappings[-1].append(parse_gmapf9_line(h[c])[0])
if h[c][0] != '>':
ref_pos, ref_nuc, seq_nuc, seq_index = parse_gmapf9_line(h[c])
# insertion
if ref_nuc == ' ' and seq_nuc != ' ':
# gmap assigns the position of the next nucleotide to the insertion
pos_ins = ref_pos - 1
ins = [seq_nuc]
c += 1
ref_pos, ref_nuc, seq_nuc, seq_index = parse_gmapf9_line(h[c])
while c < len(h) and (ref_nuc == ' ' and seq_nuc != ' '):
ins.append(seq_nuc)
c += 1
ref_pos, ref_nuc, seq_nuc, seq_index = parse_gmapf9_line(h[c])
mut = datatypes.Insertion("%d.%s" % (pos_ins, ''.join(ins)))
mutations.append(mut)
#print "%d.%s" % (pos_ins, ''.join(ins))
# deletion
elif ref_nuc != ' ' and seq_nuc == ' ':
pos_del = ref_pos
c += 1
ref_pos, ref_nuc, seq_nuc, seq_index = parse_gmapf9_line(h[c])
while c < len(h) and (ref_nuc != ' ' and seq_nuc == ' '):
c += 1
ref_pos, ref_nuc, seq_nuc, seq_index = parse_gmapf9_line(h[c])
if pos_del == ref_pos-1:
print "%dd" % (pos_del)
mut = datatypes.Deletion("%dd" % pos_del)
mutations.append(mut)
else:
print "%d-%dd" % (pos_del, ref_pos-1)
mut = datatypes.Deletion("%d-%dd" % (pos_del, ref_pos-1))
mutations.append(mut)
# mismatch
elif ref_nuc != seq_nuc:
if seq_nuc != 'N':
# Transition
if (ref_nuc in consts.PUR and seq_nuc in consts.PUR) or (ref_nuc in consts.PYR and seq_nuc in consts.PYR):
print "%d%s" % (ref_pos, seq_nuc)
mut = datatypes.Transition(ref_pos)
mutations.append(mut)
# Transversion
if (ref_nuc in consts.PUR and seq_nuc in consts.PYR) or (ref_nuc in consts.PYR and seq_nuc in consts.PUR):
mut = datatypes.Transversion("%d%s" % (ref_pos, seq_nuc))
mutations.append(mut)
c += 1
else:
c += 1
else:
# first contig
if len(contigs_mappings) == 1 and len(contigs_mappings[-1]) == 0:
contigs_mappings[-1].append(parse_gmapf9_line(h[c+1])[0])
# all the others
else:
contigs_mappings[-1].append(parse_gmapf9_line(h[c-1])[0])
contigs_mappings.append([parse_gmapf9_line(h[c+1])[0]])
c += 1
# don't know if contig coordinate sorting is needed but I'll do anyway
contigs_mappings.sort()
return mutations, contigs_mappings
def merge_tables(f, g, h):
fgh = f + g + h
mergedlist = []
for jj in fgh:
if jj not in mergedlist:
mergedlist.append(jj)
o = []
o.append(["", "RSRS", "MHCS", "rCRS"])
y = "yes"
n = ""
for i in mergedlist:
if i in f and i in g and i in h:
o.append([i.pprint(),y,y,y])
elif i in f and i in g:
o.append([i.pprint(),y,y,n])
elif i in f and i in h:
o.append([i.pprint(),y,n,y])
elif i in g and i in h:
o.append([i.pprint(),n,y,y])
elif i in f:
o.append([i.pprint(),y,n,n])
elif i in g:
o.append([i.pprint(),n,y,n])
elif i in h:
o.append([i.pprint(),n,n,y])
return o
def align_sequence(muscle_exe, sequence, rif=None):
	"""sequence is a datatypes.Sequence; rif is the reference Sequence
	(defaults to RSRS when None)."""
if rif is None:
rif = datatypes.Sequence('RSRS', consts.RCRS)
seq_diff = NGclassify.SequenceDiff()
#print "Aligning sequence %s" % sequence.name
seq_diff.gen_diff(muscle_exe, rif, datatypes.Sequence(sequence.name, str(sequence)))
#print "-"*30
return seq_diff
def h_analysis(htrees, seq_diff, regions, mhcs_dict):
a = NGclassify.Classify()
#print "Classification of sequence %s" % seq_diff.obj.name
for htree, name in htrees:
print "Classification according to tree:", name
a.classify_by_tree(htree, seq_diff, regions)
#print "start is ", seq_diff.start
#print "end is ", seq_diff.end
#print "haplo_stats: ", a.haplo_stats
print "genome_state is ", a.get_genome_state()
(haplo_stats_sorted, haplo_best) = a.prediction_sorting()
print haplo_best
#print "haplo_stats_sorted is:\n", haplo_stats_sorted
print "="*20
#print "haplo_best is: ", haplo_best
#print "finding MHCS for sequence %s" % seq_diff.obj.name
mhcss = a.get_mhcss(mhcs_dict)
#print "MHCS ID for sequence %s is %s" % (seq_diff.obj.name, ','.join(list(mhcss)))
# PROVA PRINT
# print "stat_list is:"
# print type(a.__dict__['stat_list'])
#print a.__dict__
print '-'*30
#print a.seq_diff.obj.name
#print a.haplo_stats
#pdb.set_trace()
return a
def load_sequences(fname):
a = SeqList()
a.load_file(fname)
print "Loaded %d contig sequences" % len(a)
return a
def write_output(class_obj, seq_diff, seq_diff_mhcs, seq_diff_rcrs, merged_tables, outfile):
print "Writing results for sequence %s" % outfile
class_obj.pprint(open(outfile + '.csv', 'w'))
class_obj.pprint_sorted(open(outfile + '.sorted.csv', 'w'))
#seq_diff.print_alg(open(outfile + '_alg.txt','w'))
#seq_diff.pprint(open(outfile + '_diff.txt','w'))
#seq_diff_mhcs.pprint(open(outfile + '_mhcs_diff.txt','w'))
#seq_diff_mhcs.print_alg(open(outfile + '_mhcs_alg.txt','w'))
#seq_diff_rcrs.pprint(open(outfile + '_rcrs_diff.txt','w'))
#seq_diff_rcrs.print_alg(open(outfile + '_rcrs_alg.txt','w'))
merged_tables_file = open(outfile + '_merged_diff.csv', 'w')
for row in merged_tables:
merged_tables_file.write(','.join(row)+'\n')
def main_mt_hpred():
try:
opts, args = getopt.getopt(sys.argv[1:], "hi:m:b:s:")
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit()
#print opts, args
contig_file = 'mtDNAassembly-contigs.fasta'
muscle_exe='/usr/local/bin/muscle'
basename='mtDNAassembly-contigs'
best_results_file = 'mt_classification_best_results.csv'
#print opts
for o,a in opts:
#print "option", o, "argument", a
if o == "-h":
usage()
sys.exit()
elif o == "-i": contig_file = a
elif o == "-m": muscle_exe = a
elif o == "-b": basename = a
elif o == "-s": best_results_file = a
else:
assert False, "Unhandled option."
print "Your best results file is ", best_results_file
# sample name
f = os.path.abspath(contig_file)
#sample_name = f.split('/')[-2].split('_')[-1]
sample_name = contig_file.split('-')[0]
# haplogroup tree parsing
htrees = [(tree.HaplogroupTree(pickle_data=open(data_file + '/data/phylotree_r16.pickle', 'rb').read()), data_file + '/data/phylotree_r16.pickle')]
# mhcs parsing
mhcs_dict = parse_mhcs.parse2mhcs_dict(data_file + '/data/mhcs.tab')
print "\nLoading contig sequences from file %s" % contig_file
contig_array = load_sequences(contig_file)
contig_array_seqdiff = [] # lista di liste
contig_total_seqdiff = [] # lista di varianti
contig_array_mappings = []
print "\nAligning Contigs to mtDNA reference genome...\n"
# update each contig's SeqDiff
for x,contig in enumerate(contig_array):
if x == 0:
contig_seq_diff = align_sequence(muscle_exe, contig)
contig_seq_diff.find_segment() # avoid having long gaps at 5' and 3' (not actual gaps but due to the alignment)
contig_seq_diff.regions.append([contig_seq_diff.start, contig_seq_diff.end])
else:
incoming_seqdiff = align_sequence(muscle_exe, contig)
incoming_seqdiff.find_segment()
contig_seq_diff.diff_list.extend(incoming_seqdiff.diff_list)
contig_seq_diff.regions.append([incoming_seqdiff.start, incoming_seqdiff.end])
print "\nSequence haplogroup assignment\n"
seq_classify = h_analysis(htrees, contig_seq_diff, contig_seq_diff.regions, mhcs_dict)
seq_classify.sample_name = sample_name
#print "\nSequence functional annotation\n"
print "Contig alignment to MHCS and rCRS"
m = list(seq_classify.mhcss)[0]
print "Aligning contigs to MHCS SeqDiff object"
its_mhcs = datatypes.Sequence(m, mhcs_dict[m])
#contig_mhcs_total_seqdiff = []
for x, contig in enumerate(contig_array):
if x == 0:
contig_mhcs_seq_diff = align_sequence(muscle_exe, contig, its_mhcs)
contig_mhcs_seq_diff.find_segment()
contig_mhcs_seq_diff.regions.append([contig_seq_diff.start, contig_seq_diff.end])
else:
incoming_mhcs_seqdiff = align_sequence(muscle_exe, contig, its_mhcs)
incoming_mhcs_seqdiff.find_segment()
contig_mhcs_seq_diff.diff_list.extend(incoming_mhcs_seqdiff.diff_list)
contig_mhcs_seq_diff.regions.append([incoming_mhcs_seqdiff.start, incoming_mhcs_seqdiff.end])
print "rCRS SeqDiff object"
rcrs = datatypes.Sequence('rCRS', consts.rcrs)
#contig_rcrs_total_seqdiff = []
for x, contig in enumerate(contig_array):
if x == 0:
contig_rcrs_seq_diff = align_sequence(muscle_exe, contig, rcrs)
contig_rcrs_seq_diff.find_segment()
contig_rcrs_seq_diff.regions.append([contig_seq_diff.start, contig_seq_diff.end])
else:
incoming_rcrs_seqdiff = align_sequence(muscle_exe, contig, rcrs)
incoming_rcrs_seqdiff.find_segment()
contig_rcrs_seq_diff.diff_list.extend(incoming_rcrs_seqdiff.diff_list)
contig_rcrs_seq_diff.regions.append([incoming_rcrs_seqdiff.start, incoming_rcrs_seqdiff.end])
# try gathering diff from reference sequences
#print "type(seq_diff) is", type(seq_diff.diff_list)
print "Merging seq_diffs..."
mergedtables = merge_tables(contig_seq_diff.diff_list, contig_mhcs_seq_diff.diff_list, contig_rcrs_seq_diff.diff_list)
#print mergedtables
# OUTPUTS
write_output(seq_classify, contig_seq_diff.diff_list, contig_mhcs_seq_diff.diff_list, contig_rcrs_seq_diff.diff_list, mergedtables, basename)
#open(os.path.join(folder,'mt_classification_best_results'), 'a').write(','.join([seq_diff.obj.name, ';'.join([i[0] for i in class_obj.haplo_best.items()])])+'\n')
#open(os.path.join('../', best_results_file), 'a').write(','.join([seq_classify.sample_name, ';'.join([i[0] for i in seq_classify.haplo_best.items()])])+'\n')
open(os.path.join('../', best_results_file), 'a').write(','.join([basename, ';'.join([i[0] for i in seq_classify.haplo_best.items()])])+'\n')
#align_cmd = '%s -D %s -d %s -c chrRSRS -f 9 -B 5 -t 2 %s > %s.coords' % (gmapexe, gmapdb, mtdb, contig_file, basename)
#print align_cmd
# os.system(align_cmd) DON'T YOU FORGET ABOUT ME!!!
# Parsing gmap output
#mutations, contigs_mappings = parse_gmapf9_file(open("%s.coords" % basename, 'r'))
#print "mutations, ", mutations
#print "contig mappings: "
#for i in contigs_mappings:
# print i
if __name__ == "__main__":
main_mt_hpred()
# path = os.getcwd()
# for infile in glob.glob(os.path.join(path, 'OUT_*')):
# main_mt_hpred()
# print "\nHERE COMES THE FUNCTIONAL ANNOTATION...\n"
# path = os.getcwd()
# for infile in glob.glob(os.path.join(path, folder, '*', '*_merged_diff.csv')):
# (PATH, FILENAME) = os.path.split(infile)
# print infile
# diff_file = infile
# file_file = os.path.join(data_file, 'patho_table.txt')
# site_file = os.path.join(data_file, 'sitevar_modified.txt')
# bestres_file = os.path.join(path, 'mt_classification_best_results')
# haptab_file = os.path.join(data_file, 'haplogroups.txt')
# variants_functional_annotation.main_functional_analysis(diff_file, file_file, site_file, bestres_file, haptab_file, PATH, FILENAME)
# === ianrust/coinbase_autotrader :: automated_bittrader.py (license: mit) ===
import json,urllib2,csv,time,smtplib,string,os
os.chdir('/home/ian/Documents')
# Buy and sell urls
sell_url = "https://coinbase.com/api/v1/sells"
buy_url = "https://coinbase.com/api/v1/buys"
sell_price_url = "https://coinbase.com/api/v1/prices/sell"
buy_price_url = "https://coinbase.com/api/v1/prices/buy"
headers = {'content-type': 'application/json'}
price_payload={'qty':1.0}
# gmail login info
gmailUser='[email protected]'
gmailPassword='' # password omitted *facepalm*
#function for interacting with coinbase
def req_and_ret(url,req_input,header,url_type='GET'):
if url_type=='POST':
url = urllib2.Request(url, json.dumps(req_input), header)
f = urllib2.urlopen(url)
json_response = f.read()
list_response = json.loads(json_response)
f.close()
return list_response,json_response
#Reading in current state
with open('trader_state.csv','r') as trader_state:
trader_state_csv=csv.reader(trader_state,delimiter=',')
for line in trader_state_csv:
if line[0]=='api_key':
vars()[line[0]]=line[1]
else:
vars()[line[0]]=float(line[1])
trader_state.close()
#Get Current Bitcoin Prices for buy/sell
buy_price_response,throwaway = req_and_ret(buy_price_url,price_payload,headers)
buy_price=buy_price_response['subtotal']['amount']
sell_price_response,throwaway = req_and_ret(sell_price_url,price_payload,headers)
sell_price=sell_price_response['subtotal']['amount']
# Assembling Message
transaction_payload = {'api_key':api_key,'qty':amount_to_trade}
# Decide to make transaction
transaction_type=''
make_transaction=False
current_unix_time=time.time()
if current_unix_time-time_between_transactions>last_transaction_time:
#decide on type of transaction
if coins==amount_to_trade and sell_price>=(1.0+percent_swing)*last_price:
transaction_type='sell'
make_transaction=True
elif coins==0 and buy_price<=(1.0-percent_swing)*last_price:
transaction_type='buy'
make_transaction=True
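# Worked illustration of the swing thresholds (values hypothetical): with
# last_price = 100 and percent_swing = 0.02, a sell triggers once sell_price
# reaches 102.0 (100 * 1.02) and a buy triggers once buy_price drops to
# 98.0 (100 * 0.98).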
#transact
success=False
transaction_response={'success':False} # a real bool; the string 'False' would be truthy
trans_resp_string=''
last_price_new=last_price
coins_new=coins
if make_transaction:
if transaction_type=='sell':
transaction_response,trans_resp_string=req_and_ret(sell_url,transaction_payload,headers,'POST')
coins_new=0
last_price_new=sell_price
else:
transaction_response,trans_resp_string=req_and_ret(buy_url,transaction_payload,headers,'POST')
coins_new=amount_to_trade
last_price_new=buy_price
success=transaction_response['success']
errors=''
if not success:
	# use .get(): when no transaction was attempted the stub response above
	# has no 'errors' key and plain indexing would raise KeyError
	errors=transaction_response.get('errors','')
# if there are problems, send an email to Ian Rust. Likewise, if there is a successful transaction, tell Ian Rust
subject=""
to_addr="[email protected]"
from_addr="[email protected]"
text=''
mailServer = smtplib.SMTP('smtp.gmail.com', 587)
mailServer.ehlo()
mailServer.starttls()
mailServer.ehlo()
mailServer.login(gmailUser, gmailPassword)
if make_transaction:
if not success:
subject="Got Problems With Your Bitcoin Trader"
text="Hello Sir \n\n I just had trouble making an api based "+transaction_type+" bitcoin transaction on coinbase. Coinbase gave the following error: \r\n "+str(errors)+"\r\n You have 1 day from the time these email was sent to fix the problem. \n\n Yours Truly, \n\n RPI BitTrader \r\n PS This is the whole response: \r\n" +str(trans_resp_string)
else:
subject="Successful "+transaction_type+" On the Part of Your Bitcoin Trader"
text="Hello Sir \n\n I just made a "+transaction_type+" order successfully on coinbase. \r\n The price was "+str(last_price)+" for "+str(amount_to_trade)+"BTC \n\n Yours Truly, \n\n RPI BitTrader"
body=string.join(("From: %s" % from_addr,"To: %s" % to_addr,"Subject: %s" % subject ,"",text), "\r\n")
mailServer.sendmail(from_addr, [to_addr], body)
mailServer.close()
# record the state
with open('trader_state.csv','w') as trader_state:
last_transaction_time_towrite=last_transaction_time
last_price_towrite=last_price
coins_towrite=coins
if make_transaction and success:
last_transaction_time_towrite=current_unix_time
last_price_towrite=last_price_new
coins_towrite=coins_new
trader_state.write('last_price,'+str(last_price_towrite)+'\nlast_transaction_time,'+str(int(last_transaction_time_towrite))+'\ncoins,'+str(coins_towrite)+'\namount_to_trade,'+str(amount_to_trade)+'\npercent_swing,'+str(percent_swing)+'\ntime_between_transactions,'+str(time_between_transactions)+'\napi_key,'+str(api_key)+'\nlast_check_time,'+str(int(current_unix_time)))
# === drcoms/jlu-drcom-client :: jlu-drcom-py3/newclinet-py3.py (license: agpl-3.0) ===
#!/usr/bin/env python
# coding: utf-8
# license: AGPL-V3
import re
import socket
import struct
import time
from hashlib import md5
import sys
import os
import random
import platform
# CONFIG
server = '10.100.61.3'
username = b'XXXXX'  # username
password = b'XXXXX'  # password
host_ip = '100.100.100.100'  # IP address
mac = 0x112288776655  # MAC address
host_name = b'YOURPCNAME'  # hostname
host_os = b'Windows 10'  # operating system
CONTROLCHECKSTATUS = b'\x20'
ADAPTERNUM = b'\x03'
IPDOG = b'\x01'
PRIMARY_DNS = '10.10.10.10'
dhcp_server = '0.0.0.0'
AUTH_VERSION = b'\x68\x00'
KEEP_ALIVE_VERSION = b'\xdc\x02'
nic_name = ''  # Indicate your nic, e.g. 'eth0.2'
bind_ip = '0.0.0.0'
# CONFIG_END
keep_alive_times = 0
class ChallengeException (Exception):
def __init__(self):
pass
class LoginException (Exception):
def __init__(self):
pass
def bind_nic():
try:
import fcntl
def get_ip_address(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', ifname[:15])
)[20:24])
return get_ip_address(nic_name)
    except ImportError:
        print('The indicate-nic feature needs to run on a Unix-based system.')
        return '0.0.0.0'
    except IOError:
        print(nic_name + ' is unacceptable!')
        return '0.0.0.0'
    # note: the original ended with `finally: return '0.0.0.0'`, which
    # unconditionally discarded the address just found; removed so the
    # real interface address can be returned
if nic_name != '':
bind_ip = bind_nic()
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind((bind_ip, 61440))
s.settimeout(3)
SALT = ''
IS_TEST = True
# specified fields based on version
CONF = "/etc/drcom.conf"
UNLIMITED_RETRY = True
EXCEPTION = False
DEBUG = False # log saves to file
LOG_PATH = '/var/log/drcom_client.log'
if IS_TEST:
DEBUG = True
LOG_PATH = 'drcom_client.log'
def log(*args, **kwargs):
    print(*args, **kwargs)
    if DEBUG and platform.uname().system != 'Windows':
        with open(LOG_PATH, 'a') as f:
            # join the printed args; the original wrote the unrelated global socket `s`
            f.write(' '.join(str(a) for a in args) + '\n')
def challenge(svr, ran):
while True:
t = struct.pack("<H", int(ran) % (0xFFFF))
s.sendto(b"\x01\x02" + t + b"\x09" + b"\x00"*15, (svr, 61440))
try:
data, address = s.recvfrom(1024)
log('[challenge] recv', data.hex())
except:
log('[challenge] timeout, retrying...')
continue
if address == (svr, 61440):
break
else:
log(f"Wrong address: {address}")
exit()
log('[DEBUG] challenge:\n' + data.hex())
if data[0] != 2:
raise ChallengeException
log('[challenge] challenge packet sent.')
return data[4:8]
def md5sum(s):
m = md5()
m.update(s)
return m.digest()
def dump(n):
s = '%x' % n
if len(s) & 1:
s = '0' + s
return bytes.fromhex(s)
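# dump() illustration (worked by hand from the code above): dump(0x1A2B)
# yields the two bytes 1a 2b; an odd-length hex string is zero-padded on
# the left, so dump(0xABC) yields the bytes 0a bc.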
def ror(md5 : bytes, pwd : bytes):
ret = b''
for i in range(len(pwd)):
x = md5[i] ^ pwd[i]
ret += (((x << 3) & 0xFF) + (x >> 5)).to_bytes(1, 'big')
return ret
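# ror() illustration: each byte x = md5[i] ^ pwd[i] is mapped to
# ((x << 3) & 0xFF) + (x >> 5), i.e. a rotate-left-by-3 of the XOR byte.
# Worked by hand: x = 0x01 -> 0x08, x = 0x80 -> 0x04.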
def keep_alive_package_builder(number, random, tail: bytes, type=1, first=False):
data = b'\x07' + number.to_bytes(1, 'big') + b'\x28\x00\x0b' + type.to_bytes(1, 'big')
if first:
data += b'\x0f\x27'
else:
data += KEEP_ALIVE_VERSION
data += b'\x2f\x12' + b'\x00' * 6
data += tail
data += b'\x00' * 4
#data += struct.pack("!H",0xdc02)
if type == 3:
foo = b''.join([int(i).to_bytes(1, 'big') for i in host_ip.split('.')]) # host_ip
# CRC
# edited on 2014/5/12, filled zeros to checksum
# crc = packet_CRC(data+foo)
crc = b'\x00' * 4
#data += struct.pack("!I",crc) + foo + b'\x00' * 8
data += crc + foo + b'\x00' * 8
else: # packet type = 1
data += b'\x00' * 16
return data
def keep_alive2(*args):
tail = b''
packet = b''
svr = server
ran = random.randint(0, 0xFFFF)
ran += random.randint(1, 10)
# 2014/10/15 add by latyas, maybe svr sends back a file packet
svr_num = 0
packet = keep_alive_package_builder(svr_num, dump(ran), b'\x00'*4, 1, True)
while True:
log('[keep-alive2] send1', packet.hex())
s.sendto(packet, (svr, 61440))
data, address = s.recvfrom(1024)
log('[keep-alive2] recv1', data.hex())
if data.startswith(b'\x07\x00\x28\x00') or data.startswith(b'\x07' + svr_num.to_bytes(1, 'big') + b'\x28\x00'):
break
elif data[0] == 0x07 and data[2] == 0x10:
log('[keep-alive2] recv file, resending..')
svr_num = svr_num + 1
packet = keep_alive_package_builder(
svr_num, dump(ran), b'\x00'*4, 1, False)
else:
log('[keep-alive2] recv1/unexpected', data.hex())
#log('[keep-alive2] recv1',data.hex())
ran += random.randint(1, 10)
packet = keep_alive_package_builder(svr_num, dump(ran), b'\x00' * 4, 1, False)
log('[keep-alive2] send2', packet.hex())
s.sendto(packet, (svr, 61440))
while True:
data, address = s.recvfrom(1024)
if data[0] == 7:
svr_num = svr_num + 1
break
else:
log('[keep-alive2] recv2/unexpected', data.hex())
log('[keep-alive2] recv2', data.hex())
tail = data[16:20]
ran += random.randint(1, 10)
packet = keep_alive_package_builder(svr_num, dump(ran), tail, 3, False)
log('[keep-alive2] send3', packet.hex())
s.sendto(packet, (svr, 61440))
while True:
data, address = s.recvfrom(1024)
if data[0] == 7:
svr_num = svr_num + 1
break
else:
log('[keep-alive2] recv3/unexpected', data.hex())
log('[keep-alive2] recv3', data.hex())
tail = data[16:20]
log("[keep-alive2] keep-alive2 loop was in daemon.")
i = svr_num
while True:
try:
ran += random.randint(1, 10)
packet = keep_alive_package_builder(i, dump(ran), tail, 1, False)
#log('DEBUG: keep_alive2,packet 4\n',packet.hex())
log('[keep_alive2] send', str(i), packet.hex())
s.sendto(packet, (svr, 61440))
data, address = s.recvfrom(1024)
log('[keep_alive2] recv', data.hex())
tail = data[16:20]
#log('DEBUG: keep_alive2,packet 4 return\n',data.hex())
ran += random.randint(1, 10)
packet = keep_alive_package_builder(i+1, dump(ran), tail, 3, False)
#log('DEBUG: keep_alive2,packet 5\n',packet.hex())
s.sendto(packet, (svr, 61440))
log('[keep_alive2] send', str(i+1), packet.hex())
data, address = s.recvfrom(1024)
log('[keep_alive2] recv', data.hex())
tail = data[16:20]
#log('DEBUG: keep_alive2,packet 5 return\n',data.hex())
i = (i+2) % 0xFF
time.sleep(20)
keep_alive1(*args)
except:
continue
def checksum(s):
ret = 1234
for i in re.findall(b'....', s):
ret ^= int(i[::-1].hex(), 16)
ret = (1968 * ret) & 0xffffffff
return struct.pack('<I', ret)
def mkpkt(salt, usr, pwd, mac):
data = b'\x03\x01\x00'+ (len(usr)+20).to_bytes(1, 'big')
data += md5sum(b'\x03\x01'+salt+pwd)
data += usr.ljust(36, b'\x00')
data += CONTROLCHECKSTATUS
data += ADAPTERNUM
data += dump(int(data[4:10].hex(), 16) ^
mac).rjust(6, b'\x00') # mac xor md51
data += md5sum(b"\x01" + pwd + salt + b'\x00'*4) # md52
data += b'\x01' # number of ip
data += b''.join([int(x).to_bytes(1,'big') for x in host_ip.split('.')])
data += b'\x00'*4 # your ipaddress 2
data += b'\x00'*4 # your ipaddress 3
data += b'\x00'*4 # your ipaddress 4
data += md5sum(data + b'\x14\x00\x07\x0b')[:8] # md53
data += IPDOG
data += b'\x00'*4 # delimeter
data += host_name.ljust(32, b'\x00')
data += b''.join([ int(i).to_bytes(1, 'big') for i in PRIMARY_DNS.split('.')]) # primary dns
data += b''.join([ int(i).to_bytes(1, 'big') for i in dhcp_server.split('.')]) # DHCP dns
data += b'\x00\x00\x00\x00' # secondary dns:0.0.0.0
data += b'\x00' * 8 # delimeter
data += b'\x94\x00\x00\x00' # unknow
data += b'\x06\x00\x00\x00' # os major
data += b'\x02\x00\x00\x00' # os minor
data += b'\xf0\x23\x00\x00' # OS build
data += b'\x02\x00\x00\x00' # os unknown
data += b'\x44\x72\x43\x4f\x4d\x00\xcf\x07\x68'
data += b'\x00' * 55 # unknown string
data += b'\x33\x64\x63\x37\x39\x66\x35\x32\x31\x32\x65\x38\x31\x37\x30\x61\x63\x66\x61\x39\x65\x63\x39\x35\x66\x31\x64\x37\x34\x39\x31\x36\x35\x34\x32\x62\x65\x37\x62\x31'
data += b'\x00' * 24
data += AUTH_VERSION
data += b'\x00' + len(pwd).to_bytes(1, 'big')
data += ror(md5sum(b'\x03\x01'+salt+pwd), pwd)
data += b'\x02\x0c'
data += checksum(data+b'\x01\x26\x07\x11\x00\x00'+dump(mac))
data += b'\x00\x00' # delimeter
data += dump(mac)
if (len(pwd) / 4) != 4:
data += b'\x00' * (len(pwd) // 4) # strange...
data += b'\x60\xa2' # unknown, filled numbers randomly =w=
data += b'\x00' * 28
log('[mkpkt]', data.hex())
return data
def login(usr, pwd, svr):
global SALT
i = 0
while True:
salt = challenge(svr, time.time()+random.randint(0xF, 0xFF))
SALT = salt
log('[salt] ', SALT)
packet = mkpkt(salt, usr, pwd, mac) # build the login packet
log('[login] send', packet.hex())
s.sendto(packet, (svr, 61440))
data, address = s.recvfrom(1024)
log('[login] recv', data.hex())
log('[login] packet sent.')
if address == (svr, 61440):
if data[0] == 4:
log('[login] loged in')
break
else:
log(f'[login] login failed. data[0] = {data[0]} type={type(data[0])}')
exit(2)
else:
if i >= 5 and UNLIMITED_RETRY == False:
log('[login] exception occured.')
sys.exit(1)
else:
exit(2)
log('[login] login sent')
# 0.8 changed:
return data[23:39]
# return data[-22:-6]
def keep_alive1(salt, tail, pwd, svr):
foo = struct.pack('!H', int(time.time()) % 0xFFFF)
data = b'\xff' + md5sum(b'\x03\x01'+salt+pwd) + b'\x00\x00\x00'
data += tail
data += foo + b'\x00\x00\x00\x00'
log('[keep_alive1] send', data.hex())
s.sendto(data, (svr, 61440))
while True:
data, address = s.recvfrom(1024)
if data[0] == 7:
break
else:
log('[keep-alive1]recv/not expected', data.hex())
log('[keep-alive1] recv', data.hex())
def empty_socket_buffer():
# empty buffer for some fucking schools
log('starting to empty socket buffer')
try:
while True:
            data, address = s.recvfrom(1024)
            log('received sth unexpected', data.hex())
            if data == b'':  # the original compared the socket `s` to '' (always False)
                break
except socket.timeout as timeout_err:
# get exception means it has done.
log(f'exception in empty_socket_buffer {timeout_err}')
log('emptyed')
def daemon():
if(platform.uname().system != 'Windows'):
with open('/var/run/jludrcom.pid', 'w') as f:
f.write(str(os.getpid()))
def main():
    if not IS_TEST:
        daemon()
        # execfile() does not exist in Python 3; read and exec the config instead
        exec(open(CONF).read(), globals())
log("auth svr:", server, "\nusername:", username ,
"\npassword:", password, "\nmac:", str(hex(mac)))
log(bind_ip)
    # flow: login -> keep alive
while True:
try:
package_tail = login(username, password, server)
except LoginException:
log("登录失败!")
break
log('package_tail', package_tail.hex())
# keep_alive1 is fucking bullshit!
        # ↑↑↑ seconded ↑↑↑
empty_socket_buffer()
keep_alive1(SALT, package_tail, password, server)
keep_alive2(SALT, package_tail, password, server)
if __name__ == "__main__":
main()
# === lizardsystem/lizard-layers :: lizard_layers/migrations/0005_auto__add_field_areavalue_flag__add_field_areavalue_comment.py (license: gpl-3.0) ===
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'AreaValue.flag'
db.add_column('lizard_layers_areavalue', 'flag', self.gf('django.db.models.fields.IntegerField')(null=True, blank=True), keep_default=False)
# Adding field 'AreaValue.comment'
db.add_column('lizard_layers_areavalue', 'comment', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'AreaValue.flag'
db.delete_column('lizard_layers_areavalue', 'flag')
# Deleting field 'AreaValue.comment'
db.delete_column('lizard_layers_areavalue', 'comment')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'lizard_area.area': {
'Meta': {'ordering': "('name',)", 'object_name': 'Area', '_ormbases': ['lizard_area.Communique']},
'area_class': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'area_type': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'communique_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['lizard_area.Communique']", 'unique': 'True', 'primary_key': 'True'}),
'data_administrator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.DataAdministrator']", 'null': 'True', 'blank': 'True'}),
'data_set': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_security.DataSet']", 'null': 'True', 'blank': 'True'}),
'dt_created': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2012, 3, 28, 11, 32, 38, 519893)'}),
'dt_latestchanged': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'dt_latestsynchronized': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.Area']", 'null': 'True', 'blank': 'True'})
},
'lizard_area.communique': {
'Meta': {'object_name': 'Communique', '_ormbases': ['lizard_geo.GeoObject']},
'areasort': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'areasort_krw': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "''"}),
'dt_latestchanged_krw': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'edited_at': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'edited_by': ('django.db.models.fields.TextField', [], {'max_length': '150', 'null': 'True', 'blank': 'True'}),
'geoobject_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['lizard_geo.GeoObject']", 'unique': 'True', 'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'surface': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '1', 'blank': 'True'}),
'watertype_krw': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'lizard_area.dataadministrator': {
'Meta': {'object_name': 'DataAdministrator'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'lizard_fewsnorm.parametercache': {
'Meta': {'ordering': "('ident',)", 'object_name': 'ParameterCache'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'shortname': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'lizard_geo.geoobject': {
'Meta': {'object_name': 'GeoObject'},
'geo_object_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_geo.GeoObjectGroup']"}),
'geometry': ('django.contrib.gis.db.models.fields.GeometryField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
'lizard_geo.geoobjectgroup': {
'Meta': {'object_name': 'GeoObjectGroup'},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
'source_log': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'lizard_layers.areavalue': {
'Meta': {'object_name': 'AreaValue'},
'area': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_area.Area']", 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'flag': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'value': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_layers.ValueType']", 'null': 'True', 'blank': 'True'})
},
'lizard_layers.parametertype': {
'Meta': {'object_name': 'ParameterType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'measuring_rod': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_measure.MeasuringRod']", 'null': 'True', 'blank': 'True'}),
'parameter': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_fewsnorm.ParameterCache']", 'null': 'True', 'blank': 'True'}),
'value_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_layers.ValueType']", 'null': 'True', 'blank': 'True'})
},
'lizard_layers.servermapping': {
'Meta': {'object_name': 'ServerMapping'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'external_server': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'relative_path': ('django.db.models.fields.CharField', [], {'max_length': '512', 'null': 'True', 'blank': 'True'})
},
'lizard_layers.valuetype': {
'Meta': {'object_name': 'ValueType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'lizard_measure.measuringrod': {
'Meta': {'object_name': 'MeasuringRod'},
'code': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '256', 'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'measuring_rod': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'measuring_rod_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_measure.MeasuringRod']", 'null': 'True', 'blank': 'True'}),
'sign': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'sub_measuring_rod': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'unit': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'valid': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'})
},
'lizard_security.dataset': {
'Meta': {'ordering': "['name']", 'object_name': 'DataSet'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'})
}
}
complete_apps = ['lizard_layers']
# === fametrano/BitcoinBlockchainTechnology :: btclib/rfc6979.py (license: mit) ===
#!/usr/bin/env python3
# Copyright (C) 2017-2020 The btclib developers
#
# This file is part of btclib. It is subject to the license terms in the
# LICENSE file found in the top-level directory of this distribution.
#
# No part of btclib including this file, may be copied, modified, propagated,
# or distributed except according to the terms contained in the LICENSE file.
"""Deterministic generation of the ephemeral key following RFC6979.
https://tools.ietf.org/html/rfc6979:
ECDSA and ECSSA need to produce, for each signature generation,
a fresh random value (ephemeral key, hereafter designated as k).
For effective security, k must be chosen randomly and uniformly
from a set of modular integers, using a cryptographically secure
process. Even slight biases in that process may be turned into
attacks on the signature schemes.
The need for a cryptographically secure source of randomness proves
to be a hindrance and makes implementations harder to test.
Moreover, reusing the same ephemeral key for a different message
signed with the same private key reveals the private key!
RFC6979 turns ECDSA into a deterministic scheme by using a
deterministic process for generating the "random" value k.
The process fulfills the cryptographic characteristics in order to
maintain the properties of verifiability and unforgeability
expected from signature schemes; namely, for whoever does not know
the signature private key, the mapping from input messages to the
corresponding k values is computationally indistinguishable from
what a randomly and uniformly chosen function (from the set of
messages to the set of possible k values) would return.
"""
import hmac
from hashlib import sha256
from .alias import HashF, PrvKey, String
from .curve import Curve
from .curves import secp256k1
from .to_prvkey import int_from_prvkey
from .utils import int_from_bits
def rfc6979(
msg: String, prvkey: PrvKey, ec: Curve = secp256k1, hf: HashF = sha256
) -> int:
"""Return a deterministic ephemeral key following RFC 6979."""
# the following is strictly equivalent to dsa._challenge
if isinstance(msg, str):
msg = msg.encode()
# Steps numbering follows SEC 1 v.2 section 4.1.3
h = hf()
h.update(msg)
mhd = h.digest() # 4
# leftmost ec.nlen bits %= ec.n
c = int_from_bits(mhd, ec.nlen) % ec.n # 5
q = int_from_prvkey(prvkey, ec)
return _rfc6979(c, q, ec, hf)
def _rfc6979(c: int, q: int, ec: Curve, hf: HashF) -> int:
# https://tools.ietf.org/html/rfc6979 section 3.2
# c = hf(m) # 3.2.a
# convert the private key q to an octet sequence of size nsize
bprv = q.to_bytes(ec.nsize, "big")
# truncate and/or expand c: encoding size is driven by nsize
bc = c.to_bytes(ec.nsize, "big")
bprvbm = bprv + bc
hsize = hf().digest_size
V = b"\x01" * hsize # 3.2.b
K = b"\x00" * hsize # 3.2.c
K = hmac.new(K, V + b"\x00" + bprvbm, hf).digest() # 3.2.d
V = hmac.new(K, V, hf).digest() # 3.2.e
K = hmac.new(K, V + b"\x01" + bprvbm, hf).digest() # 3.2.f
V = hmac.new(K, V, hf).digest() # 3.2.g
while True: # 3.2.h
T = b"" # 3.2.h.1
while len(T) < ec.nsize: # 3.2.h.2
V = hmac.new(K, V, hf).digest()
T += V
# The following line would introduce a bias
# k = int.from_bytes(T, 'big') % ec.n
# In general, taking a uniformly random integer (like those
# obtained from a hash function in the random oracle model)
# modulo the curve order n would produce a biased result.
# However, if the order n is sufficiently close to 2^hlen,
# then the bias is not observable: e.g.
# for secp256k1 and sha256 1-n/2^256 it is about 1.27*2^-128
k = int_from_bits(T, ec.nlen) # candidate k # 3.2.h.3
if 0 < k < ec.n: # acceptable values for k
return k # successful candidate
K = hmac.new(K, V + b"\x00", hf).digest()
V = hmac.new(K, V, hf).digest()
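# --- Hedged usage sketch (not part of the original module) ---
# Deterministic-nonce derivation with the API defined above; the message
# and the toy private key are illustrative values, not test vectors:
#
#   k1 = rfc6979('Satoshi Nakamoto', 0x01)   # secp256k1 / sha256 defaults
#   k2 = rfc6979('Satoshi Nakamoto', 0x01)
#   assert k1 == k2                          # same inputs -> same nonce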
# === spaghetti-/rosdep :: src/rosdep2/platforms/arch.py (license: bsd-3-clause) ===
#!/usr/bin/env python
# Copyright (c) 2009, Willow Garage, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Willow Garage, Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# Author Tully Foote/[email protected]
import subprocess
from ..installers import PackageManagerInstaller
from .source import SOURCE_INSTALLER
ARCH_OS_NAME = 'arch'
PACMAN_INSTALLER = 'pacman'
def register_installers(context):
context.set_installer(PACMAN_INSTALLER, PacmanInstaller())
def register_platforms(context):
context.add_os_installer_key(ARCH_OS_NAME, SOURCE_INSTALLER)
context.add_os_installer_key(ARCH_OS_NAME, PACMAN_INSTALLER)
context.set_default_os_installer_key(ARCH_OS_NAME, lambda self: PACMAN_INSTALLER)
def pacman_detect_single(p):
return not subprocess.call(['pacman', '-T', p], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
def pacman_detect(packages):
return [p for p in packages if pacman_detect_single(p)]
class PacmanInstaller(PackageManagerInstaller):
def __init__(self):
super(PacmanInstaller, self).__init__(pacman_detect)
def get_install_command(self, resolved, interactive=True, reinstall=False, quiet=False):
packages = self.get_packages_to_install(resolved, reinstall=reinstall)
if not packages:
return []
command = ['pacman', '-S']
if not interactive:
command.append('--noconfirm')
if not reinstall:
command.append('--needed')
if quiet:
command.append('-q')
return [self.elevate_priv(command + packages)]
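# Hedged illustration (not part of the original file): for a package 'foo'
# still missing from the system, get_install_command(['foo'], interactive=False)
# would assemble ['pacman', '-S', '--noconfirm', '--needed', 'foo'], wrapped
# by elevate_priv (e.g. a 'sudo' prefix, depending on the environment).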
# === AIFDR/inasafe-django :: django_project/realtime/migrations/0050_reporttemplate.py (license: bsd-2-clause) ===
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('realtime', '0049_auto_20180320_0406'),
]
operations = [
migrations.CreateModel(
name='ReportTemplate',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('timestamp', models.DateTimeField(help_text='The time the template uploaded.', verbose_name='Timestamp')),
('version', models.CharField(default=None, max_length=10, blank=True, help_text='Version number of the template.', null=True, verbose_name='Template version')),
('notes', models.CharField(default=None, max_length=255, blank=True, help_text='Notes of the report template.', null=True, verbose_name='Template Notes')),
('language', models.CharField(default=b'id', help_text='The language ID of the report', max_length=4, verbose_name='Language ID')),
('hazard', models.CharField(default=None, help_text='The hazard type of the template.', max_length=25, verbose_name='Hazard Type')),
('template_file', models.FileField(help_text='Template file formatted as qgis template file (*.qpt).', upload_to=b'', verbose_name='Template File')),
('owner', models.IntegerField(default=0, help_text='The owner/uploader of the template.', verbose_name='Owner')),
],
options={
'verbose_name_plural': 'Report Templates',
},
),
]
# === supernifty/mgsa :: mgsa/analyze_bam.py (license: mit) ===
import argparse
import collections
import numpy
import sys
import bio
import config
parser = argparse.ArgumentParser(description='Analyze BAM')
parser.add_argument('bam', metavar='bam', help='bam file to analyze')
parser.add_argument('--buckets', metavar='buckets', type=int, default=10, help='number of buckets')
parser.add_argument('--max_sample', metavar='max_sample', type=int, default=-1, help='max number of samples in each group')
parser.add_argument('--skip', metavar='skip', type=int, default=0, help='skip the first reads')
args = parser.parse_args()
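# Example invocation (hypothetical file name):
#   python analyze_bam.py sample.bam --buckets 20 --max_sample 10000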
bam = bio.BamReaderExternal( config.BAM_TO_SAM, args.bam )
stats = bio.SamStats( bam, max_sample=args.max_sample, skip=args.skip )  # bare `skip` was undefined
# gc
buckets = numpy.linspace(0, 1, args.buckets + 1)
mapped_buckets = bio.bucket( filter( None, stats.mapped['gc'] ), buckets )
unmapped_buckets = bio.bucket( filter( None, stats.unmapped['gc'] ), buckets )
total_mapped = sum( mapped_buckets )
total_unmapped = sum( unmapped_buckets )
print '========== GC content =========='
print 'GC %%: %s' % '\t'.join( [ '%.2f' % bucket for bucket in buckets ] )
print 'mapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_mapped ) for x in mapped_buckets ] )
print 'unmapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_unmapped ) for x in unmapped_buckets ] )
# entropy
mapped_buckets = bio.bucket( stats.mapped['entropy'], buckets )
unmapped_buckets = bio.bucket( stats.unmapped['entropy'], buckets )
total_mapped = sum( mapped_buckets )
total_unmapped = sum( unmapped_buckets )
print '\n========== Entropy =========='
print 'Mapped: min: %.2f max: %.2f' % ( min( stats.mapped['entropy'] ), max( stats.mapped['entropy'] ) )
print 'Unmapped: min: %.2f max: %.2f' % ( min( stats.unmapped['entropy'] ), max( stats.unmapped['entropy'] ) )
print 'Entropy: %s' % '\t'.join( [ '%.2f' % bucket for bucket in buckets ] )
print 'mapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_mapped ) for x in mapped_buckets ] )
print 'unmapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_unmapped ) for x in unmapped_buckets ] )
# === dafrito/trac-mirror :: trac/ticket/default_workflow.py ===
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2009 Edgewall Software
# Copyright (C) 2006 Alec Thomas
# Copyright (C) 2007 Eli Carter
# Copyright (C) 2007 Christian Boos <[email protected]>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Eli Carter
import pkg_resources
from ConfigParser import RawConfigParser
from StringIO import StringIO
from genshi.builder import tag
from trac.config import Configuration, ConfigSection
from trac.core import *
from trac.env import IEnvironmentSetupParticipant
from trac.perm import PermissionSystem
from trac.ticket.api import ITicketActionController, TicketSystem
from trac.ticket.model import Resolution
from trac.util.text import obfuscate_email_address
from trac.util.translation import _, tag_, cleandoc_
from trac.web.chrome import Chrome, add_script, add_script_data
from trac.wiki.macros import WikiMacroBase
# -- Utilities for the ConfigurableTicketWorkflow
def parse_workflow_config(rawactions):
"""Given a list of options from [ticket-workflow]"""
actions = {}
for option, value in rawactions:
parts = option.split('.')
action = parts[0]
if action not in actions:
actions[action] = {'oldstates': '', 'newstate': ''}
if len(parts) == 1:
# Base name, of the syntax: old,states,here -> newstate
try:
oldstates, newstate = [x.strip() for x in value.split('->')]
except ValueError:
continue # Syntax error, a warning will be logged later
actions[action]['newstate'] = newstate
actions[action]['oldstates'] = oldstates
else:
action, attribute = option.split('.')
actions[action][attribute] = value
# Fill in the defaults for every action, and normalize them to the desired
# types
def as_list(key):
value = attributes.get(key, '')
return [item for item in (x.strip() for x in value.split(',')) if item]
for action, attributes in actions.items():
# Default the 'name' attribute to the name used in the ini file
if 'name' not in attributes:
attributes['name'] = action
# If not specified, an action is not the default.
attributes['default'] = int(attributes.get('default', 0))
# If operations are not specified, that means no operations
attributes['operations'] = as_list('operations')
# If no permissions are specified, then no permissions are needed
attributes['permissions'] = as_list('permissions')
# Normalize the oldstates
attributes['oldstates'] = as_list('oldstates')
return actions
def get_workflow_config(config):
"""Usually passed self.config, this will return the parsed ticket-workflow
section.
"""
raw_actions = list(config.options('ticket-workflow'))
actions = parse_workflow_config(raw_actions)
return actions
def load_workflow_config_snippet(config, filename):
"""Loads the ticket-workflow section from the given file (expected to be in
the 'workflows' tree) into the provided config.
"""
filename = pkg_resources.resource_filename('trac.ticket',
'workflows/%s' % filename)
new_config = Configuration(filename)
for name, value in new_config.options('ticket-workflow'):
config.set('ticket-workflow', name, value)
class ConfigurableTicketWorkflow(Component):
"""Ticket action controller which provides actions according to a
workflow defined in trac.ini.
The workflow is idefined in the `[ticket-workflow]` section of the
[wiki:TracIni#ticket-workflow-section trac.ini] configuration file.
"""
ticket_workflow_section = ConfigSection('ticket-workflow',
"""The workflow for tickets is controlled by plugins. By default,
there's only a `ConfigurableTicketWorkflow` component in charge.
That component allows the workflow to be configured via this section
in the `trac.ini` file. See TracWorkflow for more details.
(''since 0.11'')""")
def __init__(self, *args, **kwargs):
self.actions = get_workflow_config(self.config)
if not '_reset' in self.actions:
# Special action that gets enabled if the current status no longer
# exists, as no other action can then change its state. (#5307)
self.actions['_reset'] = {
'default': 0,
'name': 'reset',
'newstate': 'new',
'oldstates': [], # Will not be invoked unless needed
'operations': ['reset_workflow'],
'permissions': []}
self.log.debug('Workflow actions at initialization: %s\n' %
str(self.actions))
for name, info in self.actions.iteritems():
if not info['newstate']:
self.log.warning("Ticket workflow action '%s' doesn't define "
"any transitions", name)
implements(ITicketActionController, IEnvironmentSetupParticipant)
# IEnvironmentSetupParticipant methods
def environment_created(self):
"""When an environment is created, we provide the basic-workflow,
unless a ticket-workflow section already exists.
"""
if not 'ticket-workflow' in self.config.sections():
load_workflow_config_snippet(self.config, 'basic-workflow.ini')
self.config.save()
self.actions = get_workflow_config(self.config)
def environment_needs_upgrade(self, db):
"""The environment needs an upgrade if there is no [ticket-workflow]
section in the config.
"""
return not list(self.config.options('ticket-workflow'))
def upgrade_environment(self, db):
"""Insert a [ticket-workflow] section using the original-workflow"""
load_workflow_config_snippet(self.config, 'original-workflow.ini')
self.config.save()
self.actions = get_workflow_config(self.config)
info_message = """
==== Upgrade Notice ====
The ticket Workflow is now configurable.
Your environment has been upgraded, but configured to use the original
workflow. It is recommended that you look at changing this configuration to use
basic-workflow.
Read TracWorkflow for more information (don't forget to 'wiki upgrade' as well)
"""
self.log.info(info_message.replace('\n', ' ').replace('==', ''))
print info_message
# ITicketActionController methods
def get_ticket_actions(self, req, ticket):
"""Returns a list of (weight, action) tuples that are valid for this
request and this ticket."""
# Get the list of actions that can be performed
# Determine the current status of this ticket. If this ticket is in
# the process of being modified, we need to base our information on the
# pre-modified state so that we don't try to do two (or more!) steps at
# once and get really confused.
status = ticket._old.get('status', ticket['status']) or 'new'
ticket_perm = req.perm(ticket.resource)
allowed_actions = []
for action_name, action_info in self.actions.items():
oldstates = action_info['oldstates']
if oldstates == ['*'] or status in oldstates:
# This action is valid in this state. Check permissions.
required_perms = action_info['permissions']
if self._is_action_allowed(ticket_perm, required_perms):
allowed_actions.append((action_info['default'],
action_name))
if not (status in ['new', 'closed'] or \
status in TicketSystem(self.env).get_all_status()) \
and 'TICKET_ADMIN' in ticket_perm:
# State no longer exists - add a 'reset' action if admin.
allowed_actions.append((0, '_reset'))
return allowed_actions
def _is_action_allowed(self, ticket_perm, required_perms):
if not required_perms:
return True
for permission in required_perms:
if permission in ticket_perm:
return True
return False
def get_all_status(self):
"""Return a list of all states described by the configuration.
"""
all_status = set()
for action_name, action_info in self.actions.items():
all_status.update(action_info['oldstates'])
all_status.add(action_info['newstate'])
all_status.discard('*')
all_status.discard('')
return all_status
def render_ticket_action_control(self, req, ticket, action):
self.log.debug('render_ticket_action_control: action "%s"' % action)
this_action = self.actions[action]
status = this_action['newstate']
operations = this_action['operations']
current_owner_or_empty = ticket._old.get('owner', ticket['owner'])
current_owner = current_owner_or_empty or '(none)'
if not (Chrome(self.env).show_email_addresses
or 'EMAIL_VIEW' in req.perm(ticket.resource)):
format_user = obfuscate_email_address
else:
format_user = lambda address: address
current_owner = format_user(current_owner)
control = [] # default to nothing
hints = []
if 'reset_workflow' in operations:
control.append(tag("from invalid state "))
hints.append(_("Current state no longer exists"))
if 'del_owner' in operations:
hints.append(_("The ticket will be disowned"))
if 'set_owner' in operations:
id = 'action_%s_reassign_owner' % action
selected_owner = req.args.get(id, req.authname)
if this_action.has_key('set_owner'):
owners = [x.strip() for x in
this_action['set_owner'].split(',')]
elif self.config.getbool('ticket', 'restrict_owner'):
perm = PermissionSystem(self.env)
owners = perm.get_users_with_permission('TICKET_MODIFY')
owners.sort()
else:
owners = None
if owners == None:
owner = req.args.get(id, req.authname)
control.append(tag_('to %(owner)s',
owner=tag.input(type='text', id=id,
name=id, value=owner)))
hints.append(_("The owner will be changed from "
"%(current_owner)s to the specified user",
current_owner=current_owner))
elif len(owners) == 1:
owner = tag.input(type='hidden', id=id, name=id,
value=owners[0])
formatted_owner = format_user(owners[0])
control.append(tag_('to %(owner)s ',
owner=tag(formatted_owner, owner)))
if ticket['owner'] != owners[0]:
hints.append(_("The owner will be changed from "
"%(current_owner)s to %(selected_owner)s",
current_owner=current_owner,
selected_owner=formatted_owner))
else:
control.append(tag_('to %(owner)s', owner=tag.select(
[tag.option(x, value=x,
selected=(x == selected_owner or None))
for x in owners],
id=id, name=id)))
hints.append(_("The owner will be changed from "
"%(current_owner)s to the selected user",
current_owner=current_owner))
elif 'set_owner_to_self' in operations and \
ticket._old.get('owner', ticket['owner']) != req.authname:
hints.append(_("The owner will be changed from %(current_owner)s "
"to %(authname)s", current_owner=current_owner,
authname=req.authname))
if 'set_resolution' in operations:
if this_action.has_key('set_resolution'):
resolutions = [x.strip() for x in
this_action['set_resolution'].split(',')]
else:
resolutions = [val.name for val in Resolution.select(self.env)]
if not resolutions:
raise TracError(_("Your workflow attempts to set a resolution "
"but none is defined (configuration issue, "
"please contact your Trac admin)."))
id = 'action_%s_resolve_resolution' % action
if len(resolutions) == 1:
resolution = tag.input(type='hidden', id=id, name=id,
value=resolutions[0])
control.append(tag_('as %(resolution)s',
resolution=tag(resolutions[0],
resolution)))
hints.append(_("The resolution will be set to %(name)s",
name=resolutions[0]))
else:
selected_option = req.args.get(id,
TicketSystem(self.env).default_resolution)
control.append(tag_('as %(resolution)s',
resolution=tag.select(
[tag.option(x, value=x,
selected=(x == selected_option or None))
for x in resolutions],
id=id, name=id)))
hints.append(_("The resolution will be set"))
if 'del_resolution' in operations:
hints.append(_("The resolution will be deleted"))
if 'leave_status' in operations:
control.append(_('as %(status)s ',
status= ticket._old.get('status',
ticket['status'])))
if len(operations) == 1:
hints.append(_("The owner will remain %(current_owner)s",
current_owner=current_owner)
if current_owner_or_empty else
_("The ticket will remain with no owner"))
else:
if status != '*':
hints.append(_("Next status will be '%(name)s'", name=status))
return (this_action['name'], tag(*control), '. '.join(hints) + '.'
if hints else '')
def get_ticket_changes(self, req, ticket, action):
this_action = self.actions[action]
# Enforce permissions
if not self._has_perms_for_action(req, this_action, ticket.resource):
# The user does not have any of the listed permissions, so we won't
# do anything.
return {}
updated = {}
# Status changes
status = this_action['newstate']
if status != '*':
updated['status'] = status
for operation in this_action['operations']:
if operation == 'reset_workflow':
updated['status'] = 'new'
elif operation == 'del_owner':
updated['owner'] = ''
elif operation == 'set_owner':
newowner = req.args.get('action_%s_reassign_owner' % action,
this_action.get('set_owner', '').strip())
# If there was already an owner, we get a list, [new, old],
# but if there wasn't we just get new.
if type(newowner) == list:
newowner = newowner[0]
updated['owner'] = newowner
elif operation == 'set_owner_to_self':
updated['owner'] = req.authname
elif operation == 'del_resolution':
updated['resolution'] = ''
elif operation == 'set_resolution':
newresolution = req.args.get('action_%s_resolve_resolution' % \
action,
this_action.get('set_resolution', '').strip())
updated['resolution'] = newresolution
# leave_status is just a no-op here, so we don't look for it.
return updated
def apply_action_side_effects(self, req, ticket, action):
pass
def _has_perms_for_action(self, req, action, resource):
required_perms = action['permissions']
if required_perms:
for permission in required_perms:
if permission in req.perm(resource):
break
else:
# The user does not have any of the listed permissions
return False
return True
# Public methods (for other ITicketActionControllers that want to use
# our config file and provide an operation for an action)
def get_actions_by_operation(self, operation):
"""Return a list of all actions with a given operation
(for use in the controller's get_all_status())
"""
actions = [(info['default'], action) for action, info
in self.actions.items()
if operation in info['operations']]
return actions
def get_actions_by_operation_for_req(self, req, ticket, operation):
"""Return list of all actions with a given operation that are valid
in the given state for the controller's get_ticket_actions().
If state='*' (the default), all actions with the given operation are
returned.
"""
# Be sure to look at the original status.
status = ticket._old.get('status', ticket['status'])
actions = [(info['default'], action) for action, info
in self.actions.items()
if operation in info['operations'] and
('*' in info['oldstates'] or
status in info['oldstates']) and
self._has_perms_for_action(req, info, ticket.resource)]
return actions
class WorkflowMacro(WikiMacroBase):
_domain = 'messages'
_description = cleandoc_(
"""Render a workflow graph.
This macro accepts a TracWorkflow configuration and renders the states
and transitions as a directed graph. If no parameters are given, the
current ticket workflow is rendered. In WikiProcessors mode the `width`
and `height` arguments can be specified.
(Defaults: `width = 800` and `heigth = 600`)
Examples:
{{{
[[Workflow()]]
[[Workflow(go = here -> there; return = there -> here)]]
{{{
#!Workflow width=700 height=700
leave = * -> *
leave.operations = leave_status
leave.default = 1
accept = new,assigned,accepted,reopened -> accepted
accept.permissions = TICKET_MODIFY
accept.operations = set_owner_to_self
resolve = new,assigned,accepted,reopened -> closed
resolve.permissions = TICKET_MODIFY
resolve.operations = set_resolution
reassign = new,assigned,accepted,reopened -> assigned
reassign.permissions = TICKET_MODIFY
reassign.operations = set_owner
reopen = closed -> reopened
reopen.permissions = TICKET_CREATE
reopen.operations = del_resolution
}}}
}}}
""")
def expand_macro(self, formatter, name, text, args):
if not text:
raw_actions = self.config.options('ticket-workflow')
else:
if args is None:
text = '\n'.join([line.lstrip() for line in text.split(';')])
if not '[ticket-workflow]' in text:
text = '[ticket-workflow]\n' + text
parser = RawConfigParser()
parser.readfp(StringIO(text))
raw_actions = list(parser.items('ticket-workflow'))
actions = parse_workflow_config(raw_actions)
states = list(set(
[state for action in actions.itervalues()
for state in action['oldstates']] +
[action['newstate'] for action in actions.itervalues()]))
action_names = actions.keys()
edges = []
for name, action in actions.items():
new_index = states.index(action['newstate'])
name_index = action_names.index(name)
for old_state in action['oldstates']:
old_index = states.index(old_state)
edges.append((old_index, new_index, name_index))
args = args or {}
graph = {'nodes': states, 'actions': action_names, 'edges': edges,
'width': args.get('width', 800),
'height': args.get('height', 600)}
graph_id = '%012x' % id(graph)
req = formatter.req
add_script(req, 'common/js/excanvas.js', ie_if='IE')
add_script(req, 'common/js/workflow_graph.js')
add_script_data(req, {'graph_%s' % graph_id: graph})
return tag.div(_("Enable JavaScript to display the workflow graph."),
class_='trac-workflow-graph system-message',
id='trac-workflow-graph-%s' % graph_id)
| bsd-3-clause | -5,058,617,368,730,896,000 | 42.268924 | 79 | 0.569173 | false | 4.506432 | true | false | false |
cs207-project/TimeSeries | procs/_corr.py | 1 | 4794 | import numpy.fft as nfft
import numpy as np
import timeseries as ts
from scipy.stats import norm
# import pyfftw
import sys
#sys.path.append("/Users/yuhantang/CS207/TimeSeries/procs")
from .interface import *
def createfromlist(l):
d = new_darray(len(l))
for i in range(0,len(l)):
darray_set(d,i,l[i])
return d
def tsmaker(m, s, j):
meta={}
meta['order'] = int(np.random.choice([-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]))
meta['blarg'] = int(np.random.choice([1, 2]))
t = np.arange(0.0, 1.0, 0.01)
v = norm.pdf(t, m, s) + j*np.random.randn(100)
return meta, ts.TimeSeries(t, v)
def random_ts(a):
t = np.arange(0.0, 1.0, 0.01)
v = a*np.random.random(100)
return ts.TimeSeries(t, v)
def stand(x, m, s):
return (x-m)/s
def ccor(ts1, ts2):
"given two standardized time series, compute their cross-correlation using FFT"
# Get the next 2 th power 110 -> 128
next_2 = int(2**np.ceil(np.log(len(ts1.values()))))
#
ts1_value = ts1.values()
ts2_value = ts2.values()
ts1_container,ts2_container = [],[]
ts1_zero_container = [0]*len(ts1.values())
ts2_zero_container = [0]*len(ts2.values())
ts1_c_array,ts2_c_array = [None]*(len(ts1.values())*2),[None]*(len(ts2.values())*2)
ts1_c_array[::2] = ts1_value
ts1_c_array[1::2] = ts1_zero_container
ts2_c_array[::2] = ts2_value
ts2_c_array[1::2] = ts2_zero_container
for i in range(len(ts1_c_array)+1,next_2*2):
ts1_c_array.append(np.double(0))
for i in range(len(ts2_c_array)+1,next_2*2):
ts2_c_array.append(np.double(0))
ts1_c_array.insert(0,0)
ts2_c_array.insert(0,0)
ts1_c_array = createfromlist(np.double(ts1_c_array))
ts2_c_array = createfromlist(np.double(ts2_c_array))
four1(ts1_c_array,next_2,1)
four1(ts2_c_array,next_2,1)
for i in range(len(ts2.values())*2+1):
ts1_container.append(darray_get(ts1_c_array,i))
for j in range(len(ts1.values())*2+1):
ts2_container.append(darray_get(ts2_c_array,j))
ts1_fft = np.asarray(ts1_container[1::2]) + 1j * np.asarray(ts1_container[2::2])
ts2_fft = np.asarray(ts2_container[1::2]) + 1j * np.asarray(ts2_container[2::2])
ts1_fft = ts1_fft[:len(ts1)+1]
ts2_fft = ts2_fft[:len(ts2)+1]
# ifft part
ts1_ts2_conj = ts1_fft * np.conj(ts2_fft)
ts1_ts2_ifft_container = [0]*len(ts1_ts2_conj)*2
ts1_ts2_ifft_container[::2] = ts1_ts2_conj.real
ts1_ts2_ifft_container[1::2] = ts1_ts2_conj.imag
for i in range(len(ts1_ts2_conj)+1, next_2 *2):
ts1_ts2_ifft_container.append(0)
ts1_ts2_ifft_container.insert(0,0)
ts1_ts2_ifft_container = createfromlist(ts1_ts2_ifft_container)
four1(ts1_ts2_ifft_container, next_2, -1)
ts1_ts2_ifft_container_python = []
for i in range(len(ts1_ts2_conj)*2+1):
ts1_ts2_ifft_container_python.append(darray_get(ts1_ts2_ifft_container,i))
ccor_value = np.asarray(ts1_ts2_ifft_container_python[1::2])
return 1/len(ts1) * ccor_value
def max_corr_at_phase(ts1, ts2):
ccorts = ccor(ts1, ts2)
idx = np.argmax(ccorts)
maxcorr = ccorts[idx]
return idx, maxcorr
#The equation for the kernelized cross correlation is given at
#http://www.cs.tufts.edu/~roni/PUB/ecml09-tskernels.pdf
#normalize the kernel there by np.sqrt(K(x,x)K(y,y)) so that the correlation
#of a time series with itself is 1.
def kernel_corr(ts1, ts2, mult=1):
"compute a kernelized correlation so that we can get a real distance"
#your code here.
cross_correlation = ccor(ts1, ts2) * mult
corr_ts1, corr_ts2 = ccor(ts1, ts1) * mult, ccor(ts2, ts2) * mult
return np.sum(np.exp(cross_correlation))/np.sqrt(np.sum(np.exp(corr_ts1))*np.sum(np.exp(corr_ts2)))
#this is for a quick and dirty test of these functions
#you might need to add procs to pythonpath for this to work
if __name__ == "__main__":
print("HI")
_, t1 = tsmaker(0.5, 0.1, 0.01)
_, t2 = tsmaker(0.5, 0.1, 0.01)
print(t1.mean(), t1.std(), t2.mean(), t2.std())
import matplotlib.pyplot as plt
plt.plot(t1)
plt.plot(t2)
plt.show()
standts1 = stand(t1, t1.mean(), t1.std())
standts2 = stand(t2, t2.mean(), t2.std())
#print(type(standts1),'this is the type=================*********')
#assert 1 == 2
idx, mcorr = max_corr_at_phase(standts1, standts2)
print(idx, mcorr)
sumcorr = kernel_corr(standts1, standts2, mult=10)
print(sumcorr)
t3 = random_ts(2)
t4 = random_ts(3)
plt.plot(t3)
plt.plot(t4)
plt.show()
standts3 = stand(t3, t3.mean(), t3.std())
standts4 = stand(t4, t4.mean(), t4.std())
idx, mcorr = max_corr_at_phase(standts3, standts4)
print(idx, mcorr)
sumcorr = kernel_corr(standts3, standts4, mult=10)
print(sumcorr)
| mit | 2,104,750,453,322,507,300 | 29.929032 | 103 | 0.623905 | false | 2.495575 | false | false | false |
antiface/ThinkBayes2 | code/cookie3.py | 1 | 1095 | """This file contains code for use with "Think Bayes",
by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function, division
import thinkbayes2
class Cookie(thinkbayes2.Suite):
"""A map from string bowl ID to probablity."""
def Likelihood(self, data, hypo):
"""The likelihood of the data under the hypothesis.
data: string cookie type
hypo: string bowl ID
"""
like = hypo[data] / hypo.Total()
if like:
hypo[data] -= 1
return like
def main():
bowl1 = thinkbayes2.Hist(dict(vanilla=30, chocolate=10))
bowl2 = thinkbayes2.Hist(dict(vanilla=20, chocolate=20))
pmf = Cookie([bowl1, bowl2])
print('After 1 vanilla')
pmf.Update('vanilla')
for hypo, prob in pmf.Items():
print(hypo, prob)
print('\nAfter 1 vanilla, 1 chocolate')
pmf.Update('chocolate')
for hypo, prob in pmf.Items():
print(hypo, prob)
if __name__ == '__main__':
main()
| gpl-2.0 | 7,652,526,682,298,288,000 | 23.333333 | 60 | 0.628311 | false | 3.220588 | false | false | false |
dvro/scikit-protopy | protopy/base.py | 1 | 4528 | """Base and mixin classes for instance reduction techniques"""
# Author: Dayvid Victor <[email protected]>
# License: BSD Style
import warnings
from abc import ABCMeta, abstractmethod
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.neighbors.classification import KNeighborsClassifier
from sklearn.utils import check_array
from sklearn.externals import six
class InstanceReductionWarning(UserWarning):
pass
# Make sure that NeighborsWarning are displayed more than once
warnings.simplefilter("always", InstanceReductionWarning)
class InstanceReductionBase(six.with_metaclass(ABCMeta, BaseEstimator)):
"""Base class for instance reduction estimators."""
@abstractmethod
def __init__(self):
pass
class InstanceReductionMixin(InstanceReductionBase, ClassifierMixin):
"""Mixin class for all instance reduction techniques"""
def set_classifier(self):
"""Sets the classified to be used in the instance reduction process
and classification.
Parameters
----------
classifier : classifier, following the KNeighborsClassifier style
(default = KNN)
y : array-like, shape = [n_samples]
Labels for X.
Returns
-------
P : array-like, shape = [indeterminated, n_features]
Resulting training set.
q : array-like, shape = [indertaminated]
Labels for P
"""
self.classifier = classifier
def reduce_data(self, X, y):
"""Perform the instance reduction procedure on the given training data.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training set.0
y : array-like, shape = [n_samples]
Labels for X.
Returns
-------
X_ : array-like, shape = [indeterminated, n_features]
Resulting training set.
y_ : array-like, shape = [indertaminated]
Labels for X_
"""
pass
def fit(self, X, y, reduce_data=True):
"""
Fit the InstanceReduction model according to the given training data.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
n_features is the number of features.
Note that centroid shrinking cannot be used with sparse matrices.
y : array, shape = [n_samples]
Target values (integers)
reduce_data : bool, flag indicating if the reduction would be performed
"""
self.X = X
self.y = y
if reduce_data:
self.reduce_data(X, y)
return self
def predict(self, X, n_neighbors=1):
"""Perform classification on an array of test vectors X.
The predicted class C for each sample in X is returned.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
C : array, shape = [n_samples]
Notes
-----
The default prediction is using KNeighborsClassifier, if the
instance reducition algorithm is to be performed with another
classifier, it should be explicited overwritten and explained
in the documentation.
"""
X = check_array(X)
if not hasattr(self, "X_") or self.X_ is None:
raise AttributeError("Model has not been trained yet.")
if not hasattr(self, "y_") or self.y_ is None:
raise AttributeError("Model has not been trained yet.")
if self.classifier == None:
self.classifier = KNeighborsClassifier(n_neighbors=n_neighbors)
self.classifier.fit(self.X_, self.y_)
return self.classifier.predict(X)
def predict_proba(self, X):
"""Return probability estimates for the test data X.
after a given prototype selection algorithm.
Parameters
----------
X : array, shape = (n_samples, n_features)
A 2-D array representing the test points.
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs
of such arrays if n_outputs > 1.
The class probabilities of the input samples. Classes are ordered
by lexicographic order.
"""
self.classifier.fit(self.X_, self.y_)
return self.classifier.predict_proba(X)
| bsd-2-clause | -6,141,545,886,733,990,000 | 28.38961 | 79 | 0.607601 | false | 4.548744 | false | false | false |
vguzmanp/cloud-in-one | main_crypto.py | 1 | 1392 | #!/usr/bin/env python3
import getpass
import argparse
import shutil
from core.databaseManager import DatabaseManager
from core.securityModule import SecurityModule
def processFile(file_in_name, file_out_name, encrypt_flag):
user = input("CLOUD-IN-ONE Username: ")
password = getpass.getpass()
databaseManager = DatabaseManager(':memory:')
sec = SecurityModule(databaseManager, user, password)
file_processed = None
with open(file_in_name, 'rb') as f_in:
if encrypt_flag:
file_processed = sec.encrypt(f_in)
else:
file_processed = sec.decrypt(f_in)
with open(file_out_name, 'wb') as f_out:
file_processed.seek(0)
shutil.copyfileobj(file_processed, f_out)
file_processed.close()
def main():
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-d", "--decrypt", action="store_true")
group.add_argument("-e", "--encrypt", action="store_true")
parser.add_argument("file", help="the file to encrypt / decrypt")
parser.add_argument("file_output", help="name of the destination file")
args = parser.parse_args()
encrypt_flag = args.encrypt
if not encrypt_flag:
encrypt_flag = not args.decrypt
processFile(args.file, args.file_output, encrypt_flag)
if __name__ == '__main__':
main()
| mit | 120,298,394,218,790,240 | 28 | 75 | 0.666667 | false | 3.712 | false | false | false |
PaesslerAG/django-performance-testing | settings.py | 1 | 1267 | # Django settings for autodata project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
# Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this
# list if you haven't customized them:
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'mq%31q+sjj^)m^tvy(klwqw6ksv7du2yzdf9-django_performance_testing'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django_performance_testing',
'testapp',
)
STATIC_URL = '/static/'
ROOT_URLCONF = None
| bsd-3-clause | 3,754,765,840,320,147,000 | 27.155556 | 78 | 0.598264 | false | 3.874618 | false | false | false |
owlabs/incubator-airflow | airflow/models/taskreschedule.py | 1 | 3374 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""TaskReschedule tracks rescheduled task instances."""
from sqlalchemy import Column, ForeignKeyConstraint, Index, Integer, String, asc
from airflow.models.base import Base, ID_LEN
from airflow.utils.db import provide_session
from airflow.utils.sqlalchemy import UtcDateTime
class TaskReschedule(Base):
"""
TaskReschedule tracks rescheduled task instances.
"""
__tablename__ = "task_reschedule"
id = Column(Integer, primary_key=True)
task_id = Column(String(ID_LEN), nullable=False)
dag_id = Column(String(ID_LEN), nullable=False)
execution_date = Column(UtcDateTime, nullable=False)
try_number = Column(Integer, nullable=False)
start_date = Column(UtcDateTime, nullable=False)
end_date = Column(UtcDateTime, nullable=False)
duration = Column(Integer, nullable=False)
reschedule_date = Column(UtcDateTime, nullable=False)
__table_args__ = (
Index('idx_task_reschedule_dag_task_date', dag_id, task_id, execution_date,
unique=False),
ForeignKeyConstraint([task_id, dag_id, execution_date],
['task_instance.task_id', 'task_instance.dag_id',
'task_instance.execution_date'],
name='task_reschedule_dag_task_date_fkey',
ondelete='CASCADE')
)
def __init__(self, task, execution_date, try_number, start_date, end_date,
reschedule_date):
self.dag_id = task.dag_id
self.task_id = task.task_id
self.execution_date = execution_date
self.try_number = try_number
self.start_date = start_date
self.end_date = end_date
self.reschedule_date = reschedule_date
self.duration = (self.end_date - self.start_date).total_seconds()
@staticmethod
@provide_session
def find_for_task_instance(task_instance, session):
"""
Returns all task reschedules for the task instance and try number,
in ascending order.
:param task_instance: the task instance to find task reschedules for
:type task_instance: airflow.models.TaskInstance
"""
TR = TaskReschedule
return (
session
.query(TR)
.filter(TR.dag_id == task_instance.dag_id,
TR.task_id == task_instance.task_id,
TR.execution_date == task_instance.execution_date,
TR.try_number == task_instance.try_number)
.order_by(asc(TR.id))
.all()
)
| apache-2.0 | -3,710,862,593,287,448,000 | 38.694118 | 83 | 0.648785 | false | 4.026253 | false | false | false |
cykerway/wmwm | setup.py | 1 | 7315 | #!/usr/bin/env python3
'''
setuptools based setup module;
see <https://packaging.python.org/en/latest/distributing.html>;
'''
from os import path
from setuptools import find_packages
from setuptools import setup
here = path.abspath(path.dirname(__file__))
## get long description from readme file;
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
## ========================================================================
## required for pypi upload;
## ========================================================================
## project name;
##
## this determines how users install this project:
##
## pip install sampleproject
##
## and where this project lives on pypi:
##
## <https://pypi.org/project/sampleproject/>
##
## this name is registered for you the first time you publish this package;
##
## name specification:
##
## <https://packaging.python.org/specifications/core-metadata/#name>
##
name='awd',
## project version;
##
## version specification (pep 440):
##
## <https://www.python.org/dev/peps/pep-0440/>;
##
## single-sourcing techniques:
##
## <https://packaging.python.org/en/latest/single_source_version.html>
##
version='1.3.4',
## project homepage;
##
## this arg corresponds to "home-page" metadata field:
##
## <https://packaging.python.org/specifications/core-metadata/#home-page-optional>
##
url='https://github.com/cykerway/awd',
## author name;
author='Cyker Way',
## author email address;
author_email='[email protected]',
## packages;
##
## you can provide a list of packages manually or use `find_packages()`;
##
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
## ========================================================================
## optional for pypi upload;
## ========================================================================
## a one-line description;
##
## this arg corresponds to "summary" metadata field:
##
## <https://packaging.python.org/specifications/core-metadata/#summary>
##
description='a window director;',
## a longer description shown on project homepage on pypi;
##
## this is often the same as the readme;
##
## this arg corresponds to "description" metadata field:
##
## <https://packaging.python.org/specifications/core-metadata/#description-optional>
##
long_description=long_description,
## longer description content type;
##
## valid values are: `text/plain`, `text/x-rst`, `text/markdown`;
##
## this arg corresponds to "description-content-type" metadata field:
##
## <https://packaging.python.org/specifications/core-metadata/#description-content-type-optional>
##
long_description_content_type='text/markdown',
## classifiers categorizing this project;
##
## see <https://pypi.org/classifiers/>;
##
classifiers=[
## development status;
# 'Development Status :: 3 - Alpha',
'Development Status :: 4 - Beta',
# 'Development Status :: 5 - Production/Stable',
## intended audience;
# 'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
## topic;
'Topic :: Desktop Environment',
# 'Topic :: Games/Entertainment',
# 'Topic :: Multimedia',
# 'Topic :: Office/Business',
# 'Topic :: Scientific/Engineering',
# 'Topic :: Software Development',
# 'Topic :: System',
## license;
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
# 'License :: OSI Approved :: BSD License',
# 'License :: OSI Approved :: MIT License',
## supported python versions;
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
## project keywords;
##
## these keywords will appear on the project page;
##
keywords='window layout',
## package data;
##
## this is a dict mapping package names to a list of relative path names
## (or glob patterns) that should be copied into the package when
## installed; the path names are interpreted relative to the package dir;
##
package_data={
# 'sample': ['*.bin'],
},
## additional data files;
##
## this is a sequence of `(dir, files)` pairs; each `(dir, files)` pair
## specifies the install dir and the files to install there; if `dir` is a
## relative path, it is relative to the install prefix (`sys.prefix` or
## `sys.exec_prefix`); each file in `files` is interpreted relative to the
## `setup.py` script;
##
## see <https://docs.python.org/3/distutils/setupscript.html#installing-additional-files>;
##
data_files=[
# ('data_files', ['data/data0.bin', 'data/data1.bin']),
],
## package dependencies;
##
## this is a list of packages that this project depends on; these packages
## will be installed by pip when this project is installed;
##
install_requires=[
'argparse-ext',
'ewmh-ext',
'logging-ext',
'python-xlib',
],
## extra package dependencies;
##
## this is a dict mapping extras (optional features of this project) to a
## list of packages that those extras depend on;
##
## users will be able to install these using the extras syntax:
##
## pip install sampleproject[dev]
##
## see <https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-extras-optional-features-with-their-own-dependencies>
##
extras_require={
# 'dev': ['check-manifest'],
# 'test': ['coverage'],
},
## to create executable scripts, use entry points:
##
## <https://setuptools.readthedocs.io/en/latest/setuptools.html#automatic-script-creation>
##
## for example, the following would provide a console script `sample-cli`
## which executes the `main` function in package `sample.cli`, and a gui
## script `sample-gui` which executes the `main` function in package
## `sample.gui`;
entry_points={
'console_scripts': [
'awd=awd.__main__:main',
],
# 'gui_scripts': [
# 'sample-gui=sample.gui:main',
# ],
},
## additional urls that are relevant to this project;
##
## examples include: where the package tracks issues, where the source is
## hosted, where to say thanks to the package maintainers, and where to
## support the project financially; the keys are used to render the link
## texts on pypi;
##
## this arg corresponds to "project-url" metadata fields:
##
## <https://packaging.python.org/specifications/core-metadata/#project-url-multiple-use>
##
project_urls={
'Bug Reports': 'https://github.com/cykerway/awd/issues',
# 'Funding': 'https://donate.pypi.org',
# 'Say Thanks!': 'http://saythanks.io/to/example',
'Source': 'https://github.com/cykerway/awd/',
},
)
| gpl-3.0 | -8,959,277,930,186,001,000 | 30.530172 | 136 | 0.579357 | false | 3.930682 | true | false | false |
nicko96/Chrome-Infra | glyco/glucose/install.py | 1 | 8024 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import hashlib
import httplib2
import logging
import os
import sys
import urllib
from glucose import util
LOGGER = logging.getLogger(__name__)
DEFAULT_CACHE = os.path.join(os.path.expanduser('~'), '.glyco_wheelcache')
def get_sha1_from_filename(filename, verbose=True):
"""Extract the claimed sha1 from the filename.
Also verify the name matches the wheel convention.
Args:
filename (str): path to a local file.
verbose (bool): print messages only if True.
Returns: claimed_hash(str) or None if no hash can be found.
"""
basename = os.path.split(filename)[-1]
wheel_info = util.WHEEL_FILE_RE.match(basename)
if not wheel_info:
if verbose:
print >> sys.stderr, 'Invalid file name for wheel: %s' % basename
return None
if not wheel_info.group('build'):
if verbose:
print >> sys.stderr, ('No hash could be found in the filename.\n'
'Has this file been generated with Glyco?\n'
'%s' % basename)
return None
return wheel_info.group('build').split('_')[1]
def has_valid_sha1(filename, verbose=True):
"""Verify the hash of a whl file created by Glyco.
Args:
filename (str): path to a whl file.
verbose(bool): print messages only if True.
Returns:
matches (bool): true if the file content and the name match.
"""
claimed_sha = get_sha1_from_filename(filename, verbose=verbose)
if not claimed_sha:
return False
with open(filename, 'rb') as f:
digest = hashlib.sha1(f.read())
actual_sha = digest.hexdigest()
return actual_sha == claimed_sha
def get_install_list(packages):
"""Consolidate the list of things to install.
Args:
packages (list of str): local paths or https/gs URLs.
"""
install_list = []
for package in packages:
location = package
location_type = 'ERROR'
error = None
# Let's support only https. Security matters.
if package.startswith('http://'):
error = 'Non-secure http is not supported, please use https: %s' % package
elif package.startswith('https://'):
location_type = 'http'
elif package.startswith('gs://'):
# TODO(pgervais): handle Cloud Storage properly.
location_type = 'http'
location = 'https://storage.googleapis.com/' + package[len('gs://'):]
elif os.path.isfile(package):
location = 'file://%s' % urllib.pathname2url(os.path.abspath(package))
location_type = 'file'
else:
error = ('Cannot find this file locally: %s\n'
'If you did not specify a file but an URI, '
'then the protocol is probably not supported.'
% os.path.abspath(package))
install_list.append({'location': location,
'location_type': location_type,
'error': error})
return install_list
def fetch_packages(install_list, requester=httplib2.Http(),
cache=DEFAULT_CACHE, verbose=True):
"""Make sure there is a local copy of all packages.
All paths returned by this function point at existing wheel files, with
correct hashes.
Args:
install_list (list of dict): return value of get_install_list.
requester (httplib2.Http): object to use to send http requests.
cache (str): path to a local directory used to store wheel files downloaded
from a remote storage.
verbose(bool): print messages only if True.
Returns:
paths (list of strings): path to each local wheel file.
"""
if not os.path.isdir(cache):
os.mkdir(cache)
paths = []
all_valid = True
for source in install_list:
if source['location_type'] == 'file':
assert source['location'].startswith('file://')
filename = source['location'][len('file://'):]
# FIXME(pgervais): convert to a windows path (/ -> \) and unquote.
if not has_valid_sha1(filename, verbose=verbose):
if verbose:
print >> sys.stderr, ("File content does not match hash for %s"
% filename)
all_valid = False
else:
paths.append(filename)
elif source['location_type'] == 'http':
# This is an URL so the path separator is necessarily /
base_filename = source['location'].split('/')[-1]
filename = os.path.join(cache, base_filename)
if not os.path.exists(filename):
# Try to download file to local cache
resp, content = requester.request(source['location'], 'GET')
if resp['status'] == '200':
temp_filename = os.path.join(cache, base_filename + '.tmp')
try:
with open(temp_filename, 'wb') as f:
f.write(content)
os.rename(temp_filename, filename)
except OSError:
if os.path.isfile(temp_filename):
os.remove(temp_filename)
else:
if verbose:
print >> sys.stderr, ("Got status %s when talking to %s" %
(resp['status'], source['location']))
all_valid = False
# We have to test again for existence since the download
# could have failed.
if os.path.exists(filename) and not has_valid_sha1(filename,
verbose=verbose):
if verbose:
print >> sys.stderr, ("File content does not match hash for %s"
% filename)
all_valid = False
# The file is bad anyway, there's no point in keeping it around.
# Plus we probably want to retry the download some time in the future.
os.remove(filename)
else:
paths.append(filename)
if not all_valid:
raise ValueError('Some errors occurred when getting wheel files.')
return paths
def install(args):
"""Install wheel files"""
if not args.packages:
print 'No packages have been provided on the command-line, doing nothing.'
return
if not args.install_dir:
print >> sys.stderr, ('No destination directory specified, aborting. \n'
'Use the --install-dir option to specify it')
return 2
install_list = get_install_list(args.packages)
error_msgs = [d['error'] for d in install_list if 'error' in d and d['error']]
if error_msgs:
print >> sys.stderr, ('\n'.join(error_msgs))
print >> sys.stderr, 'Aborting (no packages installed)'
return 1
try:
package_paths = fetch_packages(install_list)
except ValueError:
print >> sys.stderr, 'Aborting (no packages installed)'
return 1
if not os.path.isdir(args.install_dir):
os.mkdir(args.install_dir)
with util.Virtualenv() as venv:
cmd = (['pip', 'install', '--no-index', '--target', args.install_dir]
+ package_paths)
LOGGER.debug('Running %s', ' '.join(cmd))
venv.check_call(cmd)
def add_subparser(subparsers):
"""Add the 'install' command.
Also add the 'lysis' command as a synonym (and pun).
Args:
subparsers: output of argparse.ArgumentParser.add_subparsers()
"""
install_parser = subparsers.add_parser('install',
help='Install wheel files to a local '
'directory (synonym of lysis)')
install_parser.set_defaults(command=install)
# Add synonym just for the pun
lysis_parser = subparsers.add_parser('lysis',
help='Install wheel files to a local '
'directory (synonym of install)')
lysis_parser.set_defaults(command=install)
for parser in (install_parser, lysis_parser):
parser.add_argument('--install-dir', '-i',
help='Directory where to install packages')
parser.add_argument('packages', metavar='PACKAGE', nargs='*',
help='Wheel files to install (path)')
| bsd-3-clause | 3,545,463,092,387,777,500 | 32.157025 | 80 | 0.61154 | false | 4.056623 | false | false | false |
rwl/puddle | puddle/resource/action/properties_action.py | 1 | 3749 | #------------------------------------------------------------------------------
# Copyright (C) 2009 Richard W. Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#------------------------------------------------------------------------------
""" Defines an action for viewing resource properties.
"""
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
from enthought.io.api import File
from enthought.traits.api import Bool, Instance
from enthought.traits.ui.api import View, Item, Group
from enthought.pyface.action.api import Action
#------------------------------------------------------------------------------
# "PropertiesAction" class:
#------------------------------------------------------------------------------
class PropertiesAction(Action):
""" Defines an action for viewing resource properties.
"""
#--------------------------------------------------------------------------
# "Action" interface:
#--------------------------------------------------------------------------
# The action"s name (displayed on menus/tool bar tools etc):
name = "P&roperties"
# Keyboard accelerator:
accelerator = "Alt+Enter"
#--------------------------------------------------------------------------
# "Action" interface:
#--------------------------------------------------------------------------
def perform(self, event):
""" Perform the action.
"""
selections = self.window.selection
if selections:
selection = selections[0]
if isinstance(selection, File):
selection.edit_traits( parent=self.window.control,
view=self._create_resource_view(selection),
kind="livemodal" )
def _create_resource_view(self, selection):
""" Creates a resource view.
"""
resource_view = View(
Item(name="absolute_path", style="readonly"),
# FIXME: Readonly boolean editor is just blank
# Item(name="exists", style="readonly"),
# Item(name="is_file", style="readonly"),
# Item(name="is_folder", style="readonly"),
# Item(name="is_package", style="readonly"),
# Item(name="is_readonly", style="readonly"),
Item(name="mime_type", style="readonly"),
Item(name="url", style="readonly"),
title="Properties for %s" % selection.name+selection.ext,
icon=self.window.application.icon)
return resource_view
# EOF -------------------------------------------------------------------------
| mit | -6,371,690,640,377,739,000 | 41.123596 | 79 | 0.51107 | false | 5.236034 | false | false | false |
endlessm/endless-ndn | eos_data_distribution/parallel.py | 1 | 2456 | # -*- Mode:python; coding: utf-8; c-file-style:"gnu"; indent-tabs-mode:nil -*- */
#
# Copyright (C) 2016 Endless Mobile, Inc.
# Author: Niv Sardi <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# A copy of the GNU Lesser General Public License is in the file COPYING.
import logging
from gi.repository import GObject
logger = logging.getLogger(__name__)
class Batch(GObject.GObject):
__gsignals__ = {
'complete': (GObject.SIGNAL_RUN_FIRST, None, ()),
}
def __init__(self, workers, type="Batch"):
super(Batch, self).__init__()
self._type = type
self._incomplete_workers = set(workers)
for worker in self._incomplete_workers:
worker.connect('complete', self._on_batch_complete)
def start(self):
if not self._incomplete_workers:
logger.info('%s complete: no workers', self._type)
self.emit('complete')
for worker in self._incomplete_workers:
worker.start()
def _on_batch_complete(self, worker):
logger.info("%s complete: %s", self._type, worker)
self._incomplete_workers.remove(worker)
if len(self._incomplete_workers) == 0:
self.emit('complete')
if __name__ == '__main__':
import argparse
from . import utils
from gi.repository import GLib
from ndn.file import FileConsumer
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output")
parser.add_argument("-c", "--count", default=10, type=int)
args = utils.parse_args(parser=parser)
loop = GLib.MainLoop()
consumers = [FileConsumer("%s-%s"%(args.name, i), "%s-%s"%(args.output, i))
for i in range(args.count)]
batch = Batch(workers=consumers)
batch.connect('complete', lambda *a: loop.quit())
batch.start()
loop.run()
| lgpl-3.0 | -2,895,144,727,426,026,000 | 32.643836 | 81 | 0.65513 | false | 3.8375 | false | false | false |
SP2RC-Coding-Club/Codes | 13_07_2017/3D_slab_modes.py | 1 | 35096 |
#import pdb # pause code for debugging at pdb.set_trace()
import numpy as np
import toolbox as tool
import slab_functions as sf
from pysac.plot.mayavi_seed_streamlines import SeedStreamline
import matplotlib.pyplot as plt
from mayavi import mlab
import gc
#import move_seed_points as msp
import mayavi_plotting_functions as mpf
import dispersion_diagram
import img2vid as i2v
from functools import partial
import os
# ================================
# Preamble: set mode options and view parameters
# ================================
# What mode do you want? OPTIONS:
mode_options = ['slow-kink-surf', 'slow-saus-surf', 'slow-saus-body-3',
'slow-kink-body-3', 'slow-saus-body-2', 'slow-kink-body-2',
'slow-saus-body-1', 'slow-kink-body-1', 'fast-saus-body-1',
'fast-kink-body-1', 'fast-saus-body-2', 'fast-kink-body-2',
'fast-saus-body-3', 'fast-kink-body-3', 'fast-kink-surf',
'fast-saus-surf', 'shear-alfven', 'shear-alfven-broadband']
# Which angle shall we view from? OPTIONS:
view_options = ['front', 'front-parallel', 'top', 'top-parallel', 'front-top',
'front-side', 'front-top-side']
# Uniform lighting?
#uniform_light = True
uniform_light = False
show_density = False
show_density_pert = False
show_mag = False
show_mag_scale = False
show_mag_fade = False
show_mag_vec = False
show_vel_front = False
show_vel_front_pert = False
show_vel_top = False
show_vel_top_pert = False
show_disp_top = False
show_disp_front = False
show_axes = False
show_axis_labels = False
show_mini_axis = False
show_boundary = False
# Uncomment the parameter you would like to see
# No density perturbations or velocity/displacement perturbations for Alfven modes.
#show_density = True
#show_density_pert = True
show_mag = True
#show_mag_scale = True #must also have show_mag = True
#show_mag_fade = True
#show_mag_vec = True
#show_vel_front = True
#show_vel_front_pert = True
#show_vel_top = True
#show_vel_top_pert = True
#show_disp_top = True
#show_disp_front = True
show_axes = True
#show_axis_labels = True
show_mini_axis = True
show_boundary = True
# Visualisation modules in string form for file-names
vis_modules = [show_density, show_density_pert, show_mag, show_mag_scale,
show_mag_fade, show_mag_vec, show_vel_front, show_vel_front_pert,
show_vel_top, show_vel_top_pert, show_disp_top, show_disp_front]
vis_modules_strings = ['show_density', 'show_density_pert', 'show_mag', 'show_mag_scale',
'show_mag_fade', 'show_mag_vec', 'show_vel_front', 'show_vel_front_pert',
'show_vel_top', 'show_vel_top_pert', 'show_disp_top', 'show_disp_front']
vis_mod_string = ''
for i, j in enumerate(vis_modules):
if vis_modules[i]:
vis_mod_string = vis_mod_string + vis_modules_strings[i][5:] + '_'
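# e.g. with only show_mag enabled among the toggles above, this yields
# vis_mod_string = 'mag_', which tags the output file names.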
# Set to True if you would like the dispersion diagram with chosen mode highlighted.
show_dispersion = False
#show_dispersion = True
# Wanna see the animation? Of course you do
#show_animation = False
show_animation = True
# Basic plot to see which eigensolutions have been found.
show_quick_plot = False
#show_quick_plot = True
# Video resolution
#res = (1920,1080) # There is a problem with this resolution: the height must be an odd number (apparently a Mayavi bug)
res = tuple(101 * np.array((16,9)))
#res = tuple(51 * np.array((16,9)))
#res = tuple(21 * np.array((16,9)))
number_of_frames = 1
# Frames per second of output video
fps = 20
#save_images = False
save_images = True
make_video = False
#make_video = True
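# img2vid (imported as i2v above) is presumably what stitches the saved
# frames into a video at the chosen fps once make_video is True.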
# Where should I save the animation images/videos?
# Step up one directory so the output folders sit next to the code directory.
os.chdir('..')
save_directory = os.path.join(os.path.abspath(os.curdir), '3D_vis_animations')
# Where should I save the dispersion diagrams?
save_dispersion_diagram_directory = os.path.join(os.path.abspath(os.curdir), '3D_vis_dispersion_diagrams')
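# Convenience guard (not in the original script): create the output
# directories up front so the save calls later in the script cannot fail on
# a missing folder.
for _out_dir in (save_directory, save_dispersion_diagram_directory):
    if not os.path.isdir(_out_dir):
        os.makedirs(_out_dir)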
# ================================
# Visualisation set-up
# ================================
# Variable definitions (for reference):
# x = k*x
# y = k*y
# z = k*z
# W = omega/k
# K = k*x_0
# t = omega*t
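# With these normalisations, one wavelength in z and one period in t both
# span an interval of 2*pi, which is why zmax and t_end are set to 2*pi in
# the animation section below.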
# Loop through selected modes
for mode_ind in [0]:#range(8,14): # for all others. REMEMBER SBB parameters
#for mode_ind in [14,15]: # for fast surface modes. REMEMBER SBS parameters
#for mode_ind in [16, 17]:
#for mode_ind in [13]: #for an individual mode
#for mode_ind in range(2,14):
if mode_ind not in range(len(mode_options)):
raise NameError('Mode not in mode_options')
# (note that fast surface modes, i.e. 14 and 15, can only be
# found with SBS parameters in slab_functions...)
mode = mode_options[mode_ind]
# Specify oscillation parameters
if 'slow' in mode and 'surf' in mode or 'alfven' in mode:
K = 2.
elif 'slow' in mode and 'body' in mode:
K = 8.
elif 'fast' in mode and 'body-1' in mode:
K = 8.
elif 'fast' in mode and 'body-2' in mode:
K = 15.
elif 'fast' in mode and 'body-3' in mode:
K = 22.
elif 'fast' in mode and 'surf' in mode:
K = 8.
else:
raise NameError('Mode not found')
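    # Recall K = k*x_0 (see the variable definitions above): the higher-order
    # body modes are assigned progressively larger K so that their extra
    # internal structure fits across the slab.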
# Specify density ratio R1 := rho_1 / rho_0
    # R1 = 1.5 # Higher density on left than right
# R1 = 1.8
# R1 = 1.9 # Disp_diagram will only work for R1=1.5, 1.8, 2.0
R1 = 2. # Symmetric slab
# Reduce number of variables in dispersion relation
disp_rel_partial = partial(sf.disp_rel_asym, R1=R1)
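    # partial() freezes the density ratio, so the root finder below only has
    # to scan the dispersion relation over (K, W).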
# find eigenfrequencies W (= omega/k) within the range Wrange for the given parameters.
Wrange1 = np.linspace(0., sf.cT, 11)
Wrange2 = np.linspace(sf.cT, sf.c0, 401)
Wrange3 = np.linspace(sf.c0, sf.c2, 11)
Woptions_slow_surf = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange1, args=None).transpose())
Woptions_slow_body = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange2, args=None).transpose())
Woptions_fast = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange3, args=None).transpose())
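    # tool.point_find presumably scans the given (K, Wrange) grid for zeros of
    # the dispersion function; only the real part of each candidate root is kept.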
# Remove W values that are very close to characteristic speeds - these are spurious solutions
tol = 1e-2
indices_to_rm = []
for i, w in enumerate(Woptions_slow_surf):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < 0 or w > sf.cT:
indices_to_rm.append(i)
Woptions_slow_surf = np.delete(Woptions_slow_surf, indices_to_rm)
Woptions_slow_surf.sort()
indices_to_rm = []
for i, w in enumerate(Woptions_slow_body):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < sf.cT or w > sf.c0:
indices_to_rm.append(i)
Woptions_slow_body = np.delete(Woptions_slow_body, indices_to_rm)
Woptions_slow_body.sort()
indices_to_rm = []
for i, w in enumerate(Woptions_fast):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
        if min(spurious_roots_diff) < tol or w < sf.c0 or w > min(sf.c1(R1), sf.c2):
indices_to_rm.append(i)
Woptions_fast = np.delete(Woptions_fast, indices_to_rm)
Woptions_fast.sort()
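    # The three filtering loops above repeat one pattern; here is a minimal
    # refactor sketch (not invoked below, and assuming the same sf attributes
    # are in scope) showing the shared logic in one place:
    def remove_spurious(roots, lower, upper, tol=1e-2):
        # Drop roots within tol of a characteristic speed, or outside the
        # open interval (lower, upper).
        speeds = np.array([0., sf.c0, sf.c1(R1), sf.c2, sf.vA])
        kept = [w for w in roots
                if abs(w - speeds).min() >= tol and lower < w < upper]
        return np.sort(np.array(kept))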
# remove any higher order slow body modes - we only want to do the first 3 saus/kink
if len(Woptions_slow_body) > 6:
Woptions_slow_body = np.delete(Woptions_slow_body, range(len(Woptions_slow_body) - 6))
Woptions = np.concatenate((Woptions_slow_surf, Woptions_slow_body, Woptions_fast))
# set W to be the eigenfrequency for the requested mode
if 'fast-saus-body' in mode or 'fast-kink-surf' in mode:
W = Woptions_fast[-2]
elif 'fast-kink-body' in mode or 'fast-saus-surf' in mode:
W = Woptions_fast[-1]
elif 'slow' in mode and 'surf' in mode:
W = Woptions_slow_surf[mode_ind]
elif 'slow' in mode and 'body' in mode:
W = Woptions_slow_body[mode_ind-2]
if 'alfven' in mode:
W = sf.vA
else:
W = np.real(W)
# Quick plot to see if we are hitting correct mode
if show_quick_plot:
plt.plot([K] * len(Woptions), Woptions, '.')
plt.plot(K+0.5, W, 'go')
plt.xlim([0,23])
plt.show()
# ================================
# Dispersion diagram
# ================================
if show_dispersion:
if 'alfven' in mode:
            raise NameError('Dispersion plot requested for an Alfven mode. Cannot do that.')
dispersion_diagram.dispersion_diagram(mode_options, mode,
disp_rel_partial, K, W, R1)
        # plt.tight_layout()  # disabled: it seems to chop the sides off
plt.savefig(os.path.join(save_dispersion_diagram_directory, 'R1_' + str(R1) + '_' + mode + '.png') )
plt.close()
# ================================
# Animation
# ================================
if show_animation:
print('Starting ' + mode)
# set grid parameters
xmin = -2.*K
xmax = 2.*K
ymin = 0.
ymax = 4.
zmin = 0.
zmax = 2*np.pi
        # You can change ny, but be careful changing nx and nz. Using 300
        # rather than 100 points per axis reduces bouncing of the field lines
        # for the same video size, at significant computational cost.
        nx = 300
        ny = 300
        nz = 300
nt = number_of_frames
if nz % nt != 0:
print("nt doesnt divide nz so there may be a problem with chopping in z direction for each time step")
t_start = 0.
t_end = zmax
t = t_start
xvals = np.linspace(xmin, xmax, nx)
yvals = np.linspace(ymin, ymax, ny)
zvals = np.linspace(zmin, zmax, nz, endpoint=False) # A fudge to give the height as exactly one wavelength
x_spacing = max(nx, ny, nz) / nx
y_spacing = max(nx, ny, nz) / ny
z_spacing = max(nx, ny, nz) / nz
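        # Example of the spacing trick: with nx = ny = nz = 300 every spacing
        # is 1.0 and the cube is isotropic; if instead ny were 100, y_spacing
        # would be 3.0, stretching the 100 y-points over the same visual
        # extent as the 300-point axes.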
        # Mask points for plotting the vector fields; this is done manually due to a Mayavi bug
mod = int(4 * nx / 100)
mod_y = int(np.ceil(mod / y_spacing))
# Get the data xi=displacement, v=velocity, b=mag field
if show_disp_top or show_disp_front:
xixvals = np.real(np.repeat(sf.xix(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
xizvals = np.real(np.repeat(sf.xiz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
xiyvals = np.real(np.repeat(sf.xiy(mode, xvals, zvals, t, W, K)[:, :, np.newaxis], ny, axis=2))
if show_vel_front or show_vel_top:
vxvals = np.real(np.repeat(sf.vx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vzvals = np.real(np.repeat(sf.vz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vyvals = np.real(np.repeat(sf.vy(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
if show_vel_front_pert or show_vel_top_pert:
vxvals = np.real(np.repeat(sf.vx_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vzvals = np.real(np.repeat(sf.vz_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vyvals = np.zeros_like(vxvals)
        # The axes are defined on the magnetic field, so we have to set up this data
bxvals = np.real(np.repeat(sf.bx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
byvals = np.real(np.repeat(sf.by(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
bz_eq3d = np.repeat(sf.bz_eq(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2)
bzvals = np.real(np.repeat(-sf.bz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2) +
bz_eq3d)
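        # The eigenfunctions depend only on x and z, so each 2D (nx, nz)
        # slice is replicated ny times along a new axis with np.repeat to
        # build the (nx, nz, ny) arrays Mayavi expects; np.real then discards
        # the residual imaginary parts of the complex eigenfunctions.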
# displacement at the right and left boundaries
if show_boundary:
xix_boundary_r_vals = np.real(np.repeat(K + sf.xix_boundary(mode, zvals, t, W, K, R1, boundary='r')[:, np.newaxis], ny, axis=1))
xix_boundary_l_vals = np.real(np.repeat(-K + sf.xix_boundary(mode, zvals, t, W, K, R1, boundary='l')[:, np.newaxis], ny, axis=1))
if show_density:
rho_vals = np.real(np.repeat(sf.rho(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
if show_density_pert:
rho_vals = np.real(np.repeat(sf.rho_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
bxvals_t = bxvals
byvals_t = byvals
bzvals_t = bzvals
if show_disp_top or show_disp_front:
xixvals_t = xixvals
xiyvals_t = xiyvals
xizvals_t = xizvals
if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
vxvals_t = vxvals
vyvals_t = vyvals
vzvals_t = vzvals
if show_boundary:
xix_boundary_r_vals_t = xix_boundary_r_vals
xix_boundary_l_vals_t = xix_boundary_l_vals
if show_density or show_density_pert:
rho_vals_t = rho_vals
# ================================
# Starting figure and visualisation modules
# ================================
zgrid_zy, ygrid_zy = np.mgrid[0:nz:(nz)*1j,
0:ny:(ny)*1j]
fig = mlab.figure(size=res) # (1920, 1080) for 1080p , tuple(101 * np.array((16,9))) #16:9 aspect ratio for video upload
# Spacing of grid so that we can display a visualisation cube without having the same number of grid points in each dimension
spacing = np.array([x_spacing, z_spacing, y_spacing])
if show_density or show_density_pert:
# Scalar field density
rho = mlab.pipeline.scalar_field(rho_vals_t, name="density", figure=fig)
rho.spacing = spacing
mpf.volume_red_blue(rho, rho_vals_t)
#Masking points
if show_mag_vec:
bxvals_mask_front_t, byvals_mask_front_t, bzvals_mask_front_t = mpf.mask_points(bxvals_t, byvals_t, bzvals_t,
'front', mod, mod_y)
if show_disp_top:
xixvals_mask_top_t, xiyvals_mask_top_t, xizvals_mask_top_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'top', mod, mod_y)
if show_disp_front:
xixvals_mask_front_t, xiyvals_mask_front_t, xizvals_mask_front_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'front', mod, mod_y)
if show_vel_top or show_vel_top_pert:
vxvals_mask_top_t, vyvals_mask_top_t, vzvals_mask_top_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'top', mod, mod_y)
if show_vel_front or show_vel_front_pert:
vxvals_mask_front_t, vyvals_mask_front_t, vzvals_mask_front_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'front', mod, mod_y)
xgrid, zgrid, ygrid = np.mgrid[0:nx:(nx)*1j,
0:nz:(nz)*1j,
0:ny:(ny)*1j]
field = mlab.pipeline.vector_field(bxvals_t, bzvals_t, byvals_t, name="B field",
figure=fig, scalars=zgrid)
field.spacing = spacing
if show_axes:
mpf.axes_no_label(field)
if show_mini_axis:
mpf.mini_axes()
if uniform_light:
#uniform lighting, but if we turn shading of volumes off, we are ok without
mpf.uniform_lighting(fig)
#Black background
mpf.background_colour(fig, (0., 0., 0.))
scalefactor = 8. * nx / 100. # scale factor for direction field vectors
# Set up visualisation modules
if show_mag_vec:
bdirfield_front = mlab.pipeline.vector_field(bxvals_mask_front_t, bzvals_mask_front_t,
byvals_mask_front_t, name="B field front",
figure=fig)
bdirfield_front.spacing = spacing
mpf.vector_cut_plane(bdirfield_front, 'front', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_vel_top or show_vel_top_pert:
vdirfield_top = mlab.pipeline.vector_field(vxvals_mask_top_t, np.zeros_like(vxvals_mask_top_t),
vyvals_mask_top_t, name="V field top",
figure=fig)
vdirfield_top.spacing = spacing
mpf.vector_cut_plane(vdirfield_top, 'top', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_vel_front or show_vel_front_pert:
vdirfield_front = mlab.pipeline.vector_field(vxvals_mask_front_t, vzvals_mask_front_t,
vyvals_mask_front_t, name="V field front",
figure=fig)
vdirfield_front.spacing = spacing
mpf.vector_cut_plane(vdirfield_front,'front', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_disp_top:
xidirfield_top = mlab.pipeline.vector_field(xixvals_mask_top_t, np.zeros_like(xixvals_mask_top_t),
xiyvals_mask_top_t, name="Xi field top",
figure=fig)
xidirfield_top.spacing = spacing
mpf.vector_cut_plane(xidirfield_top, 'top', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_disp_front:
xidirfield_front = mlab.pipeline.vector_field(xixvals_mask_front_t, xizvals_mask_front_t,
xiyvals_mask_front_t, name="Xi field front",
figure=fig)
xidirfield_front.spacing = spacing
mpf.vector_cut_plane(xidirfield_front, 'front', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
# Loop through time
for t_ind in range(nt):
if t_ind == 0:
bxvals_t = bxvals
byvals_t = byvals
bzvals_t = bzvals
if show_disp_top or show_disp_front:
xixvals_t = xixvals
xiyvals_t = xiyvals
xizvals_t = xizvals
if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
vxvals_t = vxvals
vyvals_t = vyvals
vzvals_t = vzvals
if show_boundary:
xix_boundary_r_vals_t = xix_boundary_r_vals
xix_boundary_l_vals_t = xix_boundary_l_vals
if show_density or show_density_pert:
rho_vals_t = rho_vals
else:
bxvals = np.real(np.repeat(sf.bx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
byvals = np.real(np.repeat(sf.by(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
bz_eq3d = np.repeat(sf.bz_eq(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2)
bzvals = np.real(np.repeat(-sf.bz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2) +
bz_eq3d)
bxvals_t = bxvals
byvals_t = byvals
bzvals_t = bzvals
# Update mag field data
field.mlab_source.set(u=bxvals_t, v=bzvals_t, w=byvals_t)
# Update mag field visualisation module
if show_mag_vec:
bxvals_mask_front_t, byvals_mask_front_t, bzvals_mask_front_t = mpf.mask_points(bxvals_t, byvals_t, bzvals_t,
'front', mod, mod_y)
bdirfield_front.mlab_source.set(u=bxvals_mask_front_t, v=bzvals_mask_front_t, w=byvals_mask_front_t)
# Update displacement field data
if show_disp_top or show_disp_front:
                    xixvals_split = np.split(xixvals, [nz - (nz // nt) * t_ind], axis=1)
                    xiyvals_split = np.split(xiyvals, [nz - (nz // nt) * t_ind], axis=1)
                    xizvals_split = np.split(xizvals, [nz - (nz // nt) * t_ind], axis=1)
xixvals_t = np.concatenate((xixvals_split[1], xixvals_split[0]), axis=1)
xiyvals_t = np.concatenate((xiyvals_split[1], xiyvals_split[0]), axis=1)
xizvals_t = np.concatenate((xizvals_split[1], xizvals_split[0]), axis=1)
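                    # The split/concatenate pair is equivalent to
                    # np.roll(xixvals, (nz // nt) * t_ind, axis=1): it
                    # advances the pattern along z by one step per frame
                    # (integer division keeps the split index an int).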
# Update displacement field visualisation module
if show_disp_top:
xixvals_mask_top_t, xiyvals_mask_top_t, xizvals_mask_top_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'top', mod, mod_y)
xidirfield_top.mlab_source.set(u=xixvals_mask_top_t, v=np.zeros_like(xixvals_mask_top_t), w=xiyvals_mask_top_t)
if show_disp_front:
xixvals_mask_front_t, xiyvals_mask_front_t, xizvals_mask_front_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'front', mod, mod_y)
xidirfield_front.mlab_source.set(u=xixvals_mask_front_t, v=xizvals_mask_front_t, w=xiyvals_mask_front_t)
# Update velocity field data
if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
                    vxvals_split = np.split(vxvals, [nz - (nz // nt) * t_ind], axis=1)
                    vyvals_split = np.split(vyvals, [nz - (nz // nt) * t_ind], axis=1)
                    vzvals_split = np.split(vzvals, [nz - (nz // nt) * t_ind], axis=1)
vxvals_t = np.concatenate((vxvals_split[1], vxvals_split[0]), axis=1)
vyvals_t = np.concatenate((vyvals_split[1], vyvals_split[0]), axis=1)
vzvals_t = np.concatenate((vzvals_split[1], vzvals_split[0]), axis=1)
# Update velocity field visualisation module
if show_vel_top or show_vel_top_pert:
vxvals_mask_top_t, vyvals_mask_top_t, vzvals_mask_top_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'top', mod, mod_y)
vdirfield_top.mlab_source.set(u=vxvals_mask_top_t, v=np.zeros_like(vxvals_mask_top_t), w=vyvals_mask_top_t)
if show_vel_front or show_vel_front_pert:
vxvals_mask_front_t, vyvals_mask_front_t, vzvals_mask_front_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'front', mod, mod_y)
vdirfield_front.mlab_source.set(u=vxvals_mask_front_t, v=vzvals_mask_front_t, w=vyvals_mask_front_t)
# Update boundary displacement data
if show_boundary:
                    xix_boundary_r_vals_split = np.split(xix_boundary_r_vals, [nz - (nz // nt) * t_ind], axis=0)
                    xix_boundary_l_vals_split = np.split(xix_boundary_l_vals, [nz - (nz // nt) * t_ind], axis=0)
xix_boundary_r_vals_t = np.concatenate((xix_boundary_r_vals_split[1], xix_boundary_r_vals_split[0]), axis=0)
xix_boundary_l_vals_t = np.concatenate((xix_boundary_l_vals_split[1], xix_boundary_l_vals_split[0]), axis=0)
# Update density data
if show_density or show_density_pert:
                    rho_vals_split = np.split(rho_vals, [nz - (nz // nt) * t_ind], axis=1)
rho_vals_t = np.concatenate((rho_vals_split[1], rho_vals_split[0]), axis=1)
rho.mlab_source.set(scalars=rho_vals_t)
# Boundary data - Letting mayavi know where to plot the boundary
if show_boundary:
ext_min_r = ((nx) * (xix_boundary_r_vals_t.min() - xmin) / (xmax - xmin)) * x_spacing
ext_max_r = ((nx) * (xix_boundary_r_vals_t.max() - xmin) / (xmax - xmin)) * x_spacing
ext_min_l = ((nx) * (xix_boundary_l_vals_t.min() - xmin) / (xmax - xmin)) * x_spacing
ext_max_l = ((nx) * (xix_boundary_l_vals_t.max() - xmin) / (xmax - xmin)) * x_spacing
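                    # These extents map the physical boundary positions onto
                    # grid index coordinates: x is rescaled from [xmin, xmax]
                    # onto [0, nx * x_spacing], matching the spacing applied
                    # to the vector field above.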
#Make field lines
if show_mag:
                    # Move seed points up with the phase speed (a bit of a fudge).
# Create an array of points for which we want mag field seeds
nx_seed = 9
ny_seed = 13
start_x = 30. * nx / 100.
end_x = nx+1 - start_x
start_y = 1.
                    if ny == 20:  # so that the lines don't go right up to the edge of the box
end_y = ny - 1.
elif ny == 100:
end_y = ny - 2.
elif ny == 300:
end_y = ny - 6.
else:
end_y = ny - 1
seeds=[]
dx_res = (end_x - start_x) / (nx_seed-1)
dy_res = (end_y - start_y) / (ny_seed-1)
for j in range(ny_seed):
for i in range(nx_seed):
x = start_x + (i * dx_res) * x_spacing
y = start_y + (j * dy_res) * y_spacing
z = 1. + (t_start + t_ind*(t_end - t_start)/nt)/zmax * nz
seeds.append((x,z,y))
if 'alfven' in mode:
for i in range(nx_seed):
del seeds[0]
del seeds[-1]
# Remove previous field lines - field lines cannot be updated, just the data that they are built from
if t_ind != 0:
                        field_lines.remove()  # field_lines is defined on the first pass through the loop
field_lines = SeedStreamline(seed_points=seeds)
# Field line visualisation tinkering
field_lines.stream_tracer.integration_direction='both'
field_lines.streamline_type = 'tube'
field_lines.stream_tracer.maximum_propagation = nz * 2
field_lines.tube_filter.number_of_sides = 20
field_lines.tube_filter.radius = 0.7 * max(nx, ny, nz) / 100.
field_lines.tube_filter.capping = True
field_lines.actor.property.opacity = 1.0
field.add_child(field_lines)
module_manager = field_lines.parent
# Colormap of magnetic field strength plotted on the field lines
if show_mag_scale:
module_manager.scalar_lut_manager.lut_mode = 'coolwarm'
module_manager.scalar_lut_manager.data_range=[7,18]
else:
mag_lut = module_manager.scalar_lut_manager.lut.table.to_array()
mag_lut[:,0] = [220]*256
mag_lut[:,1] = [20]*256
mag_lut[:,2] = [20]*256
module_manager.scalar_lut_manager.lut.table = mag_lut
if show_mag_fade:
mpf.colormap_fade(module_manager, fade_value=20)
# Which views do you want to show? Options are defined at the start
views_selected = [0]#[0,1,4,5,6] #range(7) #[2,3]
for view_ind, view_selected in enumerate(views_selected):
view = view_options[view_selected]
# Display boundary - cannot be updated each time
if show_boundary:
# Boundaries should look different depending on view
if view == 'front-parallel':
#remove previous boundaries
if t != 0 or view_ind != 0:
boundary_r.remove()
boundary_l.remove()
# Make a fading colormap by changing opacity at ends
lut = np.reshape(np.array([150, 150, 150, 255]*256), (256,4))
fade_value = 125
lut[:fade_value,-1] = np.linspace(0, 255, fade_value)
lut[-fade_value:,-1] = np.linspace(255, 0, fade_value)
# Set up boundary visualisation
boundary_r = mlab.mesh(xix_boundary_r_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_r, ext_max_r, 1, nz, 0, (ny-1) * y_spacing],
opacity=1., representation='wireframe',
line_width=12., scalars=zgrid_zy)
boundary_l = mlab.mesh(xix_boundary_l_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_l, ext_max_l, 1, nz, 0, (ny-1) * y_spacing],
opacity=1., representation='wireframe',
line_width=12., scalars=zgrid_zy)
# Boundary color and other options
boundary_r.module_manager.scalar_lut_manager.lut.table = lut
boundary_l.module_manager.scalar_lut_manager.lut.table = lut
boundary_r.actor.property.lighting = False
boundary_r.actor.property.shading = False
boundary_l.actor.property.lighting = False
boundary_l.actor.property.shading = False
else:
#remove previous boundaries
if t != 0 or view_ind != 0:
boundary_r.remove()
boundary_l.remove()
# Make a fading colormap by changing opacity at ends
lut = np.reshape(np.array([150, 150, 150, 255]*256), (256,4))
fade_value = 20
lut[:fade_value,-1] = np.linspace(0, 255, fade_value)
lut[-fade_value:,-1] = np.linspace(255, 0, fade_value)
# Set up boundary visualisation
boundary_r = mlab.mesh(xix_boundary_r_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_r, ext_max_r, 1, nz, 0, (ny-1) * y_spacing],
opacity=0.7, scalars=zgrid_zy)
boundary_l = mlab.mesh(xix_boundary_l_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_l, ext_max_l, 1, nz, 0, (ny-1) * y_spacing],
opacity=0.7, scalars=zgrid_zy)
# Boundary color and other options
boundary_r.module_manager.scalar_lut_manager.lut.table = lut
boundary_l.module_manager.scalar_lut_manager.lut.table = lut
boundary_r.actor.property.lighting = False
boundary_r.actor.property.shading = False
boundary_l.actor.property.lighting = False
boundary_l.actor.property.shading = False
# Set viewing angle - For some unknown reason we must redefine the camera position each time.
# This is something to do with the boundaries being replaced each time.
mpf.view_position(fig, view, nx, ny, nz)
if save_images:
prefix = 'R1_'+str(R1) + '_' + mode + '_' + vis_mod_string + view + '_'# + '_norho_'
mlab.savefig(os.path.join(save_directory, prefix + str(t_ind+1) + '.png'))
if t_ind == nt - 1:
if make_video:
i2v.image2video(filepath=save_directory, prefix=prefix,
output_name=prefix+'video', out_extension='mp4',
fps=fps, n_loops=4, delete_images=True,
delete_old_videos=True, res=res[1])
# Log: to keep us updated with progress
if t_ind % 5 == 4:
print('Finished frame number ' + str(t_ind + 1) + ' out of ' + str(number_of_frames))
#Release some memory after each time step
gc.collect()
#step t forward
t = t + (t_end - t_start) / nt
            # Close the Mayavi window each time if we want to make a video
if make_video:
mlab.close(fig)
    print('Finished ' + mode)
| mit | 2,062,885,535,254,290,400 | 48.086713 | 208 | 0.498518 | false | 3.590384 | false | false | false
bundgus/python-playground | ssh-playground/demo_sftp.py | 1 | 2786 |
#!/usr/bin/env python
# Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
# based on code provided by raymond mosteller (thanks!)
import socket
import sys
import traceback
import paramiko
# setup logging
paramiko.util.log_to_file('demo_sftp.log')
hostname = '192.168.1.11'
port = 22
username = 'username'
password = 'password'
# Paramiko client configuration
UseGSSAPI = False # enable GSS-API / SSPI authentication
DoGSSAPIKeyExchange = False
# now, connect and use paramiko Transport to negotiate SSH2 across the connection
try:
t = paramiko.Transport((hostname, port))
t.connect(None, username, password, gss_host=socket.getfqdn(hostname),
gss_auth=UseGSSAPI, gss_kex=DoGSSAPIKeyExchange)
sftp = paramiko.SFTPClient.from_transport(t)
# dirlist on remote host
dirlist = sftp.listdir('.')
print("Dirlist: %s" % dirlist)
# copy this demo onto the server
try:
sftp.mkdir("demo_sftp_folder")
except IOError:
print('(assuming demo_sftp_folder/ already exists)')
with sftp.open('demo_sftp_folder/README', 'w') as f:
f.write('This was created by demo_sftp.py.\n')
with open('demo_sftp.py', 'r') as f:
data = f.read()
sftp.open('demo_sftp_folder/demo_sftp.py', 'w').write(data)
print('created demo_sftp_folder/ on the server')
# copy the README back here
with sftp.open('demo_sftp_folder/README', 'r') as f:
data = f.read()
with open('README_demo_sftp', 'w') as f:
f.write(data.decode('utf-8'))
print('copied README back here')
# BETTER: use the get() and put() methods
sftp.put('demo_sftp.py', 'demo_sftp_folder/demo_sftp.py')
sftp.get('demo_sftp_folder/README', 'README_demo_sftp')
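    # An alternative sketch using the higher-level SSHClient API (not run
    # here; it would reuse the same credentials), which manages the Transport
    # and host keys for us:
    #     client = paramiko.SSHClient()
    #     client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    #     client.connect(hostname, port=port, username=username,
    #                    password=password)
    #     sftp = client.open_sftp()
    #     sftp.put('demo_sftp.py', 'demo_sftp_folder/demo_sftp.py')
    #     client.close()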
t.close()
except Exception as e:
print('*** Caught exception: %s: %s' % (e.__class__, e))
traceback.print_exc()
try:
t.close()
except:
pass
sys.exit(1)
| mit | 2,605,952,891,896,653,000 | 30.303371 | 81 | 0.684135 | false | 3.44802 | false | false | false |
cmjatai/cmj | cmj/cerimonial/models.py | 1 | 27858 |
from django.contrib.auth.models import Group
from django.db import models
from django.db.models.deletion import SET_NULL, PROTECT, CASCADE
from django.utils.translation import ugettext_lazy as _
from cmj.core.models import CmjModelMixin, Trecho, Distrito, RegiaoMunicipal,\
CmjAuditoriaModelMixin, CmjSearchMixin, AreaTrabalho, Bairro, Municipio
from cmj.utils import YES_NO_CHOICES, NONE_YES_NO_CHOICES,\
get_settings_auth_user_model
from sapl.parlamentares.models import Parlamentar, Partido
from sapl.utils import LISTA_DE_UFS
FEMININO = 'F'
MASCULINO = 'M'
SEXO_CHOICE = ((FEMININO, _('Feminino')),
(MASCULINO, _('Masculino')))
IMP_BAIXA = 'B'
IMP_MEDIA = 'M'
IMP_ALTA = 'A'
IMP_CRITICA = 'C'
IMPORTANCIA_CHOICE = (
(IMP_BAIXA, _('Baixa')),
(IMP_MEDIA, _('Média')),
(IMP_ALTA, _('Alta')),
(IMP_CRITICA, _('Crítica')),
)
class DescricaoAbstractModel(models.Model):
descricao = models.CharField(
default='', max_length=254, verbose_name=_('Nome / Descrição'))
class Meta:
abstract = True
ordering = ('descricao',)
def __str__(self):
return self.descricao
class TipoTelefone(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tipo de Telefone')
verbose_name_plural = _('Tipos de Telefone')
class TipoEndereco(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tipo de Endereço')
verbose_name_plural = _('Tipos de Endereço')
class TipoEmail(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tipo de Email')
verbose_name_plural = _('Tipos de Email')
class Parentesco(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Parentesco')
verbose_name_plural = _('Parentescos')
class EstadoCivil(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Estado Civil')
verbose_name_plural = _('Estados Civis')
class PronomeTratamento(models.Model):
nome_por_extenso = models.CharField(
default='', max_length=254, verbose_name=_('Nome Por Extenso'))
abreviatura_singular_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Abreviatura Singular Masculino'))
abreviatura_singular_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Abreviatura Singular Feminino'))
abreviatura_plural_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Abreviatura Plural Masculino'))
abreviatura_plural_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Abreviatura Plural Feminino'))
vocativo_direto_singular_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Direto Singular Masculino'))
vocativo_direto_singular_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Direto Singular Feminino'))
vocativo_direto_plural_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Direto Plural Masculino'))
vocativo_direto_plural_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Direto Plural Feminino'))
vocativo_indireto_singular_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Indireto Singular Masculino'))
vocativo_indireto_singular_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Indireto Singular Feminino'))
vocativo_indireto_plural_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Indireto Plural Masculino'))
vocativo_indireto_plural_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Vocativo Indireto Plural Feminino'))
enderecamento_singular_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Endereçamento Singular Masculino'))
enderecamento_singular_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Endereçamento Singular Feminino'))
enderecamento_plural_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Endereçamento Plural Masculino'))
enderecamento_plural_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Endereçamento Plural Feminino'))
prefixo_nome_singular_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Prefixo Singular Masculino'))
prefixo_nome_singular_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Prefixo Singular Feminino'))
prefixo_nome_plural_m = models.CharField(
default='', max_length=254, verbose_name=_(
'Prefixo Plural Masculino'))
prefixo_nome_plural_f = models.CharField(
default='', max_length=254, verbose_name=_(
'Prefixo Plural Feminino'))
class Meta:
verbose_name = _('Pronome de Tratamento')
verbose_name_plural = _('Pronomes de tratamento')
def __str__(self):
return self.nome_por_extenso
class TipoAutoridade(DescricaoAbstractModel):
pronomes = models.ManyToManyField(
PronomeTratamento,
related_name='tipoautoridade_set')
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tipo de Autoridade')
verbose_name_plural = _('Tipos de Autoridade')
class TipoLocalTrabalho(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tipo do Local de Trabalho')
verbose_name_plural = _('Tipos de Local de Trabalho')
class NivelInstrucao(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Nível de Instrução')
verbose_name_plural = _('Níveis de Instrução')
class OperadoraTelefonia(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Operadora de Telefonia')
verbose_name_plural = _('Operadoras de Telefonia')
class Contato(CmjSearchMixin, CmjAuditoriaModelMixin):
nome = models.CharField(max_length=100, verbose_name=_('Nome'))
nome_social = models.CharField(
blank=True, default='', max_length=100, verbose_name=_('Nome Social'))
apelido = models.CharField(
blank=True, default='', max_length=100, verbose_name=_('Apelido'))
data_nascimento = models.DateField(
blank=True, null=True, verbose_name=_('Data de Nascimento'))
sexo = models.CharField(
max_length=1, blank=True,
verbose_name=_('Sexo Biológico'), choices=SEXO_CHOICE)
identidade_genero = models.CharField(
blank=True, default='',
max_length=100, verbose_name=_('Como se reconhece?'))
tem_filhos = models.NullBooleanField(
choices=NONE_YES_NO_CHOICES,
default=None, verbose_name=_('Tem Filhos?'))
quantos_filhos = models.PositiveSmallIntegerField(
default=0, blank=True, verbose_name=_('Quantos Filhos?'))
estado_civil = models.ForeignKey(
EstadoCivil,
related_name='contato_set',
blank=True, null=True, on_delete=SET_NULL,
verbose_name=_('Estado Civil'))
nivel_instrucao = models.ForeignKey(
NivelInstrucao,
related_name='contato_set',
blank=True, null=True, on_delete=SET_NULL,
verbose_name=_('Nivel de Instrução'))
naturalidade = models.CharField(
max_length=50, blank=True, verbose_name=_('Naturalidade'))
nome_pai = models.CharField(
max_length=100, blank=True, verbose_name=_('Nome do Pai'))
nome_mae = models.CharField(
max_length=100, blank=True, verbose_name=_('Nome da Mãe'))
numero_sus = models.CharField(
max_length=100, blank=True, verbose_name=_('Número do SUS'))
cpf = models.CharField(max_length=15, blank=True, verbose_name=_('CPF'))
titulo_eleitor = models.CharField(
max_length=15,
blank=True,
verbose_name=_('Título de Eleitor'))
rg = models.CharField(max_length=30, blank=True, verbose_name=_('RG'))
rg_orgao_expedidor = models.CharField(
max_length=20, blank=True, verbose_name=_('Órgão Expedidor'))
rg_data_expedicao = models.DateField(
blank=True, null=True, verbose_name=_('Data de Expedição'))
ativo = models.BooleanField(choices=YES_NO_CHOICES,
default=True, verbose_name=_('Ativo?'))
workspace = models.ForeignKey(
AreaTrabalho,
verbose_name=_('Área de Trabalho'),
related_name='contato_set',
blank=True, null=True, on_delete=PROTECT)
perfil_user = models.ForeignKey(
get_settings_auth_user_model(),
verbose_name=_('Perfil do Usuário'),
related_name='contato_set',
blank=True, null=True, on_delete=CASCADE)
profissao = models.CharField(
max_length=254, blank=True, verbose_name=_('Profissão'))
tipo_autoridade = models.ForeignKey(
TipoAutoridade,
verbose_name=TipoAutoridade._meta.verbose_name,
related_name='contato_set',
blank=True, null=True, on_delete=SET_NULL)
cargo = models.CharField(max_length=254, blank=True, default='',
verbose_name=_('Cargo/Função'))
pronome_tratamento = models.ForeignKey(
PronomeTratamento,
verbose_name=PronomeTratamento._meta.verbose_name,
related_name='contato_set',
blank=True, null=True, on_delete=SET_NULL,
        help_text=_('O pronome de tratamento é opcional, mas será '
                    'obrigatório caso seja selecionado um tipo de autoridade.'))
observacoes = models.TextField(
blank=True, default='',
        verbose_name=_('Outras observações sobre o Contato'))
@property
def fields_search(self):
return ['nome',
'nome_social',
'apelido']
class Meta:
verbose_name = _('Contato')
verbose_name_plural = _('Contatos')
ordering = ['nome']
permissions = (
('print_impressoenderecamento',
_('Pode Imprimir Impressos de Endereçamento')),
('print_rel_contato_agrupado_por_processo',
_('Pode Imprimir Relatório de Contatos Agrupados por Processo')),
('print_rel_contato_agrupado_por_grupo',
_('Pode Imprimir Relatório de Contatos Agrupados '
'Grupos de Contato')),
)
unique_together = (
('nome', 'data_nascimento', 'workspace', 'perfil_user'),)
def __str__(self):
return self.nome
class PerfilManager(models.Manager):
def for_user(self, user):
return super(
PerfilManager, self).get_queryset().get(
perfil_user=user)
class Perfil(Contato):
objects = PerfilManager()
class Meta:
proxy = True
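# Usage sketch for the manager above (assumes an authenticated request.user
# linked through perfil_user; raises DoesNotExist when no match exists):
#     perfil = Perfil.objects.for_user(request.user)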
class Telefone(CmjAuditoriaModelMixin):
contato = models.ForeignKey(
Contato, on_delete=CASCADE,
verbose_name=_('Contato'),
related_name="telefone_set")
operadora = models.ForeignKey(
OperadoraTelefonia, on_delete=SET_NULL,
related_name='telefone_set',
blank=True, null=True,
verbose_name=OperadoraTelefonia._meta.verbose_name)
tipo = models.ForeignKey(
TipoTelefone,
blank=True, null=True,
on_delete=SET_NULL,
related_name='telefone_set',
verbose_name='Tipo')
telefone = models.CharField(max_length=100,
verbose_name='Número do Telefone')
proprio = models.NullBooleanField(
choices=NONE_YES_NO_CHOICES,
blank=True, null=True, verbose_name=_('Próprio?'))
de_quem_e = models.CharField(
max_length=40, verbose_name='De quem é?', blank=True,
help_text=_('Se não é próprio, de quem é?'))
preferencial = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Preferêncial?'))
permissao = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Permissão:'),
help_text=_("Permite que nossa instituição entre em contato \
com você neste telefone?"))
@property
def numero_nome_contato(self):
return str(self)
class Meta:
verbose_name = _('Telefone')
verbose_name_plural = _('Telefones')
def __str__(self):
return self.telefone
class TelefonePerfil(Telefone):
class Meta:
proxy = True
verbose_name = _('Telefone do Perfil')
verbose_name_plural = _('Telefones do Perfil')
class Email(CmjAuditoriaModelMixin):
contato = models.ForeignKey(
Contato, on_delete=CASCADE,
verbose_name=_('Contato'),
related_name="email_set")
tipo = models.ForeignKey(
TipoEmail,
blank=True, null=True,
on_delete=SET_NULL,
related_name='email_set',
verbose_name='Tipo')
email = models.EmailField(verbose_name='Email')
preferencial = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Preferêncial?'))
permissao = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Permissão:'),
help_text=_("Permite que nossa instituição envie informações \
para este email?"))
class Meta:
verbose_name = _('Email')
verbose_name_plural = _("Email's")
def __str__(self):
return self.email
class EmailPerfil(Email):
class Meta:
proxy = True
verbose_name = _('Email do Perfil')
verbose_name_plural = _("Email's do Perfil")
class Dependente(CmjAuditoriaModelMixin):
parentesco = models.ForeignKey(Parentesco,
on_delete=PROTECT,
related_name='+',
verbose_name=_('Parentesco'))
contato = models.ForeignKey(Contato,
verbose_name=_('Contato'),
related_name='dependente_set',
on_delete=CASCADE)
nome = models.CharField(max_length=100, verbose_name=_('Nome'))
nome_social = models.CharField(
blank=True, default='', max_length=100, verbose_name=_('Nome Social'))
apelido = models.CharField(
blank=True, default='', max_length=100, verbose_name=_('Apelido'))
sexo = models.CharField(
blank=True, max_length=1, verbose_name=_('Sexo Biológico'),
choices=SEXO_CHOICE)
data_nascimento = models.DateField(
blank=True, null=True, verbose_name=_('Data Nascimento'))
identidade_genero = models.CharField(
blank=True, default='',
max_length=100, verbose_name=_('Como se reconhece?'))
nivel_instrucao = models.ForeignKey(
NivelInstrucao,
related_name='dependente_set',
blank=True, null=True, on_delete=SET_NULL,
verbose_name=_('Nivel de Instrução'))
class Meta:
verbose_name = _('Dependente')
verbose_name_plural = _('Dependentes')
def __str__(self):
return self.nome
class DependentePerfil(Dependente):
class Meta:
proxy = True
verbose_name = _('Dependente do Perfil')
verbose_name_plural = _('Dependentes do Perfil')
class LocalTrabalho(CmjAuditoriaModelMixin):
contato = models.ForeignKey(Contato,
verbose_name=_('Contato'),
related_name='localtrabalho_set',
on_delete=CASCADE)
nome = models.CharField(
max_length=254, verbose_name=_('Nome / Razão Social'))
nome_social = models.CharField(
blank=True, default='', max_length=254,
verbose_name=_('Nome Fantasia'))
tipo = models.ForeignKey(
TipoLocalTrabalho,
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL,
verbose_name=_('Tipo do Local de Trabalho'))
trecho = models.ForeignKey(
Trecho,
verbose_name=_('Trecho'),
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL)
uf = models.CharField(max_length=2, blank=True, choices=LISTA_DE_UFS,
verbose_name=_('Estado'))
municipio = models.ForeignKey(
Municipio,
verbose_name=Municipio._meta.verbose_name,
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL)
cep = models.CharField(max_length=9, blank=True, default='',
verbose_name=_('CEP'))
endereco = models.CharField(
max_length=254, blank=True, default='',
verbose_name=_('Endereço'),
help_text=_('O campo endereço também é um campo de busca. Nele '
'você pode digitar qualquer informação, inclusive '
'digitar o cep para localizar o endereço, e vice-versa!'))
numero = models.CharField(max_length=50, blank=True, default='',
verbose_name=_('Número'))
bairro = models.ForeignKey(
Bairro,
verbose_name=Bairro._meta.verbose_name,
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL)
distrito = models.ForeignKey(
Distrito,
verbose_name=Distrito._meta.verbose_name,
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL)
regiao_municipal = models.ForeignKey(
RegiaoMunicipal,
verbose_name=RegiaoMunicipal._meta.verbose_name,
related_name='localtrabalho_set',
blank=True, null=True, on_delete=SET_NULL)
complemento = models.CharField(max_length=30, blank=True, default='',
verbose_name=_('Complemento'))
data_inicio = models.DateField(
blank=True, null=True, verbose_name=_('Data de Início'))
data_fim = models.DateField(
blank=True, null=True, verbose_name=_('Data de Fim'))
preferencial = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Preferencial?'))
cargo = models.CharField(
max_length=254, blank=True, default='',
verbose_name=_('Cargo/Função'),
help_text=_('Ao definir um cargo e função aqui, o '
'Cargo/Função preenchido na aba "Dados Básicos", '
'será desconsiderado ao gerar impressos!'))
class Meta:
verbose_name = _('Local de Trabalho')
verbose_name_plural = _('Locais de Trabalho')
def __str__(self):
return self.nome
class LocalTrabalhoPerfil(LocalTrabalho):
class Meta:
proxy = True
verbose_name = _('Local de Trabalho do Perfil')
verbose_name_plural = _('Locais de Trabalho do Perfil')
class Endereco(CmjAuditoriaModelMixin):
contato = models.ForeignKey(Contato,
verbose_name=_('Contato'),
related_name='endereco_set',
on_delete=CASCADE)
tipo = models.ForeignKey(
TipoEndereco,
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL,
verbose_name=_('Tipo do Endereço'))
trecho = models.ForeignKey(
Trecho,
verbose_name=_('Trecho'),
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL)
uf = models.CharField(max_length=2, blank=True, choices=LISTA_DE_UFS,
verbose_name=_('Estado'))
municipio = models.ForeignKey(
Municipio,
verbose_name=_('Município'),
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL)
cep = models.CharField(max_length=9, blank=True, default='',
verbose_name=_('CEP'))
endereco = models.CharField(
max_length=254, blank=True, default='',
verbose_name=_('Endereço'),
help_text=_('O campo endereço também é um campo de busca, nele '
'você pode digitar qualquer informação, inclusive '
'digitar o cep para localizar o endereço, e vice-versa!'))
numero = models.CharField(max_length=50, blank=True, default='',
verbose_name=_('Número'))
bairro = models.ForeignKey(
Bairro,
verbose_name=Bairro._meta.verbose_name,
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL)
distrito = models.ForeignKey(
Distrito,
verbose_name=Distrito._meta.verbose_name,
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL)
regiao_municipal = models.ForeignKey(
RegiaoMunicipal,
verbose_name=RegiaoMunicipal._meta.verbose_name,
related_name='endereco_set',
blank=True, null=True, on_delete=SET_NULL)
complemento = models.CharField(max_length=254, blank=True, default='',
verbose_name=_('Complemento'))
ponto_referencia = models.CharField(max_length=254, blank=True, default='',
verbose_name=_('Pontos de Referência'))
observacoes = models.TextField(
blank=True, default='',
        verbose_name=_('Outras observações sobre o Endereço'))
preferencial = models.BooleanField(
choices=YES_NO_CHOICES,
default=True, verbose_name=_('Preferencial?'))
"""help_text=_('Correspondências automáticas serão geradas sempre '
'para os endereços preferenciais.')"""
class Meta:
verbose_name = _('Endereço')
verbose_name_plural = _('Endereços')
def __str__(self):
numero = (' - ' + self.numero) if self.numero else ''
return self.endereco + numero
class EnderecoPerfil(Endereco):
class Meta:
proxy = True
verbose_name = _('Endereço do Perfil')
verbose_name_plural = _('Endereços do Perfil')
class FiliacaoPartidaria(CmjAuditoriaModelMixin):
contato = models.ForeignKey(Contato,
verbose_name=_('Contato'),
related_name='filiacaopartidaria_set',
on_delete=CASCADE)
data = models.DateField(verbose_name=_('Data de Filiação'))
partido = models.ForeignKey(Partido,
related_name='filiacaopartidaria_set',
verbose_name=Partido._meta.verbose_name,
on_delete=PROTECT)
data_desfiliacao = models.DateField(
blank=True, null=True, verbose_name=_('Data de Desfiliação'))
@property
def contato_nome(self):
return str(self.contato)
class Meta:
verbose_name = _('Filiação Partidária')
verbose_name_plural = _('Filiações Partidárias')
def __str__(self):
return str(self.partido)
# -----------------------------------------------------------------
# -----------------------------------------------------------------
# PROCESSOS
# -----------------------------------------------------------------
# -----------------------------------------------------------------
class StatusProcesso(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Status de Processo')
verbose_name_plural = _('Status de Processos')
class ClassificacaoProcesso(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Classificacao de Processo')
verbose_name_plural = _('Classificações de Processos')
class TopicoProcesso(DescricaoAbstractModel):
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Tópico de Processo')
verbose_name_plural = _('Tópicos de Processos')
class AssuntoProcesso(DescricaoAbstractModel, CmjAuditoriaModelMixin):
workspace = models.ForeignKey(
AreaTrabalho,
verbose_name=_('Área de Trabalho'),
related_name='assuntoprocesso_set',
on_delete=PROTECT)
class Meta(DescricaoAbstractModel.Meta):
verbose_name = _('Assunto de Processo')
verbose_name_plural = _('Assuntos de Processos')
class Processo(CmjSearchMixin, CmjAuditoriaModelMixin):
titulo = models.CharField(max_length=9999, verbose_name=_('Título'))
data = models.DateField(verbose_name=_('Data de Abertura'))
descricao = models.TextField(
blank=True, default='',
verbose_name=_('Descrição do Processo'))
observacoes = models.TextField(
blank=True, default='',
verbose_name=_('Outras observações sobre o Processo'))
solucao = models.TextField(
blank=True, default='',
verbose_name=_('Solução do Processo'))
contatos = models.ManyToManyField(Contato,
blank=True,
verbose_name=_(
'Contatos Interessados no Processo'),
related_name='processo_set',)
status = models.ForeignKey(StatusProcesso,
blank=True, null=True,
verbose_name=_('Status do Processo'),
related_name='processo_set',
on_delete=SET_NULL)
importancia = models.CharField(
max_length=1, blank=True,
verbose_name=_('Importância'), choices=IMPORTANCIA_CHOICE)
topicos = models.ManyToManyField(
TopicoProcesso, blank=True,
related_name='processo_set',
verbose_name=_('Tópicos'))
classificacoes = models.ManyToManyField(
ClassificacaoProcesso, blank=True,
related_name='processo_set',
verbose_name=_('Classificações'),)
assuntos = models.ManyToManyField(
AssuntoProcesso, blank=True,
related_name='processo_set',
verbose_name=_('Assuntos'),)
workspace = models.ForeignKey(
AreaTrabalho,
verbose_name=_('Área de Trabalho'),
related_name='processo_set',
on_delete=PROTECT)
class Meta:
verbose_name = _('Processo')
verbose_name_plural = _('Processos')
ordering = ('titulo', )
def __str__(self):
return str(self.titulo)
@property
def fields_search(self):
return ['titulo',
'observacoes',
'descricao']
class ProcessoContato(Processo):
class Meta:
proxy = True
verbose_name = _('Processo')
verbose_name_plural = _('Processos')
class GrupoDeContatos(CmjAuditoriaModelMixin):
nome = models.CharField(max_length=100,
verbose_name=_('Nome do Grupo'))
contatos = models.ManyToManyField(Contato,
blank=True,
verbose_name=_(
'Contatos do Grupo'),
related_name='grupodecontatos_set',)
workspace = models.ForeignKey(
AreaTrabalho,
verbose_name=_('Área de Trabalho'),
related_name='grupodecontatos_set',
on_delete=PROTECT)
class Meta:
verbose_name = _('Grupo de Contatos')
verbose_name_plural = _('Grupos de Contatos')
ordering = ('nome', )
def __str__(self):
return str(self.nome)
| gpl-3.0 | 2,557,580,451,169,322,000 | 31.736718 | 79 | 0.601089 | false | 3.564468 | false | false | false |
rajalokan/nova | nova/tests/functional/notification_sample_tests/test_instance.py | 1 | 34922 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import mock
from nova import context
from nova import exception
from nova.tests import fixtures
from nova.tests.functional.notification_sample_tests \
import notification_sample_base
from nova.tests.unit import fake_notifier
class TestInstanceNotificationSample(
notification_sample_base.NotificationSampleTestBase):
def setUp(self):
self.flags(use_neutron=True)
super(TestInstanceNotificationSample, self).setUp()
self.neutron = fixtures.NeutronFixture(self)
self.useFixture(self.neutron)
self.cinder = fixtures.CinderFixture(self)
self.useFixture(self.cinder)
def _wait_until_swap_volume(self, server, volume_id):
for i in range(50):
volume_attachments = self.api.get_server_volumes(server['id'])
if len(volume_attachments) > 0:
for volume_attachment in volume_attachments:
if volume_attachment['volumeId'] == volume_id:
return
time.sleep(0.5)
self.fail('Volume swap operation failed.')
def _wait_until_swap_volume_error(self):
for i in range(50):
if self.cinder.swap_error:
return
time.sleep(0.5)
self.fail("Timed out waiting for volume swap error to occur.")
def test_instance_action(self):
        # A single test case is used to test most of the instance action
        # notifications, to avoid booting up an instance for every action
        # separately.
        # Every instance action test function shall make sure that after it
        # runs the instance is in the active state and usable by other
        # actions. Therefore some actions, especially delete, cannot be used
        # here, as recovering from them would mean recreating the instance,
        # which would defeat the whole purpose of this optimization.
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
actions = [
self._test_power_off_on_server,
self._test_restore_server,
self._test_suspend_resume_server,
self._test_pause_unpause_server,
self._test_shelve_server,
self._test_shelve_offload_server,
self._test_unshelve_server,
self._test_resize_server,
self._test_revert_server,
self._test_resize_confirm_server,
self._test_snapshot_server,
self._test_rebuild_server,
self._test_reboot_server,
self._test_reboot_server_error,
self._test_trigger_crash_dump,
self._test_volume_attach_detach_server,
self._test_rescue_server,
self._test_unrescue_server,
self._test_soft_delete_server,
self._test_attach_volume_error,
]
for action in actions:
fake_notifier.reset()
action(server)
# Ensure that instance is in active state after an action
self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
def test_create_delete_server(self):
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
self.api.delete_server(server['id'])
self._wait_until_deleted(server)
self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
# This list needs to be in order.
expected_notifications = [
'instance-create-start',
'instance-create-end',
'instance-delete-start',
'instance-shutdown-start',
'instance-shutdown-end',
'instance-delete-end'
]
for idx, notification in enumerate(expected_notifications):
self._verify_notification(
notification,
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[idx])
@mock.patch('nova.compute.manager.ComputeManager._build_resources')
def test_create_server_error(self, mock_build):
def _build_resources(*args, **kwargs):
raise exception.FlavorDiskTooSmall()
mock_build.side_effect = _build_resources
server = self._boot_a_server(
expected_status='ERROR',
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-create-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-create-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
def _verify_instance_update_steps(self, steps, notifications,
initial=None):
replacements = {}
if initial:
replacements = initial
for i, step in enumerate(steps):
replacements.update(step)
self._verify_notification(
'instance-update',
replacements=replacements,
actual=notifications[i])
return replacements
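    # The replacements dict accumulates across steps: each step lists only
    # the fields that changed since the previous notification, so e.g.
    # steps = [{'state': 'building'}, {'state': 'active'}] would verify two
    # instance.update samples while carrying every other field forward.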
def test_create_delete_server_with_instance_update(self):
# This makes server network creation synchronous which is necessary
# for notification samples that expect instance.info_cache.network_info
# to be set.
self.useFixture(fixtures.SpawnIsSynchronousFixture())
self.flags(notify_on_state_change='vm_and_task_state',
group='notifications')
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
instance_updates = self._wait_for_notifications('instance.update', 7)
        # The first notification comes from the nova-conductor; the rest are
        # from the nova-compute service. To keep the test simpler, assert
        # this fact and then modify the publisher_id of the first
        # notification to match the template.
self.assertEqual('conductor:fake-mini',
instance_updates[0]['publisher_id'])
instance_updates[0]['publisher_id'] = 'nova-compute:fake-mini'
create_steps = [
# nothing -> scheduling
{'reservation_id': server['reservation_id'],
'uuid': server['id'],
'host': None,
'node': None,
'state_update.new_task_state': 'scheduling',
'state_update.old_task_state': 'scheduling',
'state_update.state': 'building',
'state_update.old_state': 'building',
'state': 'building'},
# scheduling -> building
{
'state_update.new_task_state': None,
'state_update.old_task_state': 'scheduling',
'task_state': None},
# scheduled
{'host': 'compute',
'node': 'fake-mini',
'state_update.old_task_state': None},
# building -> networking
{'state_update.new_task_state': 'networking',
'state_update.old_task_state': 'networking',
'task_state': 'networking'},
# networking -> block_device_mapping
{'state_update.new_task_state': 'block_device_mapping',
'state_update.old_task_state': 'networking',
'task_state': 'block_device_mapping',
'ip_addresses': [{
"nova_object.name": "IpPayload",
"nova_object.namespace": "nova",
"nova_object.version": "1.0",
"nova_object.data": {
"mac": "fa:16:3e:4c:2c:30",
"address": "192.168.1.3",
"port_uuid": "ce531f90-199f-48c0-816c-13e38010b442",
"meta": {},
"version": 4,
"label": "private-network",
"device_name": "tapce531f90-19"
}}]
},
# block_device_mapping -> spawning
{'state_update.new_task_state': 'spawning',
'state_update.old_task_state': 'block_device_mapping',
'task_state': 'spawning',
},
# spawning -> active
{'state_update.new_task_state': None,
'state_update.old_task_state': 'spawning',
'state_update.state': 'active',
'launched_at': '2012-10-29T13:42:11Z',
'state': 'active',
'task_state': None,
'power_state': 'running'},
]
replacements = self._verify_instance_update_steps(
create_steps, instance_updates)
fake_notifier.reset()
# Let's generate some bandwidth usage data.
# Just call the periodic task directly for simplicity
self.compute.manager._poll_bandwidth_usage(context.get_admin_context())
self.api.delete_server(server['id'])
self._wait_until_deleted(server)
instance_updates = self._get_notifications('instance.update')
self.assertEqual(2, len(instance_updates))
delete_steps = [
# active -> deleting
{'state_update.new_task_state': 'deleting',
'state_update.old_task_state': 'deleting',
'state_update.old_state': 'active',
'state': 'active',
'task_state': 'deleting',
'bandwidth': [
{'nova_object.namespace': 'nova',
'nova_object.name': 'BandwidthPayload',
'nova_object.data':
{'network_name': 'private-network',
'out_bytes': 0,
'in_bytes': 0},
'nova_object.version': '1.0'}],
'tags': ["tag1"]
},
# deleting -> deleted
{'state_update.new_task_state': None,
'state_update.old_task_state': 'deleting',
'state_update.old_state': 'active',
'state_update.state': 'deleted',
'state': 'deleted',
'task_state': None,
'terminated_at': '2012-10-29T13:42:11Z',
'ip_addresses': [],
'power_state': 'pending',
'bandwidth': [],
'tags': ["tag1"]
},
]
self._verify_instance_update_steps(delete_steps, instance_updates,
initial=replacements)
def _test_power_off_on_server(self, server):
self.api.post_server_action(server['id'], {'os-stop': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHUTOFF')
self.api.post_server_action(server['id'], {'os-start': {}})
self._wait_for_state_change(self.api, server,
expected_status='ACTIVE')
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-power_off-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-power_off-end',
replacements={
'reservation_id': server['reservation_id'],
'power_state': 'running',
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-power_on-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-power_on-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
def _test_shelve_server(self, server):
        self.flags(shelved_offload_time=-1)
self.api.post_server_action(server['id'], {'shelve': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-shelve-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-shelve-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
post = {'unshelve': None}
self.api.post_server_action(server['id'], post)
def _test_shelve_offload_server(self, server):
self.flags(shelved_offload_time=-1)
self.api.post_server_action(server['id'], {'shelve': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED')
self.api.post_server_action(server['id'], {'shelveOffload': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED_OFFLOADED')
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-shelve-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-shelve-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-shelve_offload-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-shelve_offload-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
self.api.post_server_action(server['id'], {'unshelve': None})
def _test_unshelve_server(self, server):
# setting the shelved_offload_time to 0 should set the
# instance status to 'SHELVED_OFFLOADED'
        self.flags(shelved_offload_time=0)
self.api.post_server_action(server['id'], {'shelve': {}})
self._wait_for_state_change(self.api, server,
expected_status='SHELVED_OFFLOADED')
post = {'unshelve': None}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-unshelve-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
self._verify_notification(
'instance-unshelve-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
def _test_suspend_resume_server(self, server):
post = {'suspend': {}}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.admin_api, server, 'SUSPENDED')
post = {'resume': None}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.admin_api, server, 'ACTIVE')
# Four versioned notification are generated.
# 0. instance-suspend-start
# 1. instance-suspend-end
# 2. instance-resume-start
# 3. instance-resume-end
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-suspend-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-suspend-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-resume-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-resume-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
self.flags(reclaim_instance_interval=0)
def _test_pause_unpause_server(self, server):
self.api.post_server_action(server['id'], {'pause': {}})
self._wait_for_state_change(self.api, server, 'PAUSED')
self.api.post_server_action(server['id'], {'unpause': {}})
self._wait_for_state_change(self.api, server, 'ACTIVE')
# Four versioned notifications are generated
# 0. instance-pause-start
# 1. instance-pause-end
# 2. instance-unpause-start
# 3. instance-unpause-end
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-pause-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-pause-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self._verify_notification(
'instance-unpause-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[2])
self._verify_notification(
'instance-unpause-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[3])
def _test_resize_server(self, server):
self.flags(allow_resize_to_same_host=True)
other_flavor_body = {
'flavor': {
'name': 'other_flavor',
'ram': 256,
'vcpus': 1,
'disk': 1,
'id': 'd5a8bb54-365a-45ae-abdb-38d249df7845'
}
}
other_flavor_id = self.api.post_flavor(other_flavor_body)['id']
extra_specs = {
"extra_specs": {
"hw:watchdog_action": "reset"}}
self.admin_api.post_extra_spec(other_flavor_id, extra_specs)
# Ignore the create flavor notification
fake_notifier.reset()
post = {
'resize': {
'flavorRef': other_flavor_id
}
}
self.api.post_server_action(server['id'], post)
self._wait_for_state_change(self.api, server, 'VERIFY_RESIZE')
self.assertEqual(4, len(fake_notifier.VERSIONED_NOTIFICATIONS))
# This list needs to be in order.
expected_notifications = [
'instance-resize-start',
'instance-resize-end',
'instance-resize_finish-start',
'instance-resize_finish-end'
]
for idx, notification in enumerate(expected_notifications):
self._verify_notification(
notification,
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[idx])
post = {'revertResize': None}
self.api.post_server_action(server['id'], post)
def _test_snapshot_server(self, server):
post = {'createImage': {'name': 'test-snap'}}
self.api.post_server_action(server['id'], post)
self._wait_for_notification('instance.snapshot.end')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-snapshot-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-snapshot-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
def _test_rebuild_server(self, server):
post = {
'rebuild': {
'imageRef': 'a2459075-d96c-40d5-893e-577ff92e721c',
'metadata': {}
}
}
self.api.post_server_action(server['id'], post)
# Before going back to ACTIVE state
# server state need to be changed to REBUILD state
self._wait_for_state_change(self.api, server,
expected_status='REBUILD')
self._wait_for_state_change(self.api, server,
expected_status='ACTIVE')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-rebuild-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-rebuild-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
@mock.patch('nova.compute.manager.ComputeManager.'
'_do_rebuild_instance_with_claim')
def test_rebuild_server_exc(self, mock_rebuild):
def _compute_resources_unavailable(*args, **kwargs):
raise exception.ComputeResourcesUnavailable(
reason="fake-resource")
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
fake_notifier.reset()
post = {
'rebuild': {
'imageRef': 'a2459075-d96c-40d5-893e-577ff92e721c',
'metadata': {}
}
}
self.api.post_server_action(server['id'], post)
mock_rebuild.side_effect = _compute_resources_unavailable
self._wait_for_state_change(self.api, server, expected_status='ERROR')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-rebuild-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
def _test_restore_server(self, server):
self.flags(reclaim_instance_interval=30)
self.api.delete_server(server['id'])
self._wait_for_state_change(self.api, server, 'SOFT_DELETED')
self.api.post_server_action(server['id'], {'restore': {}})
self._wait_for_state_change(self.api, server, 'ACTIVE')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-restore-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-restore-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
self.flags(reclaim_instance_interval=0)
def _test_reboot_server(self, server):
post = {'reboot': {'type': 'HARD'}}
self.api.post_server_action(server['id'], post)
self._wait_for_notification('instance.reboot.start')
self._wait_for_notification('instance.reboot.end')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-reboot-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-reboot-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
@mock.patch('nova.virt.fake.SmallFakeDriver.reboot')
def _test_reboot_server_error(self, server, mock_reboot):
def _hard_reboot(*args, **kwargs):
raise exception.UnsupportedVirtType(virt="FakeVirt")
mock_reboot.side_effect = _hard_reboot
post = {'reboot': {'type': 'HARD'}}
self.api.post_server_action(server['id'], post)
self._wait_for_notification('instance.reboot.start')
self._wait_for_notification('instance.reboot.error')
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-reboot-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-reboot-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
def _attach_volume_to_server(self, server, volume_id):
self.api.post_server_volume(
server['id'], {"volumeAttachment": {"volumeId": volume_id}})
self._wait_for_notification('instance.volume_attach.end')
def _detach_volume_from_server(self, server, volume_id):
self.api.delete_server_volume(server['id'], volume_id)
self._wait_for_notification('instance.volume_detach.end')
def _volume_swap_server(self, server, attachement_id, volume_id):
self.api.put_server_volume(server['id'], attachement_id, volume_id)
def test_volume_swap_server(self):
server = self._boot_a_server(
extra_params={'networks':
[{'port': self.neutron.port_1['id']}]})
self._attach_volume_to_server(server, self.cinder.SWAP_OLD_VOL)
self.cinder.swap_volume_instance_uuid = server['id']
self._volume_swap_server(server, self.cinder.SWAP_OLD_VOL,
self.cinder.SWAP_NEW_VOL)
self._wait_until_swap_volume(server, self.cinder.SWAP_NEW_VOL)
self.assertEqual(6, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_swap-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
self._verify_notification(
'instance-volume_swap-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
def test_volume_swap_server_with_error(self):
server = self._boot_a_server(
extra_params={'networks': [{'port': self.neutron.port_1['id']}]})
self._attach_volume_to_server(server, self.cinder.SWAP_ERR_OLD_VOL)
self.cinder.swap_volume_instance_error_uuid = server['id']
self._volume_swap_server(server, self.cinder.SWAP_ERR_OLD_VOL,
self.cinder.SWAP_ERR_NEW_VOL)
self._wait_until_swap_volume_error()
# Seven versioned notifications are generated. We only rely on the
# first six because _wait_until_swap_volume_error will return True
# after volume_api.unreserve is called on the cinder fixture, and that
# happens before the instance fault is handled in the compute manager
# which generates the last notification (compute.exception).
# 0. instance-create-start
# 1. instance-create-end
# 2. instance-volume_attach-start
# 3. instance-volume_attach-end
# 4. instance-volume_swap-start
# 5. instance-volume_swap-error
# 6. compute.exception
self.assertTrue(len(fake_notifier.VERSIONED_NOTIFICATIONS) >= 6,
'Unexpected number of versioned notifications. '
'Expected at least 6, got: %s' %
len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_swap-start',
replacements={
'new_volume_id': self.cinder.SWAP_ERR_NEW_VOL,
'old_volume_id': self.cinder.SWAP_ERR_OLD_VOL,
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[4])
self._verify_notification(
'instance-volume_swap-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[5])
def _test_revert_server(self, server):
pass
def _test_resize_confirm_server(self, server):
pass
def _test_trigger_crash_dump(self, server):
pass
def _test_volume_attach_detach_server(self, server):
self._attach_volume_to_server(server, self.cinder.SWAP_OLD_VOL)
# 0. volume_attach-start
# 1. volume_attach-end
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_attach-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-volume_attach-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
fake_notifier.reset()
self._detach_volume_from_server(server, self.cinder.SWAP_OLD_VOL)
# 0. volume_detach-start
# 1. volume_detach-end
self.assertEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_detach-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-volume_detach-end',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
def _test_rescue_server(self, server):
pass
def _test_unrescue_server(self, server):
pass
def _test_soft_delete_server(self, server):
pass
@mock.patch('nova.volume.cinder.API.attach')
def _test_attach_volume_error(self, server, mock_attach):
def attach_volume(*args, **kwargs):
raise exception.CinderConnectionFailed(
reason="Connection timed out")
mock_attach.side_effect = attach_volume
post = {"volumeAttachment": {"volumeId": self.cinder.SWAP_OLD_VOL}}
self.api.post_server_volume(server['id'], post)
self._wait_for_notification('instance.volume_attach.error')
# 0. volume_attach-start
# 1. volume_attach-error
# 2. compute.exception
# We only rely on the first 2 notifications, in this case we don't
# care about the exception notification.
self.assertLessEqual(2, len(fake_notifier.VERSIONED_NOTIFICATIONS))
self._verify_notification(
'instance-volume_attach-start',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[0])
self._verify_notification(
'instance-volume_attach-error',
replacements={
'reservation_id': server['reservation_id'],
'uuid': server['id']},
actual=fake_notifier.VERSIONED_NOTIFICATIONS[1])
| apache-2.0 | 4,255,457,280,156,007,400 | 39.988263 | 79 | 0.566777 | false | 4.220181 | true | false | false |
stchepanhagn/domain-learning | plan_learning.py | 1 | 2767 | """ plan_learning.py
- This module contain the procedure used for learning plans from experience.
Copyright (C) 2016 Stephan Chang
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program, located in the root of this repository.
If not, see <http://www.gnu.org/licenses/>.
"""
import pdb
import planning
import sys
import random
def main(args):
verbose = '-v' in args
n_arg = '-n' in args
try:
i = 1 + int(verbose)
examples_file = args[i]
domain_name = args[i+1]
except:
print "usage: {cmd} [-v] examples_file"\
" domain_name".format(cmd=args[0])
return
print """
PDDL Domain Learning Copyright (C) 2016 Stephan Chang
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
"""
examples = []
print "Parsing examples..."
with open(examples_file) as f:
line = f.readline().replace('\n', '')
while line:
triple = line.split('|')
example = (triple[0], triple[1], triple[2])
examples.append(example)
line = f.readline().replace('\n', '')
print "Done reading {n_examples} training examples!".format(n_examples=len(examples))
if not f.closed:
print "Warning: file stream is still open."
if n_arg:
n_examples = int(args[i+3])
else:
n_examples = len(examples)
print "Creating domain..."
domain = planning.Domain(domain_name)
# random.shuffle(examples)
for i in range(n_examples):
preconditions = examples[i][0].split(',')
operators = examples[i][1].split(',')
effects = examples[i][2].split(',')
domain.add_all_predicates(preconditions)
domain.add_all_predicates(effects)
domain.add_actions(operators, preconditions, effects)
print "Done!"
if verbose:
print str(domain)
else:
print "Outputting to file..."
output_file_name = "{domain_name}.pddl".format(domain_name=domain_name)
with open(output_file_name, 'w') as f:
f.write(str(domain))
print "Done!"
if __name__ == '__main__':
main(sys.argv)
| gpl-3.0 | 8,032,233,054,267,307,000 | 29.744444 | 89 | 0.647271 | false | 3.87535 | false | false | false |
dcy/epush | examples/rabbitmq/xiaomi.py | 1 | 1181 | #!/usr/bin/env python
#coding:utf-8
import pika
import json
HOST = 'localhost'
USERNAME = 'hisir'
PASSWORD = 'hisir123'
class Xiaomi():
def __init__(self):
credentials = pika.PlainCredentials(USERNAME, PASSWORD)
self.connection = pika.BlockingConnection(pika.ConnectionParameters(host=HOST, credentials=credentials))
self.channel = self.connection.channel()
def notification_send(self):
data = {'push_method': 'notification_send',
'title': 'Test 中文',
'description': 'Content',
'registration_id': 'go6VssZlTDDypm+hxYdaxycXtqM7M9NsTPbCjzyIyh0='}
self.in_mq(data)
def all(self):
data = {'push_method':'all',
'title':'Test中文',
'description':'Test'}
self.in_mq(data)
def end(self):
self.channel.close()
self.connection.close()
def in_mq(self, data):
self.channel.basic_publish(exchange='',
routing_key='xiaomi_c',
body=json.dumps(data))
if __name__ == "__main__":
xiaomi = Xiaomi()
xiaomi.notification_send()
#xiaomi.all()
xiaomi.end()
| bsd-3-clause | -3,308,385,428,919,708,000 | 23.4375 | 112 | 0.57971 | false | 3.34188 | false | false | false |
iw3hxn/LibrERP | account_payment_term_month/models/inherit_account_invoice.py | 1 | 3307 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Micronaet SRL (<http://www.micronaet.it>).
# Copyright (C) 2014 Agile Business Group sagl
# (<http://www.agilebg.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import orm
from tools.translate import _
class account_invoice(orm.Model):
_inherit = 'account.invoice'
def action_move_create(self, cr, uid, ids, context=None):
context = context or self.pool['res.users'].context_get(cr, uid)
ait_obj = self.pool['account.invoice.tax']
amount_tax = 0.0
if isinstance(ids, (int, long)):
ids = [ids]
for inv in self.browse(cr, uid, ids, context=context):
amount_tax = context.get('amount_tax', 0.0)
if not amount_tax:
compute_taxes = ait_obj.compute(cr, uid, inv.id, context=context)
for tax in compute_taxes:
amount_tax += compute_taxes[tax]['amount']
context.update({'amount_tax': amount_tax})
super(account_invoice, self).action_move_create(cr, uid, [inv.id], context=context)
return True
def onchange_payment_term_date_invoice(self, cr, uid, ids, payment_term_id, date_invoice):
res = {'value': {}}
if not ids:
return res
if not payment_term_id:
return res
context = self.pool['res.users'].context_get(cr, uid)
pt_obj = self.pool['account.payment.term']
ait_obj = self.pool['account.invoice.tax']
if not date_invoice:
date_invoice = time.strftime('%Y-%m-%d')
compute_taxes = ait_obj.compute(cr, uid, ids, context=context)
amount_tax = 0
for tax in compute_taxes:
amount_tax += compute_taxes[tax]['amount']
context.update({'amount_tax': amount_tax})
pterm_list = pt_obj.compute(cr, uid, payment_term_id, value=1, date_ref=date_invoice, context=context)
if pterm_list:
pterm_list = [line[0] for line in pterm_list]
pterm_list.sort()
res = {'value': {'date_due': pterm_list[-1]}}
else:
payment = self.pool['account.payment.term'].browse(cr, uid, payment_term_id, context)
raise orm.except_orm(_('Data Insufficient "{0}" !'.format(payment.name)),
_('The payment term of supplier does not have a payment term line!'))
return res
| agpl-3.0 | 286,080,595,633,511,650 | 40.3375 | 110 | 0.586332 | false | 3.881455 | false | false | false |
lablup/sorna-agent | src/ai/backend/kernel/vendor/aws_polly/__init__.py | 1 | 3171 | import asyncio
import ctypes
import logging
import os
import threading
import janus
from ... import BaseRunner
from .inproc import PollyInprocRunner
log = logging.getLogger()
class Runner(BaseRunner):
log_prefix = 'vendor.aws_polly-kernel'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.inproc_runner = None
self.sentinel = object()
self.input_queue = None
self.output_queue = None
# NOTE: If credentials are missing,
# boto3 will try to use the instance role.
self.access_key = \
self.child_env.get('AWS_ACCESS_KEY_ID', None)
self.secret_key = \
self.child_env.get('AWS_SECRET_ACCESS_KEY', None)
os.environ['AWS_DEFAULT_REGION'] = \
self.child_env.get('AWS_DEFAULT_REGION', 'ap-northeast-2')
async def init_with_loop(self):
self.input_queue = janus.Queue()
self.output_queue = janus.Queue()
async def build_heuristic(self) -> int:
raise NotImplementedError
async def execute_heuristic(self) -> int:
raise NotImplementedError
async def query(self, code_text) -> int:
self.ensure_inproc_runner()
await self.input_queue.async_q.put(code_text)
# Read the generated outputs until done
while True:
try:
msg = await self.output_queue.async_q.get()
except asyncio.CancelledError:
break
self.output_queue.async_q.task_done()
if msg is self.sentinel:
break
self.outsock.send_multipart(msg)
return 0
async def complete(self, data):
self.outsock.send_multipart([
b'completion',
[],
])
async def interrupt(self):
if self.inproc_runner is None:
log.error('No user code is running!')
return
# A dirty hack to raise an exception inside a running thread.
target_tid = self.inproc_runner.ident
if target_tid not in {t.ident for t in threading.enumerate()}:
log.error('Interrupt failed due to missing thread.')
return
affected_count = ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(target_tid),
ctypes.py_object(KeyboardInterrupt))
if affected_count == 0:
log.error('Interrupt failed due to invalid thread identity.')
elif affected_count > 1:
ctypes.pythonapi.PyThreadState_SetAsyncExc(
ctypes.c_long(target_tid),
ctypes.c_long(0))
log.error('Interrupt broke the interpreter state -- '
'recommended to reset the session.')
async def start_service(self, service_info):
return None, {}
def ensure_inproc_runner(self):
if self.inproc_runner is None:
self.inproc_runner = PollyInprocRunner(
self.input_queue.sync_q,
self.output_queue.sync_q,
self.sentinel,
self.access_key,
self.secret_key)
self.inproc_runner.start()
| lgpl-3.0 | -6,937,433,989,262,053,000 | 31.690722 | 73 | 0.584358 | false | 4.081081 | false | false | false |
zetaops/ulakbus | ulakbus/views/reports/base.py | 1 | 6017 | # -*- coding: utf-8 -*-
"""
"""
# Copyright (C) 2015 ZetaOps Inc.
#
# This file is licensed under the GNU General Public License v3
# (GPLv3). See LICENSE.txt for details.
from io import BytesIO
from zengine.lib.translation import gettext as _, gettext_lazy
import six
from zengine.forms import JsonForm
from zengine.forms import fields
from zengine.views.base import BaseView
import re
import base64
from datetime import datetime
try:
from ulakbus.lib.pdfdocument.document import PDFDocument, register_fonts_from_paths
except:
print("Warning: Reportlab module not found")
from ulakbus.lib.s3_file_manager import S3FileManager
from ulakbus.lib.common import get_file_url
class ReporterRegistry(type):
registry = {}
_meta = None
def __new__(mcs, name, bases, attrs):
# for key, prop in attrs.items():
# if hasattr(prop, 'view_method'):
if name == 'Reporter':
ReporterRegistry._meta = attrs['Meta']
if 'Meta' not in attrs:
attrs['Meta'] = type('Meta', (object,), ReporterRegistry._meta.__dict__)
else:
for k, v in ReporterRegistry._meta.__dict__.items():
if k not in attrs['Meta'].__dict__:
setattr(attrs['Meta'], k, v)
new_class = super(ReporterRegistry, mcs).__new__(mcs, name, bases, attrs)
if name != 'Reporter':
ReporterRegistry.registry[name] = new_class
return new_class
@staticmethod
def get_reporters():
return [{"text": v.get_title(),
"wf": 'generic_reporter',
"model": k,
"kategori": 'Raporlar',
"param": 'id'} for k, v in ReporterRegistry.registry.items()]
@staticmethod
def get_permissions():
return [("report.%s" % k, v.get_title(), "") for k, v in ReporterRegistry.registry.items()]
@staticmethod
def get_reporter(name):
return ReporterRegistry.registry[name]
FILENAME_RE = re.compile(r'[^A-Za-z0-9\-\.]+')
@six.add_metaclass(ReporterRegistry)
class Reporter(BaseView):
TITLE = ''
class Meta:
pass
def __init__(self, current):
super(Reporter, self).__init__(current)
self.cmd = current.input.get('cmd', 'show')
# print("CMD", self.cmd)
if self.cmd == 'show':
self.show()
elif self.cmd == 'printout':
self.printout()
class ReportForm(JsonForm):
printout = fields.Button(gettext_lazy(u"Yazdır"), cmd="printout")
def show(self):
objects = self.get_objects()
frm = self.ReportForm(current=self.current, title=self.get_title())
if objects:
frm.help_text = ''
if isinstance(objects[0], dict):
self.output['object'] = {'fields': objects, 'type': 'table-multiRow'}
else:
objects = dict((k, str(v)) for k, v in objects)
self.output['object'] = objects
else:
frm.help_text = _(u'Kayıt bulunamadı')
self.output['object'] = {}
self.set_client_cmd('form', 'show')
self.output['forms'] = frm.serialize()
self.output['forms']['constraints'] = {}
self.output['forms']['grouping'] = []
self.output['meta'] = {}
def printout(self):
register_fonts_from_paths('Vera.ttf',
'VeraIt.ttf',
'VeraBd.ttf',
'VeraBI.ttf',
'Vera')
objects = self.get_objects()
f = BytesIO()
pdf = PDFDocument(f, font_size=14)
pdf.init_report()
pdf.h1(self.tr2ascii(self.get_title()))
ascii_objects = []
if isinstance(objects[0], dict):
headers = objects[0].keys()
ascii_objects.append([self.tr2ascii(h) for h in headers])
for obj in objects:
ascii_objects.append([self.tr2ascii(k) for k in obj.values()])
else:
for o in objects:
ascii_objects.append((self.tr2ascii(o[0]), self.tr2ascii(o[1])))
pdf.table(ascii_objects)
pdf.generate()
download_url = self.generate_temp_file(
name=self.generate_file_name(),
content=base64.b64encode(f.getvalue()),
file_type='application/pdf',
ext='pdf'
)
self.set_client_cmd('download')
self.output['download_url'] = download_url
@staticmethod
def generate_temp_file(name, content, file_type, ext):
f = S3FileManager()
key = f.store_file(name=name, content=content, type=file_type, ext=ext)
return get_file_url(key)
def generate_file_name(self):
return "{0}-{1}".format(
FILENAME_RE.sub('-', self.tr2ascii(self.get_title()).lower()),
datetime.now().strftime("%d.%m.%Y-%H.%M.%S")
)
@staticmethod
def convert_choices(choices_dict_list):
result = []
for d in choices_dict_list:
try:
k = int(d[0])
except:
k = d[0]
result.append((k, d[1]))
return dict(result)
def get_headers(self):
return self.HEADERS
@classmethod
def get_title(cls):
return six.text_type(cls.TITLE)
def get_objects(self):
raise NotImplementedError
def tr2ascii(self, inp):
inp = six.text_type(inp)
shtlst = [
('ğ', 'g'),
('ı', 'i'),
('İ', 'I'),
('ç', 'c'),
('ö', 'o'),
('ü', 'u'),
('ş', 's'),
('Ğ', 'G'),
('Ş', 'S'),
('Ö', 'O'),
('Ü', 'U'),
('Ç', 'C'),
]
for t, a in shtlst:
inp = inp.replace(t, a)
return inp
def ReportDispatcher(current):
ReporterRegistry.get_reporter(current.input['model'])(current)
| gpl-3.0 | -235,994,614,447,476,320 | 29.467005 | 99 | 0.530157 | false | 3.758297 | false | false | false |
pombredanne/nTLP | examples/gridworlds/gw_bm_analysis.py | 1 | 4888 | # Copyright (c) 2011, 2012 by California Institute of Technology
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the California Institute of Technology nor
# the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL CALTECH
# OR THE CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
# $Id$
# Take averages of the output from the gridworld benchmark script.
import numpy as np
import sys
import os
import string
expform = (string.Template("exp(${SOLVER}_a*x + ${SOLVER}_b)"), "exp(%.3f*x + %.3f)")
linform = (string.Template("${SOLVER}_a*x + ${SOLVER}_b"), "%.3f*x + %.3f")
plotfit = string.Template("""${SOLVER}_a = ${SOLVER}_b = 0.5
${SOLVER}_f(x) = $FORMULA
fit ${SOLVER}_f(x) \"$FILENAME\" using $XCOL:((stringcolumn(1) eq "$SOLVER") ? $$$YCOL : 1/0) via ${SOLVER}_a, ${SOLVER}_b
""")
plottpl = string.Template("\"$FILENAME\" using $XCOL:((stringcolumn(1) eq \"$SOLVER\") ? $$$YCOL : 1/0):$ERRCOL with errorbars \
title \"$SOLVER\" lt $COLOR, ${SOLVER}_f(x) title sprintf(\"$SOLVER fit: $FORMULA\", ${SOLVER}_a, ${SOLVER}_b) lt $COLOR")
pf = string.Template("""
set xlabel "$XAXIS"
set ylabel "$YAXIS"
set terminal png font "" 10
set output "$FN_PNG"
""")
columns = ["", "Solver", "Cells", "Goals", "WDensity", "AvgTime", "StDevTime", "AvgStates", "StDevStates"]
colnames = ["", "Solver", "Grid cells", "Number of goals", "Wall Density", "CPU time (s)", "", "Number of states", ""]
err = { columns.index("AvgTime") : columns.index("StDevTime"),
columns.index("AvgStates") : columns.index("StDevStates") }
if len(sys.argv) < 4:
print "Usage: gw_bm_analysis.py [data file] [x-col] [y-col] <exp/lin>"
sys.exit(0)
d = np.genfromtxt(sys.argv[1], dtype="S16, i4, i4, i4, f8, f8, i4", names=True)
xcol = columns.index(sys.argv[2])
ycol = columns.index(sys.argv[3])
if len(sys.argv) >= 5:
EXP = (sys.argv[4] == "exp")
else:
# Default linear fit
EXP = False
if EXP: eqn = expform
else: eqn = linform
avgs = []
solvers = ["NuSMV", "jtlv", "gr1c", "SPIN"]
# List of columns specifying dimension of a grid
dimension = ["W", "H", "Goals", "WDensity"]
for solver in solvers:
s_data = d[d["Solver"] == solver]
for dim in np.unique(s_data[dimension]):
# Mean & error in the mean
times = s_data[s_data[dimension] == dim]["Time"]
time_mean = times.mean()
time_stdev = times.std()/np.sqrt(len(times))
states = s_data[s_data[dimension] == dim]["NStates"]
states_mean = states.mean()
states_stdev = states.std()/np.sqrt(len(states))
avgs.append((solver, dim[0] * dim[1], dim[2], dim[3], time_mean,
time_stdev, states_mean, states_stdev))
(prefix, ext) = os.path.splitext(sys.argv[1])
outfile = prefix + ".avg" + ext
pltfile = prefix + ".avg.plt"
pngfile = prefix + ".png"
with open(outfile, "w") as f:
f.write(" ".join(columns[1:]) + "\n")
for a in avgs:
f.write("%s %d %d %.4f %.4f %.4f %.4f %.4f\n" % a)
with open(pltfile, "w") as f:
pl = []
for (n, solver) in enumerate(solvers):
fx = eqn[0].substitute(SOLVER=solver)
s = plotfit.substitute(SOLVER=solver, FILENAME=outfile, XCOL=xcol,
YCOL=ycol, FORMULA=fx)
f.write(s)
s = plottpl.substitute(SOLVER=solver, FILENAME=outfile, XCOL=xcol,
YCOL=ycol, ERRCOL=err[ycol], COLOR=n, FORMULA=eqn[1])
pl.append(s)
s = pf.safe_substitute(FN_PNG=pngfile, XAXIS=colnames[xcol],
YAXIS=colnames[ycol])
f.write(s)
if EXP: f.write("set log y\n")
f.write("plot " + ", ".join(pl))
| bsd-3-clause | 3,587,470,938,751,121,400 | 40.07563 | 128 | 0.653642 | false | 3.131326 | false | false | false |
mathiasertl/django-ca | ca/django_ca/deprecation.py | 1 | 1194 | # This file is part of django-ca (https://github.com/mathiasertl/django-ca).
#
# django-ca is free software: you can redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# django-ca is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with django-ca. If not,
# see <http://www.gnu.org/licenses/>.
"""Deprecation classes in django-ca."""
class RemovedInDjangoCA120Warning(PendingDeprecationWarning):
"""Warning if a feature will be removed in django-ca==1.20."""
class RemovedInDjangoCA121Warning(PendingDeprecationWarning):
"""Warning if a feature will be removed in django-ca==1.21."""
class RemovedInDjangoCA122Warning(PendingDeprecationWarning):
"""Warning if a feature will be removed in django-ca==1.22."""
RemovedInNextVersionWarning = RemovedInDjangoCA120Warning
| gpl-3.0 | 4,251,496,230,839,164,400 | 40.172414 | 98 | 0.767169 | false | 4.174825 | false | false | false |
DataDog/integrations-core | couchbase/tests/test_unit.py | 1 | 3050 | # (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from copy import deepcopy
import mock
import pytest
from datadog_checks.couchbase import Couchbase
def test_camel_case_to_joined_lower(instance):
couchbase = Couchbase('couchbase', {}, [instance])
CAMEL_CASE_TEST_PAIRS = {
'camelCase': 'camel_case',
'FirstCapital': 'first_capital',
'joined_lower': 'joined_lower',
'joined_Upper1': 'joined_upper1',
'Joined_upper2': 'joined_upper2',
'Joined_Upper3': 'joined_upper3',
'_leading_Underscore': 'leading_underscore',
'Trailing_Underscore_': 'trailing_underscore',
'DOubleCAps': 'd_ouble_c_aps',
'@@@super--$$-Funky__$__$$%': 'super_funky',
}
for test_input, expected_output in CAMEL_CASE_TEST_PAIRS.items():
test_output = couchbase.camel_case_to_joined_lower(test_input)
assert test_output == expected_output, 'Input was {}, expected output was {}, actual output was {}'.format(
test_input, expected_output, test_output
)
def test_extract_seconds_value(instance):
couchbase = Couchbase('couchbase', {}, [instance])
EXTRACT_SECONDS_TEST_PAIRS = {
'3.45s': 3.45,
'12ms': 0.012,
'700.5us': 0.0007005,
u'733.364\u00c2s': 0.000733364,
'0': 0,
}
for test_input, expected_output in EXTRACT_SECONDS_TEST_PAIRS.items():
test_output = couchbase.extract_seconds_value(test_input)
assert test_output == expected_output, 'Input was {}, expected output was {}, actual output was {}'.format(
test_input, expected_output, test_output
)
def test__get_query_monitoring_data(instance_query):
"""
`query_monitoring_url` can potentially fail, be sure we don't raise when the
endpoint is not reachable
"""
couchbase = Couchbase('couchbase', {}, [instance_query])
couchbase._get_query_monitoring_data()
@pytest.mark.parametrize(
'test_case, extra_config, expected_http_kwargs',
[
(
"new auth config",
{'username': 'new_foo', 'password': 'bar', 'tls_verify': False},
{'auth': ('new_foo', 'bar'), 'verify': False},
),
("legacy config", {'user': 'new_foo', 'ssl_verify': False}, {'auth': ('new_foo', 'password'), 'verify': False}),
],
)
def test_config(test_case, extra_config, expected_http_kwargs, instance):
instance = deepcopy(instance)
instance.update(extra_config)
check = Couchbase('couchbase', {}, [instance])
with mock.patch('datadog_checks.base.utils.http.requests') as r:
r.get.return_value = mock.MagicMock(status_code=200)
check.check(instance)
http_wargs = dict(
auth=mock.ANY, cert=mock.ANY, headers=mock.ANY, proxies=mock.ANY, timeout=mock.ANY, verify=mock.ANY
)
http_wargs.update(expected_http_kwargs)
r.get.assert_called_with('http://localhost:8091/pools/default/tasks', **http_wargs)
| bsd-3-clause | -6,643,786,405,936,923,000 | 34.057471 | 120 | 0.623279 | false | 3.419283 | true | false | false |
roshchupkin/hase | tools/VCF2hdf5.py | 1 | 4024 |
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from config import PYTHON_PATH
if PYTHON_PATH is not None:
for i in PYTHON_PATH: sys.path.insert(0,i)
import argparse
import h5py
import pandas as pd
import numpy as np
from hdgwas.tools import Timer
import tables
import glob
def probes_VCF2hdf5(data_path, save_path,study_name, chunk_size=1000000):
if os.path.isfile(os.path.join(save_path,'probes',study_name+'.h5')):
os.remove(os.path.join(save_path,'probes',study_name+'.h5'))
hash_table={'keys':np.array([],dtype=np.int),'allele':np.array([])}
df=pd.read_csv(data_path,sep='\t',chunksize=chunk_size, header=None,index_col=None)
for i,chunk in enumerate(df):
print 'add chunk {}'.format(i)
print chunk.head()
chunk.columns=[ "CHR","bp" ,"ID",'allele1','allele2','QUAL','FILTER','INFO'] #TODO (high) parse INFO
hash_1=chunk.allele1.apply(hash)
hash_2=chunk.allele2.apply(hash)
k,indices=np.unique(np.append(hash_1,hash_2),return_index=True)
s=np.append(chunk.allele1,chunk.allele2)[indices]
ind=np.invert(np.in1d(k,hash_table['keys']))
hash_table['keys']=np.append(hash_table['keys'],k[ind])
hash_table['allele']=np.append(hash_table['allele'],s[ind])
chunk.allele1=hash_1
chunk.allele2=hash_2
chunk.to_hdf(os.path.join(save_path,'probes',study_name+'.h5'),data_columns=["CHR","bp" ,"ID",'allele1','allele2'], key='probes',format='table',append=True,
min_itemsize = 25, complib='zlib',complevel=9 )
pd.DataFrame.from_dict(hash_table).to_csv(os.path.join(save_path,'probes',study_name+'_hash_table.csv.gz'),index=False,compression='gzip', sep='\t')
def ind_VCF2hdf5(data_path, save_path,study_name):
if os.path.isfile(os.path.join(save_path,'individuals',study_name+'.h5')):
os.remove(os.path.join(save_path,'individuals',study_name+'.h5'))
n=[]
f=open(data_path,'r')
for i,j in enumerate(f):
n.append((j[:-1]))
f.close()
n=np.array(n)
chunk=pd.DataFrame.from_dict({"individual":n})
chunk.to_hdf(os.path.join(save_path,'individuals',study_name+'.h5'), key='individuals',format='table',
min_itemsize = 25, complib='zlib',complevel=9 )
def genotype_VCF2hdf5(data_path,id, save_path, study_name):
df=pd.read_csv(data_path, header=None, index_col=None,sep='\t', dtype=np.float16)
data=df.as_matrix()
print data.shape
print 'Saving chunk...{}'.format(os.path.join(save_path,'genotype',str(id)+'_'+study_name+'.h5'))
h5_gen_file = tables.open_file(
os.path.join(save_path,'genotype',str(id)+'_'+study_name+'.h5'), 'w', title=study_name)
atom = tables.Float16Atom()
genotype = h5_gen_file.create_carray(h5_gen_file.root, 'genotype', atom,
(data.shape),
title='Genotype',
filters=tables.Filters(complevel=9, complib='zlib'))
genotype[:] = data
h5_gen_file.close()
os.remove(data_path)
if __name__=="__main__":
parser = argparse.ArgumentParser(description='Script to convert VCF data')
parser.add_argument("-study_name", required=True, type=str, help="Study specific name")
parser.add_argument("-id", type=str, help="subject id")
parser.add_argument("-data",required=True, type=str, help="path to file")
parser.add_argument("-out",required=True, type=str, help="path to results save folder")
parser.add_argument("-flag",required=True,type=str,choices=['individuals','probes','chunk'], help="path to file with SNPs info")
args = parser.parse_args()
print args
try:
print ('Creating directories...')
os.mkdir(os.path.join(args.out,'genotype') )
os.mkdir(os.path.join(args.out,'individuals') )
os.mkdir(os.path.join(args.out,'probes') )
os.mkdir(os.path.join(args.out,'tmp_files'))
except:
print('Directories "genotype","probes","individuals" are already exist in {}...'.format(args.out))
if args.flag=='probes':
probes_VCF2hdf5(args.data, args.out, args.study_name)
elif args.flag=='individuals':
ind_VCF2hdf5(args.data, args.out,args.study_name)
elif args.flag=='chunk':
genotype_VCF2hdf5(args.data,args.id, args.out,args.study_name)
| gpl-3.0 | 3,613,012,578,314,009,000 | 36.962264 | 158 | 0.696571 | false | 2.691639 | false | false | false |
joanayma/pyautorunalize | pyautorunanalize.py | 1 | 5119 | #! /bin/env python
"""
PyAutorunalizer 0.1
Python script for autorunalize: http://sysinternals.com/autoruns.com listing autoruns Windows
items. Version 11.6 or greater needed.
http://Virutotal.com externa database of viruses.
original idea: http://trustedsignal.blogspot.com.es/2012/02/finding-evil-automating-autoruns.html
original implementation uses cygwin32, bash and other blobs.
Virustotal API refer: https://github.com/botherder/virustotal/
Autoruns is part of Sysinternals' suit and owns the copyright. Windows are trademark of Microsoft.
Licence: GPLv2
#Use this script at your own.
This script is not inteded as a substitute for any antivirus. Is just a sanity check.
Individuals htat noncomplain the Virustotal or sysinternals terms or harms the antivirus
industry, are out of my resposability.
"""
import xml.etree.ElementTree as ElementTree
import json
import urllib,urllib.request
import sys,os,getopt,subprocess
fnull = open(os.devnull, "w")
def runanalizer(API_KEY):
#Check for autorunsc.exe
try:
with open('./autorunsc.exe'): pass
except IOError:
print('autorunsc.exe binary not found! Download from https://live.sysinternals.com/autorunsc.exe')
sys.exit(3)
try:
if os.environ['HTTP_PROXY'] != None:
proxies = {'https': 'http://{0}'.format(os.environ['HTTP_PROXY'])}
urllib.request.ProxyHandler(proxies)
print("[Info] Going through proxies: ",proxies)
except KeyError:
#not defined
pass
print('[Info] Getting list of files to analise from Autoruns ...')
autoruns_proc = subprocess.Popen(['autorunsc.exe', "/accepteula", '-xaf'], stdout=subprocess.PIPE, stderr = fnull)
autoruns_xml = (autoruns_proc.communicate()[0].decode("utf_16"))
autoruns_xml.replace('\r\n','\n')
#parse XML output
#items =[[]]
try:
autoruns_tree = ElementTree.fromstring(autoruns_xml)
except xml.etree.ElementTree.ParseError as e:
print('[Error] Error parsing xml autoruns\' output. \n Is Autoruns\' latest version?\n', e)
sys.exit(1002)
for item in autoruns_tree:
text = "[Object]"
if item is None:
text = text + " Invalid item (mostly not a binary image)\n"
break
imagepath = item.findtext('imagepath')
name = item.findtext('itemname')
if imagepath is not None:
sha256hash = item.findtext('sha256hash')
text = text + '' + name + '\n ' + imagepath + '\n ' + sha256hash + '\n scanning... '
print(text)
result = scan(sha256hash, API_KEY)
print(result)
def scan(sha256hash, API_KEY):
VIRUSTOTAL_REPORT_URL = 'https://www.virustotal.com/vtapi/v2/file/report'
VIRUSTOTAL_SCAN_URL = 'https://www.virustotal.com/vtapi/v2/file/scan'
if sha256hash == None:
response = "No valid hash for this file"
return response
data = urllib.parse.urlencode({
'resource' : sha256hash,
'apikey' : API_KEY
})
data = data.encode('utf-8')
try:
request = urllib.request.Request(VIRUSTOTAL_REPORT_URL, data)
reply = urllib.request.urlopen(request)
answer = 42
answer = reply.read().decode("utf-8")
report = json.loads(answer)
except Exception as e:
error = "\n[Error] Cannot obtain results from VirusTotal: {0}\n".format(e)
return error
sys.exit(4)
int(report['response_code']) == 0
if int(report['response_code']) == 0:
response = (report['verbose_msg'])
elif int(report['response_code']) < 0:
response = 'Not found on Virustotal database!'
#Shall send the file if is not on virustotal.
else:
response = 'FOUND'
if int(report['positives']) >= 0:
response = response + 'but not infected.'
else:
for av, scan in report['scans'].items():
if scan == 'detected':
response = response + ' INFECTED!\n engine:' + av + ',\n malware:' + scan['result'] + '\n'
return response
def help():
print(main.__doc__)
sys.exit(0)
def main(argv):
"""\n
Script for Windows basic security check using Sysinternal\'s Autoruns
and Virustotal.com\n
Thereforce, you need to get a public API Key from http://www.virustotal.com for your
scripting analysis\n
and autorunsc.exe binary.\n
Usage:\n
autorunalize.exe [--help] --API-KEY YOUR_API_KEY\n
-h, --help Shows this help.\n
-a, --API-KEY Your public API key from Virustotal.
This a 64 characters hexadecimal string.\n
Example:\n
./autorunalize.exe --API-KEY YOUR_API_KEY\n
"""
API_KEY = ''
try:
opts, args = getopt.getopt(argv,"ha:",["help","API-KEY="])
except getopt.GetoptError:
print('pyrunanalizer.py --API-KEY YOUR_API_KEY_HERE')
sys.exit(2)
for opt, arg in opts:
if opt in ('-h','--help'):
help()
sys.exit()
elif opt in ("-a", "--API-KEY"):
API_KEY = arg
runanalizer(API_KEY)
else:
help()
if __name__ == "__main__":
main(sys.argv[1:])
| gpl-2.0 | -8,408,628,864,717,367,000 | 33.355705 | 116 | 0.62942 | false | 3.229653 | false | false | false |
bilke/OpenSG-1.8 | SConsLocal/scons-local-0.96.1/SCons/Tool/__init__.py | 2 | 13279 | """SCons.Tool
SCons tool selection.
This looks for modules that define a callable object that can modify
a construction environment as appropriate for a given tool (or tool
chain).
Note that because this subsystem just *selects* a callable that can
modify a construction environment, it's possible for people to define
their own "tool specification" in an arbitrary callable function. No
one needs to use or tie in to this subsystem in order to roll their own
tool definition.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "/home/scons/scons/branch.0/baseline/src/engine/SCons/Tool/__init__.py 0.96.1.D001 2004/08/23 09:55:29 knight"
import imp
import sys
import SCons.Errors
import SCons.Defaults
class ToolSpec:
def __init__(self, name):
self.name = name
def __call__(self, env, *args, **kw):
env.Append(TOOLS = [ self.name ])
apply(self.generate, ( env, ) + args, kw)
def __str__(self):
return self.name
def Tool(name, toolpath=[]):
"Select a canned Tool specification, optionally searching in toolpath."
try:
file, path, desc = imp.find_module(name, toolpath)
try:
module = imp.load_module(name, file, path, desc)
spec = ToolSpec(name)
spec.generate = module.generate
spec.exists = module.exists
return spec
finally:
if file:
file.close()
except ImportError, e:
pass
full_name = 'SCons.Tool.' + name
if not sys.modules.has_key(full_name):
try:
file, path, desc = imp.find_module(name,
sys.modules['SCons.Tool'].__path__)
mod = imp.load_module(full_name, file, path, desc)
setattr(SCons.Tool, name, mod)
except ImportError, e:
raise SCons.Errors.UserError, "No tool named '%s': %s" % (name, e)
if file:
file.close()
spec = ToolSpec(name)
spec.generate = sys.modules[full_name].generate
spec.exists = sys.modules[full_name].exists
return spec
def createProgBuilder(env):
"""This is a utility function that creates the Program
Builder in an Environment if it is not there already.
If it is already there, we return the existing one.
"""
try:
program = env['BUILDERS']['Program']
except KeyError:
program = SCons.Builder.Builder(action = SCons.Defaults.LinkAction,
emitter = '$PROGEMITTER',
prefix = '$PROGPREFIX',
suffix = '$PROGSUFFIX',
src_suffix = '$OBJSUFFIX',
src_builder = 'Object',
target_scanner = SCons.Defaults.ProgScan)
env['BUILDERS']['Program'] = program
return program
def createStaticLibBuilder(env):
"""This is a utility function that creates the StaticLibrary
Builder in an Environment if it is not there already.
If it is already there, we return the existing one.
"""
try:
static_lib = env['BUILDERS']['StaticLibrary']
except KeyError:
static_lib = SCons.Builder.Builder(action = SCons.Defaults.ArAction,
emitter = '$LIBEMITTER',
prefix = '$LIBPREFIX',
suffix = '$LIBSUFFIX',
src_suffix = '$OBJSUFFIX',
src_builder = 'StaticObject')
env['BUILDERS']['StaticLibrary'] = static_lib
env['BUILDERS']['Library'] = static_lib
return static_lib
def createSharedLibBuilder(env):
"""This is a utility function that creates the SharedLibrary
Builder in an Environment if it is not there already.
If it is already there, we return the existing one.
"""
try:
shared_lib = env['BUILDERS']['SharedLibrary']
except KeyError:
action_list = [ SCons.Defaults.SharedCheck,
SCons.Defaults.ShLinkAction ]
shared_lib = SCons.Builder.Builder(action = action_list,
emitter = "$SHLIBEMITTER",
prefix = '$SHLIBPREFIX',
suffix = '$SHLIBSUFFIX',
target_scanner = SCons.Defaults.ProgScan,
src_suffix = '$SHOBJSUFFIX',
src_builder = 'SharedObject')
env['BUILDERS']['SharedLibrary'] = shared_lib
return shared_lib
def createObjBuilders(env):
"""This is a utility function that creates the StaticObject
and SharedObject Builders in an Environment if they
are not there already.
If they are there already, we return the existing ones.
This is a separate function because soooo many Tools
use this functionality.
The return is a 2-tuple of (StaticObject, SharedObject)
"""
try:
static_obj = env['BUILDERS']['StaticObject']
except KeyError:
static_obj = SCons.Builder.Builder(action = {},
emitter = {},
prefix = '$OBJPREFIX',
suffix = '$OBJSUFFIX',
src_builder = ['CFile', 'CXXFile'],
source_scanner = SCons.Defaults.ObjSourceScan, single_source=1)
env['BUILDERS']['StaticObject'] = static_obj
env['BUILDERS']['Object'] = static_obj
try:
shared_obj = env['BUILDERS']['SharedObject']
except KeyError:
shared_obj = SCons.Builder.Builder(action = {},
emitter = {},
prefix = '$SHOBJPREFIX',
suffix = '$SHOBJSUFFIX',
src_builder = ['CFile', 'CXXFile'],
source_scanner = SCons.Defaults.ObjSourceScan, single_source=1)
env['BUILDERS']['SharedObject'] = shared_obj
return (static_obj, shared_obj)
def createCFileBuilders(env):
"""This is a utility function that creates the CFile/CXXFile
Builders in an Environment if they
are not there already.
If they are there already, we return the existing ones.
This is a separate function because soooo many Tools
use this functionality.
The return is a 2-tuple of (CFile, CXXFile)
"""
try:
c_file = env['BUILDERS']['CFile']
except KeyError:
c_file = SCons.Builder.Builder(action = {},
emitter = {},
suffix = {None:'$CFILESUFFIX'})
env['BUILDERS']['CFile'] = c_file
env['CFILESUFFIX'] = '.c'
try:
cxx_file = env['BUILDERS']['CXXFile']
except KeyError:
cxx_file = SCons.Builder.Builder(action = {},
emitter = {},
suffix = {None:'$CXXFILESUFFIX'})
env['BUILDERS']['CXXFile'] = cxx_file
env['CXXFILESUFFIX'] = '.cc'
return (c_file, cxx_file)
def FindTool(tools, env):
for tool in tools:
t = Tool(tool)
if t.exists(env):
return tool
return None
def FindAllTools(tools, env):
def ToolExists(tool, env=env):
return Tool(tool).exists(env)
return filter (ToolExists, tools)
def tool_list(platform, env):
# XXX this logic about what tool to prefer on which platform
# should be moved into either the platform files or
# the tool files themselves.
# The search orders here are described in the man page. If you
# change these search orders, update the man page as well.
if str(platform) == 'win32':
"prefer Microsoft tools on Windows"
linkers = ['mslink', 'gnulink', 'ilink', 'linkloc', 'ilink32' ]
c_compilers = ['msvc', 'mingw', 'gcc', 'icl', 'icc', 'cc', 'bcc32' ]
cxx_compilers = ['msvc', 'icc', 'g++', 'c++', 'bcc32' ]
assemblers = ['masm', 'nasm', 'gas', '386asm' ]
fortran_compilers = ['g77', 'ifl', 'cvf', 'fortran']
ars = ['mslib', 'ar', 'tlib']
elif str(platform) == 'os2':
"prefer IBM tools on OS/2"
linkers = ['ilink', 'gnulink', 'mslink']
c_compilers = ['icc', 'gcc', 'msvc', 'cc']
cxx_compilers = ['icc', 'g++', 'msvc', 'c++']
assemblers = ['nasm', 'masm', 'gas']
fortran_compilers = ['ifl', 'g77']
ars = ['ar', 'mslib']
elif str(platform) == 'irix':
"prefer MIPSPro on IRIX"
linkers = ['sgilink', 'gnulink']
c_compilers = ['sgicc', 'gcc', 'cc']
cxx_compilers = ['sgic++', 'g++', 'c++']
assemblers = ['as', 'gas']
fortran_compilers = ['f77', 'g77', 'fortran']
ars = ['sgiar']
elif str(platform) == 'sunos':
"prefer Forte tools on SunOS"
linkers = ['sunlink', 'gnulink']
c_compilers = ['suncc', 'gcc', 'cc']
cxx_compilers = ['sunc++', 'g++', 'c++']
assemblers = ['as', 'gas']
fortran_compilers = ['f77', 'g77', 'fortran']
ars = ['sunar']
elif str(platform) == 'hpux':
"prefer aCC tools on HP-UX"
linkers = ['hplink', 'gnulink']
c_compilers = ['hpcc', 'gcc', 'cc']
cxx_compilers = ['hpc++', 'g++', 'c++']
assemblers = ['as', 'gas']
fortran_compilers = ['f77', 'g77', 'fortran']
ars = ['ar']
elif str(platform) == 'aix':
"prefer AIX Visual Age tools on AIX"
linkers = ['aixlink', 'gnulink']
c_compilers = ['aixcc', 'gcc', 'cc']
cxx_compilers = ['aixc++', 'g++', 'c++']
assemblers = ['as', 'gas']
fortran_compilers = ['aixf77', 'g77', 'fortran']
ars = ['ar']
else:
"prefer GNU tools on all other platforms"
linkers = ['gnulink', 'mslink', 'ilink']
c_compilers = ['gcc', 'msvc', 'icc', 'cc']
cxx_compilers = ['g++', 'msvc', 'icc', 'c++']
assemblers = ['gas', 'nasm', 'masm']
fortran_compilers = ['g77', 'ifort', 'ifl', 'fortran']
ars = ['ar', 'mslib']
c_compiler = FindTool(c_compilers, env) or c_compilers[0]
# XXX this logic about what tool provides what should somehow be
# moved into the tool files themselves.
if c_compiler and c_compiler == 'mingw':
# MinGW contains a linker, C compiler, C++ compiler,
# Fortran compiler, archiver and assembler:
cxx_compiler = None
linker = None
assembler = None
fortran_compiler = None
ar = None
else:
# Don't use g++ if the C compiler has built-in C++ support:
if c_compiler in ('msvc', 'icc'):
cxx_compiler = None
else:
cxx_compiler = FindTool(cxx_compilers, env) or cxx_compilers[0]
linker = FindTool(linkers, env) or linkers[0]
assembler = FindTool(assemblers, env) or assemblers[0]
fortran_compiler = FindTool(fortran_compilers, env) or fortran_compilers[0]
ar = FindTool(ars, env) or ars[0]
other_tools = FindAllTools(['BitKeeper', 'CVS',
'dmd',
'dvipdf', 'dvips', 'gs',
'jar', 'javac', 'javah',
'latex', 'lex', 'm4', 'midl', 'msvs',
'pdflatex', 'pdftex', 'Perforce',
'RCS', 'rmic', 'SCCS',
# 'Subversion',
'swig',
'tar', 'tex', 'yacc', 'zip'],
env)
tools = ([linker, c_compiler, cxx_compiler,
fortran_compiler, assembler, ar]
+ other_tools)
return filter(lambda x: x, tools)
| lgpl-2.1 | 5,232,730,326,358,949,000 | 37.827485 | 125 | 0.540252 | false | 4.141921 | false | false | false |
brennie/reviewboard | reviewboard/oauth/forms.py | 1 | 11912 | """Forms for OAuth2 applications."""
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from django.forms import widgets
from django.utils.translation import ugettext, ugettext_lazy as _
from djblets.forms.widgets import CopyableTextInput, ListEditWidget
from oauth2_provider.generators import (generate_client_id,
generate_client_secret)
from oauth2_provider.validators import URIValidator
from reviewboard.admin.form_widgets import RelatedUserWidget
from reviewboard.oauth.models import Application
from reviewboard.oauth.widgets import OAuthSecretInputWidget
from reviewboard.site.urlresolvers import local_site_reverse
class ApplicationChangeForm(forms.ModelForm):
"""A form for updating an Application.
This form is intended to be used by the admin site.
"""
DISABLED_FOR_SECURITY_ERROR = _(
'This Application has been disabled to keep your server secure. '
'It cannot be re-enabled until its client secret changes.'
)
client_id = forms.CharField(
label=_('Client ID'),
help_text=_(
'The client ID. Your application will use this in OAuth2 '
'authentication to identify itself.',
),
widget=CopyableTextInput(attrs={
'readonly': True,
'size': 100,
}),
required=False,
)
def __init__(self, data=None, initial=None, instance=None):
"""Initialize the form:
Args:
data (dict, optional):
The provided form data.
initial (dict, optional):
The initial form values.
instance (Application, optional):
The application to edit.
"""
super(ApplicationChangeForm, self).__init__(data=data,
initial=initial,
instance=instance)
if instance and instance.pk:
            # When creating an application (as ApplicationCreationForm is a
            # subclass of this class), the client_secret won't be present
            # yet, so the widget only needs to be initialized for saved
            # instances.
client_secret = self.fields['client_secret']
client_secret.widget = OAuthSecretInputWidget(
attrs=client_secret.widget.attrs,
api_url=local_site_reverse('oauth-app-resource',
local_site=instance.local_site,
kwargs={'app_id': instance.pk}),
)
def clean_extra_data(self):
"""Prevent ``extra_data`` from being an empty string.
Returns:
unicode:
Either a non-zero length string of JSON-encoded data or ``None``.
"""
return self.cleaned_data['extra_data'] or None
def clean_redirect_uris(self):
"""Clean the ``redirect_uris`` field.
This method will ensure that all the URIs are valid by validating
each of them, as well as removing unnecessary whitespace.
Returns:
unicode:
A space-separated list of URIs.
Raises:
django.core.exceptions.ValidationError:
Raised when one or more URIs are invalid.
"""
validator = URIValidator()
redirect_uris = self.cleaned_data.get('redirect_uris', '').split()
errors = []
for uri in redirect_uris:
try:
validator(uri)
except ValidationError as e:
errors.append(e)
if errors:
raise ValidationError(errors)
        # We join the list instead of returning the initial value because
        # the original value may have had multiple adjacent whitespace
        # characters.
return ' '.join(redirect_uris)
def clean(self):
"""Validate the form.
This will validate the relationship between the
``authorization_grant_type`` and ``redirect_uris`` fields to ensure the
values are compatible.
This method is very similar to
:py:func:`Application.clean
<oauth2_provider.models.AbstractApplication.clean>`, but the data will
be verified by the form instead of the model to allow error messages to
be usable by consumers of the form.
This method does not raise an exception upon failing validation.
Instead, it sets errors internally so that they are related to the
pertinent field instead of the form as a whole.
Returns:
dict:
The cleaned form data.
"""
super(ApplicationChangeForm, self).clean()
grant_type = self.cleaned_data.get('authorization_grant_type')
# redirect_uris will not be present in cleaned_data if validation
# failed.
redirect_uris = self.cleaned_data.get('redirect_uris')
if (redirect_uris is not None and
len(redirect_uris) == 0 and
grant_type in (Application.GRANT_AUTHORIZATION_CODE,
Application.GRANT_IMPLICIT)):
# This is unfortunately not publicly exposed in Django 1.6, but it
# is exposed in later versions (as add_error).
self._errors['redirect_uris'] = self.error_class([
ugettext(
'The "redirect_uris" field may not be blank when '
'"authorization_grant_type" is "%s"'
)
% grant_type
])
self.cleaned_data.pop('redirect_uris')
if (self.instance and
self.instance.pk and
self.instance.is_disabled_for_security and
self.cleaned_data['enabled']):
raise ValidationError(self.DISABLED_FOR_SECURITY_ERROR)
if 'client_id' in self.cleaned_data:
del self.cleaned_data['client_id']
if 'client_secret' in self.cleaned_data:
del self.cleaned_data['client_secret']
return self.cleaned_data
class Meta:
model = Application
fields = '__all__'
help_texts = {
'authorization_grant_type': _(
'How the authorization is granted to the application.'
),
'client_secret': _(
'The client secret. This should only be known to Review Board '
'and your application.'
),
'client_type': _(
"The type of client. Confidential clients must be able to "
"keep users' passwords secure."
),
'name': _(
'The application name.'
),
'redirect_uris': _(
'A list of allowed URIs to redirect to.',
),
'skip_authorization': _(
'Whether or not users will be prompted for authentication. '
'This should most likely be unchecked.'
),
'user': _(
'The user who created the application. The selected user will '
'be able to change these settings from their account settings.'
),
}
widgets = {
'client_secret': CopyableTextInput(attrs={
'readonly': True,
'size': 100,
}),
'name': widgets.TextInput(attrs={'size': 60}),
'redirect_uris': ListEditWidget(attrs={'size': 60}, sep=' '),
'user': RelatedUserWidget(multivalued=False),
'original_user': RelatedUserWidget(multivalued=False),
}
labels = {
'authorization_grant_type': _('Authorization Grant Type'),
'client_secret': _('Client Secret'),
'client_type': _('Client Type'),
'name': _('Name'),
'redirect_uris': _('Redirect URIs'),
'skip_authorization': _('Skip Authorization'),
'user': _('User'),
}
class ApplicationCreationForm(ApplicationChangeForm):
"""A form for creating an Application.
This is meant to be used by the admin site.
"""
def save(self, commit=True):
"""Save the form.
This method will generate the ``client_id`` and ``client_secret``
fields.
Args:
commit (bool, optional):
Whether or not the Application should be saved to the database.
Returns:
reviewboard.oauth.models.Application:
The created Application.
"""
instance = super(ApplicationCreationForm, self).save(commit=False)
instance.client_id = generate_client_id()
instance.client_secret = generate_client_secret()
if commit:
instance.save()
return instance
class Meta(ApplicationChangeForm.Meta):
exclude = (
'client_id',
'client_secret',
)
class UserApplicationChangeForm(ApplicationChangeForm):
"""A form for an end user to change an Application."""
def __init__(self, user, data=None, initial=None, instance=None):
"""Initialize the form.
Args:
user (django.contrib.auth.models.User):
The user changing the form. Ignored, but included to match
:py:meth:`UserApplicationCreationForm.__init__`.
data (dict):
The provided data.
initial (dict, optional):
The initial form values.
instance (reviewboard.oauth.models.Application):
The Application that is to be edited.
"""
super(UserApplicationChangeForm, self).__init__(data=data,
initial=initial,
instance=instance)
class Meta(ApplicationChangeForm.Meta):
exclude = (
'extra_data',
'local_site',
'original_user',
'skip_authorization',
'user',
)
class UserApplicationCreationForm(ApplicationCreationForm):
"""A form for an end user to update an Application."""
def __init__(self, user, data, initial=None, instance=None):
"""Initialize the form.
Args:
user (django.contrib.auth.models.User):
                The user creating the Application. This user will be set as
                the application's owner when the form is saved.
data (dict):
The provided data.
initial (dict, optional):
The initial form values.
instance (reviewboard.oauth.models.Application, optional):
The Application that is to be edited.
This should always be ``None``.
"""
assert instance is None
super(UserApplicationCreationForm, self).__init__(data=data,
initial=initial,
instance=instance)
self.user = user
def save(self, commit=True):
"""Save the form.
This method will associate the user creating the application as its
owner.
Args:
commit (bool, optional):
Whether or not the Application should be saved to the database.
Returns:
reviewboard.oauth.models.Application:
The created Application.
"""
instance = super(UserApplicationCreationForm, self).save(commit=False)
instance.user = self.user
if commit:
instance.save()
return instance
class Meta(ApplicationCreationForm.Meta):
exclude = (ApplicationCreationForm.Meta.exclude +
UserApplicationChangeForm.Meta.exclude)
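

def _example_create_application(request):
    """Editor's sketch, not part of the original module: typical wiring of
    the end-user creation form in a Django view. The request handling shown
    here is an illustrative assumption.
    """
    form = UserApplicationCreationForm(request.user, data=request.POST)
    if form.is_valid():
        # client_id and client_secret are generated inside save()
        return form.save()
    return None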
| mit | 8,614,490,914,715,181,000 | 33.034286 | 79 | 0.561702 | false | 5.028282 | false | false | false |
praekelt/nurseconnect | nurseconnect/tests/test_utils.py | 1 | 4506 | from freezegun import freeze_time
from django.test import TestCase
from django.contrib.auth.models import User
from molo.core.tests.base import MoloTestCaseMixin
from molo.surveys.models import MoloSurveyPage, MoloSurveySubmission
from molo.surveys.tests.test_models import create_survey
from nurseconnect.utils import (
get_period_date_format,
convert_string_to_boolean_list,
get_survey_results_for_user,
)
class UtilsTestCase(TestCase):
@freeze_time("2018-02-01")
def test_get_period_date_format_1(self):
self.assertEqual(
get_period_date_format(),
"201802"
)
@freeze_time("2012-12-01")
def test_get_period_date_format_2(self):
self.assertEqual(
get_period_date_format(),
"201212"
)
def test_convert_string_to_boolean_list_1(self):
self.assertEqual(
convert_string_to_boolean_list("true"),
[True]
)
def test_convert_string_to_boolean_list_2(self):
self.assertEqual(
convert_string_to_boolean_list("true,false"),
[True, False]
)
def test_convert_string_to_boolean_list_3(self):
self.assertEqual(
convert_string_to_boolean_list(" true, false"),
[True, False]
)
def test_convert_string_to_boolean_list_4(self):
self.assertEqual(
convert_string_to_boolean_list("TRUE,FalSE"),
[True, False]
)
def test_convert_string_to_boolean_list_5(self):
self.assertEqual(
convert_string_to_boolean_list("true,BANANA,false"),
[True, False]
)
def test_convert_string_to_boolean_list_6(self):
self.assertEqual(
convert_string_to_boolean_list("false , True"),
[False, True]
)
def test_convert_string_to_boolean_list_7(self):
self.assertEqual(
convert_string_to_boolean_list("false;true"),
[]
)
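

# Editor's note: minimal sketches consistent with the expectations above.
# The real implementations live in nurseconnect.utils; these illustrative
# versions are assumptions, included only to make the tested behaviour
# concrete.
def _convert_string_to_boolean_list_sketch(value):
    result = []
    for token in value.split(","):
        token = token.strip().lower()
        if token == "true":
            result.append(True)
        elif token == "false":
            result.append(False)
        # anything else (e.g. "BANANA") is silently dropped
    return result


def _get_period_date_format_sketch():
    # "201802" for February 2018, matching the frozen-time tests above
    from datetime import datetime
    return datetime.today().strftime("%Y%m")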
class SurveyUtilsTestCase(TestCase, MoloTestCaseMixin):
def setUp(self):
self.mk_main()
self.user = User.objects.create_user(
username='tester',
email='[email protected]',
password='tester')
def test_get_survey_results_for_user_1(self):
create_survey([
{
"question": "The sky is blue",
"type": 'radio',
"choices": ["true", "false"],
"required": True,
"page_break": False,
}
])
survey = MoloSurveyPage.objects.first()
survey.thank_you_text = "true"
survey.save()
MoloSurveySubmission.objects.create(
page=survey, user=self.user,
form_data='{"the-sky-is-blue": "True"}')
self.assertEqual(
get_survey_results_for_user(survey, self.user),
[{
"question": "The sky is blue",
"user_answer": True,
"correct_answer": True,
}]
)
def test_get_survey_results_for_user_2(self):
create_survey([
{
"question": "The sky is blue",
"type": 'radio',
"choices": ["true", "false"],
"required": True,
"page_break": False,
},
{
"question": "The grass is purple",
"type": 'radio',
"choices": ["true", "false"],
"required": True,
"page_break": False,
}
])
survey = MoloSurveyPage.objects.first()
survey.thank_you_text = "true,false"
survey.save()
MoloSurveySubmission.objects.create(
page=survey, user=self.user,
form_data=('{"the-sky-is-blue": "True", '
'"the-grass-is-purple": "True"}'))
self.assertEqual(
get_survey_results_for_user(survey, self.user),
[
{
"question": "The sky is blue",
"user_answer": True,
"correct_answer": True,
},
{
"question": "The grass is purple",
"user_answer": True,
"correct_answer": False,
},
]
)
| bsd-2-clause | -6,023,230,630,583,244,000 | 29.04 | 68 | 0.517976 | false | 3.977052 | true | false | false |
Patreon/cartographer | cartographer/field_types/schema_relationship.py | 1 | 4047 | from cartographer.resources import get_resource_registry
from cartographer.resources.resource_registry import ResourceRegistryKeys
class SchemaRelationship(object):
"""
`SchemaRelationship` describes how to translate related resources to and from JSON API and our Python models.
    `SchemaRelationship` has one primary method,
`related_serializer`, for creating a `JSONAPISerializer` instance based on its input arguments.
Subclasses of `SchemaSerializer` can override this method
to customize serialization behavior.
    Parsing of related resources is not currently handled by this class,
    and instead is handled by the `PostedDocument` class (or, more typically, its subclass `SchemaParser`).
"""
def __init__(self, model_type, id_attribute=None, model_property=None,
model_method=None, serializer_method=None, includes=None):
"""
NOTE: only one of id_attribute, model_property, model_method, or serializer_method should be provided
:param model_type: the JSON API `type` string for the related model
        :param id_attribute: the foreign key column on the parent serializer model which identifies the related model
        :param model_property: the property on the parent serializer model which returns the related model
        :param model_method: the method on the parent serializer model which, when called, returns the related model
:param serializer_method: the name of the method on the parent serializer object which uses this schema
which should be called to get the child serializer.
:return: an instance of SchemaRelationship,
which will later be used to serialize Python into JSON API.
"""
identifier_args = [id_attribute, model_property, model_method, serializer_method]
provided_identifiers = [identifier
for identifier in identifier_args
if identifier]
        if len(provided_identifiers) > 1:
            identifier_names = ["id_attribute", "model_property",
                                "model_method", "serializer_method"]
            raise Exception("only one of [{}] should be provided".format(
                ", ".join(identifier_names)))
self.model_type = model_type
self.id_attribute = id_attribute
self.model_property = model_property
self.model_method = model_method
self.serializer_method = serializer_method
self.includes = includes
def related_serializer(self, parent_serializer, relationship_key):
"""
:param parent_serializer: The serializer which has our return value as a related resource
:param relationship_key: The name by which the parent serializer knows this child
:return: The child serializer which will later be used to serialize a related resource
"""
if self.serializer_method is not None:
return getattr(parent_serializer, self.serializer_method)()
model = None
if self.id_attribute is not None:
related_model_getter = self.resource_registry_entry().get(ResourceRegistryKeys.MODEL_GET)
model_id = getattr(parent_serializer.model, self.id_attribute)
if model_id is not None and related_model_getter is not None:
model = related_model_getter(model_id)
elif self.model_property is not None:
model = getattr(parent_serializer.model, self.model_property)
elif self.model_method is not None:
model = getattr(parent_serializer.model, self.model_method)()
if model:
serializer_class = self.resource_registry_entry().get(ResourceRegistryKeys.SERIALIZER)
return serializer_class(
model,
parent_serializer=parent_serializer,
relationship_name=relationship_key,
includes=self.includes
)
else:
from cartographer.serializers import JSONAPINullSerializer
return JSONAPINullSerializer()
def resource_registry_entry(self):
return get_resource_registry().get(self.model_type, {})
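

# Editor's note: a hedged usage sketch, not part of the original module. The
# "users" type and the `author_id` column are illustrative assumptions; they
# show a relationship resolved through a foreign key on the parent model.
def _example_author_relationship():
    # serialize the parent model's author_id column as a related "users"
    # resource, looked up through the resource registry at render time
    return SchemaRelationship(model_type='users', id_attribute='author_id')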
| apache-2.0 | 8,312,230,826,271,928,000 | 50.227848 | 122 | 0.679763 | false | 4.812128 | false | false | false |
ArseniyK/Sunflower | application/operation.py | 1 | 48288 | import os
import gtk
import gobject
import fnmatch
from threading import Thread, Event
from gui.input_dialog import OverwriteFileDialog, OverwriteDirectoryDialog, OperationError, QuestionOperationError
from gui.operation_dialog import CopyDialog, MoveDialog, DeleteDialog, RenameDialog
from gui.error_list import ErrorList
from plugin_base.provider import Mode as FileMode, TrashError, Support as ProviderSupport
from plugin_base.monitor import MonitorSignals
from common import format_size
from queue import OperationQueue
# import constants
from gui.input_dialog import OverwriteOption
class BufferSize:
LOCAL = 4096 * 1024
REMOTE = 100 * 1024
class Option:
FILE_TYPE = 0
DESTINATION = 1
SET_OWNER = 2
SET_MODE = 3
SET_TIMESTAMP = 4
SILENT = 5
SILENT_MERGE = 6
SILENT_OVERWRITE = 7
class Skip:
TRASH = 0
REMOVE = 1
WRITE = 2
CREATE = 3
MODE_SET = 4
MOVE = 5
RENAME = 6
READ = 7
class OperationType:
COPY = 0
MOVE = 1
DELETE = 2
RENAME = 3
LINK = 4
class Operation(Thread):
"""Parent class for all operation threads"""
def __init__(self, application, source, destination=None, options=None, destination_path=None):
Thread.__init__(self, target=self)
self._can_continue = Event()
self._abort = Event()
self._application = application
self._source = source
self._destination = destination
self._options = options
self._source_queue = None
self._destination_queue = None
self._merge_all = None
self._overwrite_all = None
self._response_cache = {}
# operation queue
self._operation_queue = None
self._operation_queue_name = None
# daemonize
self.daemon = True
# create operation dialog
self._dialog = None
self._create_dialog()
self._dir_list = []
self._file_list = []
self._error_list = []
self._selection_list = []
# store initial paths
self._source_path = self._source.get_path()
if self._destination is not None:
self._destination_path = destination_path or self._destination.get_path()
self._can_continue.set()
def _create_dialog(self):
"""Create operation dialog"""
pass
def _destroy_ui(self):
"""Destroy user interface"""
if self._dialog is not None:
with gtk.gdk.lock:
self._dialog.destroy()
def _get_free_space_input(self, needed, available):
"""Get user input when there is not enough space"""
size_format = self._application.options.get('size_format')
space_needed = format_size(needed, size_format)
space_available = format_size(available, size_format)
if self._options is not None and self._options[Option.SILENT]:
# silent option is enabled, we skip operation by default
self._error_list.append(_(
'Aborted. Not enough free space on target file system.\n'
'Needed: {0}\n'
'Available: {1}'
).format(space_needed, space_available))
should_continue = False
else:
# ask user what to do
with gtk.gdk.lock:
dialog = gtk.MessageDialog(
self._dialog.get_window(),
gtk.DIALOG_DESTROY_WITH_PARENT,
gtk.MESSAGE_WARNING,
gtk.BUTTONS_YES_NO,
_(
'Target file system does not have enough '
'free space for this operation to continue.\n\n'
'Needed: {0}\n'
'Available: {1}\n\n'
'Do you wish to continue?'
).format(space_needed, space_available)
)
dialog.set_default_response(gtk.RESPONSE_YES)
result = dialog.run()
dialog.destroy()
should_continue = result == gtk.RESPONSE_YES
return should_continue
def _get_merge_input(self, path):
"""Get merge confirmation"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, do what user specified
merge = self._options[Option.SILENT_MERGE]
self._merge_all = merge
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OverwriteDirectoryDialog(self._application, self._dialog.get_window())
title_element = os.path.basename(path)
message_element = os.path.basename(os.path.dirname(os.path.join(self._destination.get_path(), path)))
dialog.set_title_element(title_element)
dialog.set_message_element(message_element)
dialog.set_rename_value(title_element)
dialog.set_source(
self._source,
path,
relative_to=self._source_path
)
dialog.set_original(
self._destination,
path,
relative_to=self._destination_path
)
result = dialog.get_response()
merge = result[0] == gtk.RESPONSE_YES
if result[1][OverwriteOption.APPLY_TO_ALL]:
self._merge_all = merge
# in case user canceled operation
if result[0] == gtk.RESPONSE_CANCEL:
self.cancel()
return merge # return only response for current directory
def _get_overwrite_input(self, path):
"""Get overwrite confirmation"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, do what user specified
overwrite = self._options[Option.SILENT_OVERWRITE]
self._overwrite_all = overwrite
options = (False, '', True) # no rename, apply to all
else:
# we are not in silent mode, ask user what to do
with gtk.gdk.lock:
dialog = OverwriteFileDialog(self._application, self._dialog.get_window())
title_element = os.path.basename(path)
message_element = os.path.basename(os.path.dirname(os.path.join(self._destination.get_path(), path)))
dialog.set_title_element(title_element)
dialog.set_message_element(message_element)
dialog.set_rename_value(title_element)
dialog.set_source(
self._source,
path,
relative_to=self._source_path
)
dialog.set_original(
self._destination,
path,
relative_to=self._destination_path
)
result = dialog.get_response()
overwrite = result[0] == gtk.RESPONSE_YES
if result[1][OverwriteOption.APPLY_TO_ALL]:
self._overwrite_all = overwrite
# in case user canceled operation
if result[0] == gtk.RESPONSE_CANCEL:
self.cancel()
# pass options from input dialog
options = result[1]
return overwrite, options
def _get_write_error_input(self, error):
"""Get user response for write error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There is a problem writing data to destination '
'file. What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.WRITE] = response
# abort operation if user requested
if response == OperationError.RESPONSE_CANCEL:
self.cancel()
return response
def _get_create_error_input(self, error, is_directory=False):
"""Get user response for create error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
if not is_directory:
# set message for file
dialog.set_message(_(
'An error occurred while trying to create specified '
'file. What would you like to do?'
))
else:
# set message for directory
dialog.set_message(_(
'An error occurred while trying to create specified '
'directory. What would you like to do?'
))
dialog.set_error(str(error))
# get user response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.CREATE] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_mode_set_error_input(self, error):
"""Get user response for mode set error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
				dialog.set_message(_(
					'There was a problem setting an attribute on the '
					'specified path. What would you like to do?'
				))
dialog.set_error(str(error))
# get user response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.MODE_SET] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_remove_error_input(self, error):
"""Get user response for remove error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = OperationError.RESPONSE_SKIP
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem removing specified path. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.REMOVE] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_trash_error_input(self, error):
"""Get user response for remove error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = QuestionOperationError(self._application)
dialog.set_message(_(
'There was a problem trashing specified path. '
'Would you like to try removing it instead?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.TRASH] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_move_error_input(self, error):
"""Get user response for move error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem moving specified path. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.MOVE] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_rename_error_input(self, error):
"""Get user response for rename error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem renaming specified path. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.RENAME] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def _get_read_error_input(self, error):
"""Get user response for directory listing error"""
if self._options is not None and self._options[Option.SILENT]:
# we are in silent mode, set response and log error
self._error_list.append(str(error))
response = gtk.RESPONSE_NO
else:
# we are not in silent mode, ask user
with gtk.gdk.lock:
dialog = OperationError(self._application)
dialog.set_message(_(
'There was a problem with reading specified directory. '
'What would you like to do?'
))
dialog.set_error(str(error))
# get users response
response = dialog.get_response()
# check if this response applies to future errors
if response == OperationError.RESPONSE_SKIP_ALL:
response = OperationError.RESPONSE_SKIP
self._response_cache[Skip.READ] = response
# abort operation if user requested
if response == gtk.RESPONSE_CANCEL:
self.cancel()
return response
def set_selection(self, item_list):
"""Set list of selected items"""
self._selection_list.extend(item_list)
def set_operation_queue(self, queue_name):
"""Set operation to wait for queue."""
if queue_name is None:
return
# create new queue
self._operation_queue = Event()
self._operation_queue_name = queue_name
# schedule operation
OperationQueue.add(queue_name, self._operation_queue)
def set_source_queue(self, queue):
"""Set event queue for fall-back monitor support"""
self._source_queue = queue
def set_destination_queue(self, queue):
"""Set event queue for fall-back monitor support"""
self._destination_queue = queue
def pause(self):
"""Pause current operation"""
self._can_continue.clear()
def resume(self):
"""Resume current operation"""
self._can_continue.set()
def cancel(self):
"""Set an abort switch"""
self._abort.set()
# release lock set by the pause
if not self._can_continue.is_set():
self.resume()
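

def _example_worker_loop(operation, items, process):
	"""Editor's sketch, not part of the original module: the cooperative
	pause/abort pattern every operation loop below follows. The `process`
	callback is an illustrative stand-in for the per-item work."""
	for item in items:
		if operation._abort.is_set(): break  # abort operation if requested
		operation._can_continue.wait()  # pause lock
		process(item)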
class CopyOperation(Operation):
"""Operation thread used for copying files"""
def __init__(self, application, source, destination, options, destination_path=None):
Operation.__init__(self, application, source, destination, options, destination_path)
self._merge_all = None
self._overwrite_all = None
self._dir_list_create = []
self._total_count = 0
self._total_size = 0
self._buffer_size = 0
# cache settings
should_reserve = self._application.options.section('operations').get('reserve_size')
supported_by_provider = ProviderSupport.RESERVE_SIZE in self._destination.get_support()
self._reserve_size = should_reserve and supported_by_provider
# detect buffer size
if self._source.is_local and self._destination.is_local:
system_stat = self._destination.get_system_size(self._destination_path)
if system_stat.block_size:
self._buffer_size = system_stat.block_size * 1024
else:
self._buffer_size = BufferSize.LOCAL
else:
self._buffer_size = BufferSize.REMOTE
def _create_dialog(self):
"""Create progress dialog"""
self._dialog = CopyDialog(self._application, self)
def _update_status(self, status):
"""Set status and reset progress bars"""
self._dialog.set_status(status)
self._dialog.set_current_file("")
self._dialog.set_current_file_fraction(0)
def _get_lists(self):
"""Find all files for copying"""
gobject.idle_add(self._update_status, _('Searching for files...'))
# exclude files already selected with parent directory
for file_name in self._selection_list:
self._selection_list = filter(
lambda item: not item.startswith(file_name + os.path.sep),
self._selection_list
)
# traverse through the rest of the items
for item in self._selection_list:
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
# update current file label
gobject.idle_add(self._dialog.set_current_file, item)
gobject.idle_add(self._dialog.pulse)
if os.path.sep in item:
relative_path, item = os.path.split(item)
source_path = os.path.join(self._source_path, relative_path)
else:
relative_path = None
source_path = self._source_path
if self._source.is_dir(item, relative_to=source_path):
# item is directory
can_procede = True
can_create = True
# check if directory exists on destination
if self._destination.exists(item, relative_to=self._destination_path):
can_create = False
if self._merge_all is not None:
can_procede = self._merge_all
else:
can_procede = self._get_merge_input(item)
# if user didn't skip directory, scan and update lists
if can_procede:
self._dir_list.append((item, relative_path))
if can_create: self._dir_list_create.append((item, relative_path))
self._scan_directory(item, relative_path)
elif fnmatch.fnmatch(item, self._options[Option.FILE_TYPE]):
# item is a file, get stats and update lists
item_stat = self._source.get_stat(item, relative_to=source_path)
gobject.idle_add(self._dialog.increment_total_size, item_stat.size)
gobject.idle_add(self._dialog.increment_total_count, 1)
self._total_count += 1
self._total_size += item_stat.size
self._file_list.append((item, relative_path))
def _set_mode(self, path, mode):
"""Set mode for specified path"""
if not self._options[Option.SET_MODE]: return
try:
# try to set mode for specified path
self._destination.set_mode(
path,
mode,
relative_to=self._destination_path
)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.ATTRIBUTE_CHANGED, path, None)
self._destination_queue.put(event, False)
except StandardError as error:
# problem setting mode, ask user
if Skip.MODE_SET in self._response_cache:
response = self._response_cache[Skip.MODE_SET]
else:
response = self._get_mode_set_error_input(error)
# try to set mode again
if response == OperationError.RESPONSE_RETRY:
self._set_mode(path, mode)
return
def _set_owner(self, path, user_id, group_id):
"""Set owner and group for specified path"""
if not self._options[Option.SET_OWNER]: return
try:
# try set owner of specified path
self._destination.set_owner(
path,
user_id,
group_id,
relative_to=self._destination_path
)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.ATTRIBUTE_CHANGED, path, None)
self._destination_queue.put(event, False)
except StandardError as error:
# problem with setting owner, ask user
if Skip.MODE_SET in self._response_cache:
response = self._response_cache[Skip.MODE_SET]
else:
response = self._get_mode_set_error_input(error)
# try to set owner again
if response == OperationError.RESPONSE_RETRY:
self._set_owner(path, user_id, group_id)
return
def _set_timestamp(self, path, access_time, modify_time, change_time):
"""Set timestamps for specified path"""
if not self._options[Option.SET_TIMESTAMP]: return
try:
# try setting timestamp
self._destination.set_timestamp(
path,
access_time,
modify_time,
change_time,
relative_to=self._destination_path
)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.ATTRIBUTE_CHANGED, path, None)
self._destination_queue.put(event, False)
except StandardError as error:
# problem with setting owner, ask user
if Skip.MODE_SET in self._response_cache:
response = self._response_cache[Skip.MODE_SET]
else:
response = self._get_mode_set_error_input(error)
# try to set timestamp again
if response == OperationError.RESPONSE_RETRY:
self._set_timestamp(path, access_time, modify_time, change_time)
return
def _scan_directory(self, directory, relative_path=None):
"""Recursively scan directory and populate list"""
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
try:
# try to get listing from directory
item_list = self._source.list_dir(directory, relative_to=source_path)
except StandardError as error:
# problem with reading specified directory, ask user
if Skip.READ in self._response_cache:
response = self._response_cache[Skip.READ]
else:
response = self._get_read_error_input(error)
# try to scan specified directory again
if response == OperationError.RESPONSE_RETRY:
self._scan_directory(directory, relative_path)
return
for item in item_list:
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
gobject.idle_add(self._dialog.set_current_file, os.path.join(directory, item))
gobject.idle_add(self._dialog.pulse)
full_name = os.path.join(directory, item)
# item is a directory, scan it
if self._source.is_dir(full_name, relative_to=source_path):
can_procede = True
can_create = True
if self._destination.exists(full_name, relative_to=self._destination_path):
can_create = False
if self._merge_all is not None:
can_procede = self._merge_all
else:
can_procede = self._get_merge_input(full_name)
if can_procede:
# allow processing specified directory
self._dir_list.append((full_name, source_path))
if can_create: self._dir_list_create.append((full_name, source_path))
self._scan_directory(full_name, relative_path)
elif fnmatch.fnmatch(item, self._options[Option.FILE_TYPE]):
# item is a file, update global statistics
item_stat = self._source.get_stat(full_name, relative_to=source_path)
gobject.idle_add(self._dialog.increment_total_size, item_stat.size)
gobject.idle_add(self._dialog.increment_total_count, 1)
self._total_count += 1
self._total_size += item_stat.size
self._file_list.append((full_name, relative_path))
def _create_directory(self, directory, relative_path=None):
"""Create specified directory"""
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
file_stat = self._source.get_stat(directory, relative_to=source_path)
mode = file_stat.mode if self._options[Option.SET_MODE] else 0755
try:
# try to create a directory
if self._destination.exists(directory, relative_to=self._destination_path):
if not self._destination.is_dir(directory, relative_to=self._destination_path):
raise StandardError(_(
'Unable to create directory because file with the same name '
'already exists in target directory.'
))
else:
# inode with specified name doesn't exist, create directory
self._destination.create_directory(
directory,
mode,
relative_to=self._destination_path
)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.CREATED, directory, None)
self._destination_queue.put(event, False)
except StandardError as error:
# there was a problem creating directory
if Skip.CREATE in self._response_cache:
response = self._response_cache[Skip.CREATE]
else:
response = self._get_create_error_input(error, True)
# try to create directory again
if response == OperationError.RESPONSE_RETRY:
				self._create_directory(directory, relative_path)
# exit method
return
# set owner
self._set_owner(directory, file_stat.user_id, file_stat.group_id)
def _copy_file(self, file_name, relative_path=None):
"""Copy file content"""
can_procede = True
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
dest_file = file_name
sh = None
dh = None
# check if destination file exists
if self._destination.exists(file_name, relative_to=self._destination_path):
if self._overwrite_all is not None:
can_procede = self._overwrite_all
else:
can_procede, options = self._get_overwrite_input(file_name)
# get new name if user specified
if options[OverwriteOption.RENAME]:
dest_file = os.path.join(
os.path.dirname(file_name),
options[OverwriteOption.NEW_NAME]
)
elif source_path == self._destination_path:
can_procede = False
# if user skipped this file return
if not can_procede:
self._file_list.pop(self._file_list.index((file_name, relative_path)))
# update total size
file_stat = self._source.get_stat(file_name, relative_to=source_path)
gobject.idle_add(self._dialog.increment_current_size, file_stat.size)
return
try:
# get file stats
destination_size = 0L
file_stat = self._source.get_stat(file_name, relative_to=source_path, extended=True)
# get file handles
sh = self._source.get_file_handle(file_name, FileMode.READ, relative_to=source_path)
dh = self._destination.get_file_handle(dest_file, FileMode.WRITE, relative_to=self._destination_path)
# report error properly
if sh is None:
raise StandardError('Unable to open source file in read mode.')
if dh is None:
raise StandardError('Unable to open destination file in write mode.')
# reserve file size
if self._reserve_size:
# try to reserve file size in advance,
# can be slow on memory cards and network
try:
dh.truncate(file_stat.size)
except:
dh.truncate()
else:
# just truncate file to 0 size in case source file is smaller
dh.truncate()
dh.seek(0)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.CREATED, dest_file, None)
self._destination_queue.put(event, False)
except StandardError as error:
# close handles if they exist
if hasattr(sh, 'close'): sh.close()
			if hasattr(dh, 'close'): dh.close()
if Skip.CREATE in self._response_cache:
response = self._response_cache[Skip.CREATE]
else:
response = self._get_create_error_input(error)
# try to create file again and copy contents
if response == OperationError.RESPONSE_RETRY:
self._copy_file(dest_file)
else:
# user didn't want to retry, remove file from list
self._file_list.pop(self._file_list.index((file_name, relative_path)))
# remove amount of copied bytes from total size
gobject.idle_add(self._dialog.increment_current_size, -destination_size)
# exit method
return
while True:
if self._abort.is_set(): break
self._can_continue.wait() # pause lock
data = sh.read(self._buffer_size)
if data:
try:
# try writing data to destination
dh.write(data)
except IOError as error:
# handle error
if Skip.WRITE in self._response_cache:
response = self._response_cache[Skip.WRITE]
else:
response = self._get_write_error_input(error)
# try to write data again
if response == OperationError.RESPONSE_RETRY:
gobject.idle_add(self._dialog.increment_current_size, -dh.tell())
if hasattr(sh, 'close'): sh.close()
						if hasattr(dh, 'close'): dh.close()
self._copy_file(dest_file)
return
destination_size += len(data)
gobject.idle_add(self._dialog.increment_current_size, len(data))
if file_stat.size > 0: # ensure we don't end up with error on 0 size files
gobject.idle_add(
self._dialog.set_current_file_fraction,
destination_size / float(file_stat.size)
)
else:
gobject.idle_add(self._dialog.set_current_file_fraction, 1)
# push event to the queue
if self._destination_queue is not None:
event = (MonitorSignals.CHANGED, dest_file, None)
self._destination_queue.put(event, False)
else:
sh.close()
dh.close()
# set file parameters
self._set_mode(dest_file, file_stat.mode)
self._set_owner(dest_file, file_stat.user_id, file_stat.group_id)
self._set_timestamp(
dest_file,
file_stat.time_access,
file_stat.time_modify,
file_stat.time_change
)
break
def _create_directory_list(self):
"""Create all directories in list"""
gobject.idle_add(self._update_status, _('Creating directories...'))
for number, directory in enumerate(self._dir_list_create, 0):
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
gobject.idle_add(self._dialog.set_current_file, directory[0])
self._create_directory(directory[0], directory[1]) # create directory
gobject.idle_add(
self._dialog.set_current_file_fraction,
float(number) / len(self._dir_list)
)
def _copy_file_list(self):
"""Copy list of files to destination path"""
# update status
gobject.idle_add(self._update_status, _('Copying files...'))
item_list = self._file_list[:]
# copy all the files in list
for file_name, source_path in item_list:
# abort operation if requested
if self._abort.is_set(): break
self._can_continue.wait() # pause lock
# copy file
gobject.idle_add(self._dialog.set_current_file, file_name)
self._copy_file(file_name, source_path)
gobject.idle_add(self._dialog.increment_current_count, 1)
def run(self):
"""Main thread method, this is where all the stuff is happening"""
# set dialog info
with gtk.gdk.lock:
self._dialog.set_source(self._source_path)
self._dialog.set_destination(self._destination_path)
# wait for operation queue if needed
if self._operation_queue is not None:
self._operation_queue.wait()
# get list of items to copy
self._get_lists()
# check for available free space
system_info = self._destination.get_system_size(self._destination_path)
if ProviderSupport.SYSTEM_SIZE in self._destination.get_support() \
and self._total_size > system_info.size_available:
should_continue = self._get_free_space_input(self._total_size, system_info.size_available)
# exit if user chooses to
if not should_continue:
self.cancel()
# clear selection on source directory
with gtk.gdk.lock:
parent = self._source.get_parent()
if self._source_path == parent.path:
parent.deselect_all()
# perform operation
self._create_directory_list()
self._copy_file_list()
# notify user if window is not focused
with gtk.gdk.lock:
if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
notify_manager = self._application.notification_manager
title = _('Copy Operation')
message = ngettext(
'Copying of {0} item from "{1}" to "{2}" is completed!',
'Copying of {0} items from "{1}" to "{2}" is completed!',
len(self._file_list) + len(self._dir_list)
).format(
len(self._file_list) + len(self._dir_list),
os.path.basename(self._source_path),
os.path.basename(self._destination_path)
)
# queue notification
notify_manager.notify(title, message)
# show error list if needed
if len(self._error_list) > 0:
error_list = ErrorList(self._dialog)
error_list.set_operation_name(_('Copy Operation'))
error_list.set_source(self._source_path)
error_list.set_destination(self._destination_path)
error_list.set_errors(self._error_list)
error_list.show()
# destroy dialog
self._destroy_ui()
# start next operation
if self._operation_queue is not None:
OperationQueue.start_next(self._operation_queue_name)
class MoveOperation(CopyOperation):
"""Operation thread used for moving files"""
def _remove_path(self, path, item_list, relative_path=None):
"""Remove path specified path."""
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
try:
# try removing specified path
self._source.remove_path(path, relative_to=source_path)
# push event to the queue
if self._source_queue is not None:
event = (MonitorSignals.DELETED, path, None)
self._source_queue.put(event, False)
except StandardError as error:
# problem removing path, ask user what to do
if Skip.REMOVE in self._response_cache:
response = self._response_cache[Skip.REMOVE]
else:
response = self._get_remove_error_input(error)
# try removing path again
if response == OperationError.RESPONSE_RETRY:
				self._remove_path(path, item_list, relative_path)
else:
# user didn't want to retry, remove path from item_list
item_list.pop(item_list.index(path))
def _create_dialog(self):
"""Create progress dialog"""
self._dialog = MoveDialog(self._application, self)
def _move_file(self, file_name, relative_path=None):
"""Move specified file using provider rename method"""
can_procede = True
source_path = self._source_path if relative_path is None else os.path.join(self._source_path, relative_path)
dest_file = file_name
# check if destination file exists
if self._destination.exists(file_name, relative_to=self._destination_path):
if self._overwrite_all is not None:
can_procede = self._overwrite_all
else:
can_procede, options = self._get_overwrite_input(file_name)
# get new name if user specified
if options[OverwriteOption.RENAME]:
dest_file = os.path.join(
os.path.dirname(file_name),
options[OverwriteOption.NEW_NAME]
)
# if user skipped this file return
if not can_procede:
self._file_list.pop(self._file_list.index((file_name, relative_path)))
return
# move file
try:
self._source.move_path(
file_name,
os.path.join(self._destination_path, dest_file),
relative_to=source_path
)
# push events to the queue
if self._source_queue is not None:
event = (MonitorSignals.DELETED, file_name, None)
self._source_queue.put(event, False)
if self._destination_queue is not None:
event = (MonitorSignals.CREATED, dest_file, None)
self._destination_queue.put(event, False)
except StandardError as error:
# problem with moving file, ask user what to do
if Skip.MOVE in self._response_cache:
response = self._response_cache[Skip.MOVE]
else:
response = self._get_move_error_input(error)
# try moving file again
if response == OperationError.RESPONSE_RETRY:
self._move_file(dest_file)
else:
# user didn't want to retry, remove file from list
self._file_list.pop(self._file_list.index((file_name, relative_path)))
# exit method
return
def _move_file_list(self):
"""Move files from the list"""
gobject.idle_add(self._update_status, _('Moving files...'))
item_list = self._file_list[:]
for file_name, source_path in item_list:
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
# move file
gobject.idle_add(self._dialog.set_current_file, file_name)
self._move_file(file_name, source_path)
gobject.idle_add(self._dialog.increment_current_count, 1)
def _delete_file_list(self):
"""Remove files from source list"""
gobject.idle_add(self._update_status, _('Deleting source files...'))
item_list = self._file_list[:]
for number, item in enumerate(item_list, 0):
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
# remove path
gobject.idle_add(self._dialog.set_current_file, item[0])
self._remove_path(item[0], self._file_list, item[1])
# update current count
gobject.idle_add(
self._dialog.set_current_file_fraction,
float(number) / len(item_list)
)
self._delete_directories()
def _delete_directories(self):
"""Remove empty directories after moving files"""
gobject.idle_add(self._update_status, _('Deleting source directories...'))
dir_list = self._dir_list[:]
dir_list.reverse() # remove deepest directories first
for number, directory in enumerate(dir_list, 0):
source_path = self._source_path if directory[1] is None else os.path.join(self._source_path, directory[1])
directory = directory[0]
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
if self._source.exists(directory, relative_to=source_path):
gobject.idle_add(self._dialog.set_current_file, directory)
# try to get a list of items inside of directory
try:
item_list = self._source.list_dir(directory, relative_to=source_path)
except:
item_list = None
# remove directory if empty
if item_list is not None and len(item_list) == 0:
self._remove_path(directory, dir_list, relative_path=source_path)
# update current count
if len(dir_list) > 0:
gobject.idle_add(
self._dialog.set_current_file_fraction,
float(number) / len(dir_list)
)
else:
# prevent division by zero
gobject.idle_add(self._dialog.set_current_file_fraction, 1)
def _check_devices(self):
"""Check if source and destination are on the same file system"""
dev_source = self._source.get_stat(self._source.get_path(), extended=True).device
dev_destination = self._destination.get_stat(self._destination.get_path(), extended=True).device
return dev_source == dev_destination
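
	# Editor's note (not original): this is the same idea as comparing
	# os.stat(a).st_dev with os.stat(b).st_dev on local paths; equal device
	# numbers mean a cheap rename is possible instead of copy-and-delete.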
def run(self):
"""Main thread method
		We override this method from CopyOperation in order to provide
		a bit smarter move operation.
"""
# set dialog info
with gtk.gdk.lock:
self._dialog.set_source(self._source_path)
self._dialog.set_destination(self._destination_path)
# wait for operation queue if needed
if self._operation_queue is not None:
self._operation_queue.wait()
# get list of items
self._get_lists()
# check for available free space
system_info = self._destination.get_system_size(self._destination_path)
if self._total_size > system_info.size_available and not self._check_devices():
should_continue = self._get_free_space_input(self._total_size, system_info.size_available)
# exit if user chooses to
if not should_continue:
self.cancel()
# clear selection on source directory
with gtk.gdk.lock:
parent = self._source.get_parent()
if self._source_path == parent.path:
parent.deselect_all()
# create directories
self._create_directory_list()
# copy/move files
if self._check_devices():
# both paths are on the same file system, move instead of copy
self._move_file_list()
self._delete_directories()
else:
# paths are located on different file systems, copy and remove
self._copy_file_list()
self._delete_file_list()
# notify user if window is not focused
with gtk.gdk.lock:
if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
notify_manager = self._application.notification_manager
title = _('Move Operation')
message = ngettext(
'Moving of {0} item from "{1}" to "{2}" is completed!',
'Moving of {0} items from "{1}" to "{2}" is completed!',
len(self._file_list) + len(self._dir_list)
).format(
len(self._file_list) + len(self._dir_list),
os.path.basename(self._source_path),
os.path.basename(self._destination_path)
)
# queue notification
notify_manager.notify(title, message)
		# show error list if needed
if len(self._error_list) > 0:
error_list = ErrorList(self._dialog)
error_list.set_operation_name(_('Move Operation'))
error_list.set_source(self._source_path)
error_list.set_destination(self._destination_path)
error_list.set_errors(self._error_list)
error_list.show()
# destroy dialog
self._destroy_ui()
# start next operation
if self._operation_queue is not None:
OperationQueue.start_next(self._operation_queue_name)
class DeleteOperation(Operation):
"""Operation thread used for deleting files"""
def __init__(self, application, provider):
Operation.__init__(self, application, provider)
# allow users to force deleting items
self._force_delete = False
def _create_dialog(self):
"""Create operation dialog"""
self._dialog = DeleteDialog(self._application, self)
def _remove_path(self, path):
"""Remove path"""
try:
# try removing specified path
self._source.remove_path(path, relative_to=self._source_path)
# push event to the queue
if self._source_queue is not None:
event = (MonitorSignals.DELETED, path, None)
self._source_queue.put(event, False)
except StandardError as error:
# problem removing path, ask user what to do
if Skip.REMOVE in self._response_cache:
response = self._response_cache[Skip.REMOVE]
else:
response = self._get_remove_error_input(error)
# try removing path again
if response == OperationError.RESPONSE_RETRY:
self._remove_path(path)
def _trash_path(self, path):
"""Move path to the trash"""
try:
# try trashing specified path
self._source.trash_path(path, relative_to=self._source_path)
# push event to the queue
if self._source_queue is not None:
event = (MonitorSignals.DELETED, path, None)
self._source_queue.put(event, False)
except TrashError as error:
# problem removing path, ask user what to do
if Skip.TRASH in self._response_cache:
response = self._response_cache[Skip.TRASH]
else:
response = self._get_trash_error_input(error)
			# remove the path instead, if user chose to
if response == OperationError.RESPONSE_RETRY:
self._remove_path(path)
def set_force_delete(self, force):
"""Set forced deletion instead of trashing files"""
self._force_delete = force
def run(self):
"""Main thread method, this is where all the stuff is happening"""
self._file_list = self._selection_list[:] # use predefined selection list
# wait for operation queue if needed
if self._operation_queue is not None:
self._operation_queue.wait()
with gtk.gdk.lock:
# clear selection on source directory
parent = self._source.get_parent()
if self._source_path == parent.path:
parent.deselect_all()
# select removal method
trash_files = self._application.options.section('operations').get('trash_files')
trash_available = ProviderSupport.TRASH in self._source.get_support()
if self._force_delete:
remove_method = self._remove_path
else:
remove_method = (
self._remove_path,
self._trash_path
)[trash_files and trash_available]
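			# tuple indexed by boolean: True selects _trash_path, False
			# selects _remove_path (editor's explanatory note, not original)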
# remove them
for index, item in enumerate(self._file_list, 1):
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
gobject.idle_add(self._dialog.set_current_file, item)
remove_method(item)
# update current count
if len(self._file_list) > 0:
gobject.idle_add(
self._dialog.set_current_file_fraction,
float(index) / len(self._file_list)
)
else:
# prevent division by zero
gobject.idle_add(self._dialog.set_current_file_fraction, 1)
# notify user if window is not focused
with gtk.gdk.lock:
if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
notify_manager = self._application.notification_manager
title = _('Delete Operation')
message = ngettext(
'Removal of {0} item from "{1}" is completed!',
'Removal of {0} items from "{1}" is completed!',
len(self._file_list)
).format(
len(self._file_list),
os.path.basename(self._source_path)
)
# queue notification
notify_manager.notify(title, message)
# destroy dialog
self._destroy_ui()
# start next operation
if self._operation_queue is not None:
OperationQueue.start_next(self._operation_queue_name)
class RenameOperation(Operation):
"""Thread used for rename of large number of files"""
def __init__(self, application, provider, path, file_list):
Operation.__init__(self, application, provider)
self._destination = provider
self._destination_path = path
self._source_path = path
self._file_list = file_list
def _create_dialog(self):
"""Create operation dialog"""
self._dialog = RenameDialog(self._application, self)
def _rename_path(self, old_name, new_name, index):
"""Rename specified path"""
can_procede = True
try:
# check if specified path already exists
if self._destination.exists(new_name, relative_to=self._source_path):
can_procede, options = self._get_overwrite_input(new_name)
# get new name if user specified
if options[OverwriteOption.RENAME]:
new_name = os.path.join(
os.path.dirname(new_name),
options[OverwriteOption.NEW_NAME]
)
if not can_procede:
# user canceled overwrite, skip the file
self._file_list.pop(index)
return
else:
# rename path
self._source.rename_path(old_name, new_name, relative_to=self._source_path)
# push event to the queue
if self._source_queue is not None:
					delete_event = (MonitorSignals.DELETED, old_name, None)
create_event = (MonitorSignals.CREATED, new_name, None)
self._source_queue.put(delete_event, False)
self._source_queue.put(create_event, False)
except StandardError as error:
# problem renaming path, ask user what to do
if Skip.RENAME in self._response_cache:
response = self._response_cache[Skip.RENAME]
else:
response = self._get_rename_error_input(error)
# try renaming path again
if response == OperationError.RESPONSE_RETRY:
				self._rename_path(old_name, new_name, index)
else:
# user didn't want to retry, remove path from list
self._file_list.pop(index)
def run(self):
"""Main thread method, this is where all the stuff is happening"""
# wait for operation queue if needed
if self._operation_queue is not None:
self._operation_queue.wait()
for index, item in enumerate(self._file_list, 1):
if self._abort.is_set(): break # abort operation if requested
self._can_continue.wait() # pause lock
gobject.idle_add(self._dialog.set_current_file, item[0])
self._rename_path(item[0], item[1], index-1)
# update current count
if len(self._file_list) > 0:
gobject.idle_add(
self._dialog.set_current_file_fraction,
float(index) / len(self._file_list)
)
else:
# prevent division by zero
gobject.idle_add(self._dialog.set_current_file_fraction, 1)
# notify user if window is not focused
with gtk.gdk.lock:
if not self._dialog.is_active() and not self._application.is_active() and not self._abort.is_set():
notify_manager = self._application.notification_manager
title = _('Rename Operation')
message = ngettext(
'Rename of {0} item from "{1}" is completed!',
'Rename of {0} items from "{1}" is completed!',
len(self._file_list)
).format(
len(self._file_list),
os.path.basename(self._source_path)
)
# queue notification
notify_manager.notify(title, message)
# destroy dialog
self._destroy_ui()
# start next operation
if self._operation_queue is not None:
OperationQueue.start_next(self._operation_queue_name)
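# A minimal usage sketch of the class above, assuming `app`, `provider` and
# `path` come from the surrounding file manager; Operation subclasses
# threading.Thread elsewhere in this code base, so start() executes run():
#
#   pairs = [('draft.txt', 'final.txt'), ('img1.png', 'cover.png')]
#   operation = RenameOperation(app, provider, path, pairs)
#   operation.start()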
| gpl-3.0 | -8,770,339,951,091,888,000 | 29.236694 | 114 | 0.676711 | false | 3.389583 | false | false | false |
sujithshankar/anaconda | pyanaconda/constants.py | 1 | 6817 | #
# constants.py: anaconda constants
#
# Copyright (C) 2001 Red Hat, Inc. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Used for digits, ascii_letters, punctuation constants
import string # pylint: disable=deprecated-module
from pyanaconda.i18n import N_
# Use -1 to indicate that the selinux configuration is unset
SELINUX_DEFAULT = -1
# where to look for 3rd party addons
ADDON_PATHS = ["/usr/share/anaconda/addons"]
from pykickstart.constants import AUTOPART_TYPE_LVM
# common string needs to be easy to change
from pyanaconda import product
productName = product.productName
productVersion = product.productVersion
productArch = product.productArch
bugzillaUrl = product.bugUrl
isFinal = product.isFinal
# for use in device names, eg: "fedora", "rhel"
shortProductName = productName.lower() # pylint: disable=no-member
if productName.count(" "): # pylint: disable=no-member
shortProductName = ''.join(s[0] for s in shortProductName.split())
# DriverDisc Paths
DD_ALL = "/tmp/DD"
DD_FIRMWARE = "/tmp/DD/lib/firmware"
DD_RPMS = "/tmp/DD-*"
TRANSLATIONS_UPDATE_DIR = "/tmp/updates/po"
ANACONDA_CLEANUP = "anaconda-cleanup"
MOUNT_DIR = "/run/install"
DRACUT_REPODIR = "/run/install/repo"
DRACUT_ISODIR = "/run/install/source"
ISO_DIR = MOUNT_DIR + "/isodir"
IMAGE_DIR = MOUNT_DIR + "/image"
INSTALL_TREE = MOUNT_DIR + "/source"
BASE_REPO_NAME = "anaconda"
# NOTE: this should be LANG_TERRITORY.CODESET, e.g. en_US.UTF-8
DEFAULT_LANG = "en_US.UTF-8"
DEFAULT_VC_FONT = "eurlatgr"
DEFAULT_KEYBOARD = "us"
DRACUT_SHUTDOWN_EJECT = "/run/initramfs/usr/lib/dracut/hooks/shutdown/99anaconda-eject.sh"
# VNC questions
USEVNC = N_("Start VNC")
USETEXT = N_("Use text mode")
# Runlevel files
RUNLEVELS = {3: 'multi-user.target', 5: 'graphical.target'}
# Network
NETWORK_CONNECTION_TIMEOUT = 45 # in seconds
NETWORK_CONNECTED_CHECK_INTERVAL = 0.1 # in seconds
# DBus
DEFAULT_DBUS_TIMEOUT = -1 # use default
# Thread names
THREAD_EXECUTE_STORAGE = "AnaExecuteStorageThread"
THREAD_STORAGE = "AnaStorageThread"
THREAD_STORAGE_WATCHER = "AnaStorageWatcher"
THREAD_CHECK_STORAGE = "AnaCheckStorageThread"
THREAD_CUSTOM_STORAGE_INIT = "AnaCustomStorageInit"
THREAD_WAIT_FOR_CONNECTING_NM = "AnaWaitForConnectingNMThread"
THREAD_PAYLOAD = "AnaPayloadThread"
THREAD_PAYLOAD_RESTART = "AnaPayloadRestartThread"
THREAD_INPUT_BASENAME = "AnaInputThread"
THREAD_SYNC_TIME_BASENAME = "AnaSyncTime"
THREAD_EXCEPTION_HANDLING_TEST = "AnaExceptionHandlingTest"
THREAD_LIVE_PROGRESS = "AnaLiveProgressThread"
THREAD_SOFTWARE_WATCHER = "AnaSoftwareWatcher"
THREAD_CHECK_SOFTWARE = "AnaCheckSoftwareThread"
THREAD_SOURCE_WATCHER = "AnaSourceWatcher"
THREAD_INSTALL = "AnaInstallThread"
THREAD_CONFIGURATION = "AnaConfigurationThread"
THREAD_FCOE = "AnaFCOEThread"
THREAD_ISCSI_DISCOVER = "AnaIscsiDiscoverThread"
THREAD_ISCSI_LOGIN = "AnaIscsiLoginThread"
THREAD_GEOLOCATION_REFRESH = "AnaGeolocationRefreshThread"
THREAD_DATE_TIME = "AnaDateTimeThread"
THREAD_TIME_INIT = "AnaTimeInitThread"
THREAD_DASDFMT = "AnaDasdfmtThread"
THREAD_KEYBOARD_INIT = "AnaKeyboardThread"
THREAD_ADD_LAYOUTS_INIT = "AnaAddLayoutsInitThread"
# Geolocation constants
# geolocation providers
# - values are used by the geoloc CLI/boot option
GEOLOC_PROVIDER_FEDORA_GEOIP = "provider_fedora_geoip"
GEOLOC_PROVIDER_HOSTIP = "provider_hostip"
GEOLOC_PROVIDER_GOOGLE_WIFI = "provider_google_wifi"
# geocoding provider
GEOLOC_GEOCODER_NOMINATIM = "geocoder_nominatim"
# default providers
GEOLOC_DEFAULT_PROVIDER = GEOLOC_PROVIDER_FEDORA_GEOIP
GEOLOC_DEFAULT_GEOCODER = GEOLOC_GEOCODER_NOMINATIM
# timeout (in seconds)
GEOLOC_TIMEOUT = 3
ANACONDA_ENVIRON = "anaconda"
FIRSTBOOT_ENVIRON = "firstboot"
# Tainted hardware
UNSUPPORTED_HW = 1 << 28
# Password validation
PASSWORD_MIN_LEN = 8
PASSWORD_EMPTY_ERROR = N_("The password is empty.")
PASSWORD_CONFIRM_ERROR_GUI = N_("The passwords do not match.")
PASSWORD_CONFIRM_ERROR_TUI = N_("The passwords you entered were different. Please try again.")
PASSWORD_WEAK = N_("The password you have provided is weak. %s")
PASSWORD_WEAK_WITH_ERROR = N_("The password you have provided is weak: %s.")
PASSWORD_WEAK_CONFIRM = N_("You have provided a weak password. Press Done again to use anyway.")
PASSWORD_WEAK_CONFIRM_WITH_ERROR = N_("You have provided a weak password: %s. Press Done again to use anyway.")
PASSWORD_ASCII = N_("The password you have provided contains non-ASCII characters. You may not be able to switch between keyboard layouts to login. Press Done to continue.")
PASSWORD_DONE_TWICE = N_("You will have to press Done twice to confirm it.")
PASSWORD_STRENGTH_DESC = [N_("Empty"), N_("Weak"), N_("Fair"), N_("Good"), N_("Strong")]
# the number of seconds we consider a noticeable freeze of the UI
NOTICEABLE_FREEZE = 0.1
# all ASCII characters
PW_ASCII_CHARS = string.digits + string.ascii_letters + string.punctuation + " "
# Recognizing a tarfile
TAR_SUFFIX = (".tar", ".tbz", ".tgz", ".txz", ".tar.bz2", ".tar.gz", ".tar.xz")
# screenshots
SCREENSHOTS_DIRECTORY = "/tmp/anaconda-screenshots"
SCREENSHOTS_TARGET_DIRECTORY = "/root/anaconda-screenshots"
# cmdline arguments that append instead of overwrite
CMDLINE_APPEND = ["modprobe.blacklist", "ifname"]
DEFAULT_AUTOPART_TYPE = AUTOPART_TYPE_LVM
# Default to these units when reading user input when no units given
SIZE_UNITS_DEFAULT = "MiB"
# Constants for reporting status to IPMI. These are from the IPMI spec v2 rev1.1, page 512.
IPMI_STARTED = 0x7 # installation started
IPMI_FINISHED = 0x8 # installation finished successfully
IPMI_ABORTED = 0x9 # installation finished unsuccessfully, due to some non-exn error
IPMI_FAILED = 0xA # installation hit an exception
# for how long (in seconds) we try to wait for enough entropy for LUKS
# keep this a multiple of 60 (minutes)
MAX_ENTROPY_WAIT = 10 * 60
# X display number to use
X_DISPLAY_NUMBER = 1
# Payload status messages
PAYLOAD_STATUS_PROBING_STORAGE = N_("Probing storage...")
PAYLOAD_STATUS_PACKAGE_MD = N_("Downloading package metadata...")
PAYLOAD_STATUS_GROUP_MD = N_("Downloading group metadata...")
# Window title text
WINDOW_TITLE_TEXT = N_("Anaconda Installer")
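# A minimal sketch of how the password constants above are typically consumed;
# validate_password is a hypothetical helper, not part of anaconda's API:
#
#   def validate_password(pw):
#       if not pw:
#           return PASSWORD_EMPTY_ERROR
#       if len(pw) < PASSWORD_MIN_LEN:
#           return PASSWORD_WEAK % "it is too short"
#       if any(c not in PW_ASCII_CHARS for c in pw):
#           return PASSWORD_ASCII
#       return None  # password is acceptable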
| gpl-2.0 | 920,207,107,352,325,500 | 35.068783 | 173 | 0.74945 | false | 3.260163 | false | false | false |
mwclient/mwclient | mwclient/page.py | 1 | 20723 | import six
from six import text_type
import time
from mwclient.util import parse_timestamp
import mwclient.listing
import mwclient.errors
class Page(object):
def __init__(self, site, name, info=None, extra_properties=None):
if type(name) is type(self):
self.__dict__.update(name.__dict__)
return
self.site = site
self.name = name
self._textcache = {}
if not info:
if extra_properties:
prop = 'info|' + '|'.join(six.iterkeys(extra_properties))
extra_props = []
for extra_prop in six.itervalues(extra_properties):
extra_props.extend(extra_prop)
else:
prop = 'info'
extra_props = ()
if type(name) is int:
info = self.site.get('query', prop=prop, pageids=name,
inprop='protection', *extra_props)
else:
info = self.site.get('query', prop=prop, titles=name,
inprop='protection', *extra_props)
info = six.next(six.itervalues(info['query']['pages']))
self._info = info
if 'invalid' in info:
raise mwclient.errors.InvalidPageTitle(info.get('invalidreason'))
self.namespace = info.get('ns', 0)
self.name = info.get('title', u'')
if self.namespace:
self.page_title = self.strip_namespace(self.name)
else:
self.page_title = self.name
self.base_title = self.page_title.split('/')[0]
self.base_name = self.name.split('/')[0]
self.touched = parse_timestamp(info.get('touched'))
self.revision = info.get('lastrevid', 0)
self.exists = 'missing' not in info
self.length = info.get('length')
self.protection = {
i['type']: (i['level'], i['expiry'])
for i in info.get('protection', ())
if i
}
self.redirect = 'redirect' in info
self.pageid = info.get('pageid', None)
self.contentmodel = info.get('contentmodel', None)
self.pagelanguage = info.get('pagelanguage', None)
self.restrictiontypes = info.get('restrictiontypes', None)
self.last_rev_time = None
self.edit_time = None
def redirects_to(self):
""" Get the redirect target page, or None if the page is not a redirect."""
info = self.site.get('query', prop='pageprops', titles=self.name, redirects='')
if 'redirects' in info['query']:
for page in info['query']['redirects']:
if page['from'] == self.name:
return Page(self.site, page['to'])
return None
else:
return None
def resolve_redirect(self):
""" Get the redirect target page, or the current page if its not a redirect."""
target_page = self.redirects_to()
if target_page is None:
return self
else:
return target_page
def __repr__(self):
return "<Page object '%s' for %s>" % (self.name.encode('utf-8'), self.site)
def __unicode__(self):
return self.name
@staticmethod
def strip_namespace(title):
if title[0] == ':':
title = title[1:]
return title[title.find(':') + 1:]
@staticmethod
def normalize_title(title):
# TODO: Make site dependent
title = title.strip()
if title[0] == ':':
title = title[1:]
title = title[0].upper() + title[1:]
title = title.replace(' ', '_')
return title
def can(self, action):
"""Check if the current user has the right to carry out some action
with the current page.
Example:
>>> page.can('edit')
True
"""
level = self.protection.get(action, (action,))[0]
if level == 'sysop':
level = 'editprotected'
return level in self.site.rights
def get_token(self, type, force=False):
return self.site.get_token(type, force, title=self.name)
def text(self, section=None, expandtemplates=False, cache=True, slot='main'):
"""Get the current wikitext of the page, or of a specific section.
If the page does not exist, an empty string is returned. By
default, results will be cached and if you call text() again
with the same section and expandtemplates the result will come
from the cache. The cache is stored on the instance, so it
lives as long as the instance does.
Args:
section (int): Section number, to only get text from a single section.
expandtemplates (bool): Expand templates (default: `False`)
cache (bool): Use in-memory caching (default: `True`)
"""
if not self.can('read'):
raise mwclient.errors.InsufficientPermission(self)
if not self.exists:
return u''
if section is not None:
section = text_type(section)
key = hash((section, expandtemplates))
if cache and key in self._textcache:
return self._textcache[key]
revs = self.revisions(prop='content|timestamp', limit=1, section=section,
slots=slot)
try:
rev = next(revs)
if 'slots' in rev:
text = rev['slots'][slot]['*']
else:
text = rev['*']
self.last_rev_time = rev['timestamp']
except StopIteration:
text = u''
self.last_rev_time = None
if not expandtemplates:
self.edit_time = time.gmtime()
else:
# The 'rvexpandtemplates' option was removed in MediaWiki 1.32, so we have to
# make an extra API call, see https://github.com/mwclient/mwclient/issues/214
text = self.site.expandtemplates(text)
if cache:
self._textcache[key] = text
return text
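    # Illustrative calls to the method above:
    #   page.text()                      # full wikitext, cached per instance
    #   page.text(section=0)             # wikitext of the lead section only
    #   page.text(expandtemplates=True)  # expanded through an extra API call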
def save(self, *args, **kwargs):
"""Alias for edit, for maintaining backwards compatibility."""
return self.edit(*args, **kwargs)
def edit(self, text, summary=u'', minor=False, bot=True, section=None, **kwargs):
"""Update the text of a section or the whole page by performing an edit operation.
"""
return self._edit(summary, minor, bot, section, text=text, **kwargs)
def append(self, text, summary=u'', minor=False, bot=True, section=None,
**kwargs):
"""Append text to a section or the whole page by performing an edit operation.
"""
return self._edit(summary, minor, bot, section, appendtext=text, **kwargs)
def prepend(self, text, summary=u'', minor=False, bot=True, section=None,
**kwargs):
"""Prepend text to a section or the whole page by performing an edit operation.
"""
return self._edit(summary, minor, bot, section, prependtext=text, **kwargs)
def _edit(self, summary, minor, bot, section, **kwargs):
if not self.site.logged_in and self.site.force_login:
raise mwclient.errors.AssertUserFailedError()
if self.site.blocked:
raise mwclient.errors.UserBlocked(self.site.blocked)
if not self.can('edit'):
raise mwclient.errors.ProtectedPageError(self)
if not self.site.writeapi:
raise mwclient.errors.NoWriteApi(self)
data = {}
if minor:
data['minor'] = '1'
if not minor:
data['notminor'] = '1'
if self.last_rev_time:
data['basetimestamp'] = time.strftime('%Y%m%d%H%M%S', self.last_rev_time)
if self.edit_time:
data['starttimestamp'] = time.strftime('%Y%m%d%H%M%S', self.edit_time)
if bot:
data['bot'] = '1'
if section is not None:
data['section'] = section
data.update(kwargs)
if self.site.force_login:
data['assert'] = 'user'
def do_edit():
result = self.site.post('edit', title=self.name, summary=summary,
token=self.get_token('edit'),
**data)
if result['edit'].get('result').lower() == 'failure':
raise mwclient.errors.EditError(self, result['edit'])
return result
try:
result = do_edit()
except mwclient.errors.APIError as e:
if e.code == 'badtoken':
# Retry, but only once to avoid an infinite loop
self.get_token('edit', force=True)
try:
result = do_edit()
except mwclient.errors.APIError as e:
self.handle_edit_error(e, summary)
else:
self.handle_edit_error(e, summary)
# 'newtimestamp' is not included if no change was made
if 'newtimestamp' in result['edit'].keys():
self.last_rev_time = parse_timestamp(result['edit'].get('newtimestamp'))
# Workaround for https://phabricator.wikimedia.org/T211233
for cookie in self.site.connection.cookies:
if 'PostEditRevision' in cookie.name:
self.site.connection.cookies.clear(cookie.domain, cookie.path,
cookie.name)
# clear the page text cache
self._textcache = {}
return result['edit']
def handle_edit_error(self, e, summary):
if e.code == 'editconflict':
raise mwclient.errors.EditError(self, summary, e.info)
elif e.code in {'protectedtitle', 'cantcreate', 'cantcreate-anon',
'noimageredirect-anon', 'noimageredirect', 'noedit-anon',
'noedit', 'protectedpage', 'cascadeprotected',
'customcssjsprotected',
'protectednamespace-interface', 'protectednamespace'}:
raise mwclient.errors.ProtectedPageError(self, e.code, e.info)
elif e.code == 'assertuserfailed':
raise mwclient.errors.AssertUserFailedError()
else:
raise e
def touch(self):
"""Perform a "null edit" on the page to update the wiki's cached data of it.
This is useful in contrast to purge when needing to update stored data on a wiki,
for example Semantic MediaWiki properties or Cargo table values, since purge
only forces update of a page's displayed values and not its store.
"""
if not self.exists:
return
self.append('')
def move(self, new_title, reason='', move_talk=True, no_redirect=False):
"""Move (rename) page to new_title.
If user account is an administrator, specify no_redirect as True to not
leave a redirect.
If user does not have permission to move page, an InsufficientPermission
exception is raised.
"""
if not self.can('move'):
raise mwclient.errors.InsufficientPermission(self)
if not self.site.writeapi:
raise mwclient.errors.NoWriteApi(self)
data = {}
if move_talk:
data['movetalk'] = '1'
if no_redirect:
data['noredirect'] = '1'
result = self.site.post('move', ('from', self.name), to=new_title,
token=self.get_token('move'), reason=reason, **data)
return result['move']
def delete(self, reason='', watch=False, unwatch=False, oldimage=False):
"""Delete page.
If user does not have permission to delete page, an InsufficientPermission
exception is raised.
"""
if not self.can('delete'):
raise mwclient.errors.InsufficientPermission(self)
if not self.site.writeapi:
raise mwclient.errors.NoWriteApi(self)
data = {}
if watch:
data['watch'] = '1'
if unwatch:
data['unwatch'] = '1'
if oldimage:
data['oldimage'] = oldimage
result = self.site.post('delete', title=self.name,
token=self.get_token('delete'),
reason=reason, **data)
return result['delete']
def purge(self):
"""Purge server-side cache of page. This will re-render templates and other
dynamic content.
"""
self.site.post('purge', titles=self.name)
# def watch: requires 1.14
# Properties
def backlinks(self, namespace=None, filterredir='all', redirect=False,
limit=None, generator=True):
"""List pages that link to the current page, similar to Special:Whatlinkshere.
API doc: https://www.mediawiki.org/wiki/API:Backlinks
"""
prefix = mwclient.listing.List.get_prefix('bl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(
prefix, namespace=namespace, filterredir=filterredir,
))
if redirect:
kwargs['%sredirect' % prefix] = '1'
kwargs[prefix + 'title'] = self.name
return mwclient.listing.List.get_list(generator)(
self.site, 'backlinks', 'bl', limit=limit, return_values='title',
**kwargs
)
def categories(self, generator=True, show=None):
"""List categories used on the current page.
API doc: https://www.mediawiki.org/wiki/API:Categories
Args:
generator (bool): Return generator (Default: True)
show (str): Set to 'hidden' to only return hidden categories
or '!hidden' to only return non-hidden ones.
Returns:
mwclient.listings.PagePropertyGenerator
"""
prefix = mwclient.listing.List.get_prefix('cl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(
prefix, show=show
))
if generator:
return mwclient.listing.PagePropertyGenerator(
self, 'categories', 'cl', **kwargs
)
else:
# TODO: return sortkey if wanted
return mwclient.listing.PageProperty(
self, 'categories', 'cl', return_values='title', **kwargs
)
def embeddedin(self, namespace=None, filterredir='all', limit=None, generator=True):
"""List pages that transclude the current page.
API doc: https://www.mediawiki.org/wiki/API:Embeddedin
Args:
namespace (int): Restricts search to a given namespace (Default: None)
filterredir (str): How to filter redirects, either 'all' (default),
'redirects' or 'nonredirects'.
limit (int): Maximum amount of pages to return per request
generator (bool): Return generator (Default: True)
Returns:
mwclient.listings.List: Page iterator
"""
prefix = mwclient.listing.List.get_prefix('ei', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(prefix, namespace=namespace,
filterredir=filterredir))
kwargs[prefix + 'title'] = self.name
return mwclient.listing.List.get_list(generator)(
self.site, 'embeddedin', 'ei', limit=limit, return_values='title',
**kwargs
)
def extlinks(self):
"""List external links from the current page.
API doc: https://www.mediawiki.org/wiki/API:Extlinks
"""
return mwclient.listing.PageProperty(self, 'extlinks', 'el', return_values='*')
def images(self, generator=True):
"""List files/images embedded in the current page.
API doc: https://www.mediawiki.org/wiki/API:Images
"""
if generator:
return mwclient.listing.PagePropertyGenerator(self, 'images', '')
else:
return mwclient.listing.PageProperty(self, 'images', '',
return_values='title')
def iwlinks(self):
"""List interwiki links from the current page.
API doc: https://www.mediawiki.org/wiki/API:Iwlinks
"""
return mwclient.listing.PageProperty(self, 'iwlinks', 'iw',
return_values=('prefix', '*'))
def langlinks(self, **kwargs):
"""List interlanguage links from the current page.
API doc: https://www.mediawiki.org/wiki/API:Langlinks
"""
return mwclient.listing.PageProperty(self, 'langlinks', 'll',
return_values=('lang', '*'),
**kwargs)
def links(self, namespace=None, generator=True, redirects=False):
"""List links to other pages from the current page.
API doc: https://www.mediawiki.org/wiki/API:Links
"""
prefix = mwclient.listing.List.get_prefix('pl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(prefix, namespace=namespace))
if redirects:
kwargs['redirects'] = '1'
if generator:
return mwclient.listing.PagePropertyGenerator(self, 'links', 'pl', **kwargs)
else:
return mwclient.listing.PageProperty(self, 'links', 'pl',
return_values='title', **kwargs)
def revisions(self, startid=None, endid=None, start=None, end=None,
dir='older', user=None, excludeuser=None, limit=50,
prop='ids|timestamp|flags|comment|user',
expandtemplates=False, section=None,
diffto=None, slots=None, uselang=None):
"""List revisions of the current page.
API doc: https://www.mediawiki.org/wiki/API:Revisions
Args:
startid (int): Revision ID to start listing from.
endid (int): Revision ID to stop listing at.
start (str): Timestamp to start listing from.
end (str): Timestamp to end listing at.
dir (str): Direction to list in: 'older' (default) or 'newer'.
user (str): Only list revisions made by this user.
excludeuser (str): Exclude revisions made by this user.
limit (int): The maximum number of revisions to return per request.
prop (str): Which properties to get for each revision,
default: 'ids|timestamp|flags|comment|user'
expandtemplates (bool): Expand templates in rvprop=content output
section (int): Section number. If rvprop=content is set, only the contents
of this section will be retrieved.
diffto (str): Revision ID to diff each revision to. Use "prev", "next" and
"cur" for the previous, next and current revision respectively.
slots (str): The content slot (Mediawiki >= 1.32) to retrieve content from.
uselang (str): Language to use for parsed edit comments and other localized
messages.
Returns:
mwclient.listings.List: Revision iterator
"""
kwargs = dict(mwclient.listing.List.generate_kwargs(
'rv', startid=startid, endid=endid, start=start, end=end, user=user,
excludeuser=excludeuser, diffto=diffto, slots=slots
))
if self.site.version[:2] < (1, 32) and 'rvslots' in kwargs:
# https://github.com/mwclient/mwclient/issues/199
del kwargs['rvslots']
kwargs['rvdir'] = dir
kwargs['rvprop'] = prop
kwargs['uselang'] = uselang
if expandtemplates:
kwargs['rvexpandtemplates'] = '1'
if section is not None:
kwargs['rvsection'] = section
return mwclient.listing.RevisionsIterator(self, 'revisions', 'rv', limit=limit,
**kwargs)
def templates(self, namespace=None, generator=True):
"""List templates used on the current page.
API doc: https://www.mediawiki.org/wiki/API:Templates
"""
prefix = mwclient.listing.List.get_prefix('tl', generator)
kwargs = dict(mwclient.listing.List.generate_kwargs(prefix, namespace=namespace))
if generator:
return mwclient.listing.PagePropertyGenerator(self, 'templates', prefix,
**kwargs)
else:
return mwclient.listing.PageProperty(self, 'templates', prefix,
return_values='title', **kwargs)
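# A minimal usage sketch; the wiki hostname and page title are placeholders:
#
#   import mwclient
#   site = mwclient.Site('en.wikipedia.org')
#   page = site.pages['Sandbox']                     # a Page instance
#   text = page.text()                               # cached wikitext
#   page.edit(text + '\ntest', summary='demo edit', minor=True)
#   for rev in page.revisions(limit=5, prop='ids|user|comment'):
#       print(rev['revid'], rev['user'])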
| mit | 138,916,023,745,932,510 | 37.304991 | 90 | 0.564059 | false | 4.30742 | false | false | false |
dmittov/AlcoBot | bot.py | 1 | 2312 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
import logging
import telegram
import cocktail
from time import sleep
from urllib2 import URLError
def main():
logging.basicConfig(
level=logging.DEBUG,
filename='debug.log',
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# Telegram Bot Authorization Token
TOKEN = None
with open('prod.token') as fh:
        TOKEN = fh.readline().strip()
logging.info(TOKEN)
bot = telegram.Bot(TOKEN)
try:
update_id = bot.getUpdates()[0].update_id
except IndexError:
update_id = None
while True:
try:
update_id = response(bot, update_id)
except telegram.TelegramError as e:
# These are network problems with Telegram.
if e.message in ("Bad Gateway", "Timed out"):
sleep(1)
elif e.message == "Unauthorized":
# The user has removed or blocked the bot.
update_id += 1
else:
raise e
except URLError as e:
sleep(1)
def response(bot, update_id):
# Request updates after the last update_id
for update in bot.getUpdates(offset=update_id, timeout=10):
# chat_id is required to reply to any message
chat_id = update.message.chat_id
update_id = update.update_id + 1
try:
message = cocktail.coctail_msg(update.message.text)
except Exception as e:
message = e.message
if message:
bot.sendMessage(chat_id=chat_id,
text=message)
return update_id
if __name__ == '__main__':
main()
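# The polling loop above only requires cocktail.coctail_msg to return a reply
# string, or something falsy to stay silent. A stub honouring that contract,
# useful for local testing (the module contents here are an assumption):
#
#   # cocktail.py
#   def coctail_msg(text):
#       if text and text.startswith(u'/cocktail'):
#           return u'Try a Negroni: gin, sweet vermouth, Campari.'
#       return None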
| gpl-3.0 | 4,792,354,141,523,781,000 | 30.243243 | 71 | 0.62154 | false | 4.077601 | false | false | false |
nullzero/wprobot | wp/ltime.py | 1 | 1990 |
# -*- coding: utf-8 -*-
"""
Library to manage everything about date and time.
"""
__version__ = "1.0.2"
__author__ = "Sorawee Porncharoenwase"
import datetime
import time
def wrapMonth(m):
"""Convert zero-based month number to zero-based month number."""
m -= 1
if m < 0:
m += 12
if m >= 12:
m -= 12
return m
def weekdayThai(d):
"""Return Thai name of days of the week."""
return map(lambda x: u"วัน" + x,
[u"จันทร์", u"อังคาร", u"พุธ", u"พฤหัสบดี", u"ศุกร์",
u"เสาร์", u"อาทิตย์"])[d]
def monthEng(m):
"""Return English name of month."""
return [u"January", u"February", u"March", u"April", u"May", u"June",
u"July", u"August", u"September", u"October", u"November",
u"December"][wrapMonth(m)]
def monthThai(m):
"""Return Thai name of month."""
return [u"มกราคม", u"กุมภาพันธ์", u"มีนาคม", u"เมษายน", u"พฤษภาคม",
u"มิถุนายน", u"กรกฎาคม", u"สิงหาคม", u"กันยายน", u"ตุลาคม",
u"พฤศจิกายน", u"ธันวาคม"][wrapMonth(m)]
def monthThaiAbbr(m):
"""Return Thai abbreviated name of month."""
return [u"ม.ค.", u"ก.พ.", u"มี.ค.", u"เม.ย.", u"พ.ค.", u"มิ.ย.",
u"ก.ค.", u"ส.ค.", u"ก.ย.", u"ต.ค.", u"พ.ย.", u"ธ.ค."][wrapMonth(m)]
def getNumDay(year, month):
"""Return length of day in given month"""
if month == 2:
if year % 400 == 0:
return 29
elif year % 100 == 0:
return 28
elif year % 4 == 0:
return 29
else:
return 28
return [0, 31, 0, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31][month]
td = datetime.timedelta
sleep = time.sleep
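# Expected behaviour of the helpers above (illustrative):
#
#   >>> getNumDay(2000, 2)   # divisible by 400, hence a leap year
#   29
#   >>> getNumDay(1900, 2)   # divisible by 100 but not by 400
#   28
#   >>> monthEng(1)          # one-based input, wrapped to index 0
#   u'January'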
| mit | 5,983,318,943,164,025,000 | 27.440678 | 79 | 0.51907 | false | 1.707019 | false | false | false |
Gr8z/Legend-Cogs | profanity/profanity.py | 1 | 2085 |
import discord
from discord.ext import commands
from .utils.dataIO import dataIO, fileIO
import os
import asyncio
BOTCOMMANDER_ROLES = ["Family Representative", "Clan Manager", "Clan Deputy", "Co-Leader", "Hub Officer", "admin"]
class profanity:
"""profanity!"""
def __init__(self, bot):
self.bot = bot
self.bannedwords = dataIO.load_json('data/Profanity/banned_words.json')
async def banned_words(self, message):
word_set = set(self.bannedwords)
phrase_set = set(message.content.replace("*", "").replace("_", "").replace("#", "").split())
if word_set.intersection(phrase_set):
await self.bot.delete_message(message)
msg = await self.bot.send_message(
message.channel,
"{}, **We do not allow Hateful, obscene, offensive, racist, sexual, or violent words in any public channels.**".format(
message.author.mention
)
)
await asyncio.sleep(6)
await self.bot.delete_message(msg)
return
async def on_message_edit(self, before, after):
await self.banned_words(after)
async def on_message(self, message):
server = message.server
author = message.author
if message.author.id == self.bot.user.id:
return
botcommander_roles = [discord.utils.get(server.roles, name=r) for r in BOTCOMMANDER_ROLES]
botcommander_roles = set(botcommander_roles)
author_roles = set(author.roles)
if len(author_roles.intersection(botcommander_roles)):
return
await self.banned_words(message)
def check_folders():
if not os.path.exists("data/Profanity"):
print("Creating data/Profanity folder...")
os.makedirs("data/Profanity")
def check_files():
f = "data/Profanity/banned_words.json"
if not fileIO(f, "check"):
print("Creating empty banned_words.json...")
fileIO(f, "save", [])
def setup(bot):
check_folders()
check_files()
    bot.add_cog(profanity(bot))
| mit | 7,216,413,436,609,016,000 | 31.092308 | 135 | 0.61295 | false | 3.619792 | false | false | false |
samuelfekete/Pythonometer | tests/test_questions.py | 1 | 1786 |
"""Test all questions."""
import os
import sys
import unittest
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
from pythonometer.quiz import all_questions
from pythonometer.questions.base import WrongAnswer
class TestQuestions(unittest.TestCase):
"""Test the questions.
All question tests are the same, so they are loaded dynamically.
"""
pass
# Add a test for every question.
for question in all_questions():
def question_test(self, question=question):
current_question = question()
# Assert that a question string is supplied.
question_string = current_question.get_question_text()
self.assertIsInstance(question_string, basestring)
# Assert that at least one correct answer is given.
self.assert_(current_question.get_correct_answers())
# Assert that checking with the correct answers returns True.
for correct_answer in current_question.get_correct_answers():
self.assert_(current_question.check_answer(correct_answer))
# Assert that checking with the wrong answers raises WrongAnswer.
for wrong_answer in current_question.get_wrong_answers():
with self.assertRaises(WrongAnswer):
current_question.check_answer(wrong_answer)
# Assert that checking a wrong answer raises WrongAnswer.
with self.assertRaises(WrongAnswer):
current_question.check_answer('')
# Assert that checking the answer with bad code raises WrongAnswer.
with self.assertRaises(WrongAnswer):
current_question.check_answer('raise Exception')
setattr(TestQuestions, 'test_{}'.format(question.__name__), question_test)
if __name__ == '__main__':
unittest.main()
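# A minimal (hypothetical) question satisfying the interface exercised above;
# the real question classes live in pythonometer.questions:
#
#   class AdditionQuestion(Question):
#       def get_question_text(self):
#           return 'What is 1 + 1?'
#
#       def get_correct_answers(self):
#           return ['2']
#
#       def get_wrong_answers(self):
#           return ['3', 'two']
#
#       def check_answer(self, answer):
#           if str(answer).strip() != '2':
#               raise WrongAnswer()
#           return True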
| mit | 8,631,838,962,788,521,000 | 32.074074 | 82 | 0.68645 | false | 4.409877 | true | false | false |
iocast/poiservice | lib/FilterEncodingWizard.py | 1 | 2742 |
'''
Created on May 16, 2011
@author: michel
'''
import json
class FilterEncodingWizard(object):
    comparison = [{
'value' : 'PropertyIsEqualTo',
'display' : '=',
'xml' : '<PropertyIsEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsEqualTo>'},
{'value' : 'PropertyIsNotEqualTo',
'display' : '!=',
'xml' : '<PropertyIsNotEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsNotEqualTo>'},
{'value' : 'PropertyIsLessThan',
'display' : '<',
'xml' : '<PropertyIsLessThan><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsLessThan>'},
{'value' : 'PropertyIsGreaterThan',
'display' : '>',
'xml' : '<PropertyIsGreaterThan><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsGreaterThan>'},
{'value' : 'PropertyIsLessThanOrEqualTo',
'display' : '<=',
'xml' : '<PropertyIsLessThanOrEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsLessThanOrEqualTo>'},
{'value' : 'PropertyIsGreaterThanOrEqualTo',
'display' : '>=',
'xml' : '<PropertyIsGreaterThanOrEqualTo><PropertyName>${value}</PropertyName><Literal>${literal}</Literal></PropertyIsGreaterThanOrEqualTo>'}
#{'value' : 'PropertyIsLike',
# 'display' : 'Like',
# 'xml' : ''},
#{'value' : 'PropertyIsBetween',
# 'display' : 'Between',
# 'xml' : ''},
#{'value' : 'PropertyIsNull',
# 'display' : 'Nul',
# 'xml' : ''}
]
logical = [
{'value' : 'Or',
'display' : 'or',
'xml' : '<Or>${statement}</Or>'},
{
'value' : 'And',
'display' : 'and',
'xml' : '<And>${statement}</And>'}
]
def comparisonToJson(self):
        return json.dumps(self.comparison)
def comparisonToHTML(self):
html = '<select onChange="javascript:queryBuilder.operatorChanged(this);">'
        for value in self.comparison:
html += '<option value="' + value['value'] + '">' + value['display'] + '</option>'
html += '</select>'
return html
def logicalToJson(self):
return json.dumps(self.logical)
def logicalToHTML(self):
html = '<select>'
for value in self.logical:
html += '<option value="' + value['value'] + '">' + value['display'] + '</option>'
html += '</select>'
        return html
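# Illustrative use of the wizard above:
#
#   wizard = FilterEncodingWizard()
#   wizard.comparisonToJson()   # JSON string of the comparison operator table
#   wizard.logicalToHTML()      # '<select><option value="Or">or</option>...'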
| mit | -6,222,942,528,908,910,000 | 39.338235 | 155 | 0.522611 | false | 4.098655 | false | false | false |
cardmaster/makeclub | controlers/activity.py | 1 | 10024 |
'''Copyright(C): Leaf Johnson 2011
This file is part of makeclub.
makeclub is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
makeclub is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with makeclub. If not, see <http://www.gnu.org/licenses/>.
'''
from google.appengine.api.users import get_current_user, create_login_url, User
from google.appengine.ext import webapp
from google.appengine.ext import db
from errors import errorPage
from infopage import infoPage
from access import hasActPrivilige, hasClubPrivilige
from models import Activity, Membership, Club, ActivityParticipator, ActivityBill
from url import urldict
from template import render
class ActivityBase(webapp.RequestHandler):
def __init__(self, *args, **kw):
super(ActivityBase, self).__init__(*args, **kw)
self.actobj = None
def getActModel(self):
aid, = self.urlcfg.analyze(self.request.path)
if (aid):
id = int(aid)
return Activity.get_by_id(id)
else:
return None
def actionPath(self):
return self.request.path
def templateParams(self):
act = self.actobj
club = act.club
cluburl = urldict['ClubView'].path(club.slug)
templateVars = dict(club = club, cluburl = cluburl, act = act, action = self.actionPath() )
return templateVars
def makeResponseText(self, act):
templateVars = self.templateParams()
return render(self.template, templateVars, self.request.url)
def checkPrivilige(self):
user = get_current_user()
if (not user):
errorPage ( self.response, "Not login", create_login_url(self.request.url), 403)
return False
if (not hasActPrivilige(user, self.actobj, self.actOperation)):
errorPage ( self.response, "Not authorrized", urldict['ClubView'].path(self.actobj.club.slug), 403)
return False
return True
def dbg(self, *args):
return #Clean up debug code
self.response.out.write (" ".join([str(arg) for arg in args]))
self.response.out.write ("<br />\n")
def get(self, *args):
actobj = self.getActModel()
if (actobj):
self.actobj = actobj
if (self.checkPrivilige()):
self.response.out.write (self.makeResponseText(actobj))
else:
return
else:
return errorPage( self.response, "No such Activity", urldict['ClubList'].path(), 404)
class SpecialOp:
def __init__(self, oper = '', url = '', needPost = False, data = [], display = ''):
self.oper = oper
if (not display):
display = oper
self.display = display
self.url = url
self.needPost = needPost
self.data = data
class ActivityView(ActivityBase):
def __init__(self, *args, **kw):
super (ActivityView, self).__init__(*args, **kw)
self.template = 'activity_view.html'
self.urlcfg = urldict['ActivityView']
self.actOperation = "view"
def templateParams(self):
defaults = super (ActivityView, self).templateParams()
user = get_current_user();
aid = self.actobj.key().id()
specialOps = []
if (hasActPrivilige(user, self.actobj, "edit" )):
sop = SpecialOp('edit', urldict['ActivityEdit'].path(aid), False)
specialOps.append(sop)
urlcfg = urldict['ActivityParticipate']
soplist = ['join', 'quit', 'confirm']
if (self.actobj.isBilled):
soplist.append("rebill")
else:
soplist.append("bill")
for oper in soplist:
if (hasActPrivilige(user, self.actobj, oper) ):
data = [('target', user.email()), ]
sop = SpecialOp(oper, urlcfg.path(aid, oper), True, data)
specialOps.append(sop)
defaults['specialOps'] = specialOps
participatorOps = []
for oper in ('confirm', ):
if (hasActPrivilige(user, self.actobj, oper) ):
sop = SpecialOp(oper, urlcfg.path(aid, oper), True, [])
participatorOps.append(sop)
defaults['participatorOps'] = participatorOps
apq = ActivityParticipator.all()
apq.filter ('activity = ', self.actobj)
defaults['participators'] = apq
return defaults
class ActivityParticipate(webapp.RequestHandler):
def getActModel(self, id):
try:
iid = int(id)
except:
return None
actobj = Activity.get_by_id(iid)
return actobj
def get(self, *args):
urlcfg = urldict['ActivityParticipate']
id, oper = urlcfg.analyze(self.request.path)
self.response.out.write (
'on id %s, operation %s' % (id, oper)
)
def post(self, *args):
urlcfg = urldict['ActivityParticipate']
id, oper = urlcfg.analyze(self.request.path)
id = int(id)
actobj = self.getActModel(id)
if (not actobj):
			return errorPage (self.response, "No such activity", urldict['ClubList'].path(), 404)
user = get_current_user();
if (not user):
return errorPage ( self.response, "Not login", create_login_url(self.request.url), 403)
target = self.request.get ('target')
cluburl = urldict['ClubView'].path(actobj.club.slug)
if (not hasActPrivilige(user, actobj, oper,target) ):
return errorPage ( self.response, "Can not access", cluburl, 403)
if (target):
targetUser = User(target)
if(not targetUser):
return errorPage ( self.response, "Illegal access", cluburl, 403)
else: #if target omitted, use current user as target
targetUser = user
mem = Membership.between (targetUser, actobj.club)
if (not mem):
return errorPage ( self.response, "Not a member", cluburl, 403)
acturl = urldict['ActivityView'].path(id)
if (oper == 'join'):
actp = ActivityParticipator.between (mem, actobj)
if (not actp):
actp = ActivityParticipator(member = mem, activity = actobj)
actp.put()
return infoPage (self.response, "Successfully Joined", "%s has join activity %s" % (mem.name, actobj.name), acturl)
elif (oper == 'quit'):
actp = ActivityParticipator.between(mem, actobj)
if (actp):
if (actp.confirmed):
return errorPage ( self.response, "Cannot delete confirmed participator", acturl, 403)
else:
actp.delete()
return infoPage (self.response, "Successfully Quited", "%s success quit activity %s" % (mem.name, actobj.name), acturl)
elif (oper == 'confirm'):
actp = ActivityParticipator.between(mem, actobj)
if (actp):
actp.confirmed = not actp.confirmed
actp.put()
return infoPage (self.response, "Successfully Confirmed", "success confirmed %s join activity %s" % (mem.name, actobj.name), acturl)
else:
return errorPage ( self.response, "No Such a Member", acturl, 404)
elif (oper == 'bill' or oper == "rebill"):
			billobj = ActivityBill.generateBill(actobj, oper == "rebill")  # a "rebill" request regenerates an existing bill
if (billobj):
billobj.put()
billDict = dict(billobj = billobj)
return infoPage (self.response, "Successfully Billded", str(billobj.memberBill), acturl)
else:
return errorPage (self.response, "Error Will Generate Bill", acturl, 501)
def extractRequestData(request, interested, dbg=None):
retval = dict()
for (key, valid) in interested.iteritems() :
val = valid (request.get(key))
if (dbg):
dbg ( "Extract:", key, "=", val)
if (val):
retval [key] = val
return retval
import re
def parseDuration(times):
#support only h
tstr = times[:-1]
print "Times String: ", tstr
return float(tstr)
def parseBill (billstr, dbg = None):
entries = billstr.split (',')
ary = []
if (dbg):
dbg ("Bill String:", billstr)
dbg ("Splitted:", entries)
i = 1
for ent in entries:
ent = ent.strip()
if (i == 2):
val = ent
ary.append ( (key, val) )
i = 0
else :
key = ent
i += 1
return ary
class ActivityEdit(ActivityBase):
def __init__(self, *args, **kw):
super (ActivityEdit, self).__init__(*args, **kw)
self.template = 'activity_edit.html'
self.urlcfg = urldict['ActivityEdit']
self.actobj = None
self.actOperation = "edit"
def parseBillDbg(self, billstr):
return parseBill(billstr, self.dbg)
def updateObject(self, actobj):
interested = dict (name = str, intro = str, duration = parseDuration, bill = self.parseBillDbg)
reqs = extractRequestData (self.request, interested, self.dbg)
for (key, val) in reqs.iteritems():
self.dbg (key, "=", val)
setattr (actobj, key, val)
#Will read data from postdata, and update the pass-in actobj.
pass
def post(self, *args):
actobj = self.getActModel()
if (actobj):
self.actobj = actobj
if (self.checkPrivilige()):
if (self.request.get ('delete', False)):
actobj.delete()
return infoPage (self.response, "Successful deleted", "Deleted Activity %s" % actobj.name, "/")
self.updateObject(actobj)
key = actobj.put()
if (key):
return errorPage( self.response, "Successfully storing this Activity", urldict['ActivityView'].path(key.id()), 200)
else:
return errorPage( self.response, "Error while storing this Activity", urldict['ActivityEdit'].path(actobj.key().id()), 501)
else:
return errorPage( self.response, "No such Activity", urldict['ClubList'].path(), 404)
class ActivityNew(ActivityEdit):
def getActModel(self):
urlcfg = urldict['ActivityNew']
slug, = urlcfg.analyze(self.request.path)
user = get_current_user()
club = Club.getClubBySlug(slug)
if (user and club):
newact = Activity.createDefault(user, club)
			if (newact): newact.bill = [('Field Expense', 80), ('Balls Expense', 30)]
return newact
else:
return None
def checkPrivilige(self):
user = get_current_user()
if (not user):
errorPage ( self.response, "Not login", create_login_url(self.request.url), 403)
return False
if (not hasClubPrivilige(user, self.actobj.club, "newact")):
errorPage ( self.response, "Not Authorized to edit", urldict['ClubView'].path(self.actobj.club.slug), 403)
return False
return True
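# Expected behaviour of the module-level parsing helpers above (illustrative):
#
#   >>> parseBill('Field Expense, 80, Balls Expense, 30')
#   [('Field Expense', '80'), ('Balls Expense', '30')]
#   >>> parseDuration('1.5h')   # also prints "Times String:  1.5"
#   1.5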
| agpl-3.0 | -6,981,099,863,504,542,000 | 34.048951 | 142 | 0.680467 | false | 3.03023 | false | false | false |
pedrogazquez/appBares | rango/forms.py | 1 | 3240 |
from django import forms
from django.contrib.auth.models import User
from rango.models import Tapa, Bar, UserProfile
class BarForm(forms.ModelForm):
name = forms.CharField(max_length=128, help_text="Por favor introduzca el nombre del bar")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
likes = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
# An inline class to provide additional information on the form.
class Meta:
# Provide an association between the ModelForm and a model
model = Bar
# class TapaForm(forms.ModelForm):
# nombre = forms.CharField(max_length=128, help_text="Por favor introduzca el nombre de la tapa")
# url = forms.URLField(max_length=200, help_text="Por favor introduzca la direccion de la imagen de la tapa")
# views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
# def clean(self):
# cleaned_data = self.cleaned_data
# url = cleaned_data.get('url')
# # If url is not empty and doesn't start with 'http://' add 'http://' to the beginning
# if url and not url.startswith('http://'):
# url = 'http://' + url
# cleaned_data['url'] = url
# return cleaned_data
# class Meta:
# # Provide an association between the ModelForm and a model
# model = Tapa
# # What fields do we want to include in our form?
# # This way we don't need every field in the model present.
# # Some fields may allow NULL values, so we may not want to include them...
# # Here, we are hiding the foreign keys
# fields = ('nombre', 'url','views')
class TapaForm(forms.ModelForm):
nombre = forms.CharField(max_length=128, help_text="Por favor introduzca el nombre de la tapa")
url = forms.URLField(max_length=200, help_text="Por favor introduzca la direccion de la imagen de la tapa")
views = forms.IntegerField(widget=forms.HiddenInput(), initial=0)
class Meta:
# Provide an association between the ModelForm and a model
model = Tapa
# What fields do we want to include in our form?
# This way we don't need every field in the model present.
# Some fields may allow NULL values, so we may not want to include them...
# Here, we are hiding the foreign key.
# we can either exclude the category field from the form,
exclude = ('bar',)
#or specify the fields to include (i.e. not include the category field)
fields = ('nombre', 'url','views')
class UserForm(forms.ModelForm):
username = forms.CharField(help_text="Please enter a username.")
email = forms.CharField(help_text="Please enter your email.")
password = forms.CharField(widget=forms.PasswordInput(), help_text="Please enter a password.")
class Meta:
model = User
fields = ('username', 'email', 'password')
class UserProfileForm(forms.ModelForm):
website = forms.URLField(help_text="Please enter your website.", required=False)
picture = forms.ImageField(help_text="Select a profile image to upload.", required=False)
class Meta:
model = UserProfile
        fields = ('website', 'picture')
| gpl-3.0 | -6,981,099,863,504,542,000 | 34.048951 | 142 | 0.680467 | false | 3.03023 | false | false | false |
google-research/disentanglement_lib | disentanglement_lib/data/ground_truth/cars3d.py | 1 | 4067 |
# coding=utf-8
# Copyright 2018 The DisentanglementLib Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Cars3D data set."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from disentanglement_lib.data.ground_truth import ground_truth_data
from disentanglement_lib.data.ground_truth import util
import numpy as np
import PIL
import scipy.io as sio
from six.moves import range
from sklearn.utils import extmath
from tensorflow.compat.v1 import gfile
CARS3D_PATH = os.path.join(
os.environ.get("DISENTANGLEMENT_LIB_DATA", "."), "cars")
class Cars3D(ground_truth_data.GroundTruthData):
"""Cars3D data set.
The data set was first used in the paper "Deep Visual Analogy-Making"
(https://papers.nips.cc/paper/5845-deep-visual-analogy-making) and can be
downloaded from http://www.scottreed.info/. The images are rescaled to 64x64.
The ground-truth factors of variation are:
0 - elevation (4 different values)
1 - azimuth (24 different values)
2 - object type (183 different values)
"""
def __init__(self):
self.factor_sizes = [4, 24, 183]
features = extmath.cartesian(
[np.array(list(range(i))) for i in self.factor_sizes])
self.latent_factor_indices = [0, 1, 2]
self.num_total_factors = features.shape[1]
self.index = util.StateSpaceAtomIndex(self.factor_sizes, features)
self.state_space = util.SplitDiscreteStateSpace(self.factor_sizes,
self.latent_factor_indices)
self.data_shape = [64, 64, 3]
self.images = self._load_data()
@property
def num_factors(self):
return self.state_space.num_latent_factors
@property
def factors_num_values(self):
return self.factor_sizes
@property
def observation_shape(self):
return self.data_shape
def sample_factors(self, num, random_state):
"""Sample a batch of factors Y."""
return self.state_space.sample_latent_factors(num, random_state)
def sample_observations_from_factors(self, factors, random_state):
"""Sample a batch of observations X given a batch of factors Y."""
all_factors = self.state_space.sample_all_factors(factors, random_state)
indices = self.index.features_to_index(all_factors)
return self.images[indices].astype(np.float32)
def _load_data(self):
dataset = np.zeros((24 * 4 * 183, 64, 64, 3))
all_files = [x for x in gfile.ListDirectory(CARS3D_PATH) if ".mat" in x]
for i, filename in enumerate(all_files):
data_mesh = _load_mesh(filename)
factor1 = np.array(list(range(4)))
factor2 = np.array(list(range(24)))
all_factors = np.transpose([
np.tile(factor1, len(factor2)),
np.repeat(factor2, len(factor1)),
np.tile(i,
len(factor1) * len(factor2))
])
indexes = self.index.features_to_index(all_factors)
dataset[indexes] = data_mesh
return dataset
def _load_mesh(filename):
"""Parses a single source file and rescales contained images."""
with gfile.Open(os.path.join(CARS3D_PATH, filename), "rb") as f:
mesh = np.einsum("abcde->deabc", sio.loadmat(f)["im"])
flattened_mesh = mesh.reshape((-1,) + mesh.shape[2:])
rescaled_mesh = np.zeros((flattened_mesh.shape[0], 64, 64, 3))
for i in range(flattened_mesh.shape[0]):
pic = PIL.Image.fromarray(flattened_mesh[i, :, :, :])
pic.thumbnail((64, 64, 3), PIL.Image.ANTIALIAS)
rescaled_mesh[i, :, :, :] = np.array(pic)
return rescaled_mesh * 1. / 255
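# A minimal sampling sketch (requires the cars .mat files under
# $DISENTANGLEMENT_LIB_DATA/cars):
#
#   data = Cars3D()
#   rng = np.random.RandomState(0)
#   factors = data.sample_factors(16, rng)                        # (16, 3)
#   images = data.sample_observations_from_factors(factors, rng)  # (16, 64, 64, 3)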
| apache-2.0 | 5,423,659,911,183,028,000 | 35.3125 | 79 | 0.687239 | false | 3.380715 | false | false | false |
JudoWill/ResearchNotebooks | GA-PhredProcessing.py | 1 | 1153 |
# -*- coding: utf-8 -*-
# <nbformat>3.0</nbformat>
# <codecell>
import os, os.path
import shutil
import glob
import sys
from subprocess import check_call, check_output
os.chdir('/home/will/Dropbox/PhredDirectory/')
staden_path = '/home/will/staden-2.0.0b9.x86_64/bin/'
sys.path.append('/home/will/PySeqUtils/')
# <codecell>
from GeneralSeqTools import call_muscle, fasta_reader, fasta_writer
# <codecell>
#from Bio import SeqIO
from Bio.SeqIO.AbiIO import AbiIterator
files = glob.glob('../Wigdahl Trace files/2:11:11/*.ab1')
seqs = []
for f in files:
rec = AbiIterator(open(f, mode = 'rb'), trim = True).next()
seqs.append( (rec.id, rec.seq.tostring()) )
# <codecell>
!/home/will/staden-2.0.0b9.x86_64/bin/convert_trace --help
# <codecell>
res = call_muscle(seqs)
with open('align_data.fasta', 'w') as handle:
fasta_writer(handle, res)
# <codecell>
from HIVTransTool import process_seqs
results = list(process_seqs(seqs[:50], extract_regions = True, known_names = 50))
# <codecell>
for row in results:
if row['RegionName'] == 'LTR5':
print row['Name'], row['QueryNuc']
# <codecell>
results[:5]
# <codecell>
| mit | 8,730,449,133,413,499,000 | 18.87931 | 81 | 0.679965 | false | 2.656682 | false | false | false |
intel-hpdd/intel-manager-for-lustre | chroma_core/services/job_scheduler/job_scheduler_client.py | 1 | 10557 |
# Copyright (c) 2020 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
"""
The service `job_scheduler` handles both RPCs (JobSchedulerRpc) and a queue (NotificationQueue).
The RPCs are used for explicit requests to modify the system or run a particular task, while the queue
is used for updates received from agent reports. Access to both of these, along with some additional
non-remote functionality is wrapped in JobSchedulerClient.
"""
from django import db
from chroma_core.services import log_register
from chroma_core.services.rpc import ServiceRpcInterface
from chroma_core.models import ManagedHost, Command
log = log_register(__name__)
class JobSchedulerRpc(ServiceRpcInterface):
methods = [
"set_state",
"run_jobs",
"cancel_job",
"create_host_ssh",
"test_host_contact",
"create_filesystem",
"create_ostpool",
"create_task",
"remove_task",
"update_ostpool",
"delete_ostpool",
"create_client_mount",
"create_copytool",
"register_copytool",
"unregister_copytool",
"update_nids",
"trigger_plugin_update",
"update_lnet_configuration",
"create_host",
"create_targets",
"available_transitions",
"available_jobs",
"get_locks",
"update_corosync_configuration",
"get_transition_consequences",
"configure_stratagem",
"update_stratagem",
"run_stratagem",
]
class JobSchedulerClient(object):
"""Because there are some tasks which are the domain of the job scheduler but do not need to
be run in the context of the service, the RPCs and queue operations are accompanied in this
class by some operations that run locally. The local operations are
read-only operations such as querying what operations are possible for a particular object.
"""
@classmethod
def command_run_jobs(cls, job_dicts, message):
"""Create and run some Jobs, within a single Command.
:param job_dicts: List of 1 or more dicts like {'class_name': 'MyJobClass', 'args': {<dict of arguments to Job constructor>}}
:param message: User-visible string describing the operation, e.g. "Detecting filesystems"
:return: The ID of a new Command
"""
return JobSchedulerRpc().run_jobs(job_dicts, message)
@classmethod
def command_set_state(cls, object_ids, message, run=True):
"""Modify the system in whatever way is necessary to reach the state
specified in `object_ids`. Creates Jobs under a single Command. May create
no Jobs if the system is already in the state, or already scheduled to be
in that state. If the system is already scheduled to be in that state, then
the returned Command will be connected to the existing Jobs which take the system to
the desired state.
:param cls:
:param object_ids: List of three-tuples (natural_key, object_id, new_state)
:param message: User-visible string describing the operation, e.g. "Starting filesystem X"
:param run: Test only. Schedule jobs without starting them.
:return: The ID of a new Command
"""
return JobSchedulerRpc().set_state(object_ids, message, run)
@classmethod
def available_transitions(cls, object_list):
"""Return the transitions available for each object in list
See the Job Scheduler method of the same name for details.
"""
return JobSchedulerRpc().available_transitions(object_list)
@classmethod
def available_jobs(cls, object_list):
"""Query which jobs (other than changes to state) can be run on this object.
See the Job Scheduler method of the same name for details.
"""
return JobSchedulerRpc().available_jobs(object_list)
@classmethod
def get_transition_consequences(cls, stateful_object, new_state):
"""Query what the side effects of a state transition are. Effectively does
a dry run of scheduling jobs for the transition.
The return format is like this:
::
{
'transition_job': <job dict>,
'dependency_jobs': [<list of job dicts>]
}
# where each job dict is like
{
'class': '<job class name>',
'requires_confirmation': <boolean, whether to prompt for confirmation>,
'confirmation_prompt': <string, confirmation prompt>,
'description': <string, description of the job>,
'stateful_object_id': <ID of the object modified by this job>,
'stateful_object_content_type_id': <Content type ID of the object modified by this job>
}
:param stateful_object: A StatefulObject instance
:param new_state: Hypothetical new value of the 'state' attribute
"""
return JobSchedulerRpc().get_transition_consequences(
stateful_object.__class__.__name__, stateful_object.id, new_state
)
@classmethod
def cancel_job(cls, job_id):
"""Attempt to cancel a job which is already scheduled (and possibly running)
:param job_id: ID of a Job object
"""
JobSchedulerRpc().cancel_job(job_id)
@classmethod
def create_host_ssh(cls, address, server_profile, root_pw, pkey, pkey_pw):
"""
Create a host which will be set up using SSH
:param address: SSH address
:return: (<ManagedHost instance>, <Command instance>)
"""
host_id, command_id = JobSchedulerRpc().create_host_ssh(address, server_profile, root_pw, pkey, pkey_pw)
return (ManagedHost.objects.get(pk=host_id), Command.objects.get(pk=command_id))
@classmethod
def test_host_contact(cls, address, root_pw=None, pkey=None, pkey_pw=None):
command_id = JobSchedulerRpc().test_host_contact(address, root_pw, pkey, pkey_pw)
return Command.objects.get(pk=command_id)
@classmethod
def update_corosync_configuration(cls, corosync_configuration_id, mcast_port, network_interface_ids):
command_id = JobSchedulerRpc().update_corosync_configuration(
corosync_configuration_id, mcast_port, network_interface_ids
)
return Command.objects.get(pk=command_id)
@classmethod
def create_filesystem(cls, fs_data):
return JobSchedulerRpc().create_filesystem(fs_data)
@classmethod
def create_ostpool(cls, pool_data):
return JobSchedulerRpc().create_ostpool(pool_data)
@classmethod
def update_ostpool(cls, pool_data):
return JobSchedulerRpc().update_ostpool(pool_data)
@classmethod
def delete_ostpool(cls, pool):
return JobSchedulerRpc().delete_ostpool(pool)
@classmethod
def create_task(cls, task_data):
return JobSchedulerRpc().create_task(task_data)
@classmethod
def remove_task(cls, task_id):
        return JobSchedulerRpc().remove_task(task_id)
@classmethod
def update_nids(cls, nid_data):
return JobSchedulerRpc().update_nids(nid_data)
@classmethod
def trigger_plugin_update(cls, include_host_ids, exclude_host_ids, plugin_names):
"""
Cause the plugins on the hosts passed to send an update irrespective of whether any
changes have occurred.
        :param include_host_ids: List of host ids to include in the trigger update.
        :param exclude_host_ids: List of host ids to exclude from the include list (this makes usage easier).
        :param plugin_names: List of plugins to trigger an update on; an empty list means all.
        :return: The id of the command that caused the updates to be sent.
"""
assert isinstance(include_host_ids, list)
assert isinstance(exclude_host_ids, list)
assert isinstance(plugin_names, list)
return JobSchedulerRpc().trigger_plugin_update(include_host_ids, exclude_host_ids, plugin_names)
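    # Example (host ids are made up): ask hosts 1-4 except host 3 to resend
    # only the "linux" plugin's data:
    #   command_id = JobSchedulerClient.trigger_plugin_update([1, 2, 3, 4], [3], ["linux"])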
@classmethod
def update_lnet_configuration(cls, lnet_configuration_list):
return JobSchedulerRpc().update_lnet_configuration(lnet_configuration_list)
@classmethod
def create_host(cls, fqdn, nodename, address, server_profile_id):
# The address of a host isn't something we can learn from it (the
# address is specifically how the host is to be reached from the manager
# for outbound connections, not just its FQDN). If during creation we know
# the address, then great, accept it. Else default to FQDN, it's a reasonable guess.
if address is None:
address = fqdn
host_id, command_id = JobSchedulerRpc().create_host(fqdn, nodename, address, server_profile_id)
return (ManagedHost.objects.get(pk=host_id), Command.objects.get(pk=command_id))
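    # Sketch of the address fallback described above (values are placeholders):
    #   host, command = JobSchedulerClient.create_host(
    #       fqdn="oss1.example.com", nodename="oss1",
    #       address=None,  # falls back to the FQDN
    #       server_profile_id=profile.id)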
@classmethod
def create_targets(cls, targets_data):
from chroma_core.models import ManagedTarget, Command
target_ids, command_id = JobSchedulerRpc().create_targets(targets_data)
return (list(ManagedTarget.objects.filter(id__in=target_ids)), Command.objects.get(pk=command_id))
@classmethod
def create_client_mount(cls, host, filesystem_name, mountpoint):
from chroma_core.models import LustreClientMount
client_mount_id = JobSchedulerRpc().create_client_mount(host.id, filesystem_name, mountpoint)
return LustreClientMount.objects.get(id=client_mount_id)
@classmethod
def create_copytool(cls, copytool_data):
from chroma_core.models import Copytool
copytool_id = JobSchedulerRpc().create_copytool(copytool_data)
return Copytool.objects.get(id=copytool_id)
@classmethod
def register_copytool(cls, copytool_id, uuid):
JobSchedulerRpc().register_copytool(copytool_id, uuid)
@classmethod
def unregister_copytool(cls, copytool_id):
JobSchedulerRpc().unregister_copytool(copytool_id)
@classmethod
def get_locks(cls):
return JobSchedulerRpc().get_locks()
@classmethod
def configure_stratagem(cls, stratagem_data):
return JobSchedulerRpc().configure_stratagem(stratagem_data)
@classmethod
def update_stratagem(cls, stratagem_data):
return JobSchedulerRpc().update_stratagem(stratagem_data)
@classmethod
def run_stratagem(cls, mdts, fs_id, stratagem_data):
return JobSchedulerRpc().run_stratagem(mdts, fs_id, stratagem_data)
| mit | -5,882,066,424,023,178,000 | 36.703571 | 133 | 0.666856 | false | 4.030928 | true | false | false |
fsmMLK/inkscapeMadeEasy | examples/iME_Draw_lineStyle_and_markers.py | 1 | 4006 | #!/usr/bin/python
import inkex
import inkscapeMadeEasy_Base as inkBase
import inkscapeMadeEasy_Draw as inkDraw
import math
class myExtension(inkBase.inkscapeMadeEasy):
def __init__(self):
inkex.Effect.__init__(self)
self.OptionParser.add_option("--myColorPicker", action="store", type="string", dest="lineColorPickerVar", default='0')
self.OptionParser.add_option("--myColorOption", action="store", type="string", dest="lineColorOptionVar", default='0')
def effect(self):
        # sets the position to the viewport center, rounded up to the next multiple of 10.
position=[self.view_center[0],self.view_center[1]]
position[0]=int(math.ceil(position[0] / 10.0)) * 10
position[1]=int(math.ceil(position[1] / 10.0)) * 10
        # creates a dot marker with red stroke color and gray (40%) fill color
myDotMarker = inkDraw.marker.createDotMarker(self,
nameID='myDot' ,
RenameMode=1, # overwrite an eventual markers with the same name
scale=0.2,
strokeColor=inkDraw.color.defined('red'),
fillColor=inkDraw.color.gray(0.4))
# parses the input options to get the color of the line
lineColor = inkDraw.color.parseColorPicker(self.options.lineColorOptionVar, self.options.lineColorPickerVar)
# create a new line style with a 2.0 pt line and the marker just defined at both ends
myLineStyleDot = inkDraw.lineStyle.set(lineWidth=2.0,
lineColor=lineColor,
fillColor=inkDraw.color.defined('blue'),
lineJoin='round',
lineCap='round',
markerStart=myDotMarker,
markerMid=myDotMarker,
markerEnd=myDotMarker,
strokeDashArray=None)
#root_layer = self.current_layer
root_layer = self.document.getroot()
        # draws a line using the new line style (see the inkscapeMadeEasy_Draw.line class for further info on this function)
inkDraw.line.relCoords(root_layer,coordsList= [[0,100],[100,0]],offset=position,lineStyle=myLineStyleDot)
        # -- Creates a second line style, terminated by ellipsis markers --
        # creates an ellipsis marker pair with default values
infMarkerStart,infMarkerEnd = inkDraw.marker.createElipsisMarker(self,
nameID='myEllipsis' ,
RenameMode=1) # overwrite an eventual markers with the same name
# create a new line style
myStyleInf = inkDraw.lineStyle.set(lineWidth=1.0,
lineColor=lineColor,
fillColor=None,
lineJoin='round',
lineCap='round',
markerStart=infMarkerStart,
markerMid=None,
markerEnd=infMarkerEnd,
strokeDashArray=None)
        # draws a line using the new line style (see the inkscapeMadeEasy_Draw.line class for further info on this function)
inkDraw.line.relCoords(root_layer,coordsList= [[0,100],[100,0]],offset=[position[0]+300,position[1]],lineStyle=myStyleInf)
if __name__ == '__main__':
x = myExtension()
x.affect()
| gpl-3.0 | -872,509,245,772,700,800 | 53.876712 | 148 | 0.496006 | false | 4.927429 | false | false | false |
wetek-enigma/enigma2 | lib/python/Screens/ButtonSetup.py | 1 | 29746 | from GlobalActions import globalActionMap
from Components.ActionMap import ActionMap, HelpableActionMap
from Components.Button import Button
from Components.ChoiceList import ChoiceList, ChoiceEntryComponent
from Components.SystemInfo import SystemInfo
from Components.config import config, ConfigSubsection, ConfigText, ConfigYesNo
from Components.PluginComponent import plugins
from Screens.ChoiceBox import ChoiceBox
from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Plugins.Plugin import PluginDescriptor
from Tools.BoundFunction import boundFunction
from ServiceReference import ServiceReference
from enigma import eServiceReference, eActionMap
from Components.Label import Label
import os
def getButtonSetupKeys():
return [(_("Red"), "red", ""),
(_("Red long"), "red_long", ""),
(_("Green"), "green", ""),
(_("Green long"), "green_long", ""),
(_("Yellow"), "yellow", ""),
(_("Yellow long"), "yellow_long", ""),
(_("Blue"), "blue", ""),
(_("Blue long"), "blue_long", ""),
(_("Info (EPG)"), "info", "Infobar/InfoPressed/1"),
(_("Info (EPG) Long"), "info_long", "Infobar/showEventInfoPlugins/1"),
(_("Epg/Guide"), "epg", "Infobar/EPGPressed/1"),
(_("Epg/Guide long"), "epg_long", "Infobar/showEventGuidePlugins/1"),
(_("Left"), "cross_left", ""),
(_("Right"), "cross_right", ""),
(_("Up"), "cross_up", ""),
(_("Down"), "cross_down", ""),
(_("PageUp"), "pageup", ""),
(_("PageUp long"), "pageup_long", ""),
(_("PageDown"), "pagedown", ""),
(_("PageDown long"), "pagedown_long", ""),
(_("Channel up"), "channelup", ""),
(_("Channel down"), "channeldown", ""),
(_("TV"), "showTv", ""),
(_("Radio"), "radio", ""),
(_("Radio long"), "radio_long", ""),
(_("Rec"), "rec", ""),
(_("Rec long"), "rec_long", ""),
(_("Teletext"), "text", ""),
(_("Help"), "displayHelp", ""),
(_("Help long"), "displayHelp_long", ""),
(_("Subtitle"), "subtitle", ""),
(_("Subtitle long"), "subtitle_long", ""),
(_("Menu"), "mainMenu", ""),
(_("List/Fav"), "list", ""),
(_("List/Fav long"), "list_long", ""),
(_("PVR"), "pvr", ""),
(_("PVR long"), "pvr_long", ""),
(_("Favorites"), "favorites", ""),
(_("Favorites long"), "favorites_long", ""),
(_("File"), "file", ""),
(_("File long"), "file_long", ""),
(_("OK long"), "ok_long", ""),
(_("Media"), "media", ""),
(_("Media long"), "media_long", ""),
(_("Open"), "open", ""),
(_("Open long"), "open_long", ""),
(_("Www"), "www", ""),
(_("Www long"), "www_long", ""),
(_("Directory"), "directory", ""),
(_("Directory long"), "directory_long", ""),
(_("Back/Recall"), "back", ""),
(_("Back/Recall") + " " + _("long"), "back_long", ""),
(_("Home"), "home", ""),
(_("End"), "end", ""),
(_("Next"), "next", ""),
(_("Previous"), "previous", ""),
(_("Audio"), "audio", ""),
(_("Play"), "play", ""),
(_("Playpause"), "playpause", ""),
(_("Stop"), "stop", ""),
(_("Pause"), "pause", ""),
(_("Rewind"), "rewind", ""),
(_("Fastforward"), "fastforward", ""),
(_("Skip back"), "skip_back", ""),
(_("Skip forward"), "skip_forward", ""),
(_("activatePiP"), "activatePiP", ""),
(_("Timer"), "timer", ""),
(_("Playlist"), "playlist", ""),
(_("Playlist long"), "playlist_long", ""),
(_("Timeshift"), "timeshift", ""),
(_("Homepage"), "homep", ""),
(_("Homepage long"), "homep_long", ""),
(_("Search/WEB"), "search", ""),
(_("Search/WEB long"), "search_long", ""),
(_("Slow"), "slow", ""),
(_("Mark/Portal/Playlist"), "mark", ""),
(_("Sleep"), "sleep", ""),
(_("Sleep long"), "sleep_long", ""),
(_("Power"), "power", ""),
(_("Power long"), "power_long", ""),
(_("HDMIin"), "HDMIin", "Infobar/HDMIIn"),
(_("HDMIin") + " " + _("long"), "HDMIin_long", (SystemInfo["LcdLiveTV"] and "Infobar/ToggleLCDLiveTV") or ""),
(_("Context"), "contextMenu", "Infobar/showExtensionSelection"),
(_("Context long"), "context_long", ""),
(_("SAT"), "sat", "Infobar/openSatellites"),
(_("SAT long"), "sat_long", ""),
(_("Prov"), "prov", ""),
(_("Prov long"), "prov_long", ""),
(_("F1/LAN"), "f1", ""),
(_("F1/LAN long"), "f1_long", ""),
(_("F2"), "f2", ""),
(_("F2 long"), "f2_long", ""),
(_("F3"), "f3", ""),
(_("F3 long"), "f3_long", ""),
(_("F4"), "f4", ""),
(_("F4 long"), "f4_long", ""),]
config.misc.ButtonSetup = ConfigSubsection()
config.misc.ButtonSetup.additional_keys = ConfigYesNo(default=True)
for x in getButtonSetupKeys():
exec "config.misc.ButtonSetup." + x[1] + " = ConfigText(default='" + x[2] + "')"
def getButtonSetupFunctions():
ButtonSetupFunctions = []
twinPlugins = []
twinPaths = {}
pluginlist = plugins.getPlugins(PluginDescriptor.WHERE_EVENTINFO)
pluginlist.sort(key=lambda p: p.name)
for plugin in pluginlist:
if plugin.name not in twinPlugins and plugin.path and 'selectedevent' not in plugin.__call__.func_code.co_varnames:
if twinPaths.has_key(plugin.path[24:]):
twinPaths[plugin.path[24:]] += 1
else:
twinPaths[plugin.path[24:]] = 1
ButtonSetupFunctions.append((plugin.name, plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) , "EPG"))
twinPlugins.append(plugin.name)
pluginlist = plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EXTENSIONSMENU, PluginDescriptor.WHERE_EVENTINFO])
pluginlist.sort(key=lambda p: p.name)
for plugin in pluginlist:
if plugin.name not in twinPlugins and plugin.path:
if twinPaths.has_key(plugin.path[24:]):
twinPaths[plugin.path[24:]] += 1
else:
twinPaths[plugin.path[24:]] = 1
ButtonSetupFunctions.append((plugin.name, plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) , "Plugins"))
twinPlugins.append(plugin.name)
ButtonSetupFunctions.append((_("Show graphical multi EPG"), "Infobar/openGraphEPG", "EPG"))
ButtonSetupFunctions.append((_("Main menu"), "Infobar/mainMenu", "InfoBar"))
ButtonSetupFunctions.append((_("Show help"), "Infobar/showHelp", "InfoBar"))
ButtonSetupFunctions.append((_("Show extension selection"), "Infobar/showExtensionSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Zap down"), "Infobar/zapDown", "InfoBar"))
ButtonSetupFunctions.append((_("Zap up"), "Infobar/zapUp", "InfoBar"))
ButtonSetupFunctions.append((_("Volume down"), "Infobar/volumeDown", "InfoBar"))
ButtonSetupFunctions.append((_("Volume up"), "Infobar/volumeUp", "InfoBar"))
ButtonSetupFunctions.append((_("Show Infobar"), "Infobar/toggleShow", "InfoBar"))
ButtonSetupFunctions.append((_("Show service list"), "Infobar/openServiceList", "InfoBar"))
ButtonSetupFunctions.append((_("Show favourites list"), "Infobar/openBouquets", "InfoBar"))
ButtonSetupFunctions.append((_("Show satellites list"), "Infobar/openSatellites", "InfoBar"))
ButtonSetupFunctions.append((_("History back"), "Infobar/historyBack", "InfoBar"))
ButtonSetupFunctions.append((_("History next"), "Infobar/historyNext", "InfoBar"))
ButtonSetupFunctions.append((_("Show eventinfo plugins"), "Infobar/showEventInfoPlugins", "EPG"))
ButtonSetupFunctions.append((_("Show event details"), "Infobar/openEventView", "EPG"))
ButtonSetupFunctions.append((_("Show single service EPG"), "Infobar/openSingleServiceEPG", "EPG"))
ButtonSetupFunctions.append((_("Show multi channel EPG"), "Infobar/openMultiServiceEPG", "EPG"))
ButtonSetupFunctions.append((_("Show Audioselection"), "Infobar/audioSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Enable digital downmix"), "Infobar/audioDownmixOn", "InfoBar"))
ButtonSetupFunctions.append((_("Disable digital downmix"), "Infobar/audioDownmixOff", "InfoBar"))
ButtonSetupFunctions.append((_("Switch to radio mode"), "Infobar/showRadio", "InfoBar"))
ButtonSetupFunctions.append((_("Switch to TV mode"), "Infobar/showTv", "InfoBar"))
ButtonSetupFunctions.append((_("Show servicelist or movies"), "Infobar/showServiceListOrMovies", "InfoBar"))
ButtonSetupFunctions.append((_("Show movies"), "Infobar/showMovies", "InfoBar"))
ButtonSetupFunctions.append((_("Instant record"), "Infobar/instantRecord", "InfoBar"))
ButtonSetupFunctions.append((_("Start instant recording"), "Infobar/startInstantRecording", "InfoBar"))
ButtonSetupFunctions.append((_("Activate timeshift End"), "Infobar/activateTimeshiftEnd", "InfoBar"))
ButtonSetupFunctions.append((_("Activate timeshift end and pause"), "Infobar/activateTimeshiftEndAndPause", "InfoBar"))
ButtonSetupFunctions.append((_("Start timeshift"), "Infobar/startTimeshift", "InfoBar"))
ButtonSetupFunctions.append((_("Stop timeshift"), "Infobar/stopTimeshift", "InfoBar"))
ButtonSetupFunctions.append((_("Start teletext"), "Infobar/startTeletext", "InfoBar"))
ButtonSetupFunctions.append((_("Show subservice selection"), "Infobar/subserviceSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Show subtitle selection"), "Infobar/subtitleSelection", "InfoBar"))
ButtonSetupFunctions.append((_("Show subtitle quick menu"), "Infobar/subtitleQuickMenu", "InfoBar"))
ButtonSetupFunctions.append((_("Letterbox zoom"), "Infobar/vmodeSelection", "InfoBar"))
if SystemInfo["PIPAvailable"]:
ButtonSetupFunctions.append((_("Show PIP"), "Infobar/showPiP", "InfoBar"))
ButtonSetupFunctions.append((_("Swap PIP"), "Infobar/swapPiP", "InfoBar"))
ButtonSetupFunctions.append((_("Move PIP"), "Infobar/movePiP", "InfoBar"))
ButtonSetupFunctions.append((_("Toggle PIPzap"), "Infobar/togglePipzap", "InfoBar"))
ButtonSetupFunctions.append((_("Activate HbbTV (Redbutton)"), "Infobar/activateRedButton", "InfoBar"))
ButtonSetupFunctions.append((_("Toggle HDMI-In full screen"), "Infobar/HDMIInFull", "InfoBar"))
ButtonSetupFunctions.append((_("Toggle HDMI-In PiP"), "Infobar/HDMIInPiP", "InfoBar"))
if SystemInfo["LcdLiveTV"]:
ButtonSetupFunctions.append((_("Toggle LCD LiveTV"), "Infobar/ToggleLCDLiveTV", "InfoBar"))
ButtonSetupFunctions.append((_("Hotkey Setup"), "Module/Screens.ButtonSetup/ButtonSetup", "Setup"))
ButtonSetupFunctions.append((_("Software update"), "Module/Screens.SoftwareUpdate/UpdatePlugin", "Setup"))
ButtonSetupFunctions.append((_("CI (Common Interface) Setup"), "Module/Screens.Ci/CiSelection", "Setup"))
ButtonSetupFunctions.append((_("Tuner Configuration"), "Module/Screens.Satconfig/NimSelection", "Scanning"))
ButtonSetupFunctions.append((_("Manual Scan"), "Module/Screens.ScanSetup/ScanSetup", "Scanning"))
ButtonSetupFunctions.append((_("Automatic Scan"), "Module/Screens.ScanSetup/ScanSimple", "Scanning"))
for plugin in plugins.getPluginsForMenu("scan"):
ButtonSetupFunctions.append((plugin[0], "MenuPlugin/scan/" + plugin[2], "Scanning"))
ButtonSetupFunctions.append((_("Network setup"), "Module/Screens.NetworkSetup/NetworkAdapterSelection", "Setup"))
ButtonSetupFunctions.append((_("Network menu"), "Infobar/showNetworkMounts", "Setup"))
ButtonSetupFunctions.append((_("Plugin Browser"), "Module/Screens.PluginBrowser/PluginBrowser", "Setup"))
ButtonSetupFunctions.append((_("Channel Info"), "Module/Screens.ServiceInfo/ServiceInfo", "Setup"))
ButtonSetupFunctions.append((_("SkinSelector"), "Module/Screens.SkinSelector/SkinSelector", "Setup"))
ButtonSetupFunctions.append((_("LCD SkinSelector"), "Module/Screens.SkinSelector/LcdSkinSelector", "Setup"))
ButtonSetupFunctions.append((_("Timer"), "Module/Screens.TimerEdit/TimerEditList", "Setup"))
ButtonSetupFunctions.append((_("Open AutoTimer"), "Infobar/showAutoTimerList", "Setup"))
for plugin in plugins.getPluginsForMenu("system"):
if plugin[2]:
ButtonSetupFunctions.append((plugin[0], "MenuPlugin/system/" + plugin[2], "Setup"))
ButtonSetupFunctions.append((_("Standby"), "Module/Screens.Standby/Standby", "Power"))
ButtonSetupFunctions.append((_("Restart"), "Module/Screens.Standby/TryQuitMainloop/2", "Power"))
ButtonSetupFunctions.append((_("Restart enigma"), "Module/Screens.Standby/TryQuitMainloop/3", "Power"))
ButtonSetupFunctions.append((_("Deep standby"), "Module/Screens.Standby/TryQuitMainloop/1", "Power"))
ButtonSetupFunctions.append((_("SleepTimer"), "Module/Screens.SleepTimerEdit/SleepTimerEdit", "Power"))
ButtonSetupFunctions.append((_("PowerTimer"), "Module/Screens.PowerTimerEdit/PowerTimerEditList", "Power"))
ButtonSetupFunctions.append((_("Usage Setup"), "Setup/usage", "Setup"))
ButtonSetupFunctions.append((_("User interface settings"), "Setup/userinterface", "Setup"))
ButtonSetupFunctions.append((_("Recording Setup"), "Setup/recording", "Setup"))
ButtonSetupFunctions.append((_("Harddisk Setup"), "Setup/harddisk", "Setup"))
ButtonSetupFunctions.append((_("Subtitles Settings"), "Setup/subtitlesetup", "Setup"))
ButtonSetupFunctions.append((_("Language"), "Module/Screens.LanguageSelection/LanguageSelection", "Setup"))
ButtonSetupFunctions.append((_("OscamInfo Mainmenu"), "Module/Screens.OScamInfo/OscamInfoMenu", "Plugins"))
ButtonSetupFunctions.append((_("CCcamInfo Mainmenu"), "Module/Screens.CCcamInfo/CCcamInfoMain", "Plugins"))
ButtonSetupFunctions.append((_("Movieplayer"), "Module/Screens.MovieSelection/MovieSelection", "Plugins"))
if os.path.isdir("/etc/ppanels"):
for x in [x for x in os.listdir("/etc/ppanels") if x.endswith(".xml")]:
x = x[:-4]
ButtonSetupFunctions.append((_("PPanel") + " " + x, "PPanel/" + x, "PPanels"))
if os.path.isdir("/usr/script"):
for x in [x for x in os.listdir("/usr/script") if x.endswith(".sh")]:
x = x[:-3]
ButtonSetupFunctions.append((_("Shellscript") + " " + x, "Shellscript/" + x, "Shellscripts"))
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/ScriptRunner.pyo"):
ButtonSetupFunctions.append((_("ScriptRunner"), "ScriptRunner/", "Plugins"))
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/QuickMenu.pyo"):
ButtonSetupFunctions.append((_("QuickMenu"), "QuickMenu/", "Plugins"))
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Kodi/plugin.pyo"):
ButtonSetupFunctions.append((_("Kodi MediaCenter"), "Kodi/", "Plugins"))
return ButtonSetupFunctions
class ButtonSetup(Screen):
def __init__(self, session, args=None):
Screen.__init__(self, session)
self['description'] = Label(_('Click on your remote on the button you want to change'))
self.session = session
self.setTitle(_("Hotkey Setup"))
self["key_red"] = Button(_("Exit"))
self.list = []
self.ButtonSetupKeys = getButtonSetupKeys()
self.ButtonSetupFunctions = getButtonSetupFunctions()
for x in self.ButtonSetupKeys:
self.list.append(ChoiceEntryComponent('',(_(x[0]), x[1])))
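		# "cond and A or B" below is the pre-Python-2.5 ternary idiom: list every
		# key when additional_keys is enabled, otherwise only the first 10.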
self["list"] = ChoiceList(list=self.list[:config.misc.ButtonSetup.additional_keys.value and len(self.ButtonSetupKeys) or 10], selection = 0)
self["choosen"] = ChoiceList(list=[])
self.getFunctions()
self["actions"] = ActionMap(["OkCancelActions"],
{
"cancel": self.close,
}, -1)
self["ButtonSetupButtonActions"] = ButtonSetupActionMap(["ButtonSetupActions"], dict((x[1], self.ButtonSetupGlobal) for x in self.ButtonSetupKeys))
self.longkeyPressed = False
self.onLayoutFinish.append(self.__layoutFinished)
self.onExecBegin.append(self.getFunctions)
self.onShown.append(self.disableKeyMap)
self.onClose.append(self.enableKeyMap)
def __layoutFinished(self):
self["choosen"].selectionEnabled(0)
def disableKeyMap(self):
globalActionMap.setEnabled(False)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 0)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 1)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 4)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 5)
def enableKeyMap(self):
globalActionMap.setEnabled(True)
eActionMap.getInstance().bindKey("keymap.xml", "generic", 103, 5, "ListboxActions", "moveUp")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 108, 5, "ListboxActions", "moveDown")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 105, 5, "ListboxActions", "pageUp")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 106, 5, "ListboxActions", "pageDown")
def ButtonSetupGlobal(self, key):
if self.longkeyPressed:
self.longkeyPressed = False
else:
index = 0
for x in self.list[:config.misc.ButtonSetup.additional_keys.value and len(self.ButtonSetupKeys) or 10]:
if key == x[0][1]:
self["list"].moveToIndex(index)
if key.endswith("_long"):
self.longkeyPressed = True
break
index += 1
self.getFunctions()
self.session.open(ButtonSetupSelect, self["list"].l.getCurrentSelection())
def getFunctions(self):
key = self["list"].l.getCurrentSelection()[0][1]
if key:
selected = []
for x in eval("config.misc.ButtonSetup." + key + ".value.split(',')"):
function = list(function for function in self.ButtonSetupFunctions if function[1] == x )
if function:
selected.append(ChoiceEntryComponent('',((function[0][0]), function[0][1])))
self["choosen"].setList(selected)
class ButtonSetupSelect(Screen):
def __init__(self, session, key, args=None):
Screen.__init__(self, session)
self.skinName="ButtonSetupSelect"
self['description'] = Label(_('Select the desired function and click on "OK" to assign it. Use "CH+/-" to toggle between the lists. Select an assigned function and click on "OK" to de-assign it. Use "Next/Previous" to change the order of the assigned functions.'))
self.session = session
self.key = key
self.setTitle(_("Hotkey Setup for") + ": " + key[0][0])
self["key_red"] = Button(_("Cancel"))
self["key_green"] = Button(_("Save"))
self.mode = "list"
self.ButtonSetupFunctions = getButtonSetupFunctions()
self.config = eval("config.misc.ButtonSetup." + key[0][1])
self.expanded = []
self.selected = []
for x in self.config.value.split(','):
function = list(function for function in self.ButtonSetupFunctions if function[1] == x )
if function:
self.selected.append(ChoiceEntryComponent('',((function[0][0]), function[0][1])))
self.prevselected = self.selected[:]
self["choosen"] = ChoiceList(list=self.selected, selection=0)
self["list"] = ChoiceList(list=self.getFunctionList(), selection=0)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions", "DirectionActions", "KeyboardInputActions"],
{
"ok": self.keyOk,
"cancel": self.cancel,
"red": self.cancel,
"green": self.save,
"up": self.keyUp,
"down": self.keyDown,
"left": self.keyLeft,
"right": self.keyRight,
"pageUp": self.toggleMode,
"pageDown": self.toggleMode,
"shiftUp": self.moveUp,
"shiftDown": self.moveDown,
}, -1)
self.onShown.append(self.enableKeyMap)
self.onClose.append(self.disableKeyMap)
self.onLayoutFinish.append(self.__layoutFinished)
def __layoutFinished(self):
self["choosen"].selectionEnabled(0)
def disableKeyMap(self):
globalActionMap.setEnabled(False)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 0)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 1)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 4)
eActionMap.getInstance().unbindNativeKey("ListboxActions", 5)
def enableKeyMap(self):
globalActionMap.setEnabled(True)
eActionMap.getInstance().bindKey("keymap.xml", "generic", 103, 5, "ListboxActions", "moveUp")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 108, 5, "ListboxActions", "moveDown")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 105, 5, "ListboxActions", "pageUp")
eActionMap.getInstance().bindKey("keymap.xml", "generic", 106, 5, "ListboxActions", "pageDown")
	def getFunctionList(self):
		functionslist = []
		categories = {}
		for function in self.ButtonSetupFunctions:
			if not categories.has_key(function[2]):
				categories[function[2]] = []
			categories[function[2]].append(function)
		for category in sorted(list(categories)):
			if category in self.expanded:
				functionslist.append(ChoiceEntryComponent('expanded',((category), "Expander")))
				for function in categories[category]:
					functionslist.append(ChoiceEntryComponent('verticalline',((function[0]), function[1])))
			else:
				functionslist.append(ChoiceEntryComponent('expandable',((category), "Expander")))
		return functionslist
def toggleMode(self):
if self.mode == "list" and self.selected:
self.mode = "choosen"
self["choosen"].selectionEnabled(1)
self["list"].selectionEnabled(0)
elif self.mode == "choosen":
self.mode = "list"
self["choosen"].selectionEnabled(0)
self["list"].selectionEnabled(1)
def keyOk(self):
if self.mode == "list":
currentSelected = self["list"].l.getCurrentSelection()
if currentSelected[0][1] == "Expander":
if currentSelected[0][0] in self.expanded:
self.expanded.remove(currentSelected[0][0])
else:
self.expanded.append(currentSelected[0][0])
self["list"].setList(self.getFunctionList())
else:
if currentSelected[:2] in self.selected:
self.selected.remove(currentSelected[:2])
else:
self.selected.append(currentSelected[:2])
elif self.selected:
self.selected.remove(self["choosen"].l.getCurrentSelection())
if not self.selected:
self.toggleMode()
self["choosen"].setList(self.selected)
def keyLeft(self):
self[self.mode].instance.moveSelection(self[self.mode].instance.pageUp)
def keyRight(self):
self[self.mode].instance.moveSelection(self[self.mode].instance.pageDown)
def keyUp(self):
self[self.mode].instance.moveSelection(self[self.mode].instance.moveUp)
def keyDown(self):
self[self.mode].instance.moveSelection(self[self.mode].instance.moveDown)
def moveUp(self):
self.moveChoosen(self.keyUp)
def moveDown(self):
self.moveChoosen(self.keyDown)
def moveChoosen(self, direction):
if self.mode == "choosen":
currentIndex = self["choosen"].getSelectionIndex()
swapIndex = (currentIndex + (direction == self.keyDown and 1 or -1)) % len(self["choosen"].list)
self["choosen"].list[currentIndex], self["choosen"].list[swapIndex] = self["choosen"].list[swapIndex], self["choosen"].list[currentIndex]
self["choosen"].setList(self["choosen"].list)
direction()
else:
return 0
def save(self):
configValue = []
for x in self.selected:
configValue.append(x[0][1])
self.config.value = ",".join(configValue)
self.config.save()
self.close()
def cancel(self):
if self.selected != self.prevselected:
self.session.openWithCallback(self.cancelCallback, MessageBox, _("Are you sure to cancel all changes"), default=False)
else:
self.close()
def cancelCallback(self, answer):
answer and self.close()
class ButtonSetupActionMap(ActionMap):
def action(self, contexts, action):
if (action in tuple(x[1] for x in getButtonSetupKeys()) and self.actions.has_key(action)):
res = self.actions[action](action)
if res is not None:
return res
return 1
else:
return ActionMap.action(self, contexts, action)
class helpableButtonSetupActionMap(HelpableActionMap):
def action(self, contexts, action):
if (action in tuple(x[1] for x in getButtonSetupKeys()) and self.actions.has_key(action)):
res = self.actions[action](action)
if res is not None:
return res
return 1
else:
return ActionMap.action(self, contexts, action)
class InfoBarButtonSetup():
def __init__(self):
self.ButtonSetupKeys = getButtonSetupKeys()
self["ButtonSetupButtonActions"] = helpableButtonSetupActionMap(self, "ButtonSetupActions",
dict((x[1],(self.ButtonSetupGlobal, boundFunction(self.getHelpText, x[1]))) for x in self.ButtonSetupKeys), -10)
self.longkeyPressed = False
self.onExecEnd.append(self.clearLongkeyPressed)
def clearLongkeyPressed(self):
self.longkeyPressed = False
def getKeyFunctions(self, key):
if key in ("play", "playpause", "Stop", "stop", "pause", "rewind", "next", "previous", "fastforward", "skip_back", "skip_forward") and (self.__class__.__name__ == "MoviePlayer" or hasattr(self, "timeshiftActivated") and self.timeshiftActivated()):
return False
selection = eval("config.misc.ButtonSetup." + key + ".value.split(',')")
selected = []
for x in selection:
if x.startswith("ZapPanic"):
selected.append(((_("Panic to") + " " + ServiceReference(eServiceReference(x.split("/", 1)[1]).toString()).getServiceName()), x))
elif x.startswith("Zap"):
selected.append(((_("Zap to") + " " + ServiceReference(eServiceReference(x.split("/", 1)[1]).toString()).getServiceName()), x))
else:
function = list(function for function in getButtonSetupFunctions() if function[1] == x )
if function:
selected.append(function[0])
return selected
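	# Illustrative config values parsed above (the service reference is fabricated):
	#   "Zap/1:0:19:283D:3FB:1:C00000:0:0:0:"      -> zap to that service
	#   "ZapPanic/1:0:19:283D:3FB:1:C00000:0:0:0:" -> panic-zap and clear history
	#   "Infobar/showPiP"                          -> run a mapped function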
def getHelpText(self, key):
selected = self.getKeyFunctions(key)
if not selected:
return
if len(selected) == 1:
return selected[0][0]
else:
return _("ButtonSetup") + " " + tuple(x[0] for x in self.ButtonSetupKeys if x[1] == key)[0]
def ButtonSetupGlobal(self, key):
if self.longkeyPressed:
self.longkeyPressed = False
else:
selected = self.getKeyFunctions(key)
if not selected:
return 0
elif len(selected) == 1:
if key.endswith("_long"):
self.longkeyPressed = True
return self.execButtonSetup(selected[0])
else:
key = tuple(x[0] for x in self.ButtonSetupKeys if x[1] == key)[0]
self.session.openWithCallback(self.execButtonSetup, ChoiceBox, (_("Hotkey")) + " " + key, selected)
def execButtonSetup(self, selected):
if selected:
selected = selected[1].split("/")
if selected[0] == "Plugins":
twinPlugins = []
twinPaths = {}
pluginlist = plugins.getPlugins(PluginDescriptor.WHERE_EVENTINFO)
pluginlist.sort(key=lambda p: p.name)
for plugin in pluginlist:
if plugin.name not in twinPlugins and plugin.path and 'selectedevent' not in plugin.__call__.func_code.co_varnames:
if twinPaths.has_key(plugin.path[24:]):
twinPaths[plugin.path[24:]] += 1
else:
twinPaths[plugin.path[24:]] = 1
if plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) == "/".join(selected):
self.runPlugin(plugin)
return
twinPlugins.append(plugin.name)
pluginlist = plugins.getPlugins([PluginDescriptor.WHERE_PLUGINMENU, PluginDescriptor.WHERE_EXTENSIONSMENU])
pluginlist.sort(key=lambda p: p.name)
for plugin in pluginlist:
if plugin.name not in twinPlugins and plugin.path:
if twinPaths.has_key(plugin.path[24:]):
twinPaths[plugin.path[24:]] += 1
else:
twinPaths[plugin.path[24:]] = 1
if plugin.path[24:] + "/" + str(twinPaths[plugin.path[24:]]) == "/".join(selected):
self.runPlugin(plugin)
return
twinPlugins.append(plugin.name)
elif selected[0] == "MenuPlugin":
for plugin in plugins.getPluginsForMenu(selected[1]):
if plugin[2] == selected[2]:
self.runPlugin(plugin[1])
return
elif selected[0] == "Infobar":
if hasattr(self, selected[1]):
exec "self." + ".".join(selected[1:]) + "()"
else:
return 0
elif selected[0] == "Module":
try:
exec "from " + selected[1] + " import *"
exec "self.session.open(" + ",".join(selected[2:]) + ")"
except:
print "[ButtonSetup] error during executing module %s, screen %s" % (selected[1], selected[2])
elif selected[0] == "Setup":
exec "from Screens.Setup import *"
exec "self.session.open(Setup, \"" + selected[1] + "\")"
elif selected[0].startswith("Zap"):
if selected[0] == "ZapPanic":
self.servicelist.history = []
self.pipShown() and self.showPiP()
self.servicelist.servicelist.setCurrent(eServiceReference("/".join(selected[1:])))
self.servicelist.zap(enable_pipzap = True)
if hasattr(self, "lastservice"):
self.lastservice = eServiceReference("/".join(selected[1:]))
self.close()
else:
self.show()
from Screens.MovieSelection import defaultMoviePath
moviepath = defaultMoviePath()
if moviepath:
config.movielist.last_videodir.value = moviepath
elif selected[0] == "PPanel":
ppanelFileName = '/etc/ppanels/' + selected[1] + ".xml"
if os.path.isfile(ppanelFileName) and os.path.isdir('/usr/lib/enigma2/python/Plugins/Extensions/PPanel'):
from Plugins.Extensions.PPanel.ppanel import PPanel
self.session.open(PPanel, name=selected[1] + ' PPanel', node=None, filename=ppanelFileName, deletenode=None)
elif selected[0] == "Shellscript":
command = '/usr/script/' + selected[1] + ".sh"
if os.path.isfile(command) and os.path.isdir('/usr/lib/enigma2/python/Plugins/Extensions/PPanel'):
from Plugins.Extensions.PPanel.ppanel import Execute
self.session.open(Execute, selected[1] + " shellscript", None, command)
else:
from Screens.Console import Console
exec "self.session.open(Console,_(selected[1]),[command])"
elif selected[0] == "EMC":
try:
from Plugins.Extensions.EnhancedMovieCenter.plugin import showMoviesNew
from Screens.InfoBar import InfoBar
open(showMoviesNew(InfoBar.instance))
except Exception as e:
print('[EMCPlayer] showMovies exception:\n' + str(e))
elif selected[0] == "ScriptRunner":
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/ScriptRunner.pyo"):
from Plugins.Extensions.Infopanel.ScriptRunner import ScriptRunner
self.session.open (ScriptRunner)
elif selected[0] == "QuickMenu":
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Infopanel/QuickMenu.pyo"):
from Plugins.Extensions.Infopanel.QuickMenu import QuickMenu
self.session.open (QuickMenu)
elif selected[0] == "Kodi":
if os.path.isfile("/usr/lib/enigma2/python/Plugins/Extensions/Kodi/plugin.pyo"):
from Plugins.Extensions.Kodi.plugin import KodiMainScreen
self.session.open(KodiMainScreen)
def showServiceListOrMovies(self):
if hasattr(self, "openServiceList"):
self.openServiceList()
elif hasattr(self, "showMovies"):
self.showMovies()
def ToggleLCDLiveTV(self):
config.lcd.showTv.value = not config.lcd.showTv.value
| gpl-2.0 | 6,284,683,870,160,974,000 | 45.917981 | 266 | 0.686042 | false | 3.254486 | true | false | false |
googleapis/googleapis-gen | google/ads/googleads/v7/googleads-py/google/ads/googleads/v7/errors/types/feed_attribute_reference_error.py | 1 | 1293 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v7.errors',
marshal='google.ads.googleads.v7',
manifest={
'FeedAttributeReferenceErrorEnum',
},
)
class FeedAttributeReferenceErrorEnum(proto.Message):
r"""Container for enum describing possible feed attribute
reference errors.
"""
class FeedAttributeReferenceError(proto.Enum):
r"""Enum describing possible feed attribute reference errors."""
UNSPECIFIED = 0
UNKNOWN = 1
CANNOT_REFERENCE_REMOVED_FEED = 2
INVALID_FEED_NAME = 3
INVALID_FEED_ATTRIBUTE_NAME = 4
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | -425,834,407,816,632,600 | 30.536585 | 74 | 0.702243 | false | 4.13099 | false | false | false |
examachine/pisi | pisi/exml/xmlfilepiks.py | 1 | 2519 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2005, TUBITAK/UEKAE
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
# Author: Eray Ozkural <[email protected]>
"""
XmlFile class further abstracts a dom object using the
high-level dom functions provided in xmlext module (and sorely lacking
in xml.dom :( )
function names are mixedCase for compatibility with minidom,
an 'old library'
this implementation uses piksemel
"""
import gettext
__trans = gettext.translation('pisi', fallback=True)
_ = __trans.ugettext
import codecs
import exceptions
import piksemel as iks
import pisi
from pisi.file import File
from pisi.util import join_path as join
class Error(pisi.Error):
pass
class XmlFile(object):
"""A class to help reading and writing an XML file"""
def __init__(self, tag):
self.rootTag = tag
def newDocument(self):
"""clear DOM"""
self.doc = iks.newDocument(self.rootTag)
def unlink(self):
"""deallocate DOM structure"""
del self.doc
def rootNode(self):
"""returns root document element"""
return self.doc
    def readxmlfile(self, file):
        # reading from an already-local file is not implemented yet
        raise Exception("not implemented")
        # unreachable draft kept for reference; note it should report `file`, not `localpath`:
        # try:
        #     self.doc = iks.parse(file)
        #     return self.doc
        # except Exception, e:
        #     raise Error(_("File '%s' has invalid XML") % file)
def readxml(self, uri, tmpDir='/tmp', sha1sum=False,
compress=None, sign=None, copylocal = False):
uri = File.make_uri(uri)
#try:
localpath = File.download(uri, tmpDir, sha1sum=sha1sum,
compress=compress,sign=sign, copylocal=copylocal)
#except IOError, e:
# raise Error(_("Cannot read URI %s: %s") % (uri, unicode(e)) )
try:
self.doc = iks.parse(localpath)
return self.doc
except Exception, e:
raise Error(_("File '%s' has invalid XML") % (localpath) )
def writexml(self, uri, tmpDir = '/tmp', sha1sum=False, compress=None, sign=None):
f = File(uri, File.write, sha1sum=sha1sum, compress=compress, sign=sign)
f.write(self.doc.toPrettyString())
f.close()
def writexmlfile(self, f):
f.write(self.doc.toPrettyString())
| gpl-3.0 | -4,301,337,258,087,709,000 | 27.303371 | 86 | 0.628821 | false | 3.645441 | false | false | false |
csm0042/rpihome_v3 | rpihome_v3/schedule_service/service_main.py | 1 | 6835 | #!/usr/bin/python3
""" service_main.py:
"""
# Import Required Libraries (Standard, Third Party, Local) ********************
import asyncio
import datetime
import logging
if __name__ == "__main__":
import os
import sys
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
from rpihome_v3.occupancy_service.msg_processing import create_heartbeat_msg
from rpihome_v3.occupancy_service.msg_processing import process_heartbeat_msg
from rpihome_v3.schedule_service.msg_processing import process_get_device_scheduled_state_msg
# Authorship Info *************************************************************
__author__ = "Christopher Maue"
__copyright__ = "Copyright 2017, The RPi-Home Project"
__credits__ = ["Christopher Maue"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "Christopher Maue"
__email__ = "[email protected]"
__status__ = "Development"
# Internal Service Work Task **************************************************
class MainTask(object):
def __init__(self, log, **kwargs):
# Configure logger
self.log = log or logging.getLogger(__name__)
# Define instance variables
self.ref_num = None
self.msg_in_queue = None
self.msg_out_queue = None
self.schedule = []
self.service_addresses = []
self.message_types = []
self.last_check_hb = datetime.datetime.now()
self.out_msg = str()
self.out_msg_list = []
self.next_msg = str()
self.next_msg_split = []
self.msg_source_addr = str()
self.msg_type = str()
self.destinations = []
# Map input variables
if kwargs is not None:
for key, value in kwargs.items():
if key == "ref":
self.ref_num = value
self.log.debug('Ref number generator set during __init__ '
'to: %s', self.ref_num)
if key == "schedule":
self.schedule = value
self.log.debug('Schedule set during __init__ '
'to: %s', self.schedule)
if key == "msg_in_queue":
self.msg_in_queue = value
self.log.debug('Message in queue set during __init__ '
'to: %s', self.msg_in_queue)
if key == "msg_out_queue":
self.msg_out_queue = value
self.log.debug('Message out queue set during __init__ '
'to: %s', self.msg_out_queue)
if key == "service_addresses":
self.service_addresses = value
self.log.debug('Service address list set during __init__ '
'to: %s', self.service_addresses)
if key == "message_types":
self.message_types = value
self.log.debug('Message type list set during __init__ '
'to: %s', self.message_types)
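    # Hedged construction sketch (queues and addresses are placeholders):
    #   task = MainTask(
    #       log, ref=ref_num_generator, schedule=schedule,
    #       msg_in_queue=asyncio.Queue(), msg_out_queue=asyncio.Queue(),
    #       service_addresses=service_addresses, message_types=message_types)
    #   event_loop.create_task(task.run())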
@asyncio.coroutine
def run(self):
""" task to handle the work the service is intended to do """
self.log.info('Starting schedule service main task')
while True:
# Initialize result list
self.out_msg_list = []
# INCOMING MESSAGE HANDLING
if self.msg_in_queue.qsize() > 0:
self.log.debug('Getting Incoming message from queue')
self.next_msg = self.msg_in_queue.get_nowait()
self.log.debug('Message pulled from queue: [%s]', self.next_msg)
# Determine message type
self.next_msg_split = self.next_msg.split(',')
if len(self.next_msg_split) >= 6:
self.log.debug('Extracting source address and message type')
self.msg_source_addr = self.next_msg_split[1]
self.msg_type = self.next_msg_split[5]
self.log.debug('Source Address: %s', self.msg_source_addr)
self.log.debug('Message Type: %s', self.msg_type)
# Service Check (heartbeat)
if self.msg_type == self.message_types['heartbeat']:
self.log.debug('Message is a heartbeat')
self.out_msg_list = process_heartbeat_msg(
self.log,
self.ref_num,
self.next_msg,
self.message_types)
# Device scheduled command checks
if self.msg_type == self.message_types['get_device_scheduled_state']:
self.log.debug('Message is a get device scheduled state message')
self.out_msg_list = process_get_device_scheduled_state_msg(
self.log,
self.ref_num,
self.schedule,
self.next_msg,
self.message_types)
# Que up response messages in outgoing msg que
if len(self.out_msg_list) > 0:
self.log.debug('Queueing response message(s)')
for self.out_msg in self.out_msg_list:
self.msg_out_queue.put_nowait(self.out_msg)
self.log.debug('Message [%s] successfully queued', self.out_msg)
# PERIODIC TASKS
# Periodically send heartbeats to other services
if datetime.datetime.now() >= (self.last_check_hb + datetime.timedelta(seconds=120)):
self.destinations = [
(self.service_addresses['automation_addr'],
self.service_addresses['automation_port'])
]
self.out_msg_list = create_heartbeat_msg(
self.log,
self.ref_num,
self.destinations,
self.service_addresses['schedule_addr'],
self.service_addresses['schedule_port'],
self.message_types)
# Que up response messages in outgoing msg que
if len(self.out_msg_list) > 0:
self.log.debug('Queueing response message(s)')
for self.out_msg in self.out_msg_list:
self.msg_out_queue.put_nowait(self.out_msg)
self.log.debug('Response message [%s] successfully queued',
self.out_msg)
# Update last-check
self.last_check_hb = datetime.datetime.now()
# Yield to other tasks for a while
yield from asyncio.sleep(0.25)
| gpl-3.0 | -8,828,960,547,473,434,000 | 42.814103 | 97 | 0.501244 | false | 4.353503 | false | false | false |
Ledoux/ShareYourSystem | Pythonlogy/ShareYourSystem/Standards/Classors/Switcher/Drafts/__init__ copy.py | 1 | 8024 | #<ImportSpecificModules>
import operator
from ShareYourSystem.Standards.Classors import Doer, Representer  # assumed module path in this draft copy
from ShareYourSystem.Functers import Functer, Triggerer, Hooker
BaseModuleStr = "ShareYourSystem.Functers.Functer"
DecorationModuleStr = "ShareYourSystem.Standards.Classors.Classer"
#</ImportSpecificModules>
#<DefineLocals>
SYS.setSubModule(globals())
SwitchingBeforeStr='Before'
SwitchingAfterStr='After'
SwitchingBindStr='bind'
#</DefineLocals>
#<DefineClass>
@DecorationClass()
class SwitcherClass(BaseClass):
def default_init(self,**_KwargVariablesDict):
#<DefineSpecificDo>
self.SwitchingFunction=None #<NotRepresented>
self.SwitchedFunction=None #<NotRepresented>
self.SwitchedFunctionStr="" #<NotRepresented>
self.SwitchedBoolSuffixStr="" #<NotRepresented>
self.SwitchedClassBoolKeyStr="" #<NotRepresented>
self.SwitchedInstanceBoolKeyStr="" #<NotRepresented>
#</DefineSpecificDo>
#Call the parent init method
BaseClass.__init__(self,**_KwargVariablesDict)
def __call__(self,_Variable):
#Switch
self.switch(_Variable)
#Link
self.FunctedFunction=self.SwitchedFunction
#Call the call of the parent class
return BaseClass.__call__(self,self.SwitchingFunction)
def switch(self,_Variable=None):
#set the switching Function
if self.SwitchingFunction==None:
self.SwitchingFunction=_Variable
		#set the SwitchedFunctionStr: this is the functed function name, stripped of any Functer decoration tags
self.SwitchedFunctionStr=self.SwitchingFunction.__name__.split(Functer.FunctingDecorationStr)[-1]
#debug
self.debug(('self.',self,['SwitchedFunctionStr']))
#Cut the pre attributing part if there is one
if Functer.FunctingAttributeStr in self.SwitchedFunctionStr:
self.SwitchedFunctionStr=self.SwitchedFunctionStr.split(Functer.FunctingAttributeStr)[-1]
#self.SwitchedDoneFunctionStr=Doer.getDoneStrWithDoStr(self.SwitchedFunctionStr)
#SwitchedBoolSuffixStr=self.SwitchedDoneFunctionStr[0].upper()+self.SwitchedDoneFunctionStr[1:]
self.SwitchedBoolSuffixStr=self.SwitchedFunctionStr[0].upper()+self.SwitchedFunctionStr[1:]+'Bool'
self.SwitchedInstanceBoolKeyStr='Switching'+self.SwitchedBoolSuffixStr
#self.SwitchedInstanceBoolKeyStr='SwitchedInstance'+self.SwitchedBoolSuffixStr
self.SwitchedClassBoolKeyStr='SwitchedClass'+self.SwitchedBoolSuffixStr
#debug
self.debug(('self.',self,['SwitchedInstanceBoolKeyStr','SwitchedClassBoolKeyStr']))
#Definition the SwitchedFunction
def SwitchedFunction(*_LiargVariablesList,**_KwargVariablesDict):
#Alias
InstanceVariable=_LiargVariablesList[0]
#Append for debbuging
#if hasattr(InstanceVariable,'DebuggingNotFrameFunctionStrsList'):
# if 'SwitchedFunction' not in InstanceVariable.DebuggingNotFrameFunctionStrsList:
# InstanceVariable.DebuggingNotFrameFunctionStrsList.append('SwitchedFunction')
#debug
'''
self.debug(
[
('self.',self,['SwitchedClassBoolKeyStr','SwitchedInstanceBoolKeyStr']),
Representer.represent(InstanceVariable,**{'RepresentingAlineaIsBool':False})
]
)
'''
#set the SwitchedBool if it was not already
if hasattr(InstanceVariable,self.SwitchedInstanceBoolKeyStr)==False:
#debug
'''
self.debug('The InstanceVariable has not the SwitchedBoolSuffixStr..so set it to False')
'''
#set
InstanceVariable.__setattr__(self.SwitchedInstanceBoolKeyStr,False)
elif getattr(InstanceVariable,self.SwitchedInstanceBoolKeyStr):
#debug
'''
self.debug('The Instance has already done this method')
'''
#Return
return InstanceVariable
#debug
'''
self.debug(('self.',self,['SwitchedBoolSuffixStr']))
'''
#At the level of the class set the new binding set function
if hasattr(InstanceVariable.__class__,self.SwitchedClassBoolKeyStr)==False:
#Definition the binding function that will call the init one
def bindBefore(*_TriggeringVariablesList,**_TriggeringVariablesDict):
#Alias
TriggeredInstanceVariable=_TriggeringVariablesList[0]
#debug
'''
self.debug('Reinit with '+Representer.represent(
TriggeredInstanceVariable.SettingKeyVariable,**{'RepresentingAlineaIsBool':False}
)
)
'''
#Definition the init method to trigger
SwitchedInitMethod=Functer.getFunctingFunctionWithFuncFunction(
TriggeredInstanceVariable.__class__.init
)
#debug
'''
self.debug(
[
'SwitchedInitMethod is '+str(SwitchedInitMethod),
"SwitchedInitMethod.func_globals['__file__'] is "+SwitchedInitMethod.func_globals['__file__']
]
)
'''
#Call the init method (just at the level of this class definition) (so IMPORTANT this is init not __init__)
SwitchedInitMethod(TriggeredInstanceVariable)
#set the name
TriggeredBeforeMethodStr='bindBeforeWith'+self.SwitchedBoolSuffixStr
bindBefore.__name__=TriggeredBeforeMethodStr
#debug
'''
self.debug(
[
("self.",self,['SwitchedDoneFunctionStr','SwitchedBoolSuffixStr']),
("TriggeredMethodStr is "+TriggeredMethodStr)
]
)
'''
#Link the bindBefore function
setattr(
InstanceVariable.__class__,
TriggeredBeforeMethodStr,
Triggerer.TriggererClass(**
{
'TriggeringConditionVariable':[
(
'SettingKeyVariable',
(operator.eq,self.SwitchedInstanceBoolKeyStr)
),
(
self.SwitchedInstanceBoolKeyStr,
(operator.eq,True)
),
('SettingValueVariable',(operator.eq,False))
],
'TriggeringHookStr':"Before"
}
)(bindBefore)
)
#Call with a default instance this bind function to be installed
getattr(InstanceVariable.__class__(),TriggeredBeforeMethodStr)()
'''
#Definition the binding function that will set the switched bool to True
def bindAfter(*_TriggeringVariablesList,**_TriggeringVariablesDict):
#Alias
TriggeredInstanceVariable=_TriggeringVariablesList[0]
#Say that it is ok
setattr(TriggeredInstanceVariable,self.SwitchedInstanceBoolKeyStr,False)
setattr(TriggeredInstanceVariable,self.SwitchedInstanceBoolKeyStr,True)
#set the name
TriggeredAfterMethodStr='bindAfterWith'+self.SwitchedBoolSuffixStr
bindAfter.__name__=TriggeredAfterMethodStr
#Link the bindAfter function
setattr(
InstanceVariable.__class__,
TriggeredAfterMethodStr,
Triggerer.TriggererClass(**
{
'TriggeringConditionVariable':[
(
'SettingKeyVariable',
(operator.eq,self.SwitchedInstanceBoolKeyStr)
),
(
self.SwitchedInstanceBoolKeyStr,
(operator.eq,True)
),
('SettingValueVariable',(operator.eq,False))
],
'TriggeringHookStr':"After"
}
)(bindAfter)
)
#Call with a default instance this bind function to be installed
getattr(InstanceVariable.__class__(),TriggeredAfterMethodStr)()
'''
#Say that it is ok
setattr(InstanceVariable.__class__,self.SwitchedClassBoolKeyStr,True)
#debug
'''
self.debug(
[
#('InstanceVariable is '+SYS._str(InstanceVariable)),
('_LiargVariablesList is '+str(_LiargVariablesList))
]
)
'''
#Call the SwitchingFunction
self.SwitchingFunction(*_LiargVariablesList,**_KwargVariablesDict)
#debug
'''
self.debug(('self.',self,['SwitchedBoolSuffixStr']))
'''
#set True for the Bool after the call
InstanceVariable.__setattr__(self.SwitchedInstanceBoolKeyStr,True)
#debug
'''
self.debug(('InstanceVariable.',InstanceVariable,[self.SwitchedBoolSuffixStr]))
'''
#Return self for the wrapped method call
return InstanceVariable
#set
self.SwitchedFunction=SwitchedFunction
#Return self
return self
#</DefineClass>
| mit | 331,411,898,130,539,900 | 28.284672 | 112 | 0.692672 | false | 3.575758 | false | false | false |
bokeh-cookbook/bokeh-cookbook | plugins/ipynb/markup.py | 1 | 5935 | from __future__ import absolute_import, print_function, division
import os
import json
try:
# Py3k
from html.parser import HTMLParser
except ImportError:
# Py2.7
from HTMLParser import HTMLParser
from pelican import signals
from pelican.readers import MarkdownReader, HTMLReader, BaseReader
from .ipynb import get_html_from_filepath, fix_css
def register():
"""
Register the new "ipynb" reader
"""
def add_reader(arg):
arg.settings["READERS"]["ipynb"] = IPythonNB
signals.initialized.connect(add_reader)
class IPythonNB(BaseReader):
"""
    Extend Pelican's BaseReader so that `.ipynb` files can be recognized
    as a markup language:
Setup:
`pelicanconf.py`:
```
MARKUP = ('md', 'ipynb')
```
"""
enabled = True
file_extensions = ['ipynb']
def read(self, filepath):
metadata = {}
metadata['ipython'] = True
# Files
filedir = os.path.dirname(filepath)
filename = os.path.basename(filepath)
metadata_filename = filename.split('.')[0] + '.ipynb-meta'
metadata_filepath = os.path.join(filedir, metadata_filename)
if os.path.exists(metadata_filepath):
# Metadata is on a external file, process using Pelican MD Reader
md_reader = MarkdownReader(self.settings)
_content, metadata = md_reader.read(metadata_filepath)
else:
# Load metadata from ipython notebook file
ipynb_file = open(filepath)
notebook_metadata = json.load(ipynb_file)['metadata']
# Change to standard pelican metadata
for key, value in notebook_metadata.items():
key = key.lower()
if key in ("title", "date", "category", "tags", "slug", "author"):
metadata[key] = self.process_metadata(key, value)
keys = [k.lower() for k in metadata.keys()]
if not set(['title', 'date']).issubset(set(keys)):
# Probably using ipynb.liquid mode
md_filename = filename.split('.')[0] + '.md'
md_filepath = os.path.join(filedir, md_filename)
if not os.path.exists(md_filepath):
raise Exception("Could not find metadata in `.ipynb-meta`, inside `.ipynb` or external `.md` file.")
else:
raise Exception("""Could not find metadata in `.ipynb-meta` or inside `.ipynb` but found `.md` file,
assuming that this notebook is for liquid tag usage if true ignore this error""")
content, info = get_html_from_filepath(filepath)
# Generate Summary: Do it before cleaning CSS
if 'summary' not in [key.lower() for key in self.settings.keys()]:
parser = MyHTMLParser(self.settings, filename)
if hasattr(content, 'decode'): # PY2
content = '<body>%s</body>' % content.encode('utf-8')
content = content.decode("utf-8")
else:
content = '<body>%s</body>' % content
parser.feed(content)
parser.close()
content = parser.body
if ('IPYNB_USE_META_SUMMARY' in self.settings.keys() and self.settings['IPYNB_USE_META_SUMMARY'] is False) or 'IPYNB_USE_META_SUMMARY' not in self.settings.keys():
metadata['summary'] = parser.summary
content = fix_css(content, info)
return content, metadata
class MyHTMLParser(HTMLReader._HTMLParser):
"""
Custom Pelican `HTMLReader._HTMLParser` to create the summary of the content
based on settings['SUMMARY_MAX_LENGTH'].
    The summary is stopped if it finds any div containing ipython notebook code cells.
    This is needed in order to generate valid HTML for the summary;
    a simple string split would break the HTML, generating errors on the theme.
    The downside is that the summary length is not exactly as specified: it stops at
    complete div/p/li/etc. tags.
"""
def __init__(self, settings, filename):
HTMLReader._HTMLParser.__init__(self, settings, filename)
self.settings = settings
self.filename = filename
self.wordcount = 0
self.summary = None
self.stop_tags = [('div', ('class', 'input')), ('div', ('class', 'output')), ('h2', ('id', 'Header-2'))]
if 'IPYNB_STOP_SUMMARY_TAGS' in self.settings.keys():
self.stop_tags = self.settings['IPYNB_STOP_SUMMARY_TAGS']
if 'IPYNB_EXTEND_STOP_SUMMARY_TAGS' in self.settings.keys():
self.stop_tags.extend(self.settings['IPYNB_EXTEND_STOP_SUMMARY_TAGS'])
def handle_starttag(self, tag, attrs):
HTMLReader._HTMLParser.handle_starttag(self, tag, attrs)
if self.wordcount < self.settings['SUMMARY_MAX_LENGTH']:
mask = [stoptag[0] == tag and (stoptag[1] is None or stoptag[1] in attrs) for stoptag in self.stop_tags]
if any(mask):
self.summary = self._data_buffer
self.wordcount = self.settings['SUMMARY_MAX_LENGTH']
def handle_endtag(self, tag):
HTMLReader._HTMLParser.handle_endtag(self, tag)
if self.wordcount < self.settings['SUMMARY_MAX_LENGTH']:
self.wordcount = len(strip_tags(self._data_buffer).split(' '))
if self.wordcount >= self.settings['SUMMARY_MAX_LENGTH']:
self.summary = self._data_buffer
def strip_tags(html):
"""
Strip html tags from html content (str)
Useful for summary creation
"""
s = HTMLTagStripper()
s.feed(html)
return s.get_data()
class HTMLTagStripper(HTMLParser):
"""
Custom HTML Parser to strip HTML tags
Useful for summary creation
"""
def __init__(self):
HTMLParser.__init__(self)
self.reset()
self.fed = []
def handle_data(self, html):
self.fed.append(html)
def get_data(self):
return ''.join(self.fed)
| agpl-3.0 | -8,813,531,780,843,853,000 | 34.969697 | 175 | 0.609436 | false | 3.975218 | false | false | false |
mtlynch/ndt-e2e-clientworker | client_wrapper/install_selenium_extensions.py | 1 | 3193 | import argparse
import os
import platform
import urllib
import tempfile
import names
driver_urls = {
'chrome_os_x': {
'url':
'http://chromedriver.storage.googleapis.com/2.21/chromedriver_mac32.zip',
'file_name': 'chromedriver_mac32.zip'
},
'chrome_ubuntu': {
'url':
'http://chromedriver.storage.googleapis.com/2.21/chromedriver_linux64.zip',
'file_name': 'chromedriver_linux64.zip'
},
'chrome_windows_10': {
'url':
'http://chromedriver.storage.googleapis.com/2.21/chromedriver_win32.zip',
'file_name': 'chromedriver_win32.zip'
},
'edge_windows_10': {
'url':
'https://download.microsoft.com/download/8/D/0/8D0D08CF-790D-4586-B726-C6469A9ED49C/MicrosoftWebDriver.msi',
'file_name': 'MicrosoftWebDriver.msi'
},
'safari_os_x': {
'url':
'http://selenium-release.storage.googleapis.com/2.48/SafariDriver.safariextz',
'file_name': 'SafariDriver.safariextz',
}
}
def _download_chrome_drivers():
"""Downloads Chrome drivers for Selenium."""
# Mac OS X
if platform.system() == 'Darwin':
remote_file = driver_urls['chrome_os_x']
elif platform.system() == 'Linux':
remote_file = driver_urls['chrome_ubuntu']
elif platform.system() == 'Windows':
remote_file = driver_urls['chrome_windows_10']
else:
raise ValueError('Unsupported OS specified: %s' % (platform.system()))
_download_temp_file(remote_file['url'], remote_file['file_name'])
def _download_temp_file(url, file_name):
"""Downloads file into temp directory.
Args:
url: A string representing the URL the file is to be downloaded from.
file_name: A string representing the name of the file to be downloaded.
"""
temp_dir = tempfile.mkdtemp()
download_path = os.path.join(temp_dir, file_name)
print('File downloading to %s' % download_path)
urllib.URLopener().retrieve(url, download_path)
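
# NOTE (editor's sketch, not part of the original script): urllib.URLopener is
# the Python 2 API; on Python 3 urllib.request would be used instead. A
# hypothetical direct invocation of the helper looks like:
#
#   remote_file = driver_urls['chrome_ubuntu']
#   _download_temp_file(remote_file['url'], remote_file['file_name'])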
def _download_edge_drivers():
"""Downloads Edge drivers for Selenium."""
remote_file = driver_urls['edge_windows_10']
_download_temp_file(remote_file['url'], remote_file['file_name'])
def _download_safari_drivers():
"""Downloads Safari drivers for Selenium."""
remote_file = driver_urls['safari_os_x']
_download_temp_file(remote_file['url'], remote_file['file_name'])
def main(args):
if args.browser == names.CHROME:
_download_chrome_drivers()
elif args.browser == names.EDGE:
_download_edge_drivers()
elif args.browser == names.SAFARI:
_download_safari_drivers()
elif args.browser == names.FIREFOX:
pass
else:
raise ValueError('Unsupported browser specified: %s' % (args.browser))
if __name__ == '__main__':
parser = argparse.ArgumentParser(
prog='NDT E2E Testing Client Selenium Extension Installer',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--browser',
help='Browser to run under (for browser-based client)',
choices=('chrome', 'firefox', 'safari', 'edge'))
main(parser.parse_args())
| apache-2.0 | 7,890,214,524,247,196,000 | 31.917526 | 116 | 0.634826 | false | 3.524283 | false | false | false |
cwacek/python-jsonschema-objects | test/test_regression_126.py | 1 | 1829 | import pytest
import python_jsonschema_objects as pjs
import collections
@pytest.fixture
def schema():
return {
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Test",
"definitions": {
"MyEnum1": {"type": "string", "enum": ["E_A", "E_B"]},
"MyEnum2": {"type": "string", "enum": ["F_A", "F_B", "F_C", "F_D"]},
"MyInt": {
"default": "0",
"type": "integer",
"minimum": 0,
"maximum": 4294967295,
},
"MyObj1": {
"type": "object",
"properties": {
"e1": {"$ref": "#/definitions/MyEnum1"},
"e2": {"$ref": "#/definitions/MyEnum2"},
"i1": {"$ref": "#/definitions/MyInt"},
},
"required": ["e1", "e2", "i1"],
},
"MyArray": {
"type": "array",
"items": {"$ref": "#/definitions/MyObj1"},
"minItems": 0,
"uniqueItems": True,
},
"MyMsg1": {
"type": "object",
"properties": {"a1": {"$ref": "#/definitions/MyArray"}},
},
"MyMsg2": {"type": "object", "properties": {"s1": {"type": "string"}}},
},
"type": "object",
"oneOf": [{"$ref": "#/definitions/MyMsg1"}, {"$ref": "#/definitions/MyMsg2"}],
}
def test_regression_126(schema):
builder = pjs.ObjectBuilder(schema)
ns = builder.build_classes(standardize_names=False)
Obj1 = ns.MyObj1
Array1 = ns.MyArray
Msg1 = ns.MyMsg1
o1 = Obj1(e1="E_A", e2="F_C", i1=2600)
o2 = Obj1(e1="E_B", e2="F_D", i1=2500)
objs = Array1([o1, o2])
msg = Msg1(a1=objs)
print(msg.serialize())
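    # Editor's sketch (not part of the original test): the serialized output
    # should at least round-trip through the standard json module, e.g.
    #   import json
    #   assert json.loads(msg.serialize())['a1']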
| mit | -1,888,342,341,075,785,700 | 30.534483 | 86 | 0.42865 | false | 3.362132 | false | false | false |
Krakn/learning | src/python/advent_of_code/2017/05/a_maze_of_twisty_trampolines_all_alike.py | 1 | 3322 | #!/usr/bin/env python3
'''
--- Day 5: A Maze of Twisty Trampolines, All Alike ---
'''
def load_input(filename):
'''
Parse input file, returning an array of maze offsets.
'''
maze = list()
with open(filename, 'r') as file_input:
for line in file_input.readlines():
maze.append(int(line.strip()))
return maze
def part1(maze):
'''
--- Part 1 ---
An urgent interrupt arrives from the CPU: it's trapped in a maze of jump
instructions, and it would like assistance from any programs with spare
cycles to help find the exit.
The message includes a list of the offsets for each jump. Jumps are
relative: -1 moves to the previous instruction, and 2 skips the next one.
Start at the first instruction in the list. The goal is to follow the jumps
until one leads outside the list.
In addition, these instructions are a little strange; after each jump, the
offset of that instruction increases by 1. So, if you come across an offset
of 3, you would move three instructions forward, but change it to a 4 for
the next time it is encountered.
For example, consider the following list of jump offsets:
    0 3 0 1 -3

    Positive jumps ("forward") move downward; negative jumps move upward.
    For legibility in this example, these offset values will be written all
    on one line, with the current instruction marked in parentheses. The
    following steps would be taken before an exit is found:
(0) 3 0 1 -3 - Before we have taken any steps.
(1) 3 0 1 -3 - Jump with offset 0 (that is, don't jump at all).
Fortunately, the instruction is then incremented
to 1.
2 (3) 0 1 -3 - Step forward because of the instruction we just modified.
The first instruction is incremented again, now to 2.
2 4 0 1 (-3) - Jump all the way to the end; leave a 4 behind.
2 (4) 0 1 -2 - Go back to where we just were; increment -3 to -2.
2 5 0 1 -2 - Jump 4 steps forward, escaping the maze. In this
example, the exit is reached in 5 steps.
How many steps does it take to reach the exit?
'''
index = 0
steps = 0
while index >= 0 and index < len(maze):
maze[index] += 1
index = index + maze[index] - 1
steps += 1
return steps
def part2(maze):
'''
--- Part Two ---
Now, the jumps are even stranger: after each jump, if the offset was three
or more, instead decrease it by 1. Otherwise, increase it by 1 as before.
Using this rule with the above example, the process now takes 10 steps,
and the offset values after finding the exit are left as 2 3 2 3 -1.
How many steps does it now take to reach the exit?
'''
index = 0
steps = 0
while index >= 0 and index < len(maze):
if maze[index] >= 3:
maze[index] -= 1
index = index + maze[index] + 1
else:
maze[index] += 1
index = index + maze[index] - 1
steps += 1
return steps
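
def _self_test():
    '''
    Editor's sketch (not part of the original solution): checks both parts
    against the worked example from the puzzle text above. Call manually.
    '''
    assert part1([0, 3, 0, 1, -3]) == 5
    assert part2([0, 3, 0, 1, -3]) == 10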
if __name__ == "__main__":
MAZE1 = load_input('input.txt')
MAZE2 = load_input('input.txt')
print("Part 1:", part1(MAZE1))
print("Part 2:", part2(MAZE2))
| isc | -1,629,784,482,675,308,500 | 33.604167 | 79 | 0.609573 | false | 3.805269 | false | false | false |
winterbird-code/adbb | adbb/__init__.py | 1 | 2124 | #!/usr/bin/env python
#
# This file is part of adbb.
#
# adbb is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# adbb is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with adbb. If not, see <http://www.gnu.org/licenses/>.
import multiprocessing
import logging
import logging.handlers
import sys
import adbb.db
from adbb.link import AniDBLink
from adbb.animeobjs import Anime, AnimeTitle, Episode, File
from adbb.anames import get_titles
anidb_client_name = "adbb"
anidb_client_version = 2
anidb_api_version = 3
log = None
_anidb = None
_sessionmaker = None
def init(
anidb_user,
anidb_pwd,
sql_db_url,
debug=False,
loglevel='info',
logger=None,
outgoing_udp_port=9876):
if logger is None:
logger = logging.getLogger(__name__)
logger.setLevel(loglevel.upper())
if debug:
logger.setLevel(logging.DEBUG)
lh = logging.StreamHandler()
lh.setFormatter(logging.Formatter(
'%(asctime)s %(levelname)s %(filename)s:%(lineno)d - %(message)s'))
logger.addHandler(lh)
lh = logging.handlers.SysLogHandler(address='/dev/log')
lh.setFormatter(logging.Formatter(
'adbb %(filename)s/%(funcName)s:%(lineno)d - %(message)s'))
logger.addHandler(lh)
global log, _anidb, _sessionmaker
log = logger
_sessionmaker = adbb.db.init_db(sql_db_url)
_anidb = adbb.link.AniDBLink(
anidb_user,
anidb_pwd,
myport=outgoing_udp_port)
def get_session():
return _sessionmaker()
def close_session(session):
session.close()
def close():
global _anidb
_anidb.stop()
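
# Editor's sketch (not part of the original module): typical usage, assuming
# valid AniDB credentials and a reachable SQL database.
#
#   import adbb
#   adbb.init('anidb_user', 'anidb_pwd', 'sqlite:///adbb.db')
#   session = adbb.get_session()
#   ...
#   adbb.close_session(session)
#   adbb.close()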
| gpl-3.0 | -7,247,581,232,844,061,000 | 25.222222 | 83 | 0.663842 | false | 3.50495 | false | false | false |
jcfr/mystic | examples/TEST_ffitPP2_b.py | 1 | 1429 | #!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 1997-2015 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
"""
Testing the polynomial fitting problem of [1] using scipy's Nelder-Mead algorithm.
Reference:
[1] Storn, R. and Price, K. Differential Evolution - A Simple and Efficient
Heuristic for Global Optimization over Continuous Spaces. Journal of Global
Optimization 11: 341-359, 1997.
"""
from test_ffit import Chebyshev8, plot_solution, print_solution
from TEST_ffitPP_b import ChebyshevCost
if __name__ == '__main__':
import random
from mystic.solvers import fmin
#from mystic._scipyoptimize import fmin
from mystic.tools import random_seed
random_seed(123)
import pp
import sys
if len(sys.argv) > 1:
tunnelport = sys.argv[1]
ppservers = ("localhost:%s" % tunnelport,)
else:
ppservers = ()
myserver = pp.Server(ppservers=ppservers)
trials = []
for trial in range(8):
x = tuple([random.uniform(-100,100) + Chebyshev8[i] for i in range(9)])
trials.append(x)
results = [myserver.submit(fmin,(ChebyshevCost,x),(),()) for x in trials]
for solution in results:
print_solution(solution())
#plot_solution(solution)
# end of file
| bsd-3-clause | 4,351,184,427,107,404,300 | 27.58 | 82 | 0.687194 | false | 3.277523 | false | false | false |
Aloomaio/googleads-python-lib | examples/ad_manager/v201805/creative_service/create_creative_from_template.py | 1 | 3666 | #!/usr/bin/env python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates a new template creative for a given advertiser.
To determine which companies are advertisers, run get_advertisers.py.
To determine which creative templates exist, run
get_all_creative_templates.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
import os
import uuid
# Import appropriate modules from the client library.
from googleads import ad_manager
# Set id of the advertiser (company) that the creative will be assigned to.
ADVERTISER_ID = 'INSERT_ADVERTISER_COMPANY_ID_HERE'
def main(client, advertiser_id):
# Initialize appropriate service.
creative_service = client.GetService('CreativeService', version='v201805')
# Use the image banner with optional third party tracking template.
creative_template_id = '10000680'
# Create image asset.
file_name = 'image%s.jpg' % uuid.uuid4()
  with open(os.path.join(os.path.split(__file__)[0], '..', '..', 'data',
                         'medium_rectangle.jpg'), 'rb') as image_file:
    image_data = image_file.read()
size = {
'width': '300',
'height': '250'
}
asset = {
'xsi_type': 'CreativeAsset',
'fileName': file_name,
'assetByteArray': image_data,
'size': size
}
# Create creative from templates.
creative = {
'xsi_type': 'TemplateCreative',
'name': 'Template Creative #%s' % uuid.uuid4(),
'advertiserId': advertiser_id,
'size': size,
'creativeTemplateId': creative_template_id,
'creativeTemplateVariableValues': [
{
'xsi_type': 'AssetCreativeTemplateVariableValue',
'uniqueName': 'Imagefile',
'asset': asset
},
{
'xsi_type': 'LongCreativeTemplateVariableValue',
'uniqueName': 'Imagewidth',
'value': '300'
},
{
'xsi_type': 'LongCreativeTemplateVariableValue',
'uniqueName': 'Imageheight',
'value': '250'
},
{
'xsi_type': 'UrlCreativeTemplateVariableValue',
'uniqueName': 'ClickthroughURL',
'value': 'www.google.com'
},
{
'xsi_type': 'StringCreativeTemplateVariableValue',
'uniqueName': 'Targetwindow',
'value': '_blank'
}
]
}
# Call service to create the creative.
creative = creative_service.createCreatives([creative])[0]
# Display results.
print ('Template creative with id "%s", name "%s", and type "%s" was '
'created and can be previewed at %s.'
% (creative['id'], creative['name'],
ad_manager.AdManagerClassType(creative), creative['previewUrl']))
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client, ADVERTISER_ID)
| apache-2.0 | 1,060,630,061,611,670,300 | 32.027027 | 80 | 0.638298 | false | 3.887593 | false | false | false |
eduble/panteda | operators/map/heatmap.py | 1 | 4193 | #!/usr/bin/env python3
import numpy as np
import numpy.random
from time import time
# web mercator projection functions
# ---------------------------------
def linear_lat(lat, atanh = np.arctanh, sin = np.sin, radians = np.radians):
return atanh(sin(radians(lat)))
def inv_linear_lat(ll, asin = np.arcsin, tanh = np.tanh, degrees = np.degrees):
return degrees(asin(tanh(ll)))
def lng_to_x(w, lng_min, lng_max, lng):
return (lng - lng_min) * (w / (lng_max - lng_min))
def lat_to_y(h, lat_min, lat_max, lat):
return (linear_lat(lat) - linear_lat(lat_min)) * (h / (linear_lat(lat_max) - linear_lat(lat_min)))
def x_to_lng(w, lng_min, lng_max, x):
return x * ((lng_max - lng_min)/w) + lng_min
def y_to_lat(h, lat_min, lat_max, y):
return inv_linear_lat(y * ((linear_lat(lat_max) - linear_lat(lat_min))/h) + linear_lat(lat_min))
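
# Editor's sketch (not part of the original operator): the two pairs of
# functions are inverses, e.g. for a 256x256 map covering the whole world:
#
#   w = h = 256
#   assert abs(x_to_lng(w, -180, 180, lng_to_x(w, -180, 180, 2.35)) - 2.35) < 1e-9
#   assert abs(y_to_lat(h, -85, 85, lat_to_y(h, -85, 85, 48.85)) - 48.85) < 1e-9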
# heatmap data generation
# -----------------------
class HeatMap:
def __init__(self, lnglat, width, height, westlng, eastlng, southlat, northlat):
# compute pixel bounds of the map
x = np.append(np.arange(0, width, 5), width)
y = np.append(np.arange(0, height, 5), height)
# project pixel bounds coordinates (x, y -> lng, lat)
edgelng = x_to_lng(width, westlng, eastlng, x)
centerlng = x_to_lng(width, westlng, eastlng, (x[1:] + x[:-1])/2)
edgelat = y_to_lat(height, southlat, northlat, y)
centerlat = y_to_lat(height, southlat, northlat, (y[1:] + y[:-1])/2)
# prepare computation parameters
self.bins = edgelng, edgelat
self.range = (westlng, eastlng), (southlat, northlat)
self.iterator = lnglat.chunks()
self.heatmap = None
# prepare compression parameters
scalelat = (edgelat[1:] - edgelat[:-1]).min() / 2
self.approx_centerlat = numpy.rint((centerlat - centerlat[0]) / scalelat)
scalelng = edgelng[1] - edgelng[0] # longitude is linear
self.approx_centerlng = numpy.rint((centerlng - centerlng[0]) / scalelng)
self.scales = dict(lat=scalelat, lng=scalelng)
self.offsets = dict(lat=centerlat[0], lng=centerlng[0])
# stream status parameters
self.done = False
def compute(self, time_credit):
# make histogram:
# - create a pixel grid
# - given a tuple (lng, lat) increment the corresponding pixel
deadline = time() + time_credit
deadline_reached = False
for chunk in self.iterator:
lng, lat = chunk.columns
chunk_heatmap = np.histogram2d(lng, lat, bins=self.bins, range=self.range)[0]
if self.heatmap is None:
self.heatmap = chunk_heatmap.T
else:
self.heatmap += chunk_heatmap.T
if time() > deadline:
deadline_reached = True
break
if not deadline_reached:
# we left the loop because of the end of iteration
self.done = True
    # get sparse matrix representation: (lat, lng, intensity) tuples.
    # in order to lower network usage, we will transfer this data in a
    # compressed form: lng & lat values will be transferred as integers
    # together with a scaling factor and an offset to be applied.
def compressed_form(self):
# count number of points
count = int(self.heatmap.sum())
if count == 0:
# if no points, return empty data
data = dict(lat = [], lng = [], val = [])
else:
# apply threshold and
# compute approximated sparse matrix data
nonzero_xy = ((self.heatmap / self.heatmap.max()) > 0.05).nonzero()
nonzero_x = nonzero_xy[1]
nonzero_y = nonzero_xy[0]
data = dict(
lat = self.approx_centerlat[nonzero_y].astype(int).tolist(),
lng = self.approx_centerlng[nonzero_x].astype(int).tolist(),
val = self.heatmap[nonzero_xy].astype(int).tolist()
)
return dict(
data = data,
scales = self.scales,
offsets = self.offsets,
count = count,
done = self.done
)
| gpl-3.0 | -2,101,006,383,756,166,100 | 41.785714 | 102 | 0.577152 | false | 3.494167 | false | false | false |
mohitreddy1996/Gender-Detection-from-Signature | src/train_test/random_forests.py | 1 | 1140 | from sklearn.metrics import precision_recall_fscore_support
import pandas as pd
import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import MinMaxScaler, normalize
df = pd.read_csv('../../Dataset/dataset.csv', delimiter='\t')
dataset = df.values
mask = np.random.rand(len(df)) < .80
train = df[mask]
test = df[~mask]
X = train.ix[:, 2:len(train.columns) - 1]
Y = train.ix[:, len(train.columns) - 1: len(train.columns)]
# After Normalising
X_standard = normalize(X)
print X_standard.shape
X_Test = test.ix[:, 2:len(test.columns) - 1]
Y_Test = test.ix[:, len(test.columns) - 1: len(test.columns)]
X_Test_standard = normalize(X_Test)
print X_Test_standard.shape
print "Training Data Set Size : ", str(len(X))
print "Testing Data Set Size : ", str(len(X_Test))
# tune parameters here.
rf = RandomForestClassifier(n_estimators=150, max_features=20)
# sklearn expects a 1d array of labels; ravel the single-column frame
rf.fit(X_standard, Y.values.ravel())
# predict
Y_Result = rf.predict(X_Test_standard)
print precision_recall_fscore_support(Y_Test, Y_Result, average='micro')
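
# Editor's sketch (not part of the original script): overall accuracy is a
# useful companion metric here.
#
#   from sklearn.metrics import accuracy_score
#   print accuracy_score(Y_Test, Y_Result)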
| mit | 5,244,072,690,348,756,000 | 20.923077 | 72 | 0.711404 | false | 2.961039 | true | true | false |
aquaya/ivrhub | ivrhub/models.py | 1 | 3129 | ''' mongoengine models
'''
from mongoengine import *
class User(Document):
''' some are admins some are not
'''
admin_rights = BooleanField(required=True)
api_id = StringField()
api_key = StringField()
email = EmailField(required=True, unique=True, max_length=254)
email_confirmation_code = StringField(required=True)
email_confirmed = BooleanField(required=True)
forgot_password_code = StringField()
last_login_time = DateTimeField(required=True)
name = StringField()
organizations = ListField(ReferenceField('Organization'))
password_hash = StringField(required=True)
registration_time = DateTimeField(required=True)
verified = BooleanField(required=True)
class Organization(Document):
''' people join orgs
'''
description = StringField(default='')
# url-safe version of the name
label = StringField(unique=True, required=True)
location = StringField(default='')
name = StringField(unique=True, required=True)
class Form(Document):
''' the heart of the system
'''
# unique code for requesting this form via sms or a call
calling_code = StringField()
creation_time = DateTimeField()
creator = ReferenceField(User)
description = StringField(default = '')
# url-safe version of the name
label = StringField(unique_with='organization')
language = StringField(default = '')
name = StringField(unique_with='organization')
organization = ReferenceField(Organization)
# have to store questions here as well so we know the order
questions = ListField(ReferenceField('Question'))
class Question(Document):
''' connected to forms
'''
audio_filename = StringField()
audio_url = StringField()
creation_time = DateTimeField()
description = StringField()
form = ReferenceField(Form)
# url-safe version of the name
label = StringField(unique_with='form')
name = StringField(unique_with='form')
# 'text_prompt', 'audio_file' or 'audio_url'
prompt_type = StringField(default='text_prompt')
# 'keypad' or 'voice' or 'no response'
response_type = StringField(default='keypad')
s3_key = StringField()
s3_url = StringField()
text_prompt = StringField()
text_prompt_language = StringField(default='en')
class Response(Document):
''' individual response to a form
'''
call_sid = StringField()
completion_time = DateTimeField()
form = ReferenceField(Form)
# whether this was a 'call' or 'ringback' or 'scheduled call'
initiated_using = StringField()
initiation_time = DateTimeField()
# track the progress of the response
last_question_asked = ReferenceField(Question)
# any notes about the response as a whole
notes = StringField()
respondent_phone_number = StringField()
class Answer(Document):
''' connected to questions and responses
'''
audio_url = StringField()
keypad_input = StringField()
# any notes on this answer (like a transcription)
notes = StringField()
question = ReferenceField(Question)
response = ReferenceField(Response)
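
# Editor's sketch (not part of the original module): creating a minimal
# organization and form, assuming a mongoengine connection is registered.
#
#   org = Organization(name='Aquaya', label='aquaya')
#   org.save()
#   form = Form(name='Water survey', label='water-survey', organization=org)
#   form.save()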
| mit | 7,868,206,963,524,489,000 | 31.59375 | 66 | 0.686801 | false | 4.298077 | false | false | false |
tzangms/PyConTW | pycon_project/biblion/views.py | 1 | 3501 | from datetime import datetime
from django.core.urlresolvers import reverse
from django.http import HttpResponse, Http404
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.template.loader import render_to_string
from django.utils import simplejson as json
from django.contrib.sites.models import Site
from biblion.exceptions import InvalidSection
from biblion.models import Post, FeedHit
from biblion.settings import ALL_SECTION_NAME
def blog_index(request):
posts = Post.objects.current()
posts = posts.filter(language=request.LANGUAGE_CODE)
return render_to_response("biblion/blog_list.html", {
"posts": posts,
}, context_instance=RequestContext(request))
def blog_section_list(request, section):
try:
posts = Post.objects.section(section)
except InvalidSection:
raise Http404()
return render_to_response("biblion/blog_section_list.html", {
"section_slug": section,
"section_name": dict(Post.SECTION_CHOICES)[Post.section_idx(section)],
"posts": posts,
}, context_instance=RequestContext(request))
def blog_post_detail(request, **kwargs):
if "post_pk" in kwargs:
if request.user.is_authenticated() and request.user.is_staff:
queryset = Post.objects.all()
post = get_object_or_404(queryset, pk=kwargs["post_pk"])
else:
raise Http404()
else:
queryset = Post.objects.current()
queryset = queryset.filter(
published__year = int(kwargs["year"]),
published__month = int(kwargs["month"]),
published__day = int(kwargs["day"]),
)
post = get_object_or_404(queryset, slug=kwargs["slug"])
post.inc_views()
return render_to_response("biblion/blog_post.html", {
"post": post,
}, context_instance=RequestContext(request))
def serialize_request(request):
data = {
"path": request.path,
"META": {
"QUERY_STRING": request.META.get("QUERY_STRING"),
"REMOTE_ADDR": request.META.get("REMOTE_ADDR"),
}
}
for key in request.META:
if key.startswith("HTTP"):
data["META"][key] = request.META[key]
return json.dumps(data)
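
# Editor's sketch (not part of the original views): serialize_request yields a
# JSON string shaped roughly like
#   {"path": "/blog/", "META": {"QUERY_STRING": "", "REMOTE_ADDR": "...",
#    "HTTP_USER_AGENT": "..."}}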
def blog_feed(request, section=None):
try:
posts = Post.objects.section(section)
except InvalidSection:
raise Http404()
if section is None:
section = ALL_SECTION_NAME
current_site = Site.objects.get_current()
feed_title = "%s Blog: %s" % (current_site.name, section[0].upper() + section[1:])
blog_url = "http://%s%s" % (current_site.domain, reverse("blog"))
url_name, kwargs = "blog_feed", {"section": section}
feed_url = "http://%s%s" % (current_site.domain, reverse(url_name, kwargs=kwargs))
if posts:
feed_updated = posts[0].published
else:
feed_updated = datetime(2009, 8, 1, 0, 0, 0)
# create a feed hit
hit = FeedHit()
hit.request_data = serialize_request(request)
hit.save()
atom = render_to_string("biblion/atom_feed.xml", {
"feed_id": feed_url,
"feed_title": feed_title,
"blog_url": blog_url,
"feed_url": feed_url,
"feed_updated": feed_updated,
"entries": posts,
"current_site": current_site,
})
return HttpResponse(atom, mimetype="application/atom+xml")
| bsd-3-clause | -8,178,591,767,137,141,000 | 29.181034 | 86 | 0.623536 | false | 3.822052 | false | false | false |
jseabold/statsmodels | statsmodels/sandbox/distributions/sppatch.py | 5 | 24020 | '''patching scipy to fit distributions and expect method
This adds new methods to estimate continuous distribution parameters with some
fixed/frozen parameters. It also contains functions that calculate the expected
value of a function for any continuous or discrete distribution
It temporarily also contains Bootstrap and Monte Carlo function for testing the
distribution fit, but these are neither general nor verified.
Author: josef-pktd
License: Simplified BSD
'''
from statsmodels.compat.python import lmap
import numpy as np
from scipy import stats, optimize, integrate
########## patching scipy
#vonmises does not define finite bounds, because it is intended for circular
#support which does not define a proper pdf on the real line
stats.distributions.vonmises.a = -np.pi
stats.distributions.vonmises.b = np.pi
#the next 3 functions are for fit with some fixed parameters
#As they are written, they do not work as functions, only as methods
def _fitstart(self, x):
'''example method, method of moment estimator as starting values
Parameters
----------
x : ndarray
data for which the parameters are estimated
Returns
-------
est : tuple
preliminary estimates used as starting value for fitting, not
necessarily a consistent estimator
Notes
-----
This needs to be written and attached to each individual distribution
This example was written for the gamma distribution, but not verified
with literature
'''
loc = np.min([x.min(),0])
a = 4/stats.skew(x)**2
scale = np.std(x) / np.sqrt(a)
return (a, loc, scale)
def _fitstart_beta(self, x, fixed=None):
'''method of moment estimator as starting values for beta distribution
Parameters
----------
x : ndarray
data for which the parameters are estimated
fixed : None or array_like
sequence of numbers and np.nan to indicate fixed parameters and parameters
to estimate
Returns
-------
est : tuple
preliminary estimates used as starting value for fitting, not
necessarily a consistent estimator
Notes
-----
This needs to be written and attached to each individual distribution
References
----------
for method of moment estimator for known loc and scale
https://en.wikipedia.org/wiki/Beta_distribution#Parameter_estimation
http://www.itl.nist.gov/div898/handbook/eda/section3/eda366h.htm
NIST reference also includes reference to MLE in
Johnson, Kotz, and Balakrishan, Volume II, pages 221-235
'''
#todo: separate out this part to be used for other compact support distributions
# e.g. rdist, vonmises, and truncnorm
# but this might not work because it might still be distribution specific
    a, b = x.min(), x.max()
    eps = (b - a) * 0.01
    if fixed is None:
        #this part not checked with books
        loc = a - eps
        scale = (b - a) + 2 * eps
else:
if np.isnan(fixed[-2]):
#estimate loc
loc = a - eps
else:
loc = fixed[-2]
if np.isnan(fixed[-1]):
#estimate scale
scale = (b + eps) - loc
else:
scale = fixed[-1]
#method of moment for known loc scale:
scale = float(scale)
xtrans = (x - loc)/scale
xm = xtrans.mean()
xv = xtrans.var()
tmp = (xm*(1-xm)/xv - 1)
p = xm * tmp
q = (1 - xm) * tmp
return (p, q, loc, scale) #check return type and should fixed be returned ?
def _fitstart_poisson(self, x, fixed=None):
'''maximum likelihood estimator as starting values for Poisson distribution
Parameters
----------
x : ndarray
data for which the parameters are estimated
fixed : None or array_like
sequence of numbers and np.nan to indicate fixed parameters and parameters
to estimate
Returns
-------
est : tuple
preliminary estimates used as starting value for fitting, not
necessarily a consistent estimator
Notes
-----
This needs to be written and attached to each individual distribution
References
----------
MLE :
https://en.wikipedia.org/wiki/Poisson_distribution#Maximum_likelihood
'''
#todo: separate out this part to be used for other compact support distributions
# e.g. rdist, vonmises, and truncnorm
# but this might not work because it might still be distribution specific
a = x.min()
eps = 0 # is this robust ?
if fixed is None:
#this part not checked with books
loc = a - eps
else:
if np.isnan(fixed[-1]):
#estimate loc
loc = a - eps
else:
loc = fixed[-1]
#MLE for standard (unshifted, if loc=0) Poisson distribution
xtrans = (x - loc)
lambd = xtrans.mean()
#second derivative d loglike/ dlambd Not used
#dlldlambd = 1/lambd # check
return (lambd, loc) #check return type and should fixed be returned ?
def nnlf_fr(self, thetash, x, frmask):
# new frozen version
# - sum (log pdf(x, theta),axis=0)
# where theta are the parameters (including loc and scale)
#
try:
if frmask is not None:
theta = frmask.copy()
theta[np.isnan(frmask)] = thetash
else:
theta = thetash
loc = theta[-2]
scale = theta[-1]
args = tuple(theta[:-2])
except IndexError:
raise ValueError("Not enough input arguments.")
if not self._argcheck(*args) or scale <= 0:
return np.inf
x = np.array((x-loc) / scale)
cond0 = (x <= self.a) | (x >= self.b)
if (np.any(cond0)):
return np.inf
else:
N = len(x)
#raise ValueError
return self._nnlf(x, *args) + N*np.log(scale)
def fit_fr(self, data, *args, **kwds):
'''estimate distribution parameters by MLE taking some parameters as fixed
Parameters
----------
data : ndarray, 1d
data for which the distribution parameters are estimated,
args : list ? check
starting values for optimization
kwds :
- 'frozen' : array_like
values for frozen distribution parameters and, for elements with
np.nan, the corresponding parameter will be estimated
Returns
-------
argest : ndarray
estimated parameters
Examples
--------
generate random sample
>>> np.random.seed(12345)
>>> x = stats.gamma.rvs(2.5, loc=0, scale=1.2, size=200)
estimate all parameters
>>> stats.gamma.fit(x)
array([ 2.0243194 , 0.20395655, 1.44411371])
>>> stats.gamma.fit_fr(x, frozen=[np.nan, np.nan, np.nan])
array([ 2.0243194 , 0.20395655, 1.44411371])
keep loc fixed, estimate shape and scale parameters
>>> stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, np.nan])
array([ 2.45603985, 1.27333105])
keep loc and scale fixed, estimate shape parameter
>>> stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, 1.0])
array([ 3.00048828])
>>> stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, 1.2])
array([ 2.57792969])
estimate only scale parameter for fixed shape and loc
>>> stats.gamma.fit_fr(x, frozen=[2.5, 0.0, np.nan])
array([ 1.25087891])
Notes
-----
self is an instance of a distribution class. This can be attached to
scipy.stats.distributions.rv_continuous
*Todo*
* check if docstring is correct
* more input checking, args is list ? might also apply to current fit method
'''
loc0, scale0 = lmap(kwds.get, ['loc', 'scale'],[0.0, 1.0])
Narg = len(args)
if Narg == 0 and hasattr(self, '_fitstart'):
x0 = self._fitstart(data)
elif Narg > self.numargs:
raise ValueError("Too many input arguments.")
else:
args += (1.0,)*(self.numargs-Narg)
# location and scale are at the end
x0 = args + (loc0, scale0)
if 'frozen' in kwds:
frmask = np.array(kwds['frozen'])
if len(frmask) != self.numargs+2:
raise ValueError("Incorrect number of frozen arguments.")
else:
# keep starting values for not frozen parameters
for n in range(len(frmask)):
# Troubleshooting ex_generic_mle_tdist
if isinstance(frmask[n], np.ndarray) and frmask[n].size == 1:
frmask[n] = frmask[n].item()
# If there were array elements, then frmask will be object-dtype,
# in which case np.isnan will raise TypeError
frmask = frmask.astype(np.float64)
x0 = np.array(x0)[np.isnan(frmask)]
else:
frmask = None
    #print(x0)
    #print(frmask)
return optimize.fmin(self.nnlf_fr, x0,
args=(np.ravel(data), frmask), disp=0)
#The next two functions/methods calculate the expected value of an arbitrary
#function, however for the continuous distributions integrate.quad is used,
#which might require continuity or smoothness of the function.
#TODO: add option for Monte Carlo integration
def expect(self, fn=None, args=(), loc=0, scale=1, lb=None, ub=None, conditional=False):
'''calculate expected value of a function with respect to the distribution
location and scale only tested on a few examples
Parameters
----------
all parameters are keyword parameters
fn : function (default: identity mapping)
Function for which integral is calculated. Takes only one argument.
args : tuple
argument (parameters) of the distribution
lb, ub : numbers
lower and upper bound for integration, default is set to the support
of the distribution
conditional : bool (False)
If true then the integral is corrected by the conditional probability
of the integration interval. The return value is the expectation
of the function, conditional on being in the given interval.
Returns
-------
expected value : float
Notes
-----
    This function has not been checked for its behavior when the integral is
    not finite. The integration behavior is inherited from scipy.integrate.quad.
'''
if fn is None:
def fun(x, *args):
return x*self.pdf(x, loc=loc, scale=scale, *args)
else:
def fun(x, *args):
return fn(x)*self.pdf(x, loc=loc, scale=scale, *args)
if lb is None:
lb = loc + self.a * scale #(self.a - loc)/(1.0*scale)
if ub is None:
ub = loc + self.b * scale #(self.b - loc)/(1.0*scale)
if conditional:
invfac = (self.sf(lb, loc=loc, scale=scale, *args)
- self.sf(ub, loc=loc, scale=scale, *args))
else:
invfac = 1.0
return integrate.quad(fun, lb, ub,
args=args)[0]/invfac
def expect_v2(self, fn=None, args=(), loc=0, scale=1, lb=None, ub=None, conditional=False):
'''calculate expected value of a function with respect to the distribution
location and scale only tested on a few examples
Parameters
----------
all parameters are keyword parameters
fn : function (default: identity mapping)
Function for which integral is calculated. Takes only one argument.
args : tuple
argument (parameters) of the distribution
lb, ub : numbers
lower and upper bound for integration, default is set using
quantiles of the distribution, see Notes
conditional : bool (False)
If true then the integral is corrected by the conditional probability
of the integration interval. The return value is the expectation
of the function, conditional on being in the given interval.
Returns
-------
expected value : float
Notes
-----
    This function has not been checked for its behavior when the integral is
    not finite. The integration behavior is inherited from scipy.integrate.quad.
The default limits are lb = self.ppf(1e-9, *args), ub = self.ppf(1-1e-9, *args)
For some heavy tailed distributions, 'alpha', 'cauchy', 'halfcauchy',
'levy', 'levy_l', and for 'ncf', the default limits are not set correctly
even when the expectation of the function is finite. In this case, the
integration limits, lb and ub, should be chosen by the user. For example,
for the ncf distribution, ub=1000 works in the examples.
There are also problems with numerical integration in some other cases,
for example if the distribution is very concentrated and the default limits
are too large.
'''
#changes: 20100809
#correction and refactoring how loc and scale are handled
#uses now _pdf
#needs more testing for distribution with bound support, e.g. genpareto
if fn is None:
def fun(x, *args):
return (loc + x*scale)*self._pdf(x, *args)
else:
def fun(x, *args):
return fn(loc + x*scale)*self._pdf(x, *args)
if lb is None:
#lb = self.a
try:
lb = self.ppf(1e-9, *args) #1e-14 quad fails for pareto
except ValueError:
lb = self.a
else:
lb = max(self.a, (lb - loc)/(1.0*scale)) #transform to standardized
if ub is None:
#ub = self.b
try:
ub = self.ppf(1-1e-9, *args)
except ValueError:
ub = self.b
else:
ub = min(self.b, (ub - loc)/(1.0*scale))
if conditional:
invfac = self._sf(lb,*args) - self._sf(ub,*args)
else:
invfac = 1.0
return integrate.quad(fun, lb, ub,
args=args, limit=500)[0]/invfac
### for discrete distributions
#TODO: check that for a distribution with finite support the calculations are
# done with one array summation (np.dot)
#based on _drv2_moment(self, n, *args), but streamlined
def expect_discrete(self, fn=None, args=(), loc=0, lb=None, ub=None,
conditional=False):
'''calculate expected value of a function with respect to the distribution
for discrete distribution
Parameters
----------
(self : distribution instance as defined in scipy stats)
fn : function (default: identity mapping)
Function for which integral is calculated. Takes only one argument.
args : tuple
argument (parameters) of the distribution
optional keyword parameters
lb, ub : numbers
lower and upper bound for integration, default is set to the support
        of the distribution, lb and ub are inclusive (lb<=k<=ub)
conditional : bool (False)
If true then the expectation is corrected by the conditional
probability of the integration interval. The return value is the
expectation of the function, conditional on being in the given
        interval (k such that lb<=k<=ub).
Returns
-------
expected value : float
Notes
-----
* function is not vectorized
    * accuracy: uses self.moment_tol as stopping criterion
      for heavy tailed distributions, e.g. zipf(4), accuracy for
mean, variance in example is only 1e-5,
increasing precision (moment_tol) makes zipf very slow
* suppnmin=100 internal parameter for minimum number of points to evaluate
could be added as keyword parameter, to evaluate functions with
non-monotonic shapes, points include integers in (-suppnmin, suppnmin)
* uses maxcount=1000 limits the number of points that are evaluated
to break loop for infinite sums
(a maximum of suppnmin+1000 positive plus suppnmin+1000 negative integers
are evaluated)
'''
#moment_tol = 1e-12 # increase compared to self.moment_tol,
# too slow for only small gain in precision for zipf
#avoid endless loop with unbound integral, eg. var of zipf(2)
maxcount = 1000
suppnmin = 100 #minimum number of points to evaluate (+ and -)
if fn is None:
def fun(x):
#loc and args from outer scope
return (x+loc)*self._pmf(x, *args)
else:
def fun(x):
#loc and args from outer scope
return fn(x+loc)*self._pmf(x, *args)
# used pmf because _pmf does not check support in randint
# and there might be problems(?) with correct self.a, self.b at this stage
# maybe not anymore, seems to work now with _pmf
self._argcheck(*args) # (re)generate scalar self.a and self.b
if lb is None:
lb = (self.a)
else:
lb = lb - loc
if ub is None:
ub = (self.b)
else:
ub = ub - loc
if conditional:
invfac = self.sf(lb,*args) - self.sf(ub+1,*args)
else:
invfac = 1.0
tot = 0.0
low, upp = self._ppf(0.001, *args), self._ppf(0.999, *args)
low = max(min(-suppnmin, low), lb)
upp = min(max(suppnmin, upp), ub)
supp = np.arange(low, upp+1, self.inc) #check limits
    #print('low, upp', low, upp)
tot = np.sum(fun(supp))
diff = 1e100
pos = upp + self.inc
count = 0
#handle cases with infinite support
while (pos <= ub) and (diff > self.moment_tol) and count <= maxcount:
diff = fun(pos)
tot += diff
pos += self.inc
count += 1
if self.a < 0: #handle case when self.a = -inf
diff = 1e100
pos = low - self.inc
while (pos >= lb) and (diff > self.moment_tol) and count <= maxcount:
diff = fun(pos)
tot += diff
pos -= self.inc
count += 1
if count > maxcount:
# replace with proper warning
print('sum did not converge')
return tot/invfac
stats.distributions.rv_continuous.fit_fr = fit_fr
stats.distributions.rv_continuous.nnlf_fr = nnlf_fr
stats.distributions.rv_continuous.expect = expect
stats.distributions.rv_discrete.expect = expect_discrete
stats.distributions.beta_gen._fitstart = _fitstart_beta #not tried out yet
stats.distributions.poisson_gen._fitstart = _fitstart_poisson #not tried out yet
########## end patching scipy
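
#Editor's sketch (not part of the original module): the patched methods in use.
#
# >>> x = stats.gamma.rvs(2.5, loc=0, scale=1.2, size=200)
# >>> stats.gamma.fit_fr(x, frozen=[np.nan, 0.0, 1.2]) # estimate shape only
# >>> stats.norm.expect(lambda t: t**2) # ~1.0, second moment of N(0, 1)
# >>> stats.poisson.expect(lambda k: k, args=(3,)) # ~3.0, mean of Poisson(3)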
def distfitbootstrap(sample, distr, nrepl=100):
'''run bootstrap for estimation of distribution parameters
hard coded: only one shape parameter is allowed and estimated,
loc=0 and scale=1 are fixed in the estimation
Parameters
----------
sample : ndarray
original sample data for bootstrap
distr : distribution instance with fit_fr method
nrepl : int
number of bootstrap replications
Returns
-------
res : array (nrepl,)
parameter estimates for all bootstrap replications
'''
nobs = len(sample)
res = np.zeros(nrepl)
for ii in range(nrepl):
rvsind = np.random.randint(nobs, size=nobs)
x = sample[rvsind]
res[ii] = distr.fit_fr(x, frozen=[np.nan, 0.0, 1.0])
return res
def distfitmc(sample, distr, nrepl=100, distkwds={}):
'''run Monte Carlo for estimation of distribution parameters
hard coded: only one shape parameter is allowed and estimated,
loc=0 and scale=1 are fixed in the estimation
Parameters
----------
sample : ndarray
original sample data, in Monte Carlo only used to get nobs,
distr : distribution instance with fit_fr method
nrepl : int
number of Monte Carlo replications
Returns
-------
res : array (nrepl,)
parameter estimates for all Monte Carlo replications
'''
arg = distkwds.pop('arg')
nobs = len(sample)
res = np.zeros(nrepl)
for ii in range(nrepl):
x = distr.rvs(arg, size=nobs, **distkwds)
res[ii] = distr.fit_fr(x, frozen=[np.nan, 0.0, 1.0])
return res
def printresults(sample, arg, bres, kind='bootstrap'):
    '''calculate and print Bootstrap or Monte Carlo results
Parameters
----------
sample : ndarray
original sample data
arg : float (for general case will be array)
bres : ndarray
parameter estimates from Bootstrap or Monte Carlo run
kind : {'bootstrap', 'montecarlo'}
        output is printed for Bootstrap (default) or Monte Carlo
Returns
-------
None, currently only printing
Notes
-----
still a bit a mess because it is used for both Bootstrap and Monte Carlo
made correction:
reference point for bootstrap is estimated parameter
not clear:
I'm not doing any ddof adjustment in estimation of variance, do we
need ddof>0 ?
todo: return results and string instead of printing
'''
print('true parameter value')
print(arg)
    print('MLE estimate of parameters using sample (nobs=%d)' % (len(sample)))
    # note: distr is taken from module scope (set in the __main__ block below)
    argest = distr.fit_fr(sample, frozen=[np.nan, 0.0, 1.0])
print(argest)
if kind == 'bootstrap':
#bootstrap compares to estimate from sample
argorig = arg
arg = argest
    print('%s distribution of parameter estimate (nrepl=%d)' % (kind, len(bres)))
print('mean = %f, bias=%f' % (bres.mean(0), bres.mean(0)-arg))
print('median', np.median(bres, axis=0))
print('var and std', bres.var(0), np.sqrt(bres.var(0)))
bmse = ((bres - arg)**2).mean(0)
print('mse, rmse', bmse, np.sqrt(bmse))
bressorted = np.sort(bres)
print('%s confidence interval (90%% coverage)' % kind)
    print(bressorted[int(len(bressorted)*0.05)], bressorted[int(len(bressorted)*0.95)])
print('%s confidence interval (90%% coverage) normal approximation' % kind)
print(stats.norm.ppf(0.05, loc=bres.mean(), scale=bres.std()),)
print(stats.norm.isf(0.05, loc=bres.mean(), scale=bres.std()))
print('Kolmogorov-Smirnov test for normality of %s distribution' % kind)
print(' - estimated parameters, p-values not really correct')
print(stats.kstest(bres, 'norm', (bres.mean(), bres.std())))
if __name__ == '__main__':
examplecases = ['largenumber', 'bootstrap', 'montecarlo'][:]
if 'largenumber' in examplecases:
print('\nDistribution: vonmises')
for nobs in [200]:#[20000, 1000, 100]:
x = stats.vonmises.rvs(1.23, loc=0, scale=1, size=nobs)
print('\nnobs:', nobs)
print('true parameter')
print('1.23, loc=0, scale=1')
print('unconstrained')
print(stats.vonmises.fit(x))
print(stats.vonmises.fit_fr(x, frozen=[np.nan, np.nan, np.nan]))
print('with fixed loc and scale')
print(stats.vonmises.fit_fr(x, frozen=[np.nan, 0.0, 1.0]))
print('\nDistribution: gamma')
distr = stats.gamma
arg, loc, scale = 2.5, 0., 20.
for nobs in [200]:#[20000, 1000, 100]:
x = distr.rvs(arg, loc=loc, scale=scale, size=nobs)
print('\nnobs:', nobs)
print('true parameter')
print('%f, loc=%f, scale=%f' % (arg, loc, scale))
print('unconstrained')
print(distr.fit(x))
print(distr.fit_fr(x, frozen=[np.nan, np.nan, np.nan]))
print('with fixed loc and scale')
print(distr.fit_fr(x, frozen=[np.nan, 0.0, 1.0]))
print('with fixed loc')
print(distr.fit_fr(x, frozen=[np.nan, 0.0, np.nan]))
ex = ['gamma', 'vonmises'][0]
if ex == 'gamma':
distr = stats.gamma
arg, loc, scale = 2.5, 0., 1
elif ex == 'vonmises':
distr = stats.vonmises
arg, loc, scale = 1.5, 0., 1
else:
raise ValueError('wrong example')
nobs = 100
nrepl = 1000
sample = distr.rvs(arg, loc=loc, scale=scale, size=nobs)
print('\nDistribution:', distr)
if 'bootstrap' in examplecases:
print('\nBootstrap')
bres = distfitbootstrap(sample, distr, nrepl=nrepl )
printresults(sample, arg, bres)
if 'montecarlo' in examplecases:
print('\nMonteCarlo')
mcres = distfitmc(sample, distr, nrepl=nrepl,
distkwds=dict(arg=arg, loc=loc, scale=scale))
printresults(sample, arg, mcres, kind='montecarlo')
| bsd-3-clause | -3,253,983,564,312,369,000 | 32.03989 | 91 | 0.617527 | false | 3.827888 | false | false | false |
codefisher/mozbutton_sdk | builder/restartless_button.py | 1 | 28578 | import os
import re
import json
import codecs
import lxml.etree as ET
from copy import deepcopy
from collections import namedtuple, defaultdict
try:
from PIL import Image
except ImportError:
pass
from builder.ext_button import Button, Option, ChromeString, ChromeFile
try:
basestring
except NameError:
basestring = str # py3
Keys = namedtuple("Keys", ['command', 'button'])
ExtraUI = namedtuple("ExtraUI", ["parent", "parent_id", "index", "code", "after"])
class RestartlessButton(Button):
def __init__(self, *args, **kwargs):
super(RestartlessButton, self).__init__(*args, **kwargs)
self._ui_ids = set()
self._included_js_files = []
self._bootstrap_globals = []
self._bootstrap_startup = []
self._bootstrap_shutdown = []
for folder, button, files in self._info:
if "bootstrap" in files:
for file_name in os.listdir(os.path.join(folder, "bootstrap")):
if file_name[0] != ".":
with open(os.path.join(folder, "bootstrap", file_name), "r") as js_fp:
data = js_fp.read()
if file_name == "global.js":
self._bootstrap_globals.append(data)
elif file_name == "startup.js":
self._bootstrap_startup.append(data)
elif file_name == "shutdown.js":
self._bootstrap_shutdown.append(data)
def get_files(self):
for file_name, data in self.get_jsm_files().items():
yield (file_name + ".jsm", data)
def locale_files(self, button_locales, *args, **kwargs):
dtd_data = button_locales.get_dtd_data(self.get_locale_strings(),
self, untranslated=False, format_type="properties")
for locale, data in dtd_data.items():
yield locale, "button_labels.properties", data
locales_inuse = dtd_data.keys()
key_strings = button_locales.get_string_data(self.get_key_strings(),
self, format_type="properties")
for locale, data in self.locale_file_filter(key_strings, locales_inuse):
yield locale, "keys.properties", data
for locale, file_name, data in super(RestartlessButton, self).locale_files(button_locales, locales_inuse):
yield locale, file_name, data
def jsm_keyboard_shortcuts(self, file_name):
if not self._settings.get("use_keyboard_shortcuts"):
return
for button in self._button_keys.keys():
func = self._button_commands.get(file_name, {}).get(button)
if func is not None:
yield Keys(self._patch_call(func), button)
def option_data(self):
scripts = []
if self._settings.get("use_keyboard_shortcuts"):
scripts.append("key-option.js")
with open(self.find_file("key-option.xul"), "r") as key_option_file:
key_option_template = key_option_file.read()
for button in self._button_keys.keys():
xul = self.format_string(key_option_template,
button=button,
menu_label=button + ".label")
applications = " ".join(self._button_applications[button])
self._button_options[button + "-key-item"].append(
Option("tb-key-shortcut.option.title:lightning.png:" + applications, xul))
self._button_applications[
button + "-key-item"] = self._button_applications[button]
files, javascript = super(RestartlessButton, self).option_data()
return files, javascript + scripts
def get_pref_list(self):
settings = super(RestartlessButton, self).get_pref_list()
pref_root = self._settings.get("pref_root")
if self._settings.get('use_keyboard_shortcuts'):
for button in self._button_keys.keys():
settings.append(("{}key-disabled.{}".format(pref_root, button), 'false'))
properties = self.pref_locale_file("'chrome://{chrome_name}/locale/{prefix}keys.properties'")
settings.append(("{}key.{}".format(pref_root, button), properties))
settings.append(("{}modifier.{}".format(pref_root, button), properties))
return settings
def get_js_files(self):
js_files = super(RestartlessButton, self).get_js_files()
if self._settings.get("use_keyboard_shortcuts"):
with open(self.find_file("key-option.js")) as key_option_fp:
js_files["key-option"] = self.string_subs(key_option_fp.read())
self._included_js_files = js_files.keys()
return js_files
def get_chrome_strings(self):
for chrome_string in super(RestartlessButton, self).get_chrome_strings():
yield chrome_string
yield ChromeString(file_name='bootstrap.js', data=self.create_bootstrap())
defaults = self.get_defaults()
if defaults:
yield ChromeString(file_name=os.path.join("chrome", "content", "defaultprefs.js"), data=defaults)
def get_chrome_files(self):
for chrome_file in super(RestartlessButton, self).get_chrome_files():
yield chrome_file
yield ChromeFile(file_name=os.path.join("chrome", "content", "customizable.jsm"), path=self.find_file('customizable.jsm'))
def create_bootstrap(self):
chrome_name = self._settings.get("chrome_name")
loaders = []
resource = ""
if self.resource_files:
resource = "createResource('{0}', 'chrome://{0}/content/resources/');".format(chrome_name)
window_modules = defaultdict(list)
for file_name in self._button_files:
for overlay in self._settings.get("files_to_window").get(file_name, ()):
window_modules[overlay].append(file_name)
for overlay, modules in window_modules.items():
mods = "\n\t\t".join(["modules.push('chrome://{0}/content/{1}.jsm');".format(chrome_name, file_name) for file_name in modules])
loaders.append("(uri == '{0}') {{\n\t\t{1}\n\t}}".format(overlay, mods))
if self._settings.get("show_updated_prompt"):
install_template = self.env.get_template('bootstrap.js')
install = install_template.render(**self._settings)
else:
install = ""
template = self.env.get_template('bootstrap.js')
return template.render(
resource=resource, install=install,
globals=self.string_subs("\n".join(self._bootstrap_globals)),
startup=self.string_subs("\n".join(self._bootstrap_startup)),
shutdown=self.string_subs("\n".join(self._bootstrap_shutdown)),
loaders = "if" + " else if".join(loaders),
**self._settings)
def _jsm_create_menu(self, file_name, buttons):
if not self._settings.get('menuitems'):
return ''
statements = []
data = self.create_menu_dom(file_name, buttons)
in_submenu = [menuitem for menuitem in data if menuitem.parent_id is None]
in_menu = [menuitem for menuitem in data if menuitem.parent_id is not None]
num = 0
template = self.env.get_template('menu.js')
if in_submenu:
menu_id, menu_label, locations = self._settings.get("menu_meta")
if isinstance(locations, basestring):
locations = [locations]
for i, location in enumerate(locations):
menu_id_num = "{0}_{1}".format(menu_id, i) if i else menu_id
meta = self._settings.get("file_to_menu").get(location, {}).get(file_name)
if meta:
menu_name, insert_after = meta
statements.append(template.render(**{
"menu_name": menu_name,
"menu_id": menu_id_num,
"label": menu_label,
"class": "menu-iconic",
"menu_label": menu_label,
"insert_after": insert_after,
"menuitems_sorted": self._settings.get("menuitems_sorted")
}))
num += 3
for item, _, _ in in_submenu:
item_statements, count, _ = self._create_dom(
item, top="menupopup_2", count=num, doc="document")
num = count + 1
statements.extend(item_statements)
for item, menu_name, insert_after in in_menu:
statements.append("var menupopup_{0} = document.getElementById('{1}');".format(num, menu_name))
var_name = "menupopup_%s" % num
num += 1
item.attrib["insertafter"] = insert_after
item_statements, count, _ = self._create_dom(item, top=var_name, count=num)
num = count + 1
statements.extend(item_statements)
return "\n\t".join(statements)
def _dom_string_lookup(self, value):
result = []
items = re.findall(r'&.+?;|[^&;]+', value)
for item in items:
if item == "&brandShortName;":
result.append("Cc['@mozilla.org/xre/app-info;1'].createInstance(Ci.nsIXULAppInfo).name")
elif item[0] == '&' and item[-1] == ';':
result.append("buttonStrings.get('%s')" % item[1:-1])
else:
result.append("'%s'" % item)
return ' + '.join(result)
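
    # Editor's sketch (not part of the original builder): example translation,
    #   _dom_string_lookup("&paste.label; (&brandShortName;)")
    # returns the JS expression string
    #   "buttonStrings.get('paste.label') + ' (' + <appinfo name> + ')'"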
def _create_dom(self, root, top=None, count=0, doc='document', child_parent=None, rename=None, append_children=True):
num = count
        if rename is None:
rename = {}
children = []
statements = [
"var %s_%s = %s.createElement('%s');" % (root.tag, num, doc, rename.get(root.tag, root.tag)),
]
javascript_object = self._settings.get("javascript_object")
for key, value in sorted(root.attrib.items(), key=self._attr_key):
if key == 'id':
statements.append("%s_%s.id = '%s';" % (root.tag, num, value))
elif key in ('label', 'tooltiptext') or (root.tag == 'key' and key in ('key', 'keycode', 'modifiers')):
statements.append("%s_%s.setAttribute('%s', %s);" % ((root.tag, num, key, self._dom_string_lookup(value))))
elif key == "class":
for val in value.split():
statements.append('%s_%s.classList.add("%s");' % (root.tag, num, val))
elif key[0:2] == 'on':
if key == 'oncommand' and root.tag == 'key':
                    # we do this because key elements without an oncommand are optimized away,
                    # but we can't call our function directly, because it might not exist
                    # in the window scope, so an event listener has to be used
statements.append("%s_%s.setAttribute('oncommand', 'void(0);');" % (root.tag, num))
statements.append("%s_%s.addEventListener('%s', function(event) {\n\t\t\t\t%s\n\t\t\t}, false);" % (root.tag, num, key[2:], self._patch_call(value)))
elif key == "insertafter":
pass
elif key == "showamenu":
statements.append("{}_{}.addEventListener('DOMMenuItemActive', {}.menuLoaderEvent, false);".format(root.tag, num, javascript_object))
statements.append("%s_%s._handelMenuLoaders = true;" % (root.tag, num))
statements.append("%s_%s.setAttribute('%s', '%s');" % ((root.tag, num, key, value)))
elif key == "toolbarname":
# this is just for our custom toolbars which are named "Toolbar Buttons 1" and the like
name, sep, other = value.partition(' ')
other = " + '%s%s'" % (sep, other) if sep else ""
value = "buttonStrings.get('%s')%s" % (name, other)
statements.append("%s_%s.setAttribute('%s', %s);" % ((root.tag, num, key, value)))
elif key == "type" and value == "menu-button" and 'id' in root.attrib:
statements.append('''if(extensionPrefs.getPrefType('menupopup.hide.{0}') == extensionPrefs.PREF_INVALID || !extensionPrefs.getBoolPref('menupopup.hide.{0}')) {{\n\t\t\t\t{1}_{2}.setAttribute("{3}", "{4}");\n\t\t\t}}'''.format(root.attrib['id'], root.tag, num, key, value))
else:
statements.append('%s_%s.setAttribute("%s", "%s");' % ((root.tag, num, key, value)))
for node in root:
sub_nodes, count, _ = self._create_dom(node, '%s_%s' % (root.tag, num), count+1, doc=doc, rename=rename, child_parent=(child_parent if top == None else None))
if append_children:
statements.extend(sub_nodes)
else:
children = sub_nodes
if not top:
statements.append('return %s_%s;' % (root.tag, num))
else:
if "insertafter" in root.attrib:
statements.append("%s.insertBefore(%s_%s, %s.getElementById('%s').nextSibling);" % (top, root.tag, num, doc, root.attrib.get("insertafter")))
else:
statements.append('%s.appendChild(%s_%s);' % (top if not child_parent else child_parent, root.tag, num))
return statements, count, children
def _attr_key(self, attr):
order = ('id', 'defaultarea', 'type', 'label', 'tooltiptext', 'command', 'onclick', 'oncommand')
if attr[0].lower() in order:
return order.index(attr[0].lower())
return 100
def _create_dom_button(self, button_id, root, file_name, count, toolbar_ids):
add_to_main_toolbar = self._settings.get("add_to_main_toolbar")
if 'viewid' in root.attrib:
self._ui_ids.add(root.attrib["viewid"])
statements, _, children = self._create_dom(root, child_parent="popupset", append_children=False)
children[0] = """var popupset = document.getElementById('PanelUI-multiView');
if(popupset) {
var menupopup_1 = document.createElement('panelview');
} else {
var menupopup_1 = document.createElement('menupopup');
popupset = document.documentElement;
}"""
data = {
"type": "'view'",
"onBeforeCreated": 'function (document) {\n\t\t\t\tvar window = document.defaultView;\n\t\t\t\t%s\n\t\t\t}' % "\n\t\t\t\t".join(children),
}
elif 'usepanelview' in root.attrib:
self._ui_ids.add("{0}-panel-view".format(root.attrib["id"]))
root.attrib["onclick"] = """if(event.target != event.currentTarget || ('button' in event && event.button != 0)) {{
return;
}}
var item = event.target;
if(item.nodeName == 'key') {{
item = document.getElementById('{0}');
}}
if(item.getAttribute('cui-areatype') == 'menu-panel') {{
var win = item.ownerDocument.defaultView;
event.preventDefault();
event.stopPropagation();
item.ownerDocument.getElementById('{0}-panel-view').ownerButton = item;
win.PanelUI.showSubView('{0}-panel-view', item, CustomizableUI.AREA_PANEL);
}}""".format(root.attrib["id"])
if 'type' not in root.attrib:
popup_opener = """ else {
item.firstChild.openPopup(item, "after_start");
}"""
if 'oncommand' not in root.attrib:
root.attrib["oncommand"] = root.attrib["onclick"] + popup_opener
else:
root.attrib["onclick"] += popup_opener
statements, _, _ = self._create_dom(root)
root_clone = deepcopy(root)
popup = root_clone[0]
if root.attrib['usepanelview'] == 'button-menu':
del root_clone.attrib["type"]
popup.insert(0, ET.Element("menuseparator"))
popup.insert(0, ET.Element("menuitem", root_clone.attrib))
for node in popup:
node.attrib['class'] = 'subviewbutton'
if 'onpopupshowing' in popup.attrib:
popup.attrib['onViewShowing'] = popup.attrib['onpopupshowing']
del popup.attrib['onpopupshowing']
if 'onpopuphiding' in popup.attrib:
popup.attrib['onViewHiding'] = popup.attrib['onpopuphiding']
del popup.attrib['onpopuphiding']
_, _, children = self._create_dom(root_clone, child_parent="popupset", rename={'menuitem': 'toolbarbutton'}, append_children=False)
children.pop(0)
data = {
"type": "'custom'",
"onBuild": '''function (document) {
var window = document.defaultView;
var popupset = document.getElementById('PanelUI-multiView');
if(popupset) {
var menupopup_1 = document.createElement('panelview');
%s
menupopup_1.id = "%s-panel-view";
}
%s
}''' % ("\n\t\t\t\t\t".join(children), root.attrib['id'], "\n\t\t\t\t".join(statements))
}
else:
statements, _, _ = self._create_dom(root)
data = {
"type": "'custom'",
"onBuild": 'function (document) {\n\t\t\t\tvar window = document.defaultView;\n\t\t\t\t%s\n\t\t\t}' % "\n\t\t\t\t".join(statements)
}
self._apply_toolbox(file_name, data)
toolbar_max_count = self._settings.get("buttons_per_toolbar")
if add_to_main_toolbar and button_id in add_to_main_toolbar:
data['defaultArea'] = "'%s'" % self._settings.get('file_to_main_toolbar').get(file_name)
elif self._settings.get("put_button_on_toolbar"):
toolbar_index = count // toolbar_max_count
if len(toolbar_ids) > toolbar_index:
data['defaultArea'] = "'%s'" % toolbar_ids[toolbar_index]
for key, value in root.attrib.items():
if key in ('label', 'tooltiptext'):
data[key] = self._dom_string_lookup(value)
elif key == "id":
data[key] = "'%s'" % value
elif key == 'oncommand':
self._button_commands[file_name][button_id] = value
elif key == 'viewid':
data["viewId"] = "'%s'" % value
elif key == 'onviewshowing':
data["onViewShowing"] = "function(event){\n\t\t\t\t%s\n\t\t\t}" % self._patch_call(value)
elif key == 'onviewhideing':
data["onViewHiding"] = "function(event){\n\t\t\t\t%s\n\t\t\t}" % self._patch_call(value)
for js_file in self._get_js_file_list(file_name):
if self._button_js_setup.get(js_file, {}).get(button_id):
data["onCreated"] = "function(aNode){\n\t\t\tvar document = aNode.ownerDocument;\n\t\t\t%s\n\t\t}" % self._button_js_setup[js_file][button_id]
items = sorted(data.items(), key=self._attr_key)
return "CustomizableUI.createWidget({\n\t\t\t%s\n\t\t});" % ",\n\t\t\t".join("%s: %s" % (key, value) for key, value in items)
def _apply_toolbox(self, file_name, data):
toolbox_info = self._settings.get("file_to_toolbar_box2").get(file_name)
if toolbox_info:
window_file, toolbox_id = toolbox_info
data["toolbox"] = "'%s'" % toolbox_id
if window_file:
data["window"] = "'%s'" % window_file
def _patch_call(self, value):
data = []
if re.search(r'\bthis\b', value):
value = re.sub(r'\bthis\b', 'aThis', value)
data.append("var aThis = event.currentTarget;")
if re.search(r'\bdocument\b', value):
data.append("var document = event.target.ownerDocument;")
if re.search(r'\bwindow\b', value):
data.append("var window = event.target.ownerDocument.defaultView;")
data.append(value)
return "\n\t\t\t\t".join(data)
def _create_jsm_button(self, button_id, root, file_name, count, toolbar_ids):
toolbar_max_count = self._settings.get("buttons_per_toolbar")
add_to_main_toolbar = self._settings.get("add_to_main_toolbar")
data = {}
attr = root.attrib
self._apply_toolbox(file_name, data)
if add_to_main_toolbar and button_id in add_to_main_toolbar:
data['defaultArea'] = "'%s'" % self._settings.get('file_to_main_toolbar').get(file_name)
elif self._settings.get("put_button_on_toolbar"):
toolbar_index = count // toolbar_max_count
if len(toolbar_ids) > toolbar_index:
data['defaultArea'] = "'%s'" % toolbar_ids[toolbar_index]
for key, value in attr.items():
if key in ('label', 'tooltiptext'):
data[key] = self._dom_string_lookup(value)
elif key == "id":
data[key] = "'%s'" % value
elif key in ('onclick', 'oncommand'):
if key == 'oncommand':
self._button_commands[file_name][button_id] = value
key = 'onCommand' if key == 'oncommand' else 'onClick'
data[key] = "function(event) {\n\t\t\t\t%s\n\t\t\t}" % self._patch_call(value)
for js_file in self._get_js_file_list(file_name):
if self._button_js_setup.get(js_file, {}).get(button_id):
data["onCreated"] = "function(aNode) {\n\t\t\t\tvar document = aNode.ownerDocument;\n\t\t\t\t%s\n\t\t\t}" % self._button_js_setup[js_file][button_id]
items = sorted(data.items(), key=self._attr_key)
result = "CustomizableUI.createWidget({\n\t\t\t%s\n\t\t});" % ",\n\t\t\t".join("%s: %s" % (key, value) for (key, value) in items)
return result
def get_jsm_files(self):
result = {}
simple_attrs = {'label', 'tooltiptext', 'id', 'oncommand', 'onclick', 'key', 'class'}
button_hash, toolbar_template = self._get_toolbar_info()
template = self.env.get_template('button.jsm')
javascript_object = self._settings.get("javascript_object")
for file_name, values in self._button_xul.items():
jsm_buttons = []
js_includes = [js_file for js_file in self._get_js_file_list(file_name)
if js_file != "loader" and js_file in self._included_js_files]
toolbars, toolbar_ids = self._create_jsm_toolbar(button_hash, toolbar_template, file_name, values)
count = 0
modules = set()
for button_id, xul in values.items():
root = ET.fromstring(xul.replace('&', '&'))
modules.update(self._modules[button_id])
attr = root.attrib
if not len(root) and not set(attr.keys()).difference(simple_attrs) and (not "class" in attr or attr["class"] == "toolbarbutton-1 chromeclass-toolbar-additional"):
jsm_buttons.append(self._create_jsm_button(button_id, root, file_name, count, toolbar_ids))
else:
jsm_buttons.append(self._create_dom_button(button_id, root, file_name, count, toolbar_ids))
count += 1
default_mods = {
"resource://gre/modules/Services.jsm",
"resource:///modules/CustomizableUI.jsm",
"resource://services-common/stringbundle.js"
}
modules_import = "\n".join("try { Cu.import('%s'); } catch(e) {}" % mod for mod in modules if mod and mod not in default_mods)
if self._settings.get("menu_meta"):
menu_id, menu_label, _ = self._settings.get("menu_meta")
else:
menu_id, menu_label = "", ""
end = set()
menu = self._jsm_create_menu(file_name, values)
for js_file in set(self._get_js_file_list(file_name) + [file_name]):
if self._button_js_setup.get(js_file, {}):
end.update(self._button_js_setup[js_file].values())
if (self._settings.get("menuitems") and menu) or self._settings.get('location_placement'):
end.add(javascript_object + ".setUpMenuShower(document);")
extra_ui = self.create_extra_ui(file_name, values)
result[file_name] = template.render(
modules=modules_import,
locale_file_prefix=self._settings.get("locale_file_prefix"),
scripts=js_includes,
button_ids=json.dumps(list(values.keys())),
toolbar_ids=json.dumps(toolbar_ids),
toolbars=toolbars,
menu_id=menu_id,
ui_ids=json.dumps(list(self._ui_ids)),
toolbox=self._settings.get("file_to_toolbar_box").get(file_name, ('', ''))[1],
menu=menu,
keys=list(self.jsm_keyboard_shortcuts(file_name)),
end="\n\t".join(end),
buttons=jsm_buttons,
extra_ui=extra_ui,
javascript_object=self._settings.get("javascript_object"),
pref_root=self._settings.get("pref_root"),
chrome_name=self._settings.get("chrome_name")
)
return result
def create_extra_ui(self, file_name, values):
location = self._settings.get("location_placement")
result = []
if location and file_name in self._settings.get("file_to_location", {}).get(location):
for index, (button_id, xul) in enumerate(values.items()):
parent, parent_id, after, attrib = self._settings.get("file_to_location").get(location).get(file_name)
root = ET.fromstring(xul.replace('&', '&'))
root.attrib["insertafter"] = after
root.attrib["id"] += "-extra-ui"
self._ui_ids.add(root.attrib["id"])
if attrib:
for name, value in attrib.items():
if value is None:
del root.attrib[name]
else:
root.attrib[name] = value
parent_var = "{}_{}".format(parent, index)
statements, _, _ = self._create_dom(root, top=parent_var)
result.append(ExtraUI(parent, parent_id, index, "\n\t\t".join(statements), after))
return result
def _create_jsm_toolbar(self, button_hash, toolbar_template, file_name, values):
toolbar_ids = []
toolbars = []
if file_name in self._settings.get("extra_toolbars_disabled"):
return '', []
count = 0
max_count = self._settings.get("buttons_per_toolbar")
buttons = list(values.keys())
for box_setting, include_setting in [("file_to_toolbar_box", "include_toolbars"),
("file_to_bottom_box", "include_satusbars")]:
toolbar_node, toolbar_box = self._settings.get(box_setting).get(file_name, ('', ''))
data = {
"defaultset": "",
"persist": "collapsed,hidden",
"context": "toolbar-context-menu",
"class": "toolbar-buttons-toolbar chromeclass-toolbar",
"mode": "icons",
"iconsize": "small",
"customizable": "true",
}
if self._settings.get(include_setting) and toolbar_box:
number = self.toolbar_count(include_setting, values, max_count)
for i in range(number):
if self._settings.get("put_button_on_toolbar"):
data["defaultset"] = ",".join(buttons[i * max_count:(i + 1) * max_count])
button_hash.update(bytes(i))
hash = button_hash.hexdigest()[:6]
label_number = "" if (number + count) == 1 else " %s" % (i + count + 1)
toolbar_ids.append("tb-toolbar-%s" % hash)
if include_setting != "include_toolbars":
data["toolboxid"] = toolbar_box
data["id"] = "tb-toolbar-%s" % hash
toolbarname = self._dom_string_lookup("&tb-toolbar-buttons-toggle-toolbar.name;%s" % label_number)
values["tb-toolbar-buttons-toggle-toolbar-%s" % hash] = toolbar_template.replace("{{hash}}", hash).replace("{{ number }}", label_number)
toolbars.append("""createToolbar(document, '%s', %s, %s)""" % (toolbar_box, json.dumps(data), toolbarname))
count += number
return "\n\t\t".join(toolbars), toolbar_ids
| mit | 4,315,887,822,240,294,400 | 52.217877 | 288 | 0.549689 | false | 3.762243 | false | false | false |
soybean217/lora-python | UServer/admin_server/admin_http_api/api/api_group.py | 1 | 3730 | import json
from wtforms import ValidationError
from userver.object.application import Application
from . import api, root
from flask import request, Response
from userver.object.group import Group
from binascii import hexlify
from utils.errors import KeyDuplicateError, PatchError
from .decorators import group_filter_valid, group_exists
from .forms import get_formdata_from_json_or_form
from .forms.form_group import AddGroupForm, PatchGroup, device_operate
from ..http_auth import auth
@api.route(root + 'groups', methods=['GET'])
@auth.auth_required
@group_filter_valid
def group_list(user=None, app=None):
if request.method == 'GET':
if app is not None:
groups = Group.objects.all(app_eui=app.app_eui)
elif user is not None:
groups = []
apps = Application.query.filter_by(user_id=user.id)
for app in apps:
groups += Group.objects.all(app.app_eui)
else:
groups = Group.objects.all()
groups = [group.obj_to_dict() for group in groups]
groups_json = json.dumps(groups)
return Response(status=200, response=groups_json)
# elif request.method == 'POST':
# formdata = get_formdata_from_json_or_form(request)
# add_group = AddGroupForm(formdata)
# try:
# if add_group.validate():
# if len(add_group['appskey'].data) != 0:
# group = Group(add_group['app_eui'].data, add_group['name'].data, add_group['addr'].data, add_group['nwkskey'].data, appskey=add_group['appskey'].data)
# else:
# group = Group(add_group['app_eui'].data, add_group['name'].data, add_group['addr'].data, add_group['nwkskey'].data)
# group.save()
# return Response(status=201, response=json.dumps(group.obj_to_dict()))
# else:
# return Response(status=406, response=json.dumps({'errors': add_group.errors,
# 'succeed': False}))
# except KeyDuplicateError as error:
# return Response(status=403, response=json.dumps({"error": str(error),
# "succeed": False}))
@api.route(root + 'groups/<group_id>', methods=['GET'])
@auth.auth_required
@group_exists
def group_index(group):
if request.method == 'GET':
group_json = json.dumps(group.obj_to_dict())
return group_json, 200
# elif request.method == 'PATCH':
# try:
# formdata = get_formdata_from_json_or_form(request)
# PatchGroup.patch(group, formdata)
# return Response(status=200, response=json.dumps(group.obj_to_dict()))
# except (AssertionError, ValidationError, PatchError) as e:
# return json.dumps({"error": str(e)}), 406
# elif request.method == 'POST':
# POST Down Msg
# pass
# elif request.method == 'DELETE':
# try:
# group.delete()
# return json.dumps({'errors': "Group: %s deleted." % hexlify(group.id).decode(),
# 'succeed': False}), 200
# except Exception as e:
# return json.dumps({'errors': "Fail to delete group: %s.\n%s" % (hexlify(group.id).decode(), str(e)),
# 'succeed': False}), 400
# elif request.method == 'POST':
# formdata = get_formdata_from_json_or_form(request)
# error = device_operate(group, formdata)
# if error is None or len(error) == 0:
# return json.dumps({'success': True}), 200
# else:
# return json.dumps({'error': str(error)}), 406
#
| mit | -6,227,232,879,890,345,000 | 43.404762 | 172 | 0.574263 | false | 3.718843 | false | false | false |
spino327/sdr_testbed | DistributedTestbed/SlaveRX.py | 1 | 6293 | '''
Copyright (c) 2011, Universidad Industrial de Santander, Colombia
University of Delaware
All rights reserved.
@author: Sergio Pino
@author: Henry Arguello
Website: http://www.eecis.udel.edu/
emails : [email protected] - [email protected]
Date : Feb, 2011
'''
import socket
import time
import sys
from receiver.RXApp import RXApp
from util.PropertyReader import readProperties
from util import Utils
class SlaveRX(object):
'''
SlaveRX is responsible of control the RX USRP node.
'''
def __init__(self, host, port, path):
'''
Constructor
@param host: refers to the local host address
@param port: port for the server to listen
@param path: File system path where the data will be stored
'''
# server
self.server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server.bind((host, port))
self.server.listen(1)
self.path = path
self.app = None
def setRXProperties(self, lo_off, fc, dec, gain, sync):
'''
Set the USRP RX properties
@param lo_off: local oscillator offset (int)
@param fc: Center frequency (float)
@param dec: Decimation factor (int)
@param gain: Gain of the receiver in dB (int)
@param sync: True if the Hardware will use the GPSDO (boolean)
'''
self.lo_off = lo_off
self.fc = fc
self.dec = dec
self.gain = gain
self.sync = sync
def launch(self):
'''
calls startup
'''
print("i: launch SlaveRX")
while True:
sc, addr = self.server.accept()
sc.settimeout(10*60)
print("\n\ni: SlaveRX Connection from " + str(addr) + ", time " + time.strftime("%d-%m-%y/%H:%M:%S"))
tic = time.time()
try:
self.__startup__(sc, addr)
except Exception, e:
print("e: " + str(e))
sc.close()
print("i: SlaveRX Connection closed, duration: " + str(time.time() - tic) + " [seg]\n\n")
print("i: SlaveRX end launch")
def record(self, prefix, at, signame):
"""
@param prefix: prefix path folder where the signals are stored, e.g. /home/folder/
@param at: attenuation factor
@param signame: filename of the signal
Start recording
"""
# creating the folder
folder = self.path + prefix
folder = folder if (folder.endswith("/")) else folder + "/"
Utils.ensure_dir(folder)
# signal file
filename = folder + signame + "_at" + str(at) +"_G" + str(self.gain) + ".dat"
print("i: record filename = " + filename)
self.app = RXApp(self.fc, self.dec, self.gain, "addr=192.168.10.2", self.sync, filename, self.lo_off)
self.app.launch()
def __startup__(self, sc, addr):
'''
Responsible for starting the application; for creating and showing
the initial GUI.
'''
print("i: startup")
msg = sc.recv(1024)
if msg == "start":
sc.send("ok")
print("i: start ok")
msg = sc.recv(1024)
print("i: msg = " + msg)
while msg != "finish":
tic = time.time()
if msg.find("startRec") >= 0:
# message "startRec:/prefix_path/:at:signame:"
print("i: startRec received")
values = msg.split(":")
prefix = values[1]
at = float(values[2])
signame = values[3]
self.record(prefix, at, signame)
sc.send("ok")
elif msg.find("stopRec") >= 0:
print("i: stopRec received")
if self.app.stopApp():
print("i: stopRec successful")
sc.send("ok")
else:
print("i: stopRec failed")
sc.send("error")
else:
print("i: ending")
break
print("i: cmd duration: " + str(time.time() - tic) + " [seg]\n")
msg = sc.recv(1024)
else:
print("e: not start")
sc.send("error")
if msg == "finish":
print("i: finish cmd received")
sc.close()
print("i: end startup")
def __exit__(self):
'''
This method runs on the event dispatching thread.
'''
print "somebody call me!"
self.__exit__()
if __name__ == '__main__':
'''
Creates an instance of the specified {@code Application}
subclass, sets the {@code ApplicationContext} {@code
application} property, and then calls the new {@code
Application's} {@code startup} method. The {@code launch} method is
typically called from the Application's {@code main}:
'''
# Reading the properties
confFile = "confRX.txt"
if(len(sys.argv) > 1):
arg = sys.argv[1]
confFile = arg if len(arg) > 0 else confFile
else:
print("working with default config file path")
properties = readProperties(confFile)
print("Properties:")
for p in properties:
print("\t" + p + " : " + properties[p])
path = properties["rxpath"]
path = path if (path.endswith("/")) else path+"/"
sync = True if properties["sync"] == "True" else False
app = SlaveRX(properties["rxip"],
int(properties["rxport"]),
path)
app.setRXProperties(int(properties["lo_off"]),
float(properties["fc"]),
int(properties["dec"]),
int(properties["gain"]),
sync)
app.launch()
exit() | apache-2.0 | -2,031,581,930,941,409,300 | 29.259615 | 113 | 0.482918 | false | 4.301435 | false | false | false |
alanc10n/py-rau | pyrau/rau.py | 1 | 1747 | import argparse
from redis import StrictRedis
from pyrau.commands import Command
def delete(args, command):
""" Execute the delete command """
command.delete(args.pattern)
def keys(args, command):
""" Execute the keys command """
details = args.details | args.sorted
command.keys(args.pattern, details, args.sorted)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-H', '--host', default='localhost', help='Host')
parser.add_argument('-p', '--port', default=6379, type=int, help='Port')
parser.add_argument('-b', '--batch_size', default=20,
type=int, help='Batch size for pipeline operations')
subparsers = parser.add_subparsers(help='Commands')
del_parser = subparsers.add_parser('delete', help='Delete key(s)')
del_parser.add_argument('pattern', type=str, help='Key pattern')
del_parser.set_defaults(func=delete)
key_parser = subparsers.add_parser('keys', help="List keys")
key_parser.add_argument('-p', '--pattern', help='Key pattern',
default=None)
key_parser.add_argument('-d', '--details',
help='Include details for key(s)',
action='store_true')
key_parser.add_argument('-s', '--sorted',
help='Sort result by size, implies --details',
action='store_true')
key_parser.set_defaults(func=keys)
args = parser.parse_args()
return args
def main():
args = parse_args()
redis = StrictRedis(host=args.host, port=args.port)
command = Command(redis)
command.batch_size = args.batch_size
args.func(args, command)
if __name__ == '__main__':
main()
| mit | -40,468,994,944,548,810 | 31.962264 | 76 | 0.606754 | false | 3.943567 | false | false | false |
TerryRen/TrPython | NetLib/SuperCaptcha.py | 1 | 9743 | #python 2.7
#coding=utf-8
__author__ = "Terry.Ren"
#try:
# import Image
#except ImportError:
# from PIL import Image
from PIL import Image
from PIL import ImageDraw
import ImageEnhance
import os
import urllib
import StringIO
import uuid
import pytesseract #open source
class Captcha(object):
def __init__(self, isDebug = False):
self.__isDebug = isDebug
self.__currentStepId = 1
self.__tempFileList = []
def __BuildTempFileFullName(self, localDir, extName):
fname = str(uuid.uuid1()) + "_" + str(self.__currentStepId) + "." + extName
fname = os.path.join(localDir,fname)
self.__currentStepId += 1
self.__tempFileList.append(fname)
return fname
'''
Store remote image to local dir
'''
def __StoreImage2LocalDir(self, imageUrl , localDir , extName):
response = urllib.urlopen(imageUrl)
tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
with open(tempFileFullName, 'wb') as f:
f.write(response.read())
return tempFileFullName
def Clearup(self):
for filename in self.__tempFileList:
if os.path.isfile(filename):
os.remove(filename)
'''
image enhance
'''
def __imageEnhance(self, image):
enhancer = ImageEnhance.Contrast(image)
image_enhancer = enhancer.enhance(4)
return image_enhancer
'''
two value
'''
def __twoValue(self, image):
img = image.convert('RGBA') # convert to RGBA
pix = img.load() #read pix
for x in range(img.size[0]): #remove [top-bottom] border
pix[x, 0] = pix[x, img.size[1] - 1] = (255, 255, 255, 255)
for y in range(img.size[1]): #remove [left-right] border
pix[0, y] = pix[img.size[0] - 1, y] = (255, 255, 255, 255)
for y in range(img.size[1]): # two value: R=95,G=95,B=95
for x in range(img.size[0]):
if pix[x, y][0] < 95 or pix[x, y][1] < 95 or pix[x, y][2] < 95:
pix[x, y] = (0, 0, 0, 255)
else:
pix[x, y] = (255, 255, 255, 255)
return img
'''
Get Captcha Code from on-line web site
'''
def GetOnlineCaptchaCode(self, imageUrl, isStoreOriginalImage = False, localDir = '', extName = 'jpg'):
if isStoreOriginalImage == True:
if not os.path.isdir(localDir):
raise ValueError("please validate the argument GetOnlineCaptchaCode.localDir...")
localFileName = self.__StoreImage2LocalDir(imageUrl , localDir , extName)
img = Image.open(localFileName)
else:
imgBuf = StringIO.StringIO(urllib.urlopen(imageUrl).read())
img = Image.open(imgBuf)
print img.format, img.size, img.mode
# image Enhance
img = self.__imageEnhance(img)
if self.__isDebug:
img.save(self.__BuildTempFileFullName(localDir, extName))
img = self.__twoValue(img)
tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
img.save(tempFileFullName) # must use local file via tesseract-orc
text = pytesseract.image_to_string(Image.open(tempFileFullName))
return text
'''
Get Captcha Code from local
'''
def GetLocalCaptchaCode(self, imagePath, extName = 'jpg'):
localDir = os.path.dirname(imagePath)
img = Image.open(imagePath)
print img.format, img.size, img.mode
# image Enhance
img = self.__imageEnhance(img)
if self.__isDebug:
img.save(self.__BuildTempFileFullName(localDir, extName))
img = img.convert('RGBA') # convert to RGBA
pix = img.load() #read pix
for x in range(img.size[0]): #remove [top-bottom] border
pix[x, 0] = pix[x, img.size[1] - 1] = (255, 255, 255, 255)
for y in range(img.size[1]): #remove [left-right] border
pix[0, y] = pix[img.size[0] - 1, y] = (255, 255, 255, 255)
for y in range(img.size[1]): # two value: R=95,G=95,B=95
for x in range(img.size[0]):
if pix[x, y][0] < 90 or pix[x, y][1] < 90 or pix[x, y][2] < 90:
pix[x, y] = (0, 0, 0, 255)
else:
pix[x, y] = (255, 255, 255, 255)
tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
img.save(tempFileFullName) # must use local file via tesseract-orc
text = pytesseract.image_to_string(Image.open(tempFileFullName))
return text
def TestImage(self):
data = [(1,0),(0,1)]
size = (2,2)
image = Image.new("1",size)
draw = ImageDraw.Draw(image)
for x in xrange(0,size[0]):
for y in xrange(0,size[1]):
draw.point((x,y),data[x][y])
image.save("D:\\GitHub\\TrPython\\NetLib\\Test\\1.gif")
class SmartCaptcha(object):
def __init__(self, isDebug = False):
self.__isDebug = isDebug
self.__currentStepId = 1
self.__tempFileList = []
def __BuildTempFileFullName(self, localDir, extName):
fname = str(uuid.uuid1()) + "_" + str(self.__currentStepId) + "." + extName
fname = os.path.join(localDir,fname)
self.__currentStepId += 1
self.__tempFileList.append(fname)
return fname
'''
Store remote image to local dir
'''
def __StoreImage2LocalDir(self, imageUrl , localDir , extName):
response = urllib.urlopen(imageUrl)
tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
with open(tempFileFullName, 'wb') as f:
f.write(response.read())
return tempFileFullName
def Clearup(self):
for filename in self.__tempFileList:
if os.path.isfile(filename):
os.remove(filename)
'''
image enhance
'''
def __imageEnhance(self, image):
enhancer = ImageEnhance.Contrast(image)
image_enhancer = enhancer.enhance(4)
return image_enhancer
'''
two value
'''
def __twoValue(self, image):
img = image.convert('RGBA') # convert to RGBA
pix = img.load() #read pix
for x in range(img.size[0]): #remove [top-bottom] border
pix[x, 0] = pix[x, img.size[1] - 1] = (255, 255, 255, 255)
for y in range(img.size[1]): #remove [left-right] border
pix[0, y] = pix[img.size[0] - 1, y] = (255, 255, 255, 255)
for y in range(img.size[1]): # two value: R=100,G=100,B=120
for x in range(img.size[0]):
if pix[x, y][0] < 100 and pix[x, y][1] < 100:
pix[x, y] = (0, 0, 0, 255)
else:
pix[x, y] = (255, 255, 255, 255)
return img
def __getEffectivePoint(self, pix, x , y):
point, sx , sy = 0, x-1, y-1
#print sx+3 , sy +3 ,x , y
for i in xrange(3):
for j in xrange(3):
if sx+i == x and sy+j == y:
continue
if pix[sx+i,sy+j] == pix[x,y]:
point += 1
return point;
'''
1111111
1011101
1011101
1111111
'''
def __clearNoise(self, img, effectivePoint ,processCount):
for ct in xrange(0, processCount):
pix = img.load() #read pix
for x in xrange(1,img.size[0] - 1):
for y in xrange(1, img.size[1] - 1):
point = self.__getEffectivePoint(pix , x , y)
if point < effectivePoint:
pix[x, y] = (255, 255, 255, 255) # set to Noise
return img
'''
Get Captcha Code from local
'''
def GetLocalCaptchaCode(self, imagePath, extName = 'jpg'):
localDir = os.path.dirname(imagePath)
img = Image.open(imagePath)
print img.format, img.size, img.mode
# image Enhance
img = self.__imageEnhance(img)
if self.__isDebug:
img.save(self.__BuildTempFileFullName(localDir, extName))
# two value
img = self.__twoValue(img)
if self.__isDebug:
img.save(self.__BuildTempFileFullName(localDir, extName))
# clear Noise
img = self.__clearNoise(img, 3 , 1)
# orc
tempFileFullName = self.__BuildTempFileFullName(localDir, extName)
img.save(tempFileFullName) # must use local file via tesseract-orc
text = pytesseract.image_to_string(Image.open(tempFileFullName))
return text
if __name__ == "__main__":
print '[unit test]'
#validate1 = Captcha()
#print validate1.GetOnlineCaptchaCode("http://202.119.81.113:8080/verifycode.servlet")
#validate2 = Captcha(True)
#print validate2.GetOnlineCaptchaCode("http://202.119.81.113:8080/verifycode.servlet",True,"D:\\GitHub\\TrPython\\NetLib\\Test")
#validate2.Clearup()
#validate3 = Captcha(True)
#print validate3.GetLocalCaptchaCode("D:\\GitHub\\TrPython\\NetLib\\Test\\1400.gif","gif")
#validate3.TestImage()
validate4 = SmartCaptcha(True)
print validate4.GetLocalCaptchaCode("D:\\GitHub\\TrPython\\NetLib\\Test\\xxf2.jpg","jpg")
#print validate4.GetLocalCaptchaCode("D:\\GitHub\\TrPython\\NetLib\\Test\\queaa.jpg","jpg")
print pytesseract.image_to_string(Image.open("D:\\GitHub\\TrPython\\NetLib\\Test\\xxf2.jpg"))
| apache-2.0 | 4,605,390,416,347,193,000 | 29.600629 | 132 | 0.550098 | false | 3.510462 | true | false | false |
weggert/calendar_sync | calendar_sync.py | 1 | 6753 | #!/usr/bin/python
import fileinput
import os
class CalendarManager:
def __init__(self, calendar_name, dry_run, include_descriptions):
self.calendar_name = calendar_name
self.dry_run = dry_run
self.include_descriptions = include_descriptions
def clear_calendar(self):
command = """
osascript -e 'tell application "Calendar" to tell calendar "%s"
set eventList to every event
repeat with e in eventList
delete e
end repeat
end tell'
"""
command = command % self.calendar_name
if not self.dry_run:
os.system(command)
print 'Calendar cleared'
def create_calendar_event(self, summary, start_date, end_date, all_day, location, description):
if not self.include_descriptions:
description = ''
properties = 'start date:theStartDate, end date:theEndDate, summary:"%s", description:"%s", location:"%s"'\
% (summary, description, location)
if all_day is True:
properties += ', allday event:true'
command = """
osascript -e 'set theStartDate to date "%s"
set theEndDate to date "%s"
tell application "Calendar" to tell calendar "%s"
set theEvent to make new event with properties {%s}
end tell'
"""
command = command % (start_date, end_date, self.calendar_name, properties)
if not self.dry_run:
os.system(command)
self.print_summary(summary, start_date, end_date, all_day, location, description)
@staticmethod
def print_summary(summary, start_date, end_date, all_day, location, description):
print 'Summary: ' + summary
print ' Start: ' + start_date
print ' End: ' + end_date
print ' All Day: ' + str(all_day)
print ' Location: ' + location
print ' Description: ' + description
print ''
class CalendarSummaryProcessor:
class LineType:
EventStart, Summary, Location, Date, Time, Where, Notes, Status, Other = range(9)
def __init__(self):
pass
def __init__(self, calendar_name, dry_run, include_descriptions):
self.calendar_manager = CalendarManager(
calendar_name=calendar_name,
dry_run=dry_run,
include_descriptions=include_descriptions)
self.reset()
self.processing_event = False
self.first_description_line = True
self.last_description_line_was_blank = False
self.summary = ''
self.date = ''
self.time = ''
self.location = ''
self.description = ''
def reset(self):
self.processing_event = False
self.first_description_line = True
self.last_description_line_was_blank = False
self.summary = ''
self.date = ''
self.time = ''
self.location = ''
self.description = ''
def process_summary(self):
self.calendar_manager.clear_calendar()
for input_line in fileinput.input():
line_type = self.get_line_type(input_line)
if line_type is self.LineType.EventStart:
if self.processing_event:
if self.summary != 'Remote'\
and self.summary != 'IP Video - Daily Scrum'\
and self.summary != 'Cloud Team Scrum':
start_date, end_date, all_day = self.get_start_end_dates(self.date, self.time)
self.calendar_manager.create_calendar_event(
self.summary, start_date, end_date, all_day, self.location, self.description)
self.reset()
if line_type is self.LineType.Summary:
self.summary = self.sanitize_line(input_line.strip()[9:])
self.processing_event = True
if line_type is self.LineType.Date:
self.date = input_line.strip()[6:]
if line_type is self.LineType.Time:
self.time = input_line.strip()[6:]
if line_type is self.LineType.Location:
self.location = self.sanitize_line(input_line.strip()[10:])
self.processing_event = True
if line_type is self.LineType.Other:
description_line = self.sanitize_line(input_line.strip())
if len(description_line) > 0:
self.description = self.description + description_line + '\n'
self.last_description_line_was_blank = False
else:
if not self.first_description_line and not self.last_description_line_was_blank:
self.description += '\n'
self.last_description_line_was_blank = True
self.first_description_line = False
if self.processing_event:
start_date, end_date, all_day = self.get_start_end_dates(self.date, self.time)
self.calendar_manager.create_calendar_event(
self.summary, start_date, end_date, all_day, self.location, self.description)
@staticmethod
def get_start_end_dates(date, time):
dates = date.split(" to ")
times = time.split(" to ")
start_date = dates[0] + ' ' + times[0]
end_date = dates[1] + ' ' + times[1]
all_day = False
if times[0] == '12:00:00 AM' and times[1] == "12:00:00 AM" and dates[0] != dates[1]:
all_day = True
return start_date, end_date, all_day
def get_line_type(self, input_line):
if input_line.startswith('EVENT'):
return self.LineType.EventStart
if input_line.startswith('Summary:'):
return self.LineType.Summary
if input_line.startswith('Date:'):
return self.LineType.Date
if input_line.startswith('Time:'):
return self.LineType.Time
if input_line.startswith('Location:'):
return self.LineType.Location
if input_line.startswith('Where'):
return self.LineType.Where
if input_line.startswith('Notes'):
return self.LineType.Notes
if input_line.startswith('Status'):
return self.LineType.Status
return self.LineType.Other
def process_named_line(self, input_line):
colon_position = input_line.find(':')
return self.sanitize_line(input_line[colon_position+1:].strip())
@staticmethod
def sanitize_line(input_line):
return input_line.replace("'", "").replace('"', '').replace('*~*~*~*~*~*~*~*~*~*', '').strip()
CalendarSummaryProcessor(calendar_name='Work Calendar',
dry_run=False,
include_descriptions=True).process_summary() | apache-2.0 | -4,109,685,570,745,103,000 | 34.925532 | 115 | 0.577077 | false | 4.085299 | false | false | false |
allenai/allennlp | allennlp/modules/text_field_embedders/basic_text_field_embedder.py | 1 | 5232 | from typing import Dict
import inspect
import torch
from overrides import overrides
from allennlp.common.checks import ConfigurationError
from allennlp.data import TextFieldTensors
from allennlp.modules.text_field_embedders.text_field_embedder import TextFieldEmbedder
from allennlp.modules.time_distributed import TimeDistributed
from allennlp.modules.token_embedders.token_embedder import TokenEmbedder
from allennlp.modules.token_embedders import EmptyEmbedder
@TextFieldEmbedder.register("basic")
class BasicTextFieldEmbedder(TextFieldEmbedder):
"""
This is a `TextFieldEmbedder` that wraps a collection of
[`TokenEmbedder`](../token_embedders/token_embedder.md) objects. Each
`TokenEmbedder` embeds or encodes the representation output from one
[`allennlp.data.TokenIndexer`](../../data/token_indexers/token_indexer.md). As the data produced by a
[`allennlp.data.fields.TextField`](../../data/fields/text_field.md) is a dictionary mapping names to these
representations, we take `TokenEmbedders` with corresponding names. Each `TokenEmbedders`
embeds its input, and the result is concatenated in an arbitrary (but consistent) order.
Registered as a `TextFieldEmbedder` with name "basic", which is also the default.
# Parameters
token_embedders : `Dict[str, TokenEmbedder]`, required.
A dictionary mapping token embedder names to implementations.
These names should match the corresponding indexer used to generate
the tensor passed to the TokenEmbedder.
"""
def __init__(self, token_embedders: Dict[str, TokenEmbedder]) -> None:
super().__init__()
# NOTE(mattg): I'd prefer to just use ModuleDict(token_embedders) here, but that changes
# weight locations in torch state dictionaries and invalidates all prior models, just for a
# cosmetic change in the code.
self._token_embedders = token_embedders
for key, embedder in token_embedders.items():
name = "token_embedder_%s" % key
self.add_module(name, embedder)
self._ordered_embedder_keys = sorted(self._token_embedders.keys())
@overrides
def get_output_dim(self) -> int:
output_dim = 0
for embedder in self._token_embedders.values():
output_dim += embedder.get_output_dim()
return output_dim
def forward(
self, text_field_input: TextFieldTensors, num_wrapping_dims: int = 0, **kwargs
) -> torch.Tensor:
if sorted(self._token_embedders.keys()) != sorted(text_field_input.keys()):
message = "Mismatched token keys: %s and %s" % (
str(self._token_embedders.keys()),
str(text_field_input.keys()),
)
embedder_keys = set(self._token_embedders.keys())
input_keys = set(text_field_input.keys())
if embedder_keys > input_keys and all(
isinstance(embedder, EmptyEmbedder)
for name, embedder in self._token_embedders.items()
if name in embedder_keys - input_keys
):
# Allow extra embedders that are only in the token embedders (but not input) and are empty to pass
# config check
pass
else:
raise ConfigurationError(message)
embedded_representations = []
for key in self._ordered_embedder_keys:
# Note: need to use getattr here so that the pytorch voodoo
# with submodules works with multiple GPUs.
embedder = getattr(self, "token_embedder_{}".format(key))
if isinstance(embedder, EmptyEmbedder):
# Skip empty embedders
continue
forward_params = inspect.signature(embedder.forward).parameters
forward_params_values = {}
missing_tensor_args = set()
for param in forward_params.keys():
if param in kwargs:
forward_params_values[param] = kwargs[param]
else:
missing_tensor_args.add(param)
for _ in range(num_wrapping_dims):
embedder = TimeDistributed(embedder)
tensors: Dict[str, torch.Tensor] = text_field_input[key]
if len(tensors) == 1 and len(missing_tensor_args) == 1:
# If there's only one tensor argument to the embedder, and we just have one tensor to
# embed, we can just pass in that tensor, without requiring a name match.
token_vectors = embedder(list(tensors.values())[0], **forward_params_values)
else:
# If there are multiple tensor arguments, we have to require matching names from the
# TokenIndexer. I don't think there's an easy way around that.
token_vectors = embedder(**tensors, **forward_params_values)
if token_vectors is not None:
# To handle some very rare use cases, we allow the return value of the embedder to
# be None; we just skip it in that case.
embedded_representations.append(token_vectors)
return torch.cat(embedded_representations, dim=-1)
| apache-2.0 | 8,035,300,947,094,428,000 | 47.444444 | 114 | 0.640673 | false | 4.253659 | false | false | false |
perlygatekeeper/glowing-robot | google_test/free_the_bunny_prisoners/solution_5_fails.py | 1 | 1090 | import itertools
def solution(bunnies,keys_required):
answer = []
for i in range(bunnies):
answer.append([])
# if keys_required > bunnies:
# return None
if keys_required == 0:
return [[0]]
elif keys_required == 1:
key = 0
for group in range(bunnies):
answer[group].append(key)
elif bunnies == keys_required:
key = 0
for group in range(bunnies):
answer[group].append(key)
key += 1
else:
key = 0
for item in itertools.combinations(range(bunnies), keys_required):
for group in item:
answer[group].append(key)
key += 1
return answer
for num_buns in range(1,10):
for num_required in range(10):
key_dist = solution(num_buns,num_required)
print("-" * 60)
print("Answer for {0:d} bunnies, requiring {1:d}".format(num_buns,num_required))
if ( len(key_dist[0]) * len(key_dist) ) < 25:
print(key_dist)
else:
for bun in key_dist:
print(bun)
| artistic-2.0 | -8,434,445,743,401,300,000 | 28.459459 | 88 | 0.538532 | false | 3.449367 | false | false | false |
mrjmad/nagademon_2014 | nagademon2014/maingame/models/history_elements.py | 1 | 6460 | # -*- coding: utf-8 -*-
from __future__ import (print_function, division, absolute_import, unicode_literals)
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
from django.db import models
USER_MODEL = settings.AUTH_USER_MODEL
@python_2_unicode_compatible
class Character(models.Model):
short_name = models.CharField(_("NPC's short Name"), max_length=20, unique=True)
first_name = models.CharField("Firstname of Character", max_length=50)
last_name = models.CharField("Lastname of Character", max_length=50)
gender = models.PositiveSmallIntegerField(u"Gender of Character")
description = models.TextField("Description")
def __str__(self):
return u"%s %s" % (self.first_name, self.last_name)
class Meta:
abstract = True
@python_2_unicode_compatible
class PlayerCharacter(Character):
def __str__(self):
return u"PC : %s %s" % (self.first_name, self.last_name)
@python_2_unicode_compatible
class NPCharacter(Character):
def __str__(self):
return u"NPC : %s %s" % (self.first_name, self.last_name)
class PlaceManager(models.Manager):
def get_by_natural_key(self, short_name):
return self.get(short_name=short_name)
@python_2_unicode_compatible
class Place(models.Model):
objects = PlaceManager()
begin_sound = models.CharField(_("Begin's Sound"), max_length=200, blank=True, null=True)
ambiance_sound = models.CharField(_("Ambiance's Sound"), max_length=200, blank=True, null=True)
short_name = models.CharField(_("Place's short Name"), max_length=20, unique=True)
name = models.CharField("Scene's Name", max_length=200)
filename = models.CharField("Scene's Filename", max_length=80)
text = models.TextField("Scene's Text")
def __str__(self):
return self.name
def natural_key(self):
return self.short_name,
@python_2_unicode_compatible
class Scene(models.Model):
short_name = models.CharField(_("Scene's short Name"), max_length=20, unique=True)
name = models.CharField("Scene's Name", max_length=200)
filename = models.CharField("Scene's Filename", max_length=80)
begin_sound = models.CharField(_("Begin's Sound"), max_length=200, blank=True, null=True)
ambiance_sound = models.CharField(_("Ambiance's Sound"), max_length=200, blank=True, null=True)
synopsis = models.TextField("Scene's synopsis, only for authors")
final = models.BooleanField("Final Round ?", default=False)
place = models.ForeignKey(Place, verbose_name="Scene's Place",
blank=True, null=True)
is_active = models.BooleanField(_("Is active ?"), default=True)
order = models.PositiveIntegerField(_("Scene's Order"), default=0)
need_a_trigger = models.BooleanField(_("Activable only by a trigger"), default=False)
def __str__(self):
return self.name
@python_2_unicode_compatible
class PartScene(models.Model):
text = models.CharField("Scene's Text", max_length=400)
for_scene = models.ForeignKey(Scene, verbose_name="Scene")
limited_to_player = models.ForeignKey(PlayerCharacter, blank=True, null=True)
parent = models.ForeignKey('self', blank=True, null=True)
active = models.BooleanField(default=True)
def __str__(self):
return "Text %s |for scene :%s" % (self.text, self.for_scene)
@python_2_unicode_compatible
class Choice1PartSceneto1Scene(models.Model):
text = models.CharField("Choice's Text", max_length=400)
for_part_scene = models.ForeignKey(PartScene, verbose_name="Current Part Scene",
related_name="current_choices_set")
next_scene = models.ForeignKey(Scene, verbose_name="Next Scene",
related_name="leading_choices_set",
null=True, blank=True)
next_part_scene = models.ForeignKey(PartScene, verbose_name="Next Part Scene",
related_name="leading_choices_set",
null=True, blank=True)
def __str__(self):
return "%s |for scene %s , part scene id :%s" % (self.text,
self.for_part_scene.for_scene,
self.for_part_scene.id)
@python_2_unicode_compatible
class Quest(models.Model):
short_name = models.CharField(_("Quest's short Name"), max_length=20, unique=True)
title = models.CharField("Quest's Title", max_length=140)
text = models.TextField("Quest's Text")
time_frame = models.PositiveIntegerField(_("Maximum Time (in minutes) for validate the Quest"), default=0)
given_by = models.ForeignKey(NPCharacter, verbose_name=_('Given by'))
scene = models.ForeignKey(Scene, verbose_name=_("Scene who Quest is activable"),
related_name=_("quests_for_scene"))
scene_after = models.ForeignKey(Scene, verbose_name=_("Scene after the End's Quest"),
related_name=_("finished_quests_for_scene"))
apparition_function = models.CharField(_("Name of Apparition's Function"), max_length=120, blank=True, null=True)
validation_function = models.CharField(_("Name of Validation's Function"), max_length=120)
def __str__(self):
return "%s | for scene :%s, by NPC %s in time %s" % (self.title, self.scene, self.given_by,
self.timedelta)
class ObjectType(models.Model):
name = models.CharField(u"Type Object Name", max_length=200)
description = models.TextField("Type's Description", blank=True, null=True)
short_name = models.CharField(_("Type Object's short Name"), max_length=20, unique=True)
class OneObject(models.Model):
name = models.CharField(_("Type Object Name"), max_length=200)
type = models.ForeignKey(ObjectType, verbose_name=_("Object's Type"))
description = models.TextField("Object's Description", blank=True, null=True)
initial_place = models.ForeignKey(Place, verbose_name=_("Object's Initial place"),
related_name=_("initial_objects_set"), blank=True, null=True)
stored_in = models.ForeignKey(Place, related_name=_("objects_stored_set"),
verbose_name=_("Where the object is stored"), blank=True, null=True)
| mit | 5,791,698,670,006,022,000 | 43.551724 | 117 | 0.645201 | false | 3.875225 | false | false | false |
vitale232/ves | ves/VESinverse_vectorized.py | 1 | 12839 | # -*- coding: utf-8 -*-
"""
Created on Thu Jan 28 16:32:48 2016
@author: jclark
this code uses the Ghosh method to determine the apparent resistivities
for a layered earth model. Either schlumberger or Wenner configurations
can be used
"""
import numpy as np
import random
import matplotlib
matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
plt.style.use('bmh')
import sys
# Schlumberger filter
fltr1 = [0., .00046256, -.0010907, .0017122, -.0020687,
.0043048, -.0021236, .015995, .017065, .098105, .21918, .64722,
1.1415, .47819, -3.515, 2.7743, -1.201, .4544, -.19427, .097364,
-.054099, .031729, -.019109, .011656, -.0071544, .0044042,
-.002715, .0016749, -.0010335, .00040124]
#Wenner Filter
fltr2 = [0., .000238935, .00011557, .00017034, .00024935,
.00036665, .00053753, .0007896, .0011584, .0017008, .0024959,
.003664, .0053773, .007893, .011583, .016998, .024934, .036558,
.053507, .078121, .11319, .16192, .22363, .28821, .30276, .15523,
-.32026, -.53557, .51787, -.196, .054394, -.015747, .0053941,
-.0021446, .000665125]
print(len(fltr1))
print(len(fltr2))
#I know there must be a better method to assign lists. And probably numpy
#arrays would be best. But my Python wasn't up to it. If the last letter
#is an 'l' that means it is a log10 of the value
# 65 is completely arbitrary
p = [0] * 20 # earth layer parameters?
r = [0] * 65 # apparent resistivty?
rl = [0] * 65 # np.log(r) ?
t = [0] * 50 #
b = [0] * 65 #
asav = [0] * 65 # voltage spacing in meters?
asavl = [0] * 65 # np.log(asav)
adatl = [0] * 65 # interpolated voltage spacing ( np.log(10) / 6 )?
rdatl = [0] * 65 # np.log()
# adat = [0] * 65 # voltage spacing input
# rdat = [0] * 65 # apparent res input
pkeep = [0] * 65 # earth parameters after applying equations?
rkeep = [0] * 65 # r after applying equations?
rkeepl = [0] * 65 # np.log()!
pltanswer = [0] * 65
pltanswerl = [0] * 65
pltanswerkeep = [0] * 65
pltanswerkeepl = [0] * 65
rl = [0] * 65
small = [0] * 65
xlarge = [0] * 65
x=[0] * 100
y = [0] * 100
y2 = [0] * 100
u = [0] * 5000
new_x = [0] * 1000
new_y = [0] * 1000
ndat = 13
#hard coded data input - spacing and apparent resistivities measured
#in teh field
adat = [0., 0.55, 0.95, 1.5, 2.5, 3., 4.5, 5.5, 9., 12., 20., 30., 70.]
rdat = [0., 125., 110., 95., 40., 24., 15., 10.5, 8., 6., 6.5, 11., 25.]
one30 = 1.e30 # What's the purpose of this and should it be user input?
rms = one30 # Just a starting value for rmserror?
errmin = 1.e10 # Should this be user input?
# INPUT
array_spacing = 'wenner' # 1 is for shchlumberger and 2 is for Wenner
nLayers = 3 #number of layers
n = 2 * nLayers - 1 # What does n represent? number of parameters
spac = 0.2 # smallest electrode spacing - should this come from the input file?
m = 20 # number of points where resistivity is calculated
spac = np.log(spac)
delx = np.log(10.0) / 6. # I take it this is the sample interval on the log scale?
# this is where the range in parameters should be input from a GUI
# I'm hard coding this in for now
#enter thickenss range for each layer and then resistivity range.
#for 3 layers small[1] and small[2] are low end of thickness range
# small[3], small[4] and small[5] are the low end of resistivities
# I think I have it coded up that these are getting grabbed from the rectangles currently.
# Is that the best way to go?
small[1] = 1.
small[2] = 10.
small[3] = 20.
small[4] = 2.
small[5] = 500.
xlarge[1] = 5
xlarge[2] = 75.
xlarge[3] = 200.
xlarge[4] = 100
xlarge[5] = 3000.
iter_ = 10000 #number of iterations for the Monte Carlo guesses. to be input on GUI
# Is 10000 the most reasonable default, or should I play with it?
def readData(adat, rdat, ndat, return_indexed=False):
#normally this is where the data would be read from the csv file
# but now I'm just hard coding it in as global lists
for i in range(1, ndat):
adatl[i] = np.log10(adat[i])
rdatl[i] = np.log10(rdat[i])
if return_indexed:
return adatl[:ndat], rdatl[:ndat]
else:
return adatl, rdatl
<<<<<<< HEAD
=======
def error(): # simple rms error calc
sumerror = 0.
#pltanswer = [0]*64
spline(m, one30, one30, asavl, rl, y2) # So this calculates the predicted fit?
# and essentially operates on the list in place?
for i in range(1, ndat): # So you always skip the value 0? due to -inf returns?
ans = splint(m, adatl[i], asavl, rl, y2) # Then this calulates error?
sumerror = sumerror + (rdatl[i] - ans) * (rdatl[i] - ans)
#print(i,sum1,rdat[i],rdatl[i],ans)
pltanswerl[i] = ans
pltanswer[i] = np.power(10, ans)
rms = np.sqrt(sumerror / (ndat - 1))
# check the spline routine
# for i in range(1,m+1,1):
# anstest = splint(m, asavl[i],asavl,rl,y2)
# print( asavl[i], rl[i], anstest)
#print(' rms = ', rms)
# if you erally want to get a good idea of all perdictions from Montecarlo
# perform the following plot (caution - change iter to a smaller number)
#plt.loglog(adat[1:ndat],pltanswer[1:ndat])
return rms
>>>>>>> 60497dd... ?s
def transf(y, i):
# these lines apparently find the computer precision ep
ep = 1.0
ep = ep / 2.0
fctr = ep + 1.
while fctr > 1.:
ep = ep / 2.0
fctr = ep + 1.
u = 1. / np.exp(y) # y = spac - 19. * delx - 0.13069
t[1] = p[n]
for j in range(2, nLayers + 1, 1):
pwr = -2. * u * p[nLayers + 1 - j]
if pwr < np.log(2. * ep):
pwr = np.log(2. * ep)
a = np.exp(pwr)
b = (1. - a) / (1. + a)
rs = p[n + 1 - j]
tpr = b * rs
t[j] = (tpr + t[j - 1]) / (1. + tpr * t[j - 1] / (rs * rs))
r[i] = t[nLayers]
return
def filters(b, k):
for i in range(1, m + 1):
re = 0.
for j in range(1, k + 1):
re = re + b[j] * r[i + k - j] # include ranges of thickness, res . push button for rmse error, observed data
# surf thicknes .2 - 100
# res 2-3000 # could use huge ranges at cost of time
r[i] = re
return
def rmsfit():
if array_spacing.lower() == 'wenner':
y = spac - 19. * delx - 0.13069
mum1 = m + 28
for i in range(1, mum1 + 1):
transf(y, i)
y = y + delx
filters(fltr1, 29)
elif array_spacing.lower() == 'schlumberger':
s = np.log(2.)
y = spac - 10.8792495 * delx
mum2 = m + 33
for i in range(1, mum2 + 1):
transf(y, i)
a = r[i]
y1 = y + s
transf(y1, i)
r[i] = 2. * a - r[i]
y = y + delx
filters(fltr2, 34)
else:
print("\nType of survey not indicated.")
raise SystemExit('Exiting.\n\n Take better care next time.')
x = spac
#print("A-Spacing App. Resistivity")
for i in range(1, m + 1):
a = np.exp(x)
asav[i] = a
asavl[i] = np.log10(a)
rl[i] = np.log10(r[i])
x = x + delx
#print("%7.2f %9.3f " % ( asav[i], r[i]))
rms = error()
return rms
def error(): # simple rms error calc
sumerror = 0.
#pltanswer = [0]*64
spline(m, one30, one30, asavl, rl, y2) # So this calculates the predicted fit?
# and essentially operates on the list in place?
for i in range(1, ndat): # So you always skip the value 0? due to -inf returns?
ans = splint(m, adatl[i], asavl, rl, y2) # Then this calulates error?
sumerror = sumerror + (rdatl[i] - ans) * (rdatl[i] - ans)
#print(i,sum1,rdat[i],rdatl[i],ans)
pltanswerl[i] = ans
pltanswer[i] = np.power(10, ans)
rms = np.sqrt(sumerror / (ndat - 1))
# check the spline routine
# for i in range(1,m+1,1):
# anstest = splint(m, asavl[i],asavl,rl,y2)
# print( asavl[i], rl[i], anstest)
#print(' rms = ', rms)
# if you erally want to get a good idea of all perdictions from Montecarlo
# perform the following plot (caution - change iter to a smaller number)
#plt.loglog(adat[1:ndat],pltanswer[1:ndat])
return rms
# my code to do a spline fit to predicted data at the nice spacing of Ghosh
# use splint to determine the spline interpolated prediction at the
# spacing where the measured resistivity was taken - to compare observation
# to prediction
def spline(n, yp1, ypn, x=[] ,y=[] ,y2=[]):
"""Still struggling to understand the general operation of this function."""
u = [0] * 1000
one29 = 0.99e30
#print(x,y)
if yp1 > one29:
y2[0] = 0.
u[0] = 0.
else:
y2[0] = -0.5
u[0] = (3. / (x[1] - x[0])) * ((y[1] - y[0]) / (x[1] - x[0]) - yp1)
for i in range(1, n):
#print(i,x[i])
sig = (x[i] - x[i-1]) / (x[i+1] - x[i-1])
p=sig * y2[i - 1] + 2.
y2[i] = (sig-1.) / p
u[i] = (((6. * ((y[i+1] - y[i]) / (x[i+1] - x[i]) - (y[i] - y[i-1]) /
x[i] - x[i-1])) / (x[i + 1] - x[i - 1]) - sig * u[i - 1]) / p)
if ypn > one29:
qn = 0.
un = 0.
else:
qn = 0.5
un = (3. / (x[n] - x[n - 1])) * (ypn - (y[n] - y[n - 1]) / (x[n] - x[n - 1]))
y2[n] = (un - qn * u[n - 1]) / (qn * y2[n - 1] + 1.)
for k in range(n-1, -1, -1):
y2[k] = y2[k] * y2[k + 1] + u[k]
return
def splint(n, x ,xa=[], ya=[], y2a=[]): # Is this function the T function?
"""Still struggling to understand the general operation of this function."""
klo = 0
khi = n
while khi - klo > 1:
k = int((khi + klo) // 2)
if xa[k] > x:
khi = k
else:
klo = k
h = xa[khi] - xa[klo]
if abs(h) < 1e-20:
print(" bad xa input")
#print(x,xa[khi],xa[klo])
a = (xa[khi] - x) / h
b = (x - xa[klo]) / h
y = (a * ya[klo] + b * ya[khi] + ((a * a * a - a) * y2a[klo] +
(b * b * b - b) * y2a[khi]) * (h * h) /6.)
#print("x= ", x,"y= ", y, " ya= ", ya[khi]," y2a= ", y2a[khi], " h= ",h)
return y
#main here
if __name__ == '__main__':
adatl, rdatl = readData(adat, rdat, ndat, return_indexed=False)
print(adat[1:ndat],rdat[1:ndat])
print('log stufffff')
print(adatl[1:ndat], rdatl[1:ndat]) # is this to skip 0?
#enter thickenss range for each layer and then resistivity range.
#for 3 layers small[1] and small[2] are low end of thickness range
# small[3], small[4] and small[5] are the low end of resistivities
for iloop in range(1, int(iter_/2) + 1):
#print( ' iloop is ', iloop)
for i in range(1, n + 1): # number of parameters + 1
randNumber = random.random() # IS this just to add noise to the model?
# #print(randNumber, ' random')
# print(xlarge)
# print(small)
# s = input('')
# print('xlarge[i]: {}, small[i]: {}'.format(xlarge[i], small[i]))
p[i] = (xlarge[i] - small[i]) * randNumber + small[i]
# print(p)
print('\n')
print(p)
# s = input('')
rms = rmsfit()
if rms < errmin:
print('rms ', rms, ' errmin ', errmin)
for i in range(1, n + 1):
pkeep[i] = p[i]
for i in range(1, m + 1):
rkeep[i] = r[i]
rkeepl[i] = rl[i]
for i in range(1, ndat + 1):
pltanswerkeepl[i] = pltanswerl[i]
pltanswerkeep[i] = pltanswer[i]
errmin = rms
#output the best fitting earth model
print(' Layer ', ' Thickness ', ' Res_ohm-m ')
for i in range(1,nLayers,1):
print(i, pkeep[i], pkeep[nLayers+i-1])
print( nLayers, ' Infinite ', pkeep[n])
for i in range(1,m+1, 1):
asavl[i] = np.log10(asav[i])
#output the error of fit
print( ' RMS error ', errmin)
print( ' Spacing', ' Res_pred ', ' Log10_spacing ', ' Log10_Res_pred ')
for i in range(1,m+1,1):
#print(asav[i], rkeep[i], asavl[i], rkeepl[i])
print("%7.2f %9.3f %9.3f %9.3f" % ( asav[i], rkeep[i],
asavl[i], rkeepl[i]))
print('plot a lot')
plt.loglog(asav[1:m],rkeep[1:m],'-') # resistivity prediction curve
plt.loglog(adat[1:ndat],pltanswerkeep[1:ndat], 'ro') # predicted data red dots
s=7
plt.loglog(adat[1:ndat],rdat[1:ndat],'bo',markersize=s) #original data blue dots
plt.show()
plt.grid(True)
sys.exit(0)
| lgpl-3.0 | -1,006,991,285,408,766,500 | 31.442708 | 120 | 0.53283 | false | 2.705225 | false | false | false |
mikoim/funstuff | null/crawler/tt2db.py | 1 | 1709 | # -*- coding: utf-8 -*-
import urllib.request
import time
import pymongo
import http.client
import re
def httpWrapper(url):
try:
data_raw = urllib.request.urlopen(url).read().decode('utf-8')
except:
return "NULL"
return data_raw
def getGirlName(data_raw):
matches = re.findall('名前[ ]+?/[ ]+?(.+?)(|\n)*( |)*(|\n)*( |)*(\(|<br />)', data_raw)
for match in matches[0]:
return match.replace(' ', '')
return
def getGrilPhotos(data_raw):
matches = re.findall('<span>(photos/.+?.jpg)</span>', data_raw)
if len(matches) == 0:
matches = re.findall('<a href="(photos/.+?.jpg)">', data_raw)
return matches
def getLastModTime(path):
conn = http.client.HTTPConnection("twintail-japan.com")
conn.request("HEAD", path)
res = conn.getresponse()
return int(time.mktime(time.strptime(res.getheaders()[2][1], '%a, %d %b %Y %H:%M:%S %Z')) * 1000)
conn = pymongo.Connection()
db = conn.tw2db
col = db.tm
for x in range(1, 3):
baseUrl = "http://twintail-japan.com/sailor/contents/%d.html" % x
data_raw = httpWrapper(baseUrl)
if data_raw != "NULL":
name = getGirlName(data_raw)
for photo in getGrilPhotos(data_raw):
dbtml = {'author' : '', 'time' : '', 'title' : '', 'via' : '', 'src' : '', 'message' : ''}
dbtml['author'] = name
dbtml['title'] = name + " @ セーラ服とツインテール"
dbtml['via'] = baseUrl
dbtml['message'] = ""
dbtml['time'] = getLastModTime("/sailor/contents/%d.html" % x)
dbtml['src'] = 'http://twintail-japan.com/sailor/contents/%s' % (photo)
col.insert(dbtml)
print(x) | mit | -7,780,927,202,663,628,000 | 26.606557 | 102 | 0.562686 | false | 2.978761 | false | false | false |
digifant/eMonitor | tools/update-osm-data.py | 1 | 10402 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
import logging.handlers
import traceback
import os
import time
from optparse import OptionParser
import MySQLdb
import codecs
import requests
import sys
import pdb
import argparse
from pprint import pprint
def osmWebUrl (lat,lng):
return "http://www.openstreetmap.org/?&mlat=%s&mlon=%s&zoom=17" % (lat,lng)
def str2bool(v):
if v.lower() in ('yes', 'true', 't', 'y', '1', 'j', 'ja'):
return True
elif v.lower() in ('no', 'false', 'f', 'n', '0', 'nein'):
return False
else:
raise argparse.ArgumentTypeError('Boolean value expected.')
def prompt(query):
sys.stdout.write('%s [y/n]: ' % query)
val = raw_input()
try:
ret = str2bool(val)
except ValueError:
sys.stdout.write('Please answer with a y/n\n')
return prompt(query)
return ret
# returns None if not found!
def queryOsmNominatim(street, streetno, city):
url = 'http://nominatim.openstreetmap.org/search'
params = 'format=json&city={}&street={}'.format(city, street)
#params = 'format=json&city=%s&street=%s' % (city, address)
if streetno != '':
params += ' {}'.format(streetno)
params = params.replace (' ', '+')
    params = params.replace ('<', '&lt;')
    params = params.replace ('>', '&gt;')
logging.debug ("OSM nominatim query: %s?%s" % (url,params))
headers = {
'User-Agent': 'OSMSyncForFireFighterStreetDbOfOurTown',
'From': '[email protected]'
}
r = requests.get('{}?{}'.format(url, params), timeout=3, headers=headers)
#logging.debug("osm nomination result: %s" % pprint(r.json()))
#import pdb; pdb.set_trace()
_position = None
try:
_position = {'lat':r.json()[0]['lat'], 'lng':r.json()[0]['lon'], 'osm_id':r.json()[0]['osm_id'].decode('iso-8859-1').encode('utf8') }
except IndexError:
logging.error ("street %s not found! (housenumber=%s)" % (street, streetno))
#logging.debug (_position)
return _position
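# Illustrative call (example arguments):
#   queryOsmNominatim(street='Hauptstrasse', streetno='', city='Kleinblittersdorf')
# returns {'lat': ..., 'lng': ..., 'osm_id': ...} on success, or None if not found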
def updateMysqlStreets (db, user, passwd, command):
# Open database connection
db = MySQLdb.connect("localhost",user,passwd,db )
# prepare a cursor object using cursor() method
cursor = db.cursor()
# execute SQL query using execute() method.
cursor.execute("SELECT VERSION()")
# Fetch a single row using fetchone() method.
data = cursor.fetchone()
print "Database version : %s " % data
not_found = {}
if command == "update_position":
sql = "SELECT * FROM streets"
try:
cursor.execute(sql)
results = cursor.fetchall()
for row in results:
print ("Street DB %s lat=%s lng=%s" % (row[1].decode('iso-8859-1').encode('utf8'), row[5], row[6]) )
if ( row[0] > 0 ):
                    _position = queryOsmNominatim (street=row[1].decode('iso-8859-1').encode('utf8'), streetno='', city='Kleinblittersdorf')
#No heavy uses (an absolute maximum of 1 request per second).
#http://wiki.openstreetmap.org/wiki/Nominatim_usage_policy
time.sleep (1)
if _position != None:
if row[9] == int(_position['osm_id']):
sql = 'update streets set lat=%s, lng=%s where id = %s' % (float(_position['lat']), float(_position['lng']), int(row[0]))
logging.debug ("sql query %s" % sql)
try:
cursor.execute(sql)
db.commit()
logging.info ("street %s updated lat and lng to (%s,%s)" % (row[1].decode('iso-8859-1').encode('utf8'), float(_position['lat']), float(_position['lng'])))
except:
db.rollback()
logging.error ("SQL Error %s" % traceback.format_exc())
else:
logging.fatal ("OSMID stimmt nicht überein! %s vs %s" % (row[9], _position['osm_id'] ))
else:
logging.fatal ("OSM nominatin Query failed!")
not_found[row[0]] = row[1].decode('iso-8859-1').encode('utf8')
except:
logging.error ("DB Error %s" % traceback.format_exc() )
# disconnect from server
db.close()
logging.info ("Sync finished")
if len(not_found) > 0:
logging.error ("didnt found %s streets:" % len(not_found))
for k in not_found.keys():
logging.error ("not found: id=%s streetname=%s" % (k, not_found[k]))
def verifyMysqlStreets (db, user, passwd, command, street=-1):
# Open database connection
db = MySQLdb.connect("localhost",user,passwd,db )
# prepare a cursor object using cursor() method
cursor = db.cursor()
# execute SQL query using execute() method.
cursor.execute("SELECT VERSION()")
# Fetch a single row using fetchone() method.
data = cursor.fetchone()
print "Database version : %s " % data
not_found = {}
if command == "verify_streets":
sql = "SELECT * FROM streets"
if street > 0:
sql = sql + " where id=%i" % street
try:
cursor.execute(sql)
results = cursor.fetchall()
for row in results:
print ("Street %s lat=%s lng=%s url=%s" % (row[1].decode('iso-8859-1').encode('utf8'), row[5], row[6], osmWebUrl(row[5],row[6]) ) )
if ( row[0] > 0 ):
                    _position = queryOsmNominatim (street=row[1].decode('iso-8859-1').encode('utf8'), streetno='', city='Kleinblittersdorf')
if _position != None:
sql = 'update streets set lat=%s, lng=%s, osmid=%s where id = %s' % (float(_position['lat']), float(_position['lng']), int(_position['osm_id']), int(row[0]))
logging.debug ("sql query %s" % sql)
if row[9] == int(_position['osm_id']):
logging.info ("osmid=%s db lat=%s db lng=%s OsmNominatim lat=%s lng=%s new url=%s" % (row[9], row[5], row[6], float(_position['lat']), float(_position['lng']), osmWebUrl(float(_position['lat']),float(_position['lng'])) ) )
if round(float(row[5]),4) != round(float(_position['lat']),4) or round(float(row[6]),4) != round(float(_position['lng']),4):
logging.info ("%i NO MATCH" % row[9])
if options.ask_fix and prompt ("Fix?"):
try:
cursor.execute(sql)
db.commit()
logging.info ("street %s updated lat, lng, osmid to (%s,%s,%s)" % (row[1].decode('iso-8859-1').encode('utf8'), float(_position['lat']), float(_position['lng']), (_position['osm_id'])))
except:
db.rollback()
logging.error ("SQL Error %s" % traceback.format_exc())
else:
logging.info ("%i MATCH" % row[9])
else:
logging.fatal ("OSMID stimmt nicht überein! %s vs %s url=%s" % (row[9], _position['osm_id'], osmWebUrl(float(_position['lat']),float(_position['lng']))))
if options.ask_fix and prompt ("Fix?"):
try:
cursor.execute(sql)
db.commit()
logging.info ("street %s updated lat, lng, osmid to (%s,%s,%s)" % (row[1].decode('iso-8859-1').encode('utf8'), float(_position['lat']), float(_position['lng']), (_position['osm_id'])))
except:
db.rollback()
logging.error ("SQL Error %s" % traceback.format_exc())
else:
logging.fatal ("OSM nominatin Query failed!")
not_found[row[0]] = row[1].decode('iso-8859-1').encode('utf8')
#No heavy uses (an absolute maximum of 1 request per second).
#http://wiki.openstreetmap.org/wiki/Nominatim_usage_policy
time.sleep (1)
except:
logging.error ("DB Error %s" % traceback.format_exc() )
# disconnect from server
db.close()
logging.info ("verify finished")
if __name__ == '__main__':
parser = OptionParser()
parser.add_option("-d", "--database", dest="database", help="mysql database name", default="emonitor")
parser.add_option("-u", "--user", dest="user", help="mysql user", default='emonitor')
parser.add_option("-p", "--passwd", dest="passwd", help="mysql password", default='emonitor')
parser.add_option("--update-streets-position", dest="update_streets_position", help="update positions for all streets", action="store_true", default=False)
parser.add_option("--verify-street-position", dest="verify_street_position", help="verify positions for given street", type=int, default=-1)
parser.add_option("-v", "--verify-all-streets-position", dest="verify_all_streets_position", help="verify positions for given street", action="store_true", default=False)
parser.add_option("-a", "--ask-fix", dest="ask_fix", help="ask for fixing", action="store_true", default=False)
(options, args) = parser.parse_args()
#logging.basicConfig(filename='screenshot-and-telegram.log', level=logging.DEBUG)
logging.basicConfig(level=logging.DEBUG)
if options.update_streets_position:
updateMysqlStreets (db=options.database, user=options.user, passwd=options.passwd, command="update_position")
if options.verify_street_position > 0:
verifyMysqlStreets (db=options.database, user=options.user, passwd=options.passwd, command="verify_streets", street=int(options.verify_street_position))
if options.verify_all_streets_position:
verifyMysqlStreets (db=options.database, user=options.user, passwd=options.passwd, command="verify_streets")
    #queryOsmNominatim(street="Rexrothstraße", streetno='', city='Kleinblittersdorf')
| bsd-3-clause | -7,600,303,221,806,536,000 | 45.424107 | 250 | 0.544379 | false | 3.781455 | false | false | false |
suma12/asterix | asterix/APDU.py | 1 | 31348 | """ asterix/APDU.py
__author__ = "Petr Tobiska"
Author: Petr Tobiska, mailto:[email protected]
This file is part of asterix, a framework for communication with smartcards
based on pyscard. This file implements handfull APDU commands.
asterix is free software; you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation; either version 2.1 of the License, or
(at your option) any later version.
asterix is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public License
along with pyscard; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
References:
[GP CS] GlobalPlatform Card Specification, Version 2.2.1, Jan 2011
[GP AmD] Secure Channel Protocol 03, Card Specification v 2.2 - Amendment D,
Version 1.1, Sep 2009
"""
import re
import hashlib
import random
from struct import pack, unpack
from binascii import hexlify, unhexlify
# PyCrypto
from Crypto.Cipher import DES, DES3, AES
# ECSDA
from ecdsa import ecdsa, ellipticcurve
# pyscard
from smartcard.ATR import ATR
# asterix
from formutil import s2l, l2s, derLen, derLV, s2int, int2s, s2ECP, chunks,\
split2TLV, findTLValue, swapNibbles
from GAF import GAF
from applet import DESsign
from SCP03 import CMAC
from mycard import ISOException, resetCard
__all__ = ('calcKCV', 'putKey', 'storeDataPutKey',
'push2B_DGI', 'X963keyDerivation', 'Push3scenario',
'selectApplet', 'openLogCh', 'closeLogCh',
'getStatus', 'getExtCardRes', 'getData',
'selectFile', 'readBinary', 'readRecord',
'updateBinary', 'updateRecord',
'verifyPin', 'changePin', 'disablePin', 'enablePin', 'unblockPin',
'selectUSIM', 'cardInfo', 'KeyType')
INS_VERIFY_PIN = 0x20
INS_CHANGE_PIN = 0x24
INS_DISABLE_PIN = 0x26
INS_ENABLE_PIN = 0x28
INS_UNBLOCK_PIN = 0x2C
INS_MANAGE_LOGCH = 0x70
INS_SELECT = 0xA4
INS_READBIN = 0xB0
INS_READREC = 0xB2
INS_GETDATA = 0xCA
INS_UPDBIN = 0xD6
INS_UPDREC = 0xDC
INS_PUTKEY = 0xD8
INS_STOREDATA = 0xE2
INS_GETSTATUS = 0xF2
class KeyType:
"""Key types as defined in [GP CS] Tab 11.16"""
# subset of currently supported keys
DES_IMPLICIT = 0x80
TDES_CBC = 0x82
DES_ECB = 0x83
DES_CBC = 0x84
AES = 0x88
def calcKCV(keyValue, zAES=False):
"""Calculate KCV for symmetric key.
keyValue - key values as string (DES, 3DES2k, 3DES3k, AES)
zAES - True if key is AES (i.e. encrypt block of '01' instead of '00')
Return 3B-long string."""
if zAES:
assert len(keyValue) in (16, 24, 32), "Wrong length of AES key"
block = '\x01'*16
tkey = AES.new(keyValue, AES.MODE_ECB)
else:
assert len(keyValue) in (8, 16, 24), "Wrong length of (3)DES key"
block = '\x00'*8
if len(keyValue) == 8:
tkey = DES.new(keyValue, DES.MODE_ECB)
else:
            tkey = DES3.new(keyValue, DES3.MODE_ECB)
return tkey.encrypt(block)[:3]
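# Usage sketch (illustrative values, not official test vectors):
#   calcKCV('\x00' * 16)             # KCV of an all-zero 2-key 3DES key
#   calcKCV('\x00' * 16, zAES=True)  # KCV of an all-zero AES-128 key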
def putKey(oldKeyVersion, newKeyVersion, keyId, keyComponents,
zMoreCmd=False, zMultiKey=False, keyDEK=None,
lenMAC=8):
"""Build APDU for PUT KEY command.
oldKeyVersion - key version to be replaced. If zero, new key is created.
newKeyVersion - key version of key being put
keyId - id of the 1st key being put
keyComponents - list of key components being put.
                   Each component is a tuple of key type (u8) and value (string).
zMoreCmd - P1.b8, signals if there is more commands
zMultiKey - P2.b8, signals if more than one component being put
keyDEK - KIK or DEK key. keyDEK.encrypt(data) called to encrypt
(including padding) key component value if not None.
If has attribute zAES and keyDEK.zAES evaluates as True, it is
considered as AES key and [GP AmD] 7.2 formatting is used.
lenMAC - length of CMAC for AES.
Applicable if AES key with key id=0x02 (KID) and
key version 0x01-0x0F or 0x11 is being put with AES keyDEK
(see ETSI 102.226 rel 9+, 8.2.1.5)
Returns APDU built (as list of u8).
See [GP CS] 11.8 and [GP AmD] 7.2 for reference.
See [GP CS] Tab 11.16 for coding of key type.
Currently only Format1 supported.
"""
# sanity check
assert 0 <= oldKeyVersion < 0x80
assert 0 < newKeyVersion < 0x80
assert 0 < keyId < 0x80
assert len(keyComponents) > 0
assert lenMAC in (4, 8)
P1 = (zMoreCmd and 0x80 or 0) | oldKeyVersion
P2 = (zMultiKey and 0x80 or 0) | keyId
data = chr(newKeyVersion)
for kc in keyComponents:
keyType, keyVal = kc[:2] # ignore eventual keyUsage and keyAccess
assert 0 <= keyType < 0xFF
if keyDEK:
encValue = keyDEK.encrypt(keyVal)
# for AES as keyDEK, prepend length of component
if 'zAES' in dir(keyDEK) and keyDEK.zAES:
encValue = derLen(keyVal) + encValue
# see ETSI 102.226 rel 9+, 8.2.1.5
if keyType == KeyType.AES and keyId == 2 and \
newKeyVersion in range(0x01, 0x10) + [0x11]:
encValue += chr(lenMAC)
else:
encValue = keyVal
# calculate KCV
if keyType in (KeyType.DES_IMPLICIT, KeyType.TDES_CBC,
KeyType.DES_ECB, KeyType.DES_CBC, KeyType.AES):
kcv = calcKCV(keyVal, keyType == KeyType.AES)
else:
kcv = ''
data += chr(keyType) + derLen(encValue) + encValue + derLen(kcv) + kcv
keyId += 1
apdu = [0x80, INS_PUTKEY, P1, P2, len(data)] + s2l(data)
return apdu
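# Minimal sketch (version/id/key value are made-up examples):
#   apdu = putKey(oldKeyVersion=0, newKeyVersion=0x20, keyId=1,
#                 keyComponents=[(KeyType.AES, '\x00' * 16)])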
def push2B_DGI(keyVer, keys, keyCASDenc):
""" Create DGI 00A6 and 8010 for Push2B scenario
keyVer - key verions (u8)
keys - ((keytype, keyvalue)); 1 or 3 sym. keys
keyCASDenc - a method to call for encryption 8010 content
Return DGIs built (as list of strings)."""
# DGI tag on 2B (GP Card Spec 2.2.1, 11.1.12)
# DGI length coding as in GP Systems Scripting Language Spec. v1.1.0, an. B
# i.e. on 1B for x < 255, FF<yyyy> for x >=255
KAT = GAF(""" -- Control Reference Template (KAT)
-- see GP 2.2.1 AmA 4.4
00A6 #[
A6 #(
90 #(04) -- scenario identifier: Push#2B
95 #($keyUsage)
80 #($keyType)
81 #($keyLen)
83 #($keyVer)
-- 45 #($SDIN) -- optional Security Domain Image Number
)] """)
assert len(keys) in (1, 3), "One or three sym. keys expected"
keyUsage = len(keys) == 1 and '\x5C' or '\x10' # Tab. 13
keyType = keys[0][0]
assert all([k[0] == keyType for k in keys]), "Key types differ"
# remap keyType to '80' as required by GP UICC config 10.3.1
if keyType in (KeyType.TDES_CBC, KeyType.DES_ECB, KeyType.DES_CBC):
keyType = KeyType.DES_IMPLICIT
lens = [len(k[1]) for k in keys]
l = max(lens)
assert l == min(lens), "Key lengths differ"
dgi00A6 = KAT.eval(keyUsage=keyUsage, keyType=chr(keyType),
keyLen=chr(l), keyVer=chr(keyVer))
data = keyCASDenc(''.join([k[1] for k in keys]))
dgi8010 = pack(">H", 0x8010) + chr(len(data)) + data
return (dgi00A6, dgi8010)
def storeDataPutKeyDGI(keyVer, keyComponents, keyId=1, keyDEK=None):
"""Build DGI for Store Data for Put Key.
keyVer - key version of key being created
keyComponents - list of key components being put.
                   Each component is a tuple of key type (u8), value (string)
and optionally Key Usage Qualifier and Key Access
(u8, defaults 0x18, 0x14 or 0x48 for key UQ, 0x00 for key ac.)
keyId - id of the 1st key being created (optional, u8, default 1)
keyDEK - KIK or DEK key. keyDEK.encrypt(data) called to encrypt
(including padding) key component value if not None.
If has attribute zAES and keyDEK.zAES evaluates as True, it is
considered as AES key and [GP AmD] 7.2 formatting is used.
Returns DGIs built (as list of string).
See GP 2.2.1 AmA 4.10.2 for reference.
"""
# sanity check
assert 0 < keyVer and keyVer < 0x80
assert 0 < keyId and keyId < 0x80
assert len(keyComponents) > 0
KeyUQ = (None, 0x38, 0x34, 0xC8) # see GP 2.2.1, 11.1.9
templ = """ B9 #(95#($keyUQ) 96#($keyAc) 80#($keyType) 81#($keyLen)
82#($keyId) 83#($keyVer) 84#($KCV))"""
d = {'keyVer': chr(keyVer)}
B9 = ''
dgi8113 = []
for kc in keyComponents:
assert len(kc) in (2, 4), "wrong keyComponent" + kc.__str__()
if len(kc) == 2:
keyType, keyVal = kc
keyUQ = 1 <= keyId <= 3 and KeyUQ[keyId] or 0xFF
keyAc = 0x00
else:
keyType, keyVal, keyUQ, keyAc = kc
d['keyLen'] = chr(len(keyVal))
assert 0 <= keyType < 0xFF
if keyType in (KeyType.DES_IMPLICIT, KeyType.TDES_CBC,
KeyType.DES_ECB, KeyType.DES_CBC, KeyType.AES):
d['KCV'] = calcKCV(keyVal, keyType == KeyType.AES)
else:
d['KCV'] = ''
d['keyId'] = chr(keyId)
for k in ('keyType', 'keyUQ', 'keyAc', 'keyId'):
d[k] = chr(locals()[k])
tlv = GAF(templ).eval(**d)
if keyDEK:
encValue = keyDEK.encrypt(keyVal)
else:
encValue = keyVal
B9 += tlv
dgi8113.append(pack(">HB", 0x8113, len(encValue)) + encValue)
keyId += 1
return(pack(">HB", 0x00B9, len(B9)) + B9, dgi8113)
def storeDataPutKey(keyVer, keyComponents, keyId=1, keyDEK=None):
"""Build APDU for Store Data for Put Key.
keyVer, keyComponents, keyId and keyDEK as in storeDataPutKeyDGI.
    Return APDU as a u8 list."""
dgi00B9, dgi8113 = storeDataPutKeyDGI(keyVer, keyComponents,
keyId, keyDEK)
data = dgi00B9 + ''.join(dgi8113)
assert len(data) < 256, "Longer Put Key not implemented"
P1 = 0x88
P2 = 0
apdu = [0x80, INS_STOREDATA, P1, P2, len(data)] + s2l(data)
return apdu
# ###### Scenario 3 stuff
# Preloaded ECC Curve Parameters, GP 2.2.1 AmE 4.5
# N.B., all have cofactor = 1
ECC_Curves = {
0x00: ecdsa.generator_256, # NIST P-256
0x01: ecdsa.generator_384, # NIST P-384
0x02: ecdsa.generator_521, # NIST P-521
# 0x03: brainpoolP256r1,
# 0x04: brainpoolP256t1,
# 0x05: brainpoolP384r1,
# 0x06: brainpoolP384t1,
# 0x07: brainpoolP512r1,
# 0x08: brainpoolP512t1,
}
# tag definition
T_IIN = 0x42
T_SDIN = T_CIN = 0x45
T_keyType = 0x80
T_keyLen = 0x81
T_keyID = 0x82
T_keyVer = 0x83
T_DR = 0x85
T_HostID = 0x84
T_receipt = 0x86
T_scenarioID = 0x90
T_seqCounter = 0x91
T_keyUsage = 0x95
T_keyAcc = 0x96
T_CRT = 0xA6
def X963keyDerivation(sharedSecret, bytelen, sharedInfo='',
h = hashlib.sha256):
""" X9.63 Key Derivation Function as deifned in TR-03111 4.3.3
bytelen - expected length of Key Data
sharedSecret, sharedInfo - strings
h - function to create HASH object (default hashlib.sha256)
Return Key Data (string)
Reference: TR-03111: BSI TR-03111 Elliptic Curve Cryptography, Version 2.0
https://www.bsi.bund.de/SharedDocs/Downloads/EN/BSI/Publications/TechGuidelines/TR03111/BSI-TR-03111_pdf.html"""
keyData = ''
l = h().digest_size
j = (bytelen - 1)/l + 1
for i in xrange(1, 1+j):
keyData += h(sharedSecret + pack(">L", i) + sharedInfo).digest()
return keyData[:bytelen]
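# Sketch (inputs are placeholders): derive 3 x 16 B of key material
#   keyData = X963keyDerivation(sharedSecret, 48, sharedInfo)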
def DESMAC(key, data):
""" Calculate MAC single DES with final 3DES"""
return DESsign(key).calc(data)
ktDES = KeyType.DES_IMPLICIT
ktAES = KeyType.AES
class Push3scenario:
""" Implementation of Global Platform Push #3 scenario (ECKA)"""
def __init__(self, keyParRef, pkCASD, **kw):
""" Constructor
keyParRef - Key Parameter Reference
pkCASD - PK.CASD.ECKA (tuple long x, long y)
optional **kw: IIN, CIN (as strings)"""
assert keyParRef in ECC_Curves, \
"Unknown Key param reference 0x%02X" % keyParRef
self.keyParRef = keyParRef
self.generator = ECC_Curves[keyParRef]
self.curve = self.generator.curve()
self.bytelen = len(int2s(self.curve.p()))
assert self.bytelen in (32, 48, 64, 66) # currently allowed keys
pkCASDxy = s2ECP(pkCASD)
assert self.curve.contains_point(*pkCASDxy),\
"PK.CASD.ECKA not on the curve"
self.pkCASD = ellipticcurve.Point(self.curve, *pkCASDxy)
for k in ('IIN', 'CIN'):
if k in kw:
assert isinstance(kw[k], str)
self.__dict__[k] = kw[k]
def makeDGI(self, keyVer, privkey=None,
keys=([(KeyType.AES, 16)]*3),
zDelete=False, zDR=False, zID=False, **kw):
""" Prepare data for Push #3 scenario and generate keys.
keyVer - key version to create
privkey - eSK.AP.ECKA (secret multiplier as string)
randomly generated if None
keys - [(keyType, keyLen)] to generate
zDelete, zDR, zID - bits 1-3 of Parameters of scenario, (GP AmE, Tab. 4-17)
optional **kw: keyId, seqCounter, SDIN, HostID
Return <data for StoreData>"""
if privkey is None:
secexp = random.randrange(2, self.generator.order())
else:
secexp = s2int(privkey)
assert 1 < secexp < self.generator.order(), "Wrong eSK.AP.ECKA"
print "eSK.AP.ECKA = %X" % secexp
pubkey = self.generator * secexp
dgi7F49 = pack(">HBB", 0x7F49, 2*self.bytelen+1, 4) + \
int2s(pubkey.x(), self.bytelen * 8) + \
int2s(pubkey.y(), self.bytelen * 8)
# calculate Shared Secret, suppose that cofactor is 1
S_AB = secexp * self.pkCASD
self.sharedSecret = int2s(S_AB.x(), self.bytelen * 8)
print "Shared Secret =", hexlify(self.sharedSecret).upper()
# build DGI 00A6
if zID:
assert hasattr(self, 'IIN'), "Missing IIN while CardId requested"
            assert hasattr(self, 'CIN'), "Missing CIN while CardId requested"
assert 'HostID' in kw and isinstance(kw['HostID'], str)
self.HostCardID = ''.join([derLV(v) for v in
(kw['HostID'], self.IIN, self.CIN)])
else:
self.HostCardID = ''
self.zDR = zDR
scenarioPar = (zDelete and 1 or 0) +\
(zDR and 2 or 0) +\
(zID and 4 or 0)
assert all([k[0] in (KeyType.DES_IMPLICIT, KeyType.AES) for k in keys])
ktl1 = keys[0]
zDifKey = any([keys[i] != ktl1 for i in xrange(1, len(keys))])
tA6value = pack("BBBB", T_scenarioID, 2, 3, scenarioPar)
if zDifKey:
self.receiptAlgo = CMAC
self.keyLens = [16] + [k[1] for k in keys]
self.keyDesc = ''
if 'keyId' in kw:
tA6value += pack("BBB", T_keyID, 1, kw['keyId'])
tA6value += pack("BBB", T_keyVer, 1, keyVer)
# default keyUsage from GP 2.2.1 AmE tab. 4-16 for ENC, MAC, DEK
for k, keyUsage in zip(keys, (0x38, 0x34, 0xC8)):
if len(k) > 2:
keyUsage = k[2]
tB9value = pack("BBB", T_keyUsage, 1, keyUsage)
if len(k) >= 4: # optional key Access as fourth elem. of key
tB9value += pack("BBB", T_keyAcc, 1, k[3])
tB9value += pack("BBB", T_keyType, 1, k[0])
tB9value += pack("BBB", T_keyLen, 1, k[1])
self.keyDesc += pack("BBB", keyUsage, *k[:2])
tA6value += '\xB9' + derLV(tB9value)
else:
assert len(keys) in (1, 3), \
"One or three secure ch. keys expected."
self.keyLens = [ktl1[1]] * (1 + len(keys))
self.receiptAlgo = ktl1[0] == KeyType.AES and CMAC or DESMAC
keyUsage = len(keys) == 1 and 0x5C or 0x10
self.keyDesc = pack("BBB", keyUsage, *ktl1[:2])
tA6value += pack("BBB", T_keyUsage, 1, keyUsage)
if len(ktl1) == 4:
tA6value += pack("BBB", T_keyAcc, 1, ktl1[3])
tA6value += pack("BBB", T_keyType, 1, ktl1[0])
tA6value += pack("BBB", T_keyLen, 1, ktl1[1])
if 'keyId' in kw:
tA6value += pack("BBB", T_keyID, 1, kw['keyId'])
tA6value += pack("BBB", T_keyVer, 1, keyVer)
if 'seqCounter' in kw:
tA6value += chr(T_seqCounter) + derLV(kw['seqCounter'])
if 'SDIN' in kw:
tA6value += chr(T_SDIN) + derLV(kw['SDIN'])
if zID:
tA6value += chr(T_HostID) + derLV(kw['HostID'])
self.tA6 = chr(T_CRT) + derLV(tA6value)
dgi00A6 = pack(">HB", 0x00A6, len(self.tA6)) + self.tA6
return (dgi00A6, dgi7F49)
def generKeys(self, respData):
""" Verify receipt and generate symmetric keys.
respData - response to Store Data (string)
Return generated keys (tuple of strings)"""
try:
data2rec = self.tA6
except KeyError:
print "Run makeDGI first"
return
respTLV = split2TLV(respData)
if self.zDR:
lenDR = (self.bytelen // 32) * 16 # map to 16, 24 or 32
DR = respTLV[0][1]
assert len(respTLV) == 2 and \
respTLV[0][0] == T_DR and len(DR) == lenDR
data2rec += pack("BB", T_DR, lenDR) + DR
else:
assert len(respTLV) == 1
assert respTLV[-1][0] == T_receipt
receipt = respTLV[-1][1]
sharedInfo = self.keyDesc
if self.zDR:
sharedInfo += DR
if hasattr(self, 'HostCardID'):
sharedInfo += self.HostCardID
print "Shared Info =", hexlify(sharedInfo).upper()
keyData = X963keyDerivation(self.sharedSecret, sum(self.keyLens),
sharedInfo)
keyDataIt = chunks(keyData, self.keyLens)
receiptKey = keyDataIt.next()
print "Receipt Key =", hexlify(receiptKey).upper()
expReceipt = self.receiptAlgo(receiptKey, data2rec)
assert receipt == expReceipt, "Receipt verification failed"
return [k for k in keyDataIt if k] # skip empty rest
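# Typical Push #3 (ECKA) flow with this class (values are hypothetical):
#   p3 = Push3scenario(0x00, pkCASD)            # key param reference 0x00 = NIST P-256
#   dgi00A6, dgi7F49 = p3.makeDGI(keyVer=0x20)
#   ... transmit both DGIs via STORE DATA and collect the card's response ...
#   keys = p3.generKeys(respData)               # verifies the receipt, derives keys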
def selectApplet(c, AID, logCh=0):
""" Select applet on a given logical channel or
open new log. channel if logCh is None. """
if logCh is None:
logCh = openLogCh(c)
# select the Applet on the given logical channel
apdu = [logCh, INS_SELECT, 4, 0, len(AID)] + s2l(AID)
resp, sw1, sw2 = c.transmit(apdu)
if sw1 == 0x6C and len(AID) == 0:
apdu = [logCh, INS_SELECT, 4, 0, sw2]
resp, sw1, sw2 = c.transmit(apdu)
if(sw1 == 0x61):
apdu = [logCh, 0xC0, 0, 0, sw2]
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
respdata = l2s(resp)
# close channel
return (respdata, logCh)
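# e.g. (placeholder AID): resp, logCh = selectApplet(c, unhexlify('A000000018434D00'))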
def openLogCh(c):
""" Manage channel to open logical channel. """
apdu = [0, INS_MANAGE_LOGCH, 0, 0, 1]
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
return resp[0]
def closeLogCh(c, logCh):
apdu = [0, INS_MANAGE_LOGCH, 0x80, logCh, 0]
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
class GetStatusData:
"""Represent and interpret data from Get status for Packages and Modules"""
def __init__(self, respdataPM, respdataApp):
ind = 0
self.packages = []
while len(respdataPM) > ind:
length = respdataPM[ind]
pack_aid = l2s(respdataPM[ind+1: ind+1+length])
ind += length + 1
lcs = respdataPM[ind]
priv = respdataPM[ind+1]
nmod = respdataPM[ind+2]
ind += 3
mods = []
for i in xrange(nmod):
length = respdataPM[ind]
mods.append(l2s(respdataPM[ind+1: ind+1+length]))
ind += length + 1
self.packages.append({'pack_aid': pack_aid,
'lcs': lcs,
'priv': priv,
'modules': mods})
ind = 0
self.insts = []
while len(respdataApp) > ind:
length = respdataApp[ind]
app_aid = l2s(respdataApp[ind+1: ind+1+length])
ind += length + 1
lcs = respdataApp[ind]
priv = respdataApp[ind+1]
ind += 2
self.insts.append({'app_aid': app_aid,
'lcs': lcs,
'priv': priv})
def __str__(self):
res = ''
for p in self.packages:
res += "Package AID: %s %02X %02X\n" % \
(hexlify(p['pack_aid']).upper().ljust(32),
p['lcs'], p['priv'])
for m in p['modules']:
res += " module %s\n" % hexlify(m).upper().ljust(32)
for p in self.insts:
res += "Insts AID : %s %02X %02X\n" % \
(hexlify(p['app_aid']).upper().ljust(32),
p['lcs'], p['priv'])
return res
def getStatus(sc, AID_pref=''):
""" Issue GET STATUS apdu for packages and modules, and instances. """
res = {}
for P1 in (0x10, 0x40):
apdu = [0x80, INS_GETSTATUS, P1, 0, 2+len(AID_pref), 0x4F,
len(AID_pref)] + s2l(AID_pref)
respdata, sw1, sw2 = sc.transmit(apdu)
sw = (sw1 << 8) + sw2
while sw == 0x6310:
apdu = [0x80, INS_GETSTATUS, P1, 1, 2+len(AID_pref), 0x4F,
len(AID_pref)] + s2l(AID_pref)
resp, sw1, sw2 = sc.transmit(apdu)
respdata += resp
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
res[P1] = respdata
return GetStatusData(res[0x10], res[0x40])
def getData(c, tag):
P1 = tag >> 8
P2 = tag & 0xFF
apdu = [0x80, INS_GETDATA, P1, P2, 0]
resp, sw1, sw2 = c.transmit(apdu)
if sw1 == 0x6C:
apdu[4] = sw2
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
return l2s(resp)
def getExtCardRes(c):
""" Issue GET DATA with tag FF21 in order to receive Extended
Card Resources (GP 2.2.1, 11.3 & ETSI TS 102.226, 8.2.1.7).
Returns [num. of install applets, free NVM, free RAM]"""
# CLA = 0x00: return only value
# CLA = 0x80: return TLV, i.e. 0xFF21 #(value)
apdu = [0x80, INS_GETDATA, 0xFF, 0x21, 0]
resp, sw1, sw2 = c.transmit(apdu)
if sw1 == 0x6C:
apdu[4] = sw2
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
payload = l2s(resp)
result = [s2int(findTLValue(payload, (0xFF21, tag))) for
tag in (0x81, 0x82, 0x83)]
return result
def selectFile(c, path, logCh=0):
""" Select file by path from MF or MF for empty path """
if len(path) > 0:
apdu = [logCh, INS_SELECT, 8, 4, len(path)] + s2l(path)
else:
apdu = [logCh, INS_SELECT, 0, 4, 2, 0x3F, 0x00]
resp, sw1, sw2 = c.transmit(apdu)
if sw1 == 0x61:
resp, sw1, sw2 = c.transmit([0, 0xC0, 0, 0, sw2])
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
return l2s(resp)
def readBinary(c, le, logCh=0, offset=0):
"""Read Binary on currently selected EF"""
P1 = (offset >> 8) & 0x7F
P2 = offset & 0xFF
apdu = [logCh, INS_READBIN, P1, P2, le]
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
return l2s(resp)
def readRecord(c, recNum, logCh=0):
""" Read record from currently selected EF"""
apdu = [logCh, INS_READREC, recNum, 4, 0]
resp, sw1, sw2 = c.transmit(apdu)
if sw1 == 0x6C:
apdu[4] = sw2
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
return l2s(resp)
def updateBinary(c, data, logCh=0, offset=0):
"""Update binary on currently selected EF"""
assert len(data) < 0x100
P1 = (offset >> 8) & 0x7F
P2 = offset & 0xFF
apdu = [logCh, INS_UPDBIN, P1, P2, len(data)] + s2l(data)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def updateRecord(c, recNum, data, logCh=0):
""" Update record from currently selected EF"""
assert len(data) < 0x100
apdu = [logCh, INS_UPDREC, recNum, 4, len(data)] + s2l(data)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def verifyPin(c, pin=None, P2=0x01, logCh=0):
"""Verify PIN
pin - value (str, 4-8bytes). If None, just get number of tries.
P2 - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
logCh - logical channel (default 0)
    Return number of remaining tries or True if verification succeeded.
"""
lc = 0 if pin is None else 8
apdu = [logCh, INS_VERIFY_PIN, 0, P2, lc]
if pin is not None:
assert 4 <= len(pin) <= 8
pin += '\xFF' * (8 - len(pin))
apdu += s2l(pin)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw == 0x6983: # PIN blocked
return 0
if 0x63C0 <= sw <= 0x63CA: # remaining tries
return sw - 0x63C0
if sw != 0x9000:
raise ISOException(sw)
return True # pin verified
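# Sketch: verifyPin(c, '1234') -> True on success, otherwise the number of
# remaining tries (0 means the PIN is blocked).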
def changePin(c, oldPin, newPin, P2=0x01, logCh=0):
"""Change PIN
oldPin - old PIN value (str, 4-8bytes)
newPin - new PIN value (str, 4-8bytes)
P2 - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
logCh - logical channel (default 0)
"""
assert 4 <= len(oldPin) <= 8
oldPin += '\xFF' * (8 - len(oldPin))
assert 4 <= len(newPin) <= 8
newPin += '\xFF' * (8 - len(newPin))
apdu = [logCh, INS_CHANGE_PIN, 0, P2, 0x10] + s2l(oldPin) + s2l(newPin)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def disablePin(c, pin, P2=0x01, logCh=0):
"""Disable PIN
pin - PIN value (str, 4-8bytes)
P2 - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
logCh - logical channel (default 0)
"""
assert 4 <= len(pin) <= 8
pin += '\xFF' * (8 - len(pin))
apdu = [logCh, INS_DISABLE_PIN, 0, P2, 8] + s2l(pin)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def enablePin(c, pin, P2=0x01, logCh=0):
"""Enable PIN
pin - PIN value (str, 4-8bytes)
P2 - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
logCh - logical channel (default 0)
"""
assert 4 <= len(pin) <= 8
pin += '\xFF' * (8 - len(pin))
apdu = [logCh, INS_ENABLE_PIN, 0, P2, 8] + s2l(pin)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def unblockPin(c, puk, newPin, P2=0x01, logCh=0):
"""unblock PIN
    puk - PUK value (str, 8 bytes)
    newPin - new PIN value (str, 4-8 bytes)
P2 - PIN identification (0x01: PIN1 (default), 0x81: PIN2, etc.)
logCh - logical channel (default 0)
"""
assert len(puk) == 8
assert 4 <= len(newPin) <= 8
newPin += '\xFF' * (8 - len(newPin))
apdu = [logCh, INS_UNBLOCK_PIN, 0, P2, 0x10] + s2l(puk) + s2l(newPin)
resp, sw1, sw2 = c.transmit(apdu)
sw = (sw1 << 8) + sw2
if sw != 0x9000:
raise ISOException(sw)
def selectUSIM(c, logCh=0):
"""Select USIM, return AID
Read EF_DIR, USIM = first application with AID of USIM (3GPP TS 31.110)"""
# read EF_DIR
infoDIR = selectFile(c, unhexlify('2F00'), logCh)
# see ETSI 102.221 11.1.1.4.3 for coding
fileDesc = findTLValue(infoDIR, (0x62, 0x82))
assert len(fileDesc) == 5 and \
fileDesc[:2] == '\x42\x21' # linear EF
recLen, nRec = unpack(">HB", fileDesc[2:5])
aids = []
for recNum in xrange(1, nRec+1):
try:
r = readRecord(c, recNum)
if r == '\xFF' * len(r):
continue
aid = findTLValue(r, (0x61, 0x4F))
aids.append(aid)
except ISOException:
break
# search for USIM
for aid in aids:
if aid[:7] == unhexlify('A0000000871002'):
infoUSIM = selectApplet(c, aid, logCh)
return aid
return None
def cardInfo(c, USIMpin=None, logCh=0):
"""Deselect, read EF_DIR, EF_ICCID"""
resetCard(c)
histBytes = l2s(ATR(c.getATR()).getHistoricalBytes())
infoMF = selectFile(c, '', logCh)
# read EF_ICCID
infoICCID = selectFile(c, unhexlify('2FE2'), logCh)
fileSize = s2int(findTLValue(infoICCID, (0x62, 0x80)))
assert fileSize == 10, "Wrong size of EF_ICCID"
iccid = swapNibbles(readBinary(c, fileSize))
# read EF_DIR
infoDIR = selectFile(c, unhexlify('2F00'), logCh)
# see ETSI 102.221 11.1.1.4.3 for coding
fileDesc = findTLValue(infoDIR, (0x62, 0x82))
assert len(fileDesc) == 5 and \
fileDesc[:2] == '\x42\x21' # linear EF
recLen, nRec = unpack(">HB", fileDesc[2:5])
dirDO = []
for recNum in xrange(1, nRec+1):
try:
r = readRecord(c, recNum)
if r == '\xFF' * len(r):
continue
aid = findTLValue(r, (0x61, 0x4F))
label = findTLValue(r, (0x61, 0x50))
dirDO.append({'AID': aid, 'label': label})
except ISOException:
break
# select USIM and try to read IMSI
aids = [DO['AID'] for DO in dirDO
if DO['AID'][:7] == unhexlify('A0000000871002')]
if len(aids) >= 1:
aid_usim = aids[0] # choose the first AID found
else:
aid_usim = None
if aid_usim:
infoUSIM = selectApplet(c, aid_usim, logCh)
if USIMpin is not None:
verifyPin(c, USIMpin, logCh=logCh)
infoIMSI = selectFile(c, unhexlify('7FFF6F07'), logCh)
try:
bimsi = readBinary(c, 9, logCh)
digits = reduce(lambda d, n: d + [ord(n) & 0x0F, ord(n) >> 4],
bimsi[1:1+ord(bimsi[0])], [])
digits.pop(0) # remove first nibble 8 or 9
while digits[-1] == 0x0F:
digits.pop() # remove trailing F
imsi = ''.join([chr(ord('0')+i) for i in digits])
except ISOException:
imsi = None
else:
imsi = None
# select default applet and get tags 45 and 42
selectApplet(c, '', logCh)
try:
iin = findTLValue(getData(c, T_IIN), (T_IIN,))
except ISOException:
iin = None
try:
cin = findTLValue(getData(c, T_CIN), (T_CIN,))
except ISOException:
cin = None
return histBytes, iccid, dirDO, imsi, iin, cin
| lgpl-2.1 | -262,022,843,162,776,060 | 35.621495 | 115 | 0.571711 | false | 2.991507 | false | false | false |
xerond/lucia | ledEditor/cfilegen.py | 1 | 2560 | from effectgroup import EffectGroup
from effectdescriptions import EffectDescriptions
from myutils import Utils
def generateFile(fileName,ledCount,effectGroups):
f = open(fileName,'w')
f.write("#ifndef H_SONG_INSTRUCTIONS\n#define H_SONG_INSTRUCTIONS\n#include \"avr/pgmspace.h\"\n#include \"song_instructions.h\"\nconst char song_instructions[] PROGMEM = {")
lastTime = 0
for curEffectGroup in effectGroups:
writeBuffer = ""
newTime = curEffectGroup.getTimeAs10msCount()
tD = newTime - lastTime
lastTime = newTime
writeBuffer += "0xff,\n"
writeBuffer += Utils.short_to_hex(tD) + "\n"
for ledIndex in range (0,ledCount):
ledEffect = curEffectGroup.getLedEffect(ledIndex)
tempLedBytes = generateLedEffectBytes(ledIndex,ledEffect)
			if tempLedBytes != "":
				writeBuffer += "\t" + tempLedBytes + "\n"  # reuse the value computed above
writeBuffer += "0xff,\n"
f.write(writeBuffer)
f.write("0x00,};\n#endif")
#generates a string for led effect
def generateLedEffectBytes(ledNumber,ledEffect):
	effectNumber = ledEffect[EffectGroup.INDEX_EFFECT_NUMBER]
	#if effect number is < 0, ignore it -- this must be checked before the
	#lookup below, which would raise KeyError for an unused slot
	if effectNumber < 0:
		return ""
	#get the real effect number
	#TODO we are accessing a global here, eek!
	print "Effect num is: " + str(effectNumber)
	realEffectNumber = EffectDescriptions.quickEffectLookup[effectNumber]['realId']
	effectData = ledEffect[EffectGroup.INDEX_EFFECT_DATA]
returnStr = Utils.byte_to_hex(ledNumber) + Utils.byte_to_hex(realEffectNumber)
#get the effect description
effectDescr = EffectDescriptions.quickEffectLookup[effectNumber]
#Depending on the data, time to output the values accordingly
reqAttributes = effectDescr['reqAttributes']
attribCount = len(reqAttributes)
for i in range (0,attribCount):
curAttrib = reqAttributes[i]
attribType = curAttrib[EffectDescriptions.INDEX_TYPE]
curData = effectData[i]
if(attribType == EffectDescriptions.VAR_COLOR):
returnStr += Utils.short_to_hex(curData[0])
returnStr += Utils.short_to_hex(curData[1])
returnStr += Utils.short_to_hex(curData[2])
elif(attribType == EffectDescriptions.VAR_BYTE):
returnStr += Utils.byte_to_hex(int(curData))
elif(attribType == EffectDescriptions.VAR_WORD):
returnStr += Utils.short_to_hex(int(curData))
elif(attribType == EffectDescriptions.VAR_DWORD):
returnStr += Utils.dword_to_hex(int(curData))
elif(attribType == EffectDescriptions.VAR_HIDDEN_BYTE):
returnStr += Utils.short_to_hex(int(curData))
else:
print "ERROR! COULD NOT DECODE EFFECT!"
return returnStr
| mit | 6,627,170,382,357,033,000 | 34.068493 | 175 | 0.74375 | false | 3.073229 | false | false | false |
notepadqq/NotepadqqApi_Python | notepadqq_api/notepadqq_api.py | 1 | 3531 | import asyncio
import sys
from notepadqq_api.message_channel import MessageChannel
from notepadqq_api.message_interpreter import MessageInterpreter
from notepadqq_api.stubs import Stubs
class NotepadqqApi():
"""Provides access to the Notepadqq Api."""
_NQQ_STUB_ID = 1
def __init__(self, socket_path=None, extension_id=None):
"""Construct a new Api object that can be used to invoke Notepadqq
methods and to receive its events.
If not provided, socket_path and extension_id are respectively
sys.argv[1] and sys.argv[2]
"""
if socket_path is None:
try:
socket_path = sys.argv[1]
except IndexError:
raise ValueError("Socket path not provided")
if extension_id is None:
try:
extension_id = sys.argv[2]
except IndexError:
raise ValueError("Extension id not provided")
self._socket_path = socket_path
self._extension_id = extension_id
self._message_channel = MessageChannel(self._socket_path)
self._message_interpreter = MessageInterpreter(self._message_channel)
self._nqq = Stubs.Notepadqq(self._message_interpreter, self._NQQ_STUB_ID)
def run_event_loop(self, started_callback=None):
"""Start the event loop. If started_callback is provided, it will
be called as soon as the connection with Notepadqq is ready.
"""
if started_callback is not None:
self.notepadqq.on('currentExtensionStarted', started_callback)
loop = asyncio.get_event_loop()
loop.run_until_complete(self._message_channel.start(loop, self._on_new_message))
@property
def extension_id(self):
"""The id assigned to this extension by Notepadqq"""
return self._extension_id
@property
def notepadqq(self):
"""Get an instance of the main Notepadqq object"""
return self._nqq
def on_window_created(self, callback):
"""Execute a callback for every new window.
This is preferable to the "newWindow" event of Notepadqq, because it
could happen that the extension isn't ready soon enough to receive
the "newWindow" event for the first window. This method, instead,
ensures that the passed callback will be called once and only once
for each current or future window.
"""
captured_windows = []
# Invoke the callback for every currently open window
for window in self.notepadqq.windows():
if window not in captured_windows:
captured_windows.append(window)
callback(window)
# Each time a new window gets opened, invoke the callback.
# When Notepadqq is starting and initializing all the extensions,
# we might not be fast enough to receive this event: this is why
# we manually invoked the callback for every currently open window.
def on_new_window(window):
if window not in captured_windows:
callback(window)
self.notepadqq.on('newWindow', on_new_window)
def for_each_window(self, f):
"""Decorator alternative for self.on_window_created(f)"""
self.on_window_created(f)
return f
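    # Minimal usage sketch (assumes Notepadqq launched this extension with
    # the socket path and extension id in sys.argv):
    #   api = NotepadqqApi()
    #   @api.for_each_window
    #   def on_window(window):
    #       ...
    #   api.run_event_loop()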
def _on_new_message(self, msg):
# Called whenever a new message is received from the channel
self._message_interpreter.process_message(msg)
| mit | -2,928,871,828,242,222,600 | 37.380435 | 88 | 0.632965 | false | 4.37005 | false | false | false |
rackerlabs/deuce-valere | deucevalere/common/validation.py | 1 | 1337 | """
Deuce Valere - Common - Validation
"""
import datetime
from deuceclient.api import *
from deuceclient.auth.base import AuthenticationBase
from deuceclient.client.deuce import DeuceClient
from deuceclient.common.validation import *
from deuceclient.common.validation_instance import *
from stoplight import Rule, ValidationFailed, validation_function
@validation_function
def val_authenticator_instance(value):
if not isinstance(value, AuthenticationBase):
raise ValidationFailed('authenticator must be derived from '
'deuceclient.auth.base.AuthenticationBase')
@validation_function
def val_deuceclient_instance(value):
if not isinstance(value, DeuceClient):
raise ValidationFailed('invalid Deuce Client instance')
@validation_function
def val_expire_age(value):
if not isinstance(value, datetime.timedelta):
raise ValidationFailed('must be type datetime.timedelta')
def _abort(error_code):
abort_errors = {
100: TypeError
}
raise abort_errors[error_code]
AuthEngineRule = Rule(val_authenticator_instance(), lambda: _abort(100))
ClientRule = Rule(val_deuceclient_instance(), lambda: _abort(100))
ExpireAgeRule = Rule(val_expire_age(), lambda: _abort(100))
ExpireAgeRuleNoneOkay = Rule(val_expire_age(none_ok=True), lambda: _abort(100))
| apache-2.0 | -5,001,324,934,594,096,000 | 30.093023 | 79 | 0.743455 | false | 3.841954 | false | false | false |
hemebond/kapua | courses/views.py | 1 | 4832 | # Copyright 2011 James O'Neill
#
# This file is part of Kapua.
#
# Kapua is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Kapua is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kapua. If not, see <http://www.gnu.org/licenses/>.
from django.contrib.auth.decorators import login_required
from django.utils.decorators import method_decorator
from django.views.generic import ListView, DetailView, UpdateView, \
FormView, CreateView
from django.views.generic.detail import SingleObjectMixin
from django.http import HttpResponseRedirect
from django.shortcuts import redirect
from .models import Course, Page
from .forms import CourseForm, PageForm
class CourseList(ListView):
model = Course
class CourseAdd(CreateView):
template_name = "courses/course_edit.html"
form_class = CourseForm
context_object_name = "course"
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(CourseAdd, self).dispatch(*args, **kwargs)
class CourseDetail(DetailView):
template_name = "courses/course_detail.html"
model = Course
context_object_name = "course"
def get(self, request, *args, **kwargs):
self.object = self.get_object()
if self.object.pages.exists():
return redirect('kapua-page-detail', self.object.pages.get(level=0).pk)
context = self.get_context_data(object=self.object)
return self.render_to_response(context)
class CourseEdit(UpdateView):
template_name = "courses/course_edit.html"
form_class = CourseForm
model = Course
class PageAdd(SingleObjectMixin, FormView):
model = Course
template_name = "courses/page_edit.html"
form_class = PageForm
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PageAdd, self).dispatch(*args, **kwargs)
def get_form(self, form_class):
self.object = self.get_object()
return super(PageAdd, self).get_form(form_class)
def get_form_kwargs(self):
"""
Returns the keyword arguments for instantiating the form.
"""
form_kwargs = super(PageAdd, self).get_form_kwargs()
form_kwargs.update({
'valid_targets': self.object.pages.filter(level__gt=0)
})
return form_kwargs
def form_valid(self, form):
position = form.cleaned_data.get('position', 'last-child')
target = form.cleaned_data.get('target', None)
course = self.object
page = form.save(commit=False)
page.course = course
if not target:
if course.pages.exists():
target = course.pages.get(level=0)
position = 'last-child'
if target:
page.insert_at(
target=target,
position=position,
save=True,
)
self.success_url = page.get_absolute_url()
else:
page.save()
self.success_url = course.get_absolute_url()
return super(PageAdd, self).form_valid(form)
def get_context_data(self, *args, **kwargs):
context = super(PageAdd, self).get_context_data(*args, **kwargs)
if context['form'].errors:
context['error_message'] = context['form'].errors
return context
class PageDetail(DetailView):
template_name = "courses/page_detail.html"
context_object_name = "page"
model = Page
def get_context_data(self, **kwargs):
# Call the base implementation first to get a context
context = super(PageDetail, self).get_context_data(**kwargs)
context['course'] = self.object.course
pages = context['course'].pages.all()
for index, page in enumerate(pages):
if page.pk == self.object.pk:
if index > 0:
context['previous_page'] = pages[index - 1]
if index < (len(pages) - 1):
context['next_page'] = pages[index + 1]
break
# Remove the root page
context['pages'] = pages.filter(level__gt=0)
		# This gets the ancestors of the current page but excludes the
# root page
context['breadcrumbs'] = pages.filter(
lft__lt=self.object.lft,
rght__gt=self.object.rght
).exclude(
level=0
)
return context
class PageEdit(UpdateView):
template_name = "courses/page_edit.html"
form_class = PageForm
model = Page
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PageEdit, self).dispatch(*args, **kwargs)
def form_valid(self, form):
self.object = form.save()
target = form.cleaned_data.get('target')
if target:
position = form.cleaned_data.get('position')
self.object.move_to(
target=target,
position=position
)
return redirect('kapua-page-detail', self.object.pk)
| gpl-3.0 | 8,351,613,602,219,685,000 | 25.549451 | 74 | 0.710472 | false | 3.264865 | false | false | false |
Pikl/PiklBot | cogs/pikl.py | 1 | 1281 | import discord
from discord.ext import commands
class Pikl:
"""Super pikly commands."""
def __init__(self, bot):
self.bot = bot
@commands.command(hidden=False)
async def helloworld(self):
"""Hello, world!"""
await self.bot.say("Hello, world!")
@commands.command(hidden=False)
async def postraidembed(self):
"""Posts an embedded message with a bunch of raid info"""
embed = discord.Embed(colour=discord.Colour(0x2ecc40), description="Some helpful information to aid and review [Dawn] raids.\n")
embed.set_image(url="https://cdn.discordapp.com/attachments/350137990959464459/354412417381433354/unknown.png")
embed.set_thumbnail(url="https://wiki.guildwars2.com/images/5/5e/Legendary_Insight.png")
embed.set_author(name="Dawn Raid Information", icon_url="http://raid.pikly.uk/images/dawn-logo.png")
embed.set_footer(text=": 'Stack on Pikl'", icon_url="http://raid.pikly.uk/images/dawn-logo.png")
embed.add_field(name="Raid Logs & Videos", value="https://raid.pikly.uk/", inline=True)
embed.add_field(name="Raid Class Spreadsheet", value="[Spreadsheet here](https://docs.google.com/spreadsheets/d/1zm46Jb8UBIoYP1_mewoOvLKopx_Sks9hYGm8OeWaQI8/edit?usp=sharing)", inline=True)
await self.bot.say(embed=embed)
def setup(bot):
bot.add_cog(Pikl(bot)) | gpl-3.0 | -9,634,640,449,993,438 | 40.354839 | 191 | 0.737705 | false | 2.691176 | false | false | false |
jtwaleson/decrypt | decrypt/decrypt.py | 1 | 1745 | #!/usr/bin/env python
import curses
import time
import fileinput
import random
import string
screen = curses.initscr()
lines = []
chance = 0.1
confirmed_per_line = []
def main():
curses.noecho()
try:
curses.curs_set(0)
except:
pass
screen.keypad(1)
try:
for line in fileinput.input():
confirmed_per_line.append([])
lines.append(line.rstrip())
iterate()
fileinput.close()
while iterate(increase=True):
pass
time.sleep(2)
except KeyboardInterrupt:
pass
finally:
curses.endwin()
for line in lines:
print(line)
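# iterate() renders one animation frame: every still-hidden character is
# revealed with probability `chance` per pass; it returns True while any
# characters were still hidden at the start of the pass.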
def iterate(increase=False):
global chance, confirmed_per_line, lines
still_random = 0
if increase:
chance += 0.01
screen.erase()
(y, x) = screen.getmaxyx()
final_line = len(lines)
if final_line > y:
first_line = final_line - y
else:
first_line = 0
for line_num in range(first_line, final_line):
line = lines[line_num]
for col in range(min(x, len(line))):
try:
if col not in confirmed_per_line[line_num]:
still_random += 1
if random.random() < chance:
confirmed_per_line[line_num].append(col)
screen.addch(line_num - first_line,
col,
random.choice(string.punctuation),
curses.A_REVERSE)
else:
screen.addstr(line_num - first_line, col, line[col])
except:
pass
screen.refresh()
time.sleep(0.1)
return still_random > 0
| mit | 3,421,878,440,241,431,600 | 23.928571 | 72 | 0.514613 | false | 4.05814 | false | false | false |
vdrhtc/Measurement-automation | drivers/pyspcm.py | 1 | 7735 | import os
import platform
import sys
from ctypes import *
# load registers for easier access
from drivers.py_header.regs import *
# load registers for easier access
from drivers.py_header.spcerr import *
SPCM_DIR_PCTOCARD = 0
SPCM_DIR_CARDTOPC = 1
SPCM_BUF_DATA = 1000 # main data buffer for acquired or generated samples
SPCM_BUF_ABA = 2000 # buffer for ABA data, holds the A-DATA (slow samples)
SPCM_BUF_TIMESTAMP = 3000 # buffer for timestamps
# determine bit width of os
oPlatform = platform.architecture()
if (oPlatform[0] == '64bit'):
bIs64Bit = 1
else:
bIs64Bit = 0
# define pointer aliases
int8 = c_int8
int16 = c_int16
int32 = c_int32
int64 = c_int64
ptr8 = POINTER (int8)
ptr16 = POINTER (int16)
ptr32 = POINTER (int32)
ptr64 = POINTER (int64)
uint8 = c_uint8
uint16 = c_uint16
uint32 = c_uint32
uint64 = c_uint64
uptr8 = POINTER (uint8)
uptr16 = POINTER (uint16)
uptr32 = POINTER (uint32)
uptr64 = POINTER (uint64)
# Windows
if os.name == 'nt':
#sys.stdout.write("Python Version: {0} on Windows\n\n".format (
# platform.python_version()))
# define card handle type
if (bIs64Bit):
# for unknown reasons c_void_p gets messed up on Win7/64bit, but this works:
drv_handle = POINTER(c_uint64)
else:
drv_handle = c_void_p
# Load DLL into memory.
# use windll because all driver access functions use _stdcall calling convention under windows
if (bIs64Bit == 1):
spcmDll = windll.LoadLibrary ("c:\\windows\\system32\\spcm_win64.dll")
else:
spcmDll = windll.LoadLibrary ("c:\\windows\\system32\\spcm_win32.dll")
# load spcm_hOpen
if (bIs64Bit):
spcm_hOpen = getattr (spcmDll, "spcm_hOpen")
else:
spcm_hOpen = getattr (spcmDll, "_spcm_hOpen@4")
spcm_hOpen.argtype = [c_char_p]
spcm_hOpen.restype = drv_handle
# load spcm_vClose
if (bIs64Bit):
spcm_vClose = getattr (spcmDll, "spcm_vClose")
else:
spcm_vClose = getattr (spcmDll, "_spcm_vClose@4")
spcm_vClose.argtype = [drv_handle]
spcm_vClose.restype = None
# load spcm_dwGetErrorInfo
if (bIs64Bit):
spcm_dwGetErrorInfo_i32 = getattr (spcmDll, "spcm_dwGetErrorInfo_i32")
else:
spcm_dwGetErrorInfo_i32 = getattr (spcmDll, "_spcm_dwGetErrorInfo_i32@16")
spcm_dwGetErrorInfo_i32.argtype = [drv_handle, uptr32, ptr32, c_char_p]
spcm_dwGetErrorInfo_i32.restype = uint32
# load spcm_dwGetParam_i32
if (bIs64Bit):
spcm_dwGetParam_i32 = getattr (spcmDll, "spcm_dwGetParam_i32")
else:
spcm_dwGetParam_i32 = getattr (spcmDll, "_spcm_dwGetParam_i32@12")
spcm_dwGetParam_i32.argtype = [drv_handle, int32, ptr32]
spcm_dwGetParam_i32.restype = uint32
# load spcm_dwGetParam_i64
if (bIs64Bit):
spcm_dwGetParam_i64 = getattr (spcmDll, "spcm_dwGetParam_i64")
else:
spcm_dwGetParam_i64 = getattr (spcmDll, "_spcm_dwGetParam_i64@12")
spcm_dwGetParam_i64.argtype = [drv_handle, int32, ptr64]
spcm_dwGetParam_i64.restype = uint32
# load spcm_dwSetParam_i32
if (bIs64Bit):
spcm_dwSetParam_i32 = getattr (spcmDll, "spcm_dwSetParam_i32")
else:
spcm_dwSetParam_i32 = getattr (spcmDll, "_spcm_dwSetParam_i32@12")
spcm_dwSetParam_i32.argtype = [drv_handle, int32, int32]
spcm_dwSetParam_i32.restype = uint32
# load spcm_dwSetParam_i64
if (bIs64Bit):
spcm_dwSetParam_i64 = getattr (spcmDll, "spcm_dwSetParam_i64")
else:
spcm_dwSetParam_i64 = getattr (spcmDll, "_spcm_dwSetParam_i64@16")
spcm_dwSetParam_i64.argtype = [drv_handle, int32, int64]
spcm_dwSetParam_i64.restype = uint32
# load spcm_dwSetParam_i64m
if (bIs64Bit):
spcm_dwSetParam_i64m = getattr (spcmDll, "spcm_dwSetParam_i64m")
else:
spcm_dwSetParam_i64m = getattr (spcmDll, "_spcm_dwSetParam_i64m@16")
spcm_dwSetParam_i64m.argtype = [drv_handle, int32, int32, int32]
spcm_dwSetParam_i64m.restype = uint32
# load spcm_dwDefTransfer_i64
if (bIs64Bit):
spcm_dwDefTransfer_i64 = getattr (spcmDll, "spcm_dwDefTransfer_i64")
else:
spcm_dwDefTransfer_i64 = getattr (spcmDll, "_spcm_dwDefTransfer_i64@36")
spcm_dwDefTransfer_i64.argtype = [drv_handle, uint32, uint32, uint32, c_void_p, uint64, uint64]
spcm_dwDefTransfer_i64.restype = uint32
# load spcm_dwInvalidateBuf
if (bIs64Bit):
spcm_dwInvalidateBuf = getattr (spcmDll, "spcm_dwInvalidateBuf")
else:
spcm_dwInvalidateBuf = getattr (spcmDll, "_spcm_dwInvalidateBuf@8")
spcm_dwInvalidateBuf.argtype = [drv_handle, uint32]
spcm_dwInvalidateBuf.restype = uint32
# load spcm_dwGetContBuf_i64
if (bIs64Bit):
spcm_dwGetContBuf_i64 = getattr (spcmDll, "spcm_dwGetContBuf_i64")
else:
spcm_dwGetContBuf_i64 = getattr (spcmDll, "_spcm_dwGetContBuf_i64@16")
spcm_dwGetContBuf_i64.argtype = [drv_handle, uint32, POINTER(c_void_p), uptr64]
spcm_dwGetContBuf_i64.restype = uint32
elif os.name == 'posix':
sys.stdout.write("Python Version: {0} on Linux\n\n".format (platform.python_version()))
# define card handle type
if (bIs64Bit):
drv_handle = POINTER(c_uint64)
else:
drv_handle = c_void_p
# Load DLL into memory.
# use cdll because all driver access functions use cdecl calling convention under linux
spcmDll = cdll.LoadLibrary ("libspcm_linux.so")
# load spcm_hOpen
spcm_hOpen = getattr (spcmDll, "spcm_hOpen")
spcm_hOpen.argtype = [c_char_p]
spcm_hOpen.restype = drv_handle
# load spcm_vClose
spcm_vClose = getattr (spcmDll, "spcm_vClose")
spcm_vClose.argtype = [drv_handle]
spcm_vClose.restype = None
# load spcm_dwGetErrorInfo
spcm_dwGetErrorInfo_i32 = getattr (spcmDll, "spcm_dwGetErrorInfo_i32")
spcm_dwGetErrorInfo_i32.argtype = [drv_handle, uptr32, ptr32, c_char_p]
spcm_dwGetErrorInfo_i32.restype = uint32
# load spcm_dwGetParam_i32
spcm_dwGetParam_i32 = getattr (spcmDll, "spcm_dwGetParam_i32")
spcm_dwGetParam_i32.argtype = [drv_handle, int32, ptr32]
spcm_dwGetParam_i32.restype = uint32
# load spcm_dwGetParam_i64
spcm_dwGetParam_i64 = getattr (spcmDll, "spcm_dwGetParam_i64")
spcm_dwGetParam_i64.argtype = [drv_handle, int32, ptr64]
spcm_dwGetParam_i64.restype = uint32
# load spcm_dwSetParam_i32
spcm_dwSetParam_i32 = getattr (spcmDll, "spcm_dwSetParam_i32")
spcm_dwSetParam_i32.argtype = [drv_handle, int32, int32]
spcm_dwSetParam_i32.restype = uint32
# load spcm_dwSetParam_i64
spcm_dwSetParam_i64 = getattr (spcmDll, "spcm_dwSetParam_i64")
spcm_dwSetParam_i64.argtype = [drv_handle, int32, int64]
spcm_dwSetParam_i64.restype = uint32
# load spcm_dwSetParam_i64m
spcm_dwSetParam_i64m = getattr (spcmDll, "spcm_dwSetParam_i64m")
spcm_dwSetParam_i64m.argtype = [drv_handle, int32, int32, int32]
spcm_dwSetParam_i64m.restype = uint32
# load spcm_dwDefTransfer_i64
spcm_dwDefTransfer_i64 = getattr (spcmDll, "spcm_dwDefTransfer_i64")
spcm_dwDefTransfer_i64.argtype = [drv_handle, uint32, uint32, uint32, c_void_p, uint64, uint64]
spcm_dwDefTransfer_i64.restype = uint32
# load spcm_dwInvalidateBuf
spcm_dwInvalidateBuf = getattr (spcmDll, "spcm_dwInvalidateBuf")
spcm_dwInvalidateBuf.argtype = [drv_handle, uint32]
spcm_dwInvalidateBuf.restype = uint32
# load spcm_dwGetContBuf_i64
spcm_dwGetContBuf_i64 = getattr (spcmDll, "spcm_dwGetContBuf_i64")
spcm_dwGetContBuf_i64.argtype = [drv_handle, uint32, POINTER(c_void_p), uptr64]
spcm_dwGetContBuf_i64.restype = uint32
else:
raise Exception ('Operating system not supported by pySpcm')
| gpl-3.0 | 5,788,646,918,922,448,000 | 33.225664 | 99 | 0.68287 | false | 2.709282 | false | false | false |
Dev-Cloud-Platform/Dev-Cloud | dev_cloud/web_service/urls/user/environment.py | 1 | 5340 | # -*- coding: utf-8 -*-
# @COPYRIGHT_begin
#
# Copyright [2015] Michał Szczygieł, M4GiK Software
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @COPYRIGHT_end
from django.conf.urls import patterns, url, include
from core.utils.decorators import user_permission, vm_permission
from web_service.views.user.enviroment import wizard_setup, generate_dependencies, customize_environment, \
define_environment, summary, validation_process, validation_process_ip, validation_process_resources, \
validation_process_ip_pre, view_environment, environments_list, get_vm_status, destroy_vm, refresh_vm_tasks, \
show_vnc, get_cpu_load, get_ssh_key, view_predefined, customize_predefined_environment, \
define_predefined_environment
main_patterns = patterns('web_service.views.user.enviroment',
url(r'^app/create/environment/$', user_permission(wizard_setup),
name='personalized_environment'),
url(r'^app/create/environment/technology/(?P<technology>\w+)/$',
user_permission(generate_dependencies),
name='generate_dependencies'),
url(
r'^app/create/environment/customize/(?P<technology>\w+)/(?P<application>[\w\-]+)/(?P<operation>\w+)/$',
user_permission(customize_environment), name='customize_environment'),
url(r'^app/create/environment/define/(?P<technology>\w+)/(?P<exposed_ip>\w+)/$',
user_permission(define_environment), name='define_environment'),
url(r'^app/create/environment/summary/$', user_permission(summary), name='summary'),
url(r'^app/create/environment/validation_process/(?P<template>\w+)/(?P<exposed_ip>\w+)/$',
user_permission(validation_process), name='validation_process'),
url(r'^app/create/environment/validation_process_ip/(?P<exposed_ip>\w+)/$',
user_permission(validation_process_ip), name='validation_process_ip'),
url(r'^app/create/environment/validation_process_ip_pre/(?P<exposed_ip>\w+)/$',
user_permission(validation_process_ip_pre), name='validation_process_ip_pre'),
url(r'^app/create/environment/validation_process_resources/(?P<template_id>\w+)/$',
user_permission(validation_process_resources), name='validation_process_resources'),
url(r'^app/environments/$', user_permission(environments_list), name='environments_list'),
url(r'^app/environments/(?P<destroy_status>\w+)/$', user_permission(environments_list),
name='environments_list'),
url(r'^app/environments/show_vm/(?P<vm_id>\w+)/$', vm_permission(view_environment),
name='view_environment'),
url(r'^app/environments/vm_status/(?P<vm_id>\w+)/$', vm_permission(get_vm_status),
name='get_vm_status'),
url(r'^app/environments/destroy/(?P<vm_id>\w+)/$', vm_permission(destroy_vm),
name='destroy_vm'),
url(r'^app/environments/refresh_tasks/(?P<vm_id>\w+)/$', vm_permission(refresh_vm_tasks),
name='refresh_vm_tasks'),
url(r'^app/environments/show_vm/vnc/(?P<vm_id>\w+)/$', vm_permission(show_vnc),
name='show_vnc'),
url(r'^app/environments/show_vm/cpu_load/(?P<vm_id>\w+)/$', vm_permission(get_cpu_load),
name='get_cpu_load'),
url(r'^app/environments/show_vm/get_ssh_key/(?P<vm_id>\w+)/$', vm_permission(get_ssh_key),
name='get_ssh_key'),
url(r'^app/create/environment/predefined/$', user_permission(view_predefined),
name='predefined_environment'),
url(
r'^app/create/environment/predefined/customize/(?P<application>[\w\-]+)/(?P<operation>\w+)/$',
user_permission(customize_predefined_environment),
name='customize_predefined_environment'),
url(
r'^app/create/environment/predefined/define/(?P<application>[\w\-]+)/(?P<exposed_ip>\w+)/$',
user_permission(define_predefined_environment),
name='define_predefined_environment'))
urlpatterns = patterns('', url(r'^main/', include(main_patterns)))
| apache-2.0 | -6,435,118,396,431,205,000 | 70.173333 | 132 | 0.566317 | false | 4.357551 | false | false | false |
vertexproject/synapse | synapse/tests/test_tools_csvtool.py | 1 | 6295 | import csv
from unittest import mock
import synapse.common as s_common
import synapse.telepath as s_telepath
import synapse.tests.utils as s_t_utils
import synapse.tools.csvtool as s_csvtool
csvfile = b'''ipv4,fqdn,notes
1.2.3.4,vertex.link,malware
8.8.8.8,google.com,whitelist
'''
csvstorm = b'''
for ($ipv4, $fqdn, $note) in $rows {
$lib.print("oh hai")
[ inet:dns:a=($fqdn,$ipv4) ]
}
'''
csvfile_missing = b'''fqdn,email,tag
vertex.link,,mytag
google.com,[email protected],
yahoo.com,[email protected],mytag
'''
csvstorm_missing = b'''
for ($fqdn, $email, $tag) in $rows {
$lib.print("hello hello")
[ inet:dns:soa=$lib.guid() :fqdn=$fqdn :email?=$email +?#$tag ]
}
'''
# count is used for test coverage.
csvstorm_export = b'''
test:int $lib.csv.emit($node, $node.props.loc) | count
'''
class CsvToolTest(s_t_utils.SynTest):
def _getOldSynVers(self):
return (0, 0, 0)
async def test_csvtool(self):
async with self.getTestCore() as core:
url = core.getLocalUrl()
dirn = s_common.gendir(core.dirn, 'junk')
logpath = s_common.genpath(dirn, 'csvtest.log')
csvpath = s_common.genpath(dirn, 'csvtest.csv')
with s_common.genfile(csvpath) as fd:
fd.write(csvfile)
stormpath = s_common.genpath(dirn, 'csvtest.storm')
with s_common.genfile(stormpath) as fd:
fd.write(csvstorm)
argv = ['--csv-header', '--debug', '--cortex', url, '--logfile', logpath, stormpath, csvpath]
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('oh hai')
outp.expect('2 nodes')
with mock.patch('synapse.telepath.Proxy._getSynVers', self._getOldSynVers):
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('Cortex version 0.0.0 is outside of the csvtool supported range')
async def test_csvtool_missingvals(self):
async with self.getTestCore() as core:
url = core.getLocalUrl()
dirn = s_common.gendir(core.dirn, 'junk')
logpath = s_common.genpath(dirn, 'csvtest.log')
csvpath = s_common.genpath(dirn, 'csvtest.csv')
with s_common.genfile(csvpath) as fd:
fd.write(csvfile_missing)
stormpath = s_common.genpath(dirn, 'csvtest.storm')
with s_common.genfile(stormpath) as fd:
fd.write(csvstorm_missing)
argv = ['--csv-header', '--debug', '--cortex', url, '--logfile', logpath, stormpath, csvpath]
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('hello hello')
outp.expect("'fqdn': 'google.com'")
outp.expect('3 nodes')
async def test_csvtool_local(self):
with self.getTestDir() as dirn:
logpath = s_common.genpath(dirn, 'csvtest.log')
csvpath = s_common.genpath(dirn, 'csvtest.csv')
with s_common.genfile(csvpath) as fd:
fd.write(csvfile)
stormpath = s_common.genpath(dirn, 'csvtest.storm')
with s_common.genfile(stormpath) as fd:
fd.write(csvstorm)
argv = ['--csv-header', '--debug', '--test', '--logfile', logpath, stormpath, csvpath]
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('2 nodes')
async def test_csvtool_cli(self):
with self.getTestDir() as dirn:
logpath = s_common.genpath(dirn, 'csvtest.log')
csvpath = s_common.genpath(dirn, 'csvtest.csv')
with s_common.genfile(csvpath) as fd:
fd.write(csvfile)
stormpath = s_common.genpath(dirn, 'csvtest.storm')
with s_common.genfile(stormpath) as fd:
fd.write(csvstorm)
argv = ['--csv-header', '--debug', '--cli', '--test', '--logfile', logpath, stormpath, csvpath]
outp = self.getTestOutp()
cmdg = s_t_utils.CmdGenerator(['storm --hide-props inet:fqdn',
EOFError(),
])
with self.withCliPromptMockExtendOutp(outp):
with self.withTestCmdr(cmdg):
await s_csvtool.main(argv, outp=outp)
outp.expect('inet:fqdn=google.com')
outp.expect('2 nodes')
async def test_csvtool_export(self):
async with self.getTestCore() as core:
await core.nodes('[ test:int=20 :loc=us ]')
await core.nodes('[ test:int=30 :loc=cn ]')
await core.nodes('[ test:int=40 ]')
url = core.getLocalUrl()
dirn = s_common.gendir(core.dirn, 'junk')
csvpath = s_common.genpath(dirn, 'csvtest.csv')
stormpath = s_common.genpath(dirn, 'csvtest.storm')
with s_common.genfile(stormpath) as fd:
fd.write(csvstorm_export)
# test a few no-no cases
argv = ['--test', '--export', stormpath, csvpath]
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('--export requires --cortex')
argv = ['--cortex', url, '--export', stormpath, csvpath, 'lol.csv']
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('--export requires exactly 1 csvfile')
argv = ['--cortex', url, '--export', stormpath, csvpath]
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
outp.expect('Counted 3 nodes.')
outp.expect('3 csv rows')
with open(csvpath, 'r') as fd:
rows = [row for row in csv.reader(fd)]
self.eq(rows, (['20', 'us'], ['30', 'cn'], ['40', '']))
with mock.patch('synapse.telepath.Proxy._getSynVers', self._getOldSynVers):
outp = self.getTestOutp()
await s_csvtool.main(argv, outp=outp)
                outp.expect('Cortex version 0.0.0 is outside of the csvtool supported range')
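# --- Hedged CLI sketch (editor's addition). The flow the tests drive through
# s_csvtool.main() can also be run from a shell; the module path matches this
# repo, the file names and cortex URL are hypothetical:
#   python -m synapse.tools.csvtool --csv-header --test --logfile csvtest.log csvtest.storm csvtest.csv
#   python -m synapse.tools.csvtool --cortex tcp://cortex.host:27492/ --export export.storm out.csv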
| apache-2.0 | 5,403,740,842,168,833,000 | 31.448454 | 107 | 0.554408 | false | 3.362714 | true | false | false |
Jumpscale/jumpscale_core8 | lib/JumpScale/tools/issuemanager/models/repoCollection.py | 1 | 2535 | from JumpScale import j
base = j.data.capnp.getModelBaseClassCollection()
class RepoCollection(base):
"""
This class represent a collection of Issues
"""
def list(self, owner=0, name='', id=0, source="", returnIndex=False):
"""
List all keys of repo model with specified params.
@param owner int,, id of owner the repo belongs to.
@param name str,, name of repo.
@param id int,, repo id in db.
@param source str,, source of remote database.
@param returnIndexalse bool,, return the index used.
"""
if owner == "":
owner = ".*"
if name == "":
name = ".*"
if id == "" or id == 0:
id = ".*"
if source == "":
source = ".*"
regex = "%s:%s:%s:%s" % (owner, name, id, source)
return self._index.list(regex, returnIndex=returnIndex)
def find(self, owner='', name='', id=0, milestone=0, member=0, label='', source=""):
"""
List all instances of repo model with specified params.
@param owner int,, id of owner the repo belongs to.
@param name str,, name of repo.
@param id int,, repo id in db.
@param milestone int,, id of milestone in repo.
@param member int,, id of member in repo.
@param milestone int,, label in repo.
@param source str,, source of remote database.
@param returnIndexalse bool,, return the index used.
"""
res = []
for key in self.list(owner=owner, name=name, id=id, source=source):
res.append(self.get(key))
if milestone:
for model in res[::-1]:
for milestone_model in model.dictFiltered.get('milestones', []):
if milestone == milestone_model['id']:
break
else:
res.remove(model)
if member:
for model in res[::-1]:
for member_model in model.dictFiltered.get('members', []):
if member == member_model['userKey']:
break
else:
res.remove(model)
if label:
for model in res[::-1]:
if (label not in model.dictFiltered.get('labels', [])) or not model.dictFiltered.get('labels', False):
res.remove(model)
return res
def getFromId(self, id):
key = self._index.lookupGet("issue_id", id)
return self.get(key)
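# --- Hedged usage sketch (editor's addition; `repos` stands for an already
# constructed RepoCollection and the filter values are hypothetical):
# keys = repos.list(owner=42, source='github')    # regex-backed index lookup
# matching = repos.find(owner=42, label='bug')    # full models, post-filtered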
| apache-2.0 | 7,284,187,035,742,835,000 | 32.8 | 118 | 0.523077 | false | 4.21797 | false | false | false |
fzza/rdio-sock | src/rdiosock/metadata.py | 1 | 3693 | from rdiosock.exceptions import RdioApiError
from rdiosock.objects.collection import RdioList
class SEARCH_TYPES:
"""Metadata search types"""
NONE = 0
ARTIST = 1
ALBUM = 2
TRACK = 4
PLAYLIST = 8
USER = 16
LABEL = 32
ALL = (
ARTIST |
ALBUM |
TRACK |
PLAYLIST |
USER |
LABEL
)
_MAP = {
ARTIST: 'Artist',
ALBUM: 'Album',
TRACK: 'Track',
PLAYLIST: 'Playlist',
USER: 'User',
LABEL: 'Label'
}
@classmethod
def parse(cls, value):
if type(value) is int:
value = cls._parse_bit(value)
items = []
for key in value:
items.append(cls._MAP[key])
return items
@classmethod
def _parse_bit(cls, value):
items = []
for key in cls._MAP:
if (value & key) == key:
items.append(key)
return items
class SEARCH_EXTRAS:
"""Metadata search extras"""
NONE = 0
LOCATION = 1
USERNAME = 2
STATIONS = 4
DESCRIPTION = 8
FOLLOWER_COUNT = 16
FOLLOWING_COUNT = 32
FAVORITE_COUNT = 64
SET_COUNT = 128
ICON_250x375 = 256
ICON_500x750 = 512
ICON_250x333 = 1024
ICON_500x667 = 2048
ALL = (
LOCATION |
USERNAME |
STATIONS |
DESCRIPTION |
FOLLOWER_COUNT |
FOLLOWING_COUNT |
FAVORITE_COUNT |
SET_COUNT |
ICON_250x375 |
ICON_500x750 |
ICON_250x333 |
ICON_500x667
)
_MAP = {
LOCATION: 'location',
USERNAME: 'username',
STATIONS: 'stations',
DESCRIPTION: 'description',
FOLLOWER_COUNT: 'followerCount',
FOLLOWING_COUNT: 'followingCount',
FAVORITE_COUNT: 'favoriteCount',
SET_COUNT: 'setCount',
ICON_250x375: 'icon250x375',
ICON_500x750: 'icon500x750',
ICON_250x333: 'icon250x333',
ICON_500x667: 'icon500x667'
}
@classmethod
def parse(cls, value):
if type(value) is int:
value = cls._parse_bit(value)
items = []
for key in value:
items.append(cls._MAP[key])
return items
@classmethod
def _parse_bit(cls, value):
items = []
for key in cls._MAP:
if (value & key) == key:
items.append(key)
return items
class RdioMetadata(object):
def __init__(self, sock):
"""
:type sock: RdioSock
"""
self._sock = sock
def search(self, query, search_types=SEARCH_TYPES.ALL, search_extras=SEARCH_EXTRAS.ALL):
"""Search for media item.
:param query: Search query
:type query: str
:param search_types: Search type (:class:`rdiosock.metadata.SEARCH_TYPES` bitwise-OR or list)
:type search_types: int or list of int
:param search_extras: Search result extras to include (:class:`rdiosock.metadata.SEARCH_EXTRAS` bitwise-OR or list)
:type search_extras: int or list of int
"""
result = self._sock._api_post('search', {
'query': query,
'types[]': SEARCH_TYPES.parse(search_types)
}, secure=False, extras=SEARCH_EXTRAS.parse(search_extras))
if result['status'] == 'error':
raise RdioApiError(result)
result = result['result']
if result['type'] == 'list':
return RdioList.parse(result)
else:
raise NotImplementedError()
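# --- Hedged usage sketch (editor's addition; `sock` stands for a connected
# RdioSock instance, which is not constructed in this module):
# meta = RdioMetadata(sock)
# results = meta.search('radiohead',
#                       search_types=SEARCH_TYPES.ARTIST | SEARCH_TYPES.ALBUM,
#                       search_extras=SEARCH_EXTRAS.ICON_250x375)
# SEARCH_TYPES.parse(SEARCH_TYPES.ARTIST | SEARCH_TYPES.ALBUM)  # ['Artist', 'Album']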
| gpl-3.0 | 9,067,494,512,009,808,000 | 22.08125 | 123 | 0.516382 | false | 3.681954 | false | false | false |
Hiestaa/3D-Lsystem | lsystem/Tree7.py | 1 | 1145 | from lsystem.LSystem import LSystem
import math
class Tree7(LSystem):
"""Fractale en forme d'arbre v7"""
def defineParams(self):
self.LSName = "Tree7"
self.LSAngle = math.pi / 4
self.LSSegment = 100
self.LSSteps = 9
self.LSStartingString = "T(x)"
self.LSStochastic = False
self.LSStochRange = 0.2
def createVars(self):
self.LSVars = {
'F': self.turtle.forward,
'T': self.turtle.forward,
'+': self.turtle.rotZ,
'-': self.turtle.irotZ,
'^': self.turtle.rotY,
'&': self.turtle.irotY,
'<': self.turtle.rotX,
'>': self.turtle.irotX,
'|': self.turtle.rotX,
'[': self.turtle.push,
']': self.turtle.pop,
'I': self.turtle.setColor,
'Y': self.turtle.setColor
}
self.LSParams = {
'x': self.LSSegment,
'+': self.LSAngle,
'-': self.LSAngle,
'&': self.LSAngle,
'^': self.LSAngle,
'<': self.LSAngle,
'>': self.LSAngle,
'|': self.LSAngle * 2,
'[': None,
']': None,
'I': (0.5,0.25,0),
'Y': (0, 0.5, 0)
}
def createRules(self):
self.LSRules = {
"T(x)": "IT(x*0.3)F(x*0.3)",
"F(x)": "IF(x)[+YF(x*0.5)][-YF(x*0.5)][<YF(x*0.5)][>YF(x*0.5)]"
}
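# --- Hedged usage sketch (editor's addition; the LSystem base class is not
# shown here, so the entry points below are assumptions):
# tree = Tree7()
# tree.iterate()   # expand "T(x)" LSSteps times; step 1 gives "IT(x*0.3)F(x*0.3)"
# tree.draw()      # let the turtle interpret the expanded string in 3D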
| mit | -4,130,226,436,172,089,000 | 20.603774 | 69 | 0.558952 | false | 2.136194 | false | false | false |
sagiss/sardana | src/sardana/taurus/qt/qtgui/extra_hkl/hklscan.py | 1 | 15114 | #!/usr/bin/env python
##############################################################################
##
## This file is part of Sardana
##
## http://www.sardana-controls.org/
##
## Copyright 2011 CELLS / ALBA Synchrotron, Bellaterra, Spain
##
## Sardana is free software: you can redistribute it and/or modify
## it under the terms of the GNU Lesser General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## Sardana is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public License
## along with Sardana. If not, see <http://www.gnu.org/licenses/>.
##
##############################################################################
__docformat__ = 'restructuredtext'
import sys
import sardana
from taurus.external.qt import Qt
from taurus.qt.qtgui.container import TaurusWidget
from taurus.qt.qtgui.display import TaurusLabel
from taurus.qt.qtgui.base import TaurusBaseWidget
from taurus.external.qt import QtCore, QtGui
import taurus.core
from taurus.qt.qtcore.communication import SharedDataManager
from taurus.qt.qtgui.input import TaurusValueLineEdit
from displayscanangles import DisplayScanAngles
import taurus.core.util.argparse
import taurus.qt.qtgui.application
from taurus.qt.qtgui.util.ui import UILoadable
from PyTango import *
from sardana.taurus.qt.qtgui.extra_macroexecutor import TaurusMacroExecutorWidget, TaurusSequencerWidget, \
TaurusMacroConfigurationDialog, \
TaurusMacroDescriptionViewer, DoorOutput, DoorDebug, DoorResult
class EngineModesComboBox(Qt.QComboBox, TaurusBaseWidget):
"""ComboBox representing list of engine modes"""
def __init__(self, parent=None):
name = self.__class__.__name__
self.call__init__wo_kw(Qt.QComboBox, parent)
self.call__init__(TaurusBaseWidget, name)
self.setSizeAdjustPolicy(Qt.QComboBox.AdjustToContentsOnFirstShow)
self.setToolTip("Choose a engine mode ...")
QtCore.QMetaObject.connectSlotsByName(self)
def loadEngineModeNames(self, enginemodes):
self.clear()
self.addItems(enginemodes)
@UILoadable(with_ui="_ui")
class HKLScan(TaurusWidget):
def __init__(self, parent=None, designMode=False):
TaurusWidget.__init__(self, parent, designMode=designMode)
self.loadUi(filename="hklscan.ui")
self.connect(self._ui.hklStartScanButton,
Qt.SIGNAL("clicked()"), self.start_hklscan)
self.connect(self._ui.hklStopScanButton,
Qt.SIGNAL("clicked()"), self.stop_hklscan)
self.connect(self._ui.hklDisplayAnglesButton,
Qt.SIGNAL("clicked()"), self.display_angles)
self.connect(self._ui.MacroServerConnectionButton, Qt.SIGNAL(
"clicked()"), self.open_macroserver_connection_panel)
# Create a global SharedDataManager
Qt.qApp.SDM = SharedDataManager(self)
@classmethod
def getQtDesignerPluginInfo(cls):
ret = TaurusWidget.getQtDesignerPluginInfo()
ret['module'] = 'hklscan'
ret['group'] = 'Taurus Containers'
ret['container'] = ':/designer/frame.png'
ret['container'] = True
return ret
def setModel(self, model):
if model != None:
self.device = taurus.Device(model)
self.pseudo_motor_names = []
for motor in self.device.hklpseudomotorlist:
self.pseudo_motor_names.append(motor.split(' ')[0])
self.h_device_name = self.pseudo_motor_names[0]
self.h_device = taurus.Device(self.h_device_name)
self.k_device_name = self.pseudo_motor_names[1]
self.k_device = taurus.Device(self.k_device_name)
self.l_device_name = self.pseudo_motor_names[2]
self.l_device = taurus.Device(self.l_device_name)
# Add dynamically the angle widgets
motor_list = self.device.motorlist
motor_names = []
for motor in self.device.motorlist:
motor_names.append(motor.split(' ')[0])
self.nb_motors = len(motor_list)
angles_labels = []
angles_names = []
angles_taurus_label = []
gap_x = 800 / self.nb_motors
try:
angles_names = self.device.motorroles
except: # Only for compatibility
if self.nb_motors == 4:
angles_names.append("omega")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("theta")
elif self.nb_motors == 6:
angles_names.append("mu")
angles_names.append("th")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("gamma")
angles_names.append("delta")
for i in range(0, self.nb_motors):
angles_labels.append(QtGui.QLabel(self))
angles_labels[i].setGeometry(
QtCore.QRect(50 + gap_x * i, 290, 51, 17))
alname = "angleslabel" + str(i)
angles_labels[i].setObjectName(alname)
angles_labels[i].setText(QtGui.QApplication.translate(
"HKLScan", angles_names[i], None, QtGui.QApplication.UnicodeUTF8))
angles_taurus_label.append(TaurusLabel(self))
angles_taurus_label[i].setGeometry(
QtCore.QRect(50 + gap_x * i, 320, 81, 19))
atlname = "anglestauruslabel" + str(i)
angles_taurus_label[i].setObjectName(atlname)
angles_taurus_label[i].setModel(motor_names[i] + "/Position")
# Set model to hkl display
hmodel = self.h_device_name + "/Position"
self._ui.taurusValueLineH.setModel(hmodel)
self._ui.taurusLabelValueH.setModel(hmodel)
kmodel = self.k_device_name + "/Position"
self._ui.taurusValueLineK.setModel(kmodel)
self._ui.taurusLabelValueK.setModel(kmodel)
lmodel = self.l_device_name + "/Position"
self._ui.taurusValueLineL.setModel(lmodel)
self._ui.taurusLabelValueL.setModel(lmodel)
# Set model to engine and modes
enginemodel = model + '/engine'
self._ui.taurusLabelEngine.setModel(enginemodel)
enginemodemodel = model + '/enginemode'
self._ui.taurusLabelEngineMode.setModel(enginemodemodel)
self.enginemodescombobox = EngineModesComboBox(self)
self.enginemodescombobox.setGeometry(QtCore.QRect(150, 445, 221, 27))
self.enginemodescombobox.setObjectName("enginemodeslist")
self.enginemodescombobox.loadEngineModeNames(self.device.hklmodelist)
self.connect(self.enginemodescombobox, Qt.SIGNAL(
"currentIndexChanged(QString)"), self.onModeChanged)
def onModeChanged(self, modename):
if self.device.engine != "hkl":
self.device.write_attribute("engine", "hkl")
self.device.write_attribute("enginemode", str(modename))
def start_hklscan(self):
start_hkl = []
stop_hkl = []
start_hkl.append(float(self._ui.lineEditStartH.text()))
start_hkl.append(float(self._ui.lineEditStartK.text()))
start_hkl.append(float(self._ui.lineEditStartL.text()))
stop_hkl.append(float(self._ui.lineEditStopH.text()))
stop_hkl.append(float(self._ui.lineEditStopK.text()))
stop_hkl.append(float(self._ui.lineEditStopL.text()))
nb_points = int(self._ui.LineEditNbpoints.text())
sample_time = float(self._ui.LineEditSampleTime.text())
dim = 0
macro_name = ["ascan", "a2scan", "a3scan"]
macro_command = []
index_to_scan = []
if self.door_device != None:
for i in range(0, 3):
if start_hkl[i] != stop_hkl[i]:
dim = dim + 1
index_to_scan.append(i)
if dim > 0:
macro_command.append(macro_name[dim - 1])
for i in range(len(index_to_scan)):
macro_command.append(
str(self.pseudo_motor_names[index_to_scan[i]]))
macro_command.append(str(start_hkl[index_to_scan[i]]))
macro_command.append(str(stop_hkl[index_to_scan[i]]))
macro_command.append(str(nb_points))
macro_command.append(str(sample_time))
self.door_device.RunMacro(macro_command)
def stop_hklscan(self):
self.door_device.StopMacro()
def display_angles(self):
xangle = []
for i in range(0, 6):
xangle.append(40 + i * 100)
yhkl = 50
tr = self.device.selectedtrajectory
w = DisplayScanAngles()
angles_labels = []
angles_names = []
if self.nb_motors == 4:
angles_names.append("omega")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("theta")
elif self.nb_motors == 6:
angles_names.append("mu")
angles_names.append("th")
angles_names.append("chi")
angles_names.append("phi")
angles_names.append("gamma")
angles_names.append("delta")
dsa_label = []
for i in range(0, self.nb_motors):
dsa_label.append(QtGui.QLabel(w))
dsa_label[i].setGeometry(QtCore.QRect(xangle[i], yhkl, 51, 20))
label_name = "dsa_label_" + str(i)
dsa_label[i].setObjectName(label_name)
dsa_label[i].setText(QtGui.QApplication.translate(
"Form", angles_names[i], None, QtGui.QApplication.UnicodeUTF8))
start_hkl = []
stop_hkl = []
missed_values = 0
# TODO: This code will raise exception if one of the line edits is empty.
# But not all dimensions (H & K & L) are obligatory. One could try
# to display angles of just 1 or 2 dimensional scan.
try:
start_hkl.append(float(self._ui.lineEditStartH.text()))
start_hkl.append(float(self._ui.lineEditStartK.text()))
start_hkl.append(float(self._ui.lineEditStartL.text()))
stop_hkl.append(float(self._ui.lineEditStopH.text()))
stop_hkl.append(float(self._ui.lineEditStopK.text()))
stop_hkl.append(float(self._ui.lineEditStopL.text()))
nb_points = int(self._ui.LineEditNbpoints.text())
except:
nb_points = -1
missed_values = 1
increment_hkl = []
if nb_points > 0:
for i in range(0, 3):
increment_hkl.append((stop_hkl[i] - start_hkl[i]) / nb_points)
taurusValueAngle = []
for i in range(0, nb_points + 1):
hkl_temp = []
for j in range(0, 3):
hkl_temp.append(start_hkl[j] + i * increment_hkl[j])
no_trajectories = 0
try:
self.device.write_attribute("computetrajectoriessim", hkl_temp)
except:
no_trajectories = 1
if not no_trajectories:
angles_list = self.device.trajectorylist[tr]
taurusValueAngle.append([])
for iangle in range(0, self.nb_motors):
taurusValueAngle[i].append(TaurusValueLineEdit(w))
taurusValueAngle[i][iangle].setGeometry(
QtCore.QRect(xangle[iangle], yhkl + 30 * (i + 1), 80, 27))
taurusValueAngle[i][iangle].setReadOnly(True)
tva_name = "taurusValueAngle" + str(i) + "_" + str(iangle)
taurusValueAngle[i][iangle].setObjectName(tva_name)
taurusValueAngle[i][iangle].setValue(
"%10.4f" % angles_list[iangle])
else:
taurusValueAngle.append(TaurusValueLineEdit(w))
taurusValueAngle[i].setGeometry(QtCore.QRect(
xangle[0], yhkl + 30 * (i + 1), self.nb_motors * 120, 27))
taurusValueAngle[i].setReadOnly(True)
tva_name = "taurusValueAngle" + str(i)
taurusValueAngle[i].setObjectName(tva_name)
taurusValueAngle[i].setValue(
"... No angle solution for hkl values ...")
# TODO: not all dimensions (H & K & L) are obligatory. One could try
# to display angles of just 1 or 2 dimensional scan.
if nb_points == -1:
nb_points = 0
taurusValueAngle.append(TaurusValueLineEdit(w))
taurusValueAngle[0].setGeometry(QtCore.QRect(
xangle[0], yhkl + 30, self.nb_motors * 120, 27))
taurusValueAngle[0].setReadOnly(True)
tva_name = "taurusValueAngle"
taurusValueAngle[0].setObjectName(tva_name)
taurusValueAngle[0].setValue(
"... No scan parameters filled. Fill them in the main window ...")
w.resize(self.nb_motors * 140, 120 + nb_points * 40)
        w.show()
def open_macroserver_connection_panel(self):
w = TaurusMacroConfigurationDialog(self)
Qt.qApp.SDM.connectReader("macroserverName", w.selectMacroServer)
Qt.qApp.SDM.connectReader("doorName", w.selectDoor)
Qt.qApp.SDM.connectReader("doorName", self.onDoorChanged)
Qt.qApp.SDM.connectWriter(
"macroserverName", w, 'macroserverNameChanged')
Qt.qApp.SDM.connectWriter("doorName", w, 'doorNameChanged')
w.show()
def onDoorChanged(self, doorName):
if doorName != self.door_device_name:
self.door_device_name = doorName
self.door_device = taurus.Device(doorName)
def main():
parser = taurus.core.util.argparse.get_taurus_parser()
parser.usage = "%prog <model> [door_name]"
parser.set_description("a taurus application for performing hkl scans")
app = taurus.qt.qtgui.application.TaurusApplication(cmd_line_parser=parser,
app_version=sardana.Release.version)
app.setApplicationName("hklscan")
args = app.get_command_line_args()
if len(args) < 1:
msg = "model not set (requires diffractometer controller)"
parser.error(msg)
w = HKLScan()
w.model = args[0]
w.setModel(w.model)
w.door_device = None
w.door_device_name = None
if len(args) > 1:
w.onDoorChanged(args[1])
else:
print "WARNING: Not door name supplied. Connection to MacroServer/Door not automatically done"
w.show()
sys.exit(app.exec_())
# if len(sys.argv)>1: model=sys.argv[1]
# else: model = None
# app = Qt.QApplication(sys.argv)
# w = HKLScan()
# w.setModel(model)
# w.show()
# sys.exit(app.exec_())
if __name__ == "__main__":
main()
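# --- Hedged CLI sketch (editor's addition; arguments follow the parser set up
# in main(), device names are hypothetical):
#   python hklscan.py my/diffractometer/01 my/door/01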
| lgpl-3.0 | 5,986,230,292,049,450,000 | 37.070529 | 119 | 0.596335 | false | 3.596003 | false | false | false |
josephxsxn/alchemists_notepad | Tests.py | 1 | 6304 | #List all ENUMS
from Object.Ingredient import Ingredient
for i in Ingredient:
print(i)
from Object.PotionColor import PotionColor
for r in PotionColor:
print(r)
from Object.PotionSign import PotionSign
for r in PotionSign:
print(r)
#//TODO
#NEED TO ADD ALCHEMICAL ENUMS HERE
#Make a Potion and Fetch its values
from Object.Potion import Potion
from Object.PotionColor import PotionColor
from Object.PotionSign import PotionSign
flowertoad = Potion(Ingredient.TOAD, Ingredient.FLOWER, PotionColor.RED, PotionSign.POSITIVE)
print(flowertoad.get_ingredients())
print(flowertoad.get_color())
print(flowertoad.get_sign())
###Put some Potions in the List and Get back
from Object.PotionList import PotionList
polist = PotionList()
polist.add_potion(flowertoad)
pores = polist.get_potions()
for po in pores:
print(po.get_ingredients())
print(po.get_color())
print(po.get_sign())
#Get an exact one from the list
pores = polist.get_potion(0)
print(pores.get_ingredients())
print(pores.get_color())
print(pores.get_sign())
#fetch one that doesnt exist from the list
pores = polist.get_potion(1)
print(pores)
#make an few Alchemicals
from Object.Alchemical import Alchemical
from Object.AlchemicalColor import AlchemicalColor
from Object.AlchemicalSign import AlchemicalSign
from Object.AlchemicalSize import AlchemicalSize
#triplet one
redposlarge = Alchemical(AlchemicalColor.RED, AlchemicalSign.POSITIVE, AlchemicalSize.LARGE)
bluenegsmall = Alchemical(AlchemicalColor.BLUE, AlchemicalSign.NEGATIVE, AlchemicalSize.SMALL)
greennegsmall = Alchemical(AlchemicalColor.GREEN, AlchemicalSign.NEGATIVE, AlchemicalSize.SMALL)
#triplet two
redpossmall = Alchemical(AlchemicalColor.RED, AlchemicalSign.POSITIVE, AlchemicalSize.SMALL)
bluepossmall = Alchemical(AlchemicalColor.BLUE, AlchemicalSign.POSITIVE, AlchemicalSize.SMALL)
greenposlarge = Alchemical(AlchemicalColor.GREEN, AlchemicalSign.POSITIVE, AlchemicalSize.LARGE)
print('T1 ' + str(redposlarge.get_color()) + ' ' + str(redposlarge.get_sign()) + ' ' + str(redposlarge.get_size()))
print('T1 ' + str(bluenegsmall.get_color()) + ' ' + str(bluenegsmall.get_sign()) + ' ' + str(bluenegsmall.get_size()))
print('T1 ' + str(greennegsmall.get_color()) + ' ' + str(greennegsmall.get_sign()) + ' ' + str(greennegsmall.get_size()))
print('T2 ' + str(redpossmall.get_color()) + ' ' + str(redpossmall.get_sign()) + ' ' + str(redpossmall.get_size()))
print('T2 ' + str(bluepossmall.get_color()) + ' ' + str(bluepossmall.get_sign()) + ' ' + str(bluepossmall.get_size()))
print('T2 ' + str(greenposlarge.get_color()) + ' ' + str(greenposlarge.get_sign()) + ' ' + str(greenposlarge.get_size()))
#make a Triplet
from Object.AlchemicalTriplet import AlchemicalTriplet
triplet_one = AlchemicalTriplet([redposlarge, bluenegsmall, greennegsmall])
triplet_one_list = triplet_one.get_alchemicals()
for a in triplet_one_list:
print('Triplet_ONE ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
triplet_two = AlchemicalTriplet([redpossmall, bluepossmall, greenposlarge])
triplet_two_list = triplet_two.get_alchemicals()
print(triplet_two_list)
for b in triplet_two_list:
print('Triplet_TWO ' + str(b.get_color()) + ' ' + str(b.get_sign()) + ' ' + str(b.get_size()))
#make some ingredients and properties
from Object.IngredientProperties import IngredientProperties
ip = IngredientProperties(Ingredient.TOAD)
print(str(ip.get_name()))
print(ip.get_alchemical_options())
ip.set_alchemical_options([triplet_one])
ip_triplet_list = ip.get_alchemical_options()
#for given ingredient list all triplet props
for l in ip_triplet_list:
for a in l.get_alchemicals():
print('IngredientProps ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
#Alchemical Combinations Test
from Routine.AlchemicalCombinations import AlchemicalCombinations
ingredient_dic = {Ingredient.TOAD : ip}
print(ingredient_dic.keys())
triplet_list = ingredient_dic[Ingredient.TOAD].get_alchemical_options()
for triplet in triplet_list:
for a in triplet.get_alchemicals():
print('AC Combos ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
ac = AlchemicalCombinations()
res = ac.reduce_potion_alchemicals(polist.get_potion(0), ingredient_dic)
print(polist.get_potion(0).get_ingredients())
print(polist.get_potion(0).get_sign())
print(polist.get_potion(0).get_color())
print(res.keys())
triplet_list = res[Ingredient.TOAD]
for triplet in triplet_list:
for a in triplet.get_alchemicals():
print('Filtered Toad Combos ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
print(len(res[Ingredient.TOAD]))
print(len(res[Ingredient.FLOWER]))
#triplet_list = res[Ingredient.FLOWER]
#for triplet in triplet_list:
# for a in triplet.get_alchemicals():
# print('Filtered Flower Combos ' + str(a.get_color()) + ' ' + str(a.get_sign()) + ' ' + str(a.get_size()))
ip = IngredientProperties(Ingredient.FLOWER)
print(str(ip.get_name()))
print(ip.get_alchemical_options())
ip.set_alchemical_options(res[Ingredient.FLOWER])
ingredient_dic[Ingredient.FLOWER] = ip
print('TOAD LEN ' + str(len(ingredient_dic[Ingredient.TOAD].get_alchemical_options())))
print('FLOWER LEN ' + str(len(ingredient_dic[Ingredient.FLOWER].get_alchemical_options())))
initalTriplets = ac.inital_alchemical_options()
print(len(initalTriplets))
print(len(ac.potion_only_filter(initalTriplets, polist.get_potion(0).get_color(), polist.get_potion(0).get_sign())))
#################
###NEUTRAL POTION
#################
herbtoad = Potion(Ingredient.TOAD, Ingredient.HERB, PotionColor.NEUTRAL, PotionSign.NEUTRAL)
polist.add_potion(herbtoad)
#ac2 = AlchemicalCombinations()
res = ac.reduce_potion_alchemicals(herbtoad, ingredient_dic)
print(polist.get_potion(1).get_ingredients())
print(polist.get_potion(1).get_sign())
print(polist.get_potion(1).get_color())
print(res.keys())
print('TOAD LEN RES: ' + str(len(res[Ingredient.TOAD])))
print('HERB LEN RES: ' + str(len(res[Ingredient.HERB])))
ip = IngredientProperties(Ingredient.TOAD)
print(str(ip.get_name()))
ip.set_alchemical_options(res[Ingredient.TOAD])
ingredient_dic[Ingredient.TOAD] = ip
ip = IngredientProperties(Ingredient.HERB)
print(str(ip.get_name()))
ip.set_alchemical_options(res[Ingredient.HERB])
ingredient_dic[Ingredient.HERB] = ip
print(ingredient_dic.keys())
| apache-2.0 | -8,223,180,517,559,525,000 | 39.410256 | 121 | 0.740641 | false | 2.636554 | false | false | false |
domain51/d51.django.apps.logger | d51/django/apps/logger/tests/views.py | 1 | 1154 | import datetime
from django.test import TestCase
from django.test.client import Client
from ..models import Hit
from .utils import build_hit_url, random_url
class TestOfHitView(TestCase):
def test_logs_hit(self):
url = random_url()
c = Client()
response = c.get(build_hit_url(url))
hit = Hit.objects.get(url=url)
def test_stores_current_time(self):
url = random_url()
response = Client().get(build_hit_url(url))
hit = Hit.objects.get(url=url)
self.assert_(isinstance(hit.created_on, datetime.datetime))
self.assert_((datetime.datetime.now() - hit.created_on).seconds < 1,
"Check creation time, might fail on slow machines/network connections.")
def test_redirects_to_url(self):
url = random_url()
response = Client().get(build_hit_url(url))
self.assertEquals(response.status_code, 302)
# TODO: refactor this - we can't use assertRedirect() because it
# tries to load crap, but this test should be simplified
self.assertEquals(response._headers['location'][1], url, "ensure redirection took place")
| gpl-3.0 | -987,452,888,625,049,100 | 36.225806 | 97 | 0.652513 | false | 3.796053 | true | false | false |
roam/machete | machete/endpoints.py | 1 | 25618 | # -*- coding: utf-8 -*-
from __future__ import (unicode_literals, print_function, division,
absolute_import)
import sys
import hashlib
from contextlib import contextmanager
from django.views.decorators.csrf import csrf_exempt
from django.db import transaction, models
from django.views.generic import View
from django.core.exceptions import ImproperlyConfigured
from django.core.urlresolvers import reverse
from django.http import HttpResponse, Http404
from django.utils.http import quote_etag, parse_etags
from .serializers import serialize
from .urls import create_resource_view_name
from .exceptions import (JsonApiError, MissingRequestBody, InvalidDataFormat,
IdMismatch, FormValidationError)
from .utils import (RequestContext, RequestWithResourceContext, pluck_ids,
RequestPayloadDescriptor)
from . import compat, json
@contextmanager
def not_atomic(using=None):
yield
class GetEndpoint(View):
"""
Extends a generic View to provide support for retrieving resources.
Some methods might seem convoluted, but they're mostly built that
way to provide useful points of extension/override. Methods are
rarely passed all information, but request-method methods
(get, post,...) should provide a context object containing the
necessary information under ``self.context``.
"""
context = None
content_type = 'application/json' # Default to this for now; works better in browsers
methods = ['get']
pks_url_key = 'pks'
pk_field = 'pk'
queryset = None
model = None
form_class = None
filter_class = None
include_link_to_self = False
etag_attribute = None
def __init__(self, *args, **kwargs):
super(GetEndpoint, self).__init__(*args, **kwargs)
# Django uses http_method_names to know which methods are
# supported, we always add options on top which will advertise
# the actual methods we support.
self.http_method_names = self.get_methods() + ['options']
@classmethod
def endpoint(cls, **initkwargs):
return csrf_exempt(cls.as_view(**initkwargs))
def dispatch(self, request, *args, **kwargs):
# Override dispatch to enable the handling or errors we can
# handle.
# Because Django 1.4 only sets the request parameters in
# dispatch we'll set them right now ourselves.
self.request = request
self.args = args
self.kwargs = kwargs
manager, m_args, m_kwargs = self.context_manager()
try:
with manager(*m_args, **m_kwargs):
return super(GetEndpoint, self).dispatch(request, *args, **kwargs)
except Exception as error:
et, ei, tb = sys.exc_info()
return self.handle_error(error, tb)
def options(self, request, *args, **kwargs):
# From the JSON API FAQ:
# http://jsonapi.org/faq/#how-to-discover-resource-possible-actions
self.context = self.create_get_context(request)
actions = self.possible_actions()
return HttpResponse(','.join(a.upper() for a in actions))
def possible_actions(self):
"""
Returns a list of allowed methods for this endpoint.
You can use the context (a GET context) to determine what's
possible. By default this simply returns all allowed methods.
"""
return self.get_methods()
def get(self, request, *args, **kwargs):
self.context = self.create_get_context(request)
if not self.has_etag_changed():
content_type = self.get_content_type()
return HttpResponse(status=304, content_type=content_type)
collection = False
if self.context.requested_single_resource:
data = self.get_resource()
else:
data = self.get_resources()
collection = True
return self.create_http_response(data, collection=collection, compound=True)
def has_etag_changed(self):
if not self.etag_attribute:
return True
etag = self.generate_etag()
if not etag:
return True
match = self.request.META.get('HTTP_IF_NONE_MATCH')
if match:
values = parse_etags(match)
for value in values:
# Django appends ";gzip" when gzip is enabled
clean_value = value.split(';')[0]
if clean_value == '*' or clean_value == etag:
return False
return True
def generate_etag(self):
if not self.etag_attribute:
return None
qs = self.get_filtered_queryset()
values = qs.values_list(self.etag_attribute, flat=True)
etag = ','.join('%s' % value for value in values)
return hashlib.md5(etag).hexdigest()
def create_http_response(self, data, collection=False, compound=False):
"""
Creates a HTTP response from the data.
The data might be an (a) HttpResponse object, (b) dict or (c)
object that can be serialized.
HttpResponse objects will simply be returned without further
processing, dicts will be turned into JSON and returned as a
response using the status attribute of the context. Other
objects will be serialized using ``serialize`` method.
"""
if isinstance(data, HttpResponse):
# No more processing necessary
return data
if isinstance(data, dict):
# How nice. Use it!
response_data = data
else:
# Everything else: run it through the serialization process
response_data = self.serialize(data, collection=collection, compound=compound)
json_data = self.create_json(response_data, indent=2)
status = self.context.status
content_type = self.get_content_type()
response = HttpResponse(json_data, content_type=content_type, status=status)
return self.postprocess_response(response, data, response_data, collection)
def serialize(self, data, collection=False, compound=False):
"""
Serializes the data.
Note that a serializer must have been registered with the name
of this resource or relationship, depending on the request type.
"""
name = self.get_resource_type()
context = self.context.__dict__
self_link = self.include_link_to_self
fields = self.context.resource_descriptor.fields
only = fields if fields else None
return serialize(name, data, many=collection, compound=compound, context=context, self_link=self_link, only=only)
def get_resource_type(self):
return self.resource_name
def handle_error(self, error, traceback=None):
# TODO Improve error reporting
error_object = {}
if isinstance(error, FormValidationError):
errors = []
for field, itemized_errors in error.form.errors.items():
composite = field == '__all__'
for e in itemized_errors:
detail = {'detail': '%s' % e}
if not composite:
detail['member'] = field
detail['member_label'] = '%s' % error.form.fields.get(field).label
errors.append(detail)
return HttpResponse(self.create_json({'errors': errors}), status=400)
if isinstance(error, Http404):
error_object['message'] = '%s' % error
return HttpResponse(self.create_json({'errors': [error_object]}), status=404)
if isinstance(error, JsonApiError):
error_object['message'] = '%s' % error
return HttpResponse(self.create_json({'errors': [error_object]}), status=500)
raise error.__class__, error, traceback
def postprocess_response(self, response, data, response_data, collection):
"""
If you need to do any further processing of the HttpResponse
objects, this is the place to do it.
"""
etag = self.generate_etag()
if etag:
response['ETag'] = quote_etag(etag)
response['Cache-Control'] = 'private, max-age=0'
return response
def get_resource(self):
"""
Grabs the resource for a resource request.
Maps to ``GET /posts/1``.
"""
filter = {self.get_pk_field(): self.context.pk}
return self.get_filtered_queryset().get(**filter)
def get_resources(self):
"""
Grabs the resources for a collection request.
Maps to ``GET /posts/1,2,3`` or ``GET /posts``.
"""
qs = self.get_filtered_queryset()
if self.context.pks:
filter = {'%s__in' % self.get_pk_field(): self.context.pks}
qs = qs.filter(**filter)
if self.context.pks and not qs.exists():
raise Http404()
return qs
def get_filtered_queryset(self):
qs = self.get_queryset()
if self.filter_class:
return self.filter_class(self.request.GET, queryset=qs).qs
return qs
def is_changed_besides(self, resource, model):
# TODO Perform simple diff of serialized model with resource
return False
def get_pk_field(self):
"""
Determines the name of the primary key field of the model.
Either set the ``pk_field`` on the class or override this method
when your model's primary key points to another field than the
default.
"""
return self.pk_field
def get_queryset(self):
"""
Get the list of items for this main resource.
This must be an iterable, and may be a queryset
(in which qs-specific behavior will be enabled).
"""
if self.queryset is not None:
queryset = self.queryset
if hasattr(queryset, '_clone'):
queryset = queryset._clone()
elif self.model is not None:
queryset = self.model._default_manager.all()
else:
raise ImproperlyConfigured("'%s' must define 'queryset' or 'model'"
% self.__class__.__name__)
return queryset
def get_content_type(self):
"""
Determines the content type of responses.
Override this method or set ``content_type`` on the class.
"""
return self.content_type
def create_get_context(self, request):
"""Creates the context for a GET request."""
pks = self.kwargs.get(self.pks_url_key, '')
pks = pks.split(',') if pks else []
fields = request.GET.get('fields')
fields = None if not fields else fields.split(',')
resource_descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks, fields=fields)
context = RequestContext(request, resource_descriptor)
context.update_mode('GET')
return context
def extract_resources(self, request):
"""
Extracts resources from the request body.
This should probably be moved elsewhere since it doesn't make
sense in a GET request. But still.
"""
body = request.body
if not body:
raise MissingRequestBody()
resource_name = self.resource_name
try:
data = self.parse_json(body)
if not resource_name in data:
raise InvalidDataFormat('Missing %s as key' % resource_name)
obj = data[resource_name]
if isinstance(obj, list):
resource = None
resources = obj
else:
resource = obj
resources = [obj]
return RequestPayloadDescriptor(resource_name, resources, resource)
except ValueError:
raise InvalidDataFormat()
def parse_json(self, data):
return json.loads(data)
def create_json(self, data, *args, **kwargs):
return json.dumps(data, *args, **kwargs)
def get_methods(self):
return self.methods
def context_manager(self):
if self.request.method in ['POST', 'PUT', 'DELETE', 'PATCH']:
return (transaction.atomic, [], {})
return (not_atomic, [], {})
class GetLinkedEndpoint(GetEndpoint):
relationship_name = None
relationship_pks_url_keys = None
relationship_pk_fields = None
@classmethod
def endpoint(cls, relationship_name=None, **initkwargs):
initkwargs['relationship_name'] = relationship_name
return csrf_exempt(cls.as_view(**initkwargs))
def dispatch(self, request, *args, **kwargs):
if not self.relationship_name:
self.relationship_name = kwargs.get('relationship')
return super(GetLinkedEndpoint, self).dispatch(request, *args, **kwargs)
def get(self, request, *args, **kwargs):
self.context = self.create_get_context(request)
collection = False
# We're dealing with a request for a related resource
if self.context.requested_single_related_resource or not self.context.to_many:
# Either a single relationship id was passed in or the
# relationship is a to-one
data = self.get_related_resource()
else:
# Multiple relationship ids or a to-many relationship
data = self.get_related_resources()
collection = True
return self.create_http_response(data, collection=collection)
def get_related_resource(self):
"""
Handles the retrieval of a related resource.
This will be called when either a single relationship instance
was requested or the relationship is to-one.
"""
qs = self.get_related_queryset()
if not self.context.to_many:
# Since it's not a to-many, we can simply return the value
return qs
pk_field = self.get_relationship_pk_field()
filter = {pk_field: self.context.relationship_pk}
return qs.get(**filter)
def get_related_resources(self):
"""
Handles the retrieval of multiple related resources.
This will be called when either a multiple relationship
instances were requested or no ids were supplied.
"""
qs = self.get_related_queryset().all()
if self.context.relationship_pks:
pk_field = self.get_relationship_pk_field()
filter = {'%s__in' % pk_field: self.context.relationship_pks}
qs = qs.filter(**filter)
if not qs.exists():
raise Http404()
return qs
def get_related_queryset(self):
field_name = self.get_related_field_name()
resource = self.get_resource()
return getattr(resource, field_name)
def get_resource_type(self):
return self.relationship_name
def create_get_context(self, request):
"""Creates the context for a GET request."""
pks = self.kwargs.get(self.pks_url_key, '')
pks = pks.split(',') if pks else []
rel_pks_url_key = self.get_relationship_pks_url_key()
rel_pks = self.kwargs.get(rel_pks_url_key, '')
rel_pks = rel_pks.split(',') if rel_pks else []
many = self.is_to_many_relationship()
rel_descriptor = RequestContext.create_relationship_descriptor(self.relationship_name, rel_pks, many)
resource_descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks, rel_descriptor)
context = RequestContext(request, resource_descriptor)
context.update_mode('GET')
return context
def get_related_field_name(self):
# TODO Use serializer to find correct name by default
return self.relationship_name
def get_relationship_pks_url_key(self):
rel_name = self.get_related_field_name()
keys = self.relationship_pks_url_keys
keys = keys if keys else {}
return keys.get(rel_name, 'rel_pks')
def get_relationship_pk_field(self):
rel_name = self.get_related_field_name()
fields = self.relationship_pk_fields
fields = fields if fields else {}
return fields.get(rel_name, 'pk')
def is_to_many_relationship(self):
rel_name = self.get_related_field_name()
if self.model:
model = self.model
elif self.queryset:
model = self.queryset.model
else:
model = self.get_queryset().model
meta = model._meta
field_object, model, direct, m2m = compat.get_field_by_name(meta, rel_name)
if direct:
return m2m
return field_object.field.rel.multiple
class WithFormMixin(object):
"""
Mixin supporting create and update of resources with a model form.
Note that it relies on some methods made available by the
GetEndpoint.
"""
form_class = None
def get_form_kwargs(self, **kwargs):
return kwargs
def get_form_class(self):
return self.form_class
def form_valid(self, form):
return form.save()
def form_invalid(self, form):
raise FormValidationError('', form=form)
def get_form(self, resource, instance=None):
"""Constructs a new form instance with the supplied data."""
data = self.prepare_form_data(resource, instance)
form_kwargs = {'data': data, 'instance': instance}
form_kwargs = self.get_form_kwargs(**form_kwargs)
form_class = self.get_form_class()
if not form_class:
raise ImproperlyConfigured('Missing form_class')
return form_class(**form_kwargs)
def prepare_form_data(self, resource, instance=None):
"""Last chance to tweak the data being passed to the form."""
if instance:
# The instance is converted to JSON and then loaded to ensure
# special encodings (like timezone-conversion) are performed
as_json = self.create_json(self.serialize(instance, compound=False))
original = json.loads(as_json)
original = original[self.resource_name]
merged = dict(original.items() + original.get('links', {}).items())
data = dict(resource.items() + resource.get('links', {}).items())
for field, value in data.items():
if value is None:
merged[field] = None
else:
merged[field] = value
return merged
return dict(resource.items() + resource.get('links', {}).items())
class PostMixin(object):
"""
Provides support for POST requests on resources.
The ``create_resource`` method must be implemented to actually do
something.
"""
def get_methods(self):
return super(PostMixin, self).get_methods() + ['post']
def post(self, request, *args, **kwargs):
self.context = self.create_post_context(request)
collection = False
payload = self.context.payload
if payload.many:
data = self.create_resources(payload.resources)
collection = True
else:
data = self.create_resource(payload.resource)
return self.create_http_response(data, collection=collection)
def create_post_context(self, request):
payload = self.extract_resources(request)
descriptor = RequestContext.create_resource_descriptor(self.resource_name)
context = RequestWithResourceContext(request, descriptor, payload, status=201)
context.update_mode('POST')
return context
def create_resources(self, resources):
return [self.create_resource(r) for r in resources]
def create_resource(self, resource):
"""Create the resource and return the corresponding model."""
pass
def postprocess_response(self, response, data, response_data, collection):
response = super(PostMixin, self).postprocess_response(response, data, response_data, collection)
if self.context.status != 201:
return response
pks = ','.join(pluck_ids(response_data, self.resource_name))
location = self.create_resource_url(pks)
response['Location'] = location
return response
def create_resource_url(self, pks):
kwargs = {self.pks_url_key: pks}
return reverse(self.get_url_name(), kwargs=kwargs)
def get_url_name(self):
return create_resource_view_name(self.resource_name)
class PostWithFormMixin(PostMixin, WithFormMixin):
"""
Provides an implementation of ``create_resource`` using a form.
"""
def create_resource(self, resource):
form = self.get_form(resource)
if form.is_valid():
return self.form_valid(form)
return self.form_invalid(form)
class PutMixin(object):
"""
Provides support for PUT requests on resources.
This supports both full and partial updates, on single and multiple
resources.
Requires ``update_resource`` to be implemented.
"""
def get_methods(self):
return super(PutMixin, self).get_methods() + ['put']
def put(self, request, *args, **kwargs):
self.context = self.create_put_context(request)
collection = False
payload = self.context.payload
if payload.many:
changed_more, data = self.update_resources(payload.resources)
collection = True
else:
changed_more, data = self.update_resource(payload.resource)
if not changed_more:
# > A server MUST return a 204 No Content status code if an update
# > is successful and the client's current attributes remain up to
# > date. This applies to PUT requests as well as POST and DELETE
# > requests that modify links without affecting other attributes
# > of a resource.
return HttpResponse(status=204)
return self.create_http_response(data, collection=collection)
def create_put_context(self, request):
pks = self.kwargs.get(self.pks_url_key, '')
pks = pks.split(',') if pks else []
payload = self.extract_resources(request)
descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks)
context = RequestWithResourceContext(request, descriptor, payload, status=200)
context.update_mode('PUT')
return context
def update_resources(self, resources):
updated = []
changed = []
for res in resources:
changed_more, result = self.update_resource(res)
updated.append(result)
changed.append(changed_more)
return any(changed), updated
def update_resource(self, resource):
pass
class PutWithFormMixin(PutMixin, WithFormMixin):
"""
Provides an implementation of ``update_resource`` using a form.
"""
def update_resource(self, resource):
resource_id = resource['id']
if resource_id not in self.context.pks:
message = 'Id %s in request body but not in URL' % resource_id
raise IdMismatch(message)
filter = {self.get_pk_field(): resource_id}
instance = self.get_queryset().get(**filter)
form = self.get_form(resource, instance)
if form.is_valid():
model = self.form_valid(form)
return self.is_changed_besides(resource, model), model
return self.form_invalid(form)
class DeleteMixin(object):
"""
Provides support for DELETE request on single + multiple resources.
"""
def get_methods(self):
return super(DeleteMixin, self).get_methods() + ['delete']
def delete(self, request, *args, **kwargs):
self.context = self.create_delete_context(request)
if not self.context.pks:
raise Http404('Missing ids')
# Although the default implementation defers DELETE request for
# both single and multiple resources to the ``perform_delete``
# method, we still split based on
if self.context.requested_single_resource:
not_deleted = self.delete_resource()
else:
not_deleted = self.delete_resources()
if not_deleted:
raise Http404('Resources %s not found' % ','.join(not_deleted))
return HttpResponse(status=204)
def create_delete_context(self, request):
pks = self.kwargs.get(self.pks_url_key, '')
pks = pks.split(',') if pks else []
descriptor = RequestContext.create_resource_descriptor(self.resource_name, pks)
context = RequestContext(request, descriptor)
context.update_mode('DELETE')
return context
def delete_resources(self):
return self.perform_delete(self.context.pks)
def delete_resource(self):
return self.perform_delete(self.context.pks)
def perform_delete(self, pks):
not_deleted = pks[:]
filter = {'%s__in' % self.get_pk_field(): pks}
for item in self.get_queryset().filter(**filter).iterator():
# Fetch each item separately to actually trigger any logic
# performed in the delete method (like implicit deletes)
not_deleted.remove('%s' % item.pk)
item.delete()
return not_deleted
class Endpoint(PostWithFormMixin, PutWithFormMixin, DeleteMixin, GetEndpoint):
"""
Ties everything together.
Use this base class when you need to support GET, POST, PUT and
DELETE and want to use a form to process incoming data.
"""
pass
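# --- Hedged usage sketch (editor's addition; the Post model, PostForm, and
# the resource name are hypothetical):
# class PostEndpoint(Endpoint):
#     resource_name = 'posts'
#     model = Post
#     form_class = PostForm
#
# Wired into urls.py, GET/POST/PUT/DELETE on /posts and /posts/1,2 then map to
# the mixin methods above, with Location and ETag headers filled in by
# postprocess_response().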
| bsd-2-clause | 7,528,123,250,925,826,000 | 35.183616 | 121 | 0.61851 | false | 4.340563 | false | false | false |
Zhang-O/small | tensor__cpu/http/spyser_liyou.py | 1 | 5473 | import urllib.request
from bs4 import BeautifulSoup
import re
import urllib.parse
import xlsxwriter
import pandas as pd
import numpy as np
from urllib import request, parse
from urllib.error import URLError
import json
import multiprocessing
import time
# URLs of the detail pages are collected here
urls_of_detail = []
total_pages = 0
# The fields to scrape are stored, in order, in these lists
_1 = []
_2 = []
_3 = []
_4 = []
_5 = []
issue_date_sum = []
project_address_sum = []
project_sector_sum = []
project_content_sum = []
company_name_sum = []
company_staff_sum = []
company_phone_sum = []
# Top-level (index) URL
url = 'http://www.stc.gov.cn/ZWGK/TZGG/GGSB/'
# page is the zero-based page number
def get_urls(url,page):
    # Build the form data
# postdata = urllib.parse.urlencode({'currDistrict': '', 'pageNo': page,'hpjgName_hidden':'','keyWordName':''})
# postdata = postdata.encode('utf-8')
#
    # # Send the request
# response = urllib.request.urlopen(url, data=postdata)
# html_cont = response.read()
if page == 0:
url = url + 'index.htm'
else:
url = url + 'index_' + str(page) + '.htm'
req = request.Request(url=url)
res_data = request.urlopen(req)
# print(res_data)
html_cont = res_data.read()
    # Parse the document tree
soup = BeautifulSoup(html_cont, 'html.parser', from_encoding='utf-8')
#
    # # Use a regex to find the elements holding the second-level URLs
trs = soup.find_all('a', href=re.compile(r"^./201"))
    # # Store the second-level URLs in urls_of_detail
for i in trs:
# print(i['href'][2:])
urls_of_detail.append(i['href'][2:])
def get_info(url,second_url):
# s = urllib.request.urlopen(urls_of_detail[0])
    # Fetch the document
second_url = url + second_url
s = urllib.request.urlopen(second_url)
    # Parse the document
soup = BeautifulSoup(s, 'html.parser', from_encoding='utf-8')
    # The target fields sit in td elements with no unique markers; find all tds and note each field's index in the list
div = soup.find_all('div', class_=re.compile(r"TRS_Editor"))
trs = div[0].find_all('tr')
trs = trs[1:]
# print(trs[0])
print('trs num',len(trs))
for tr in trs:
tds = tr.find_all('td')
if len(tds[0].find_all('font')) > 0 :
if tds[3].find_all('font')[0].string == None:
print(second_url)
_1.append(tds[0].find_all('font')[0].string)
_2.append(tds[1].find_all('font')[0].string)
_3.append(tds[2].find_all('font')[0].string)
_4.append(tds[3].find_all('font')[0].string)
if len(tds) == 5:
_5.append(tds[4].find_all('font')[0].string)
else:
_5.append('null')
elif len(tds[0].find_all('p')) > 0 :
# if tds[3].find_all('p')[0].string == None:
# print(second_url)
_1.append(tds[0].find_all('p')[0].string)
_2.append(tds[1].find_all('p')[0].string)
_3.append(tds[2].find_all('p')[0].string)
if len(tds[3].find_all('p')) > 0:
_4.append(tds[3].find_all('p')[0].string)
else:
_4.append(tds[3].string)
if len(tds) == 5:
                _5.append(tds[4].string)  # take the text, consistent with the other branches
else:
_5.append('null')
else:
if tds[3].string == None:
print(second_url)
_1.append(tds[0].string)
_2.append(tds[1].string)
            _3.append(tds[2].string)
_4.append(tds[3].string)
if len(tds) == 5:
_5.append(tds[4].string)
else:
_5.append('null')
# elif len(tds[0].find_all('td'))
# print(len(tds))
# print(tds[0].string)
# print(tds[1].string)
# print(tds[2].string)
# print(tds[3].string)
# print(response.read().decode('utf-8','ignore'))
# The site reports 1036 pages in total
num0 = 0
for page in range(0,7):
num0 += 1
# print(num0)
get_urls(url, page)
# Persist all second-level URLs to a text file
with open('urls_all_liyou','w') as f:
f.write(str(urls_of_detail))
# print(len(urls_of_detail))
# print(len(set(urls_of_detail)))
print('urls num :' , len(urls_of_detail))
num = 0  # mainly for debugging: if the crawl fails, this shows which URL broke
for second_url in urls_of_detail:
num += 1
print('page num : ', num)
if num in [15,42]:
continue
if num > 54:
break
get_info(url, second_url)
print('end ----------')
print(len(_1))
workbook = xlsxwriter.Workbook('./liyou.xlsx')
# 1. ------------------ create a worksheet to hold the detailed records -------------------------------
ws = workbook.add_worksheet('liyou')
# Set column widths
ws.set_column('A:A', 25)
ws.set_column('B:B', 25)
ws.set_column('C:C', 15)
ws.set_column('D:D', 15)
ws.set_column('E:E', 15)
# Write the header row
ws.write(0, 0, '序号')    # serial number
ws.write(0, 1, '区域')    # district
ws.write(0, 2, '类型')    # type
ws.write(0, 3, '设置地点')  # installation location
ws.write(0, 4, '方向')    # direction
number = len(_1)
for i in range(number):
ws.write(i + 1, 0, str(_1[i]))
ws.write(i + 1, 1, str(_2[i]))
ws.write(i + 1, 2, str(_3[i]))
ws.write(i + 1, 3, str(_4[i]))
ws.write(i + 1, 4, str(_5[i]))
workbook.close()
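# --- Hedged robustness sketch (editor's addition): instead of the hard-coded
# skip list above, each detail page could be wrapped in try/except so a single
# bad page does not abort the crawl:
# for num, second_url in enumerate(urls_of_detail, start=1):
#     try:
#         get_info(url, second_url)
#     except Exception as e:
#         print('page', num, 'failed:', e)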
| mit | -3,506,324,734,389,136,400 | 22.686916 | 115 | 0.533241 | false | 2.440539 | false | false | false |