repo_name (string, 6-100 chars) | path (string, 4-294 chars) | copies (string, 1-5 chars) | size (string, 4-6 chars) | content (string, 606-896k chars) | license (15 classes) | var_hash (int64) | doc_hash (int64) | line_mean (float64, 3.5-99.8) | line_max (int64, 13-999) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---|
Antiun/odoomrp-wip | quality_control_force_valid/__openerp__.py | 19 | 1379 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (c)
# 2014 Serv. Tec. Avanzados - Pedro M. Baeza (http://www.serviciosbaeza.com)
# 2014 AvanzOsc (http://www.avanzosc.es)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Quality control - Manual validation",
"version": "1.0",
"depends": [
"quality_control",
],
"author": "OdooMRP team,"
"AvanzOSC,"
"Serv. Tecnol. Avanzados - Pedro M. Baeza",
"category": "Quality control",
'data': [
'views/qc_inspection_view.xml',
],
'installable': True,
}
| agpl-3.0 | -8,407,317,227,055,665,000 | 308,907,294,390,832,300 | 36.27027 | 79 | 0.57578 | false |
zeroincombenze/tools | zar/restdb.py | 2 | 29848 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) SHS-AV s.r.l. (<http://www.zeroincombenze.it>)
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
r"""Restore database files from Production Machine to Development Machine
Make the two servers nearly identical, ready to use
May be used to create a mirror server of Zeroincombenze®
Translation file rules (restconf.ini).
every line has the following format: filename \t src \t tgt
where filename may be:
#
    every line beginning with '#' is a remark
realfilename (e.g. http.conf)
    every 'src' text is replaced by 'tgt' text
sqlname->wp (e.g. mysite.sql->wp)
    every 'src' is a wp param and 'tgt' is its value
sqlname->wiki (e.g. mysite.sql->wiki)
    every 'src' is a wikimedia param and 'tgt' is its value
sqlname/ (e.g. mysite.sql/)
    every line is an SQL statement to execute at the end;
    spaces are written with the escape \ character (e.g. update\ table ...)
"""
# import pdb
import os
import os.path
import sys
import glob
from datetime import date, datetime, timedelta
import time
import string
import re
from . import zarlib
try:
from os0 import os0
except ImportError:
import os0
__version__ = "1.3.35.1"
def version():
return __version__
class Restore_Image:
def __init__(self, ctx):
self.hostname = ctx['hostname']
os0.set_debug_mode(ctx['dbg_mode'])
self.prodhost = ctx['production_host']
self.devhost = ctx['development_host']
self.mirrorhost = ctx['mirror_host']
self.pgdir = ctx['pg_dir']
self.mysqldir = ctx['mysql_dir']
homedir = os.path.expanduser("~")
self.ftp_cfn = homedir + "/" + ctx['ftp_script']
self.flist = homedir + "/" + ctx['list_file']
os0.set_tlog_file(ctx['logfn'])
# Log begin execution
os0.wlog("Restore database files", __version__)
# Simulate backup
self.dry_run = ctx['dry_run']
if ctx['saveset'] == "bckdb" or \
ctx['saveset'] == "bckconf" or \
ctx['saveset'] == "bckwww":
if self.hostname == self.prodhost:
os0.wlog("Running on production machine")
if ctx['alt']:
self.bck_host = self.mirrorhost
self.fconf = homedir + "/" + \
ctx['no_translation']
else:
self.bck_host = self.devhost
self.fconf = homedir + "/" + \
ctx['data_translation']
elif self.hostname == self.mirrorhost:
os0.wlog("Running on mirror machine")
if ctx['alt']:
self.bck_host = self.prodhost
self.fconf = homedir + "/" + \
ctx['no_translation']
else:
self.bck_host = self.devhost
self.fconf = homedir + "/" + \
ctx['data_translation']
elif self.hostname == self.devhost:
os0.wlog("This command cannot run on development machine")
if not ctx['dry_run']:
raise Exception("Command aborted due invalid machine")
else:
os0.wlog("Unknown machine - Command aborted")
if not ctx['dry_run']:
raise Exception("Command aborted due unknown machine")
elif ctx['saveset'] == "restdb" or \
ctx['saveset'] == "restconf" or \
ctx['saveset'] == "restwww":
if self.hostname == self.prodhost:
os0.wlog("This command cannot run on production machine")
if not ctx['dry_run']:
raise Exception("Command aborted due production machine")
elif self.hostname == self.mirrorhost:
os0.wlog("Running on mirror machine")
if ctx['alt']:
self.bck_host = self.prodhost
self.fconf = homedir + "/" + \
ctx['no_translation']
else:
self.bck_host = self.devhost
self.fconf = homedir + "/" + \
ctx['data_translation']
elif self.hostname == self.devhost:
os0.wlog("Running on development machine")
if ctx['alt']:
self.bck_host = self.mirrorhost
self.fconf = homedir + "/" + \
ctx['data_translation']
else:
self.bck_host = self.devhost
self.fconf = homedir + "/" + \
ctx['data_translation']
else:
os0.wlog("Unknown machine - Command aborted")
if not ctx['dry_run']:
raise Exception("Command aborted due unknown machine")
# May be (.gz or .bz2)
self.tar_ext = ctx['tar_ext']
# May be (z or j)
self.tar_opt = ctx['tar_opt']
# May be (null or .sql)
self.pre_ext = ctx['pre_ext']
# May be (null or .sql)
self.sql_ext = ctx['sql_ext']
self.psql_uu = ctx['pgsql_user']
self.psql_db = ctx['pgsql_def_db']
self.mysql_uu = ctx['mysql_user']
self.mysql_db = ctx['mysql_def_db']
self.pid = os.getpid()
self.ftp_rootdir = ""
self.ftp_dir = ""
self.dbtype = ""
self.create_dict()
def create_dict(self):
self.dict = {}
self.xtl = {}
self.seed = 0
try:
cnf_fd = open(self.fconf, "r")
line = cnf_fd.readline()
while line != "":
i = line.rfind('\n')
if i >= 0 and line[0:1] != "#":
line = line.replace("\\ ", "\\b")
line = re.sub('\\s+', ' ', line).strip()
f = string.split(line, ' ')
self.add_dict_entr(f[0], f[1], f[2])
line = cnf_fd.readline()
cnf_fd.close()
except:
os0.wlog("No dictionary file", self.fconf, "found!")
def add_dict_entr(self, name, src, tgt):
self.seed = self.seed + 1
key = "{0:06d}".format(self.seed)
val = (src, tgt)
if name in self.dict:
self.dict[name].append(key)
else:
self.dict[name] = [key]
self.xtl[key] = val
# os0.wlog("> s|{0}|{1}|g {2}!".format(src, tgt, name))
def search4item(self, item):
if item in self.dict:
return self.dict[item]
else:
return None
def restore_file(self, fqn):
# pdb.set_trace()
dbtype = ""
# Extract dir if supplied
p = os.path.dirname(fqn)
f = os.path.basename(fqn) # Just filename
# No dir supplied
if p == "":
p = self.ftp_dir
elif p == "/var/lib/pgsql/backups":
dbtype = "psql"
elif p == "/var/lib/mysql/backups":
dbtype = "mysql"
if dbtype != self.dbtype:
if dbtype == "psql":
cmd = "service postgresql restart"
os0.trace_debug("$", cmd)
os0.muteshell(cmd,
simulate=self.dry_run,
keepout=os0.debug_mode)
elif dbtype == "mysql":
cmd = "service mysqld restart"
os0.trace_debug("$", cmd)
os0.muteshell(cmd,
simulate=self.dry_run,
keepout=os0.debug_mode)
if p != self.ftp_dir: # Change dir
self.chdir(p) # Set directory
llen = len(self.sql_ext) + 9
# i = len(f) - llen
# Extract dbname from XXXXX-YYYYMMDD.SQL
dbname = f[0:-llen]
# if dbname == "wp-zi-it":
# os0.wlog(" db", dbname, "not upgradable!!!")
if os.path.isfile(f):
self.restore_db(dbtype, dbname, fqn)
else:
os0.wlog(" file", f, "not found!!!")
def get_params(self, f):
ctx = {}
ctx['prefix'] = ""
ctx['siteURL'] = ""
ctx['testURL'] = ""
ctx['siteURI'] = ""
ctx['testURI'] = ""
ctx['admin_email'] = ""
ctx['conf_file'] = ""
ctx['conf_file2'] = ""
ctx['conf_file3'] = ""
ctx['index_html'] = ""
key_ids = self.search4item(f)
if key_ids:
# fxch = True
# Text couples for substitution
for key in key_ids:
src = self.xtl[key][0]
src = src.replace("\\b", " ")
tgt = self.xtl[key][1]
tgt = tgt.replace("\\b", " ")
if src == ".prefix":
ctx['prefix'] = tgt
elif src == ".siteURL":
ctx['siteURL'] = tgt
i = ctx['siteURL'].find(".")
if i < 0:
ctx['siteURL'] = "http://www." + ctx['siteURL']
i = ctx['siteURL'].find(":")
if i < 0:
ctx['siteURL'] = "http://" + ctx['siteURL']
i = ctx['siteURL'].find(".")
if ctx['admin_email'] == "":
ctx['admin_email'] = "postmaster@" + \
ctx['siteURL'][i + 1:]
if ctx['testURL'] == "":
ctx['testURL'] = ctx['siteURL'][0:i] + \
"1" + ctx['siteURL'][i:]
if ctx['siteURI'] == "":
x = ctx['siteURL'].split("://")
ctx['siteURI'] = x[1]
if ctx['testURI'] == "":
x = ctx['testURL'].split("://")
ctx['testURI'] = x[1]
elif src == ".testURL":
ctx['testURL'] = tgt
x = ctx['testURL'].split("://")
ctx['testURI'] = x[1]
elif src == ".siteURI":
ctx['siteURI'] = tgt
elif src == ".testURI":
ctx['testURI'] = tgt
elif src == ".admin_email":
ctx['admin_email'] = tgt
elif src == ".conf_file":
ctx['conf_file'] = tgt
elif src == ".conf_file2":
ctx['conf_file2'] = tgt
elif src == ".conf_file3":
ctx['conf_file3'] = tgt
elif src == ".index_html":
ctx['index_html'] = tgt
else:
raise ValueError('Invalid param {0}!'.format(src))
return ctx
def repl_data_wp(self, ctx, fqn_str):
os0.trace_debug(
"> update URL (wp) {0}->{1}"
.format(ctx['siteURL'], ctx['testURL']))
stmt = "update {0}options set option_value='{1}'"\
" where option_name='{2}'"\
.format(ctx['prefix'], ctx['testURL'], "siteurl")
fqn_str = fqn_str + stmt + ";\n"
stmt = "update {0}options set option_value='{1}'"\
" where option_name='{2}'"\
.format(ctx['prefix'], ctx['testURL'], "home")
fqn_str = fqn_str + stmt + ";\n"
stmt = "update {0}options set option_value='{1}/'"\
" where option_name='{2}'"\
.format(ctx['prefix'], ctx['testURL'], "ga_default_domain")
fqn_str = fqn_str + stmt + ";\n"
stmt = "update {0}options set option_value='{1}'"\
" where option_name='{2}'"\
.format(ctx['prefix'], ctx['siteURI'], "ga_root_domain")
fqn_str = fqn_str + stmt + ";\n"
stmt = "update {0}options set option_value='{1}'"\
" where option_name='{2}'"\
.format(ctx['prefix'], ctx['admin_email'], "admin_email")
fqn_str = fqn_str + stmt + ";\n"
stmt = "update {0}options set option_value='{1}'"\
" where option_name='{2}'"\
.format(ctx['prefix'], "0", "blog_public")
fqn_str = fqn_str + stmt + ";\n"
src_str = ctx['siteURL']
ix = fqn_str.find(src_str)
while ix >= 0:
llen = len(ctx['siteURL'])
j = ix - 1
sep = ' '
while sep == ' ':
while fqn_str[j] != '\"' and fqn_str[j] != '\'':
j = j - 1
sep = fqn_str[j]
j = j - 1
if fqn_str[j] == '\\':
if sep == '\'':
sep = ' '
else:
j = j - 1
if fqn_str[j] != ':':
sep = ' '
else:
j = j - 1
if sep == '\"':
ix1 = j + 1
while fqn_str[j].isdigit():
j = j - 1
n = fqn_str[j + 1:ix1]
i = int(n)
if i >= llen:
src = fqn_str[j + 1:ix] + ctx['siteURL']
j = len(ctx['testURL'])
n = str(i + j - llen)
tgt = n + fqn_str[ix1:ix] + ctx['testURL']
os0.trace_debug(
"> sed|{0}|{1}|".format(src, tgt))
fqn_str = fqn_str.replace(src, tgt)
ix = fqn_str.find(src_str, ix + 1)
return fqn_str
def repl_data(self, dbname, fqn):
fzero = False
try:
fqn_fd = open(fqn, 'r')
# Go to end of file
fqn_fd.seek(0, os.SEEK_END)
# File len = 0 ?
if fqn_fd.tell() == 0:
fzero = True
# Go to begin of file
fqn_fd.seek(0, 0)
# Read entire file
fqn_str = fqn_fd.read()
fqn_fd.close()
except:
fzero = True
if fzero:
os0.wlog(" file", fqn, "empty!!!")
else:
fxch = False
# Search for text substitution (Wordpress)
f = dbname + "->wp"
ctx = self.get_params(f)
if ctx['prefix'] != "" and ctx['siteURL'] != "":
fxch = True
fqn_str = self.repl_data_wp(ctx, fqn_str)
# Search for sql command to append
f = dbname + "/"
key_ids = self.search4item(f)
if key_ids:
fxch = True
# Text couples for substitution
for key in key_ids:
src = self.xtl[key][0]
src = src.replace("\\b", " ")
tgt = self.xtl[key][1]
tgt = tgt.replace("\\b", " ")
os0.trace_debug(">", src, tgt, ";")
fqn_str = fqn_str + src + " " + tgt + ";\n"
# Search for text substitution in SQL statements
f = dbname + self.sql_ext
key_ids = self.search4item(f)
if key_ids:
fxch = True
# Text couples for substitution
for key in key_ids:
src = self.xtl[key][0]
src = src.replace("\\b", " ")
tgt = self.xtl[key][1]
tgt = tgt.replace("\\b", " ")
os0.trace_debug("> sed|{0}|{1}|".format(src, tgt))
fqn_str = fqn_str.replace(src, tgt)
if fxch:
fqn_fd = open(fqn, 'w')
fqn_fd.write(fqn_str)
fqn_fd.close()
f = dbname + "->wiki"
ctx = self.get_params(f)
if ctx['siteURL'] != "":
fqns = ctx['conf_file'].split(',')
for fqn in fqns:
self.replace_file(ctx, f, fqn)
if ctx['conf_file2']:
fqns = ctx['conf_file2'].split(',')
for fqn in fqns:
self.replace_file(ctx, f, fqn)
if ctx['conf_file3']:
fqns = ctx['conf_file3'].split(',')
for fqn in fqns:
self.replace_file(ctx, f, fqn)
fqn = ctx['index_html']
self.replace_file(ctx, f, fqn)
def replace_file(self, ctx, f, fqn):
os0.trace_debug("> replace file", fqn)
try:
fn_fd = open(fqn, 'r')
fn_str = fn_fd.read()
fn_fd.close()
key_ids = self.search4item(f)
if key_ids:
src = ctx['siteURL']
tgt = ctx['testURL']
fn_str = fn_str.replace(src, tgt)
src = ctx['siteURI']
tgt = ctx['testURI']
fn_str = fn_str.replace(src, tgt)
fn_fd = open(fqn, 'w')
fn_fd.write(fn_str)
fn_fd.close()
except:
pass
def restore_db(self, dbtype, dbname, fqn):
# pdb.set_trace()
os0.wlog(" restoring", dbname, " ({0})".format(fqn))
homedir = os.path.expanduser("~")
tar_ext = self.tar_ext
tar_opt = self.tar_opt
fzip_fn = dbname + tar_ext
if not os.path.isfile(fzip_fn):
if self.tar_ext == ".gz":
tar_ext = ".bz2"
tar_opt = "j"
fzip_fn = dbname + tar_ext
if not os.path.isfile(fzip_fn):
tar_ext = self.tar_ext
tar_opt = self.tar_opt
# No compressed file found
fzip_fn = ""
elif self.tar_ext == ".bz2":
tar_ext = ".gz"
tar_opt = "z"
fzip_fn = dbname + tar_ext
if not os.path.isfile(fzip_fn):
tar_ext = self.tar_ext
tar_opt = self.tar_opt
# No compressed file found
fzip_fn = ""
f = os.path.basename(fqn) # Just filename
llen = len(self.sql_ext) + 9
i = len(f) - llen
# Extract date (YYYYMMDD) from XXXXX-YYYYMMDD.SQL
dts = f[i + 1:i + 9]
if dbtype == "psql":
cmd = "chown " + self.psql_uu + ":" + self.psql_uu + " " + fqn
elif dbtype == "mysql":
cmd = "chown " + self.mysql_uu + ":" + self.mysql_uu + " " + fqn
os0.trace_debug("$", cmd)
os0.muteshell(cmd, simulate=self.dry_run, keepout=os0.debug_mode)
sql_fn = homedir + "/restdb.sql"
cmd = "cp " + fqn + " " + sql_fn
os0.trace_debug("$", cmd)
os0.muteshell(cmd, simulate=self.dry_run, keepout=os0.debug_mode)
# cmd = "sed -i -e \"s|Owner: openerp|Owner: odoo|g\""\
# " -e \"s|OWNER TO openerp|OWNER TO odoo|g\" ~/restdb.sql"
# os0.trace_debug("$", cmd)
# os0.muteshell(cmd, simulate=self.dry_run, keepout=os0.debug_mode)
if dbtype == "psql":
cmd = "chown " + self.psql_uu + ":" + self.psql_uu + " " + sql_fn
elif dbtype == "mysql":
cmd = "chown " + self.mysql_uu + ":" + self.mysql_uu + " " + sql_fn
os0.trace_debug("$", cmd)
os0.muteshell(cmd, simulate=self.dry_run, keepout=os0.debug_mode)
self.repl_data(dbname, sql_fn)
psh_fn = homedir + "/restdb.psh"
psh_fd = open(psh_fn, "w")
if dbtype == "psql":
user = self.psql_uu
defdb = self.psql_db
psh_fd.write("\\c {0}\n".format(defdb))
psh_fd.write(
"DROP DATABASE IF EXISTS \"{0}-{1}\";\n".format(dbname, dts))
psh_fd.write("DROP DATABASE IF EXISTS \"{0}\";\n".format(dbname))
psh_fd.write(
"CREATE DATABASE \"{0}\" TEMPLATE template1;\n".format(dbname))
psh_fd.write("\\c \"{0}\"\n".format(dbname))
psh_fd.write("\\i {0}\n".format(sql_fn))
psh_fd.write(
"ALTER DATABASE \"{0}\" OWNER TO odoo;\n".format(dbname))
cmd = "psql -f " + psh_fn + " -U" + user + " " + defdb
psh_fd.close()
os0.trace_debug("$", cmd)
os0.muteshell(cmd, simulate=self.dry_run, keepout=os0.debug_mode)
elif dbtype == "mysql":
user = "root"
pwd = "SHS13mgr"
# defdb = self.psql_db
psh_fd.write(
"mysqladmin -u{0} --password={1} -f drop \"{2}-{3}\" ||true\n"
.format(user, pwd, dbname, dts))
psh_fd.write(
"mysqladmin -u{0} --password={1} -f drop \"{2}\" || true\n"
.format(user, pwd, dbname))
psh_fd.write(
"mysqladmin -u{0} --password={1} -f create \"{2}\"\n"
.format(user, pwd, dbname))
psh_fd.write(
"mysql -u{0} --password=SHS13mgr -G -e \"source {1}\" {2}\n"
.format(user, sql_fn, dbname))
psh_fd.close()
cmd = "chmod +x " + psh_fn
os0.muteshell(cmd, simulate=self.dry_run, keepout=os0.debug_mode)
cmd = psh_fn
os0.trace_debug("$", cmd)
os0.muteshell(cmd, simulate=self.dry_run, keepout=os0.debug_mode)
else:
os0.wlog(" unknown", dbname, "database type!!!")
cmd = "echo Error"
# Compressed file found
if fzip_fn != "":
if dbtype == "psql":
cmd = "chown " + self.psql_uu + \
":" + self.psql_uu + " " + fzip_fn
elif dbtype == "mysql":
cmd = "chown " + self.mysql_uu + \
":" + self.mysql_uu + " " + fzip_fn
os0.muteshell(cmd, simulate=self.dry_run, keepout=os0.debug_mode)
cmd = "tar --keep-newer-files -x" + tar_opt + "f " + fzip_fn
os0.muteshell(cmd, simulate=self.dry_run, keepout=os0.debug_mode)
if not self.dry_run:
os.remove(fzip_fn)
self.purge_db(dbtype, dbname)
def purge_db(self, dbtype, f):
# pdb.set_trace()
if self.sql_ext != self.pre_ext:
self.change_file_ext(f)
dtc = date.today() - timedelta(90)
os0.wlog(" removing file older than", dtc.strftime("%Y-%m-%d"))
fzip_fn = f + self.tar_ext
force_change_ext = False
for i in range(180, 120, -1):
dtc = datetime.today() - timedelta(i)
dts = dtc.strftime("%Y%m%d")
fsql = f + "-" + dts + self.sql_ext
if not os.path.isfile(fsql) and self.sql_ext != self.pre_ext:
ftmp = f + "-" + dts + self.pre_ext
if os.path.isfile(ftmp):
try:
os0.wlog("$ mv", ftmp, fsql)
if not self.dry_run:
                            # Rename old ext -> new ext
os.rename(ftmp, fsql)
# Force change sql file extension
force_change_ext = True
except:
pass
if dtc.day != 1:
if not self.remove_sql_file(fsql) \
and self.sql_ext != self.pre_ext:
fsql = f + "-" + dts + self.pre_ext
self.remove_sql_file(fsql)
if force_change_ext:
self.change_file_ext(f)
fsql = f + "-????????" + self.sql_ext
if dbtype == "psql":
cmd = "chown " + self.psql_uu + ":" + self.psql_uu + " " + fsql
elif dbtype == "mysql":
cmd = "chown " + self.mysql_uu + ":" + self.mysql_uu + " " + fsql
os0.trace_debug("$ ", cmd)
os0.muteshell(cmd, simulate=self.dry_run)
cmd = "tar --remove-files -c" + \
self.tar_opt + "f " + fzip_fn + " " + fsql
os0.trace_debug("$ ", cmd)
os0.muteshell(cmd, simulate=self.dry_run)
if dbtype == "psql":
cmd = "chown " + self.psql_uu + ":" + self.psql_uu + " " + fzip_fn
elif dbtype == "mysql":
cmd = "chown " + self.mysql_uu + \
":" + self.mysql_uu + " " + fzip_fn
os0.trace_debug("$ ", cmd)
os0.muteshell(cmd, simulate=self.dry_run)
os0.wlog(" removing archived files")
fsql = f + "-????????" + self.sql_ext
f_ids = sorted(glob.glob(fsql))
for fsql in f_ids:
self.remove_sql_file(fsql)
def change_file_ext(self, f):
os0.wlog(" changing extension files")
fsql = f + "-????????" + self.pre_ext
f_ids = glob.glob(fsql)
for f in f_ids:
llen = len(f) - len(self.pre_ext)
fsql = f[0:llen] + self.sql_ext
if not os.path.isfile(fsql):
ftmp = f
if os.path.isfile(ftmp):
try:
os0.wlog("$ mv", ftmp, fsql)
if not self.dry_run:
                            # Rename old ext -> new ext
os.rename(ftmp, fsql)
except:
pass
def remove_sql_file(self, fsql):
try:
fzip_fd = open(fsql, "r")
fzip_fd.close()
os0.trace_debug("$ rm", fsql)
if not self.dry_run:
os.remove(fsql)
sts = True
except:
sts = False
return sts
def extract_fn_2_restore(self):
file_2_restore = ""
ls_fd = open(self.flist, "r+")
p = ls_fd.tell()
fl = ls_fd.readline()
# f_copy = False
while fl != "":
i = fl.rfind('\n')
if file_2_restore == "" and i >= 0 and fl[0:1] != '#':
f = fl[0:i]
file_2_restore = f
f = "#" + f[1:]
ls_fd.seek(p, os.SEEK_SET)
ls_fd.write(f)
p = ls_fd.tell()
fl = ls_fd.readline()
ls_fd.close()
return file_2_restore
def commit_fn_restored(self):
ftmp = self.flist + ".lst"
fbak = self.flist + ".bak"
if os.path.isfile(ftmp):
fn_fd = open(ftmp, 'r')
fzero = True
fl = fn_fd.readline()
while fl != "" and fzero:
i = fl.rfind('\n')
if i >= 0:
fzero = False
fl = fn_fd.readline()
fn_fd.close()
if not fzero:
cmd = "rm -f {2}; mv {0} {2}; mv {1} {0}".format(
self.flist, ftmp, fbak)
os0.trace_debug("$ ", cmd)
os0.muteshell(cmd, simulate=self.dry_run)
else:
if not self.dry_run:
os.remove(ftmp)
def chdir(self, path):
# Change root dir
lpath = os0.setlfilename(path)
os0.wlog(" [{0}]".format(lpath))
self.set_chdir(lpath)
self.ftp_dir = path # Remember dir
def set_chdir(self, path):
# Exec chdir and store into ftp script
os.chdir(path)
def main():
"""Tool main"""
sts = 0
# pdb.set_trace()
ctx = zarlib.parse_args(sys.argv[1:],
version=version(),
doc=__doc__)
if ctx['do_list']:
print ctx['saveset_list']
return sts
RI = Restore_Image(ctx)
f_alrdy_run = zarlib.check_if_running(ctx, RI.pid)
if f_alrdy_run:
os0.wlog("({0}) ***Another instance is running!!!".format(RI.pid))
# Restore files
file_r_ctr = 0
file_u_ctr = 0
time_wait = 60
wait_loop = 3
if not f_alrdy_run:
fl = RI.extract_fn_2_restore()
loop_ctr = wait_loop
while loop_ctr > 0:
if fl != "":
file_r_ctr = file_r_ctr + 1
if os.path.isfile(fl):
RI.restore_file(fl)
file_u_ctr += 1
if file_u_ctr > 1:
wait_loop = 60
loop_ctr = wait_loop
else:
os0.wlog(" file", fl, "not found!!!")
RI.commit_fn_restored()
fl = RI.extract_fn_2_restore()
if fl == "":
os0.wlog(" wait for next db")
time.sleep(time_wait)
loop_ctr -= 1
if not ctx['dbg_mode'] and os.path.isfile(os0.setlfilename(os0.bgout_fn)):
os.remove(os0.setlfilename(os0.bgout_fn))
if not f_alrdy_run:
os0.wlog("Restore DB ended."
" {0} DB to restore, {1} DB restored ({2})."
                 .format(file_r_ctr, file_u_ctr, RI.pid))
return sts
if __name__ == "__main__":
sts = main()
sys.exit(sts)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -7,032,537,364,860,649,000 | -2,301,661,619,806,524,200 | 37.265385 | 79 | 0.441049 | false |
iruga090/python-social-auth | social/pipeline/disconnect.py | 88 | 1082 | from social.exceptions import NotAllowedToDisconnect
def allowed_to_disconnect(strategy, user, name, user_storage,
association_id=None, *args, **kwargs):
if not user_storage.allowed_to_disconnect(user, name, association_id):
raise NotAllowedToDisconnect()
def get_entries(strategy, user, name, user_storage, association_id=None,
*args, **kwargs):
return {
'entries': user_storage.get_social_auth_for_user(
user, name, association_id
)
}
def revoke_tokens(strategy, entries, *args, **kwargs):
revoke_tokens = strategy.setting('REVOKE_TOKENS_ON_DISCONNECT', False)
if revoke_tokens:
for entry in entries:
if 'access_token' in entry.extra_data:
backend = entry.get_backend(strategy)(strategy)
backend.revoke_token(entry.extra_data['access_token'],
entry.uid)
def disconnect(strategy, entries, user_storage, *args, **kwargs):
for entry in entries:
user_storage.disconnect(entry)
| bsd-3-clause | -2,619,395,549,354,550,000 | -2,784,344,721,105,733,000 | 33.903226 | 74 | 0.624769 | false |
Hubert51/AutoGrading | learning/number_recognization/test.py | 1 | 1250 | from pytesseract import image_to_string
from PIL import Image
import cv2
import numpy
import sys
if __name__ == '__main__':
f = open("test1.txt")
f = f.read()
for element in f:
str1 = element
position = ((712, 571), (725, 587))
dh = position[1][1] - position[0][1]
upper = position[0][1] - 2 * dh
lower = position[1][1] + int(3.5 * dh)
left = position[1][0]
print(upper,lower, left)
img = cv2.imread('answerSheet_with_name.png')
#image = Image.open('answerSheet_with_name.png')
    img = img[upper:lower, left:img.shape[1]]
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(gray,(5,5),0)
thresh = cv2.adaptiveThreshold(blur,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,11,6)
cv2.imshow("hello", img)
################# Now finding Contours ###################
img,contours,hierarchy = cv2.findContours(thresh,cv2.RETR_LIST,cv2.CHAIN_APPROX_SIMPLE)
cv2.drawContours(img, contours, -1, (0, 0, 255),1)
im = Image.fromarray(img, 'RGB')
file = open("image_to_string.txt", "w")
# box = image_to_string(image).split('\n')
file.write(image_to_string(im))
#file.write(image_to_string(image))
file.close()
| mit | 6,503,599,553,643,639,000 | 1,834,760,060,903,469,000 | 26.777778 | 98 | 0.6064 | false |
bblais/Tech-SIE | Estimating_Proportion/Estimating_Proportion.py | 1 | 4755 |
# coding: utf-8
# #Statistical Inference for Everyone: Technical Supplement
#
#
#
# This document is the technical supplement, for instructors, for [Statistical Inference for Everyone], the introductory statistical inference textbook from the perspective of "probability theory as logic".
#
# <img src="http://web.bryant.edu/~bblais/images/Saturn_with_Dice.png" align=center width = 250px />
#
# [Statistical Inference for Everyone]: http://web.bryant.edu/~bblais/statistical-inference-for-everyone-sie.html
#
# ## Estimating a Proportion
#
# $$\newcommand{\twocvec}[2]{\left(\begin{array}{c}
# #1 \\\\ #2
# \end{array}\right)}
# \newcommand{\nchoosek}[2]{\twocvec{#1}{#2}}
# $$
#
# If $\theta$ is the model representing the probability, $\theta$, of the coin
# landing on heads (and $1-\theta$ is the probability of landing on tails), we
# need to make an estimate of the probability of model $\theta$ being true given the
# data, which will consist of $N$ flips of which $h$ are heads.
#
# Bayes rule is:
# \begin{eqnarray}
# p(\theta|D,I) &=& \frac{p(D|\theta,I)p(\theta|I)}{p(D|I)} =
# \frac{p(D|\theta,I)p(\theta,I)}{\sum_\theta p(D|\theta,I)p(\theta|I)}
# \end{eqnarray}
#
# Thus, the probability of a particular model $\theta$ being true is the product
# of the probability of the observed data ($h$ heads in $N$ flips) given the
# model $\theta$ and the prior probability of the model $\theta$ being true
# before we even look at the data, divided by the probability of the data itself
# over all models.
#
# The prior probability of model $\theta$ will be assumed to be uniform (from
# maximum entropy considerations). The probability, $\theta$, ranges from 0 to
# 1, so the prior is
# \begin{eqnarray}
# p(\theta|I) = 1
# \end{eqnarray}
#
# The probability of the data given the random model is just the binomial
# distribution:
#
# \begin{eqnarray}
# p(D|\theta)=\nchoosek{N}{h} \theta^h (1-\theta)^{N-h}
# \end{eqnarray}
#
# The probability of the data, $p(D|I)$, is found by summing (or in this case
# integrating) $p(D|\theta,I)p(\theta|I)$ for all $\theta$:
#
# \begin{eqnarray}
# p(D|I) &=& \int_0^1 \nchoosek{N}{h} \theta^h (1-\theta)^{N-h} \cdot 1 d\theta
# \\\\
# &=&\frac{N!}{h!(N-h)!} \frac{h!(N-h)!}{(N+1)!} = \frac{1}{N+1}
# \end{eqnarray}
#
# Now the probability of model $\theta$ being true, given the data, is just
#
# \begin{eqnarray}
# p(\theta|D,I)&=& (N+1) \cdot \nchoosek{N}{h} \theta^h (1-\theta)^{N-h} \\
# &=& \frac{(N+1)!}{h!(N-h)!} \theta^h (1-\theta)^{N-h}
# \end{eqnarray}
#
#
# ### Max, Mean, Variance
#
# The model with the maximum probability is found by maximizing $p(\theta|D,I)$
# w.r.t. $\theta$:
#
# \begin{eqnarray}
# \frac{dP(\theta|D,I)}{d\theta} &=& 0 = \frac{(N+1)!}{h!(N-h)!} \left(
# -(N-h) \theta^h (1-\theta)^{N-h-1} + h \theta^{h-1} (1-\theta)^{N-h} \right) \\\\
# (N-h) \theta^h (1-\theta)^{N-h-1} &=& h \theta^{h-1} (1-\theta)^{N-h} \\\\
# \theta(N-h) &=& (1-\theta) h = h-\theta h = N\theta-\theta h \\\\
# \theta&=&\frac{h}{N} \;\;\;\;\;\surd
# \end{eqnarray}
#
# The average and the standard deviation are also straightforward.
#
#
# \begin{eqnarray}
# \bar{\theta} &=& \int_0^1 \theta \cdot \frac{(N+1)!}{h!(N-h)!} \theta^h (1-\theta)^{N-h} \\\\
# &=& \frac{(N+1)!}{h!(N-h)!} \int_0^1 \theta^{h+1} (1-\theta)^{N-h} \\\\
# &=&\frac{(N+1)!}{h!(N-h)!} \frac{(h+1)!(N-h)!}{(N+2)!} \\\\
# &=&\frac{h+1}{N+2} \\\\
# \bar{\theta^2} &=& \int_0^1 \theta^2 \cdot \frac{(N+1)!}{h!(N-h)!} \theta^h (1-\theta)^{N-h} \\\\
# &=&\frac{(N+1)!}{h!(N-h)!} \frac{(h+2)!(N-h)!}{(N+3)!} \\\\
# &=&\frac{(h+1)(h+2)}{(N+2)(N+3)} \\\\
# \sigma^2 &=& \bar{\theta^2} - \bar{\theta}^2 = \frac{(h+1)(h+2)}{(N+2)(N+3)} -
# \frac{(h+1)(h+1)}{(N+2)(N+2)} \\\\
# &=&\frac{(h+1)(N-h+1)}{(N+2)^2(N+3)} \\\\
# &=& \frac{(h+1)}{(N+2)}\left( \frac{N+2}{N+2} - \frac{h+1}{N+2}\right)
# \frac{1}{N+3} \\\\
# &=& \bar{\theta}(1-\bar{\theta})\frac{1}{N+3}
# \end{eqnarray}
#
# ### An Approximation for the Variance
#
# If $f=h/N$ is the actual fraction of heads observed, then the variance above
# can be written as
# \begin{eqnarray}
# \sigma^2 &=&\frac{(fN+1)(N-fN+1)}{(N+2)^2(N+3)} \\\\
# \mbox{(for large $N$)}&\approx& \frac{(fN+1)(N-fN)}{N^3}
# =\frac{(fN+1)(1-f)}{N^2} \\\\
# \mbox{(for large $fN$)}&\approx& \frac{(fN)(N-fN)}{N^2} = \frac{f(1-f)}{N} \\\\
# \sigma^2&\approx& \frac{f(1-f)}{N}
# \end{eqnarray}
#
# In this limit, the distribution (beta distribution) can be approximated with a
# Gaussian.
#
# In[11]:
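# A quick numerical check of the closed-form results above (an added sketch, not
# part of the original notebook; the values N=10, h=7 are hypothetical). It
# integrates the posterior on a grid and compares the result with the exact
# mean (h+1)/(N+2) and variance (h+1)(N-h+1)/((N+2)^2 (N+3)).
import numpy as np
from math import factorial
N, h = 10, 7
theta = np.linspace(0.0, 1.0, 100001)
norm = factorial(N + 1) / (factorial(h) * factorial(N - h))   # (N+1)!/(h!(N-h)!)
posterior = norm * theta**h * (1.0 - theta)**(N - h)
mean_numeric = np.trapz(theta * posterior, theta)
var_numeric = np.trapz((theta - mean_numeric)**2 * posterior, theta)
mean_exact = (h + 1.0) / (N + 2)
var_exact = (h + 1.0) * (N - h + 1) / ((N + 2)**2 * (N + 3))
var_approx = (float(h) / N) * (1.0 - float(h) / N) / N        # f(1-f)/N, rough for small N
print(mean_numeric, mean_exact)   # both ~0.6667
print(var_numeric, var_exact)     # both ~0.0171
print(var_approx)                 # ~0.021, approaches the exact value for large N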
# ---------------------
# In[8]:
from IPython.core.display import HTML
def css_styling():
styles = open("../styles/custom.css", "r").read()
return HTML(styles)
css_styling()
| mit | -8,313,249,629,320,456,000 | 2,040,188,219,679,617,800 | 33.708029 | 206 | 0.578549 | false |
gpndata/grpc | src/python/grpcio_test/grpc_test/framework/interfaces/face/test_cases.py | 14 | 3176 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tools for creating tests of implementations of the Face layer."""
# unittest is referenced from specification in this module.
import unittest # pylint: disable=unused-import
# test_interfaces is referenced from specification in this module.
from grpc_test.framework.interfaces.face import _blocking_invocation_inline_service
from grpc_test.framework.interfaces.face import _event_invocation_synchronous_event_service
from grpc_test.framework.interfaces.face import _future_invocation_asynchronous_event_service
from grpc_test.framework.interfaces.face import _invocation
from grpc_test.framework.interfaces.face import test_interfaces # pylint: disable=unused-import
_TEST_CASE_SUPERCLASSES = (
_blocking_invocation_inline_service.TestCase,
_event_invocation_synchronous_event_service.TestCase,
_future_invocation_asynchronous_event_service.TestCase,
)
def test_cases(implementation):
"""Creates unittest.TestCase classes for a given Face layer implementation.
Args:
implementation: A test_interfaces.Implementation specifying creation and
destruction of a given Face layer implementation.
Returns:
A sequence of subclasses of unittest.TestCase defining tests of the
specified Face layer implementation.
"""
test_case_classes = []
for invoker_constructor in _invocation.invoker_constructors():
for super_class in _TEST_CASE_SUPERCLASSES:
test_case_classes.append(
type(invoker_constructor.name() + super_class.NAME, (super_class,),
{'implementation': implementation,
'invoker_constructor': invoker_constructor}))
return test_case_classes
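# Hypothetical usage sketch (not part of the original module): collecting the
# generated classes into a unittest suite, assuming `my_implementation` is a
# test_interfaces.Implementation instance supplied by the caller.
#
#   import unittest
#   suite = unittest.TestSuite()
#   loader = unittest.TestLoader()
#   for test_case_class in test_cases(my_implementation):
#     suite.addTests(loader.loadTestsFromTestCase(test_case_class))
#   unittest.TextTestRunner().run(suite)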
| bsd-3-clause | 857,266,509,680,744,800 | -4,079,750,589,565,810,700 | 46.402985 | 96 | 0.774244 | false |
openaire/iis | iis-3rdparty-madis/src/main/resources/eu/dnetlib/iis/3rdparty/scripts/madis/functions/row/util.py | 4 | 10746 | # coding: utf-8
import setpath
from gzip import zlib
import subprocess
import functions
import time
import urllib2
import urllib
from lib import jopts
from functions.conf import domainExtraHeaders
import lib.gzip32 as gzip
try:
from collections import OrderedDict
except ImportError:
# Python 2.6
from lib.collections26 import OrderedDict
def gz(*args):
"""
.. function:: gz(text) -> gzip compressed blob
Function *gz* compresses its input with gzip's maximum compression level.
Examples:
>>> table1('''
... "qwerqewrqwerqwerqwerqwerqwer"
... "asdfasdfasdfasdfasdfasdfsadf"
... ''')
>>> sql("select length(a), length(gz(a)) from table1")
length(a) | length(gz(a))
-------------------------
28 | 20
28 | 18
"""
return buffer(zlib.compress(args[0], 9))
gz.registered=True
def ungz(*args):
"""
.. function:: ungz(blob) -> text
Function *ungz* decompresses gzip blobs. If the input blobs aren't gzip
compressed, then it just returns them as they are.
Examples:
>>> table1('''
... "qwerqwerqwer"
... "asdfasdfasdf"
... ''')
>>> sql("select ungz(gz(a)) from table1")
ungz(gz(a))
------------
qwerqwerqwer
asdfasdfasdf
>>> sql("select ungz('string'), ungz(123)")
ungz('string') | ungz(123)
--------------------------
string | 123
"""
try:
return zlib.decompress(args[0])
except KeyboardInterrupt:
raise
except:
return args[0]
ungz.registered=True
def urlrequest(*args):
"""
.. function:: urlrequest([null], url) -> response
    This function connects to the *url* (via the GET HTTP method) and returns the request's result. If the first
parameter is *null*, then in case of errors *null* will be returned.
Examples:
>>> sql("select urlrequest('http://www.google.com/not_existing')")
Traceback (most recent call last):
...
HTTPError: HTTP Error 404: Not Found
>>> sql("select urlrequest(null, 'http://www.google.com/not_existing') as result")
result
------
None
"""
try:
req = urllib2.Request(''.join((x for x in args if x != None)), None, domainExtraHeaders)
hreq = urllib2.urlopen(req)
if [1 for x,y in hreq.headers.items() if x.lower() in ('content-encoding', 'content-type') and y.lower().find('gzip')!=-1]:
hreq = gzip.GzipFile(fileobj=hreq)
return unicode(hreq.read(), 'utf-8', errors = 'replace')
except urllib2.HTTPError,e:
if args[0] == None:
return None
else:
raise e
urlrequest.registered=True
def urlrequestpost(*args):
"""
.. function:: urlrequestpost(data_jdict, [null], url) -> response
    This function connects to the *url* (via the POST HTTP method), submits the *data_jdict*, and returns the request's result. If the second
parameter is *null*, then in case of errors *null* will be returned.
Examples:
>>> sql('''select urlrequestpost('{"POST_param_name":"data"}', 'http://www.google.com/not_existing')''')
Traceback (most recent call last):
...
HTTPError: HTTP Error 404: Not Found
>>> sql('''select urlrequestpost('["POST_param_name","data"]', null, 'http://www.google.com/not_existing') as result''')
result
------
None
>>> sql("select urlrequestpost(jdict('param1','value1'), null, 'http://www.google.com/not_existing') as result")
result
------
None
>>> sql("select urlrequestpost(jpack('param1','value1'), null, 'http://www.google.com/not_existing') as result")
result
------
None
"""
try:
req = urllib2.Request(''.join((x for x in args[1:] if x != None)), None, domainExtraHeaders)
datain = jopts.fromjsingle(args[0])
dataout = []
if type(datain) == list:
for i in xrange(0, len(datain), 2):
dataout.append((datain[i].encode('utf_8'), datain[i+1].encode('utf_8')))
else:
dataout = [( x.encode('utf_8'), y.encode('utf_8') ) for x,y in datain.items()]
if dataout == []:
raise functions.OperatorError('urlrequestpost',"A list or dict should be provided")
hreq = urllib2.urlopen(req, urllib.urlencode(dataout))
if [1 for x,y in hreq.headers.items() if x.lower() in ('content-encoding', 'content-type') and y.lower().find('gzip')!=-1]:
hreq = gzip.GzipFile(fileobj=hreq)
return unicode(hreq.read(), 'utf-8', errors = 'replace')
except urllib2.HTTPError,e:
if args[1] == None:
return None
else:
raise e
urlrequestpost.registered=True
def failif(*args):
"""
.. function:: failif(condition [, messsage])
If condition is true, raises an error. If message is provided, the message is included in
    the raised error.
Examples:
>>> sql("select failif(1=1,'exception') as answer") #doctest:+ELLIPSIS +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
OperatorError: Madis SQLError:
Operator FAILIF: exception
>>> sql("select failif(1=0,'exception') as answer") #doctest:+ELLIPSIS +NORMALIZE_WHITESPACE
answer
------
0
>>> sql("select failif(1=1) as answer") #doctest:+ELLIPSIS +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
OperatorError: Madis SQLError:
Operator FAILIF: an error was found
"""
    if len(args)>2:
        raise functions.OperatorError('failif','operator needs one or two inputs')
if args[0]:
if len(args)==2:
raise functions.OperatorError('failif', args[1])
else:
raise functions.OperatorError('failif', 'an error was found')
return args[0]
failif.registered=True
def execprogram(*args):
"""
.. function:: execprogram(stdin=null, program_name, parameters, [raise_error]) -> text or blob
Function *execprogram* executes a shell command and returns its output. If the
    value of the first argument is not *null*, the argument's value will be pushed into the program's Standard Input.
If the program doesn't return a *0* return code, then a madIS error will be raised, containing
the contents of the program's error stream.
If the last argument of *execprogram* is set to *null*, then all program errors will be returned as *null*
(see "cat non_existent_file" examples below).
Every one of the program's parameters must be provided as different arguments of the *execprogram* call
(see "cat -n" example below).
.. note::
Function *execprogram* tries by default to convert the program's output to UTF-8. If the conversion
        isn't successful, then it returns the output as a binary blob.
Examples:
>>> table1('''
... echo test
... echo 1
... ''')
>>> sql("select execprogram(null, a, b) from table1")
execprogram(null, a, b)
-----------------------
test
1
>>> sql("select execprogram(null, null, '-l')") #doctest:+ELLIPSIS +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
OperatorError: Madis SQLError:
Operator EXECPROGRAM: Second parameter should be the name of the program to run
>>> sql("select execprogram(null, null, '-l', null)") #doctest:+ELLIPSIS +NORMALIZE_WHITESPACE
execprogram(null, null, '-l', null)
-----------------------------------
None
>>> sql("select execprogram('test', 'cat')")
execprogram('test', 'cat')
--------------------------
test
>>> sql('''select execprogram('test', 'cat', '-n')''') #doctest:+ELLIPSIS +NORMALIZE_WHITESPACE
execprogram('test', 'cat', '-n')
--------------------------------
1 test
>>> sql("select execprogram(null, 'NON_EXISTENT_PROGRAM')") #doctest:+ELLIPSIS +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
OperatorError: Madis SQLError:
Operator EXECPROGRAM: [Errno 2] No such file or directory
>>> sql("select execprogram(null, 'cat', 'non_existent_file')") #doctest:+ELLIPSIS +NORMALIZE_WHITESPACE
Traceback (most recent call last):
...
OperatorError: Madis SQLError:
Operator EXECPROGRAM: cat: non_existent_file: No such file or directory
>>> sql("select execprogram(null, 'cat', 'non_existent_file', null)") #doctest:+ELLIPSIS +NORMALIZE_WHITESPACE
execprogram(null, 'cat', 'non_existent_file', null)
---------------------------------------------------
None
"""
if len(args)<2:
raise functions.OperatorError('execprogram', "First parameter should be data to provide to program's STDIN, or null")
raise_error=False
if len(args)>2 and args[-1]==None:
raise_error=True
if args[1]==None:
if raise_error:
return None
else:
raise functions.OperatorError('execprogram', "Second parameter should be the name of the program to run")
outtext=errtext=''
try:
p=subprocess.Popen([unicode(x) for x in args[1:] if x!=None], stdin=subprocess.PIPE if args[0]!=None else None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if args[0]==None:
outtext, errtext=p.communicate()
else:
val = args[0]
valtype = type(val)
if valtype == unicode:
val = val.encode('utf-8')
if valtype in (int,float):
val = str(val)
outtext, errtext=p.communicate( val )
except Exception,e:
raise functions.OperatorError('execprogram', functions.mstr(e))
if p.returncode!=0:
if raise_error:
return None
else:
raise functions.OperatorError('execprogram', functions.mstr(errtext).strip())
try:
outtext=unicode(outtext, 'utf-8')
except KeyboardInterrupt:
raise
except:
return buffer(outtext)
return outtext
execprogram.registered=True
def sleep(*args):
"""
.. function:: sleep(seconds)
    This function waits for the given number of seconds before returning. The *seconds* parameter can
be fractional (e.g. *0.1* will sleep for 100 milliseconds).
Examples:
>>> sql("select sleep(0.1)")
sleep(0.1)
----------
0.1
"""
t = args[0]
if t<0:
t=0
time.sleep(t)
return t
sleep.registered=True
if not ('.' in __name__):
"""
This is needed to be able to test the function, put it at the end of every
new function you create
"""
import sys
import setpath
from functions import *
testfunction()
if __name__ == "__main__":
reload(sys)
sys.setdefaultencoding('utf-8')
import doctest
doctest.testmod()
| apache-2.0 | -3,574,879,139,516,687,000 | 8,786,334,264,855,366,000 | 27.656 | 167 | 0.602364 | false |
guymakam/Kodi-Israel | plugin.video.reshet.video/resources/appCaster/APEpgLoader.py | 3 | 1230 | # -*- coding: utf-8 -*-
'''
Created on 21/01/2012
Copyright (c) 2010-2012 Shai Bentin.
All rights reserved. Unpublished -- rights reserved
Use of a copyright notice is precautionary only, and does
not imply publication or disclosure.
Licensed under Eclipse Public License, Version 1.0
Initial Developer: Shai Bentin.
@author: shai
'''
from APLoader import APLoader
class APEpgLoader(APLoader):
'''
classdocs
'''
EPG_URI = "v{{api_version}}/accounts/{{account_id}}/broadcasters/{{broadcaster_id}}/vod_items/{{item_id}}/epg"
def __init__(self, settings, itemId = ''):
'''
Constructor
'''
super(APEpgLoader, self).__init__(settings) # call the parent constructor with the settings object
self.queryUrl = self.URL + self.EPG_URI
self.queryUrl = self.queryUrl.replace("{{api_version}}", "1" + "2")
self.queryUrl = self.queryUrl.replace("{{account_id}}", self.accountId)
self.queryUrl = self.queryUrl.replace("{{broadcaster_id}}", self.broadcasterId)
self.queryUrl = self.queryUrl.replace("{{item_id}}", itemId);
        self.queryUrl = self.prepareQueryURL(self.queryUrl, None);
| gpl-2.0 | 1,330,217,250,835,447,300 | 4,781,211,216,151,586,000 | 31.394737 | 114 | 0.631707 | false |
Giftingnation/GN-Oscar-Custom | sites/demo/apps/order/migrations/0005_auto__add_field_orderdiscount_offer_name.py | 16 | 32848 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'OrderDiscount.offer_name'
db.add_column('order_orderdiscount', 'offer_name', self.gf('django.db.models.fields.CharField')(max_length=128, null=True, db_index=True), keep_default=False)
def backwards(self, orm):
# Deleting field 'OrderDiscount.offer_name'
db.delete_column('order_orderdiscount', 'offer_name')
models = {
'address.country': {
'Meta': {'ordering': "('-is_highlighted', 'name')", 'object_name': 'Country'},
'is_highlighted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'is_shipping_country': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'iso_3166_1_a2': ('django.db.models.fields.CharField', [], {'max_length': '2', 'primary_key': 'True'}),
'iso_3166_1_a3': ('django.db.models.fields.CharField', [], {'max_length': '3', 'null': 'True', 'db_index': 'True'}),
'iso_3166_1_numeric': ('django.db.models.fields.PositiveSmallIntegerField', [], {'null': 'True', 'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'printable_name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 10, 11, 14, 42, 12, 984329)'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2012, 10, 11, 14, 42, 12, 984227)'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'db_index': 'True', 'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '1024', 'db_index': 'True'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'db_index': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'customer.communicationeventtype': {
'Meta': {'object_name': 'CommunicationEventType'},
'category': ('django.db.models.fields.CharField', [], {'default': "u'Order related'", 'max_length': '255'}),
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'db_index': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'email_body_html_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_body_template': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'email_subject_template': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'sms_template': ('django.db.models.fields.CharField', [], {'max_length': '170', 'blank': 'True'})
},
'order.billingaddress': {
'Meta': {'object_name': 'BillingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.communicationevent': {
'Meta': {'object_name': 'CommunicationEvent'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['customer.CommunicationEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'communication_events'", 'to': "orm['order.Order']"})
},
'order.line': {
'Meta': {'object_name': 'Line'},
'est_dispatch_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_price_before_discounts_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_before_discounts_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'line_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'lines'", 'to': "orm['order.Order']"}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'order_lines'", 'null': 'True', 'to': "orm['partner.Partner']"}),
'partner_line_notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'partner_line_reference': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'partner_name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'partner_sku': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']", 'null': 'True', 'blank': 'True'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'unit_cost_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_excl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_price_incl_tax': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'unit_retail_price': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'})
},
'order.lineattribute': {
'Meta': {'object_name': 'LineAttribute'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': "orm['order.Line']"}),
'option': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_attributes'", 'null': 'True', 'to': "orm['catalogue.Option']"}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'order.lineprice': {
'Meta': {'ordering': "('id',)", 'object_name': 'LinePrice'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'prices'", 'to': "orm['order.Line']"}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_prices'", 'to': "orm['order.Order']"}),
'price_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'price_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'})
},
'order.order': {
'Meta': {'ordering': "['-date_placed']", 'object_name': 'Order'},
'basket_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'billing_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.BillingAddress']", 'null': 'True', 'blank': 'True'}),
'date_placed': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'guest_email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'number': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}),
'shipping_address': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingAddress']", 'null': 'True', 'blank': 'True'}),
'shipping_excl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_incl_tax': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'shipping_method': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'site': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['sites.Site']"}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'total_excl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'total_incl_tax': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'orders'", 'null': 'True', 'to': "orm['auth.User']"})
},
'order.orderdiscount': {
'Meta': {'object_name': 'OrderDiscount'},
'amount': ('django.db.models.fields.DecimalField', [], {'default': '0', 'max_digits': '12', 'decimal_places': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'offer_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'offer_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'discounts'", 'to': "orm['order.Order']"}),
'voucher_code': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}),
'voucher_id': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'order.ordernote': {
'Meta': {'object_name': 'OrderNote'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'note_type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': "orm['order.Order']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'})
},
'order.paymentevent': {
'Meta': {'object_name': 'PaymentEvent'},
'amount': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.PaymentEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['order.Line']", 'through': "orm['order.PaymentEventQuantity']", 'symmetrical': 'False'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'payment_events'", 'to': "orm['order.Order']"})
},
'order.paymenteventquantity': {
'Meta': {'object_name': 'PaymentEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.PaymentEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.paymenteventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'PaymentEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'order.shippingaddress': {
'Meta': {'object_name': 'ShippingAddress'},
'country': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['address.Country']"}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'line1': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'line2': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line3': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'line4': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'phone_number': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'search_text': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
},
'order.shippingevent': {
'Meta': {'ordering': "['-date']", 'object_name': 'ShippingEvent'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'event_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.ShippingEventType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lines': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['order.Line']", 'through': "orm['order.ShippingEventQuantity']", 'symmetrical': 'False'}),
'notes': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'shipping_events'", 'to': "orm['order.Order']"})
},
'order.shippingeventquantity': {
'Meta': {'object_name': 'ShippingEventQuantity'},
'event': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'line_quantities'", 'to': "orm['order.ShippingEvent']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['order.Line']"}),
'quantity': ('django.db.models.fields.PositiveIntegerField', [], {})
},
'order.shippingeventtype': {
'Meta': {'ordering': "('sequence_number',)", 'object_name': 'ShippingEventType'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_required': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'sequence_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'partner.partner': {
'Meta': {'object_name': 'Partner'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.User']"})
},
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['order']
| bsd-3-clause | 4,564,557,053,925,921,000 | 3,164,048,063,151,065,000 | 87.064343 | 222 | 0.553793 | false |
peterfpeterson/mantid | scripts/Interface/reduction_gui/widgets/sans/hfir_background.py | 3 | 12637 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
#pylint: disable=invalid-name
from qtpy.QtWidgets import (QFrame) # noqa
from qtpy.QtGui import (QDoubleValidator) # noqa
import reduction_gui.widgets.util as util
from reduction_gui.reduction.sans.hfir_background_script import Background
from reduction_gui.widgets.base_widget import BaseWidget
from reduction_gui.widgets.sans.hfir_sample_data import BeamSpreader, DirectBeam
try:
from mantidqt.utils.qt import load_ui
except ImportError:
from mantid.kernel import Logger
Logger("BckDirectBeam").information('Using legacy ui importer')
from mantidplot import load_ui
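# The Bck* subclasses below reuse the sample-data transmission widgets but wrap
# their state in Background.DirectBeam / Background.BeamSpreader, so the
# background tab keeps its own copy of the transmission settings.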
class BckDirectBeam(DirectBeam):
def __init__(self, parent=None, state=None, settings=None, data_type=None, data_proxy=None):
super(BckDirectBeam, self).__init__(parent, state, settings, data_type, data_proxy=data_proxy)
if state is None:
self.set_state(Background.DirectBeam())
def get_state(self):
direct_beam = super(BckDirectBeam, self).get_state()
m = Background.DirectBeam(direct_beam)
return m
def set_state(self, state):
super(BckDirectBeam, self).set_state(state)
class BckBeamSpreader(BeamSpreader):
def __init__(self, parent=None, state=None, settings=None, data_type=None, data_proxy=None):
super(BckBeamSpreader, self).__init__(parent, state, settings, data_type, data_proxy=data_proxy)
if state is None:
self.set_state(Background.BeamSpreader())
def get_state(self):
direct_beam = super(BckBeamSpreader, self).get_state()
m = Background.BeamSpreader(direct_beam)
return m
def set_state(self, state):
super(BckBeamSpreader, self).set_state(state)
class BackgroundWidget(BaseWidget):
"""
Widget that presents the transmission options to the user
"""
_method_box = None
## Widget name
name = "Background"
def __init__(self, parent=None, state=None, settings=None, show_transmission=True, data_type=None, data_proxy=None):
super(BackgroundWidget, self).__init__(parent, state, settings, data_type, data_proxy=data_proxy)
class BckFrame(QFrame):
def __init__(self, parent=None):
QFrame.__init__(self, parent)
self.ui = load_ui(__file__, '../../../ui/sans/hfir_background.ui', baseinstance=self)
self._content = BckFrame(self)
self._layout.addWidget(self._content)
# Flag to show transmission options or not
self.show_transmission = show_transmission
self.initialize_content()
if state is not None:
self.set_state(state)
else:
m = Background()
self.set_state(m)
self._last_direct_state = None
self._last_spreader_state = None
def initialize_content(self):
"""
Declare the validators and event connections for the
widgets loaded through the .ui file.
"""
# Validators
self._content.transmission_edit.setValidator(QDoubleValidator(self._content.transmission_edit))
self._content.dtransmission_edit.setValidator(QDoubleValidator(self._content.dtransmission_edit))
#self._content.thickness_edit.setValidator(QDoubleValidator(self._content.thickness_edit))
# Connections
self._content.calculate_trans_chk.clicked.connect(self._calculate_clicked)
self._content.trans_direct_chk.clicked.connect(self._direct_beam)
self._content.trans_spreader_chk.clicked.connect(self._beam_spreader)
self._content.background_chk.clicked.connect(self._background_clicked)
self._content.background_browse.clicked.connect(self._background_browse)
self._content.trans_dark_current_button.clicked.connect(self._trans_dark_current_browse)
self._content.background_plot_button.clicked.connect(self._background_plot_clicked)
self._content.trans_dark_current_plot_button.clicked.connect(self._trans_dark_current_plot_clicked)
# Process transmission option
if not self.show_transmission:
self._content.calculate_trans_chk.hide()
self._content.bck_trans_label.hide()
self._content.bck_trans_err_label.hide()
self._content.transmission_edit.hide()
self._content.dtransmission_edit.hide()
self._content.calculate_trans_chk.hide()
self._content.theta_dep_chk.hide()
self._content.trans_direct_chk.hide()
self._content.trans_spreader_chk.hide()
self._content.trans_dark_current_label.hide()
self._content.trans_dark_current_edit.hide()
self._content.trans_dark_current_button.hide()
if not self._has_instrument_view:
self._content.background_plot_button.hide()
self._content.trans_dark_current_plot_button.hide()
def _background_plot_clicked(self):
self.show_instrument(file_name=self._content.background_edit.text)
def _trans_dark_current_plot_clicked(self):
self.show_instrument(file_name=self._content.trans_dark_current_edit.text)
def set_state(self, state):
"""
Populate the UI elements with the data from the given state.
            @param state: Background object
"""
bck_file = str(self._content.background_edit.text()).strip()
self._content.background_chk.setChecked(state.background_corr)
self._content.background_edit.setText(state.background_file)
if state.background_file.strip() != bck_file:
self.get_data_info()
self._background_clicked(state.background_corr)
if self.show_transmission:
self._content.transmission_edit.setText(str("%6.4f" % state.bck_transmission))
self._content.dtransmission_edit.setText(str("%6.4f" % state.bck_transmission_spread))
#self._content.thickness_edit.setText("%6.4f" % state.sample_thickness)
if isinstance(state.trans_calculation_method, state.DirectBeam):
self._content.trans_direct_chk.setChecked(True)
self._direct_beam(state=state.trans_calculation_method)
else:
self._content.trans_spreader_chk.setChecked(True)
self._beam_spreader(state=state.trans_calculation_method)
self._content.calculate_trans_chk.setChecked(state.calculate_transmission)
self._content.theta_dep_chk.setChecked(state.theta_dependent)
self._content.trans_dark_current_edit.setText(str(state.trans_dark_current))
self._calculate_clicked(state.calculate_transmission)
def get_state(self):
"""
Returns an object with the state of the interface
"""
m = Background()
m.background_corr = self._content.background_chk.isChecked()
m.background_file = str(self._content.background_edit.text())
m.bck_transmission_enabled = self.show_transmission
if self.show_transmission:
#m.sample_thickness = util._check_and_get_float_line_edit(self._content.thickness_edit)
m.bck_transmission = util._check_and_get_float_line_edit(self._content.transmission_edit)
m.bck_transmission_spread = util._check_and_get_float_line_edit(self._content.dtransmission_edit)
m.calculate_transmission = self._content.calculate_trans_chk.isChecked()
m.theta_dependent = self._content.theta_dep_chk.isChecked()
m.trans_dark_current = self._content.trans_dark_current_edit.text()
if self._method_box is not None:
m.trans_calculation_method=self._method_box.get_state()
return m
def _trans_dark_current_browse(self):
fname = self.data_browse_dialog()
if fname:
self._content.trans_dark_current_edit.setText(fname)
def _direct_beam(self, state=None):
if state is None:
state = self._last_direct_state
if isinstance(self._method_box, BckBeamSpreader):
self._last_spreader_state = self._method_box.get_state()
if self.show_transmission:
self._replace_method(BckDirectBeam(self, state=state, settings=self._settings,
data_type=self._data_type, data_proxy=self._data_proxy))
def _beam_spreader(self, state=None):
if state is None:
state = self._last_spreader_state
if isinstance(self._method_box, BckDirectBeam):
self._last_direct_state = self._method_box.get_state()
if self.show_transmission:
self._replace_method(BckBeamSpreader(self, state=state, settings=self._settings,
data_type=self._data_type, data_proxy=self._data_proxy))
def _replace_method(self, widget):
if self._method_box is not None:
for i in range(0, self._content.widget_placeholder.count()):
item = self._content.widget_placeholder.itemAt(i)
self._content.widget_placeholder.removeItem(self._content.widget_placeholder.itemAt(i))
item.widget().deleteLater()
self._method_box = widget
self._content.widget_placeholder.addWidget(self._method_box)
def _background_clicked(self, is_checked):
self._content.background_edit.setEnabled(is_checked)
#self._content.thickness_edit.setEnabled(is_checked)
#self._content.thickness_label.setEnabled(is_checked)
self._content.geometry_options_groupbox.setEnabled(is_checked)
self._content.background_browse.setEnabled(is_checked)
self._content.background_plot_button.setEnabled(is_checked)
self._content.calculate_trans_chk.setEnabled(is_checked)
self._content.theta_dep_chk.setEnabled(is_checked)
self._content.bck_trans_label.setEnabled(is_checked)
self._content.bck_trans_err_label.setEnabled(is_checked)
self._content.transmission_grpbox.setEnabled(is_checked)
self._calculate_clicked(is_checked and self._content.calculate_trans_chk.isChecked())
def _background_browse(self):
fname = self.data_browse_dialog()
if fname:
bck_file = str(self._content.background_edit.text()).strip()
self._content.background_edit.setText(fname)
if str(fname).strip() != bck_file:
self.get_data_info()
def _calculate_clicked(self, is_checked):
self._content.trans_direct_chk.setEnabled(is_checked)
self._content.trans_spreader_chk.setEnabled(is_checked)
if self._method_box is not None:
self._method_box.setEnabled(is_checked)
self._content.transmission_edit.setEnabled(not is_checked and self._content.background_chk.isChecked())
self._content.dtransmission_edit.setEnabled(not is_checked and self._content.background_chk.isChecked())
self._content.trans_dark_current_label.setEnabled(is_checked)
self._content.trans_dark_current_edit.setEnabled(is_checked)
self._content.trans_dark_current_button.setEnabled(is_checked)
self._content.trans_dark_current_plot_button.setEnabled(is_checked)
def get_data_info(self):
"""
Retrieve information from the data file and update the display
"""
if self._data_proxy is None:
return
fname = str(self._content.background_edit.text())
if len(str(fname).strip())>0:
dataproxy = self._data_proxy(fname, "__background_raw")
if len(dataproxy.errors)>0:
return
self._settings.last_data_ws = dataproxy.data_ws
if dataproxy.sample_detector_distance is not None:
self._content.sample_dist_edit.setText(str(dataproxy.sample_detector_distance))
util._check_and_get_float_line_edit(self._content.sample_dist_edit, min=0.0)
if dataproxy.wavelength is not None:
self._content.wavelength_edit.setText(str(dataproxy.wavelength))
util._check_and_get_float_line_edit(self._content.wavelength_edit, min=0.0)
if dataproxy.wavelength_spread is not None:
self._content.wavelength_spread_edit.setText(str(dataproxy.wavelength_spread))
| gpl-3.0 | 213,969,435,069,029,400 | 3,612,605,865,638,394,000 | 44.786232 | 120 | 0.656327 | false |
enzochiau/tablib | tablib/packages/openpyxl3/reader/worksheet.py | 55 | 3839 | # file openpyxl/reader/worksheet.py
# Copyright (c) 2010 openpyxl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni
"""Reader for a single worksheet."""
# Python stdlib imports
try:
from xml.etree.cElementTree import iterparse
except ImportError:
from xml.etree.ElementTree import iterparse
from io import StringIO
# package imports
from ..cell import Cell, coordinate_from_string
from ..worksheet import Worksheet
def _get_xml_iter(xml_source):
if not hasattr(xml_source, 'name'):
return StringIO(xml_source)
else:
xml_source.seek(0)
return xml_source
def read_dimension(xml_source):
source = _get_xml_iter(xml_source)
it = iterparse(source)
for event, element in it:
if element.tag == '{http://schemas.openxmlformats.org/spreadsheetml/2006/main}dimension':
ref = element.get('ref')
if ':' in ref:
min_range, max_range = ref.split(':')
else:
min_range = max_range = ref
min_col, min_row = coordinate_from_string(min_range)
max_col, max_row = coordinate_from_string(max_range)
return min_col, min_row, max_col, max_row
else:
element.clear()
return None
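# Illustrative example: a worksheet whose XML contains <dimension ref="A1:C10"/>
# makes read_dimension() return ('A', 1, 'C', 10); a single-cell ref such as
# "B2" yields ('B', 2, 'B', 2).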
def filter_cells(event_element):
    # keep only the parse events that belong to cell ('c') elements
    event, element = event_element
    return element.tag == '{http://schemas.openxmlformats.org/spreadsheetml/2006/main}c'
def fast_parse(ws, xml_source, string_table, style_table):
source = _get_xml_iter(xml_source)
it = iterparse(source)
for event, element in filter(filter_cells, it):
value = element.findtext('{http://schemas.openxmlformats.org/spreadsheetml/2006/main}v')
if value is not None:
coordinate = element.get('r')
data_type = element.get('t', 'n')
style_id = element.get('s')
if data_type == Cell.TYPE_STRING:
value = string_table.get(int(value))
ws.cell(coordinate).value = value
if style_id is not None:
ws._styles[coordinate] = style_table.get(int(style_id))
# to avoid memory exhaustion, clear the item after use
element.clear()
from ..reader.iter_worksheet import IterableWorksheet
def read_worksheet(xml_source, parent, preset_title, string_table,
style_table, workbook_name = None, sheet_codename = None):
"""Read an xml worksheet"""
if workbook_name and sheet_codename:
ws = IterableWorksheet(parent, preset_title, workbook_name,
sheet_codename, xml_source)
else:
ws = Worksheet(parent, preset_title)
fast_parse(ws, xml_source, string_table, style_table)
return ws
| mit | 105,672,415,629,441,580 | 8,443,947,512,631,143,000 | 31.811966 | 97 | 0.668924 | false |
mikkylok/mikky.lu | venv/lib/python2.7/site-packages/markdown/extensions/abbr.py | 123 | 2738 | '''
Abbreviation Extension for Python-Markdown
==========================================
This extension adds abbreviation handling to Python-Markdown.
See <https://pythonhosted.org/Markdown/extensions/abbreviations.html>
for documentation.
Original code Copyright 2007-2008 [Waylan Limberg](http://achinghead.com/) and
[Seemant Kulleen](http://www.kulleen.org/)
All changes Copyright 2008-2014 The Python Markdown Project
License: [BSD](http://www.opensource.org/licenses/bsd-license.php)
'''
from __future__ import absolute_import
from __future__ import unicode_literals
from . import Extension
from ..preprocessors import Preprocessor
from ..inlinepatterns import Pattern
from ..util import etree, AtomicString
import re
# Global Vars
ABBR_REF_RE = re.compile(r'[*]\[(?P<abbr>[^\]]*)\][ ]?:\s*(?P<title>.*)')
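# Illustrative example of a reference line this pattern matches:
#   *[HTML]: Hyper Text Markup Language
# which registers "HTML" as an abbreviation titled "Hyper Text Markup Language".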
class AbbrExtension(Extension):
""" Abbreviation Extension for Python-Markdown. """
def extendMarkdown(self, md, md_globals):
""" Insert AbbrPreprocessor before ReferencePreprocessor. """
md.preprocessors.add('abbr', AbbrPreprocessor(md), '<reference')
class AbbrPreprocessor(Preprocessor):
""" Abbreviation Preprocessor - parse text for abbr references. """
def run(self, lines):
'''
Find and remove all Abbreviation references from the text.
Each reference is set as a new AbbrPattern in the markdown instance.
'''
new_text = []
for line in lines:
m = ABBR_REF_RE.match(line)
if m:
abbr = m.group('abbr').strip()
title = m.group('title').strip()
self.markdown.inlinePatterns['abbr-%s' % abbr] = \
AbbrPattern(self._generate_pattern(abbr), title)
else:
new_text.append(line)
return new_text
def _generate_pattern(self, text):
'''
Given a string, returns an regex pattern to match that string.
'HTML' -> r'(?P<abbr>[H][T][M][L])'
Note: we force each char as a literal match (in brackets) as we don't
know what they will be beforehand.
'''
chars = list(text)
for i in range(len(chars)):
chars[i] = r'[%s]' % chars[i]
return r'(?P<abbr>\b%s\b)' % (r''.join(chars))
class AbbrPattern(Pattern):
""" Abbreviation inline pattern. """
def __init__(self, pattern, title):
super(AbbrPattern, self).__init__(pattern)
self.title = title
def handleMatch(self, m):
abbr = etree.Element('abbr')
abbr.text = AtomicString(m.group('abbr'))
abbr.set('title', self.title)
return abbr
def makeExtension(*args, **kwargs):
return AbbrExtension(*args, **kwargs)
| mit | -3,577,529,003,480,618,500 | -8,469,701,926,653,727,000 | 29.087912 | 79 | 0.616508 | false |
wylee/django-local-settings | src/local_settings/util.py | 1 | 5070 | import importlib
import io
import os
import dotenv
NO_DEFAULT = type(
"NO_DEFAULT",
(),
{
"__nonzero__": (lambda self: False), # Python 2
"__bool__": (lambda self: False), # Python 3
"__str__": (lambda self: self.__class__.__name__),
"__repr__": (lambda self: str(self)),
"__copy__": (lambda self: self),
},
)()
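# Illustrative usage (hypothetical caller): passing default=NO_DEFAULT to a getter
# lets it distinguish "no default supplied" from an explicit None, since the
# sentinel is falsy, copies to itself, and prints as 'NO_DEFAULT'.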
def get_file_name():
"""Get local settings file from environ or discover it.
If the ``LOCAL_SETTINGS_FILE`` environment variable is set, its
value is returned directly.
Otherwise, the current working directory is searched for
`local.{ext}` for each file extension handled by each loading
:mod:`strategy`. Note that the search is done in alphabetical order
so that if ``local.cfg`` and ``local.yaml`` both exist, the former
will be returned.
Returns:
str: File name if set via environ or discovered
None: File name isn't set and wasn't discovered
"""
file_name = os.environ.get("LOCAL_SETTINGS_FILE")
if file_name:
return file_name
cwd = os.getcwd()
default_file_names = get_default_file_names()
for file_name in default_file_names:
file_name = os.path.join(cwd, file_name)
if os.path.exists(file_name):
return file_name
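# Illustrative example: with LOCAL_SETTINGS_FILE unset and both local.cfg and
# local.yaml present in the current directory, get_file_name() returns the path
# to local.cfg because the candidate names are checked in sorted order.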
def get_default_file_names():
"""Get default file names for all loading strategies, sorted."""
from .strategy import get_file_type_map # noqa: Avoid circular import
return sorted(f"local.{ext}" for ext in get_file_type_map())
def parse_file_name_and_section(
file_name, section=None, extender=None, extender_section=None
):
"""Parse file name and (maybe) section.
File names can be absolute paths, relative paths, or asset
specs::
/home/user/project/local.cfg
local.cfg
some.package:local.cfg
File names can also include a section::
some.package:local.cfg#dev
If a ``section`` is passed, it will take precedence over a
section parsed out of the file name.
"""
if "#" in file_name:
file_name, parsed_section = file_name.rsplit("#", 1)
else:
parsed_section = None
if ":" in file_name:
file_name = asset_path(file_name)
if extender:
if not file_name:
# Extended another section in the same file
file_name = extender
elif not os.path.isabs(file_name):
# Extended by another file in the same directory
file_name = abs_path(file_name, relative_to=os.path.dirname(extender))
if section:
pass
elif parsed_section:
section = parsed_section
elif extender_section:
section = extender_section
else:
section = None
return file_name, section
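# Illustrative example: parse_file_name_and_section('some.package:local.cfg#dev')
# resolves the asset spec to an absolute path inside the some.package package and
# returns that path together with the section name 'dev'.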
# Path utilities
def abs_path(path, relative_to=None):
"""Make path absolute and normalize it."""
if os.path.isabs(path):
path = os.path.normpath(path)
elif ":" in path:
path = asset_path(path)
else:
path = os.path.expanduser(path)
if relative_to:
path = os.path.join(relative_to, path)
path = os.path.abspath(path)
path = os.path.normpath(path)
return path
def asset_path(path):
"""Get absolute path from asset spec and normalize it."""
if ":" in path:
package_name, rel_path = path.split(":", 1)
else:
package_name, rel_path = path, ""
try:
package = importlib.import_module(package_name)
except ImportError:
raise ValueError(
f"Could not get asset path for {path}; could not import "
f"package: {package_name}"
)
if not hasattr(package, "__file__"):
raise ValueError("Can't compute path relative to namespace package")
package_path = os.path.dirname(package.__file__)
if rel_path:
path = os.path.join(package_path, rel_path)
path = os.path.normpath(path)
return path
def dotenv_path(path=None, relative_to=None, file_name=".env"):
"""Get .env path.
If a path is specified, convert it to an absolute path. Otherwise,
use the default, "./.env".
.. note:: By default, the dotenv package discovers the default .env
file relative to the call site, so we have to tell it use CWD.
"""
if path:
path = abs_path(path, relative_to)
else:
path = dotenv.find_dotenv(filename=file_name, usecwd=True)
return path
def load_dotenv(path=None, relative_to=None, file_name=".env"):
"""Load vars from dotenv file into environ."""
path = dotenv_path(path, relative_to, file_name)
dotenv.load_dotenv(path)
# These TTY functions were copied from Invoke
def is_a_tty(stream):
if hasattr(stream, "isatty") and callable(stream.isatty):
return stream.isatty()
elif has_fileno(stream):
return os.isatty(stream.fileno())
return False
def has_fileno(stream):
try:
return isinstance(stream.fileno(), int)
except (AttributeError, io.UnsupportedOperation):
return False
| mit | -4,398,949,885,694,380,000 | 5,671,235,065,341,416,000 | 26.258065 | 82 | 0.622091 | false |
ngageoint/voxel-globe | voxel_globe/tests/tasks.py | 2 | 1124 | from voxel_globe.common_tasks import shared_task, VipTask
from celery.utils.log import get_task_logger
logger = get_task_logger(__name__)
@shared_task(base=VipTask, bind=True)
def success(self):
import time
time.sleep(0.5)
return 123
@shared_task(base=VipTask, bind=True)
def python_crash(self):
import time
x = 15
time.sleep(0.5)
x += 5
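  # ok() is intentionally undefined, so the next line raises NameError and the task fails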
ok()
return -321
@shared_task(base=VipTask, bind=True)
def python_segfault(self):
import time
time.sleep(0.5)
from types import CodeType as code
#Guaranteed segfault https://wiki.python.org/moin/CrashingPython
exec code(0, 5, 8, 0, "hello moshe", (), (), (), "", "", 0, "")
return -111
@shared_task(base=VipTask, bind=True, routing_key="gpu")
def run_ocl_info(self):
import os
print os.getpid()
import boxm2_adaptor as b
b.ocl_info()
@shared_task(base=VipTask, bind=True)
def run_ocl_info2(self):
import os
print os.getpid()
import boxm2_adaptor as b
b.ocl_info()
@shared_task(base=VipTask, bind=True)
def add(self, a, b, pause=None):
if pause:
import time
time.sleep(pause)
print 'a + b = %s + %s' %(a, b)
return a+b
| mit | -111,466,291,446,681,070 | 2,542,985,046,780,107,000 | 21.039216 | 66 | 0.673488 | false |
ngageoint/scale | scale/data/models.py | 1 | 24039 | """Defines the database models for datasets"""
from __future__ import absolute_import, unicode_literals
import copy
import logging
from collections import namedtuple
import django.contrib.postgres.fields
from django.db import models, transaction
from django.db.models import Q, Count
from data.data import data_util
from data.data.json.data_v6 import convert_data_to_v6_json, DataV6
from data.data.exceptions import InvalidData
from data.data.value import FileValue
from data.dataset.dataset import DataSetDefinition
from data.dataset.json.dataset_v6 import convert_definition_to_v6_json, DataSetDefinitionV6
from data.exceptions import InvalidDataSetDefinition, InvalidDataSetMember
from data.serializers import DataSetFileSerializerV6, DataSetMemberSerializerV6
from storage.models import ScaleFile
from util import rest as rest_utils
from util.database import alphabetize
logger = logging.getLogger(__name__)
DataSetValidation = namedtuple('DataSetValidation', ['is_valid', 'errors', 'warnings'])
# DataSetKey = namedtuple('DataSetKey', ['name', 'version'])
class DataSetManager(models.Manager):
"""Provides additional methods for handling datasets"""
def create_dataset_v6(self, definition, title=None, description=None):
"""Creates and returns a new dataset for the given name/title/description/definition/version??
:param definition: Parameter definition of the dataset
:type definition: :class:`data.dataset.dataset.DataSetDefinition`
:param title: Optional title of the dataset
:type title: string
:param description: Optional description of the dataset
:type description: string
:returns: The new dataset
:rtype: :class:`data.models.DataSet`
        :raises :class:`data.exceptions.InvalidDataSet`: If a given dataset has an invalid value
"""
if not definition:
definition = DataSetDefinition(definition={})
dataset = DataSet()
dataset.title = title
dataset.description = description
dataset.definition = definition.get_dict()
dataset.save()
return dataset
def get_details_v6(self, dataset_id):
"""Gets additional details for the given dataset id
:returns: The full dataset for the given id
:rtype: :class:`data.models.DataSet`
"""
ds = DataSet.objects.get(pk=dataset_id)
ds.files = DataSetFile.objects.get_dataset_files(ds.id)
return ds
def get_datasets_v6(self, started=None, ended=None, dataset_ids=None, keywords=None, order=None):
"""Handles retrieving datasets - possibly filtered and ordered
:returns: The list of datasets that match the given filters
:rtype: [:class:`data.models.DataSet`]
"""
return self.filter_datasets(started=started, ended=ended, dataset_ids=dataset_ids, keywords=keywords, order=order)
def filter_datasets(self, started=None, ended=None, dataset_ids=None, keywords=None, order=None):
"""Returns a query for dataset models that filters on the given fields
:param started: Query datasets created after this amount of time.
:type started: :class:`datetime.datetime`
:param ended: Query datasets created before this amount of time.
:type ended: :class:`datetime.datetime`
        :param dataset_ids: Query datasets associated with the given id(s)
:type dataset_ids: :func:`list`
:param keywords: Query datasets with title or description matching one of the specified keywords
:type keywords: :func:`list`
:param order: A list of fields to control the sort order.
:type order: :func:`list`
:returns: The dataset query
:rtype: :class:`django.db.models.QuerySet`
"""
# Fetch a list of the datasets
datasets = self.all()
# Apply time range filtering
if started:
datasets = datasets.filter(created__gte=started)
if ended:
datasets = datasets.filter(created__lte=ended)
# Apply additional filters
if dataset_ids:
datasets = datasets.filter(id__in=dataset_ids)
        # Apply keyword filtering across the dataset title and description fields
if keywords:
key_query = Q()
for keyword in keywords:
key_query |= Q(title__icontains=keyword)
key_query |= Q(description__icontains=keyword)
datasets = datasets.filter(key_query)
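        # Illustrative example: keywords=['landsat', 'modis'] matches any dataset
        # whose title or description contains either keyword (case-insensitive).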
# Apply sorting
if order:
ordering = alphabetize(order, DataSet.ALPHABETIZE_FIELDS)
datasets = datasets.order_by(*ordering)
else:
datasets = datasets.order_by('id')
for ds in datasets:
files = DataSetFile.objects.get_file_ids(dataset_ids=[ds.id])
ds.files = len(files)
return datasets
def validate_dataset_v6(self, definition, title=None, description=None):
"""Validates the given dataset definiton
:param definition: The dataset definition
:type definition: dict
:returns: The dataset validation
        :rtype: :class:`data.models.DataSetValidation`
"""
is_valid = True
errors = []
warnings = []
dataset_definition = None
try:
dataset_definition = DataSetDefinitionV6(definition=definition, do_validate=True)
except InvalidDataSetDefinition as ex:
is_valid = False
errors.append(ex.error)
message = 'Dataset definition is invalid: %s' % ex
logger.info(message)
pass
# validate other fields
return DataSetValidation(is_valid, errors, warnings)
def get_dataset_files(self, dataset_id):
"""Returns the files associated with the given dataset
:returns: The list of DataSetFiles matching the file_id
:rtype: [:class:`data.models.DataSetFile`]
"""
files = DataSetFile.objects.get_dataset_files(dataset_id=dataset_id)
return files
def get_dataset_members(self, dataset_id):
"""Returns the members associated with the given dataset_id
:returns: The list of DataSetMembers
:rtype: [:class:`data.models.DataSetMember`]
"""
dataset = self.get(pk=dataset_id)
members = DataSetMember.objects.all().filter(dataset=dataset)
return members
class DataSet(models.Model):
"""
Represents a DataSet object
:keyword title: The human-readable title of this dataset (optional)
:type title: :class:`django.db.models.CharField`
:keyword description: The description of the dataset (optional)
:type description: :class:`django.db.models.CharField`
:keyword created: Defines the created time of the dataset
:type created: :class:`django.db.models.DateTimeField`
:keyword definition: Defines the dataset
:type definition: class:`django.contrib.postgres.fields.JSONField`
"""
ALPHABETIZE_FIELDS = ['title', 'description']
title = models.CharField(blank=True, max_length=50, null=True)
description = models.TextField(blank=True, null=True)
created = models.DateTimeField(auto_now_add=True)
definition = django.contrib.postgres.fields.JSONField(default=dict)
objects = DataSetManager()
def get_definition(self):
"""Returns the dataset definition
:returns: The DataSet definition
:rtype: :class:`data.dataset.dataset.DataSetDefinition`
"""
if isinstance(self.definition, basestring):
self.definition = {}
return DataSetDefinitionV6(definition=self.definition).get_definition()
def get_v6_definition_json(self):
"""Returns the dataset definition in v6 of the JSON schema
:returns: The dataset definition in v6 of the JSON schema
:rtype: dict
"""
return rest_utils.strip_schema_version(convert_definition_to_v6_json(self.get_definition()).get_dict())
def get_dataset_definition(self):
"""Returns the dataset definition
:returns: The dataset definition json
:rtype: dict
"""
return self.definition
def get_dataset_members_json(self):
"""Returns the JSON for the associated dataset members
:returns: Returns the outgoing primitive representation.
        :rtype: dict
"""
members = DataSet.objects.get_dataset_members(dataset_id=self.id)
serializer = DataSetMemberSerializerV6(members, many=True)
return serializer.data
def get_dataset_files_json(self):
"""Returns the JSON for the associated dataset files
:returns: Returns the outgoing primitive representation.
        :rtype: dict
"""
files = DataSet.objects.get_dataset_files(self.id)
serializer = DataSetFileSerializerV6(files, many=True)
return serializer.data
class Meta(object):
"""meta information for the db"""
db_table = 'data_set'
class DataSetMemberManager(models.Manager):
"""Provides additional methods for handling dataset members"""
def build_data_list(self, template, data_started=None, data_ended=None, created_started=None, created_ended=None,
source_started=None, source_ended=None, source_sensor_classes=None, source_sensors=None,
source_collections=None,source_tasks=None, mod_started=None, mod_ended=None, job_type_ids=None,
job_type_names=None, job_ids=None, is_published=None, is_superseded=None, file_names=None,
job_outputs=None, recipe_ids=None, recipe_type_ids=None, recipe_nodes=None, batch_ids=None, order=None):
"""Builds a list of data dictionaries from a template and file filters
:param template: The template to fill with files found through filters
:type template: dict
:param data_started: Query files where data started after this time.
:type data_started: :class:`datetime.datetime`
:param data_ended: Query files where data ended before this time.
:type data_ended: :class:`datetime.datetime`
:param created_started: Query files created after this time.
:type created_started: :class:`datetime.datetime`
:param created_ended: Query files created before this time.
:type created_ended: :class:`datetime.datetime`
:param source_started: Query files where source collection started after this time.
:type source_started: :class:`datetime.datetime`
:param source_ended: Query files where source collection ended before this time.
:type source_ended: :class:`datetime.datetime`
:param source_sensor_classes: Query files with the given source sensor class.
:type source_sensor_classes: :func:`list`
        :param source_sensors: Query files with the given source sensor.
        :type source_sensors: :func:`list`
        :param source_collections: Query files with the given source collection.
        :type source_collections: :func:`list`
:param source_tasks: Query files with the given source tasks.
:type source_tasks: :func:`list`
:param mod_started: Query files where the last modified date is after this time.
:type mod_started: :class:`datetime.datetime`
:param mod_ended: Query files where the last modified date is before this time.
:type mod_ended: :class:`datetime.datetime`
:param job_type_ids: Query files with jobs with the given type identifier.
:type job_type_ids: :func:`list`
:param job_type_names: Query files with jobs with the given type name.
:type job_type_names: :func:`list`
:keyword job_ids: Query files with a given job id
:type job_ids: :func:`list`
:param is_published: Query files flagged as currently exposed for publication.
:type is_published: bool
:param is_superseded: Query files that have/have not been superseded.
:type is_superseded: bool
:param file_names: Query files with the given file names.
:type file_names: :func:`list`
:keyword job_outputs: Query files with the given job outputs
:type job_outputs: :func:`list`
:keyword recipe_ids: Query files with a given recipe id
:type recipe_ids: :func:`list`
:keyword recipe_nodes: Query files with a given recipe nodes
:type recipe_nodes: :func:`list`
:keyword recipe_type_ids: Query files with the given recipe types
:type recipe_type_ids: :func:`list`
:keyword batch_ids: Query files with batches with the given identifiers.
:type batch_ids: :func:`list`
:param order: A list of fields to control the sort order.
:type order: :func:`list`
"""
files = ScaleFile.objects.filter_files(
data_started=data_started, data_ended=data_ended,
source_started=source_started, source_ended=source_ended,
source_sensor_classes=source_sensor_classes, source_sensors=source_sensors,
source_collections=source_collections, source_tasks=source_tasks,
mod_started=mod_started, mod_ended=mod_ended, job_type_ids=job_type_ids,
job_type_names=job_type_names, job_ids=job_ids,
file_names=file_names, job_outputs=job_outputs, recipe_ids=recipe_ids,
recipe_type_ids=recipe_type_ids, recipe_nodes=recipe_nodes, batch_ids=batch_ids,
order=order)
data_list = []
try:
for f in files:
entry = copy.deepcopy(template)
file_params = entry['files']
for p in file_params:
if file_params[p] == 'FILE_VALUE':
file_params[p] = [f.id]
data_list.append(DataV6(data=entry, do_validate=True).get_data())
except (KeyError, TypeError) as ex:
raise InvalidData('INVALID_TEMPLATE', "Specified template is invalid: %s" % ex)
return data_list
def validate_data_list(self, dataset_def, data_list):
"""Validates a list of data objects against a dataset
:param dataset_def: The dataset definition the member is a part of
        :type dataset_def: :class:`data.dataset.dataset.DataSetDefinition`
:param data_list: Data definitions of the dataset members
:type data_list: [:class:`data.data.data.Data`]
"""
is_valid = True
errors = []
warnings = []
for data in data_list:
try:
dataset_def.validate(data)
except (InvalidData, InvalidDataSetMember) as ex:
is_valid = False
errors.append(ex.error)
message = 'Dataset definition is invalid: %s' % ex
logger.info(message)
pass
# validate other fields
return DataSetValidation(is_valid, errors, warnings)
def create_dataset_members(self, dataset, data_list):
"""Creates a dataset member
:param dataset: The dataset the member is a part of
:type dataset: :class:`data.models.DataSet`
:param data_list: Data definitions of the dataset members
:type data_list: [:class:`data.data.data.Data`]
"""
with transaction.atomic():
dataset_members = []
datasetfiles = []
existing_scale_ids = DataSetFile.objects.get_file_ids(dataset_ids=[dataset.id])
for d in data_list:
dataset_member = DataSetMember()
dataset_member.dataset = dataset
dataset_member.data = convert_data_to_v6_json(d).get_dict()
dataset_member.file_ids = list(data_util.get_file_ids(d))
dataset_members.append(dataset_member)
datasetfiles.extend(DataSetFile.objects.create_dataset_files(dataset, d, existing_scale_ids))
                existing_scale_ids.extend(dataset_member.file_ids)
DataSetFile.objects.bulk_create(datasetfiles)
return DataSetMember.objects.bulk_create(dataset_members)
def get_dataset_members(self, dataset):
"""Returns dataset members for the given dataset
:returns: members for a given dataset
:rtype: QuerySet<DataSetMember>
"""
return self.all().filter(dataset=dataset).order_by('id')
def get_details_v6(self, dsm_id):
"""Gets additional details for the given dataset member id
:returns: The full dataset member for the given id
:rtype: :class:`data.models.DataSetMember`
"""
dsm = DataSetMember.objects.get(pk=dsm_id)
dsm.files = DataSetFile.objects.filter(dataset=dsm.dataset, scale_file_id__in=list(dsm.file_ids))
return dsm
class DataSetMember(models.Model):
"""
    Defines a single member of a dataset: the data (file references) that make up one entry in the dataset
:keyword dataset: Refers to dataset member belongs to
:type dataset: :class:`django.db.models.ForeignKey`
:keyword data: JSON description of the data in this DataSetMember.
:type data: :class: `django.contrib.postgres.fields.JSONField(default=dict)`
:keyword created: Created Time
:type created: datetime
"""
dataset = models.ForeignKey('data.DataSet', on_delete=models.PROTECT)
data = django.contrib.postgres.fields.JSONField(default=dict)
file_ids = django.contrib.postgres.fields.ArrayField(models.IntegerField(null=True))
created = models.DateTimeField(auto_now_add=True)
objects = DataSetMemberManager()
def get_dataset_definition(self):
"""Returns the dataset definition
:returns: The dataset definition
:rtype: :class:`data.dataset.dataset.DataSetDefinition`
"""
return self.dataset.get_definition()
def get_data(self):
"""Returns the data for this datasetmember
:returns: The data for this datasetmember
:rtype: :class:`data.data.data.Data`
"""
return DataV6(data=self.data, do_validate=False).get_data()
def get_v6_data_json(self):
"""Returns the data for this datasetmember as v6 json with the version stripped
:returns: The v6 JSON output data dict for this datasetmember
:rtype: dict
"""
return rest_utils.strip_schema_version(convert_data_to_v6_json(self.get_data()).get_dict())
class Meta(object):
"""meta information for the db"""
db_table = 'data_set_member'
class DataSetFileManager(models.Manager):
"""Manages the datasetfile model"""
def create_dataset_files(self, dataset, data, existing_scale_ids):
"""Creates dataset files for the given dataset and data"""
datasetfiles = []
for i in data.values.keys():
v = data.values[i]
if type(v) is FileValue:
for id in v.file_ids:
if id in existing_scale_ids:
continue
file = DataSetFile()
file.dataset = dataset
file.scale_file = ScaleFile.objects.get(pk=id)
file.parameter_name = i
datasetfiles.append(file)
return datasetfiles
def get_file_ids(self, dataset_ids, parameter_names=None):
"""Returns a list of the file IDs for the given datasets, optionally filtered by parameter_name.
:param dataset_ids: The ids of the associated datasets
        :type dataset_ids: :func:`list`
:param parameter_names: The parameter names to search for in the given datasets
        :type parameter_names: :func:`list`
:returns: The list of scale file IDs
:rtype: :func:`list`
"""
query = self.all().filter(dataset_id__in=list(dataset_ids))
if parameter_names:
query = query.filter(parameter_name__in=list(parameter_names))
return [result.scale_file_id for result in query.only('scale_file_id').distinct()]
def get_dataset_ids(self, file_ids, all_files=False):
"""Returns a list of the dataset IDs that contain the given files
:param file_ids: The ids of the files to look for
        :type file_ids: :func:`list`
:param all_files: Whether or not a dataset must contain all files or just some of the files in the list
:type all_files: bool
:returns: The list of dataset IDs
:rtype: :func:`list`
"""
results = []
if not all_files:
query = self.all().filter(scale_file_id__in=list(file_ids)).only('dataset_id').distinct()
results = [result.dataset_id for result in query]
else:
query = self.all().filter(scale_file_id__in=list(file_ids)).values('dataset_id').annotate(total=Count('dataset_id')).order_by('total')
for result in query:
if result['total'] == len(file_ids):
results.append(result['dataset_id'])
return results
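        # Illustrative example: with file_ids=[1, 2], all_files=True returns only
        # datasets that contain both files, while all_files=False returns any
        # dataset containing at least one of them.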
def get_files(self, dataset_ids, parameter_names=None):
"""Returns the dataset files associated with the given dataset_ids
:param dataset_ids: The ids of the associated datasets
        :type dataset_ids: :func:`list`
:param parameter_names: The parameter names to search for in the given datasets
        :type parameter_names: :func:`list`
:returns: The DataSetFiles associated with that dataset_id
:rtype: [:class:`data.models.DataSetFile`]
"""
files = self.all().filter(dataset_id__in=list(dataset_ids))
if parameter_names:
files = files.filter(parameter_name__in=list(parameter_names))
return files
def get_datasets(self, file_ids, all_files=False):
"""Returns the datasets associated with the given file_id
:param file_ids: The ids of the files to look for
:type file_ids: [int]
:param all_files: Whether or not a dataset must contain all files or just some of the files in the list
:type all_files: bool
:returns: The DataSets associated with that dataset_id
:rtype: [:class:`data.models.DataSet`]
"""
dataset_ids = self.get_dataset_ids(file_ids=file_ids, all_files=all_files)
datasets = DataSet.objects.filter(id__in=dataset_ids)
return datasets
def get_dataset_files(self, dataset_id):
"""Returns the dataset files associated with the given dataset_id
:param dataset_id: The id of the associated dataset
:type dataset_id: integer
:returns: The DataSetFiles associated with that dataset_id
:rtype: [:class:`data.models.DataSetFile`]
"""
files = DataSetFile.objects.filter(dataset_id=dataset_id)
return files
class DataSetFile(models.Model):
"""
The actual file in a dataset member
:keyword dataset: Refers to the dataset the file is a member of
:type dataset: :class:`django.db.models.ForeignKey`
:keyword scale_file: Refers to the ScaleFile
:type scale_file: :class:`django.db.models.ForeignKey`
:keyword parameter_name: Refers to the File parameter name
:type parameter_name: :class:`django.db.models.CharField`
"""
dataset = models.ForeignKey('data.DataSet', on_delete=models.PROTECT)
scale_file = models.ForeignKey('storage.ScaleFile', on_delete=models.PROTECT)
parameter_name = models.CharField(db_index=True, max_length=50)
objects = DataSetFileManager()
class Meta(object):
"""meta information for the db"""
db_table = 'data_set_file'
unique_together = ("dataset", "scale_file") | apache-2.0 | -5,873,155,974,511,240,000 | 3,161,414,115,556,294,000 | 39.745763 | 146 | 0.650193 | false |
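# Illustrative usage sketch -- not part of the original module. A minimal
# example of querying the managers defined above, assuming a configured
# Django/Scale environment; the import path, dataset ids and parameter name
# are placeholders.
from data.models import DataSetFile

# Scale file ids referenced by datasets 1 and 2, limited to one parameter name.
file_ids = DataSetFile.objects.get_file_ids(dataset_ids=[1, 2],
                                            parameter_names=['INPUT_FILE'])

# Datasets that contain *all* of those files rather than at least one of them.
datasets = DataSetFile.objects.get_datasets(file_ids=file_ids, all_files=True)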
mirror/vbox | src/VBox/Devices/EFI/Firmware/BaseTools/Source/Python/Common/VpdInfoFile.py | 11 | 11296 | ## @file
#
# This package manage the VPD PCD information file which will be generated
# by build tool's autogen.
# The VPD PCD information file will be input for third-party BPDG tool which
# is pointed by *_*_*_VPD_TOOL_GUID in conf/tools_def.txt
#
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
import os
import re
import Common.EdkLogger as EdkLogger
import Common.BuildToolError as BuildToolError
import subprocess
FILE_COMMENT_TEMPLATE = \
"""
## @file
#
# THIS IS AUTO-GENERATED FILE BY BUILD TOOLS AND PLEASE DO NOT MAKE MODIFICATION.
#
# This file lists all VPD information for a platform collected by build.exe.
#
# Copyright (c) 2010, Intel Corporation. All rights reserved.<BR>
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
"""
## The class manage VpdInfoFile.
#
# This file contains an ordered (based on position in the DSC file) list of the PCDs
# specified in the platform description file (DSC). The Value field that will be
# assigned to the PCD comes from the DSC file, INF file (if not defined in the DSC
# file) or the DEC file (if not defined in the INF file). This file is used as an
# input to the BPDG tool.
# Format for this file (using EBNF notation) is:
# <File> :: = [<CommentBlock>]
# [<PcdEntry>]*
# <CommentBlock> ::= ["#" <String> <EOL>]*
# <PcdEntry> ::= <PcdName> "|" <Offset> "|" <Size> "|" <Value> <EOL>
# <PcdName> ::= <TokenSpaceCName> "." <PcdCName>
# <TokenSpaceCName> ::= C Variable Name of the Token Space GUID
# <PcdCName> ::= C Variable Name of the PCD
# <Offset> ::= {"*"} {<HexNumber>}
# <HexNumber> ::= "0x" (a-fA-F0-9){1,8}
# <Size> ::= <HexNumber>
# <Value> ::= {<HexNumber>} {<NonNegativeInt>} {<QString>} {<Array>}
# <NonNegativeInt> ::= (0-9)+
# <QString> ::= ["L"] <DblQuote> <String> <DblQuote>
# <DblQuote> ::= 0x22
# <Array> ::= {<CArray>} {<NList>}
# <CArray> ::= "{" <HexNumber> ["," <HexNumber>]* "}"
# <NList> ::= <HexNumber> ["," <HexNumber>]*
#
class VpdInfoFile:
## The mapping dictionary from datum type to size string.
_MAX_SIZE_TYPE = {"BOOLEAN":"1", "UINT8":"1", "UINT16":"2", "UINT32":"4", "UINT64":"8"}
_rVpdPcdLine = None
## Constructor
def __init__(self):
## Dictionary for VPD in following format
#
# Key : PcdClassObject instance.
# @see BuildClassObject.PcdClassObject
# Value : offset in different SKU such as [sku1_offset, sku2_offset]
self._VpdArray = {}
## Add a VPD PCD collected from platform's autogen when building.
#
# @param vpds The list of VPD PCD collected for a platform.
# @see BuildClassObject.PcdClassObject
#
# @param offset integer value for VPD's offset in specific SKU.
#
def Add(self, Vpd, Offset):
if (Vpd == None):
EdkLogger.error("VpdInfoFile", BuildToolError.ATTRIBUTE_UNKNOWN_ERROR, "Invalid VPD PCD entry.")
if not (Offset >= 0 or Offset == "*"):
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID, "Invalid offset parameter: %s." % Offset)
if Vpd.DatumType == "VOID*":
if Vpd.MaxDatumSize <= 0:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid max datum size for VPD PCD %s.%s" % (Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
elif Vpd.DatumType in ["BOOLEAN", "UINT8", "UINT16", "UINT32", "UINT64"]:
if Vpd.MaxDatumSize == None or Vpd.MaxDatumSize == "":
Vpd.MaxDatumSize = VpdInfoFile._MAX_SIZE_TYPE[Vpd.DatumType]
else:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid DatumType %s for VPD PCD %s.%s" % (Vpd.DatumType, Vpd.TokenSpaceGuidCName, Vpd.TokenCName))
if Vpd not in self._VpdArray.keys():
#
# If there is no Vpd instance in dict, that imply this offset for a given SKU is a new one
#
self._VpdArray[Vpd] = [Offset]
else:
#
# If there is an offset for a specific SKU in dict, then append this offset for other sku to array.
#
self._VpdArray[Vpd].append(Offset)
## Generate VPD PCD information into a text file
#
# If parameter FilePath is invalid, then assert.
# If
# @param FilePath The given file path which would hold VPD information
def Write(self, FilePath):
if FilePath is None or len(FilePath) == 0:
EdkLogger.error("VpdInfoFile", BuildToolError.PARAMETER_INVALID,
"Invalid parameter FilePath: %s." % FilePath)
try:
fd = open(FilePath, "w")
except:
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_OPEN_FAILURE,
"Fail to open file %s for written." % FilePath)
try:
# write file header
fd.write(FILE_COMMENT_TEMPLATE)
# write each of PCD in VPD type
Pcds = self._VpdArray.keys()
Pcds.sort()
for Pcd in Pcds:
for Offset in self._VpdArray[Pcd]:
PcdValue = str(Pcd.SkuInfoList[Pcd.SkuInfoList.keys()[0]].DefaultValue).strip()
if PcdValue == "" :
PcdValue = Pcd.DefaultValue
fd.write("%s.%s|%s|%s|%s \n" % (Pcd.TokenSpaceGuidCName, Pcd.TokenCName, str(Offset).strip(), str(Pcd.MaxDatumSize).strip(),PcdValue))
except:
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_WRITE_FAILURE,
"Fail to write file %s" % FilePath)
fd.close()
## Read an existing VPD PCD info file.
#
# This routine will read VPD PCD information from existing file and construct
# internal PcdClassObject array.
# This routine could be used by third-party tool to parse VPD info file content.
#
# @param FilePath The full path string for existing VPD PCD info file.
def Read(self, FilePath):
try:
fd = open(FilePath, "r")
except:
EdkLogger.error("VpdInfoFile",
BuildToolError.FILE_OPEN_FAILURE,
"Fail to open file %s for written." % FilePath)
Lines = fd.readlines()
for Line in Lines:
Line = Line.strip()
if len(Line) == 0 or Line.startswith("#"):
continue
#
# the line must follow output format defined in BPDG spec.
#
try:
PcdName, Offset, Size, Value = Line.split("#")[0].split("|")
TokenSpaceName, PcdTokenName = PcdName.split(".")
except:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Fail to parse VPD information file %s" % FilePath)
Found = False
for VpdObject in self._VpdArray.keys():
if VpdObject.TokenSpaceGuidCName == TokenSpaceName and VpdObject.TokenCName == PcdTokenName.strip():
if self._VpdArray[VpdObject][0] == "*":
if Offset == "*":
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "The offset of %s has not been fixed up by third-party BPDG tool." % PcdName)
self._VpdArray[VpdObject][0] = Offset
Found = True
break
if not Found:
EdkLogger.error("BPDG", BuildToolError.PARSER_ERROR, "Can not find PCD defined in VPD guid file.")
## Get count of VPD PCD collected from platform's autogen when building.
#
# @return The integer count value
def GetCount(self):
Count = 0
for OffsetList in self._VpdArray.values():
Count += len(OffsetList)
return Count
## Get an offset value for a given VPD PCD
#
# Because BPDG only support one Sku, so only return offset for SKU default.
#
# @param vpd A given VPD PCD
def GetOffset(self, vpd):
if not self._VpdArray.has_key(vpd):
return None
if len(self._VpdArray[vpd]) == 0:
return None
return self._VpdArray[vpd]
## Call external BPDG tool to process VPD file
#
# @param ToolPath The string path name for BPDG tool
# @param VpdFileName The string path name for VPD information guid.txt
#
def CallExtenalBPDGTool(ToolPath, VpdFileName):
assert ToolPath != None, "Invalid parameter ToolPath"
assert VpdFileName != None and os.path.exists(VpdFileName), "Invalid parameter VpdFileName"
OutputDir = os.path.dirname(VpdFileName)
FileName = os.path.basename(VpdFileName)
BaseName, ext = os.path.splitext(FileName)
OutputMapFileName = os.path.join(OutputDir, "%s.map" % BaseName)
OutputBinFileName = os.path.join(OutputDir, "%s.bin" % BaseName)
try:
PopenObject = subprocess.Popen([ToolPath,
'-o', OutputBinFileName,
'-m', OutputMapFileName,
'-q',
'-f',
VpdFileName],
stdout=subprocess.PIPE,
stderr= subprocess.PIPE)
except Exception, X:
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, ExtraData="%s" % (str(X)))
(out, error) = PopenObject.communicate()
print out
while PopenObject.returncode == None :
PopenObject.wait()
if PopenObject.returncode != 0:
EdkLogger.debug(EdkLogger.DEBUG_1, "Fail to call BPDG tool", str(error))
EdkLogger.error("BPDG", BuildToolError.COMMAND_FAILURE, "Fail to execute BPDG tool with exit code: %d, the error message is: \n %s" % \
(PopenObject.returncode, str(error)))
return PopenObject.returncode
| gpl-2.0 | -1,568,804,577,756,926,700 | 4,078,979,152,865,147,400 | 43.125 | 349 | 0.575248 | false |
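# Illustrative usage sketch -- not part of the original file. A minimal example
# of driving VpdInfoFile with a stand-in PCD object; in a real build the Vpd
# argument is a BuildClassObject.PcdClassObject produced by autogen, so the
# fake class and values below are placeholders only.
class FakeVpdPcd(object):
    TokenSpaceGuidCName = 'gExampleTokenSpaceGuid'
    TokenCName = 'PcdExample'
    DatumType = 'UINT32'
    MaxDatumSize = None      # Add() fills this in from _MAX_SIZE_TYPE
    DefaultValue = '0x0'
    SkuInfoList = {}

vpd_file = VpdInfoFile()
vpd_file.Add(FakeVpdPcd(), 0x100)    # the offset may also be "*" for auto-placement
# vpd_file.Write('Build/vpd_info.txt') would emit <PcdName>|<Offset>|<Size>|<Value>
# lines, but needs a populated SkuInfoList, which this stand-in does not have.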
Drooids/odoo | addons/auth_crypt/auth_crypt.py | 49 | 3999 | import logging
from passlib.context import CryptContext
import openerp
from openerp.osv import fields, osv
openerp.addons.base.res.res_users.USER_PRIVATE_FIELDS.append('password_crypt')
_logger = logging.getLogger(__name__)
default_crypt_context = CryptContext(
# kdf which can be verified by the context. The default encryption kdf is
# the first of the list
['pbkdf2_sha512', 'md5_crypt'],
# deprecated algorithms are still verified as usual, but ``needs_update``
# will indicate that the stored hash should be replaced by a more recent
# algorithm. Passlib 1.6 supports an `auto` value which deprecates any
# algorithm but the default, but Debian only provides 1.5 so...
deprecated=['md5_crypt'],
)
class res_users(osv.osv):
_inherit = "res.users"
def init(self, cr):
_logger.info("Hashing passwords, may be slow for databases with many users...")
cr.execute("SELECT id, password FROM res_users"
" WHERE password IS NOT NULL"
" AND password != ''")
for uid, pwd in cr.fetchall():
self._set_password(cr, openerp.SUPERUSER_ID, uid, pwd)
def set_pw(self, cr, uid, id, name, value, args, context):
if value:
self._set_password(cr, uid, id, value, context=context)
self.invalidate_cache(cr, uid, context=context)
def get_pw(self, cr, uid, ids, name, args, context):
cr.execute('select id, password from res_users where id in %s', (tuple(map(int, ids)),))
return dict(cr.fetchall())
_columns = {
'password': fields.function(get_pw, fnct_inv=set_pw, type='char', string='Password', invisible=True, store=True),
'password_crypt': fields.char(string='Encrypted Password', invisible=True, copy=False),
}
def check_credentials(self, cr, uid, password):
# convert to base_crypt if needed
cr.execute('SELECT password, password_crypt FROM res_users WHERE id=%s AND active', (uid,))
encrypted = None
if cr.rowcount:
stored, encrypted = cr.fetchone()
if stored and not encrypted:
self._set_password(cr, uid, uid, stored)
self.invalidate_cache(cr, uid)
try:
return super(res_users, self).check_credentials(cr, uid, password)
except openerp.exceptions.AccessDenied:
if encrypted:
valid_pass, replacement = self._crypt_context(cr, uid, uid)\
.verify_and_update(password, encrypted)
if replacement is not None:
self._set_encrypted_password(cr, uid, uid, replacement)
if valid_pass:
return
raise
def _set_password(self, cr, uid, id, password, context=None):
""" Encrypts then stores the provided plaintext password for the user
``id``
"""
encrypted = self._crypt_context(cr, uid, id, context=context).encrypt(password)
self._set_encrypted_password(cr, uid, id, encrypted, context=context)
def _set_encrypted_password(self, cr, uid, id, encrypted, context=None):
""" Store the provided encrypted password to the database, and clears
any plaintext password
:param uid: id of the current user
:param id: id of the user on which the password should be set
"""
cr.execute(
"UPDATE res_users SET password='', password_crypt=%s WHERE id=%s",
(encrypted, id))
def _crypt_context(self, cr, uid, id, context=None):
""" Passlib CryptContext instance used to encrypt and verify
passwords. Can be overridden if technical, legal or political matters
require different kdfs than the provided default.
Requires a CryptContext as deprecation and upgrade notices are used
internally
"""
return default_crypt_context
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -5,472,844,722,883,584,000 | 7,790,676,799,069,440,000 | 39.393939 | 121 | 0.629907 | false |
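# Illustrative usage sketch -- not part of the original module. A minimal
# example of the passlib pattern the module relies on: new hashes use the
# first (default) scheme, and verify_and_update() returns a replacement hash
# whenever the stored one uses a deprecated scheme.
from passlib.context import CryptContext

ctx = CryptContext(['pbkdf2_sha512', 'md5_crypt'], deprecated=['md5_crypt'])
stored = ctx.encrypt('secret')                        # pbkdf2_sha512 hash
valid, replacement = ctx.verify_and_update('secret', stored)
# valid is True; replacement is None because the stored hash is already current.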
paolap/cwsl-mas | cwsl/tests/test_scheduler.py | 4 | 2051 | """
Authors: Tim Bedin, Tim Erwin
Copyright 2014 CSIRO
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Tests for the scheduler classes.
"""
import unittest
from cwsl.core.scheduler import SimpleExecManager
class TestScheduler(unittest.TestCase):
def setUp(self):
self.test_cons_dict = {'test_1': 'first_test',
'test_2': 'second_test'}
def test_schedulecommand(self):
""" Test that the scheduler can create a script for a simple command. """
in_files = ['infile_1']
out_files = ['outfile_1']
this_manager = SimpleExecManager(noexec=True)
this_manager.add_cmd(['echo'] + in_files + out_files, out_files)
this_manager.submit()
expected_string = """#!/bin/sh\nset -e\n\nmodule purge\nmkdir -p \necho infile_1 outfile_1\n"""
self.assertEqual(this_manager.job.to_str(), expected_string)
def test_command_annotation(self):
""" Test that the scheduler can correctly add annotations. """
in_files = ['infile_1.nc']
out_files = ['outfile_1.nc']
this_manager = SimpleExecManager(noexec=True)
this_manager.add_cmd(['echo'] + in_files + out_files, out_files, annotation="This is an annotation")
this_manager.submit()
expected_string = """#!/bin/sh\nset -e\n\nmodule purge\nmodule load nco\nmkdir -p \necho infile_1.nc outfile_1.nc\nncatted -O -a vistrails_history,global,a,c,"This is an annotation" outfile_1.nc\n"""
self.assertEqual(this_manager.job.to_str(), expected_string)
| apache-2.0 | -8,433,313,961,869,914,000 | -3,263,079,721,620,873,700 | 36.290909 | 207 | 0.673818 | false |
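# Illustrative usage sketch -- not part of the original file. A minimal example
# of the API exercised by the tests above; with noexec=True the manager only
# renders the job script instead of submitting it. File names are placeholders.
from cwsl.core.scheduler import SimpleExecManager

manager = SimpleExecManager(noexec=True)
manager.add_cmd(['echo', 'in.nc', 'out.nc'], ['out.nc'])
manager.submit()
print(manager.job.to_str())    # the generated shell script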
shakamunyi/neutron-vrrp | neutron/plugins/openvswitch/ovs_models_v2.py | 49 | 3896 | # Copyright 2011 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy import Boolean, Column, ForeignKey, Integer, String
from sqlalchemy.schema import UniqueConstraint
from neutron.db import model_base
from neutron.db import models_v2
from sqlalchemy import orm
class VlanAllocation(model_base.BASEV2):
"""Represents allocation state of vlan_id on physical network."""
__tablename__ = 'ovs_vlan_allocations'
physical_network = Column(String(64), nullable=False, primary_key=True)
vlan_id = Column(Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = Column(Boolean, nullable=False)
def __init__(self, physical_network, vlan_id):
self.physical_network = physical_network
self.vlan_id = vlan_id
self.allocated = False
def __repr__(self):
return "<VlanAllocation(%s,%d,%s)>" % (self.physical_network,
self.vlan_id, self.allocated)
class TunnelAllocation(model_base.BASEV2):
"""Represents allocation state of tunnel_id."""
__tablename__ = 'ovs_tunnel_allocations'
tunnel_id = Column(Integer, nullable=False, primary_key=True,
autoincrement=False)
allocated = Column(Boolean, nullable=False)
def __init__(self, tunnel_id):
self.tunnel_id = tunnel_id
self.allocated = False
def __repr__(self):
return "<TunnelAllocation(%d,%s)>" % (self.tunnel_id, self.allocated)
class NetworkBinding(model_base.BASEV2):
"""Represents binding of virtual network to physical realization."""
__tablename__ = 'ovs_network_bindings'
network_id = Column(String(36),
ForeignKey('networks.id', ondelete="CASCADE"),
primary_key=True)
# 'gre', 'vlan', 'flat', 'local'
network_type = Column(String(32), nullable=False)
physical_network = Column(String(64))
segmentation_id = Column(Integer) # tunnel_id or vlan_id
network = orm.relationship(
models_v2.Network,
backref=orm.backref("binding", lazy='joined',
uselist=False, cascade='delete'))
def __init__(self, network_id, network_type, physical_network,
segmentation_id):
self.network_id = network_id
self.network_type = network_type
self.physical_network = physical_network
self.segmentation_id = segmentation_id
def __repr__(self):
return "<NetworkBinding(%s,%s,%s,%d)>" % (self.network_id,
self.network_type,
self.physical_network,
self.segmentation_id)
class TunnelEndpoint(model_base.BASEV2):
"""Represents tunnel endpoint in RPC mode."""
__tablename__ = 'ovs_tunnel_endpoints'
__table_args__ = (
UniqueConstraint('id', name='uniq_ovs_tunnel_endpoints0id'),
model_base.BASEV2.__table_args__,
)
ip_address = Column(String(64), primary_key=True)
id = Column(Integer, nullable=False)
def __init__(self, ip_address, id):
self.ip_address = ip_address
self.id = id
def __repr__(self):
return "<TunnelEndpoint(%s,%s)>" % (self.ip_address, self.id)
| apache-2.0 | 4,254,410,345,747,820,000 | 2,314,136,544,950,823,400 | 35.411215 | 78 | 0.62038 | false |
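# Illustrative usage sketch -- not part of the original module. A minimal
# example of creating a binding row; `session` and the network id are
# placeholders that would normally come from neutron's db API and an existing
# Network record.
binding = NetworkBinding(network_id='0f5e0a4e-1111-2222-3333-444444444444',
                         network_type='vlan',
                         physical_network='physnet1',
                         segmentation_id=101)
# session.add(binding)
# session.flush()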
linzhaolover/myansible | capi_states/mitaka/gpu_volume.py | 2 | 3009 | #!/usr/bin/env python
"""
a module for sharing the host GPU, created on 2016/05/31
"""
import os
import re
#import shutil
from oslo_log import log
LOG = log.getLogger(__name__)
def search_file(topPath, reValue):
print "Jian>>> topPath:%s search:%s" % (topPath, reValue)
resultList = []
for rootpath, dirnames, filenames in os.walk(topPath):
for filename in filenames:
if re.findall(reValue, filename, re.IGNORECASE):
resultList.append(os.path.join(rootpath, filename))
print "Jian>>> lib files:%s" % resultList
return resultList
def search_nvidia_dir(dstPath):
dirList = []
patt = re.compile('nvidia-\d+$', re.IGNORECASE)
for dirname in os.listdir(dstPath):
if patt.findall(dirname):
dirList.append(os.path.join(dstPath, dirname))
return dirList
"""
{"message": "Build of instance 280cc136-7b4d-43cd-ae88-9f84b9f2a083 was re-scheduled: [Errno 13] Permission denied: '/etc/ld.so.conf.d/powerpc64le-linux-gnu_GL.conf'", "code": 500, "created": "2016-06-01T09:40:30Z"}
def create_local_conf():
libList = search_nvidia_dir("/usr/lib/")
content = sorted(libList)[-1]
if os.path.exists("/etc/ld.so.conf.d/powerpc64le-linux-gnu_GL.conf"):
shutil.copyfile("/etc/ld.so.conf.d/powerpc64le-linux-gnu_GL.conf", "/etc/ld.so.conf.d/powerpc64le-linux-gnu_GL.conf.bak")
os.remove("/etc/ld.so.conf.d/powerpc64le-linux-gnu_GL.conf")
with open("/etc/ld.so.conf.d/powerpc64le-linux-gnu_GL.conf", "w") as fh:
fh.writelines(content)
#os.system("cd /etc/ld.so.conf.d/ && echo '%s' > powerpc64le-linux-gnu_GL.conf" % content)
"""
def report_gpu_volume():
#create_local_conf()
ubuntuDict = {
"nvidia-smi" : "/usr/lib/",
"libnvidia-ml" : "/usr/lib/",
"libcuda" : "/usr/lib/powerpc64le-linux-gnu/",
"etc_ld" : "/etc/ld.so.conf.d/powerpc64le-linux-gnu_GL.conf", #this file need to be create manually before running, on 2016/06/01
"usr_bin" : "/usr/bin/nvidia-smi",
}
allDict = []
allDict.append(ubuntuDict)
bindList = []
for eachDict in allDict:
if "etc_ld" in eachDict.keys():
bindList.append(eachDict["etc_ld"])
if "usr_bin" in eachDict.keys():
bindList.append(eachDict["usr_bin"])
for nvidiaPath in search_nvidia_dir(eachDict["nvidia-smi"]):
print "nvidia path:", nvidiaPath
bindList.extend(search_file(os.path.join(nvidiaPath, "bin"), "nvidia-smi"))
for nvidiaPath in search_nvidia_dir(eachDict["libnvidia-ml"]):
bindList.extend(search_file(nvidiaPath, "libnvidia-ml\.so"))
bindList.extend(search_file(eachDict["libcuda"], "libcuda\.so"))
LOG.info("Jian>>> Bind List:%s" % bindList)
print "Jian>>> Bind List:%s" % bindList
return bindList
if __name__ == '__main__':
print "Begain."
report_gpu_volume()
| apache-2.0 | 4,564,567,083,345,911,300 | -1,928,541,733,770,706,400 | 36.6125 | 215 | 0.611831 | false |
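# Illustrative usage sketch -- not part of the original module. One way a
# caller (e.g. a container driver) might turn the reported bind list into
# read-only volume mappings; the consumer side is an assumption, not something
# this module defines.
def to_volume_args(bind_list):
    args = []
    for path in bind_list:
        args.extend(['-v', '%s:%s:ro' % (path, path)])
    return args

# to_volume_args(report_gpu_volume()) -> ['-v', '/usr/bin/nvidia-smi:/usr/bin/nvidia-smi:ro', ...]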
marcsans/cnn-physics-perception | phy/lib/python2.7/site-packages/sklearn/externals/joblib/__init__.py | 23 | 5101 | """ Joblib is a set of tools to provide **lightweight pipelining in
Python**. In particular, joblib offers:
1. transparent disk-caching of the output values and lazy re-evaluation
(memoize pattern)
2. easy simple parallel computing
3. logging and tracing of the execution
Joblib is optimized to be **fast** and **robust** in particular on large
data and has specific optimizations for `numpy` arrays. It is
**BSD-licensed**.
============================== ============================================
**User documentation**: http://pythonhosted.org/joblib
**Download packages**: http://pypi.python.org/pypi/joblib#downloads
**Source code**: http://github.com/joblib/joblib
**Report issues**: http://github.com/joblib/joblib/issues
============================== ============================================
Vision
--------
The vision is to provide tools to easily achieve better performance and
reproducibility when working with long running jobs.
* **Avoid computing the same thing twice**: code is rerun over and
over, for instance when prototyping computation-heavy jobs (as in
scientific development), but hand-crafted solutions to alleviate this
issue are error-prone and often lead to unreproducible results
* **Persist to disk transparently**: persisting in an efficient way
arbitrary objects containing large data is hard. Using
joblib's caching mechanism avoids hand-written persistence and
implicitly links the file on disk to the execution context of
the original Python object. As a result, joblib's persistence is
good for resuming an application's state or a computational job, e.g.
after a crash.
Joblib strives to address these problems while **leaving your code and
your flow control as unmodified as possible** (no framework, no new
paradigms).
Main features
------------------
1) **Transparent and fast disk-caching of output values:** a memoize or
make-like functionality for Python functions that works well for
arbitrary Python objects, including very large numpy arrays. Separate
persistence and flow-execution logic from domain logic or algorithmic
code by writing the operations as a set of steps with well-defined
inputs and outputs: Python functions. Joblib can save their
computation to disk and rerun it only if necessary::
>>> from sklearn.externals.joblib import Memory
>>> mem = Memory(cachedir='/tmp/joblib')
>>> import numpy as np
>>> a = np.vander(np.arange(3)).astype(np.float)
>>> square = mem.cache(np.square)
>>> b = square(a) # doctest: +ELLIPSIS
________________________________________________________________________________
[Memory] Calling square...
square(array([[ 0., 0., 1.],
[ 1., 1., 1.],
[ 4., 2., 1.]]))
___________________________________________________________square - 0...s, 0.0min
>>> c = square(a)
>>> # The above call did not trigger an evaluation
2) **Embarrassingly parallel helper:** to make it easy to write readable
parallel code and debug it quickly::
>>> from sklearn.externals.joblib import Parallel, delayed
>>> from math import sqrt
>>> Parallel(n_jobs=1)(delayed(sqrt)(i**2) for i in range(10))
[0.0, 1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]
3) **Logging/tracing:** The different functionalities will
progressively acquire better logging mechanism to help track what
has been ran, and capture I/O easily. In addition, Joblib will
provide a few I/O primitives, to easily define logging and
display streams, and provide a way of compiling a report.
We want to be able to quickly inspect what has been run.
4) **Fast compressed Persistence**: a replacement for pickle to work
efficiently on Python objects containing large data (
*joblib.dump* & *joblib.load* ).
..
>>> import shutil ; shutil.rmtree('/tmp/joblib/')
"""
# PEP0440 compatible formatted version, see:
# https://www.python.org/dev/peps/pep-0440/
#
# Generic release markers:
# X.Y
# X.Y.Z # For bugfix releases
#
# Admissible pre-release markers:
# X.YaN # Alpha release
# X.YbN # Beta release
# X.YrcN # Release Candidate
# X.Y # Final release
#
# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
# 'X.Y.dev0' is the canonical version of 'X.Y.dev'
#
__version__ = '0.10.3'
from .memory import Memory, MemorizedResult
from .logger import PrintTime
from .logger import Logger
from .hashing import hash
from .numpy_pickle import dump
from .numpy_pickle import load
from .parallel import Parallel
from .parallel import delayed
from .parallel import cpu_count
from .parallel import register_parallel_backend
from .parallel import parallel_backend
from .parallel import effective_n_jobs
__all__ = ['Memory', 'MemorizedResult', 'PrintTime', 'Logger', 'hash', 'dump',
'load', 'Parallel', 'delayed', 'cpu_count', 'effective_n_jobs',
'register_parallel_backend', 'parallel_backend']
| mit | -4,074,168,009,990,192,600 | 1,633,779,500,438,792,400 | 35.963768 | 87 | 0.648696 | false |
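# Illustrative usage sketch -- not part of the original package docstring. A
# minimal example of the compressed persistence feature (point 4 above); the
# file path is a placeholder.
import numpy as np
from sklearn.externals.joblib import dump, load

big = {'weights': np.arange(10000).reshape(100, 100)}
dump(big, '/tmp/model.pkl', compress=3)      # zlib-compressed pickle on disk
restored = load('/tmp/model.pkl')
assert (restored['weights'] == big['weights']).all()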
redhat-openstack/rdo-infra | ci-scripts/dlrnapi_promoter/test_registries_client_unit.py | 1 | 10398 | import subprocess
import yaml
try:
# Python3 imports
from unittest import mock
from unittest.mock import patch
except ImportError:
# Python2 imports
from mock import patch
import mock
from common import PromotionError
from dlrn_hash import DlrnCommitDistroHash, DlrnHash
from test_unit_fixtures import LegacyConfigSetup, hashes_test_cases
class TestPrepareExtraVars(LegacyConfigSetup):
def setUp(self):
super(TestPrepareExtraVars, self).setUp()
self.client = self.promoter.registries_client
self.dlrn_hash_commitdistro = DlrnCommitDistroHash(commit_hash='abc',
distro_hash='def',
component="comp1",
timestamp=1)
def test_setup(self):
error_msg = "Container push logfile is misplaced"
assert self.client.logfile != "", error_msg
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('logging.Logger.debug')
@patch('repo_client.RepoClient.get_versions_csv')
@patch('repo_client.RepoClient.get_commit_sha')
@patch('repo_client.RepoClient.get_containers_list')
def test_prepare_extra_vars_empty_missing_reader(self,
get_containers_mock,
get_commit_mock,
get_versions_mock,
mock_log_debug,
mock_log_info,
mock_log_error):
get_versions_mock.return_value = None
with self.assertRaises(PromotionError):
self.client.prepare_extra_vars(self.dlrn_hash_commitdistro,
"current-tripleo",
"tripleo-ci-testing")
get_versions_mock.assert_has_calls([
mock.call(self.dlrn_hash_commitdistro, "tripleo-ci-testing")
])
self.assertFalse(get_commit_mock.called)
self.assertFalse(get_containers_mock.called)
self.assertFalse(mock_log_debug.called)
self.assertFalse(mock_log_info.called)
mock_log_error.assert_has_calls([
mock.call("No versions.csv found")
])
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('logging.Logger.debug')
@patch('repo_client.RepoClient.get_versions_csv')
@patch('repo_client.RepoClient.get_commit_sha')
@patch('repo_client.RepoClient.get_containers_list')
def test_prepare_extra_vars_empty_missing_sha(self,
get_containers_mock,
get_commit_mock,
get_versions_mock,
mock_log_debug,
mock_log_info,
mock_log_error):
get_versions_mock.return_value = "reader"
get_commit_mock.return_value = None
with self.assertRaises(PromotionError):
self.client.prepare_extra_vars(self.dlrn_hash_commitdistro,
"current-tripleo",
"tripleo-ci-testing")
get_versions_mock.assert_has_calls([
mock.call(self.dlrn_hash_commitdistro, "tripleo-ci-testing")
])
get_commit_mock.assert_has_calls([
mock.call("reader", "openstack-tripleo-common")
])
self.assertFalse(get_containers_mock.called)
self.assertFalse(mock_log_debug.called)
self.assertFalse(mock_log_info.called)
mock_log_error.assert_has_calls([
mock.call("Versions.csv does not contain tripleo-common commit")
])
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('logging.Logger.debug')
@patch('repo_client.RepoClient.get_versions_csv')
@patch('repo_client.RepoClient.get_commit_sha')
@patch('repo_client.RepoClient.get_containers_list')
def test_prepare_extra_vars_empty_containers_list(self,
get_containers_mock,
get_commit_mock,
get_versions_mock,
mock_log_debug,
mock_log_info,
mock_log_error):
get_versions_mock.return_value = "reader"
get_commit_mock.return_value = "abc"
get_containers_mock.return_value = []
with self.assertRaises(PromotionError):
self.client.prepare_extra_vars(self.dlrn_hash_commitdistro,
"current-tripleo",
"tripleo-ci-testing")
get_versions_mock.assert_has_calls([
mock.call(self.dlrn_hash_commitdistro, "tripleo-ci-testing")
])
get_commit_mock.assert_has_calls([
mock.call("reader", "openstack-tripleo-common")
])
get_containers_mock.assert_has_calls([
mock.call("abc")
])
self.assertFalse(mock_log_debug.called)
self.assertFalse(mock_log_info.called)
mock_log_error.assert_has_calls([
mock.call("Containers list is empty")
])
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('logging.Logger.debug')
@patch('repo_client.RepoClient.get_versions_csv')
@patch('repo_client.RepoClient.get_commit_sha')
@patch('repo_client.RepoClient.get_containers_list')
def test_prepare_extra_vars_success(self,
get_containers_mock,
get_commit_mock,
get_versions_mock,
mock_log_debug,
mock_log_info,
mock_log_error):
get_versions_mock.return_value = "reader"
get_commit_mock.return_value = "abc"
get_containers_mock.return_value = ['a', 'b']
extra_vars_path = \
self.client.prepare_extra_vars(self.dlrn_hash_commitdistro,
"current-tripleo",
"tripleo-ci-testing")
self.assertIsInstance(extra_vars_path, str)
self.assertIn(".yaml", extra_vars_path)
with open(extra_vars_path) as extra_vars_file:
extra_vars = yaml.safe_load(stream=extra_vars_file)
self.assertIsInstance(extra_vars, dict)
self.assertDictEqual(extra_vars, {
'release': "master",
'script_root': mock.ANY,
'distro_name': "centos",
'distro_version': '7',
'manifest_push': True,
'target_registries_push': True,
'candidate_label': "tripleo-ci-testing",
"named_label": "current-tripleo",
"source_namespace": "tripleomaster",
"target_namespace": "tripleomaster",
"commit_hash": self.dlrn_hash_commitdistro.commit_hash,
"distro_hash": self.dlrn_hash_commitdistro.distro_hash,
"full_hash": self.dlrn_hash_commitdistro.full_hash,
"containers_list": ['a', 'b']
})
get_versions_mock.assert_has_calls([
mock.call(self.dlrn_hash_commitdistro, "tripleo-ci-testing")
])
get_commit_mock.assert_has_calls([
mock.call("reader", "openstack-tripleo-common")
])
get_containers_mock.assert_has_calls([
mock.call("abc")
])
mock_log_debug.assert_has_calls([
mock.call("Crated extra vars file at %s", mock.ANY)
])
mock_log_info.assert_has_calls([
mock.call("Passing extra vars to playbook: %s", mock.ANY)
])
self.assertFalse(mock_log_error.called)
class TestPromote(LegacyConfigSetup):
def setUp(self):
super(TestPromote, self).setUp()
self.client = self.promoter.registries_client
self.dlrn_hash_commitdistro = DlrnCommitDistroHash(
commit_hash='abc',
distro_hash='def',
component="comp1",
timestamp=1)
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('os.unlink')
@patch('registries_client.RegistriesClient.prepare_extra_vars')
@mock.patch('subprocess.check_output')
def test_promote_success(self, check_output_mock,
extra_vars_mock,
unlink_mock,
mock_log_info,
mock_log_error
):
candidate_hash =\
DlrnHash(source=hashes_test_cases['aggregate']['dict']['valid'])
target_label = "test"
check_output_mock.return_value = "test log"
self.client.promote(candidate_hash, target_label)
self.assertTrue(check_output_mock.called)
self.assertFalse(mock_log_error.called)
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('os.unlink')
@patch('registries_client.RegistriesClient.prepare_extra_vars')
@mock.patch('subprocess.check_output')
def test_promote_failure(self, check_output_mock,
extra_vars_mock,
unlink_mock,
mock_log_info,
mock_log_error
):
candidate_hash = \
DlrnHash(source=hashes_test_cases['aggregate']['dict']['valid'])
target_label = "test"
exception = subprocess.CalledProcessError(1, 2)
exception.output = b"test"
check_output_mock.side_effect = exception
with self.assertRaises(PromotionError):
self.client.promote(candidate_hash, target_label)
self.assertTrue(mock_log_error.called)
| apache-2.0 | 1,774,941,189,581,475,600 | 8,134,547,677,034,084,000 | 41.790123 | 77 | 0.525293 | false |
alphatwirl/alphatwirl | alphatwirl/summary/Scan.py | 1 | 1209 | # Tai Sakuma <[email protected]>
##__________________________________________________________________||
import numpy as np
import copy
##__________________________________________________________________||
class Scan:
def __init__(self, val=None, weight=1, contents=None):
if contents is not None:
self.contents = contents
return
if val is None:
self.contents = [ ]
return
self.contents = [val]
def __add__(self, other):
contents = self.contents + other.contents
return self.__class__(contents=contents)
def __radd__(self, other):
# is called with other = 0 when e.g. sum([obj1, obj2])
if other == 0:
return self.__class__() + self
raise TypeError('unsupported: {!r} + {!r}'.format(other, self))
def __repr__(self):
return '{}(contents={})'.format(self.__class__.__name__, self.contents)
def __eq__(self, other):
return self.contents == other.contents
def __copy__(self):
contents = list(self.contents)
return self.__class__(contents=contents)
##__________________________________________________________________||
| bsd-3-clause | -5,533,702,731,044,276,000 | 9,160,441,691,054,043,000 | 28.487805 | 79 | 0.456576 | false |
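# Illustrative usage sketch -- not part of the original module. A minimal
# example of how Scan objects (defined above) accumulate values; sum() works
# because __radd__ handles the initial 0.
scans = [Scan(val=1.5), Scan(val=2.0), Scan(val=3.5)]
combined = sum(scans)
print(combined)      # Scan(contents=[1.5, 2.0, 3.5])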
paukenba/youtube-dl | youtube_dl/extractor/xhamster.py | 58 | 6318 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
unified_strdate,
str_to_int,
int_or_none,
parse_duration,
)
class XHamsterIE(InfoExtractor):
_VALID_URL = r'(?P<proto>https?)://(?:.+?\.)?xhamster\.com/movies/(?P<id>[0-9]+)/(?P<seo>.+?)\.html(?:\?.*)?'
_TESTS = [
{
'url': 'http://xhamster.com/movies/1509445/femaleagent_shy_beauty_takes_the_bait.html',
'info_dict': {
'id': '1509445',
'ext': 'mp4',
'title': 'FemaleAgent Shy beauty takes the bait',
'upload_date': '20121014',
'uploader': 'Ruseful2011',
'duration': 893,
'age_limit': 18,
}
},
{
'url': 'http://xhamster.com/movies/2221348/britney_spears_sexy_booty.html?hd',
'info_dict': {
'id': '2221348',
'ext': 'mp4',
'title': 'Britney Spears Sexy Booty',
'upload_date': '20130914',
'uploader': 'jojo747400',
'duration': 200,
'age_limit': 18,
}
},
{
'url': 'https://xhamster.com/movies/2272726/amber_slayed_by_the_knight.html',
'only_matching': True,
},
]
def _real_extract(self, url):
def extract_video_url(webpage, name):
return self._search_regex(
[r'''file\s*:\s*(?P<q>["'])(?P<mp4>.+?)(?P=q)''',
r'''<a\s+href=(?P<q>["'])(?P<mp4>.+?)(?P=q)\s+class=["']mp4Thumb''',
r'''<video[^>]+file=(?P<q>["'])(?P<mp4>.+?)(?P=q)[^>]*>'''],
webpage, name, group='mp4')
def is_hd(webpage):
return '<div class=\'icon iconHD\'' in webpage
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
seo = mobj.group('seo')
proto = mobj.group('proto')
mrss_url = '%s://xhamster.com/movies/%s/%s.html' % (proto, video_id, seo)
webpage = self._download_webpage(mrss_url, video_id)
title = self._html_search_regex(r'<title>(?P<title>.+?) - xHamster\.com</title>', webpage, 'title')
# Only a few videos have an description
mobj = re.search(r'<span>Description: </span>([^<]+)', webpage)
description = mobj.group(1) if mobj else None
upload_date = self._html_search_regex(r'hint=\'(\d{4}-\d{2}-\d{2}) \d{2}:\d{2}:\d{2} [A-Z]{3,4}\'',
webpage, 'upload date', fatal=False)
if upload_date:
upload_date = unified_strdate(upload_date)
uploader = self._html_search_regex(
r"<a href='[^']+xhamster\.com/user/[^>]+>(?P<uploader>[^<]+)",
webpage, 'uploader', default='anonymous')
thumbnail = self._search_regex(
[r'''thumb\s*:\s*(?P<q>["'])(?P<thumbnail>.+?)(?P=q)''',
r'''<video[^>]+poster=(?P<q>["'])(?P<thumbnail>.+?)(?P=q)[^>]*>'''],
webpage, 'thumbnail', fatal=False, group='thumbnail')
duration = parse_duration(self._html_search_regex(r'<span>Runtime:</span> (\d+:\d+)</div>',
webpage, 'duration', fatal=False))
view_count = self._html_search_regex(r'<span>Views:</span> ([^<]+)</div>', webpage, 'view count', fatal=False)
if view_count:
view_count = str_to_int(view_count)
mobj = re.search(r"hint='(?P<likecount>\d+) Likes / (?P<dislikecount>\d+) Dislikes'", webpage)
(like_count, dislike_count) = (mobj.group('likecount'), mobj.group('dislikecount')) if mobj else (None, None)
mobj = re.search(r'</label>Comments \((?P<commentcount>\d+)\)</div>', webpage)
comment_count = mobj.group('commentcount') if mobj else 0
age_limit = self._rta_search(webpage)
hd = is_hd(webpage)
format_id = 'hd' if hd else 'sd'
video_url = extract_video_url(webpage, format_id)
formats = [{
'url': video_url,
'format_id': 'hd' if hd else 'sd',
'preference': 1,
}]
if not hd:
mrss_url = self._search_regex(r'<link rel="canonical" href="([^"]+)', webpage, 'mrss_url')
webpage = self._download_webpage(mrss_url + '?hd', video_id, note='Downloading HD webpage')
if is_hd(webpage):
video_url = extract_video_url(webpage, 'hd')
formats.append({
'url': video_url,
'format_id': 'hd',
'preference': 2,
})
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'description': description,
'upload_date': upload_date,
'uploader': uploader,
'thumbnail': thumbnail,
'duration': duration,
'view_count': view_count,
'like_count': int_or_none(like_count),
'dislike_count': int_or_none(dislike_count),
'comment_count': int_or_none(comment_count),
'age_limit': age_limit,
'formats': formats,
}
class XHamsterEmbedIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?xhamster\.com/xembed\.php\?video=(?P<id>\d+)'
_TEST = {
'url': 'http://xhamster.com/xembed.php?video=3328539',
'info_dict': {
'id': '3328539',
'ext': 'mp4',
'title': 'Pen Masturbation',
'upload_date': '20140728',
'uploader_id': 'anonymous',
'duration': 5,
'age_limit': 18,
}
}
@staticmethod
def _extract_urls(webpage):
return [url for _, url in re.findall(
r'<iframe[^>]+?src=(["\'])(?P<url>(?:https?:)?//(?:www\.)?xhamster\.com/xembed\.php\?video=\d+)\1',
webpage)]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
video_url = self._search_regex(
r'href="(https?://xhamster\.com/movies/%s/[^"]+\.html[^"]*)"' % video_id,
webpage, 'xhamster url')
return self.url_result(video_url, 'XHamster')
| unlicense | -753,032,551,650,821,400 | 2,196,440,705,410,368,300 | 35.732558 | 118 | 0.48987 | false |
oasis-open/cti-python-stix2 | stix2/test/v20/test_kill_chain_phases.py | 1 | 1652 | """Tests for stix.ExternalReference"""
import pytest
import stix2
LMCO_RECON = """{
"kill_chain_name": "lockheed-martin-cyber-kill-chain",
"phase_name": "reconnaissance"
}"""
def test_lockheed_martin_cyber_kill_chain():
recon = stix2.v20.KillChainPhase(
kill_chain_name="lockheed-martin-cyber-kill-chain",
phase_name="reconnaissance",
)
assert recon.serialize(pretty=True) == LMCO_RECON
FOO_PRE_ATTACK = """{
"kill_chain_name": "foo",
"phase_name": "pre-attack"
}"""
def test_kill_chain_example():
preattack = stix2.v20.KillChainPhase(
kill_chain_name="foo",
phase_name="pre-attack",
)
assert preattack.serialize(pretty=True) == FOO_PRE_ATTACK
def test_kill_chain_required_properties():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.v20.KillChainPhase()
assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["kill_chain_name", "phase_name"]
def test_kill_chain_required_property_chain_name():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.v20.KillChainPhase(phase_name="weaponization")
assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["kill_chain_name"]
def test_kill_chain_required_property_phase_name():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.v20.KillChainPhase(kill_chain_name="lockheed-martin-cyber-kill-chain")
assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["phase_name"]
| bsd-3-clause | 3,304,618,519,551,369,700 | -6,816,018,493,820,956,000 | 26.081967 | 84 | 0.700969 | false |
zerebubuth/mapnik | scons/scons-local-2.5.0/SCons/Tool/msvc.py | 3 | 11464 | """engine.SCons.Tool.msvc
Tool-specific initialization for Microsoft Visual C/C++.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2016 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/msvc.py rel_2.5.0:3543:937e55cd78f7 2016/04/09 11:29:54 bdbaddog"
import os.path
import re
import sys
import SCons.Action
import SCons.Builder
import SCons.Errors
import SCons.Platform.win32
import SCons.Tool
import SCons.Tool.msvs
import SCons.Util
import SCons.Warnings
import SCons.Scanner.RC
from MSCommon import msvc_exists, msvc_setup_env_once
CSuffixes = ['.c', '.C']
CXXSuffixes = ['.cc', '.cpp', '.cxx', '.c++', '.C++']
def validate_vars(env):
"""Validate the PCH and PCHSTOP construction variables."""
if 'PCH' in env and env['PCH']:
if 'PCHSTOP' not in env:
raise SCons.Errors.UserError("The PCHSTOP construction must be defined if PCH is defined.")
if not SCons.Util.is_String(env['PCHSTOP']):
raise SCons.Errors.UserError("The PCHSTOP construction variable must be a string: %r"%env['PCHSTOP'])
def pch_emitter(target, source, env):
"""Adds the object file target."""
validate_vars(env)
pch = None
obj = None
for t in target:
if SCons.Util.splitext(str(t))[1] == '.pch':
pch = t
if SCons.Util.splitext(str(t))[1] == '.obj':
obj = t
if not obj:
obj = SCons.Util.splitext(str(pch))[0]+'.obj'
target = [pch, obj] # pch must be first, and obj second for the PCHCOM to work
return (target, source)
def object_emitter(target, source, env, parent_emitter):
"""Sets up the PCH dependencies for an object file."""
validate_vars(env)
parent_emitter(target, source, env)
# Add a dependency, but only if the target (e.g. 'Source1.obj')
# doesn't correspond to the pre-compiled header ('Source1.pch').
# If the basenames match, then this was most likely caused by
# someone adding the source file to both the env.PCH() and the
# env.Program() calls, and adding the explicit dependency would
# cause a cycle on the .pch file itself.
#
# See issue #2505 for a discussion of what to do if it turns
# out this assumption causes trouble in the wild:
# http://scons.tigris.org/issues/show_bug.cgi?id=2505
if 'PCH' in env:
pch = env['PCH']
if str(target[0]) != SCons.Util.splitext(str(pch))[0] + '.obj':
env.Depends(target, pch)
return (target, source)
def static_object_emitter(target, source, env):
return object_emitter(target, source, env,
SCons.Defaults.StaticObjectEmitter)
def shared_object_emitter(target, source, env):
return object_emitter(target, source, env,
SCons.Defaults.SharedObjectEmitter)
pch_action = SCons.Action.Action('$PCHCOM', '$PCHCOMSTR')
pch_builder = SCons.Builder.Builder(action=pch_action, suffix='.pch',
emitter=pch_emitter,
source_scanner=SCons.Tool.SourceFileScanner)
# Logic to build .rc files into .res files (resource files)
res_scanner = SCons.Scanner.RC.RCScan()
res_action = SCons.Action.Action('$RCCOM', '$RCCOMSTR')
res_builder = SCons.Builder.Builder(action=res_action,
src_suffix='.rc',
suffix='.res',
src_builder=[],
source_scanner=res_scanner)
def msvc_batch_key(action, env, target, source):
"""
Returns a key to identify unique batches of sources for compilation.
If batching is enabled (via the $MSVC_BATCH setting), then all
target+source pairs that use the same action, defined by the same
environment, and have the same target and source directories, will
be batched.
Returning None specifies that the specified target+source should not
be batched with other compilations.
"""
# Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH
# was set to False. This new version should work better.
# Note we need to do the env.subst so $MSVC_BATCH can be a reference to
# another construction variable, which is why we test for False and 0
# as strings.
if not 'MSVC_BATCH' in env or env.subst('$MSVC_BATCH') in ('0', 'False', '', None):
# We're not using batching; return no key.
return None
t = target[0]
s = source[0]
if os.path.splitext(t.name)[0] != os.path.splitext(s.name)[0]:
# The base names are different, so this *must* be compiled
# separately; return no key.
return None
return (id(action), id(env), t.dir, s.dir)
def msvc_output_flag(target, source, env, for_signature):
"""
Returns the correct /Fo flag for batching.
If batching is disabled or there's only one source file, then we
return an /Fo string that specifies the target explicitly. Otherwise,
we return an /Fo string that just specifies the first target's
directory (where the Visual C/C++ compiler will put the .obj files).
"""
# Fixing MSVC_BATCH mode. Previous if did not work when MSVC_BATCH
# was set to False. This new version should work better. Removed
# len(source)==1 as batch mode can compile only one file
# (and it also fixed problem with compiling only one changed file
# with batch mode enabled)
if not 'MSVC_BATCH' in env or env.subst('$MSVC_BATCH') in ('0', 'False', '', None):
return '/Fo$TARGET'
else:
# The Visual C/C++ compiler requires a \ at the end of the /Fo
# option to indicate an output directory. We use os.sep here so
# that the test(s) for this can be run on non-Windows systems
# without having a hard-coded backslash mess up command-line
# argument parsing.
return '/Fo${TARGET.dir}' + os.sep
CAction = SCons.Action.Action("$CCCOM", "$CCCOMSTR",
batch_key=msvc_batch_key,
targets='$CHANGED_TARGETS')
ShCAction = SCons.Action.Action("$SHCCCOM", "$SHCCCOMSTR",
batch_key=msvc_batch_key,
targets='$CHANGED_TARGETS')
CXXAction = SCons.Action.Action("$CXXCOM", "$CXXCOMSTR",
batch_key=msvc_batch_key,
targets='$CHANGED_TARGETS')
ShCXXAction = SCons.Action.Action("$SHCXXCOM", "$SHCXXCOMSTR",
batch_key=msvc_batch_key,
targets='$CHANGED_TARGETS')
def generate(env):
"""Add Builders and construction variables for MSVC++ to an Environment."""
static_obj, shared_obj = SCons.Tool.createObjBuilders(env)
# TODO(batch): shouldn't reach in to cmdgen this way; necessary
# for now to bypass the checks in Builder.DictCmdGenerator.__call__()
# and allow .cc and .cpp to be compiled in the same command line.
static_obj.cmdgen.source_ext_match = False
shared_obj.cmdgen.source_ext_match = False
for suffix in CSuffixes:
static_obj.add_action(suffix, CAction)
shared_obj.add_action(suffix, ShCAction)
static_obj.add_emitter(suffix, static_object_emitter)
shared_obj.add_emitter(suffix, shared_object_emitter)
for suffix in CXXSuffixes:
static_obj.add_action(suffix, CXXAction)
shared_obj.add_action(suffix, ShCXXAction)
static_obj.add_emitter(suffix, static_object_emitter)
shared_obj.add_emitter(suffix, shared_object_emitter)
env['CCPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Z7") or ""}'])
env['CCPCHFLAGS'] = SCons.Util.CLVar(['${(PCH and "/Yu%s \\\"/Fp%s\\\""%(PCHSTOP or "",File(PCH))) or ""}'])
env['_MSVC_OUTPUT_FLAG'] = msvc_output_flag
env['_CCCOMCOM'] = '$CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS $CCPCHFLAGS $CCPDBFLAGS'
env['CC'] = 'cl'
env['CCFLAGS'] = SCons.Util.CLVar('/nologo')
env['CFLAGS'] = SCons.Util.CLVar('')
env['CCCOM'] = '${TEMPFILE("$CC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CFLAGS $CCFLAGS $_CCCOMCOM","$CCCOMSTR")}'
env['SHCC'] = '$CC'
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
env['SHCFLAGS'] = SCons.Util.CLVar('$CFLAGS')
env['SHCCCOM'] = '${TEMPFILE("$SHCC $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCCCOMSTR")}'
env['CXX'] = '$CC'
env['CXXFLAGS'] = SCons.Util.CLVar('$( /TP $)')
env['CXXCOM'] = '${TEMPFILE("$CXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $CXXFLAGS $CCFLAGS $_CCCOMCOM","$CXXCOMSTR")}'
env['SHCXX'] = '$CXX'
env['SHCXXFLAGS'] = SCons.Util.CLVar('$CXXFLAGS')
env['SHCXXCOM'] = '${TEMPFILE("$SHCXX $_MSVC_OUTPUT_FLAG /c $CHANGED_SOURCES $SHCXXFLAGS $SHCCFLAGS $_CCCOMCOM","$SHCXXCOMSTR")}'
env['CPPDEFPREFIX'] = '/D'
env['CPPDEFSUFFIX'] = ''
env['INCPREFIX'] = '/I'
env['INCSUFFIX'] = ''
# env.Append(OBJEMITTER = [static_object_emitter])
# env.Append(SHOBJEMITTER = [shared_object_emitter])
env['STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME'] = 1
env['RC'] = 'rc'
env['RCFLAGS'] = SCons.Util.CLVar('')
env['RCSUFFIXES']=['.rc','.rc2']
env['RCCOM'] = '$RC $_CPPDEFFLAGS $_CPPINCFLAGS $RCFLAGS /fo$TARGET $SOURCES'
env['BUILDERS']['RES'] = res_builder
env['OBJPREFIX'] = ''
env['OBJSUFFIX'] = '.obj'
env['SHOBJPREFIX'] = '$OBJPREFIX'
env['SHOBJSUFFIX'] = '$OBJSUFFIX'
# Set-up ms tools paths
msvc_setup_env_once(env)
env['CFILESUFFIX'] = '.c'
env['CXXFILESUFFIX'] = '.cc'
env['PCHPDBFLAGS'] = SCons.Util.CLVar(['${(PDB and "/Yd") or ""}'])
env['PCHCOM'] = '$CXX /Fo${TARGETS[1]} $CXXFLAGS $CCFLAGS $CPPFLAGS $_CPPDEFFLAGS $_CPPINCFLAGS /c $SOURCES /Yc$PCHSTOP /Fp${TARGETS[0]} $CCPDBFLAGS $PCHPDBFLAGS'
env['BUILDERS']['PCH'] = pch_builder
if 'ENV' not in env:
env['ENV'] = {}
if 'SystemRoot' not in env['ENV']: # required for dlls in the winsxs folders
env['ENV']['SystemRoot'] = SCons.Platform.win32.get_system_root()
def exists(env):
return msvc_exists()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| lgpl-2.1 | 231,577,212,311,972,130 | -8,966,822,351,965,109,000 | 40.23741 | 166 | 0.638608 | false |
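# Illustrative usage sketch -- not part of the original tool module. A minimal
# SConstruct fragment that enables the batching implemented by msvc_batch_key
# and msvc_output_flag above; source names are placeholders. Inside an
# SConstruct, Environment and the builders are available without imports.
env = Environment(tools=['msvc'], MSVC_BATCH=True)
env.Append(CPPDEFINES=['NDEBUG'], CCFLAGS=['/O2'])
env.Program('app', ['main.cpp', 'util.cpp'])   # sources sharing a target dir compile in one cl call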
tvalacarta/tvalacarta | python/main-classic/lib/youtube_dl/extractor/vimple.py | 64 | 1968 | from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import int_or_none
class SprutoBaseIE(InfoExtractor):
def _extract_spruto(self, spruto, video_id):
playlist = spruto['playlist'][0]
title = playlist['title']
video_id = playlist.get('videoId') or video_id
thumbnail = playlist.get('posterUrl') or playlist.get('thumbnailUrl')
duration = int_or_none(playlist.get('duration'))
formats = [{
'url': f['url'],
} for f in playlist['video']]
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'thumbnail': thumbnail,
'duration': duration,
'formats': formats,
}
class VimpleIE(SprutoBaseIE):
IE_DESC = 'Vimple - one-click video hosting'
_VALID_URL = r'https?://(?:player\.vimple\.(?:ru|co)/iframe|vimple\.(?:ru|co))/(?P<id>[\da-f-]{32,36})'
_TESTS = [{
'url': 'http://vimple.ru/c0f6b1687dcd4000a97ebe70068039cf',
'md5': '2e750a330ed211d3fd41821c6ad9a279',
'info_dict': {
'id': 'c0f6b168-7dcd-4000-a97e-be70068039cf',
'ext': 'mp4',
'title': 'Sunset',
'duration': 20,
'thumbnail': r're:https?://.*?\.jpg',
},
}, {
'url': 'http://player.vimple.ru/iframe/52e1beec-1314-4a83-aeac-c61562eadbf9',
'only_matching': True,
}, {
'url': 'http://vimple.co/04506a053f124483b8fb05ed73899f19',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(
'http://player.vimple.ru/iframe/%s' % video_id, video_id)
spruto = self._parse_json(
self._search_regex(
r'sprutoData\s*:\s*({.+?}),\r\n', webpage, 'spruto data'),
video_id)
return self._extract_spruto(spruto, video_id)
| gpl-3.0 | -1,450,133,906,048,034,800 | 6,589,613,910,608,152,000 | 31.262295 | 107 | 0.550305 | false |
guymakam/Kodi-Israel | plugin.video.ilten/resources/m3u8/model.py | 3 | 11066 | from collections import namedtuple
import os
import errno
import math
import urlparse
import re
from resources.m3u8 import parser
class M3U8(object):
'''
Represents a single M3U8 playlist. Should be instantiated with
the content as string.
Parameters:
`content`
the m3u8 content as string
`basepath`
all urls (key and segments url) will be updated with this basepath,
ex.:
basepath = "http://videoserver.com/hls"
/foo/bar/key.bin --> http://videoserver.com/hls/key.bin
http://vid.com/segment1.ts --> http://videoserver.com/hls/segment1.ts
can be passed as a parameter or set as an attribute on the ``M3U8`` object.
`baseuri`
URI the playlist comes from. It is propagated to SegmentList and Key
ex.: http://example.com/path/to
Attributes:
`key`
it's a `Key` object, the EXT-X-KEY from m3u8. Or None
`segments`
a `SegmentList` object, representing the list of `Segment`s from this playlist
`is_variant`
Returns true if this M3U8 is a variant playlist, with links to
other M3U8s with different bitrates.
If true, `playlists` is a list of the playlists available.
`is_endlist`
Returns true if EXT-X-ENDLIST tag present in M3U8.
http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.8
`playlists`
If this is a variant playlist (`is_variant` is True), returns a list of
Playlist objects
`target_duration`
Returns the EXT-X-TARGETDURATION as an integer
http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.2
`media_sequence`
Returns the EXT-X-MEDIA-SEQUENCE as an integer
http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.3
`version`
Return the EXT-X-VERSION as is
`allow_cache`
Return the EXT-X-ALLOW-CACHE as is
`files`
Returns an iterable with all files from playlist, in order. This includes
segments and key uri, if present.
`baseuri`
It is a property (getter and setter) used by
SegmentList and Key to have absolute URIs.
'''
simple_attributes = (
# obj attribute # parser attribute
('is_variant', 'is_variant'),
('is_endlist', 'is_endlist'),
('target_duration', 'targetduration'),
('media_sequence', 'media_sequence'),
('version', 'version'),
('allow_cache', 'allow_cache'),
)
def __init__(self, content=None, basepath=None, baseuri=None):
if content is not None:
self.data = parser.parse(content)
else:
self.data = {}
self._baseuri = baseuri
self._initialize_attributes()
self.basepath = basepath
def _initialize_attributes(self):
self.key = Key(baseuri=self.baseuri, **self.data['key']) if 'key' in self.data else None
self.segments = SegmentList([ Segment(baseuri=self.baseuri, **params)
for params in self.data.get('segments', []) ])
for attr, param in self.simple_attributes:
setattr(self, attr, self.data.get(param))
self.files = []
if self.key:
self.files.append(self.key.uri)
self.files.extend(self.segments.uri)
self.playlists = PlaylistList([ Playlist(baseuri=self.baseuri, **playlist)
for playlist in self.data.get('playlists', []) ])
def __unicode__(self):
return self.dumps()
@property
def baseuri(self):
return self._baseuri
@baseuri.setter
def baseuri(self, new_baseuri):
self._baseuri = new_baseuri
self.segments.baseuri = new_baseuri
@property
def basepath(self):
return self._basepath
@basepath.setter
def basepath(self, newbasepath):
self._basepath = newbasepath
self._update_basepath()
def _update_basepath(self):
if self._basepath is None:
return
if self.key:
self.key.basepath = self.basepath
self.segments.basepath = self.basepath
self.playlists.basepath = self.basepath
def add_playlist(self, playlist):
self.is_variant = True
self.playlists.append(playlist)
def dumps(self):
'''
Returns the current m3u8 as a string.
You could also use unicode(<this obj>) or str(<this obj>)
'''
output = ['#EXTM3U']
if self.media_sequence:
output.append('#EXT-X-MEDIA-SEQUENCE:' + str(self.media_sequence))
if self.allow_cache:
output.append('#EXT-X-ALLOW-CACHE:' + self.allow_cache.upper())
if self.version:
output.append('#EXT-X-VERSION:' + self.version)
if self.key:
output.append(str(self.key))
if self.target_duration:
output.append('#EXT-X-TARGETDURATION:' + int_or_float_to_string(self.target_duration))
if self.is_variant:
output.append(str(self.playlists))
output.append(str(self.segments))
if self.is_endlist:
output.append('#EXT-X-ENDLIST')
return '\n'.join(output)
def dump(self, filename):
'''
Saves the current m3u8 to ``filename``
'''
self._create_sub_directories(filename)
with open(filename, 'w') as fileobj:
fileobj.write(self.dumps())
def _create_sub_directories(self, filename):
basename = os.path.dirname(filename)
try:
os.makedirs(basename)
except OSError as error:
if error.errno != errno.EEXIST:
raise
class BasePathMixin(object):
@property
def absolute_uri(self):
if parser.is_url(self.uri):
return self.uri
else:
if self.baseuri is None:
raise ValueError('There can not be `absolute_uri` with no `baseuri` set')
return _urijoin(self.baseuri, self.uri)
@property
def basepath(self):
return os.path.dirname(self.uri)
@basepath.setter
def basepath(self, newbasepath):
self.uri = self.uri.replace(self.basepath, newbasepath)
class GroupedBasePathMixin(object):
def _set_baseuri(self, new_baseuri):
for item in self:
item.baseuri = new_baseuri
baseuri = property(None, _set_baseuri)
def _set_basepath(self, newbasepath):
for item in self:
item.basepath = newbasepath
basepath = property(None, _set_basepath)
class Segment(BasePathMixin):
'''
A video segment from a M3U8 playlist
`uri`
a string with the segment uri
`title`
title attribute from EXTINF parameter
`duration`
      duration attribute from EXTINF parameter
`baseuri`
uri the key comes from in URI hierarchy. ex.: http://example.com/path/to
'''
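    # Serialized form (sketch): Segment('video1.ts', None, duration=10)
    # renders via __str__ as '#EXTINF:10,\nvideo1.ts'.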
def __init__(self, uri, baseuri, duration=None, title=None):
self.uri = uri
self.duration = duration
self.title = title
self.baseuri = baseuri
def __str__(self):
output = ['#EXTINF:%s,' % int_or_float_to_string(self.duration)]
if self.title:
output.append(quoted(self.title))
output.append('\n')
output.append(self.uri)
return ''.join(output)
class SegmentList(list, GroupedBasePathMixin):
def __str__(self):
output = [str(segment) for segment in self]
return '\n'.join(output)
@property
def uri(self):
return [seg.uri for seg in self]
class Key(BasePathMixin):
'''
Key used to encrypt the segments in a m3u8 playlist (EXT-X-KEY)
`method`
is a string. ex.: "AES-128"
`uri`
is a string. ex:: "https://priv.example.com/key.php?r=52"
`baseuri`
uri the key comes from in URI hierarchy. ex.: http://example.com/path/to
`iv`
initialization vector. a string representing a hexadecimal number. ex.: 0X12A
'''
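    # Serialized form (sketch): a Key with method 'AES-128', the example uri
    # above and iv '0X12A' renders via __str__ as
    # '#EXT-X-KEY:METHOD=AES-128,URI="https://priv.example.com/key.php?r=52",IV=0X12A'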
def __init__(self, method, uri, baseuri, iv=None):
self.method = method
self.uri = uri
self.iv = iv
self.baseuri = baseuri
def __str__(self):
output = [
'METHOD=%s' % self.method,
'URI="%s"' % self.uri,
]
if self.iv:
output.append('IV=%s' % self.iv)
return '#EXT-X-KEY:' + ','.join(output)
class Playlist(BasePathMixin):
'''
Playlist object representing a link to a variant M3U8 with a specific bitrate.
Each `stream_info` attribute has: `program_id`, `bandwidth`, `resolution` and `codecs`
`resolution` is a tuple (h, v) of integers
More info: http://tools.ietf.org/html/draft-pantos-http-live-streaming-07#section-3.3.10
'''
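    # Serialized form (sketch): a Playlist whose stream_info dict is
    # {'program_id': '1', 'bandwidth': '320000', 'resolution': '512x288'}
    # renders via __str__ as
    # '#EXT-X-STREAM-INF:PROGRAM-ID=1,BANDWIDTH=320000,RESOLUTION=512x288'
    # followed by the playlist uri on the next line.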
def __init__(self, uri, stream_info, baseuri):
self.uri = uri
self.baseuri = baseuri
resolution = stream_info.get('resolution')
if resolution != None:
values = resolution.split('x')
resolution_pair = (int(values[0]), int(values[1]))
else:
resolution_pair = None
self.stream_info = StreamInfo(bandwidth=stream_info['bandwidth'],
program_id=stream_info.get('program_id'),
resolution=resolution_pair,
codecs=stream_info.get('codecs'))
def __str__(self):
stream_inf = []
if self.stream_info.program_id:
stream_inf.append('PROGRAM-ID=' + self.stream_info.program_id)
if self.stream_info.bandwidth:
stream_inf.append('BANDWIDTH=' + self.stream_info.bandwidth)
if self.stream_info.resolution:
res = str(self.stream_info.resolution[0]) + 'x' + str(self.stream_info.resolution[1])
stream_inf.append('RESOLUTION=' + res)
if self.stream_info.codecs:
stream_inf.append('CODECS=' + quoted(self.stream_info.codecs))
return '#EXT-X-STREAM-INF:' + ','.join(stream_inf) + '\n' + self.uri
StreamInfo = namedtuple('StreamInfo', ['bandwidth', 'program_id', 'resolution', 'codecs'])
class PlaylistList(list, GroupedBasePathMixin):
def __str__(self):
output = [str(playlist) for playlist in self]
return '\n'.join(output)
def denormalize_attribute(attribute):
return attribute.replace('_','-').upper()
def quoted(string):
return '"%s"' % string
def _urijoin(baseuri, path):
if parser.is_url(baseuri):
parsed_url = urlparse.urlparse(baseuri)
prefix = parsed_url.scheme + '://' + parsed_url.netloc
new_path = os.path.normpath(parsed_url.path + '/' + path)
return urlparse.urljoin(prefix, new_path.strip('/'))
else:
return os.path.normpath(os.path.join(baseuri, path.strip('/')))
def int_or_float_to_string(number):
return str(int(number)) if number == math.floor(number) else str(number)
| gpl-2.0 | 8,368,752,872,408,478,000 | 8,020,330,999,890,270,000 | 29.401099 | 98 | 0.594433 | false |
direvus/ansible | lib/ansible/modules/network/ios/ios_logging.py | 48 | 12512 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Ansible by Red Hat, inc
#
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: ios_logging
version_added: "2.4"
author: "Trishna Guha (@trishnaguha)"
short_description: Manage logging on network devices
description:
- This module provides declarative management of logging
on Cisco Ios devices.
notes:
- Tested against IOS 15.6
options:
dest:
description:
- Destination of the logs.
choices: ['on', 'host', 'console', 'monitor', 'buffered']
name:
description:
- If value of C(dest) is I(file) it indicates file-name,
for I(user) it indicates username and for I(host) indicates
the host name to be notified.
size:
description:
- Size of buffer. The acceptable value is in range from 4096 to
4294967295 bytes.
default: 4096
facility:
description:
- Set logging facility.
level:
description:
- Set logging severity levels.
aggregate:
description: List of logging definitions.
state:
description:
- State of the logging configuration.
default: present
choices: ['present', 'absent']
extends_documentation_fragment: ios
"""
EXAMPLES = """
- name: configure host logging
ios_logging:
dest: host
name: 172.16.0.1
state: present
- name: remove host logging configuration
ios_logging:
dest: host
name: 172.16.0.1
state: absent
- name: configure console logging level and facility
ios_logging:
dest: console
facility: local7
level: debugging
state: present
- name: enable logging to all
ios_logging:
dest : on
- name: configure buffer size
ios_logging:
dest: buffered
size: 5000
- name: Configure logging using aggregate
ios_logging:
aggregate:
- { dest: console, level: notifications }
- { dest: buffered, size: 9000 }
- name: remove logging using aggregate
ios_logging:
aggregate:
- { dest: console, level: notifications }
- { dest: buffered, size: 9000 }
state: absent
"""
RETURN = """
commands:
description: The list of configuration mode commands to send to the device
returned: always
type: list
sample:
- logging facility local7
- logging host 172.16.0.1
"""
import re
from copy import deepcopy
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.utils import remove_default_spec, validate_ip_address
from ansible.module_utils.network.ios.ios import get_config, load_config
from ansible.module_utils.network.ios.ios import get_capabilities
from ansible.module_utils.network.ios.ios import ios_argument_spec, check_args
def validate_size(value, module):
if value:
if not int(4096) <= int(value) <= int(4294967295):
module.fail_json(msg='size must be between 4096 and 4294967295')
else:
return value
def map_obj_to_commands(updates, module, os_version):
dest_group = ('console', 'monitor', 'buffered', 'on')
commands = list()
want, have = updates
for w in want:
dest = w['dest']
name = w['name']
size = w['size']
facility = w['facility']
level = w['level']
state = w['state']
del w['state']
if facility:
w['dest'] = 'facility'
if state == 'absent' and w in have:
if dest:
if dest == 'host':
if '12.' in os_version:
commands.append('no logging {0}'.format(name))
else:
commands.append('no logging host {0}'.format(name))
elif dest in dest_group:
commands.append('no logging {0}'.format(dest))
else:
module.fail_json(msg='dest must be among console, monitor, buffered, host, on')
if facility:
commands.append('no logging facility {0}'.format(facility))
if state == 'present' and w not in have:
if facility:
present = False
for entry in have:
if entry['dest'] == 'facility' and entry['facility'] == facility:
present = True
if not present:
commands.append('logging facility {0}'.format(facility))
if dest == 'host':
if '12.' in os_version:
commands.append('logging {0}'.format(name))
else:
commands.append('logging host {0}'.format(name))
elif dest == 'on':
commands.append('logging on')
elif dest == 'buffered' and size:
present = False
for entry in have:
if entry['dest'] == 'buffered' and entry['size'] == size and entry['level'] == level:
present = True
if not present:
if level and level != 'debugging':
commands.append('logging buffered {0} {1}'.format(size, level))
else:
commands.append('logging buffered {0}'.format(size))
else:
if dest:
dest_cmd = 'logging {0}'.format(dest)
if level:
dest_cmd += ' {0}'.format(level)
commands.append(dest_cmd)
return commands
def parse_facility(line, dest):
facility = None
if dest == 'facility':
match = re.search(r'logging facility (\S+)', line, re.M)
if match:
facility = match.group(1)
return facility
def parse_size(line, dest):
size = None
if dest == 'buffered':
match = re.search(r'logging buffered(?: (\d+))?(?: [a-z]+)?', line, re.M)
if match:
if match.group(1) is not None:
size = match.group(1)
else:
size = "4096"
return size
def parse_name(line, dest):
    name = None  # avoid UnboundLocalError for non-host destinations
    if dest == 'host':
        match = re.search(r'logging host (\S+)', line, re.M)
        if match:
            name = match.group(1)
    return name
def parse_level(line, dest):
level_group = ('emergencies', 'alerts', 'critical', 'errors', 'warnings',
'notifications', 'informational', 'debugging')
if dest == 'host':
level = 'debugging'
else:
if dest == 'buffered':
match = re.search(r'logging buffered(?: \d+)?(?: ([a-z]+))?', line, re.M)
else:
match = re.search(r'logging {0} (\S+)'.format(dest), line, re.M)
if match and match.group(1) in level_group:
level = match.group(1)
else:
level = 'debugging'
return level
def map_config_to_obj(module):
obj = []
dest_group = ('console', 'host', 'monitor', 'buffered', 'on', 'facility')
data = get_config(module, flags=['| include logging'])
for line in data.split('\n'):
match = re.search(r'logging (\S+)', line, re.M)
if match:
if match.group(1) in dest_group:
dest = match.group(1)
obj.append({
'dest': dest,
'name': parse_name(line, dest),
'size': parse_size(line, dest),
'facility': parse_facility(line, dest),
'level': parse_level(line, dest)
})
elif validate_ip_address(match.group(1)):
dest = 'host'
obj.append({
'dest': dest,
'name': match.group(1),
'facility': parse_facility(line, dest),
'level': parse_level(line, dest)
})
else:
ip_match = re.search(r'\d+\.\d+\.\d+\.\d+', match.group(1), re.M)
if ip_match:
dest = 'host'
obj.append({
'dest': dest,
'name': match.group(1),
'facility': parse_facility(line, dest),
'level': parse_level(line, dest)
})
return obj
def map_params_to_obj(module, required_if=None):
obj = []
aggregate = module.params.get('aggregate')
if aggregate:
for item in aggregate:
for key in item:
if item.get(key) is None:
item[key] = module.params[key]
module._check_required_if(required_if, item)
d = item.copy()
if d['dest'] != 'host':
d['name'] = None
if d['dest'] == 'buffered':
if 'size' in d:
d['size'] = str(validate_size(d['size'], module))
elif 'size' not in d:
d['size'] = str(4096)
else:
pass
if d['dest'] != 'buffered':
d['size'] = None
obj.append(d)
else:
if module.params['dest'] != 'host':
module.params['name'] = None
if module.params['dest'] == 'buffered':
if not module.params['size']:
module.params['size'] = str(4096)
else:
module.params['size'] = None
if module.params['size'] is None:
obj.append({
'dest': module.params['dest'],
'name': module.params['name'],
'size': module.params['size'],
'facility': module.params['facility'],
'level': module.params['level'],
'state': module.params['state']
})
else:
obj.append({
'dest': module.params['dest'],
'name': module.params['name'],
'size': str(validate_size(module.params['size'], module)),
'facility': module.params['facility'],
'level': module.params['level'],
'state': module.params['state']
})
return obj
def main():
""" main entry point for module execution
"""
element_spec = dict(
dest=dict(type='str', choices=['on', 'host', 'console', 'monitor', 'buffered']),
name=dict(type='str'),
size=dict(type='int'),
facility=dict(type='str'),
level=dict(type='str', default='debugging'),
state=dict(default='present', choices=['present', 'absent']),
)
aggregate_spec = deepcopy(element_spec)
# remove default in aggregate spec, to handle common arguments
remove_default_spec(aggregate_spec)
argument_spec = dict(
aggregate=dict(type='list', elements='dict', options=aggregate_spec),
)
argument_spec.update(element_spec)
argument_spec.update(ios_argument_spec)
required_if = [('dest', 'host', ['name'])]
module = AnsibleModule(argument_spec=argument_spec,
required_if=required_if,
supports_check_mode=True)
device_info = get_capabilities(module)
os_version = device_info['device_info']['network_os_version']
warnings = list()
check_args(module, warnings)
result = {'changed': False}
if warnings:
result['warnings'] = warnings
want = map_params_to_obj(module, required_if=required_if)
have = map_config_to_obj(module)
commands = map_obj_to_commands((want, have), module, os_version)
result['commands'] = commands
if commands:
if not module.check_mode:
load_config(module, commands)
result['changed'] = True
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 | -2,303,792,175,291,803,400 | -1,802,734,316,712,108,000 | 28.370892 | 105 | 0.539162 | false |
mancoast/CPythonPyc_test | fail/324_test_urllib2net.py | 6 | 13263 | #!/usr/bin/env python3
import unittest
from test import support
from test.test_urllib2 import sanepathname2url
import os
import socket
import urllib.error
import urllib.request
import sys
try:
import ssl
except ImportError:
ssl = None
TIMEOUT = 60 # seconds
def _retry_thrice(func, exc, *args, **kwargs):
for i in range(3):
try:
return func(*args, **kwargs)
except exc as e:
last_exc = e
continue
except:
raise
raise last_exc
def _wrap_with_retry_thrice(func, exc):
def wrapped(*args, **kwargs):
return _retry_thrice(func, exc, *args, **kwargs)
return wrapped
# Connecting to remote hosts is flaky. Make it more robust by retrying
# the connection several times.
_urlopen_with_retry = _wrap_with_retry_thrice(urllib.request.urlopen,
urllib.error.URLError)
class AuthTests(unittest.TestCase):
"""Tests urllib2 authentication features."""
## Disabled at the moment since there is no page under python.org which
## could be used to HTTP authentication.
#
# def test_basic_auth(self):
# import http.client
#
# test_url = "http://www.python.org/test/test_urllib2/basic_auth"
# test_hostport = "www.python.org"
# test_realm = 'Test Realm'
# test_user = 'test.test_urllib2net'
# test_password = 'blah'
#
# # failure
# try:
# _urlopen_with_retry(test_url)
# except urllib2.HTTPError, exc:
# self.assertEqual(exc.code, 401)
# else:
# self.fail("urlopen() should have failed with 401")
#
# # success
# auth_handler = urllib2.HTTPBasicAuthHandler()
# auth_handler.add_password(test_realm, test_hostport,
# test_user, test_password)
# opener = urllib2.build_opener(auth_handler)
# f = opener.open('http://localhost/')
# response = _urlopen_with_retry("http://www.python.org/")
#
# # The 'userinfo' URL component is deprecated by RFC 3986 for security
# # reasons, let's not implement it! (it's already implemented for proxy
# # specification strings (that is, URLs or authorities specifying a
# # proxy), so we must keep that)
# self.assertRaises(http.client.InvalidURL,
# urllib2.urlopen, "http://evil:[email protected]")
class CloseSocketTest(unittest.TestCase):
def test_close(self):
# calling .close() on urllib2's response objects should close the
# underlying socket
response = _urlopen_with_retry("http://www.python.org/")
sock = response.fp
self.assertTrue(not sock.closed)
response.close()
self.assertTrue(sock.closed)
class OtherNetworkTests(unittest.TestCase):
def setUp(self):
if 0: # for debugging
import logging
logger = logging.getLogger("test_urllib2net")
logger.addHandler(logging.StreamHandler())
# XXX The rest of these tests aren't very good -- they don't check much.
# They do sometimes catch some major disasters, though.
def test_ftp(self):
urls = [
'ftp://ftp.kernel.org/pub/linux/kernel/README',
'ftp://ftp.kernel.org/pub/linux/kernel/non-existent-file',
#'ftp://ftp.kernel.org/pub/leenox/kernel/test',
'ftp://gatekeeper.research.compaq.com/pub/DEC/SRC'
'/research-reports/00README-Legal-Rules-Regs',
]
self._test_urls(urls, self._extra_handlers())
def test_file(self):
TESTFN = support.TESTFN
f = open(TESTFN, 'w')
try:
f.write('hi there\n')
f.close()
urls = [
'file:' + sanepathname2url(os.path.abspath(TESTFN)),
('file:///nonsensename/etc/passwd', None,
urllib.error.URLError),
]
self._test_urls(urls, self._extra_handlers(), retry=True)
finally:
os.remove(TESTFN)
self.assertRaises(ValueError, urllib.request.urlopen,'./relative_path/to/file')
# XXX Following test depends on machine configurations that are internal
# to CNRI. Need to set up a public server with the right authentication
# configuration for test purposes.
## def test_cnri(self):
## if socket.gethostname() == 'bitdiddle':
## localhost = 'bitdiddle.cnri.reston.va.us'
## elif socket.gethostname() == 'bitdiddle.concentric.net':
## localhost = 'localhost'
## else:
## localhost = None
## if localhost is not None:
## urls = [
## 'file://%s/etc/passwd' % localhost,
## 'http://%s/simple/' % localhost,
## 'http://%s/digest/' % localhost,
## 'http://%s/not/found.h' % localhost,
## ]
## bauth = HTTPBasicAuthHandler()
## bauth.add_password('basic_test_realm', localhost, 'jhylton',
## 'password')
## dauth = HTTPDigestAuthHandler()
## dauth.add_password('digest_test_realm', localhost, 'jhylton',
## 'password')
## self._test_urls(urls, self._extra_handlers()+[bauth, dauth])
def test_urlwithfrag(self):
urlwith_frag = "http://docs.python.org/2/glossary.html#glossary"
with support.transient_internet(urlwith_frag):
req = urllib.request.Request(urlwith_frag)
res = urllib.request.urlopen(req)
self.assertEqual(res.geturl(),
"http://docs.python.org/2/glossary.html#glossary")
def test_custom_headers(self):
url = "http://www.example.com"
with support.transient_internet(url):
opener = urllib.request.build_opener()
request = urllib.request.Request(url)
self.assertFalse(request.header_items())
opener.open(request)
self.assertTrue(request.header_items())
self.assertTrue(request.has_header('User-agent'))
request.add_header('User-Agent','Test-Agent')
opener.open(request)
self.assertEqual(request.get_header('User-agent'),'Test-Agent')
def test_sites_no_connection_close(self):
# Some sites do not send Connection: close header.
# Verify that those work properly. (#issue12576)
URL = 'http://www.imdb.com' # mangles Connection:close
with support.transient_internet(URL):
try:
with urllib.request.urlopen(URL) as res:
pass
except ValueError as e:
self.fail("urlopen failed for site not sending \
Connection:close")
else:
self.assertTrue(res)
req = urllib.request.urlopen(URL)
res = req.read()
self.assertTrue(res)
def _test_urls(self, urls, handlers, retry=True):
import time
import logging
debug = logging.getLogger("test_urllib2").debug
urlopen = urllib.request.build_opener(*handlers).open
if retry:
urlopen = _wrap_with_retry_thrice(urlopen, urllib.error.URLError)
for url in urls:
if isinstance(url, tuple):
url, req, expected_err = url
else:
req = expected_err = None
with support.transient_internet(url):
debug(url)
try:
f = urlopen(url, req, TIMEOUT)
except EnvironmentError as err:
debug(err)
if expected_err:
msg = ("Didn't get expected error(s) %s for %s %s, got %s: %s" %
(expected_err, url, req, type(err), err))
self.assertIsInstance(err, expected_err, msg)
except urllib.error.URLError as err:
                    if isinstance(err.reason, socket.timeout):
print("<timeout: %s>" % url, file=sys.stderr)
continue
else:
raise
else:
try:
with support.time_out, \
support.socket_peer_reset, \
support.ioerror_peer_reset:
buf = f.read()
debug("read %d bytes" % len(buf))
except socket.timeout:
print("<timeout: %s>" % url, file=sys.stderr)
f.close()
debug("******** next url coming up...")
time.sleep(0.1)
def _extra_handlers(self):
handlers = []
cfh = urllib.request.CacheFTPHandler()
self.addCleanup(cfh.clear_cache)
cfh.setTimeout(1)
handlers.append(cfh)
return handlers
class TimeoutTest(unittest.TestCase):
def test_http_basic(self):
self.assertTrue(socket.getdefaulttimeout() is None)
url = "http://www.python.org"
with support.transient_internet(url, timeout=None):
u = _urlopen_with_retry(url)
self.addCleanup(u.close)
self.assertTrue(u.fp.raw._sock.gettimeout() is None)
def test_http_default_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
url = "http://www.python.org"
with support.transient_internet(url):
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(url)
self.addCleanup(u.close)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(u.fp.raw._sock.gettimeout(), 60)
def test_http_no_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
url = "http://www.python.org"
with support.transient_internet(url):
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(url, timeout=None)
self.addCleanup(u.close)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(u.fp.raw._sock.gettimeout() is None)
def test_http_timeout(self):
url = "http://www.python.org"
with support.transient_internet(url):
u = _urlopen_with_retry(url, timeout=120)
self.addCleanup(u.close)
self.assertEqual(u.fp.raw._sock.gettimeout(), 120)
FTP_HOST = "ftp://ftp.mirror.nl/pub/gnu/"
def test_ftp_basic(self):
self.assertTrue(socket.getdefaulttimeout() is None)
with support.transient_internet(self.FTP_HOST, timeout=None):
u = _urlopen_with_retry(self.FTP_HOST)
self.addCleanup(u.close)
self.assertTrue(u.fp.fp.raw._sock.gettimeout() is None)
def test_ftp_default_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
with support.transient_internet(self.FTP_HOST):
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(self.FTP_HOST)
self.addCleanup(u.close)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(u.fp.fp.raw._sock.gettimeout(), 60)
def test_ftp_no_timeout(self):
self.assertTrue(socket.getdefaulttimeout() is None)
with support.transient_internet(self.FTP_HOST):
socket.setdefaulttimeout(60)
try:
u = _urlopen_with_retry(self.FTP_HOST, timeout=None)
self.addCleanup(u.close)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(u.fp.fp.raw._sock.gettimeout() is None)
def test_ftp_timeout(self):
with support.transient_internet(self.FTP_HOST):
u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
self.addCleanup(u.close)
self.assertEqual(u.fp.fp.raw._sock.gettimeout(), 60)
@unittest.skipUnless(ssl, "requires SSL support")
class HTTPSTests(unittest.TestCase):
def test_sni(self):
self.skipTest("test disabled - test server needed")
# Checks that Server Name Indication works, if supported by the
# OpenSSL linked to.
# The ssl module itself doesn't have server-side support for SNI,
# so we rely on a third-party test site.
expect_sni = ssl.HAS_SNI
with support.transient_internet("XXX"):
u = urllib.request.urlopen("XXX")
contents = u.readall()
if expect_sni:
self.assertIn(b"Great", contents)
self.assertNotIn(b"Unfortunately", contents)
else:
self.assertNotIn(b"Great", contents)
self.assertIn(b"Unfortunately", contents)
def test_main():
support.requires("network")
support.run_unittest(AuthTests,
HTTPSTests,
OtherNetworkTests,
CloseSocketTest,
TimeoutTest,
)
if __name__ == "__main__":
test_main()
| gpl-3.0 | -1,682,061,531,027,863,000 | -8,230,313,645,860,163,000 | 35.53719 | 88 | 0.560205 | false |
hbrunn/website | website_event_register_free/__init__.py | 71 | 1029 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2015 Therp BV <http://therp.nl>.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import model
from . import controllers
| agpl-3.0 | 1,020,884,717,039,696,900 | -4,903,609,012,177,950,000 | 45.772727 | 78 | 0.609329 | false |
Designist/audacity | lib-src/lv2/lv2/plugins/eg-midigate.lv2/waflib/Build.py | 265 | 20971 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys,errno,re,shutil
try:
import cPickle
except ImportError:
import pickle as cPickle
from waflib import Runner,TaskGen,Utils,ConfigSet,Task,Logs,Options,Context,Errors
import waflib.Node
CACHE_DIR='c4che'
CACHE_SUFFIX='_cache.py'
INSTALL=1337
UNINSTALL=-1337
SAVED_ATTRS='root node_deps raw_deps task_sigs'.split()
CFG_FILES='cfg_files'
POST_AT_ONCE=0
POST_LAZY=1
POST_BOTH=2
class BuildContext(Context.Context):
'''executes the build'''
cmd='build'
variant=''
def __init__(self,**kw):
super(BuildContext,self).__init__(**kw)
self.is_install=0
self.top_dir=kw.get('top_dir',Context.top_dir)
self.run_dir=kw.get('run_dir',Context.run_dir)
self.post_mode=POST_AT_ONCE
self.out_dir=kw.get('out_dir',Context.out_dir)
self.cache_dir=kw.get('cache_dir',None)
if not self.cache_dir:
self.cache_dir=self.out_dir+os.sep+CACHE_DIR
self.all_envs={}
self.task_sigs={}
self.node_deps={}
self.raw_deps={}
self.cache_dir_contents={}
self.task_gen_cache_names={}
self.launch_dir=Context.launch_dir
self.jobs=Options.options.jobs
self.targets=Options.options.targets
self.keep=Options.options.keep
self.cache_global=Options.cache_global
self.nocache=Options.options.nocache
self.progress_bar=Options.options.progress_bar
self.deps_man=Utils.defaultdict(list)
self.current_group=0
self.groups=[]
self.group_names={}
def get_variant_dir(self):
if not self.variant:
return self.out_dir
return os.path.join(self.out_dir,self.variant)
variant_dir=property(get_variant_dir,None)
def __call__(self,*k,**kw):
kw['bld']=self
ret=TaskGen.task_gen(*k,**kw)
self.task_gen_cache_names={}
self.add_to_group(ret,group=kw.get('group',None))
return ret
def rule(self,*k,**kw):
def f(rule):
ret=self(*k,**kw)
ret.rule=rule
return ret
return f
def __copy__(self):
raise Errors.WafError('build contexts are not supposed to be copied')
def install_files(self,*k,**kw):
pass
def install_as(self,*k,**kw):
pass
def symlink_as(self,*k,**kw):
pass
def load_envs(self):
node=self.root.find_node(self.cache_dir)
if not node:
raise Errors.WafError('The project was not configured: run "waf configure" first!')
lst=node.ant_glob('**/*%s'%CACHE_SUFFIX,quiet=True)
if not lst:
raise Errors.WafError('The cache directory is empty: reconfigure the project')
for x in lst:
name=x.path_from(node).replace(CACHE_SUFFIX,'').replace('\\','/')
env=ConfigSet.ConfigSet(x.abspath())
self.all_envs[name]=env
for f in env[CFG_FILES]:
newnode=self.root.find_resource(f)
try:
h=Utils.h_file(newnode.abspath())
except(IOError,AttributeError):
Logs.error('cannot find %r'%f)
h=Utils.SIG_NIL
newnode.sig=h
def init_dirs(self):
if not(os.path.isabs(self.top_dir)and os.path.isabs(self.out_dir)):
raise Errors.WafError('The project was not configured: run "waf configure" first!')
self.path=self.srcnode=self.root.find_dir(self.top_dir)
self.bldnode=self.root.make_node(self.variant_dir)
self.bldnode.mkdir()
def execute(self):
self.restore()
if not self.all_envs:
self.load_envs()
self.execute_build()
def execute_build(self):
Logs.info("Waf: Entering directory `%s'"%self.variant_dir)
self.recurse([self.run_dir])
self.pre_build()
self.timer=Utils.Timer()
if self.progress_bar:
sys.stderr.write(Logs.colors.cursor_off)
try:
self.compile()
finally:
if self.progress_bar==1:
c=len(self.returned_tasks)or 1
self.to_log(self.progress_line(c,c,Logs.colors.BLUE,Logs.colors.NORMAL))
print('')
sys.stdout.flush()
sys.stderr.write(Logs.colors.cursor_on)
Logs.info("Waf: Leaving directory `%s'"%self.variant_dir)
self.post_build()
def restore(self):
try:
env=ConfigSet.ConfigSet(os.path.join(self.cache_dir,'build.config.py'))
except(IOError,OSError):
pass
else:
if env['version']<Context.HEXVERSION:
raise Errors.WafError('Version mismatch! reconfigure the project')
for t in env['tools']:
self.setup(**t)
dbfn=os.path.join(self.variant_dir,Context.DBFILE)
try:
data=Utils.readf(dbfn,'rb')
except(IOError,EOFError):
Logs.debug('build: Could not load the build cache %s (missing)'%dbfn)
else:
try:
waflib.Node.pickle_lock.acquire()
waflib.Node.Nod3=self.node_class
try:
data=cPickle.loads(data)
except Exception ,e:
Logs.debug('build: Could not pickle the build cache %s: %r'%(dbfn,e))
else:
for x in SAVED_ATTRS:
setattr(self,x,data[x])
finally:
waflib.Node.pickle_lock.release()
self.init_dirs()
def store(self):
data={}
for x in SAVED_ATTRS:
data[x]=getattr(self,x)
db=os.path.join(self.variant_dir,Context.DBFILE)
try:
waflib.Node.pickle_lock.acquire()
waflib.Node.Nod3=self.node_class
x=cPickle.dumps(data,-1)
finally:
waflib.Node.pickle_lock.release()
Utils.writef(db+'.tmp',x,m='wb')
try:
st=os.stat(db)
os.remove(db)
if not Utils.is_win32:
os.chown(db+'.tmp',st.st_uid,st.st_gid)
except(AttributeError,OSError):
pass
os.rename(db+'.tmp',db)
def compile(self):
Logs.debug('build: compile()')
self.producer=Runner.Parallel(self,self.jobs)
self.producer.biter=self.get_build_iterator()
self.returned_tasks=[]
try:
self.producer.start()
except KeyboardInterrupt:
self.store()
raise
else:
if self.producer.dirty:
self.store()
if self.producer.error:
raise Errors.BuildError(self.producer.error)
def setup(self,tool,tooldir=None,funs=None):
if isinstance(tool,list):
for i in tool:self.setup(i,tooldir)
return
module=Context.load_tool(tool,tooldir)
if hasattr(module,"setup"):module.setup(self)
def get_env(self):
try:
return self.all_envs[self.variant]
except KeyError:
return self.all_envs['']
def set_env(self,val):
self.all_envs[self.variant]=val
env=property(get_env,set_env)
def add_manual_dependency(self,path,value):
if path is None:
raise ValueError('Invalid input')
if isinstance(path,waflib.Node.Node):
node=path
elif os.path.isabs(path):
node=self.root.find_resource(path)
else:
node=self.path.find_resource(path)
if isinstance(value,list):
self.deps_man[id(node)].extend(value)
else:
self.deps_man[id(node)].append(value)
def launch_node(self):
try:
return self.p_ln
except AttributeError:
self.p_ln=self.root.find_dir(self.launch_dir)
return self.p_ln
def hash_env_vars(self,env,vars_lst):
if not env.table:
env=env.parent
if not env:
return Utils.SIG_NIL
idx=str(id(env))+str(vars_lst)
try:
cache=self.cache_env
except AttributeError:
cache=self.cache_env={}
else:
try:
return self.cache_env[idx]
except KeyError:
pass
lst=[env[a]for a in vars_lst]
ret=Utils.h_list(lst)
Logs.debug('envhash: %s %r',Utils.to_hex(ret),lst)
cache[idx]=ret
return ret
def get_tgen_by_name(self,name):
cache=self.task_gen_cache_names
if not cache:
for g in self.groups:
for tg in g:
try:
cache[tg.name]=tg
except AttributeError:
pass
try:
return cache[name]
except KeyError:
raise Errors.WafError('Could not find a task generator for the name %r'%name)
def progress_line(self,state,total,col1,col2):
n=len(str(total))
Utils.rot_idx+=1
ind=Utils.rot_chr[Utils.rot_idx%4]
pc=(100.*state)/total
eta=str(self.timer)
fs="[%%%dd/%%%dd][%%s%%2d%%%%%%s][%s]["%(n,n,ind)
left=fs%(state,total,col1,pc,col2)
right='][%s%s%s]'%(col1,eta,col2)
cols=Logs.get_term_cols()-len(left)-len(right)+2*len(col1)+2*len(col2)
if cols<7:cols=7
ratio=((cols*state)//total)-1
bar=('='*ratio+'>').ljust(cols)
msg=Utils.indicator%(left,bar,right)
return msg
def declare_chain(self,*k,**kw):
return TaskGen.declare_chain(*k,**kw)
def pre_build(self):
for m in getattr(self,'pre_funs',[]):
m(self)
def post_build(self):
for m in getattr(self,'post_funs',[]):
m(self)
def add_pre_fun(self,meth):
try:
self.pre_funs.append(meth)
except AttributeError:
self.pre_funs=[meth]
def add_post_fun(self,meth):
try:
self.post_funs.append(meth)
except AttributeError:
self.post_funs=[meth]
def get_group(self,x):
if not self.groups:
self.add_group()
if x is None:
return self.groups[self.current_group]
if x in self.group_names:
return self.group_names[x]
return self.groups[x]
def add_to_group(self,tgen,group=None):
assert(isinstance(tgen,TaskGen.task_gen)or isinstance(tgen,Task.TaskBase))
tgen.bld=self
self.get_group(group).append(tgen)
def get_group_name(self,g):
if not isinstance(g,list):
g=self.groups[g]
for x in self.group_names:
if id(self.group_names[x])==id(g):
return x
return''
def get_group_idx(self,tg):
se=id(tg)
for i in range(len(self.groups)):
for t in self.groups[i]:
if id(t)==se:
return i
return None
def add_group(self,name=None,move=True):
if name and name in self.group_names:
Logs.error('add_group: name %s already present'%name)
g=[]
self.group_names[name]=g
self.groups.append(g)
if move:
self.current_group=len(self.groups)-1
def set_group(self,idx):
if isinstance(idx,str):
g=self.group_names[idx]
for i in range(len(self.groups)):
if id(g)==id(self.groups[i]):
self.current_group=i
else:
self.current_group=idx
def total(self):
total=0
for group in self.groups:
for tg in group:
try:
total+=len(tg.tasks)
except AttributeError:
total+=1
return total
def get_targets(self):
to_post=[]
min_grp=0
for name in self.targets.split(','):
tg=self.get_tgen_by_name(name)
if not tg:
raise Errors.WafError('target %r does not exist'%name)
m=self.get_group_idx(tg)
if m>min_grp:
min_grp=m
to_post=[tg]
elif m==min_grp:
to_post.append(tg)
return(min_grp,to_post)
def get_all_task_gen(self):
lst=[]
for g in self.groups:
lst.extend(g)
return lst
def post_group(self):
if self.targets=='*':
for tg in self.groups[self.cur]:
try:
f=tg.post
except AttributeError:
pass
else:
f()
elif self.targets:
if self.cur<self._min_grp:
for tg in self.groups[self.cur]:
try:
f=tg.post
except AttributeError:
pass
else:
f()
else:
for tg in self._exact_tg:
tg.post()
else:
ln=self.launch_node()
if ln.is_child_of(self.bldnode):
Logs.warn('Building from the build directory, forcing --targets=*')
ln=self.srcnode
elif not ln.is_child_of(self.srcnode):
Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)'%(ln.abspath(),self.srcnode.abspath()))
ln=self.srcnode
for tg in self.groups[self.cur]:
try:
f=tg.post
except AttributeError:
pass
else:
if tg.path.is_child_of(ln):
f()
def get_tasks_group(self,idx):
tasks=[]
for tg in self.groups[idx]:
try:
tasks.extend(tg.tasks)
except AttributeError:
tasks.append(tg)
return tasks
def get_build_iterator(self):
self.cur=0
if self.targets and self.targets!='*':
(self._min_grp,self._exact_tg)=self.get_targets()
global lazy_post
if self.post_mode!=POST_LAZY:
while self.cur<len(self.groups):
self.post_group()
self.cur+=1
self.cur=0
while self.cur<len(self.groups):
if self.post_mode!=POST_AT_ONCE:
self.post_group()
tasks=self.get_tasks_group(self.cur)
Task.set_file_constraints(tasks)
Task.set_precedence_constraints(tasks)
self.cur_tasks=tasks
self.cur+=1
if not tasks:
continue
yield tasks
while 1:
yield[]
class inst(Task.Task):
color='CYAN'
def uid(self):
lst=[self.dest,self.path]+self.source
return Utils.h_list(repr(lst))
def post(self):
buf=[]
for x in self.source:
if isinstance(x,waflib.Node.Node):
y=x
else:
y=self.path.find_resource(x)
if not y:
if Logs.verbose:
Logs.warn('Could not find %s immediately (may cause broken builds)'%x)
idx=self.generator.bld.get_group_idx(self)
for tg in self.generator.bld.groups[idx]:
if not isinstance(tg,inst)and id(tg)!=id(self):
tg.post()
y=self.path.find_resource(x)
if y:
break
else:
raise Errors.WafError('Could not find %r in %r'%(x,self.path))
buf.append(y)
self.inputs=buf
def runnable_status(self):
ret=super(inst,self).runnable_status()
if ret==Task.SKIP_ME:
return Task.RUN_ME
return ret
def __str__(self):
return''
def run(self):
return self.generator.exec_task()
def get_install_path(self,destdir=True):
dest=Utils.subst_vars(self.dest,self.env)
dest=dest.replace('/',os.sep)
if destdir and Options.options.destdir:
dest=os.path.join(Options.options.destdir,os.path.splitdrive(dest)[1].lstrip(os.sep))
return dest
def exec_install_files(self):
destpath=self.get_install_path()
if not destpath:
raise Errors.WafError('unknown installation path %r'%self.generator)
for x,y in zip(self.source,self.inputs):
if self.relative_trick:
destfile=os.path.join(destpath,y.path_from(self.path))
else:
destfile=os.path.join(destpath,y.name)
self.generator.bld.do_install(y.abspath(),destfile,self.chmod)
def exec_install_as(self):
destfile=self.get_install_path()
self.generator.bld.do_install(self.inputs[0].abspath(),destfile,self.chmod)
def exec_symlink_as(self):
destfile=self.get_install_path()
src=self.link
if self.relative_trick:
src=os.path.relpath(src,os.path.dirname(destfile))
self.generator.bld.do_link(src,destfile)
class InstallContext(BuildContext):
'''installs the targets on the system'''
cmd='install'
def __init__(self,**kw):
super(InstallContext,self).__init__(**kw)
self.uninstall=[]
self.is_install=INSTALL
def do_install(self,src,tgt,chmod=Utils.O644):
d,_=os.path.split(tgt)
if not d:
raise Errors.WafError('Invalid installation given %r->%r'%(src,tgt))
Utils.check_dir(d)
srclbl=src.replace(self.srcnode.abspath()+os.sep,'')
if not Options.options.force:
try:
st1=os.stat(tgt)
st2=os.stat(src)
except OSError:
pass
else:
if st1.st_mtime+2>=st2.st_mtime and st1.st_size==st2.st_size:
if not self.progress_bar:
Logs.info('- install %s (from %s)'%(tgt,srclbl))
return False
if not self.progress_bar:
Logs.info('+ install %s (from %s)'%(tgt,srclbl))
try:
os.remove(tgt)
except OSError:
pass
try:
shutil.copy2(src,tgt)
os.chmod(tgt,chmod)
except IOError:
try:
os.stat(src)
except(OSError,IOError):
Logs.error('File %r does not exist'%src)
raise Errors.WafError('Could not install the file %r'%tgt)
def do_link(self,src,tgt):
d,_=os.path.split(tgt)
Utils.check_dir(d)
link=False
if not os.path.islink(tgt):
link=True
elif os.readlink(tgt)!=src:
link=True
if link:
try:os.remove(tgt)
except OSError:pass
if not self.progress_bar:
Logs.info('+ symlink %s (to %s)'%(tgt,src))
os.symlink(src,tgt)
else:
if not self.progress_bar:
Logs.info('- symlink %s (to %s)'%(tgt,src))
def run_task_now(self,tsk,postpone):
tsk.post()
if not postpone:
if tsk.runnable_status()==Task.ASK_LATER:
raise self.WafError('cannot post the task %r'%tsk)
tsk.run()
def install_files(self,dest,files,env=None,chmod=Utils.O644,relative_trick=False,cwd=None,add=True,postpone=True):
tsk=inst(env=env or self.env)
tsk.bld=self
tsk.path=cwd or self.path
tsk.chmod=chmod
if isinstance(files,waflib.Node.Node):
tsk.source=[files]
else:
tsk.source=Utils.to_list(files)
tsk.dest=dest
tsk.exec_task=tsk.exec_install_files
tsk.relative_trick=relative_trick
if add:self.add_to_group(tsk)
self.run_task_now(tsk,postpone)
return tsk
def install_as(self,dest,srcfile,env=None,chmod=Utils.O644,cwd=None,add=True,postpone=True):
tsk=inst(env=env or self.env)
tsk.bld=self
tsk.path=cwd or self.path
tsk.chmod=chmod
tsk.source=[srcfile]
tsk.dest=dest
tsk.exec_task=tsk.exec_install_as
if add:self.add_to_group(tsk)
self.run_task_now(tsk,postpone)
return tsk
def symlink_as(self,dest,src,env=None,cwd=None,add=True,postpone=True,relative_trick=False):
if Utils.is_win32:
return
tsk=inst(env=env or self.env)
tsk.bld=self
tsk.dest=dest
tsk.path=cwd or self.path
tsk.source=[]
tsk.link=src
tsk.relative_trick=relative_trick
tsk.exec_task=tsk.exec_symlink_as
if add:self.add_to_group(tsk)
self.run_task_now(tsk,postpone)
return tsk
class UninstallContext(InstallContext):
'''removes the targets installed'''
cmd='uninstall'
def __init__(self,**kw):
super(UninstallContext,self).__init__(**kw)
self.is_install=UNINSTALL
def do_install(self,src,tgt,chmod=Utils.O644):
if not self.progress_bar:
Logs.info('- remove %s'%tgt)
self.uninstall.append(tgt)
try:
os.remove(tgt)
except OSError ,e:
if e.errno!=errno.ENOENT:
if not getattr(self,'uninstall_error',None):
self.uninstall_error=True
Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
if Logs.verbose>1:
Logs.warn('Could not remove %s (error code %r)'%(e.filename,e.errno))
while tgt:
tgt=os.path.dirname(tgt)
try:
os.rmdir(tgt)
except OSError:
break
def do_link(self,src,tgt):
try:
if not self.progress_bar:
Logs.info('- remove %s'%tgt)
os.remove(tgt)
except OSError:
pass
while tgt:
tgt=os.path.dirname(tgt)
try:
os.rmdir(tgt)
except OSError:
break
def execute(self):
try:
def runnable_status(self):
return Task.SKIP_ME
setattr(Task.Task,'runnable_status_back',Task.Task.runnable_status)
setattr(Task.Task,'runnable_status',runnable_status)
super(UninstallContext,self).execute()
finally:
setattr(Task.Task,'runnable_status',Task.Task.runnable_status_back)
class CleanContext(BuildContext):
'''cleans the project'''
cmd='clean'
def execute(self):
self.restore()
if not self.all_envs:
self.load_envs()
self.recurse([self.run_dir])
try:
self.clean()
finally:
self.store()
def clean(self):
Logs.debug('build: clean called')
if self.bldnode!=self.srcnode:
lst=[]
for e in self.all_envs.values():
lst.extend(self.root.find_or_declare(f)for f in e[CFG_FILES])
for n in self.bldnode.ant_glob('**/*',excl='.lock* *conf_check_*/** config.log c4che/*',quiet=True):
if n in lst:
continue
n.delete()
self.root.children={}
for v in'node_deps task_sigs raw_deps'.split():
setattr(self,v,{})
class ListContext(BuildContext):
'''lists the targets to execute'''
cmd='list'
def execute(self):
self.restore()
if not self.all_envs:
self.load_envs()
self.recurse([self.run_dir])
self.pre_build()
self.timer=Utils.Timer()
for g in self.groups:
for tg in g:
try:
f=tg.post
except AttributeError:
pass
else:
f()
try:
self.get_tgen_by_name('')
except Exception:
pass
lst=list(self.task_gen_cache_names.keys())
lst.sort()
for k in lst:
Logs.pprint('GREEN',k)
class StepContext(BuildContext):
'''executes tasks in a step-by-step fashion, for debugging'''
cmd='step'
def __init__(self,**kw):
super(StepContext,self).__init__(**kw)
self.files=Options.options.files
def compile(self):
if not self.files:
Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
BuildContext.compile(self)
return
targets=None
if self.targets and self.targets!='*':
targets=self.targets.split(',')
for g in self.groups:
for tg in g:
if targets and tg.name not in targets:
continue
try:
f=tg.post
except AttributeError:
pass
else:
f()
for pat in self.files.split(','):
matcher=self.get_matcher(pat)
for tg in g:
if isinstance(tg,Task.TaskBase):
lst=[tg]
else:
lst=tg.tasks
for tsk in lst:
do_exec=False
for node in getattr(tsk,'inputs',[]):
if matcher(node,output=False):
do_exec=True
break
for node in getattr(tsk,'outputs',[]):
if matcher(node,output=True):
do_exec=True
break
if do_exec:
ret=tsk.run()
Logs.info('%s -> exit %r'%(str(tsk),ret))
def get_matcher(self,pat):
inn=True
out=True
if pat.startswith('in:'):
out=False
pat=pat.replace('in:','')
elif pat.startswith('out:'):
inn=False
pat=pat.replace('out:','')
anode=self.root.find_node(pat)
pattern=None
if not anode:
if not pat.startswith('^'):
pat='^.+?%s'%pat
if not pat.endswith('$'):
pat='%s$'%pat
pattern=re.compile(pat)
def match(node,output):
if output==True and not out:
return False
if output==False and not inn:
return False
if anode:
return anode==node
else:
return pattern.match(node.abspath())
return match
BuildContext.store=Utils.nogc(BuildContext.store)
BuildContext.restore=Utils.nogc(BuildContext.restore)
| gpl-2.0 | 5,273,600,411,072,784,000 | -5,803,420,283,684,559,000 | 26.666227 | 115 | 0.670688 | false |
kaiyuanl/gem5 | src/cpu/kvm/X86KvmCPU.py | 54 | 2012 | # Copyright (c) 2013 Andreas Sandberg
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
from m5.params import *
from BaseKvmCPU import BaseKvmCPU
class X86KvmCPU(BaseKvmCPU):
type = 'X86KvmCPU'
cxx_header = "cpu/kvm/x86_cpu.hh"
@classmethod
def export_methods(cls, code):
code('''
void dumpFpuRegs();
void dumpIntRegs();
void dumpSpecRegs();
void dumpXCRs();
void dumpXSave();
void dumpVCpuEvents();
''')
useXSave = Param.Bool(True, "Use XSave to synchronize FPU/SIMD registers")
| bsd-3-clause | 4,765,797,286,312,029,000 | -8,436,570,104,725,574,000 | 41.808511 | 78 | 0.759443 | false |
ArcEye/MK-Qt5 | nosetests/unittest_instbindings.py | 11 | 1279 | #!/usr/bin/env python
# verify the cython inst bindings
from nose import with_setup
from machinekit.nosetests.realtime import setup_module ,teardown_module
from machinekit.nosetests.support import fnear
from unittest import TestCase
import time,os,ConfigParser
from machinekit import rtapi,hal
class TestIinst(TestCase):
def setUp(self):
self.cfg = ConfigParser.ConfigParser()
self.cfg.read(os.getenv("MACHINEKIT_INI"))
self.uuid = self.cfg.get("MACHINEKIT", "MKUUID")
rt = rtapi.RTAPIcommand(uuid=self.uuid)
rt.loadrt("icomp");
rt.newinst("icomp","foo")
assert len(instances) == 1
rt.newinst("icomp","bar")
assert len(instances) == 2
rt.delinst("foo")
assert len(instances) == 1
c = hal.Component("icomp")
for i in instances:
assert c.id == i.owner_id
assert c.name == i.owner().name
assert "foo" in instances
assert "bar" in instances
assert instances["foo"].size > 0
assert instances["bar"].size > 0
try:
x = instances["nonexistent"]
raise "should not happen"
except NameError:
pass
(lambda s=__import__('signal'):
s.signal(s.SIGTERM, s.SIG_IGN))()
| lgpl-2.1 | -1,885,333,532,561,198,000 | -6,261,811,555,351,392,000 | 29.452381 | 71 | 0.613761 | false |
js0701/chromium-crosswalk | tools/telemetry/telemetry/value/list_of_scalar_values.py | 9 | 7147 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import numbers
import math
from telemetry import value as value_module
from telemetry.value import none_values
from telemetry.value import summarizable
def Variance(sample):
""" Compute the population variance.
Args:
sample: a list of numbers.
"""
k = len(sample) - 1 # Bessel correction
if k <= 0:
return 0
m = _Mean(sample)
return sum((x - m)**2 for x in sample)/k
def StandardDeviation(sample):
""" Compute standard deviation for a list of numbers.
Args:
sample: a list of numbers.
"""
return math.sqrt(Variance(sample))
def PooledStandardDeviation(list_of_samples, list_of_variances=None):
""" Compute standard deviation for a list of samples.
See: https://en.wikipedia.org/wiki/Pooled_variance for the formula.
Args:
list_of_samples: a list of lists, each is a list of numbers.
list_of_variances: a list of numbers, the i-th element is the variance of
the i-th sample in list_of_samples. If this is None, we use
Variance(sample) to get the variance of the i-th sample.
"""
pooled_variance = 0.0
total_degrees_of_freedom = 0
for i in xrange(len(list_of_samples)):
l = list_of_samples[i]
k = len(l) - 1 # Bessel correction
if k <= 0:
continue
variance = list_of_variances[i] if list_of_variances else Variance(l)
pooled_variance += k * variance
total_degrees_of_freedom += k
if total_degrees_of_freedom:
return (pooled_variance/total_degrees_of_freedom) ** 0.5
return 0
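# Worked example (comment only): Variance([1, 2, 3]) == 1.0 and
# Variance([2, 4, 6]) == 4.0, each with len - 1 == 2 degrees of freedom, so
# PooledStandardDeviation([[1, 2, 3], [2, 4, 6]]) evaluates to
# ((2 * 1.0 + 2 * 4.0) / 4) ** 0.5, roughly 1.58.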
def _Mean(values):
return float(sum(values)) / len(values) if len(values) > 0 else 0.0
class ListOfScalarValues(summarizable.SummarizableValue):
""" ListOfScalarValues represents a list of numbers.
By default, std is the standard deviation of all numbers in the list. Std can
also be specified in the constructor if the numbers are not from the same
population.
"""
def __init__(self, page, name, units, values,
important=True, description=None,
tir_label=None, none_value_reason=None,
std=None, same_page_merge_policy=value_module.CONCATENATE,
improvement_direction=None):
super(ListOfScalarValues, self).__init__(page, name, units, important,
description, tir_label,
improvement_direction)
if values is not None:
assert isinstance(values, list)
assert len(values) > 0
assert all(isinstance(v, numbers.Number) for v in values)
assert std is None or isinstance(std, numbers.Number)
else:
assert std is None
none_values.ValidateNoneValueReason(values, none_value_reason)
self.values = values
self.none_value_reason = none_value_reason
self.same_page_merge_policy = same_page_merge_policy
if values is not None and std is None:
std = StandardDeviation(values)
assert std is None or std >= 0, (
'standard deviation cannot be negative: %s' % std)
self._std = std
@property
def std(self):
return self._std
@property
def variance(self):
return self._std ** 2
def __repr__(self):
if self.page:
page_name = self.page.display_name
else:
page_name = 'None'
if self.same_page_merge_policy == value_module.CONCATENATE:
merge_policy = 'CONCATENATE'
else:
merge_policy = 'PICK_FIRST'
return ('ListOfScalarValues(%s, %s, %s, %s, '
'important=%s, description=%s, tir_label=%s, std=%s, '
'same_page_merge_policy=%s, improvement_direction=%s)') % (
page_name,
self.name,
self.units,
repr(self.values),
self.important,
self.description,
self.tir_label,
self.std,
merge_policy,
self.improvement_direction)
def GetBuildbotDataType(self, output_context):
if self._IsImportantGivenOutputIntent(output_context):
return 'default'
return 'unimportant'
def GetBuildbotValue(self):
return self.values
def GetRepresentativeNumber(self):
return _Mean(self.values)
def GetRepresentativeString(self):
return repr(self.values)
def IsMergableWith(self, that):
return (super(ListOfScalarValues, self).IsMergableWith(that) and
self.same_page_merge_policy == that.same_page_merge_policy)
@staticmethod
def GetJSONTypeName():
return 'list_of_scalar_values'
def AsDict(self):
d = super(ListOfScalarValues, self).AsDict()
d['values'] = self.values
d['std'] = self.std
if self.none_value_reason is not None:
d['none_value_reason'] = self.none_value_reason
return d
@staticmethod
def FromDict(value_dict, page_dict):
kwargs = value_module.Value.GetConstructorKwArgs(value_dict, page_dict)
kwargs['values'] = value_dict['values']
kwargs['std'] = value_dict['std']
if 'improvement_direction' in value_dict:
kwargs['improvement_direction'] = value_dict['improvement_direction']
if 'none_value_reason' in value_dict:
kwargs['none_value_reason'] = value_dict['none_value_reason']
if 'tir_label' in value_dict:
kwargs['tir_label'] = value_dict['tir_label']
return ListOfScalarValues(**kwargs)
@classmethod
def MergeLikeValuesFromSamePage(cls, values):
assert len(values) > 0
v0 = values[0]
if v0.same_page_merge_policy == value_module.PICK_FIRST:
return ListOfScalarValues(
v0.page, v0.name, v0.units,
values[0].values,
important=v0.important,
same_page_merge_policy=v0.same_page_merge_policy,
none_value_reason=v0.none_value_reason,
improvement_direction=v0.improvement_direction)
assert v0.same_page_merge_policy == value_module.CONCATENATE
return cls._MergeLikeValues(values, v0.page, v0.name, v0.tir_label)
@classmethod
def MergeLikeValuesFromDifferentPages(cls, values):
assert len(values) > 0
v0 = values[0]
return cls._MergeLikeValues(values, None, v0.name, v0.tir_label)
@classmethod
def _MergeLikeValues(cls, values, page, name, tir_label):
v0 = values[0]
merged_values = []
list_of_samples = []
none_value_reason = None
pooled_std = None
for v in values:
if v.values is None:
merged_values = None
none_value_reason = none_values.MERGE_FAILURE_REASON
break
merged_values.extend(v.values)
list_of_samples.append(v.values)
if merged_values:
pooled_std = PooledStandardDeviation(
list_of_samples, list_of_variances=[v.variance for v in values])
return ListOfScalarValues(
page, name, v0.units,
merged_values,
important=v0.important,
tir_label=tir_label,
same_page_merge_policy=v0.same_page_merge_policy,
std=pooled_std,
none_value_reason=none_value_reason,
improvement_direction=v0.improvement_direction)
| bsd-3-clause | 5,084,820,886,102,818,000 | -6,538,469,605,139,472,000 | 31.049327 | 79 | 0.650902 | false |
Orav/kbengine | kbe/res/scripts/common/Lib/email/mime/text.py | 2 | 1408 | # Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: [email protected]
"""Class representing text/* type MIME documents."""
__all__ = ['MIMEText']
from email.mime.nonmultipart import MIMENonMultipart
class MIMEText(MIMENonMultipart):
"""Class for generating text/* type MIME documents."""
def __init__(self, _text, _subtype='plain', _charset=None):
"""Create a text/* type MIME document.
_text is the string for this message object.
_subtype is the MIME sub content type, defaulting to "plain".
_charset is the character set parameter added to the Content-Type
header. This defaults to "us-ascii". Note that as a side-effect, the
Content-Transfer-Encoding header will also be set.
"""
# If no _charset was specified, check to see if there are non-ascii
# characters present. If not, use 'us-ascii', otherwise use utf-8.
# XXX: This can be removed once #7304 is fixed.
if _charset is None:
try:
_text.encode('us-ascii')
_charset = 'us-ascii'
except UnicodeEncodeError:
_charset = 'utf-8'
MIMENonMultipart.__init__(self, 'text', _subtype,
**{'charset': _charset})
self.set_payload(_text, _charset)
| lgpl-3.0 | 1,339,704,881,727,927,600 | -5,426,001,449,373,602,000 | 31.547619 | 78 | 0.590909 | false |
cc272309126/panda3d | contrib/src/sceneeditor/propertyWindow.py | 8 | 70519 | #################################################################
# propertyWindow.py
# Written by Yi-Hong Lin, [email protected], 2004
#################################################################
from direct.tkwidgets.AppShell import *
from direct.showbase.TkGlobal import *
from seColorEntry import *
from direct.tkwidgets import Floater
from direct.tkwidgets import Dial
from direct.tkwidgets import Slider
from direct.tkwidgets import VectorWidgets
from pandac.PandaModules import *
from Tkinter import *
import Pmw
class propertyWindow(AppShell,Pmw.MegaWidget):
#################################################################
# propertyWindow(AppShell,Pmw.MegaWidget)
    # This class will create a window to show the object's properties and
    # let the user change some of them.
#################################################################
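    # A minimal usage sketch (hypothetical values; in practice the sceneEditor
    # collects the info dict before opening this window):
    #   info = {'filePath': Filename('models/box.egg')}
    #   pw = propertyWindow(modelNodePath, 'Model', info)
    # Closing the window sends the 'PW_close' message (see onDestroy below).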
appversion = '1.0'
appname = 'Property Window'
frameWidth = 400
frameHeight = 400
padx = 0
pady = 0
usecommandarea = 0
usestatusarea = 0
widgetsDict = {}
def __init__(self, target, type, info, parent = None, nodePath = render, **kw):
self.nodePath = target
self.name = target.getName()
self.type = type
self.info = info
# Initialise superclass
Pmw.MegaWidget.__init__(self, parent)
# Define the megawidget options.
optiondefs = (
('title', self.appname, None),
)
self.defineoptions(kw, optiondefs)
if parent == None:
self.parent = Toplevel()
AppShell.__init__(self, self.parent)
self.parent.resizable(False,False) ## Disable the ability to resize for this Window.
def appInit(self):
return
def createInterface(self):
# The interior of the toplevel panel
interior = self.interior()
mainFrame = Frame(interior)
name_label = Label(mainFrame, text= self.name,font=('MSSansSerif', 15),
relief = RIDGE, borderwidth=5)
name_label.pack()
outFrame = Frame(mainFrame, relief = RIDGE, borderwidth=3)
self.contentWidge = self.createcomponent(
'scrolledFrame',
(), None,
Pmw.ScrolledFrame, (outFrame,),
hull_width = 200, hull_height = 300,
usehullsize = 1)
self.contentFrame = self.contentWidge.component('frame')
self.contentWidge.pack(fill = 'both', expand = 1,padx = 3, pady = 5)
outFrame.pack(fill = 'both', expand = 1)
# Creating different interface depands on object's type
if self.type == 'camera':
self.cameraInterface(self.contentFrame)
self.accept('forPorpertyWindow'+self.name, self.trackDataFromSceneCamera)
elif self.type == 'Model':
self.modelInterface(self.contentFrame)
self.accept('forPorpertyWindow'+self.name, self.trackDataFromSceneModel)
elif self.type == 'Actor':
self.modelInterface(self.contentFrame)
self.actorInterface(self.contentFrame)
self.accept('forPorpertyWindow'+self.name, self.trackDataFromSceneActor)
pass
elif self.type == 'Light':
self.lightInterface(self.contentFrame)
self.accept('forPorpertyWindow'+self.name, self.trackDataFromSceneLight)
pass
elif self.type == 'dummy':
self.dummyInterface(self.contentFrame)
self.accept('forPorpertyWindow'+self.name, self.trackDataFromSceneDummy)
pass
elif self.type == 'collisionNode':
self.collisionInterface(self.contentFrame)
self.accept('forPorpertyWindow'+self.name, self.trackDataFromSceneCollision)
pass
elif self.type == 'Special':
            # If the user tries to open the property window for node "SEditor",
# It will show the grid property.
self.gridInterface(self.contentFrame)
self.accept('forPorpertyWindow'+self.name, None)
pass
self.curveFrame = None
        #### If the nodePath has been bound to any curves
if self.info.has_key('curveList'):
self.createCurveFrame(self.contentFrame)
## Set all stuff done
mainFrame.pack(fill = 'both', expand = 1)
def createMenuBar(self):
# we don't need menu bar here.
self.menuBar.destroy()
def onDestroy(self, event):
self.ignore('forPorpertyWindow'+self.name)
messenger.send('PW_close', [self.name])
'''
        If you have opened anything else here, please add the cleanup code for it!
'''
pass
def createEntryField(self, parent,text, value,
command, initialState, labelWidth = 12,
side = 'left', fill = X, expand = 0,
validate = None,
defaultButton = False, buttonText = 'Default',defaultFunction = None ):
#################################################################
# createEntryField(self, parent,text, value,
# command, initialState, labelWidth = 12,
# side = 'left', fill = X, expand = 0,
# validate = None,
# defaultButton = False, buttonText = 'Default',defaultFunction = None ):
        # This function will create an Entry on the frame "parent"
        # Also, if the user has enabled "defaultButton," it will create a button right after the entry.
#################################################################
frame = Frame(parent)
widget = Pmw.EntryField(frame, labelpos='w', label_text = text,
value = value, entry_font=('MSSansSerif', 10),label_font=('MSSansSerif', 10),
modifiedcommand=command, validate = validate,
label_width = labelWidth)
widget.configure(entry_state = initialState)
widget.pack(side=LEFT)
self.widgetsDict[text] = widget
if defaultButton and (defaultFunction!=None):
# create a button if they need.
widget = Button(frame, text=buttonText, font=('MSSansSerif', 10), command = defaultFunction)
widget.pack(side=LEFT, padx=3)
self.widgetsDict[text+'-'+'DefaultButton']=widget
frame.pack(side = side, fill = fill, expand = expand,pady=3)
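        # Example call of this helper (mirrors the camera page below); the
        # optional default button restores the lens' near distance:
        #   self.createEntryField(lensFrame, 'Near:', value=self.info['near'],
        #                         command=self.setCameraNear, initialState='normal',
        #                         side='top', defaultButton=True,
        #                         defaultFunction=self.defaultCameraNear)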
def createPosEntry(self, contentFrame):
#################################################################
# createPosEntry(self, contentFrame)
        # This function will create three entries for setting the position of the object.
# the entry type is Floater.
# And, it will set the call back function to setNodePathPosHprScale()
#################################################################
posInterior = Frame(contentFrame)
self.posX = self.createcomponent('posX', (), None,
Floater.Floater, (posInterior,),
text = 'X', relief = FLAT,
value = self.nodePath.getX(),
label_foreground = 'Red',
entry_width = 9)
self.posX['commandData'] = ['x']
self.posX['command'] = self.setNodePathPosHprScale
self.posX.pack(side=LEFT,expand=0,fill=X, padx=1)
self.posY = self.createcomponent('posY', (), None,
Floater.Floater, (posInterior,),
text = 'Y', relief = FLAT,
value = self.nodePath.getY(),
label_foreground = '#00A000',
entry_width = 9)
self.posY['commandData'] = ['y']
self.posY['command'] = self.setNodePathPosHprScale
self.posY.pack(side=LEFT, expand=0,fill=X, padx=1)
self.posZ = self.createcomponent('posZ', (), None,
Floater.Floater, (posInterior,),
text = 'Z', relief = FLAT,
value = self.nodePath.getZ(),
label_foreground = 'Blue',
entry_width = 9)
self.posZ['commandData'] = ['z']
self.posZ['command'] = self.setNodePathPosHprScale
self.posZ.pack(side=LEFT, expand=0,fill=X, padx=1)
posInterior.pack(side=TOP, expand=0,fill=X, padx=3, pady=3)
def createHprEntry(self, contentFrame):
#################################################################
# createHprEntry(self, contentFrame)
        # This function will create three entries for setting the orientation of the object.
        # The entry type is Dial.AngleDial.
# And, it will set the call back function to setNodePathPosHprScale()
#################################################################
hprInterior = Frame(contentFrame)
self.hprH = self.createcomponent('hprH', (), None,
Dial.AngleDial, (hprInterior,),
style = 'mini',
text = 'H', value = self.nodePath.getH(),
relief = FLAT,
label_foreground = 'blue',
entry_width = 9)
self.hprH['commandData'] = ['h']
self.hprH['command'] = self.setNodePathPosHprScale
self.hprH.pack(side = LEFT, expand=0,fill=X)
self.hprP = self.createcomponent('hprP', (), None,
Dial.AngleDial, (hprInterior,),
style = 'mini',
text = 'P', value = self.nodePath.getP(),
relief = FLAT,
label_foreground = 'red',
entry_width = 9)
self.hprP['commandData'] = ['p']
self.hprP['command'] = self.setNodePathPosHprScale
self.hprP.pack(side = LEFT, expand=0,fill=X)
self.hprR = self.createcomponent('hprR', (), None,
Dial.AngleDial, (hprInterior,),
style = 'mini',
text = 'R', value = self.nodePath.getR(),
relief = FLAT,
label_foreground = '#00A000',
entry_width = 9)
self.hprR['commandData'] = ['r']
self.hprR['command'] = self.setNodePathPosHprScale
self.hprR.pack(side = LEFT, expand=0,fill=X)
hprInterior.pack(side=TOP, expand=0,fill=X, padx=3, pady=3)
def createScaleEntry(self, contentFrame):
#################################################################
# createScaleEntry(self, contentFrame)
        # This function will create a single entry for setting the scale of the object.
        # The entry type is Floater.
# And, it will set the call back function to setNodePathPosHprScale()
#################################################################
scaleInterior = Frame(contentFrame)
self.scale = self.createcomponent('scale', (), None,
Floater.Floater, (scaleInterior,),
text = 'Scale',
relief = FLAT,
min = 0.0001, value = self.nodePath.getScale().getX(),
resetValue = 1.0,
label_foreground = 'Blue')
self.scale['commandData'] = ['s']
self.scale['command'] = self.setNodePathPosHprScale
self.scale.pack(side=LEFT,expand=0,fill=X)
scaleInterior.pack(side=TOP,expand=0,fill=X, padx=3, pady=3)
def createColorEntry(self, contentFrame):
#################################################################
# createColorEntry(self, contentFrame)
        # This function will create a color entry for setting the color of the object.
        # The entry type is VectorWidgets.ColorEntry.
        # And, it will set the callback function to setNodeColorVec()
#################################################################
color = self.nodePath.getColor()
print color
self.nodeColor = VectorWidgets.ColorEntry(
contentFrame, text = 'Node Color', value=[color.getX()*255,
color.getY()*255,
color.getZ()*255,
color.getW()*255])
self.nodeColor['command'] = self.setNodeColorVec
self.nodeColor['resetValue'] = [255,255,255,255]
self.nodeColor.place(anchor=NW,y=235)
self.bind(self.nodeColor, 'Set nodePath color')
self.nodeColor.pack(side=TOP,expand=0,fill=X, padx=3, pady=3)
return
def setNodeColorVec(self, color):
#################################################################
# setNodeColorVec(self, color)
# This function will set the color of the object
#################################################################
self.nodePath.setColor(color[0]/255.0,
color[1]/255.0,
color[2]/255.0,
color[3]/255.0)
return
def setNodePathPosHprScale(self, data, axis):
#################################################################
# setNodePathPosHprScale(self, data, axis)
        # This function will set the position, orientation or scale of the object,
        # using the "axis" parameter to decide which property should be set.
#################################################################
if axis == 'x':
self.nodePath.setX(data)
elif axis == 'y':
self.nodePath.setY(data)
elif axis == 'z':
self.nodePath.setZ(data)
elif axis == 'h':
self.nodePath.setH(data)
elif axis == 'p':
self.nodePath.setP(data)
elif axis == 'r':
self.nodePath.setR(data)
elif axis == 's':
self.nodePath.setScale(data)
#### Curve property
def createCurveFrame(self, contentFrame):
#################################################################
# createCurveFrame(self, contentFrame)
# Draw the curve property frame
# This function will draw the property frame and content of curves
# pass the target frame as a variable
#################################################################
if self.curveFrame==None:
self.curveFrame = Frame(contentFrame)
group = Pmw.Group(self.curveFrame,
tag_text='Motion Path List for this Node',
tag_font=('MSSansSerif', 10))
innerFrame = group.interior()
n = 0
for curve in self.info['curveList']:
n += 1
self.createEntryField(innerFrame,'Curve %d:' %n,
value = curve.getCurve(0).getName(),
command = None,
initialState='disabled',
side = 'top',
defaultButton = True,
buttonText = 'delete',
defaultFunction = lambda a = n, b = self : b.deleteCurve(a))
group.pack(side = TOP, fill = X, expand = 0,pady=3, padx=3)
self.curveFrame.pack(side = TOP, fill = X, expand = 0,pady=3, padx=3)
return
def deleteCurve(self, number = 0):
#################################################################
# deleteCurve(self, number = 0)
        # Callback function, called when the user clicks the "delete" button beside the curve name.
        # This function will send a message to the sceneEditor to remove the target curve
        # and will set a callback function waiting for the result.
#################################################################
widget = self.widgetsDict['Curve %d:' %number]
curveName = widget.getvalue()
self.accept('curveRemovedFromNode',self.redrawCurveProperty)
messenger.send('PW_removeCurveFromNode',[self.nodePath, curveName])
return
def redrawCurveProperty(self, nodePath, curveList):
#################################################################
# redrawCurveProperty(self, nodePath, curveList)
        # Callback function, will be called once we get the result from dataHolder.
        # It will check the target nodePath first, then check whether the curve list is empty.
        # If it is, delete the whole curve frame. If not, renew the data and redraw the curve frame again.
#################################################################
self.name = self.nodePath.getName()
if self.name != nodePath.getName():
messenger.send('curveRemovedFromNode',[nodePath, curveList])
return
else:
self.ignore('curveRemovedFromNode')
if curveList!= None:
del self.info['curveList']
self.info['curveList'] = curveList
self.curveFrame.destroy()
del self.curveFrame
self.curveFrame = None
self.createCurveFrame(self.contentFrame)
else:
del self.info['curveList']
self.curveFrame.destroy()
del self.curveFrame
self.curveFrame = None
return
####
#### Anything about Camera will be here!
####
def cameraInterface(self, contentFrame):
#################################################################
        # cameraInterface(self, contentFrame)
# Create the interface for camera node.
#################################################################
        ## Type entry : unchangeable
widget = self.createEntryField(contentFrame,'Type:',
value = self.type,
command = None,
initialState='disabled',
side = 'top')
## lens Type entry
widget = self.createEntryField(contentFrame, 'Lens Type:',
value = self.info['lensType'],
command = None,
initialState='disabled',
side = 'top')
## Pos
group = Pmw.Group(contentFrame,tag_text='Position',
tag_font=('MSSansSerif', 10))
self.createPosEntry(group.interior())
group.pack(side=TOP,fill = X, expand = 0, pady=3)
## Orientation
group = Pmw.Group(contentFrame,tag_text='Orientation',
tag_font=('MSSansSerif', 10))
self.createHprEntry(group.interior())
group.pack(side=TOP,fill = X, expand = 0, pady=3)
## near entry
group = Pmw.Group(contentFrame,tag_text='Lens Property',
tag_font=('MSSansSerif', 10))
lensFrame = group.interior()
widget = self.createEntryField(lensFrame, 'Near:',value = self.info['near'],
command = self.setCameraNear,
initialState='normal',
validate = Pmw.realvalidator,
side = 'top',
defaultButton = True,
defaultFunction = self.defaultCameraNear)
## far entry
widget = self.createEntryField(lensFrame, 'Far:',
value = self.info['far'],
command = self.setCameraFar,
initialState='normal',
side = 'top',
validate = Pmw.realvalidator,
defaultButton = True,
defaultFunction = self.defaultCameraFar)
## Hfov entry
widget = self.createEntryField(lensFrame, 'H.F.O.V.:',
value = self.info['hFov'],
command = self.setCameraFov,
validate = Pmw.realvalidator,
initialState='normal',
side = 'top',
defaultButton = True,
defaultFunction = self.defaultCameraHfov)
## Vfov entry
widget = self.createEntryField(lensFrame, 'V.F.O.V.:',
value = self.info['vFov'],
command = self.setCameraFov,
validate = Pmw.realvalidator,
initialState='normal',
side = 'top',
defaultButton = True,
defaultFunction = self.defaultCameraVfov)
## Film Size entry
frame = Frame(lensFrame)
widget = Label(frame, text = "Film Size:", font=('MSSansSerif', 10),width=12)
widget.pack(side=LEFT)
frame.pack(side = TOP, fill = X, expand = 0, pady=3)
frame = Frame(lensFrame)
widget = Pmw.EntryField(frame, labelpos='w', label_text = ' ',
value = self.info['FilmSize'].getX(),
entry_font=('MSSansSerif', 10),
label_font=('MSSansSerif', 10),
modifiedcommand=self.setCameraFilmSize, validate = Pmw.realvalidator,
entry_width = 8)
self.widgetsDict['FilmSizeX']=widget
widget.pack(side=LEFT, padx=3)
widget = Pmw.EntryField(frame, labelpos='w', label_text = ': ', value = self.info['FilmSize'].getY() ,
label_font=('MSSansSerif', 10),
entry_font=('MSSansSerif', 10),
modifiedcommand=self.setCameraFilmSize, validate = Pmw.realvalidator,
entry_width = 8)
self.widgetsDict['FilmSizeY']=widget
widget.pack(side=LEFT, padx=3)
widget = Button(frame, text='Default', font=('MSSansSerif', 10), command = self.defaultCameraFilmSize)
widget.pack(side=LEFT, padx=3)
self.widgetsDict['FilmSize'+'-'+'DefaultButton']=widget
frame.pack(side = TOP, fill = X, expand = 0,pady=0)
## Focal Length entry
widget = self.createEntryField(lensFrame, 'Focal Length:',
value = self.info['focalLength'],
command = self.setCameraFocalLength,
validate = Pmw.realvalidator,
initialState='normal',
side = 'top',
defaultButton = True,
defaultFunction = self.defaultCameraFocalLength)
group.pack(side = TOP, fill = X, expand = 0,pady=2)
def defaultCameraFar(self):
#################################################################
# defaultCameraFar(self)
# set the camera "Far" value back to default.
#################################################################
widget = self.widgetsDict['Far:']
widget.setvalue(base.cam.node().getLens().getDefaultFar())
return
def setCameraFar(self):
#################################################################
# setCameraFar(self)
# set the camera "Far" value to what now user has typed in the entry
#################################################################
if self.widgetsDict['Far:'].getvalue() != '':
value = float(self.widgetsDict['Far:'].getvalue())
else:
value = 0
camera.getChild(0).node().getLens().setFar(value)
return
def defaultCameraNear(self):
#################################################################
# defaultCameraNear(self)
# set the camera "Near" value back to default.
#################################################################
widget = self.widgetsDict['Near:']
widget.setvalue(base.cam.node().getLens().getDefaultNear())
return
def setCameraNear(self):
#################################################################
# setCameraNear(self)
# set the camera "Near" value to what now user has typed in the entry
#################################################################
if self.widgetsDict['Near:'].getvalue() != '':
value = float(self.widgetsDict['Near:'].getvalue())
else:
value = 0
camera.getChild(0).node().getLens().setNear(value)
return
def defaultCameraHfov(self):
#################################################################
# defaultCameraHfov(self)
# set the camera "Hfov" value back to default.
#################################################################
widget = self.widgetsDict['H.F.O.V.:']
widget.setvalue(45.0)
return
def setCameraFov(self):
#################################################################
# setCameraFov(self)
# set the camera "Fov" value to what now user has typed in the entry
#################################################################
if self.widgetsDict['H.F.O.V.:'].getvalue() != '':
value1 = float(self.widgetsDict['H.F.O.V.:'].getvalue())
else:
value1 = 0
if self.widgetsDict['V.F.O.V.:'].getvalue() != '':
value2 = float(self.widgetsDict['V.F.O.V.:'].getvalue())
else:
value2 = 0
camera.getChild(0).node().getLens().setFov(VBase2(value1,value2))
return
def defaultCameraVfov(self):
#################################################################
# defaultCameraVfov(self)
# set the camera "Vfov" value back to default.
#################################################################
widget = self.widgetsDict['V.F.O.V.:']
widget.setvalue(34.51587677)
return
def defaultCameraFocalLength(self):
#################################################################
# defaultCameraFocalLength(self)
# set the camera "Focal Length" value back to default.
#################################################################
widget = self.widgetsDict['Focal Length:']
widget.setvalue(1.20710682869)
return
def setCameraFocalLength(self):
#################################################################
# setCameraFocalLength(self)
# set the camera "Focal Length" value to what now user has typed in the entry
#################################################################
if self.widgetsDict['Focal Length:'].getvalue() != '':
value = float(self.widgetsDict['Focal Length:'].getvalue())
else:
value = 0
camera.getChild(0).node().getLens().setFocalLength(value)
camera.getChild(0).node().getLens().setFilmSize(VBase2(float(self.widgetsDict['FilmSizeX'].getvalue()),float(self.widgetsDict['FilmSizeY'].getvalue())))
return
def defaultCameraFilmSize(self):
#################################################################
# defaultCameraFilmSize(self)
# set the camera "Film Size" value back to default.
#################################################################
widget = self.widgetsDict['FilmSizeX']
widget.setvalue(1)
widget = self.widgetsDict['FilmSizeY']
widget.setvalue(0.75)
return
def setCameraFilmSize(self):
#################################################################
# setCameraFilmSize(self)
# set the camera "Film Size" value to what now user has typed in the entry
#################################################################
if self.widgetsDict['FilmSizeX'].getvalue() != '':
value1 = float(self.widgetsDict['FilmSizeX'].getvalue())
else:
value1 = 0
if self.widgetsDict['FilmSizeY'].getvalue() != '':
value2 = float(self.widgetsDict['FilmSizeY'].getvalue())
else:
value2 = 0
camera.getChild(0).node().getLens().setFilmSize(VBase2(value1,value2))
return
####
#### Anything about Model & Actor will be here!
####
def modelInterface(self, contentFrame):
#################################################################
# modelInterface(self, contentFrame)
# Create the basic interface for ModelRoot Type Node
#################################################################
widget = self.createEntryField(contentFrame,'Type:',
value = self.type,
command = None,
initialState='disabled',
side = 'top')
widget = self.createEntryField(contentFrame,'Model File:',
value = self.info['filePath'].getFullpath(),
command = None,
initialState='disabled',
side = 'top',
defaultButton = False,
buttonText = 'Change',
defaultFunction = None)
group = Pmw.Group(contentFrame,tag_text='Position',
tag_font=('MSSansSerif', 10))
self.createPosEntry(group.interior())
group.pack(side=TOP,fill = X, expand = 0, pady=3)
group = Pmw.Group(contentFrame,tag_text='Orientation',
tag_font=('MSSansSerif', 10))
self.createHprEntry(group.interior())
group.pack(side=TOP,fill = X, expand = 0, pady=3)
self.createScaleEntry(contentFrame)
group = Pmw.Group(contentFrame,tag_text='Color',
tag_font=('MSSansSerif', 10))
frame = group.interior()
self.createColorEntry(frame)
self.varAlpha = IntVar()
self.varAlpha.set(self.nodePath.hasTransparency())
checkButton = Checkbutton(frame, text='Enable Alpha',
variable=self.varAlpha, command=self.toggleAlpha)
checkButton.pack(side=RIGHT,pady=3)
group.pack(side=TOP,fill = X, expand = 0, pady=3)
return
def toggleAlpha(self):
#################################################################
# toggleAlpha(self)
        # This function will toggle the object's alpha value
        # And, it will also reset the "Bin" to
        # "fixed" if the user enables alpha for this object.
#################################################################
if self.nodePath.hasTransparency():
self.nodePath.clearTransparency()
self.nodePath.setBin("default", 0)
else:
self.nodePath.setTransparency(True)
self.nodePath.setBin("fixed", 1)
return
def actorInterface(self, contentFrame):
#################################################################
# actorInterface(self, contentFrame)
# Create the basic interface for Actor Type Node
#################################################################
self.animFrame = None
animeDict = self.info['animDict']
if len(animeDict)==0:
return
self.animFrame = Frame(contentFrame)
group = Pmw.Group(self.animFrame,tag_text='Animations',
tag_font=('MSSansSerif', 10))
innerFrame = group.interior()
for name in animeDict:
self.createEntryField(innerFrame, name,
value = animeDict[name],
command = None,
initialState='disabled',
side = 'top',
defaultButton = True,
buttonText = 'Remove',
defaultFunction = lambda a = name, b = self : b.deleteAnimation(a))
group.pack(side=TOP,fill = X, expand = 0, pady=3)
self.animFrame.pack(side=TOP,fill = X, expand = 0, pady=3)
return
def deleteAnimation(self, anim):
#################################################################
# deleteAnimation(self, anim)
# This function will delete the animation named "anim" in this actor
        # But the animation is not directly removed by this function.
        # Instead, it sends out a message to notify the dataHolder to remove this animation.
#################################################################
print anim
widget = self.widgetsDict[anim]
self.accept('animRemovedFromNode',self.redrawAnimProperty)
messenger.send('PW_removeAnimFromNode',[self.name, anim])
return
def redrawAnimProperty(self, nodePath, animDict):
#################################################################
        # redrawAnimProperty(self, nodePath, animDict)
        # Callback function, will be called once we get the result from dataHolder.
        # It will check the target nodePath first, then check whether the animation dict is empty.
        # If it is, delete the whole animation frame. If not, renew the data and redraw the animation frame again.
#################################################################
self.name = self.nodePath.getName()
if self.name != nodePath.getName():
messenger.send('animRemovedFromNode',[nodePath, animDict])
return
else:
self.ignore('animRemovedFromNode')
if len(animDict)!= 0:
del self.info['animDict']
self.info['animDict'] = animDict
self.animFrame.destroy()
del self.animFrame
self.animFrame = None
self.actorInterface(self.contentFrame)
else:
del self.info['animDict']
self.animFrame.destroy()
del self.animFrame
self.animFrame = None
return
####
#### Anything about Light will be here!
####
def lightInterface(self, contentFrame):
#################################################################
# lightInterface(self, contentFrame)
# Create the basic interface for light Type Node
#################################################################
widget = self.createEntryField(contentFrame,'Type:',
value = self.nodePath.node().getType().getName(),
command = None,
initialState='disabled',
side = 'top')
self.lightNode = self.info['lightNode']
lightingGroup = Pmw.Group(contentFrame,tag_pyclass=None)
frame = lightingGroup.interior()
self.lightColor = seColorEntry(
frame, text = 'Light Color', label_font=('MSSansSerif', 10),
value=[self.lightNode.lightcolor.getX()*255, self.lightNode.lightcolor.getY()*255,self.lightNode.lightcolor.getZ()*255,0])
self.lightColor['command'] = self.setLightingColorVec
self.lightColor['resetValue'] = [0.3*255,0.3*255,0.3*255,0]
self.lightColor.pack(side=TOP, fill=X,expand=1, padx = 2, pady =2)
self.bind(self.lightColor, 'Set light color')
self.varActive = IntVar()
self.varActive.set(self.lightNode.active)
checkButton = Checkbutton(frame, text='Enable This Light',
variable=self.varActive, command=self.toggleLight)
checkButton.pack(side=RIGHT,pady=3)
lightingGroup.pack(side=TOP, fill = X, expand =1)
# Directional light controls
if self.lightNode.type == 'directional':
lightingGroup = Pmw.Group(contentFrame,tag_pyclass=None)
directionalPage = lightingGroup.interior()
self.dSpecularColor = seColorEntry(
directionalPage, text = 'Specular Color', label_font=('MSSansSerif', 10),value = [self.lightNode.specularColor.getX()*255,self.lightNode.specularColor.getY()*255,self.lightNode.specularColor.getZ()*255,0])
self.dSpecularColor['command'] = self.setSpecularColor
self.dSpecularColor.pack(fill = X, expand = 1)
self.bind(self.dSpecularColor,
'Set directional light specular color')
self.dPosition = VectorWidgets.Vector3Entry(
directionalPage, text = 'Position', label_font=('MSSansSerif', 10),value = [self.lightNode.getPosition().getX(),self.lightNode.getPosition().getY(),self.lightNode.getPosition().getZ()])
self.dPosition['command'] = self.setPosition
self.dPosition['resetValue'] = [0,0,0,0]
self.dPosition.pack(fill = X, expand = 1)
self.bind(self.dPosition, 'Set directional light position')
self.dOrientation = VectorWidgets.Vector3Entry(
directionalPage, text = 'Orientation', label_font=('MSSansSerif', 10),
value = [self.lightNode.getOrientation().getX(),self.lightNode.getOrientation().getY(),self.lightNode.getOrientation().getZ(),0])
self.dOrientation['command'] = self.setOrientation
self.dOrientation['resetValue'] = [0,0,0,0]
self.dOrientation.pack(fill = X, expand = 1)
self.bind(self.dOrientation, 'Set directional light orientation')
lightingGroup.pack(side=TOP, fill = X, expand =1)
elif self.lightNode.type == 'point':
# Point light controls
lightingGroup = Pmw.Group(contentFrame,tag_pyclass=None)
pointPage = lightingGroup.interior()
self.pSpecularColor = seColorEntry(
pointPage, text = 'Specular Color', label_font=('MSSansSerif', 10),
                value = [self.lightNode.specularColor.getX()*255,self.lightNode.specularColor.getY()*255,self.lightNode.specularColor.getZ()*255,0])  # 0-255 scale, consistent with the other light types
self.pSpecularColor['command'] = self.setSpecularColor
self.pSpecularColor.pack(fill = X, expand = 1)
self.bind(self.pSpecularColor,
'Set point light specular color')
self.pPosition = VectorWidgets.Vector3Entry(
pointPage, text = 'Position', label_font=('MSSansSerif', 10),
value = [self.lightNode.getPosition().getX(),self.lightNode.getPosition().getY(),self.lightNode.getPosition().getZ(),0])
self.pPosition['command'] = self.setPosition
self.pPosition['resetValue'] = [0,0,0,0]
self.pPosition.pack(fill = X, expand = 1)
self.bind(self.pPosition, 'Set point light position')
self.pConstantAttenuation = Slider.Slider(
pointPage,
text = 'Constant Attenuation', label_font=('MSSansSerif', 10),
max = 1.0,
value = self.lightNode.constant)
self.pConstantAttenuation['command'] = self.setConstantAttenuation
self.pConstantAttenuation.pack(fill = X, expand = 1)
self.bind(self.pConstantAttenuation,
'Set point light constant attenuation')
self.pLinearAttenuation = Slider.Slider(
pointPage,
text = 'Linear Attenuation', label_font=('MSSansSerif', 10),
max = 1.0,
value = self.lightNode.linear)
self.pLinearAttenuation['command'] = self.setLinearAttenuation
self.pLinearAttenuation.pack(fill = X, expand = 1)
self.bind(self.pLinearAttenuation,
'Set point light linear attenuation')
self.pQuadraticAttenuation = Slider.Slider(
pointPage,
text = 'Quadratic Attenuation', label_font=('MSSansSerif', 10),
max = 1.0,
value = self.lightNode.quadratic)
self.pQuadraticAttenuation['command'] = self.setQuadraticAttenuation
self.pQuadraticAttenuation.pack(fill = X, expand = 1)
self.bind(self.pQuadraticAttenuation,
'Set point light quadratic attenuation')
lightingGroup.pack(side=TOP, fill = X, expand =1)
elif self.lightNode.type == 'spot':
# Spot light controls
lightingGroup = Pmw.Group(contentFrame,tag_pyclass=None)
spotPage = lightingGroup.interior()
self.sSpecularColor = seColorEntry(
spotPage, text = 'Specular Color', label_font=('MSSansSerif', 10),
value = [self.lightNode.specularColor.getX()*255,self.lightNode.specularColor.getY()*255,self.lightNode.specularColor.getZ()*255,0])
self.sSpecularColor['command'] = self.setSpecularColor
self.sSpecularColor.pack(fill = X, expand = 1)
self.bind(self.sSpecularColor,
'Set spot light specular color')
self.sConstantAttenuation = Slider.Slider(
spotPage,
text = 'Constant Attenuation', label_font=('MSSansSerif', 10),
max = 1.0,
value = self.lightNode.constant)
self.sConstantAttenuation['command'] = self.setConstantAttenuation
self.sConstantAttenuation.pack(fill = X, expand = 1)
self.bind(self.sConstantAttenuation,
'Set spot light constant attenuation')
self.sLinearAttenuation = Slider.Slider(
spotPage,
text = 'Linear Attenuation', label_font=('MSSansSerif', 10),
max = 1.0,
value = self.lightNode.linear)
self.sLinearAttenuation['command'] = self.setLinearAttenuation
self.sLinearAttenuation.pack(fill = X, expand = 1)
self.bind(self.sLinearAttenuation,
'Set spot light linear attenuation')
self.sQuadraticAttenuation = Slider.Slider(
spotPage,
text = 'Quadratic Attenuation', label_font=('MSSansSerif', 10),
max = 1.0,
value = self.lightNode.quadratic)
self.sQuadraticAttenuation['command'] = self.setQuadraticAttenuation
self.sQuadraticAttenuation.pack(fill = X, expand = 1)
self.bind(self.sQuadraticAttenuation,
'Set spot light quadratic attenuation')
self.sExponent = Slider.Slider(
spotPage,
text = 'Exponent', label_font=('MSSansSerif', 10),
max = 1.0,
value = self.lightNode.exponent)
self.sExponent['command'] = self.setExponent
self.sExponent.pack(fill = X, expand = 1)
self.bind(self.sExponent,
'Set spot light exponent')
lightingGroup.pack(side=TOP, fill = X, expand =1)
return
def setLightingColorVec(self,color):
if self.lightNode==None:
return
self.lightNode.setColor(VBase4((color[0]/255),(color[1]/255),(color[2]/255),1))
return
def setSpecularColor(self,color):
if self.lightNode==None:
return
self.lightNode.setSpecColor(VBase4((color[0]/255),(color[1]/255),(color[2]/255),1))
return
def setPosition(self,position):
if self.lightNode==None:
return
self.lightNode.setPosition(Point3(position[0],position[1],position[2]))
return
def setOrientation(self, orient):
if self.lightNode==None:
return
self.lightNode.setOrientation(Vec3(orient[0],orient[1],orient[2]))
return
def setConstantAttenuation(self, value):
self.lightNode.setConstantAttenuation(value)
return
def setLinearAttenuation(self, value):
self.lightNode.setLinearAttenuation(value)
return
def setQuadraticAttenuation(self, value):
self.lightNode.setQuadraticAttenuation(value)
return
def setExponent(self, value):
self.lightNode.setExponent(value)
return
def toggleLight(self):
messenger.send('PW_toggleLight',[self.lightNode])
return
####
#### Anything about Dummy will be here!
####
def dummyInterface(self, contentFrame):
#################################################################
# dummyInterface(self, contentFrame)
# Create the basic interface for dummy Type Node
#################################################################
'''dummyInterface(self, contentFrame)
Create the basic interface for dummy Node
'''
widget = self.createEntryField(contentFrame,'Type:',
value = 'Dummy Nodepath',
command = None,
initialState='disabled',
side = 'top')
group = Pmw.Group(contentFrame,tag_text='Position',
tag_font=('MSSansSerif', 10))
self.createPosEntry(group.interior())
group.pack(side=TOP,fill = X, expand = 0, pady=3)
group = Pmw.Group(contentFrame,tag_text='Orientation',
tag_font=('MSSansSerif', 10))
self.createHprEntry(group.interior())
group.pack(side=TOP,fill = X, expand = 0, pady=3)
self.createScaleEntry(contentFrame)
group = Pmw.Group(contentFrame,tag_text='Color',
tag_font=('MSSansSerif', 10))
frame = group.interior()
self.createColorEntry(frame)
self.varAlpha = IntVar()
self.varAlpha.set(self.nodePath.hasTransparency())
checkButton = Checkbutton(frame, text='Enable Alpha',
variable=self.varAlpha, command=self.toggleAlpha)
checkButton.pack(side=RIGHT,pady=3)
group.pack(side=TOP,fill = X, expand = 0, pady=3)
return
#########
    ####### This will be called when the user tries to open the property window for the SEditor node
#########
def gridInterface(self, contentFrame):
#################################################################
# gridInterface(self, contentFrame)
# Create the basic interface for grid (Which is stolen from directGrid)
#################################################################
group = Pmw.Group(contentFrame,tag_text='Grid Property',
tag_font=('MSSansSerif', 10))
group.pack(side=TOP,fill = X, expand = 0, padx = 3, pady=3)
gridPage = group.interior()
self.xyzSnap = BooleanVar()
self.xyzSnapButton = Checkbutton(
gridPage,
text = 'XYZ Snap',
anchor = 'w', justify = LEFT,
variable = self.xyzSnap,
command = self.toggleXyzSnap)
self.xyzSnapButton.pack(fill = X, expand = 0, pady=3)
self.hprSnap = BooleanVar()
self.hprSnapButton = Checkbutton(
gridPage,
text = 'HPR Snap',
anchor = 'w', justify = LEFT,
variable = self.hprSnap,
command = self.toggleHprSnap)
self.hprSnapButton.pack(fill = X, expand = 0, pady=3)
self.xyzSnap.set(SEditor.grid.getXyzSnap())
self.hprSnap.set(SEditor.grid.getHprSnap())
self.gridSpacing = Floater.Floater(
gridPage,
text = 'Grid Spacing',
min = 0.1,
value = SEditor.grid.getGridSpacing())
self.gridSpacing['command'] = SEditor.grid.setGridSpacing
self.gridSpacing.pack(fill = X, expand = 0, pady=3)
self.gridSize = Floater.Floater(
gridPage,
text = 'Grid Size',
min = 1.0,
value = SEditor.grid.getGridSize())
self.gridSize['command'] = SEditor.grid.setGridSize
self.gridSize.pack(fill = X, expand = 0, pady=3)
self.gridSnapAngle = Dial.AngleDial(
gridPage,
text = 'Snap Angle',
style = 'mini',
value = SEditor.grid.getSnapAngle())
self.gridSnapAngle['command'] = SEditor.grid.setSnapAngle
self.gridSnapAngle.pack(fill = X, expand = 0, pady=3)
return
def toggleXyzSnap(self):
SEditor.grid.setXyzSnap(self.xyzSnap.get())
return
def toggleHprSnap(self):
SEditor.grid.setHprSnap(self.hprSnap.get())
return
###### Collision Section!!!!
def collisionInterface(self, contentFrame):
#################################################################
# collisionInterface(self, contentFrame)
# Create the basic interface for CollisionNode Type Node
#################################################################
collisionNode = self.info['collisionNode']
self.collisionObj = collisionNode.node().getSolid(0)
widget = self.createEntryField(contentFrame,'Node Type:',
value = self.type,
command = None,
initialState='disabled',
side = 'top')
cType = self.collisionObj.getType().getName()
widget = self.createEntryField(contentFrame,'Object Type:',
value = cType,
command = None,
initialState='disabled',
side = 'top')
group = Pmw.Group(contentFrame,tag_text='Position',
tag_font=('MSSansSerif', 10))
self.createPosEntry(group.interior())
group.pack(side=TOP,fill = X, expand = 0, pady=3)
group = Pmw.Group(contentFrame,tag_text='Orientation',
tag_font=('MSSansSerif', 10))
self.createHprEntry(group.interior())
group.pack(side=TOP,fill = X, expand = 0, pady=3)
self.createScaleEntry(contentFrame)
collisionGroup = Pmw.Group(contentFrame,tag_text='Collision Object Properties',
tag_font=('MSSansSerif', 10))
cObjFrame = collisionGroup.interior()
### Generate different Interface for each different kinds of Collision Objects
### Yeah, yeah. I know this part of code looks so ugly...
if cType == 'CollisionSphere':
centerPos = self.collisionObj.getCenter()
radius = self.collisionObj.getRadius()
group = Pmw.Group(cObjFrame,tag_text='Origin',
tag_font=('MSSansSerif', 10))
posInterior = Frame(group.interior())
self.cPosX = self.createcomponent('originX', (), None,
Floater.Floater, (posInterior,),
text = 'X', relief = FLAT,
value = centerPos.getX(),
label_foreground = 'Red',
entry_width = 9)
self.cPosX['commandData'] = ['sphere-o']
self.cPosX['command'] = self.setCollisionPosHprScale
self.cPosX.pack(side=LEFT,expand=0,fill=X, padx=1)
self.cPosY = self.createcomponent('originY', (), None,
Floater.Floater, (posInterior,),
text = 'Y', relief = FLAT,
value = centerPos.getY(),
label_foreground = '#00A000',
entry_width = 9)
self.cPosY['commandData'] = ['sphere-o']
self.cPosY['command'] = self.setCollisionPosHprScale
self.cPosY.pack(side=LEFT, expand=0,fill=X, padx=1)
self.cPosZ = self.createcomponent('originZ', (), None,
Floater.Floater, (posInterior,),
text = 'Z', relief = FLAT,
value = centerPos.getZ(),
label_foreground = 'Blue',
entry_width = 9)
self.cPosZ['commandData'] = ['sphere-o']
self.cPosZ['command'] = self.setCollisionPosHprScale
self.cPosZ.pack(side=LEFT, expand=0,fill=X, padx=1)
posInterior.pack(side=TOP, expand=0,fill=X, padx=3, pady=3)
group.pack(side=TOP,fill = X, expand = 0, pady=3)
scaleInterior = Frame(cObjFrame)
self.scaleS = self.createcomponent('radius', (), None,
Floater.Floater, (scaleInterior,),
text = 'Radius',
relief = FLAT,
min = 0.0001, value = radius,
resetValue = 1.0,
label_foreground = 'Blue')
self.scaleS['commandData'] = ['sphere-radius']
self.scaleS['command'] = self.setCollisionPosHprScale
self.scaleS.pack(side=LEFT,expand=0,fill=X)
scaleInterior.pack(side=TOP,expand=0,fill=X, padx=3, pady=3)
pass
elif cType == 'CollisionPolygon':
frame = Frame(cObjFrame)
label = Label(frame, text= "Sorry!",font=('MSSansSerif', 10),
borderwidth=5)
label.pack(side=LEFT)
frame.pack(side=TOP, fill=X, expand=True)
frame = Frame(cObjFrame)
label = Label(frame, text= "There is no way to change",font=('MSSansSerif', 10),
borderwidth=5)
label.pack(side=LEFT)
frame.pack(side=TOP, fill=X, expand=True)
frame = Frame(cObjFrame)
label = Label(frame, text= "the basic properties of Collision Polygon!",font=('MSSansSerif', 10),
borderwidth=5)
label.pack(side=LEFT)
frame.pack(side=TOP, fill=X, expand=True)
frame = Frame(cObjFrame)
label = Label(frame, text= "If you really need to change, recreate one...",font=('MSSansSerif', 10),
borderwidth=5)
label.pack(side=LEFT)
frame.pack(side=TOP, fill=X, expand=True)
pass
elif cType == 'CollisionSegment':
pointA = self.collisionObj.getPointA()
pointB = self.collisionObj.getPointB()
group = Pmw.Group(cObjFrame,tag_text='Point A',
tag_font=('MSSansSerif', 10))
posInterior = Frame(group.interior())
self.cPosX = self.createcomponent('pointA-X', (), None,
Floater.Floater, (posInterior,),
text = 'X', relief = FLAT,
value = pointA.getX(),
label_foreground = 'Red',
entry_width = 9)
self.cPosX['commandData'] = ['segment-A']
self.cPosX['command'] = self.setCollisionPosHprScale
self.cPosX.pack(side=LEFT,expand=0,fill=X, padx=1)
self.cPosY = self.createcomponent('pointA-Y', (), None,
Floater.Floater, (posInterior,),
text = 'Y', relief = FLAT,
value = pointA.getY(),
label_foreground = '#00A000',
entry_width = 9)
self.cPosY['commandData'] = ['segment-A']
self.cPosY['command'] = self.setCollisionPosHprScale
self.cPosY.pack(side=LEFT, expand=0,fill=X, padx=1)
self.cPosZ = self.createcomponent('pointA-Z', (), None,
Floater.Floater, (posInterior,),
text = 'Z', relief = FLAT,
value = pointA.getZ(),
label_foreground = 'Blue',
entry_width = 9)
self.cPosZ['commandData'] = ['segment-A']
self.cPosZ['command'] = self.setCollisionPosHprScale
self.cPosZ.pack(side=LEFT, expand=0,fill=X, padx=1)
posInterior.pack(side=TOP, expand=0,fill=X, padx=3, pady=3)
group.pack(side=TOP,fill = X, expand = 0, pady=3)
group = Pmw.Group(cObjFrame,tag_text='Point B',
tag_font=('MSSansSerif', 10))
posInterior = Frame(group.interior())
self.cPosXB = self.createcomponent('pointB-X', (), None,
Floater.Floater, (posInterior,),
text = 'X', relief = FLAT,
value = pointB.getX(),
label_foreground = 'Red',
entry_width = 9)
self.cPosXB['commandData'] = ['segment-B']
self.cPosXB['command'] = self.setCollisionPosHprScale
self.cPosXB.pack(side=LEFT,expand=0,fill=X, padx=1)
self.cPosYB = self.createcomponent('pointB-Y', (), None,
Floater.Floater, (posInterior,),
text = 'Y', relief = FLAT,
value = pointB.getY(),
label_foreground = '#00A000',
entry_width = 9)
self.cPosYB['commandData'] = ['segment-B']
self.cPosYB['command'] = self.setCollisionPosHprScale
self.cPosYB.pack(side=LEFT, expand=0,fill=X, padx=1)
self.cPosZB = self.createcomponent('pointB-Z', (), None,
Floater.Floater, (posInterior,),
text = 'Z', relief = FLAT,
value = pointB.getZ(),
label_foreground = 'Blue',
entry_width = 9)
self.cPosZB['commandData'] = ['segment-B']
self.cPosZB['command'] = self.setCollisionPosHprScale
self.cPosZB.pack(side=LEFT, expand=0,fill=X, padx=1)
posInterior.pack(side=TOP, expand=0,fill=X, padx=3, pady=3)
group.pack(side=TOP,fill = X, expand = 0, pady=3)
pass
elif cType == 'CollisionRay':
origin = self.collisionObj.getOrigin()
direction = self.collisionObj.getDirection()
group = Pmw.Group(cObjFrame,tag_text='Origin Point',
tag_font=('MSSansSerif', 10))
posInterior = Frame(group.interior())
self.cPosX = self.createcomponent('origin-X', (), None,
Floater.Floater, (posInterior,),
text = 'X', relief = FLAT,
value = origin.getX(),
label_foreground = 'Red',
entry_width = 9)
self.cPosX['commandData'] = ['ray-A']
self.cPosX['command'] = self.setCollisionPosHprScale
self.cPosX.pack(side=LEFT,expand=0,fill=X, padx=1)
self.cPosY = self.createcomponent('origin-Y', (), None,
Floater.Floater, (posInterior,),
text = 'Y', relief = FLAT,
value = origin.getY(),
label_foreground = '#00A000',
entry_width = 9)
self.cPosY['commandData'] = ['ray-A']
self.cPosY['command'] = self.setCollisionPosHprScale
self.cPosY.pack(side=LEFT, expand=0,fill=X, padx=1)
self.cPosZ = self.createcomponent('origin-Z', (), None,
Floater.Floater, (posInterior,),
text = 'Z', relief = FLAT,
value = origin.getZ(),
label_foreground = 'Blue',
entry_width = 9)
self.cPosZ['commandData'] = ['ray-A']
self.cPosZ['command'] = self.setCollisionPosHprScale
self.cPosZ.pack(side=LEFT, expand=0,fill=X, padx=1)
posInterior.pack(side=TOP, expand=0,fill=X, padx=3, pady=3)
group.pack(side=TOP,fill = X, expand = 0, pady=3)
group = Pmw.Group(cObjFrame,tag_text='Direction',
tag_font=('MSSansSerif', 10))
posInterior = Frame(group.interior())
self.cPosXB = self.createcomponent('direction-X', (), None,
Floater.Floater, (posInterior,),
text = 'X', relief = FLAT,
value = direction.getX(),
label_foreground = 'Red',
entry_width = 9)
self.cPosXB['commandData'] = ['ray-B']
self.cPosXB['command'] = self.setCollisionPosHprScale
self.cPosXB.pack(side=LEFT,expand=0,fill=X, padx=1)
self.cPosYB = self.createcomponent('direction-Y', (), None,
Floater.Floater, (posInterior,),
text = 'Y', relief = FLAT,
value = direction.getY(),
label_foreground = '#00A000',
entry_width = 9)
self.cPosYB['commandData'] = ['ray-B']
self.cPosYB['command'] = self.setCollisionPosHprScale
self.cPosYB.pack(side=LEFT, expand=0,fill=X, padx=1)
self.cPosZB = self.createcomponent('direction-Z', (), None,
Floater.Floater, (posInterior,),
text = 'Z', relief = FLAT,
value = direction.getZ(),
label_foreground = 'Blue',
entry_width = 9)
self.cPosZB['commandData'] = ['ray-B']
self.cPosZB['command'] = self.setCollisionPosHprScale
self.cPosZB.pack(side=LEFT, expand=0,fill=X, padx=1)
posInterior.pack(side=TOP, expand=0,fill=X, padx=3, pady=3)
group.pack(side=TOP,fill = X, expand = 0, pady=3)
pass
collisionGroup.pack(side=TOP,fill = X, expand = 0, pady=3)
return
def setCollisionPosHprScale(self, data, dataType):
#################################################################
# setCollisionPosHprScale(self, data, dataType)
        # Well, the reason that we didn't use the same one as other nodePaths
        # is that each type of collision object has its unique properties and way to set values.
        # So, they have to be separated from other nodePaths.
#################################################################
if dataType == 'sphere-o':
origin = Point3(float(self.cPosX._entry.get()),
float(self.cPosY._entry.get()),
float(self.cPosZ._entry.get()))
self.collisionObj.setCenter(origin)
elif dataType == 'sphere-radius':
self.collisionObj.setRadius(data)
elif dataType == 'segment-A':
pointA = Point3(float(self.cPosX._entry.get()),
float(self.cPosY._entry.get()),
float(self.cPosZ._entry.get()))
self.collisionObj.setPointA(pointA)
elif dataType == 'segment-B':
pointB = Point3(float(self.cPosXB._entry.get()),
float(self.cPosYB._entry.get()),
float(self.cPosZB._entry.get()))
self.collisionObj.setPointB(pointB)
elif dataType == 'ray-A':
pointA = Point3(float(self.cPosX._entry.get()),
float(self.cPosY._entry.get()),
float(self.cPosZ._entry.get()))
self.collisionObj.setOrigin(pointA)
elif dataType == 'ray-B':
pointB = Vec3(float(self.cPosXB._entry.get()),
float(self.cPosYB._entry.get()),
float(self.cPosZB._entry.get()))
self.collisionObj.setDirection(pointB)
return
#################################################################
#################################################################
    # Functions below are all callback functions
    # They will be called when the user has manipulated the node on the screen
# The message itself is sent by a task called monitorSelectedNode in the sceneEditor.
#################################################################
#################################################################
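    # For example (hypothetical values), the monitor task is expected to emit:
    #   messenger.send('forPorpertyWindow' + nodePath.getName(),
    #                  [nodePath.getPos(), nodePath.getHpr(), nodePath.getScale()])
    # which is routed to one of the trackDataFromScene* handlers below.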
def trackDataFromSceneCamera(self, pos=Point3(0,0,0), hpr=Vec3(0,0,0), scale=Point3(0,0,0)):
self.posX.set(pos.getX())
self.posY.set(pos.getY())
self.posZ.set(pos.getZ())
self.hprH.set(hpr.getX())
self.hprP.set(hpr.getY())
self.hprR.set(hpr.getZ())
return
def trackDataFromSceneModel(self, pos=Point3(0,0,0), hpr=Vec3(0,0,0), scale=Point3(0,0,0)):
self.posX.set(pos.getX())
self.posY.set(pos.getY())
self.posZ.set(pos.getZ())
self.hprH.set(hpr.getX())
self.hprP.set(hpr.getY())
self.hprR.set(hpr.getZ())
self.scale.set(scale.getX())
return
def trackDataFromSceneActor(self, pos=Point3(0,0,0), hpr=Vec3(0,0,0), scale=Point3(0,0,0)):
self.posX.set(pos.getX())
self.posY.set(pos.getY())
self.posZ.set(pos.getZ())
self.hprH.set(hpr.getX())
self.hprP.set(hpr.getY())
self.hprR.set(hpr.getZ())
self.scale.set(scale.getX())
return
def trackDataFromSceneLight(self, pos=Point3(0,0,0), hpr=Vec3(0,0,0), scale=Point3(0,0,0)):
if self.lightNode.type == 'directional':
self.dPosition.set([pos.getX(),pos.getY(),pos.getZ()])
self.dOrientation.set([hpr.getX(),hpr.getY(),hpr.getZ()])
pass
elif self.lightNode.type == 'point':
self.pPosition.set([pos.getX(),pos.getY(),pos.getZ()])
pass
return
def trackDataFromSceneDummy(self, pos=Point3(0,0,0), hpr=Vec3(0,0,0), scale=Point3(0,0,0)):
self.posX.set(pos.getX())
self.posY.set(pos.getY())
self.posZ.set(pos.getZ())
self.hprH.set(hpr.getX())
self.hprP.set(hpr.getY())
self.hprR.set(hpr.getZ())
self.scale.set(scale.getX())
return
def trackDataFromSceneCollision(self, pos=Point3(0,0,0), hpr=Vec3(0,0,0), scale=Point3(0,0,0)):
self.posX.set(pos.getX())
self.posY.set(pos.getY())
self.posZ.set(pos.getZ())
self.hprH.set(hpr.getX())
self.hprP.set(hpr.getY())
self.hprR.set(hpr.getZ())
self.scale.set(scale.getX())
return
| bsd-3-clause | -6,343,494,023,786,244,000 | 164,948,659,921,864,540 | 47.433379 | 222 | 0.476552 | false |
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/site-packages/rsa/_version133.py | 82 | 11764 | # -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. Stüvel <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Deprecated version of the RSA module
.. deprecated:: 2.0
This submodule is deprecated and will be completely removed as of version 4.0.
Module for calculating large primes, and RSA encryption, decryption,
signing and verification. Includes generating public and private keys.
WARNING: this code implements the mathematics of RSA. It is not suitable for
real-world secure cryptography purposes. It has not been reviewed by a security
expert. It does not include padding of data. There are many ways in which the
output of this module, when used without any modification, can be successfully
attacked.
"""
__author__ = "Sybren Stuvel, Marloes de Boer and Ivo Tamboer"
__date__ = "2010-02-05"
__version__ = '1.3.3'
# NOTE: Python's modulo can return negative numbers. We compensate for
# this behaviour using the abs() function
try:
import cPickle as pickle
except ImportError:
import pickle
from pickle import dumps, loads
import base64
import math
import os
import random
import sys
import types
import zlib
from rsa._compat import byte
# Display a warning that this insecure version is imported.
import warnings
warnings.warn('Insecure version of the RSA module is imported as %s, be careful'
% __name__)
warnings.warn('This submodule is deprecated and will be completely removed as of version 4.0.',
DeprecationWarning)
def gcd(p, q):
"""Returns the greatest common divisor of p and q
>>> gcd(42, 6)
6
"""
if p<q: return gcd(q, p)
if q == 0: return p
return gcd(q, abs(p%q))
def bytes2int(bytes):
"""Converts a list of bytes or a string to an integer
"""
if not (type(bytes) is types.ListType or type(bytes) is types.StringType):
raise TypeError("You must pass a string or a list")
# Convert byte stream to integer
integer = 0
for byte in bytes:
integer *= 256
if type(byte) is types.StringType: byte = ord(byte)
integer += byte
return integer
def int2bytes(number):
"""Converts a number to a string of bytes
"""
if not (type(number) is types.LongType or type(number) is types.IntType):
raise TypeError("You must pass a long or an int")
string = ""
while number > 0:
string = "%s%s" % (byte(number & 0xFF), string)
number /= 256
return string
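# Worked example (added for clarity): bytes2int('\x01\x00') == 256 and
# int2bytes(256) == '\x01\x00'; the two functions are inverses for positive
# numbers without leading zero bytes.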
def fast_exponentiation(a, p, n):
"""Calculates r = a^p mod n
"""
result = a % n
remainders = []
while p != 1:
remainders.append(p & 1)
p = p >> 1
while remainders:
rem = remainders.pop()
result = ((a ** rem) * result ** 2) % n
return result
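# Worked example (added for clarity):
#   fast_exponentiation(2, 10, 1000) == 24, since 2**10 = 1024 and 1024 % 1000 = 24.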
def read_random_int(nbits):
"""Reads a random integer of approximately nbits bits rounded up
to whole bytes"""
nbytes = ceil(nbits/8.)
randomdata = os.urandom(nbytes)
return bytes2int(randomdata)
def ceil(x):
"""ceil(x) -> int(math.ceil(x))"""
return int(math.ceil(x))
def randint(minvalue, maxvalue):
"""Returns a random integer x with minvalue <= x <= maxvalue"""
# Safety - get a lot of random data even if the range is fairly
# small
min_nbits = 32
# The range of the random numbers we need to generate
range = maxvalue - minvalue
# Which is this number of bytes
rangebytes = ceil(math.log(range, 2) / 8.)
# Convert to bits, but make sure it's always at least min_nbits*2
rangebits = max(rangebytes * 8, min_nbits * 2)
# Take a random number of bits between min_nbits and rangebits
nbits = random.randint(min_nbits, rangebits)
return (read_random_int(nbits) % range) + minvalue
def fermat_little_theorem(p):
"""Returns 1 if p may be prime, and something else if p definitely
is not prime"""
a = randint(1, p-1)
return fast_exponentiation(a, p-1, p)
def jacobi(a, b):
"""Calculates the value of the Jacobi symbol (a/b)
"""
if a % b == 0:
return 0
result = 1
while a > 1:
if a & 1:
if ((a-1)*(b-1) >> 2) & 1:
result = -result
b, a = a, b % a
else:
if ((b ** 2 - 1) >> 3) & 1:
result = -result
a = a >> 1
return result
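# Worked examples (added for clarity): jacobi(3, 7) == -1 because 3 is not a
# quadratic residue modulo 7 (the squares mod 7 are 1, 2 and 4), while
# jacobi(2, 15) == 1 since (2/3) == (2/5) == -1 and their product is 1.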
def jacobi_witness(x, n):
"""Returns False if n is an Euler pseudo-prime with base x, and
True otherwise.
"""
j = jacobi(x, n) % n
f = fast_exponentiation(x, (n-1)/2, n)
if j == f: return False
return True
def randomized_primality_testing(n, k):
"""Calculates whether n is composite (which is always correct) or
prime (which is incorrect with error probability 2**-k)
    Returns False if the number is composite, and True if it's
probably prime.
"""
q = 0.5 # Property of the jacobi_witness function
# t = int(math.ceil(k / math.log(1/q, 2)))
t = ceil(k / math.log(1/q, 2))
for i in range(t+1):
x = randint(1, n-1)
if jacobi_witness(x, n): return False
return True
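# Note (added for clarity): with q = 0.5, math.log(1/q, 2) == 1, so t == k and
# the loop above runs k+1 witness rounds; is_prime() below calls this with k = 5.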
def is_prime(number):
"""Returns True if the number is prime, and False otherwise.
"""
"""
if not fermat_little_theorem(number) == 1:
# Not prime, according to Fermat's little theorem
return False
"""
if randomized_primality_testing(number, 5):
# Prime, according to Jacobi
return True
# Not prime
return False
def getprime(nbits):
"""Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In
other words: nbits is rounded up to whole bytes.
"""
nbytes = int(math.ceil(nbits/8.))
while True:
integer = read_random_int(nbits)
# Make sure it's odd
integer |= 1
# Test for primeness
if is_prime(integer): break
# Retry if not prime
return integer
def are_relatively_prime(a, b):
"""Returns True if a and b are relatively prime, and False if they
are not.
"""
d = gcd(a, b)
return (d == 1)
def find_p_q(nbits):
"""Returns a tuple of two different primes of nbits bits"""
p = getprime(nbits)
while True:
q = getprime(nbits)
if not q == p: break
return (p, q)
def extended_euclid_gcd(a, b):
"""Returns a tuple (d, i, j) such that d = gcd(a, b) = ia + jb
"""
if b == 0:
return (a, 1, 0)
q = abs(a % b)
r = long(a / b)
(d, k, l) = extended_euclid_gcd(b, q)
return (d, l, k - l*r)
# Main function: calculate encryption and decryption keys
def calculate_keys(p, q, nbits):
"""Calculates an encryption and a decryption key for p and q, and
returns them as a tuple (e, d)"""
n = p * q
phi_n = (p-1) * (q-1)
while True:
# Make sure e has enough bits so we ensure "wrapping" through
# modulo n
e = getprime(max(8, nbits/2))
if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break
(d, i, j) = extended_euclid_gcd(e, phi_n)
if not d == 1:
raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n))
if not (e * i) % phi_n == 1:
raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n))
return (e, i)
def gen_keys(nbits):
"""Generate RSA keys of nbits bits. Returns (p, q, e, d).
Note: this can take a long time, depending on the key size.
"""
while True:
(p, q) = find_p_q(nbits)
(e, d) = calculate_keys(p, q, nbits)
# For some reason, d is sometimes negative. We don't know how
# to fix it (yet), so we keep trying until everything is shiny
if d > 0: break
return (p, q, e, d)
def gen_pubpriv_keys(nbits):
"""Generates public and private keys, and returns them as (pub,
priv).
    The public key consists of a dict {e: ..., n: ...}. The private
    key consists of a dict {d: ..., p: ..., q: ...}.
"""
(p, q, e, d) = gen_keys(nbits)
return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} )
def encrypt_int(message, ekey, n):
"""Encrypts a message using encryption key 'ekey', working modulo
n"""
if type(message) is types.IntType:
return encrypt_int(long(message), ekey, n)
if not type(message) is types.LongType:
raise TypeError("You must pass a long or an int")
if message > 0 and \
math.floor(math.log(message, 2)) > math.floor(math.log(n, 2)):
raise OverflowError("The message is too long")
return fast_exponentiation(message, ekey, n)
def decrypt_int(cyphertext, dkey, n):
"""Decrypts a cypher text using the decryption key 'dkey', working
modulo n"""
return encrypt_int(cyphertext, dkey, n)
def sign_int(message, dkey, n):
"""Signs 'message' using key 'dkey', working modulo n"""
return decrypt_int(message, dkey, n)
def verify_int(signed, ekey, n):
"""verifies 'signed' using key 'ekey', working modulo n"""
return encrypt_int(signed, ekey, n)
def picklechops(chops):
"""Pickles and base64encodes it's argument chops"""
value = zlib.compress(dumps(chops))
encoded = base64.encodestring(value)
return encoded.strip()
def unpicklechops(string):
"""base64decodes and unpickes it's argument string into chops"""
return loads(zlib.decompress(base64.decodestring(string)))
def chopstring(message, key, n, funcref):
"""Splits 'message' into chops that are at most as long as n,
converts these into integers, and calls funcref(integer, key, n)
for each chop.
Used by 'encrypt' and 'sign'.
"""
msglen = len(message)
mbits = msglen * 8
nbits = int(math.floor(math.log(n, 2)))
nbytes = nbits / 8
blocks = msglen / nbytes
if msglen % nbytes > 0:
blocks += 1
cypher = []
for bindex in range(blocks):
offset = bindex * nbytes
block = message[offset:offset+nbytes]
value = bytes2int(block)
cypher.append(funcref(value, key, n))
return picklechops(cypher)
def gluechops(chops, key, n, funcref):
"""Glues chops back together into a string. calls
funcref(integer, key, n) for each chop.
Used by 'decrypt' and 'verify'.
"""
message = ""
chops = unpicklechops(chops)
for cpart in chops:
mpart = funcref(cpart, key, n)
message += int2bytes(mpart)
return message
def encrypt(message, key):
"""Encrypts a string 'message' with the public key 'key'"""
return chopstring(message, key['e'], key['n'], encrypt_int)
def sign(message, key):
"""Signs a string 'message' with the private key 'key'"""
return chopstring(message, key['d'], key['p']*key['q'], decrypt_int)
def decrypt(cypher, key):
"""Decrypts a cypher with the private key 'key'"""
return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int)
def verify(cypher, key):
"""Verifies a cypher with the public key 'key'"""
return gluechops(cypher, key['e'], key['n'], encrypt_int)
# Do doctest if we're not imported
if __name__ == "__main__":
import doctest
doctest.testmod()
__all__ = ["gen_pubpriv_keys", "encrypt", "decrypt", "sign", "verify"]
| gpl-3.0 | -6,593,819,275,662,260,000 | 1,804,529,373,870,645,200 | 25.673469 | 97 | 0.616254 | false |
robertmattmueller/sdac-compiler | sympy/combinatorics/tests/test_subsets.py | 120 | 1918 | from sympy.combinatorics import Subset
def test_subset():
a = Subset(['c', 'd'], ['a', 'b', 'c', 'd'])
assert a.next_binary() == Subset(['b'], ['a', 'b', 'c', 'd'])
assert a.prev_binary() == Subset(['c'], ['a', 'b', 'c', 'd'])
assert a.next_lexicographic() == Subset(['d'], ['a', 'b', 'c', 'd'])
assert a.prev_lexicographic() == Subset(['c'], ['a', 'b', 'c', 'd'])
assert a.next_gray() == Subset(['c'], ['a', 'b', 'c', 'd'])
assert a.prev_gray() == Subset(['d'], ['a', 'b', 'c', 'd'])
assert a.rank_binary == 3
assert a.rank_lexicographic == 14
assert a.rank_gray == 2
assert a.cardinality == 16
a = Subset([2, 5, 7], [1, 2, 3, 4, 5, 6, 7])
assert a.next_binary() == Subset([2, 5, 6], [1, 2, 3, 4, 5, 6, 7])
assert a.prev_binary() == Subset([2, 5], [1, 2, 3, 4, 5, 6, 7])
assert a.next_lexicographic() == Subset([2, 6], [1, 2, 3, 4, 5, 6, 7])
assert a.prev_lexicographic() == Subset([2, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7])
assert a.next_gray() == Subset([2, 5, 6, 7], [1, 2, 3, 4, 5, 6, 7])
assert a.prev_gray() == Subset([2, 5], [1, 2, 3, 4, 5, 6, 7])
assert a.rank_binary == 37
assert a.rank_lexicographic == 93
assert a.rank_gray == 57
assert a.cardinality == 128
superset = ['a', 'b', 'c', 'd']
assert Subset.unrank_binary(4, superset).rank_binary == 4
assert Subset.unrank_gray(10, superset).rank_gray == 10
superset = [1, 2, 3, 4, 5, 6, 7, 8, 9]
assert Subset.unrank_binary(33, superset).rank_binary == 33
assert Subset.unrank_gray(25, superset).rank_gray == 25
a = Subset([], ['a', 'b', 'c', 'd'])
i = 1
while a.subset != Subset(['d'], ['a', 'b', 'c', 'd']).subset:
a = a.next_lexicographic()
i = i + 1
assert i == 16
i = 1
while a.subset != Subset([], ['a', 'b', 'c', 'd']).subset:
a = a.prev_lexicographic()
i = i + 1
assert i == 16
| gpl-3.0 | -8,247,689,883,960,117,000 | -6,746,505,318,714,085,000 | 38.958333 | 80 | 0.501564 | false |
michael-lazar/rtv | rtv/packages/praw/helpers.py | 2 | 19086 | # This file is part of PRAW.
#
# PRAW is free software: you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later
# version.
#
# PRAW is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# PRAW. If not, see <http://www.gnu.org/licenses/>.
"""
Helper functions.
The functions here provide functionality that is often needed by programs using
PRAW, but which isn't part of reddit's API.
"""
from __future__ import unicode_literals
import six
import sys
import time
from collections import deque
from functools import partial
from timeit import default_timer as timer
from .errors import HTTPException, PRAWException
from operator import attrgetter
BACKOFF_START = 4 # Minimum number of seconds to sleep during errors
KEEP_ITEMS = 128 # On each iteration only remember the first # items
# for conversion between broken reddit timestamps and unix timestamps
REDDIT_TIMESTAMP_OFFSET = 28800
def comment_stream(reddit_session, subreddit, limit=None, verbosity=1):
"""Indefinitely yield new comments from the provided subreddit.
Comments are yielded from oldest to newest.
:param reddit_session: The reddit_session to make requests from. In all the
examples this is assigned to the variable ``r``.
:param subreddit: Either a subreddit object, or the name of a
subreddit. Use `all` to get the comment stream for all comments made to
reddit.
:param limit: The maximum number of comments to fetch in a single
iteration. When None, fetch all available comments (reddit limits this
        to 1000, or a multiple of 1000 for multi-subreddits). If this number is
too small, comments may be missed.
:param verbosity: A number that controls the amount of output produced to
stderr. <= 0: no output; >= 1: output the total number of comments
processed and provide the short-term number of comments processed per
second; >= 2: output when additional delays are added in order to avoid
subsequent unexpected http errors. >= 3: output debugging information
regarding the comment stream. (Default: 1)
"""
get_function = partial(reddit_session.get_comments,
six.text_type(subreddit))
return _stream_generator(get_function, limit, verbosity)
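# Usage sketch (assumption: ``r`` is an already-authenticated praw Reddit
# session and ``handle`` is a caller-supplied callable):
#
#     for comment in comment_stream(r, 'learnpython', limit=100, verbosity=0):
#         handle(comment)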
def submission_stream(reddit_session, subreddit, limit=None, verbosity=1):
"""Indefinitely yield new submissions from the provided subreddit.
Submissions are yielded from oldest to newest.
:param reddit_session: The reddit_session to make requests from. In all the
examples this is assigned to the variable ``r``.
:param subreddit: Either a subreddit object, or the name of a
subreddit. Use `all` to get the submissions stream for all submissions
made to reddit.
:param limit: The maximum number of submissions to fetch in a single
iteration. When None, fetch all available submissions (reddit limits
        this to 1000, or a multiple of 1000 for multi-subreddits). If this number
is too small, submissions may be missed. Since there isn't a limit to
the number of submissions that can be retrieved from r/all, the limit
will be set to 1000 when limit is None.
:param verbosity: A number that controls the amount of output produced to
stderr. <= 0: no output; >= 1: output the total number of submissions
processed and provide the short-term number of submissions processed
per second; >= 2: output when additional delays are added in order to
avoid subsequent unexpected http errors. >= 3: output debugging
information regarding the submission stream. (Default: 1)
"""
if six.text_type(subreddit).lower() == "all":
if limit is None:
limit = 1000
if not hasattr(subreddit, 'reddit_session'):
subreddit = reddit_session.get_subreddit(subreddit)
return _stream_generator(subreddit.get_new, limit, verbosity)
def valid_redditors(redditors, sub):
"""Return a verified list of valid Redditor instances.
:param redditors: A list comprised of Redditor instances and/or strings
that are to be verified as actual redditor accounts.
:param sub: A Subreddit instance that the authenticated account has
flair changing permission on.
Note: Flair will be unset for all valid redditors in `redditors` on the
subreddit `sub`. A valid redditor is defined as a redditor that is
registered on reddit.
"""
simplified = list(set(six.text_type(x).lower() for x in redditors))
return [sub.reddit_session.get_redditor(simplified[i], fetch=False)
for (i, resp) in enumerate(sub.set_flair_csv(
({'user': x, 'flair_text': x} for x in simplified)))
if resp['ok']]
def submissions_between(reddit_session,
subreddit,
lowest_timestamp=None,
highest_timestamp=None,
newest_first=True,
extra_cloudsearch_fields=None,
verbosity=1):
"""Yield submissions between two timestamps.
If both ``highest_timestamp`` and ``lowest_timestamp`` are unspecified,
yields all submissions in the ``subreddit``.
Submissions are yielded from newest to oldest(like in the "new" queue).
:param reddit_session: The reddit_session to make requests from. In all the
examples this is assigned to the variable ``r``.
:param subreddit: Either a subreddit object, or the name of a
subreddit. Use `all` to get the submissions stream for all submissions
made to reddit.
    :param lowest_timestamp: The lower bound for ``created_utc`` attribute of
submissions.
(Default: subreddit's created_utc or 0 when subreddit == "all").
:param highest_timestamp: The upper bound for ``created_utc`` attribute
of submissions. (Default: current unix time)
NOTE: both highest_timestamp and lowest_timestamp are proper
unix timestamps(just like ``created_utc`` attributes)
:param newest_first: If set to true, yields submissions
from newest to oldest. Otherwise yields submissions
from oldest to newest
:param extra_cloudsearch_fields: Allows extra filtering of results by
parameters like author, self. Full list is available here:
https://www.reddit.com/wiki/search
:param verbosity: A number that controls the amount of output produced to
stderr. <= 0: no output; >= 1: output the total number of submissions
processed; >= 2: output debugging information regarding
the search queries. (Default: 1)
"""
def debug(msg, level):
if verbosity >= level:
sys.stderr.write(msg + '\n')
def format_query_field(k, v):
if k in ["nsfw", "self"]:
# even though documentation lists "no" and "yes"
# as possible values, in reality they don't work
if v not in [0, 1, "0", "1"]:
raise PRAWException("Invalid value for the extra"
"field {}. Only '0' and '1' are"
"valid values.".format(k))
return "{}:{}".format(k, v)
return "{}:'{}'".format(k, v)
if extra_cloudsearch_fields is None:
extra_cloudsearch_fields = {}
extra_query_part = " ".join(
[format_query_field(k, v) for (k, v)
in sorted(extra_cloudsearch_fields.items())]
)
if highest_timestamp is None:
highest_timestamp = int(time.time()) + REDDIT_TIMESTAMP_OFFSET
else:
highest_timestamp = int(highest_timestamp) + REDDIT_TIMESTAMP_OFFSET
if lowest_timestamp is not None:
lowest_timestamp = int(lowest_timestamp) + REDDIT_TIMESTAMP_OFFSET
elif not isinstance(subreddit, six.string_types):
lowest_timestamp = int(subreddit.created)
elif subreddit not in ("all", "contrib", "mod", "friend"):
lowest_timestamp = int(reddit_session.get_subreddit(subreddit).created)
else:
lowest_timestamp = 0
original_highest_timestamp = highest_timestamp
original_lowest_timestamp = lowest_timestamp
# When making timestamp:X..Y queries, reddit misses submissions
# inside X..Y range, but they can be found inside Y..Z range
# It is not clear what is the value of Z should be, but it seems
# like the difference is usually about ~1 hour or less
# To be sure, let's set the workaround offset to 2 hours
out_of_order_submissions_workaround_offset = 7200
highest_timestamp += out_of_order_submissions_workaround_offset
lowest_timestamp -= out_of_order_submissions_workaround_offset
# Those parameters work ok, but there may be a better set of parameters
window_size = 60 * 60
search_limit = 100
min_search_results_in_window = 50
window_adjustment_ratio = 1.25
backoff = BACKOFF_START
processed_submissions = 0
prev_win_increased = False
prev_win_decreased = False
while highest_timestamp >= lowest_timestamp:
try:
if newest_first:
t1 = max(highest_timestamp - window_size, lowest_timestamp)
t2 = highest_timestamp
else:
t1 = lowest_timestamp
t2 = min(lowest_timestamp + window_size, highest_timestamp)
search_query = 'timestamp:{}..{}'.format(t1, t2)
if extra_query_part:
search_query = "(and {} {})".format(search_query,
extra_query_part)
debug(search_query, 3)
search_results = list(reddit_session.search(search_query,
subreddit=subreddit,
limit=search_limit,
syntax='cloudsearch',
sort='new'))
debug("Received {0} search results for query {1}"
.format(len(search_results), search_query),
2)
backoff = BACKOFF_START
except HTTPException as exc:
debug("{0}. Sleeping for {1} seconds".format(exc, backoff), 2)
time.sleep(backoff)
backoff *= 2
continue
if len(search_results) >= search_limit:
power = 2 if prev_win_decreased else 1
window_size = int(window_size / window_adjustment_ratio**power)
prev_win_decreased = True
debug("Decreasing window size to {0} seconds".format(window_size),
2)
# Since it is possible that there are more submissions
# in the current window, we have to re-do the request
# with reduced window
continue
else:
prev_win_decreased = False
search_results = [s for s in search_results
if original_lowest_timestamp <= s.created and
s.created <= original_highest_timestamp]
for submission in sorted(search_results,
key=attrgetter('created_utc', 'id'),
reverse=newest_first):
yield submission
processed_submissions += len(search_results)
debug('Total processed submissions: {}'
.format(processed_submissions), 1)
if newest_first:
highest_timestamp -= (window_size + 1)
else:
lowest_timestamp += (window_size + 1)
if len(search_results) < min_search_results_in_window:
power = 2 if prev_win_increased else 1
window_size = int(window_size * window_adjustment_ratio**power)
prev_win_increased = True
debug("Increasing window size to {0} seconds"
.format(window_size), 2)
else:
prev_win_increased = False
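# Usage sketch (assumptions: ``r`` is a praw Reddit session and ``start`` is a
# unix timestamp); iterates one day of submissions from oldest to newest:
#
#     for submission in submissions_between(r, 'python',
#                                           lowest_timestamp=start,
#                                           highest_timestamp=start + 86400,
#                                           newest_first=False):
#         print(submission.title)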
def _stream_generator(get_function, limit=None, verbosity=1):
def debug(msg, level):
if verbosity >= level:
sys.stderr.write(msg + '\n')
def b36_id(item):
return int(item.id, 36)
seen = BoundedSet(KEEP_ITEMS * 16)
before = None
count = 0 # Count is incremented to bypass the cache
processed = 0
backoff = BACKOFF_START
while True:
items = []
sleep = None
start = timer()
try:
i = None
params = {'uniq': count}
count = (count + 1) % 100
if before:
params['before'] = before
gen = enumerate(get_function(limit=limit, params=params))
for i, item in gen:
if b36_id(item) in seen:
if i == 0:
if before is not None:
# reddit sent us out of order data -- log it
debug('(INFO) {0} already seen with before of {1}'
.format(item.fullname, before), 3)
before = None
break
if i == 0: # Always the first item in the generator
before = item.fullname
if b36_id(item) not in seen:
items.append(item)
processed += 1
if verbosity >= 1 and processed % 100 == 0:
sys.stderr.write(' Items: {0} \r'
.format(processed))
sys.stderr.flush()
if i < KEEP_ITEMS:
seen.add(b36_id(item))
else: # Generator exhausted
if i is None: # Generator yielded no items
assert before is not None
# Try again without before as the before item may be too
# old or no longer exist.
before = None
backoff = BACKOFF_START
except HTTPException as exc:
sleep = (backoff, '{0}. Sleeping for {{0}} seconds.'.format(exc),
2)
backoff *= 2
# Provide rate limit
if verbosity >= 1:
rate = len(items) / (timer() - start)
sys.stderr.write(' Items: {0} ({1:.2f} ips) \r'
.format(processed, rate))
sys.stderr.flush()
# Yield items from oldest to newest
for item in items[::-1]:
yield item
# Sleep if necessary
if sleep:
sleep_time, msg, msg_level = sleep # pylint: disable=W0633
debug(msg.format(sleep_time), msg_level)
time.sleep(sleep_time)
def chunk_sequence(sequence, chunk_length, allow_incomplete=True):
"""Given a sequence, divide it into sequences of length `chunk_length`.
:param allow_incomplete: If True, allow final chunk to be shorter if the
given sequence is not an exact multiple of `chunk_length`.
If False, the incomplete chunk will be discarded.
"""
(complete, leftover) = divmod(len(sequence), chunk_length)
if not allow_incomplete:
leftover = 0
chunk_count = complete + min(leftover, 1)
chunks = []
for x in range(chunk_count):
left = chunk_length * x
right = left + chunk_length
chunks.append(sequence[left:right])
return chunks
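# For example: chunk_sequence([1, 2, 3, 4, 5], 2) -> [[1, 2], [3, 4], [5]],
# while chunk_sequence([1, 2, 3, 4, 5], 2, allow_incomplete=False) drops the
# trailing [5] and returns [[1, 2], [3, 4]].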
def convert_id36_to_numeric_id(id36):
"""Convert strings representing base36 numbers into an integer."""
if not isinstance(id36, six.string_types) or id36.count("_") > 0:
raise ValueError("must supply base36 string, not fullname (e.g. use "
"xxxxx, not t3_xxxxx)")
return int(id36, 36)
def convert_numeric_id_to_id36(numeric_id):
"""Convert an integer into its base36 string representation.
This method has been cleaned up slightly to improve readability. For more
info see:
https://github.com/reddit/reddit/blob/master/r2/r2/lib/utils/_utils.pyx
https://www.reddit.com/r/redditdev/comments/n624n/submission_ids_question/
https://en.wikipedia.org/wiki/Base36
"""
# base36 allows negative numbers, but reddit does not
if not isinstance(numeric_id, six.integer_types) or numeric_id < 0:
raise ValueError("must supply a positive int/long")
# Alphabet used for base 36 conversion
alphabet = '0123456789abcdefghijklmnopqrstuvwxyz'
alphabet_len = len(alphabet)
# Temp assign
current_number = numeric_id
base36 = []
# Current_number must be greater than alphabet length to while/divmod
if 0 <= current_number < alphabet_len:
return alphabet[current_number]
# Break up into chunks
while current_number != 0:
current_number, rem = divmod(current_number, alphabet_len)
base36.append(alphabet[rem])
# String is built in reverse order
return ''.join(reversed(base36))
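# For example: convert_numeric_id_to_id36(1234567890) returns 'kf12oi', and
# convert_id36_to_numeric_id('kf12oi') round-trips back to 1234567890.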
def flatten_tree(tree, nested_attr='replies', depth_first=False):
"""Return a flattened version of the passed in tree.
:param nested_attr: The attribute name that contains the nested items.
Defaults to ``replies`` which is suitable for comments.
:param depth_first: When true, add to the list in a depth-first manner
rather than the default breadth-first manner.
"""
stack = deque(tree)
extend = stack.extend if depth_first else stack.extendleft
retval = []
while stack:
item = stack.popleft()
nested = getattr(item, nested_attr, None)
if nested:
extend(nested)
retval.append(item)
return retval
def normalize_url(url):
"""Return url after stripping trailing .json and trailing slashes."""
if url.endswith('.json'):
url = url[:-5]
if url.endswith('/'):
url = url[:-1]
return url
class BoundedSet(object):
"""A set with a maximum size that evicts the oldest items when necessary.
This class does not implement the complete set interface.
"""
def __init__(self, max_items):
"""Construct an instance of the BoundedSet."""
self.max_items = max_items
self._fifo = []
self._set = set()
def __contains__(self, item):
"""Test if the BoundedSet contains item."""
return item in self._set
def add(self, item):
"""Add an item to the set discarding the oldest item if necessary."""
if item in self._set:
self._fifo.remove(item)
elif len(self._set) == self.max_items:
self._set.remove(self._fifo.pop(0))
self._fifo.append(item)
self._set.add(item)
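# Example of the eviction behaviour (sketch):
#
#     s = BoundedSet(2)
#     s.add(1); s.add(2); s.add(3)   # 1 is evicted when the third item arrives
#     1 in s   # False
#     3 in s   # True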
| mit | -2,882,465,930,174,155,000 | 865,459,185,255,255,400 | 38.679834 | 79 | 0.611443 | false |
flyfei/python-for-android | python3-alpha/python3-src/Lib/plat-unixware7/STROPTS.py | 106 | 6524 | # Generated by h2py from /usr/include/sys/stropts.h
# Included from sys/types.h
def quad_low(x): return x.val[0]
ADT_EMASKSIZE = 8
SHRT_MIN = -32768
SHRT_MAX = 32767
INT_MIN = (-2147483647-1)
INT_MAX = 2147483647
LONG_MIN = (-2147483647-1)
LONG_MAX = 2147483647
OFF32_MAX = LONG_MAX
ISTAT_ASSERTED = 0
ISTAT_ASSUMED = 1
ISTAT_NONE = 2
OFF_MAX = OFF32_MAX
CLOCK_MAX = LONG_MAX
P_MYID = (-1)
P_MYHOSTID = (-1)
# Included from sys/select.h
FD_SETSIZE = 4096
NBBY = 8
NULL = 0
# Included from sys/conf.h
D_NEW = 0x00
D_OLD = 0x01
D_DMA = 0x02
D_BLKOFF = 0x400
D_LFS = 0x8000
D_STR = 0x0800
D_MOD = 0x1000
D_PSEUDO = 0x2000
D_RANDOM = 0x4000
D_HOT = 0x10000
D_SEEKNEG = 0x04
D_TAPE = 0x08
D_NOBRKUP = 0x10
D_INITPUB = 0x20
D_NOSPECMACDATA = 0x40
D_RDWEQ = 0x80
SECMASK = (D_INITPUB|D_NOSPECMACDATA|D_RDWEQ)
DAF_REQDMA = 0x1
DAF_PHYSREQ = 0x2
DAF_PRE8 = 0x4
DAF_STATIC = 0x8
DAF_STR = 0x10
D_MP = 0x100
D_UPF = 0x200
ROOTFS_NAMESZ = 7
FMNAMESZ = 8
MCD_VERSION = 1
DI_BCBP = 0
DI_MEDIA = 1
# Included from sys/secsys.h
ES_MACOPENLID = 1
ES_MACSYSLID = 2
ES_MACROOTLID = 3
ES_PRVINFO = 4
ES_PRVSETCNT = 5
ES_PRVSETS = 6
ES_MACADTLID = 7
ES_PRVID = 8
ES_TPGETMAJOR = 9
SA_EXEC = 0o01
SA_WRITE = 0o02
SA_READ = 0o04
SA_SUBSIZE = 0o10
# Included from sys/stropts_f.h
X_STR = (ord('S')<<8)
X_I_BASE = (X_STR|0o200)
X_I_NREAD = (X_STR|0o201)
X_I_PUSH = (X_STR|0o202)
X_I_POP = (X_STR|0o203)
X_I_LOOK = (X_STR|0o204)
X_I_FLUSH = (X_STR|0o205)
X_I_SRDOPT = (X_STR|0o206)
X_I_GRDOPT = (X_STR|0o207)
X_I_STR = (X_STR|0o210)
X_I_SETSIG = (X_STR|0o211)
X_I_GETSIG = (X_STR|0o212)
X_I_FIND = (X_STR|0o213)
X_I_LINK = (X_STR|0o214)
X_I_UNLINK = (X_STR|0o215)
X_I_PEEK = (X_STR|0o217)
X_I_FDINSERT = (X_STR|0o220)
X_I_SENDFD = (X_STR|0o221)
X_I_RECVFD = (X_STR|0o222)
# Included from unistd.h
# Included from sys/unistd.h
R_OK = 0o04
W_OK = 0o02
X_OK = 0o01
F_OK = 000
EFF_ONLY_OK = 0o10
EX_OK = 0o20
SEEK_SET = 0
SEEK_CUR = 1
SEEK_END = 2
_SC_ARG_MAX = 1
_SC_CHILD_MAX = 2
_SC_CLK_TCK = 3
_SC_NGROUPS_MAX = 4
_SC_OPEN_MAX = 5
_SC_JOB_CONTROL = 6
_SC_SAVED_IDS = 7
_SC_VERSION = 8
_SC_PASS_MAX = 9
_SC_LOGNAME_MAX = 10
_SC_PAGESIZE = 11
_SC_PAGE_SIZE = _SC_PAGESIZE
_SC_XOPEN_VERSION = 12
_SC_NACLS_MAX = 13
_SC_NPROCESSORS_CONF = 14
_SC_NPROCESSORS_ONLN = 15
_SC_NPROCESSES = 39
_SC_TOTAL_MEMORY = 40
_SC_USEABLE_MEMORY = 41
_SC_GENERAL_MEMORY = 42
_SC_DEDICATED_MEMORY = 43
_SC_NCGS_CONF = 44
_SC_NCGS_ONLN = 45
_SC_MAX_CPUS_PER_CG = 46
_SC_CG_SIMPLE_IMPL = 47
_SC_CACHE_LINE = 48
_SC_SYSTEM_ID = 49
_SC_THREADS = 51
_SC_THREAD_ATTR_STACKADDR = 52
_SC_THREAD_ATTR_STACKSIZE = 53
_SC_THREAD_DESTRUCTOR_ITERATIONS = 54
_SC_THREAD_KEYS_MAX = 55
_SC_THREAD_PRIORITY_SCHEDULING = 56
_SC_THREAD_PRIO_INHERIT = 57
_SC_THREAD_PRIO_PROTECT = 58
_SC_THREAD_STACK_MIN = 59
_SC_THREAD_PROCESS_SHARED = 60
_SC_THREAD_SAFE_FUNCTIONS = 61
_SC_THREAD_THREADS_MAX = 62
_SC_KERNEL_VM = 63
_SC_TZNAME_MAX = 320
_SC_STREAM_MAX = 321
_SC_XOPEN_CRYPT = 323
_SC_XOPEN_ENH_I18N = 324
_SC_XOPEN_SHM = 325
_SC_XOPEN_XCU_VERSION = 327
_SC_AES_OS_VERSION = 330
_SC_ATEXIT_MAX = 331
_SC_2_C_BIND = 350
_SC_2_C_DEV = 351
_SC_2_C_VERSION = 352
_SC_2_CHAR_TERM = 353
_SC_2_FORT_DEV = 354
_SC_2_FORT_RUN = 355
_SC_2_LOCALEDEF = 356
_SC_2_SW_DEV = 357
_SC_2_UPE = 358
_SC_2_VERSION = 359
_SC_BC_BASE_MAX = 370
_SC_BC_DIM_MAX = 371
_SC_BC_SCALE_MAX = 372
_SC_BC_STRING_MAX = 373
_SC_COLL_WEIGHTS_MAX = 380
_SC_EXPR_NEST_MAX = 381
_SC_LINE_MAX = 382
_SC_RE_DUP_MAX = 383
_SC_IOV_MAX = 390
_SC_NPROC_CONF = 391
_SC_NPROC_ONLN = 392
_SC_XOPEN_UNIX = 400
_SC_SEMAPHORES = 440
_CS_PATH = 1
__O_CS_HOSTNAME = 2
_CS_RELEASE = 3
_CS_VERSION = 4
__O_CS_MACHINE = 5
__O_CS_ARCHITECTURE = 6
_CS_HW_SERIAL = 7
__O_CS_HW_PROVIDER = 8
_CS_SRPC_DOMAIN = 9
_CS_INITTAB_NAME = 10
__O_CS_SYSNAME = 11
_CS_LFS_CFLAGS = 20
_CS_LFS_LDFLAGS = 21
_CS_LFS_LIBS = 22
_CS_LFS_LINTFLAGS = 23
_CS_LFS64_CFLAGS = 24
_CS_LFS64_LDFLAGS = 25
_CS_LFS64_LIBS = 26
_CS_LFS64_LINTFLAGS = 27
_CS_ARCHITECTURE = 100
_CS_BUSTYPES = 101
_CS_HOSTNAME = 102
_CS_HW_PROVIDER = 103
_CS_KERNEL_STAMP = 104
_CS_MACHINE = 105
_CS_OS_BASE = 106
_CS_OS_PROVIDER = 107
_CS_SYSNAME = 108
_CS_USER_LIMIT = 109
_PC_LINK_MAX = 1
_PC_MAX_CANON = 2
_PC_MAX_INPUT = 3
_PC_NAME_MAX = 4
_PC_PATH_MAX = 5
_PC_PIPE_BUF = 6
_PC_NO_TRUNC = 7
_PC_VDISABLE = 8
_PC_CHOWN_RESTRICTED = 9
_PC_FILESIZEBITS = 10
_POSIX_VERSION = 199009
_XOPEN_VERSION = 4
GF_PATH = "/etc/group"
PF_PATH = "/etc/passwd"
F_ULOCK = 0
F_LOCK = 1
F_TLOCK = 2
F_TEST = 3
_POSIX_JOB_CONTROL = 1
_POSIX_SAVED_IDS = 1
_POSIX_VDISABLE = 0
NULL = 0
STDIN_FILENO = 0
STDOUT_FILENO = 1
STDERR_FILENO = 2
_XOPEN_UNIX = 1
_XOPEN_ENH_I18N = 1
_XOPEN_XPG4 = 1
_POSIX2_C_VERSION = 199209
_POSIX2_VERSION = 199209
_XOPEN_XCU_VERSION = 4
_POSIX_SEMAPHORES = 1
_POSIX_THREADS = 1
_POSIX_THREAD_ATTR_STACKADDR = 1
_POSIX_THREAD_ATTR_STACKSIZE = 1
_POSIX_THREAD_PRIORITY_SCHEDULING = 1
_POSIX_THREAD_PROCESS_SHARED = 1
_POSIX_THREAD_SAFE_FUNCTIONS = 1
_POSIX2_C_BIND = 1
_POSIX2_CHAR_TERM = 1
_POSIX2_FORT_RUN = 1
_POSIX2_LOCALEDEF = 1
_POSIX2_UPE = 1
_LFS_ASYNCHRONOUS_IO = 1
_LFS_LARGEFILE = 1
_LFS64_ASYNCHRONOUS_IO = 1
_LFS64_LARGEFILE = 1
_LFS64_STDIO = 1
FMNAMESZ = 8
SNDZERO = 0x001
SNDPIPE = 0x002
RNORM = 0x000
RMSGD = 0x001
RMSGN = 0x002
RMODEMASK = 0x003
RPROTDAT = 0x004
RPROTDIS = 0x008
RPROTNORM = 0x010
RPROTMASK = 0x01c
FLUSHR = 0x01
FLUSHW = 0x02
FLUSHRW = 0x03
FLUSHBAND = 0x04
S_INPUT = 0x0001
S_HIPRI = 0x0002
S_OUTPUT = 0x0004
S_MSG = 0x0008
S_ERROR = 0x0010
S_HANGUP = 0x0020
S_RDNORM = 0x0040
S_WRNORM = S_OUTPUT
S_RDBAND = 0x0080
S_WRBAND = 0x0100
S_BANDURG = 0x0200
RS_HIPRI = 0x01
MSG_HIPRI = 0x01
MSG_ANY = 0x02
MSG_BAND = 0x04
MSG_DISCARD = 0x08
MSG_PEEKIOCTL = 0x10
MORECTL = 1
MOREDATA = 2
MUXID_ALL = (-1)
ANYMARK = 0x01
LASTMARK = 0x02
STR = (ord('S')<<8)
I_NREAD = (STR|0o1)
I_PUSH = (STR|0o2)
I_POP = (STR|0o3)
I_LOOK = (STR|0o4)
I_FLUSH = (STR|0o5)
I_SRDOPT = (STR|0o6)
I_GRDOPT = (STR|0o7)
I_STR = (STR|0o10)
I_SETSIG = (STR|0o11)
I_GETSIG = (STR|0o12)
I_FIND = (STR|0o13)
I_LINK = (STR|0o14)
I_UNLINK = (STR|0o15)
I_PEEK = (STR|0o17)
I_FDINSERT = (STR|0o20)
I_SENDFD = (STR|0o21)
I_RECVFD = (STR|0o22)
I_E_RECVFD = (STR|0o16)
I_RECVFD = (STR|0o16)
I_RECVFD = (STR|0o22)
I_SWROPT = (STR|0o23)
I_GWROPT = (STR|0o24)
I_LIST = (STR|0o25)
I_PLINK = (STR|0o26)
I_PUNLINK = (STR|0o27)
I_FLUSHBAND = (STR|0o34)
I_CKBAND = (STR|0o35)
I_GETBAND = (STR|0o36)
I_ATMARK = (STR|0o37)
I_SETCLTIME = (STR|0o40)
I_GETCLTIME = (STR|0o41)
I_CANPUT = (STR|0o42)
I_S_RECVFD = (STR|0o43)
I_STATS = (STR|0o44)
I_BIGPIPE = (STR|0o45)
I_GETTP = (STR|0o46)
INFTIM = -1
| apache-2.0 | 7,269,936,939,459,368,000 | 3,560,960,233,106,017,300 | 18.890244 | 51 | 0.669528 | false |
yceruto/django | tests/model_fields/tests.py | 6 | 26311 | from __future__ import unicode_literals
import datetime
from decimal import Decimal
import unittest
import warnings
from django import test
from django import forms
from django.core.exceptions import ValidationError
from django.db import connection, models, IntegrityError
from django.db.models.fields import (
AutoField, BigIntegerField, BinaryField, BooleanField, CharField,
CommaSeparatedIntegerField, DateField, DateTimeField, DecimalField,
EmailField, FilePathField, FloatField, IntegerField, IPAddressField,
GenericIPAddressField, NOT_PROVIDED, NullBooleanField, PositiveIntegerField,
PositiveSmallIntegerField, SlugField, SmallIntegerField, TextField,
TimeField, URLField)
from django.db.models.fields.files import FileField, ImageField
from django.utils import six
from django.utils.functional import lazy
from .models import (
Foo, Bar, Whiz, BigD, BigS, BigInt, Post, NullBooleanModel,
BooleanModel, DataModel, Document, RenamedField,
VerboseNameField, FksToBooleans)
class BasicFieldTests(test.TestCase):
def test_show_hidden_initial(self):
"""
Regression test for #12913. Make sure fields with choices respect
show_hidden_initial as a kwarg to models.Field.formfield()
"""
choices = [(0, 0), (1, 1)]
model_field = models.Field(choices=choices)
form_field = model_field.formfield(show_hidden_initial=True)
self.assertTrue(form_field.show_hidden_initial)
form_field = model_field.formfield(show_hidden_initial=False)
self.assertFalse(form_field.show_hidden_initial)
def test_nullbooleanfield_blank(self):
"""
Regression test for #13071: NullBooleanField should not throw
a validation error when given a value of None.
"""
nullboolean = NullBooleanModel(nbfield=None)
try:
nullboolean.full_clean()
except ValidationError as e:
self.fail("NullBooleanField failed validation with value of None: %s" % e.messages)
def test_field_repr(self):
"""
Regression test for #5931: __repr__ of a field also displays its name
"""
f = Foo._meta.get_field('a')
self.assertEqual(repr(f), '<django.db.models.fields.CharField: a>')
f = models.fields.CharField()
self.assertEqual(repr(f), '<django.db.models.fields.CharField>')
def test_field_name(self):
"""
Regression test for #14695: explicitly defined field name overwritten
by model's attribute name.
"""
instance = RenamedField()
self.assertTrue(hasattr(instance, 'get_fieldname_display'))
self.assertFalse(hasattr(instance, 'get_modelname_display'))
def test_field_verbose_name(self):
m = VerboseNameField
for i in range(1, 23):
self.assertEqual(m._meta.get_field('field%d' % i).verbose_name,
'verbose field%d' % i)
self.assertEqual(m._meta.get_field('id').verbose_name, 'verbose pk')
def test_choices_form_class(self):
"""Can supply a custom choices form class. Regression for #20999."""
choices = [('a', 'a')]
field = models.CharField(choices=choices)
klass = forms.TypedMultipleChoiceField
self.assertIsInstance(field.formfield(choices_form_class=klass), klass)
class DecimalFieldTests(test.TestCase):
def test_to_python(self):
f = models.DecimalField(max_digits=4, decimal_places=2)
self.assertEqual(f.to_python(3), Decimal("3"))
self.assertEqual(f.to_python("3.14"), Decimal("3.14"))
self.assertRaises(ValidationError, f.to_python, "abc")
def test_default(self):
f = models.DecimalField(default=Decimal("0.00"))
self.assertEqual(f.get_default(), Decimal("0.00"))
def test_format(self):
f = models.DecimalField(max_digits=5, decimal_places=1)
self.assertEqual(f._format(f.to_python(2)), '2.0')
self.assertEqual(f._format(f.to_python('2.6')), '2.6')
self.assertEqual(f._format(None), None)
def test_get_db_prep_lookup(self):
from django.db import connection
f = models.DecimalField(max_digits=5, decimal_places=1)
self.assertEqual(f.get_db_prep_lookup('exact', None, connection=connection), [None])
def test_filter_with_strings(self):
"""
We should be able to filter decimal fields using strings (#8023)
"""
Foo.objects.create(id=1, a='abc', d=Decimal("12.34"))
self.assertEqual(list(Foo.objects.filter(d='1.23')), [])
def test_save_without_float_conversion(self):
"""
Ensure decimals don't go through a corrupting float conversion during
save (#5079).
"""
bd = BigD(d="12.9")
bd.save()
bd = BigD.objects.get(pk=bd.pk)
self.assertEqual(bd.d, Decimal("12.9"))
def test_lookup_really_big_value(self):
"""
Ensure that really big values can be used in a filter statement, even
with older Python versions.
"""
# This should not crash. That counts as a win for our purposes.
Foo.objects.filter(d__gte=100000000000)
class ForeignKeyTests(test.TestCase):
def test_callable_default(self):
"""Test the use of a lazy callable for ForeignKey.default"""
a = Foo.objects.create(id=1, a='abc', d=Decimal("12.34"))
b = Bar.objects.create(b="bcd")
self.assertEqual(b.a, a)
class DateTimeFieldTests(unittest.TestCase):
def test_datetimefield_to_python_usecs(self):
"""DateTimeField.to_python should support usecs"""
f = models.DateTimeField()
self.assertEqual(f.to_python('2001-01-02 03:04:05.000006'),
datetime.datetime(2001, 1, 2, 3, 4, 5, 6))
self.assertEqual(f.to_python('2001-01-02 03:04:05.999999'),
datetime.datetime(2001, 1, 2, 3, 4, 5, 999999))
def test_timefield_to_python_usecs(self):
"""TimeField.to_python should support usecs"""
f = models.TimeField()
self.assertEqual(f.to_python('01:02:03.000004'),
datetime.time(1, 2, 3, 4))
self.assertEqual(f.to_python('01:02:03.999999'),
datetime.time(1, 2, 3, 999999))
class BooleanFieldTests(unittest.TestCase):
def _test_get_db_prep_lookup(self, f):
from django.db import connection
self.assertEqual(f.get_db_prep_lookup('exact', True, connection=connection), [True])
self.assertEqual(f.get_db_prep_lookup('exact', '1', connection=connection), [True])
self.assertEqual(f.get_db_prep_lookup('exact', 1, connection=connection), [True])
self.assertEqual(f.get_db_prep_lookup('exact', False, connection=connection), [False])
self.assertEqual(f.get_db_prep_lookup('exact', '0', connection=connection), [False])
self.assertEqual(f.get_db_prep_lookup('exact', 0, connection=connection), [False])
self.assertEqual(f.get_db_prep_lookup('exact', None, connection=connection), [None])
def _test_to_python(self, f):
self.assertTrue(f.to_python(1) is True)
self.assertTrue(f.to_python(0) is False)
def test_booleanfield_get_db_prep_lookup(self):
self._test_get_db_prep_lookup(models.BooleanField())
def test_nullbooleanfield_get_db_prep_lookup(self):
self._test_get_db_prep_lookup(models.NullBooleanField())
def test_booleanfield_to_python(self):
self._test_to_python(models.BooleanField())
def test_nullbooleanfield_to_python(self):
self._test_to_python(models.NullBooleanField())
def test_booleanfield_choices_blank(self):
"""
Test that BooleanField with choices and defaults doesn't generate a
formfield with the blank option (#9640, #10549).
"""
choices = [(1, 'Si'), (2, 'No')]
f = models.BooleanField(choices=choices, default=1, null=True)
self.assertEqual(f.formfield().choices, [('', '---------')] + choices)
f = models.BooleanField(choices=choices, default=1, null=False)
self.assertEqual(f.formfield().choices, choices)
def test_return_type(self):
b = BooleanModel()
b.bfield = True
b.save()
b2 = BooleanModel.objects.get(pk=b.pk)
self.assertIsInstance(b2.bfield, bool)
self.assertEqual(b2.bfield, True)
b3 = BooleanModel()
b3.bfield = False
b3.save()
b4 = BooleanModel.objects.get(pk=b3.pk)
self.assertIsInstance(b4.bfield, bool)
self.assertEqual(b4.bfield, False)
b = NullBooleanModel()
b.nbfield = True
b.save()
b2 = NullBooleanModel.objects.get(pk=b.pk)
self.assertIsInstance(b2.nbfield, bool)
self.assertEqual(b2.nbfield, True)
b3 = NullBooleanModel()
b3.nbfield = False
b3.save()
b4 = NullBooleanModel.objects.get(pk=b3.pk)
self.assertIsInstance(b4.nbfield, bool)
self.assertEqual(b4.nbfield, False)
# http://code.djangoproject.com/ticket/13293
# Verify that when an extra clause exists, the boolean
# conversions are applied with an offset
b5 = BooleanModel.objects.all().extra(
select={'string_col': 'string'})[0]
self.assertFalse(isinstance(b5.pk, bool))
def test_select_related(self):
"""
Test type of boolean fields when retrieved via select_related() (MySQL,
#15040)
"""
bmt = BooleanModel.objects.create(bfield=True)
bmf = BooleanModel.objects.create(bfield=False)
nbmt = NullBooleanModel.objects.create(nbfield=True)
nbmf = NullBooleanModel.objects.create(nbfield=False)
m1 = FksToBooleans.objects.create(bf=bmt, nbf=nbmt)
m2 = FksToBooleans.objects.create(bf=bmf, nbf=nbmf)
# Test select_related('fk_field_name')
ma = FksToBooleans.objects.select_related('bf').get(pk=m1.id)
        # verify types -- shouldn't be 0/1
self.assertIsInstance(ma.bf.bfield, bool)
self.assertIsInstance(ma.nbf.nbfield, bool)
# verify values
self.assertEqual(ma.bf.bfield, True)
self.assertEqual(ma.nbf.nbfield, True)
# Test select_related()
mb = FksToBooleans.objects.select_related().get(pk=m1.id)
mc = FksToBooleans.objects.select_related().get(pk=m2.id)
# verify types -- shouldn't be 0/1
self.assertIsInstance(mb.bf.bfield, bool)
self.assertIsInstance(mb.nbf.nbfield, bool)
self.assertIsInstance(mc.bf.bfield, bool)
self.assertIsInstance(mc.nbf.nbfield, bool)
# verify values
self.assertEqual(mb.bf.bfield, True)
self.assertEqual(mb.nbf.nbfield, True)
self.assertEqual(mc.bf.bfield, False)
self.assertEqual(mc.nbf.nbfield, False)
def test_null_default(self):
"""
Check that a BooleanField defaults to None -- which isn't
a valid value (#15124).
"""
# Patch the boolean field's default value. We give it a default
# value when defining the model to satisfy the check tests
# #20895.
boolean_field = BooleanModel._meta.get_field('bfield')
self.assertTrue(boolean_field.has_default())
old_default = boolean_field.default
try:
boolean_field.default = NOT_PROVIDED
            # check patch was successful
self.assertFalse(boolean_field.has_default())
b = BooleanModel()
self.assertIsNone(b.bfield)
with self.assertRaises(IntegrityError):
b.save()
finally:
boolean_field.default = old_default
nb = NullBooleanModel()
self.assertIsNone(nb.nbfield)
nb.save() # no error
class ChoicesTests(test.TestCase):
def test_choices_and_field_display(self):
"""
Check that get_choices and get_flatchoices interact with
get_FIELD_display to return the expected values (#7913).
"""
self.assertEqual(Whiz(c=1).get_c_display(), 'First') # A nested value
self.assertEqual(Whiz(c=0).get_c_display(), 'Other') # A top level value
self.assertEqual(Whiz(c=9).get_c_display(), 9) # Invalid value
self.assertEqual(Whiz(c=None).get_c_display(), None) # Blank value
self.assertEqual(Whiz(c='').get_c_display(), '') # Empty value
class SlugFieldTests(test.TestCase):
def test_slugfield_max_length(self):
"""
Make sure SlugField honors max_length (#9706)
"""
bs = BigS.objects.create(s='slug' * 50)
bs = BigS.objects.get(pk=bs.pk)
self.assertEqual(bs.s, 'slug' * 50)
class ValidationTest(test.TestCase):
def test_charfield_raises_error_on_empty_string(self):
f = models.CharField()
self.assertRaises(ValidationError, f.clean, "", None)
def test_charfield_cleans_empty_string_when_blank_true(self):
f = models.CharField(blank=True)
self.assertEqual('', f.clean('', None))
def test_integerfield_cleans_valid_string(self):
f = models.IntegerField()
self.assertEqual(2, f.clean('2', None))
def test_integerfield_raises_error_on_invalid_intput(self):
f = models.IntegerField()
self.assertRaises(ValidationError, f.clean, "a", None)
def test_charfield_with_choices_cleans_valid_choice(self):
f = models.CharField(max_length=1,
choices=[('a', 'A'), ('b', 'B')])
self.assertEqual('a', f.clean('a', None))
def test_charfield_with_choices_raises_error_on_invalid_choice(self):
f = models.CharField(choices=[('a', 'A'), ('b', 'B')])
self.assertRaises(ValidationError, f.clean, "not a", None)
def test_charfield_get_choices_with_blank_defined(self):
f = models.CharField(choices=[('', '<><>'), ('a', 'A')])
self.assertEqual(f.get_choices(True), [('', '<><>'), ('a', 'A')])
def test_choices_validation_supports_named_groups(self):
f = models.IntegerField(
choices=(('group', ((10, 'A'), (20, 'B'))), (30, 'C')))
self.assertEqual(10, f.clean(10, None))
def test_nullable_integerfield_raises_error_with_blank_false(self):
f = models.IntegerField(null=True, blank=False)
self.assertRaises(ValidationError, f.clean, None, None)
def test_nullable_integerfield_cleans_none_on_null_and_blank_true(self):
f = models.IntegerField(null=True, blank=True)
self.assertEqual(None, f.clean(None, None))
def test_integerfield_raises_error_on_empty_input(self):
f = models.IntegerField(null=False)
self.assertRaises(ValidationError, f.clean, None, None)
self.assertRaises(ValidationError, f.clean, '', None)
def test_integerfield_validates_zero_against_choices(self):
f = models.IntegerField(choices=((1, 1),))
self.assertRaises(ValidationError, f.clean, '0', None)
def test_charfield_raises_error_on_empty_input(self):
f = models.CharField(null=False)
self.assertRaises(ValidationError, f.clean, None, None)
def test_datefield_cleans_date(self):
f = models.DateField()
self.assertEqual(datetime.date(2008, 10, 10), f.clean('2008-10-10', None))
def test_boolean_field_doesnt_accept_empty_input(self):
f = models.BooleanField()
self.assertRaises(ValidationError, f.clean, None, None)
class BigIntegerFieldTests(test.TestCase):
def test_limits(self):
# Ensure that values that are right at the limits can be saved
# and then retrieved without corruption.
maxval = 9223372036854775807
minval = -maxval - 1
BigInt.objects.create(value=maxval)
qs = BigInt.objects.filter(value__gte=maxval)
self.assertEqual(qs.count(), 1)
self.assertEqual(qs[0].value, maxval)
BigInt.objects.create(value=minval)
qs = BigInt.objects.filter(value__lte=minval)
self.assertEqual(qs.count(), 1)
self.assertEqual(qs[0].value, minval)
def test_types(self):
b = BigInt(value=0)
self.assertIsInstance(b.value, six.integer_types)
b.save()
self.assertIsInstance(b.value, six.integer_types)
b = BigInt.objects.all()[0]
self.assertIsInstance(b.value, six.integer_types)
def test_coercing(self):
BigInt.objects.create(value='10')
b = BigInt.objects.get(value='10')
self.assertEqual(b.value, 10)
class TypeCoercionTests(test.TestCase):
"""
Test that database lookups can accept the wrong types and convert
them with no error: especially on Postgres 8.3+ which does not do
automatic casting at the DB level. See #10015.
"""
def test_lookup_integer_in_charfield(self):
self.assertEqual(Post.objects.filter(title=9).count(), 0)
def test_lookup_integer_in_textfield(self):
self.assertEqual(Post.objects.filter(body=24).count(), 0)
class FileFieldTests(unittest.TestCase):
def test_clearable(self):
"""
Test that FileField.save_form_data will clear its instance attribute
value if passed False.
"""
d = Document(myfile='something.txt')
self.assertEqual(d.myfile, 'something.txt')
field = d._meta.get_field('myfile')
field.save_form_data(d, False)
self.assertEqual(d.myfile, '')
def test_unchanged(self):
"""
Test that FileField.save_form_data considers None to mean "no change"
rather than "clear".
"""
d = Document(myfile='something.txt')
self.assertEqual(d.myfile, 'something.txt')
field = d._meta.get_field('myfile')
field.save_form_data(d, None)
self.assertEqual(d.myfile, 'something.txt')
def test_changed(self):
"""
Test that FileField.save_form_data, if passed a truthy value, updates
its instance attribute.
"""
d = Document(myfile='something.txt')
self.assertEqual(d.myfile, 'something.txt')
field = d._meta.get_field('myfile')
field.save_form_data(d, 'else.txt')
self.assertEqual(d.myfile, 'else.txt')
def test_delete_when_file_unset(self):
"""
Calling delete on an unset FileField should not call the file deletion
process, but fail silently (#20660).
"""
d = Document()
try:
d.myfile.delete()
except OSError:
self.fail("Deleting an unset FileField should not raise OSError.")
class BinaryFieldTests(test.TestCase):
binary_data = b'\x00\x46\xFE'
def test_set_and_retrieve(self):
data_set = (self.binary_data, six.memoryview(self.binary_data))
for bdata in data_set:
dm = DataModel(data=bdata)
dm.save()
dm = DataModel.objects.get(pk=dm.pk)
self.assertEqual(bytes(dm.data), bytes(bdata))
# Resave (=update)
dm.save()
dm = DataModel.objects.get(pk=dm.pk)
self.assertEqual(bytes(dm.data), bytes(bdata))
# Test default value
self.assertEqual(bytes(dm.short_data), b'\x08')
if connection.vendor == 'mysql' and six.PY3:
# Existing MySQL DB-API drivers fail on binary data.
test_set_and_retrieve = unittest.expectedFailure(test_set_and_retrieve)
def test_max_length(self):
dm = DataModel(short_data=self.binary_data * 4)
self.assertRaises(ValidationError, dm.full_clean)
class GenericIPAddressFieldTests(test.TestCase):
def test_genericipaddressfield_formfield_protocol(self):
"""
Test that GenericIPAddressField with a specified protocol does not
generate a formfield with no specified protocol. See #20740.
"""
model_field = models.GenericIPAddressField(protocol='IPv4')
form_field = model_field.formfield()
self.assertRaises(ValidationError, form_field.clean, '::1')
model_field = models.GenericIPAddressField(protocol='IPv6')
form_field = model_field.formfield()
self.assertRaises(ValidationError, form_field.clean, '127.0.0.1')
class PromiseTest(test.TestCase):
def test_AutoField(self):
lazy_func = lazy(lambda: 1, int)
self.assertIsInstance(
AutoField(primary_key=True).get_prep_value(lazy_func()),
int)
@unittest.skipIf(six.PY3, "Python 3 has no `long` type.")
def test_BigIntegerField(self):
lazy_func = lazy(lambda: long(9999999999999999999), long)
self.assertIsInstance(
BigIntegerField().get_prep_value(lazy_func()),
long)
def test_BinaryField(self):
lazy_func = lazy(lambda: b'', bytes)
self.assertIsInstance(
BinaryField().get_prep_value(lazy_func()),
bytes)
def test_BooleanField(self):
lazy_func = lazy(lambda: True, bool)
self.assertIsInstance(
BooleanField().get_prep_value(lazy_func()),
bool)
def test_CharField(self):
lazy_func = lazy(lambda: '', six.text_type)
self.assertIsInstance(
CharField().get_prep_value(lazy_func()),
six.text_type)
def test_CommaSeparatedIntegerField(self):
lazy_func = lazy(lambda: '1,2', six.text_type)
self.assertIsInstance(
CommaSeparatedIntegerField().get_prep_value(lazy_func()),
six.text_type)
def test_DateField(self):
lazy_func = lazy(lambda: datetime.date.today(), datetime.date)
self.assertIsInstance(
DateField().get_prep_value(lazy_func()),
datetime.date)
def test_DateTimeField(self):
lazy_func = lazy(lambda: datetime.datetime.now(), datetime.datetime)
self.assertIsInstance(
DateTimeField().get_prep_value(lazy_func()),
datetime.datetime)
def test_DecimalField(self):
lazy_func = lazy(lambda: Decimal('1.2'), Decimal)
self.assertIsInstance(
DecimalField().get_prep_value(lazy_func()),
Decimal)
def test_EmailField(self):
lazy_func = lazy(lambda: '[email protected]', six.text_type)
self.assertIsInstance(
EmailField().get_prep_value(lazy_func()),
six.text_type)
def test_FileField(self):
lazy_func = lazy(lambda: 'filename.ext', six.text_type)
self.assertIsInstance(
FileField().get_prep_value(lazy_func()),
six.text_type)
def test_FilePathField(self):
lazy_func = lazy(lambda: 'tests.py', six.text_type)
self.assertIsInstance(
FilePathField().get_prep_value(lazy_func()),
six.text_type)
def test_FloatField(self):
lazy_func = lazy(lambda: 1.2, float)
self.assertIsInstance(
FloatField().get_prep_value(lazy_func()),
float)
def test_ImageField(self):
lazy_func = lazy(lambda: 'filename.ext', six.text_type)
self.assertIsInstance(
ImageField().get_prep_value(lazy_func()),
six.text_type)
def test_IntegerField(self):
lazy_func = lazy(lambda: 1, int)
self.assertIsInstance(
IntegerField().get_prep_value(lazy_func()),
int)
def test_IPAddressField(self):
lazy_func = lazy(lambda: '127.0.0.1', six.text_type)
with warnings.catch_warnings(record=True):
warnings.simplefilter("always")
self.assertIsInstance(
IPAddressField().get_prep_value(lazy_func()),
six.text_type)
def test_GenericIPAddressField(self):
lazy_func = lazy(lambda: '127.0.0.1', six.text_type)
self.assertIsInstance(
GenericIPAddressField().get_prep_value(lazy_func()),
six.text_type)
def test_NullBooleanField(self):
lazy_func = lazy(lambda: True, bool)
self.assertIsInstance(
NullBooleanField().get_prep_value(lazy_func()),
bool)
def test_PositiveIntegerField(self):
lazy_func = lazy(lambda: 1, int)
self.assertIsInstance(
PositiveIntegerField().get_prep_value(lazy_func()),
int)
def test_PositiveSmallIntegerField(self):
lazy_func = lazy(lambda: 1, int)
self.assertIsInstance(
PositiveSmallIntegerField().get_prep_value(lazy_func()),
int)
def test_SlugField(self):
lazy_func = lazy(lambda: 'slug', six.text_type)
self.assertIsInstance(
SlugField().get_prep_value(lazy_func()),
six.text_type)
def test_SmallIntegerField(self):
lazy_func = lazy(lambda: 1, int)
self.assertIsInstance(
SmallIntegerField().get_prep_value(lazy_func()),
int)
def test_TextField(self):
lazy_func = lazy(lambda: 'Abc', six.text_type)
self.assertIsInstance(
TextField().get_prep_value(lazy_func()),
six.text_type)
def test_TimeField(self):
lazy_func = lazy(lambda: datetime.datetime.now().time(), datetime.time)
self.assertIsInstance(
TimeField().get_prep_value(lazy_func()),
datetime.time)
def test_URLField(self):
lazy_func = lazy(lambda: 'http://domain.com', six.text_type)
self.assertIsInstance(
URLField().get_prep_value(lazy_func()),
six.text_type)
class CustomFieldTests(unittest.TestCase):
def test_14786(self):
"""
Regression test for #14786 -- Test that field values are not prepared
twice in get_db_prep_lookup().
"""
class NoopField(models.TextField):
def __init__(self, *args, **kwargs):
self.prep_value_count = 0
super(NoopField, self).__init__(*args, **kwargs)
def get_prep_value(self, value):
self.prep_value_count += 1
return super(NoopField, self).get_prep_value(value)
field = NoopField()
field.get_db_prep_lookup(
'exact', 'TEST', connection=connection, prepared=False
)
self.assertEqual(field.prep_value_count, 1)
| bsd-3-clause | 36,003,222,382,327,950 | 6,816,818,239,458,402,000 | 36.803161 | 95 | 0.623123 | false |
gdooper/scipy | benchmarks/benchmarks/go_benchmark_functions/go_funcs_Z.py | 47 | 6803 | # -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
from numpy import abs, sum, sign, arange
from .go_benchmark import Benchmark
class Zacharov(Benchmark):
r"""
Zacharov objective function.
This class defines the Zacharov [1]_ global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{Zacharov}}(x) = \sum_{i=1}^{n} x_i^2 + \left ( \frac{1}{2}
\sum_{i=1}^{n} i x_i \right )^2
+ \left ( \frac{1}{2} \sum_{i=1}^{n} i x_i
\right )^4
Here, :math:`n` represents the number of dimensions and
:math:`x_i \in [-5, 10]` for :math:`i = 1, ..., n`.
*Global optimum*: :math:`f(x) = 0` for :math:`x_i = 0` for
:math:`i = 1, ..., n`
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = list(zip([-5.0] * self.N, [10.0] * self.N))
self.custom_bounds = ([-1, 1], [-1, 1])
self.global_optimum = [[0 for _ in range(self.N)]]
self.fglob = 0.0
self.change_dimensionality = True
def fun(self, x, *args):
self.nfev += 1
u = sum(x ** 2)
v = sum(arange(1, self.N + 1) * x)
return u + (0.5 * v) ** 2 + (0.5 * v) ** 4
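# Illustrative check (not part of the benchmark suite): the optimum of
# Zacharov is the zero vector, so evaluating there should reproduce fglob.
# Assumes ``numpy`` has been imported by the caller.
#
#     b = Zacharov(dimensions=4)
#     assert abs(b.fun(numpy.zeros(4)) - b.fglob) < 1e-12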
class ZeroSum(Benchmark):
r"""
ZeroSum objective function.
This class defines the ZeroSum [1]_ global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{ZeroSum}}(x) = \begin{cases}
0 & \textrm{if} \sum_{i=1}^n x_i = 0 \\
1 + \left(10000 \left |\sum_{i=1}^n x_i\right|
\right)^{0.5} & \textrm{otherwise}
\end{cases}
Here, :math:`n` represents the number of dimensions and
:math:`x_i \in [-10, 10]` for :math:`i = 1, ..., n`.
*Global optimum*: :math:`f(x) = 0` where :math:`\sum_{i=1}^n x_i = 0`
.. [1] Gavana, A. Global Optimization Benchmarks and AMPGO retrieved 2015
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = list(zip([-10.0] * self.N, [10.0] * self.N))
self.global_optimum = [[]]
self.fglob = 0.0
self.change_dimensionality = True
def fun(self, x, *args):
self.nfev += 1
if abs(sum(x)) < 3e-16:
return 0.0
return 1.0 + (10000.0 * abs(sum(x))) ** 0.5
class Zettl(Benchmark):
r"""
Zettl objective function.
This class defines the Zettl [1]_ global optimization problem. This is a
multimodal minimization problem defined as follows:
.. math::
f_{\text{Zettl}}(x) = \frac{1}{4} x_{1} + \left(x_{1}^{2} - 2 x_{1}
+ x_{2}^{2}\right)^{2}
with :math:`x_i \in [-1, 5]` for :math:`i = 1, 2`.
*Global optimum*: :math:`f(x) = -0.0037912` for :math:`x = [-0.029896, 0.0]`
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = list(zip([-5.0] * self.N, [10.0] * self.N))
self.global_optimum = [[-0.02989597760285287, 0.0]]
self.fglob = -0.003791237220468656
def fun(self, x, *args):
self.nfev += 1
return (x[0] ** 2 + x[1] ** 2 - 2 * x[0]) ** 2 + 0.25 * x[0]
class Zimmerman(Benchmark):
r"""
Zimmerman objective function.
This class defines the Zimmerman [1]_ global optimization problem. This
is a multimodal minimization problem defined as follows:
.. math::
f_{\text{Zimmerman}}(x) = \max \left[Zh1(x), Zp(Zh2(x))
\textrm{sgn}(Zh2(x)), Zp(Zh3(x))
\textrm{sgn}(Zh3(x)),
Zp(-x_1)\textrm{sgn}(x_1),
Zp(-x_2)\textrm{sgn}(x_2) \right]
Where, in this exercise:
.. math::
\begin{cases}
Zh1(x) = 9 - x_1 - x_2 \\
               Zh2(x) = (x_1 - 3)^2 + (x_2 - 2)^2 - 16 \\
Zh3(x) = x_1x_2 - 14 \\
Zp(t) = 100(1 + t)
\end{cases}
Where :math:`x` is a vector and :math:`t` is a scalar.
Here, :math:`x_i \in [0, 100]` for :math:`i = 1, 2`.
*Global optimum*: :math:`f(x) = 0` for :math:`x = [7, 2]`
.. [1] Gavana, A. Global Optimization Benchmarks and AMPGO retrieved 2015
TODO implementation from Gavana
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = list(zip([0.0] * self.N, [100.0] * self.N))
self.custom_bounds = ([0.0, 8.0], [0.0, 8.0])
self.global_optimum = [[7.0, 2.0]]
self.fglob = 0.0
def fun(self, x, *args):
self.nfev += 1
Zh1 = lambda x: 9.0 - x[0] - x[1]
Zh2 = lambda x: (x[0] - 3.0) ** 2.0 + (x[1] - 2.0) ** 2.0 - 16.0
Zh3 = lambda x: x[0] * x[1] - 14.0
Zp = lambda x: 100.0 * (1.0 + x)
return max(Zh1(x),
Zp(Zh2(x)) * sign(Zh2(x)),
Zp(Zh3(x)) * sign(Zh3(x)),
Zp(-x[0]) * sign(x[0]),
Zp(-x[1]) * sign(x[1]))
class Zirilli(Benchmark):
r"""
    Zirilli objective function.
This class defines the Zirilli [1]_ global optimization problem. This is a
unimodal minimization problem defined as follows:
.. math::
f_{\text{Zirilli}}(x) = 0.25x_1^4 - 0.5x_1^2 + 0.1x_1 + 0.5x_2^2
Here, :math:`n` represents the number of dimensions and
:math:`x_i \in [-10, 10]` for :math:`i = 1, 2`.
*Global optimum*: :math:`f(x) = -0.3523` for :math:`x = [-1.0465, 0]`
.. [1] Jamil, M. & Yang, X.-S. A Literature Survey of Benchmark Functions
For Global Optimization Problems Int. Journal of Mathematical Modelling
and Numerical Optimisation, 2013, 4, 150-194.
"""
def __init__(self, dimensions=2):
Benchmark.__init__(self, dimensions)
self._bounds = list(zip([-10.0] * self.N, [10.0] * self.N))
self.custom_bounds = ([-2.0, 2.0], [-2.0, 2.0])
self.global_optimum = [[-1.0465, 0.0]]
self.fglob = -0.35238603
def fun(self, x, *args):
self.nfev += 1
return 0.25 * x[0] ** 4 - 0.5 * x[0] ** 2 + 0.1 * x[0] + 0.5 * x[1] ** 2
| bsd-3-clause | 8,645,471,819,861,337,000 | 571,676,206,035,847,200 | 28.837719 | 80 | 0.51345 | false |
absperf/wagtailapproval | wagtailapproval/menu.py | 1 | 3637 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import itertools
from django.contrib.auth import get_user
from django.core.urlresolvers import reverse, reverse_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy as _n
from wagtail.wagtailadmin import messages
from wagtail.wagtailadmin.menu import MenuItem
from .models import ApprovalStep
def get_user_approval_items(user):
'''Get an iterable of all items pending for a user's approval.
:param User user: A user object whose groups are to be checked for
appropriate steps
:rtype: Iterable[ApprovalItem]
:returns: All the items that this user can approve or reject.
'''
if user.is_superuser:
steps = ApprovalStep.objects.all()
else:
groups = user.groups.all()
steps = ApprovalStep.objects.filter(group__in=groups)
return itertools.chain.from_iterable(
step.get_items(user) for step in steps)
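# A minimal usage sketch (illustrative; 'request' is assumed to be a Django
# request object and is not defined in this module):
#   pending = list(get_user_approval_items(request.user))
#   count = len(pending)
# ApprovalMenuItem.is_shown() below does essentially this to decide whether to
# flash a "waiting for approval" notification.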
class ApprovalMenuItem(MenuItem):
'''The menu item that shows in the wagtail sidebar'''
def __init__(
self, label=_('Approval'), url=reverse_lazy('wagtailapproval:index'),
classnames='icon icon-tick-inverse', order=201, **kwargs):
super(ApprovalMenuItem, self).__init__(
label,
url,
classnames=classnames,
order=order,
**kwargs)
def is_shown(self, request):
'''Only show the menu if the user is in an owned approval group'''
user = get_user(request)
# If the user is superuser, show the menu if any steps exist at all
if user.is_superuser:
return ApprovalStep.objects.exists()
groups = user.groups.all()
if ApprovalStep.objects.filter(group__in=groups).exists():
# Display the approval notification only outside of the approval
# paths
if not request.path.startswith(reverse('wagtailapproval:index')):
# Get the count of waiting approvals
waiting_approvals = sum(
1 for _ in get_user_approval_items(user))
if waiting_approvals > 0:
messages.info(
request,
_n(
'{num:d} item waiting for approval',
'{num:d} items waiting for approval',
waiting_approvals).format(num=waiting_approvals),
buttons=[
messages.button(
reverse('wagtailapproval:index'),
_('Examine Now'))
]
)
return True
return False
class ApprovalAdminMenuItem(MenuItem):
'''The admin menu item that shows in the wagtail sidebar, for
administrating entire pipelines and manually dropping items into steps.'''
def __init__(
self, label=_('Approval Admin'),
url=reverse_lazy('wagtailapproval:admin_index'),
classnames='icon icon-cog', order=200, **kwargs):
super(ApprovalAdminMenuItem, self).__init__(
label,
url,
classnames=classnames,
order=order,
**kwargs)
def is_shown(self, request):
'''Only show the menu if the user is a superuser and any ApprovalStep
objects exist.'''
user = get_user(request)
if user.is_superuser:
return ApprovalStep.objects.exists()
return False
| bsd-2-clause | 8,908,955,028,682,385,000 | -4,053,755,281,231,734,000 | 35.009901 | 78 | 0.587022 | false |
lulivi/debate_bot | bot.py | 1 | 5398 | #!/usr/bin/python3 -u
# -*- coding: utf-8 -*-
import sys
import time
import telebot # Bot API library.
from telebot import types # Types for the bot API.
from priv.__init__ import token as tk
bot = telebot.TeleBot(tk()) # Create our bot object.
###############################################################################
# commands
###############################################################################
# start: welcome message
@bot.message_handler(commands=['start'])
def command_start(m):
cid = m.chat.id
comando = m.text[7:]
if comando == 'reglas':
command_reglas(m)
else:
bot.send_message(cid,"¡Hola! Soy Debatebot.\nUsa el comando /ayuda para que te muestre mis demás comandos.\n\nEspero ser de utilidad.")
########################################
# show the visible commands
@bot.message_handler(commands=['ayuda'])
def command_ayuda(m):
bot.reply_to(m,"Guardo y doy información acerca de debates.\n/nuevo establezco el nuevo tema de debate.\n/actual muestro el tema actual de debate.\n/fin termino el debate actual.\n/reglas muestro las reglas actuales del grupo.")
########################################
# new debate
@bot.message_handler(commands=['nuevo'])
def command_nuevo(m):
pos = m.text.find(" ")
cid = m.chat.id
if pos == -1:
bot.send_message(cid,m.from_user.first_name+", escribe:\n/nuevo nuevo_tema_de_debate")
else:
if get_matter(cid) == "":
set_matter(cid, m.text[pos:])
fuid = m.from_user.id
set_matter_id(cid, fuid)
bot.send_message(cid,"El tema actual se ha guardado con éxito, "+m.from_user.first_name+".")
else:
bot.send_message(cid,"Ya se está debatifino un tema, "+m.from_user.first_name+".\n/fin para terminarlo.\n/actual para obtenerlo.")
########################################
# current debate
@bot.message_handler(commands=['actual'])
def command_actual(m):
cid = m.chat.id
actual = get_matter(cid)
if actual != "":
bot.send_message(cid,"\"* "+actual+" *\" es el tema actual.\n\n/fin para terminarlo.",parse_mode="Markdown")
else:
bot.send_message(cid,"No hay debate actualmente.\n/nuevo para comenzar uno.")
########################################
# end the debate
@bot.message_handler(commands=['fin'])
def command_fin(m):
cid = m.chat.id
if get_matter(cid) != "":
uid = get_matter_id(cid)
fuid = m.from_user.id
if uid == fuid:
set_matter(cid)
set_matter_id(cid,uid)
bot.send_message(cid,"Tema cerrado, "+m.from_user.first_name+".\n/nuevo para comenzar uno.")
else:
bot.send_message(cid,"No tiene permiso para terminar el debate, "+m.from_user.first_name+".")
else:
bot.send_message(cid, "No hay debate actualmente, "+m.from_user.first_name+".\n/nuevo para comenzar uno.")
########################################
REGLASID = ""
# rules
@bot.message_handler(commands=['reglas'])
def command_to_reglas(m):
    global REGLASID  # without this, the assignment below would only create a local variable
cid = m.chat.id
if cid < 0:
REGLASID = str(cid)
bot.send_message(cid,"Pulse [aquí](https://telegram.me/debate_bot?start=reglas)",parse_mode="Markdown")
else:
command_reglas(m)
def command_reglas(m):
if REGLASID != "":
reglas = get_reglas(REGLASID)
else:
cid = m.chat.id
reglas = get_reglas(cid)
if reglas != "":
bot.reply_to(m,"Reglas de participación en este grupo:\n\n"+reglas)
else:
bot.reply_to(m,"No hay relgas definidas para este grupo.")
########################################
# define the rules
@bot.message_handler(commands=['definereglas'])
def command_definereglas(m):
cid = m.chat.id
text = m.text
pos = text.find(" ")
if pos != -1:
txt = m.text[pos+1:]
set_reglas(cid, txt)
else:
txt = ""
set_reglas(cid, txt)
###############################################################################
# functions
###############################################################################
##### matter #####
def set_matter(chatid,txt=""):
cid = str(chatid)
with open("./matter/"+cid+".mat",'w') as f:
f.write(txt)
def get_matter(chatid):
cid = str(chatid)
with open("./matter/"+cid+".mat",'a') as f:
pass
with open("./matter/"+cid+".mat",'r') as f:
matter = f.read()
return matter
##### rules #####
def set_reglas(chatid, txt):
cid = str(chatid)
with open("./reglas/"+cid+".rul",'w') as f:
f.write(txt)
def get_reglas(chatid):
cid = str(chatid)
with open("./reglas/"+cid+".rul",'a') as f:
pass
with open("./reglas/"+cid+".rul",'r') as f:
reglas = f.read()
return reglas
##### matter id #####
def set_matter_id(chatid,userid):
cid = str(chatid)
uid = str(userid)
with open("./matter/"+cid+".matid",'w') as f:
f.write(uid)
def get_matter_id(chatid):
cid = str(chatid)
with open("./matter/"+cid+".matid",'a') as f:
pass
with open("./matter/"+cid+".matid",'r') as f:
uid = f.read()
if uid == "":
return -1
else:
return int(uid)
###############################################################################
bot.polling()
| gpl-2.0 | 7,370,021,111,166,991,000 | 5,153,044,615,264,657,000 | 31.083333 | 232 | 0.520779 | false |
jordancheah/zipline | tests/test_munge.py | 34 | 1794 | #
# Copyright 2015 Quantopian, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
import pandas as pd
import numpy as np
from numpy.testing import assert_almost_equal
from unittest import TestCase
from zipline.utils.munge import bfill, ffill
class MungeTests(TestCase):
def test_bfill(self):
# test ndim=1
N = 100
s = pd.Series(np.random.randn(N))
mask = random.sample(range(N), 10)
s.iloc[mask] = np.nan
correct = s.bfill().values
test = bfill(s.values)
assert_almost_equal(correct, test)
# test ndim=2
df = pd.DataFrame(np.random.randn(N, N))
df.iloc[mask] = np.nan
correct = df.bfill().values
test = bfill(df.values)
assert_almost_equal(correct, test)
def test_ffill(self):
# test ndim=1
N = 100
s = pd.Series(np.random.randn(N))
mask = random.sample(range(N), 10)
s.iloc[mask] = np.nan
correct = s.ffill().values
test = ffill(s.values)
assert_almost_equal(correct, test)
# test ndim=2
df = pd.DataFrame(np.random.randn(N, N))
df.iloc[mask] = np.nan
correct = df.ffill().values
test = ffill(df.values)
assert_almost_equal(correct, test)
| apache-2.0 | -451,913,193,184,595,800 | -2,431,071,611,710,052,400 | 29.40678 | 74 | 0.641026 | false |
chugunovyar/factoryForBuild | neuron/SaveClosedPossition.py | 1 | 31069 | # -*- coding: utf-8 -*-
import logging
from neuron.models import DataSet
import dateutil.parser as DP
loggermsg = logging.getLogger('django')
def saveClosedPossition(jsondata):
#loggermsg.info(len(jsondata))
    # Check whether an order with this magic number already exists in the DB
ifExistOrdernum = DataSet.objects.filter(open_magicnum=jsondata['magicnum'])
    # If there is no such order, write it to the DB.
if len(ifExistOrdernum) == 0:
if float(jsondata['result']) > 0:
effectivnes = 1
else:
effectivnes = 0
dataToSave = DataSet(
open_magicnum = jsondata['magicnum'],\
open_neuron_name = jsondata['neuron_name'],\
open_period = jsondata['period'],\
orderOpenPrice = jsondata['openprice'],\
open_type = jsondata['open_type'],\
open_time = DP.parse(jsondata['orderopentime']),\
open_close_1 = jsondata['open_close_1'],\
open_open_1 = jsondata['open_open_1'],\
open_high_1 = jsondata['open_high_1'],\
open_low_1 = jsondata['open_low_1'],
open_upband_1 = jsondata['open_upband_1'],
open_lowband_1 = jsondata['open_lowband_1'],
open_midleband_1 = jsondata['open_midleband_1'],
open_jaw_1 = jsondata['open_jaw_1'],
open_lips_1 = jsondata['open_lips_1'],
open_teeth_1 = jsondata['open_teeth_1'],
open_volume_1 = jsondata['open_volume_1'],
open_close_2 = jsondata['open_close_2'],
open_open_2 = jsondata['open_open_2'],
open_high_2 = jsondata['open_high_2'],
open_low_2 = jsondata['open_low_2'],
open_upband_2 = jsondata['open_upband_2'],
open_lowband_2 = jsondata['open_lowband_2'],
open_midleband_2 = jsondata['open_midleband_2'],
open_jaw_2 = jsondata['open_jaw_2'],
open_lips_2 = jsondata['open_lips_2'],
open_teeth_2 = jsondata['open_teeth_2'],
open_volume_2 = jsondata['open_volume_2'],
open_close_3 = jsondata['open_close_3'],
open_open_3 = jsondata['open_open_3'],
open_high_3 = jsondata['open_high_3'],
open_low_3 = jsondata['open_low_3'],
open_upband_3 = jsondata['open_upband_3'],
open_lowband_3 = jsondata['open_lowband_3'],
open_midleband_3 = jsondata['open_midleband_3'],
open_jaw_3 = jsondata['open_jaw_3'],
open_lips_3 = jsondata['open_lips_3'],
open_teeth_3 = jsondata['open_teeth_3'],
open_volume_3 = jsondata['open_volume_3'],
open_close_4 = jsondata['open_close_4'],
open_open_4 = jsondata['open_open_4'],
open_high_4 = jsondata['open_high_4'],
open_low_4 = jsondata['open_low_4'],
open_upband_4 = jsondata['open_upband_4'],
open_lowband_4 = jsondata['open_lowband_4'],
open_midleband_4 = jsondata['open_midleband_4'],
open_jaw_4 = jsondata['open_jaw_4'],
open_lips_4 = jsondata['open_lips_4'],
open_teeth_4 = jsondata['open_teeth_4'],
open_volume_4 = jsondata['open_volume_4'],
open_close_5 = jsondata['open_close_5'],
open_open_5 = jsondata['open_open_5'],
open_high_5 = jsondata['open_high_5'],
open_low_5 = jsondata['open_low_5'],
open_upband_5 = jsondata['open_upband_5'],
open_lowband_5 = jsondata['open_lowband_5'],
open_midleband_5 = jsondata['open_midleband_5'],
open_jaw_5 = jsondata['open_jaw_5'],
open_lips_5 = jsondata['open_lips_5'],
open_teeth_5 = jsondata['open_teeth_5'],
open_volume_5 = jsondata['open_volume_5'],
open_close_6 = jsondata['open_close_6'],
open_open_6 = jsondata['open_open_6'],
open_high_6 = jsondata['open_high_6'],
open_low_6 = jsondata['open_low_6'],
open_upband_6 = jsondata['open_upband_6'],
open_lowband_6 = jsondata['open_lowband_6'],
open_midleband_6 = jsondata['open_midleband_6'],
open_jaw_6 = jsondata['open_jaw_6'],
open_lips_6 = jsondata['open_lips_6'],
open_teeth_6 = jsondata['open_teeth_6'],
open_volume_6 = jsondata['open_volume_6'],
open_close_7 = jsondata['open_close_7'],
open_open_7 = jsondata['open_open_7'],
open_high_7 = jsondata['open_high_7'],
open_low_7 = jsondata['open_low_7'],
open_upband_7 = jsondata['open_upband_7'],
open_lowband_7 = jsondata['open_lowband_7'],
open_midleband_7 = jsondata['open_midleband_7'],
open_jaw_7 = jsondata['open_jaw_7'],
open_lips_7 = jsondata['open_lips_7'],
open_teeth_7 = jsondata['open_teeth_7'],
open_volume_7 = jsondata['open_volume_7'],
open_close_8 = jsondata['open_close_8'],
open_open_8 = jsondata['open_open_8'],
open_high_8 = jsondata['open_high_8'],
open_low_8 = jsondata['open_low_8'],
open_upband_8 = jsondata['open_upband_8'],
open_lowband_8 = jsondata['open_lowband_8'],
open_midleband_8 = jsondata['open_midleband_8'],
open_jaw_8 = jsondata['open_jaw_8'],
open_lips_8 = jsondata['open_lips_8'],
open_teeth_8 = jsondata['open_teeth_8'],
open_volume_8 = jsondata['open_volume_8'],
open_close_9 = jsondata['open_close_9'],
open_open_9 = jsondata['open_open_9'],
open_high_9 = jsondata['open_high_9'],
open_low_9 = jsondata['open_low_9'],
open_upband_9 = jsondata['open_upband_9'],
open_lowband_9 = jsondata['open_lowband_9'],
open_midleband_9 = jsondata['open_midleband_9'],
open_jaw_9 = jsondata['open_jaw_9'],
open_lips_9 = jsondata['open_lips_9'],
open_teeth_9 = jsondata['open_teeth_9'],
open_volume_9 = jsondata['open_volume_9'],
open_close_10 = jsondata['open_close_10'],
open_open_10 = jsondata['open_open_10'],
open_high_10 = jsondata['open_high_10'],
open_low_10 = jsondata['open_low_10'],
open_upband_10 = jsondata['open_upband_10'],
open_lowband_10 = jsondata['open_lowband_10'],
open_midleband_10 = jsondata['open_midleband_10'],
open_jaw_10 = jsondata['open_jaw_10'],
open_lips_10 = jsondata['open_lips_10'],
open_teeth_10 = jsondata['open_teeth_10'],
open_volume_10 = jsondata['open_volume_10'],
)
dataToSave.save()
DataSet.objects.filter(open_magicnum=jsondata['magicnum']).update(
open_close_11 = jsondata['open_close_11'],
open_open_11 = jsondata['open_open_11'],
open_high_11 = jsondata['open_high_11'],
open_low_11 = jsondata['open_low_11'],
open_upband_11 = jsondata['open_upband_11'],
open_lowband_11 = jsondata['open_lowband_11'],
open_midleband_11 = jsondata['open_midleband_11'],
open_jaw_11 = jsondata['open_jaw_11'],
open_lips_11 = jsondata['open_lips_11'],
open_teeth_11 = jsondata['open_teeth_11'],
open_volume_11 = jsondata['open_volume_11'],
open_close_12 = jsondata['open_close_12'],
open_open_12 = jsondata['open_open_12'],
open_high_12 = jsondata['open_high_12'],
open_low_12 = jsondata['open_low_12'],
open_upband_12 = jsondata['open_upband_12'],
open_lowband_12 = jsondata['open_lowband_12'],
open_midleband_12 = jsondata['open_midleband_12'],
open_jaw_12 = jsondata['open_jaw_12'],
open_lips_12 = jsondata['open_lips_12'],
open_teeth_12 = jsondata['open_teeth_12'],
open_volume_12 = jsondata['open_volume_12'],
open_close_13 = jsondata['open_close_13'],
open_open_13 = jsondata['open_open_13'],
open_high_13 = jsondata['open_high_13'],
open_low_13 = jsondata['open_low_13'],
open_upband_13 = jsondata['open_upband_13'],
open_lowband_13 = jsondata['open_lowband_13'],
open_midleband_13 = jsondata['open_midleband_13'],
open_jaw_13 = jsondata['open_jaw_13'],
open_lips_13 = jsondata['open_lips_13'],
open_teeth_13 = jsondata['open_teeth_13'],
open_volume_13 = jsondata['open_volume_13'],
open_close_14 = jsondata['open_close_14'],
open_open_14 = jsondata['open_open_14'],
open_high_14 = jsondata['open_high_14'],
open_low_14 = jsondata['open_low_14'],
open_upband_14 = jsondata['open_upband_14'],
open_lowband_14 = jsondata['open_lowband_14'],
open_midleband_14 = jsondata['open_midleband_14'],
open_jaw_14 = jsondata['open_jaw_14'],
open_lips_14 = jsondata['open_lips_14'],
open_teeth_14 = jsondata['open_teeth_14'],
open_volume_14 = jsondata['open_volume_14'],
open_close_15 = jsondata['open_close_15'],
open_open_15 = jsondata['open_open_15'],
open_high_15 = jsondata['open_high_15'],
open_low_15 = jsondata['open_low_15'],
open_upband_15 = jsondata['open_upband_15'],
open_lowband_15 = jsondata['open_lowband_15'],
open_midleband_15 = jsondata['open_midleband_15'],
open_jaw_15 = jsondata['open_jaw_15'],
open_lips_15 = jsondata['open_lips_15'],
open_teeth_15 = jsondata['open_teeth_15'],
open_volume_15 = jsondata['open_volume_15'],
open_close_16 = jsondata['open_close_16'],
open_open_16 = jsondata['open_open_16'],
open_high_16 = jsondata['open_high_16'],
open_low_16 = jsondata['open_low_16'],
open_upband_16 = jsondata['open_upband_16'],
open_lowband_16 = jsondata['open_lowband_16'],
open_midleband_16 = jsondata['open_midleband_16'],
open_jaw_16 = jsondata['open_jaw_16'],
open_lips_16 = jsondata['open_lips_16'],
open_teeth_16 = jsondata['open_teeth_16'],
open_volume_16 = jsondata['open_volume_16'],
open_close_17 = jsondata['open_close_17'],
open_open_17 = jsondata['open_open_17'],
open_high_17 = jsondata['open_high_17'],
open_low_17 = jsondata['open_low_17'],
open_upband_17 = jsondata['open_upband_17'],
open_lowband_17 = jsondata['open_lowband_17'],
open_midleband_17 = jsondata['open_midleband_17'],
open_jaw_17 = jsondata['open_jaw_17'],
open_lips_17 = jsondata['open_lips_17'],
open_teeth_17 = jsondata['open_teeth_17'],
open_volume_17 = jsondata['open_volume_17'],
open_close_18 = jsondata['open_close_18'],
open_open_18 = jsondata['open_open_18'],
open_high_18 = jsondata['open_high_18'],
open_low_18 = jsondata['open_low_18'],
open_upband_18 = jsondata['open_upband_18'],
open_lowband_18 = jsondata['open_lowband_18'],
open_midleband_18 = jsondata['open_midleband_18'],
open_jaw_18 = jsondata['open_jaw_18'],
open_lips_18 = jsondata['open_lips_18'],
open_teeth_18 = jsondata['open_teeth_18'],
open_volume_18 = jsondata['open_volume_18'],
open_close_19 = jsondata['open_close_19'],
open_open_19 = jsondata['open_open_19'],
open_high_19 = jsondata['open_high_19'],
open_low_19 = jsondata['open_low_19'],
open_upband_19 = jsondata['open_upband_19'],
open_lowband_19 = jsondata['open_lowband_19'],
open_midleband_19 = jsondata['open_midleband_19'],
open_jaw_19 = jsondata['open_jaw_19'],
open_lips_19 = jsondata['open_lips_19'],
open_teeth_19 = jsondata['open_teeth_19'],
open_volume_19 = jsondata['open_volume_19'],
open_close_20 = jsondata['open_close_20'],
open_open_20 = jsondata['open_open_20'],
open_high_20 = jsondata['open_high_20'],
open_low_20 = jsondata['open_low_20'],
open_upband_20 = jsondata['open_upband_20'],
open_lowband_20 = jsondata['open_lowband_20'],
open_midleband_20 = jsondata['open_midleband_20'],
open_jaw_20 = jsondata['open_jaw_20'],
open_lips_20 = jsondata['open_lips_20'],
open_teeth_20 = jsondata['open_teeth_20'],
open_volume_20 = jsondata['open_volume_20'],
open_close_21 = jsondata['open_close_21'],
open_open_21 = jsondata['open_open_21'],
open_high_21 = jsondata['open_high_21'],
open_low_21 = jsondata['open_low_21'],
open_upband_21 = jsondata['open_upband_21'],
open_lowband_21 = jsondata['open_lowband_21'],
open_midleband_21 = jsondata['open_midleband_21'],
open_jaw_21 = jsondata['open_jaw_21'],
open_lips_21 = jsondata['open_lips_21'],
open_teeth_21 = jsondata['open_teeth_21'],
open_volume_21 = jsondata['open_volume_21'],
open_close_22 = jsondata['open_close_22'],
open_open_22 = jsondata['open_open_22'],
open_high_22 = jsondata['open_high_22'],
open_low_22 = jsondata['open_low_22'],
open_upband_22 = jsondata['open_upband_22'],
open_lowband_22 = jsondata['open_lowband_22'],
open_midleband_22 = jsondata['open_midleband_22'],
open_jaw_22 = jsondata['open_jaw_22'],
open_lips_22 = jsondata['open_lips_22'],
open_teeth_22 = jsondata['open_teeth_22'],
open_volume_22 = jsondata['open_volume_22'],
open_close_23 = jsondata['open_close_23'],
open_open_23 = jsondata['open_open_23'],
open_high_23 = jsondata['open_high_23'],
open_low_23 = jsondata['open_low_23'],
open_upband_23 = jsondata['open_upband_23'],
open_lowband_23 = jsondata['open_lowband_23'],
open_midleband_23 = jsondata['open_midleband_23'],
open_jaw_23 = jsondata['open_jaw_23'],
open_lips_23 = jsondata['open_lips_23'],
open_teeth_23 = jsondata['open_teeth_23'],
open_volume_23 = jsondata['open_volume_23'],
open_close_24 = jsondata['open_close_24'],
open_open_24 = jsondata['open_open_24'],
open_high_24 = jsondata['open_high_24'],
open_low_24 = jsondata['open_low_24'],
open_upband_24 = jsondata['open_upband_24'],
open_lowband_24 = jsondata['open_lowband_24'],
open_midleband_24 = jsondata['open_midleband_24'],
open_jaw_24 = jsondata['open_jaw_24'],
open_lips_24 = jsondata['open_lips_24'],
open_teeth_24 = jsondata['open_teeth_24'],
open_volume_24 = jsondata['open_volume_24']
)
DataSet.objects.filter(open_magicnum=jsondata['magicnum']).update(
close_close_1 = jsondata['close_close_1'],
close_open_1 = jsondata['close_open_1'],
close_high_1 = jsondata['close_high_1'],
close_low_1 = jsondata['close_low_1'],
close_upband_1 = jsondata['close_upband_1'],
close_lowband_1 = jsondata['close_lowband_1'],
close_midleband_1 = jsondata['close_midleband_1'],
close_jaw_1 = jsondata['close_jaw_1'],
close_lips_1 = jsondata['close_lips_1'],
close_teeth_1 = jsondata['close_teeth_1'],
close_volume_1 = jsondata['close_volume_1'],
close_close_2 = jsondata['close_close_2'],
close_open_2 = jsondata['close_open_2'],
close_high_2 = jsondata['close_high_2'],
close_low_2 = jsondata['close_low_2'],
close_upband_2 = jsondata['close_upband_2'],
close_lowband_2 = jsondata['close_lowband_2'],
close_midleband_2 = jsondata['close_midleband_2'],
close_jaw_2 = jsondata['close_jaw_2'],
close_lips_2 = jsondata['close_lips_2'],
close_teeth_2 = jsondata['close_teeth_2'],
close_volume_2 = jsondata['close_volume_2'],
close_close_3 = jsondata['close_close_3'],
close_open_3 = jsondata['close_open_3'],
close_high_3 = jsondata['close_high_3'],
close_low_3 = jsondata['close_low_3'],
close_upband_3 = jsondata['close_upband_3'],
close_lowband_3 = jsondata['close_lowband_3'],
close_midleband_3 = jsondata['close_midleband_3'],
close_jaw_3 = jsondata['close_jaw_3'],
close_lips_3 = jsondata['close_lips_3'],
close_teeth_3 = jsondata['close_teeth_3'],
close_volume_3 = jsondata['close_volume_3'],
close_close_4 = jsondata['close_close_4'],
close_open_4 = jsondata['close_open_4'],
close_high_4 = jsondata['close_high_4'],
close_low_4 = jsondata['close_low_4'],
close_upband_4 = jsondata['close_upband_4'],
close_lowband_4 = jsondata['close_lowband_4'],
close_midleband_4 = jsondata['close_midleband_4'],
close_jaw_4 = jsondata['close_jaw_4'],
close_lips_4 = jsondata['close_lips_4'],
close_teeth_4 = jsondata['close_teeth_4'],
close_volume_4 = jsondata['close_volume_4'],
close_close_5 = jsondata['close_close_5'],
close_open_5 = jsondata['close_open_5'],
close_high_5 = jsondata['close_high_5'],
close_low_5 = jsondata['close_low_5'],
close_upband_5 = jsondata['close_upband_5'],
close_lowband_5 = jsondata['close_lowband_5'],
close_midleband_5 = jsondata['close_midleband_5'],
close_jaw_5 = jsondata['close_jaw_5'],
close_lips_5 = jsondata['close_lips_5'],
close_teeth_5 = jsondata['close_teeth_5'],
close_volume_5 = jsondata['close_volume_5'],
close_close_6 = jsondata['close_close_6'],
close_open_6 = jsondata['close_open_6'],
close_high_6 = jsondata['close_high_6'],
close_low_6 = jsondata['close_low_6'],
close_upband_6 = jsondata['close_upband_6'],
close_lowband_6 = jsondata['close_lowband_6'],
close_midleband_6 = jsondata['close_midleband_6'],
close_jaw_6 = jsondata['close_jaw_6'],
close_lips_6 = jsondata['close_lips_6'],
close_teeth_6 = jsondata['close_teeth_6'],
close_volume_6 = jsondata['close_volume_6'],
close_close_7 = jsondata['close_close_7'],
close_open_7 = jsondata['close_open_7'],
close_high_7 = jsondata['close_high_7'],
close_low_7 = jsondata['close_low_7'],
close_upband_7 = jsondata['close_upband_7'],
close_lowband_7 = jsondata['close_lowband_7'],
close_midleband_7 = jsondata['close_midleband_7'],
close_jaw_7 = jsondata['close_jaw_7'],
close_lips_7 = jsondata['close_lips_7'],
close_teeth_7 = jsondata['close_teeth_7'],
close_volume_7 = jsondata['close_volume_7'],
close_close_8 = jsondata['close_close_8'],
close_open_8 = jsondata['close_open_8'],
close_high_8 = jsondata['close_high_8'],
close_low_8 = jsondata['close_low_8'],
close_upband_8 = jsondata['close_upband_8'],
close_lowband_8 = jsondata['close_lowband_8'],
close_midleband_8 = jsondata['close_midleband_8'],
close_jaw_8 = jsondata['close_jaw_8'],
close_lips_8 = jsondata['close_lips_8'],
close_teeth_8 = jsondata['close_teeth_8'],
close_volume_8 = jsondata['close_volume_8'],
close_close_9 = jsondata['close_close_9'],
close_open_9 = jsondata['close_open_9'],
close_high_9 = jsondata['close_high_9'],
close_low_9 = jsondata['close_low_9'],
close_upband_9 = jsondata['close_upband_9'],
close_lowband_9 = jsondata['close_lowband_9'],
close_midleband_9 = jsondata['close_midleband_9'],
close_jaw_9 = jsondata['close_jaw_9'],
close_lips_9 = jsondata['close_lips_9'],
close_teeth_9 = jsondata['close_teeth_9'],
close_volume_9 = jsondata['close_volume_9'],
close_close_10 = jsondata['close_close_10'],
close_open_10 = jsondata['close_open_10'],
close_high_10 = jsondata['close_high_10'],
close_low_10 = jsondata['close_low_10'],
close_upband_10 = jsondata['close_upband_10'],
close_lowband_10 = jsondata['close_lowband_10'],
close_midleband_10 = jsondata['close_midleband_10'],
close_jaw_10 = jsondata['close_jaw_10'],
close_lips_10 = jsondata['close_lips_10'],
close_teeth_10 = jsondata['close_teeth_10'],
close_volume_10 = jsondata['close_volume_10'],
close_close_11 = jsondata['close_close_11'],
close_open_11 = jsondata['close_open_11'],
close_high_11 = jsondata['close_high_11'],
close_low_11 = jsondata['close_low_11'],
close_upband_11 = jsondata['close_upband_11'],
close_lowband_11 = jsondata['close_lowband_11'],
close_midleband_11 = jsondata['close_midleband_11'],
close_jaw_11 = jsondata['close_jaw_11'],
close_lips_11 = jsondata['close_lips_11'],
close_teeth_11 = jsondata['close_teeth_11'],
close_volume_11 = jsondata['close_volume_11'],
close_close_12 = jsondata['close_close_12'],
close_open_12 = jsondata['close_open_12'],
close_high_12 = jsondata['close_high_12'],
close_low_12 = jsondata['close_low_12'],
close_upband_12 = jsondata['close_upband_12'],
close_lowband_12 = jsondata['close_lowband_12'],
close_midleband_12 = jsondata['close_midleband_12'],
close_jaw_12 = jsondata['close_jaw_12'],
close_lips_12 = jsondata['close_lips_12'],
close_teeth_12 = jsondata['close_teeth_12'],
close_volume_12 = jsondata['close_volume_12'],
)
DataSet.objects.filter(open_magicnum=jsondata['magicnum']).update(
close_close_13 = jsondata['close_close_13'],
close_open_13 = jsondata['close_open_13'],
close_high_13 = jsondata['close_high_13'],
close_low_13 = jsondata['close_low_13'],
close_upband_13 = jsondata['close_upband_13'],
close_lowband_13 = jsondata['close_lowband_13'],
close_midleband_13 = jsondata['close_midleband_13'],
close_jaw_13 = jsondata['close_jaw_13'],
close_lips_13 = jsondata['close_lips_13'],
close_teeth_13 = jsondata['close_teeth_13'],
close_volume_13 = jsondata['close_volume_13'],
close_close_14 = jsondata['close_close_14'],
close_open_14 = jsondata['close_open_14'],
close_high_14 = jsondata['close_high_14'],
close_low_14 = jsondata['close_low_14'],
close_upband_14 = jsondata['close_upband_14'],
close_lowband_14 = jsondata['close_lowband_14'],
close_midleband_14 = jsondata['close_midleband_14'],
close_jaw_14 = jsondata['close_jaw_14'],
close_lips_14 = jsondata['close_lips_14'],
close_teeth_14 = jsondata['close_teeth_14'],
close_volume_14 = jsondata['close_volume_14'],
close_close_15 = jsondata['close_close_15'],
close_open_15 = jsondata['close_open_15'],
close_high_15 = jsondata['close_high_15'],
close_low_15 = jsondata['close_low_15'],
close_upband_15 = jsondata['close_upband_15'],
close_lowband_15 = jsondata['close_lowband_15'],
close_midleband_15 = jsondata['close_midleband_15'],
close_jaw_15 = jsondata['close_jaw_15'],
close_lips_15 = jsondata['close_lips_15'],
close_teeth_15 = jsondata['close_teeth_15'],
close_volume_15 = jsondata['close_volume_15'],
close_close_16 = jsondata['close_close_16'],
close_open_16 = jsondata['close_open_16'],
close_high_16 = jsondata['close_high_16'],
close_low_16 = jsondata['close_low_16'],
close_upband_16 = jsondata['close_upband_16'],
close_lowband_16 = jsondata['close_lowband_16'],
close_midleband_16 = jsondata['close_midleband_16'],
close_jaw_16 = jsondata['close_jaw_16'],
close_lips_16 = jsondata['close_lips_16'],
close_teeth_16 = jsondata['close_teeth_16'],
close_volume_16 = jsondata['close_volume_16'],
close_close_17 = jsondata['close_close_17'],
close_open_17 = jsondata['close_open_17'],
close_high_17 = jsondata['close_high_17'],
close_low_17 = jsondata['close_low_17'],
close_upband_17 = jsondata['close_upband_17'],
close_lowband_17 = jsondata['close_lowband_17'],
close_midleband_17 = jsondata['close_midleband_17'],
close_jaw_17 = jsondata['close_jaw_17'],
close_lips_17 = jsondata['close_lips_17'],
close_teeth_17 = jsondata['close_teeth_17'],
close_volume_17 = jsondata['close_volume_17'],
close_close_18 = jsondata['close_close_18'],
close_open_18 = jsondata['close_open_18'],
close_high_18 = jsondata['close_high_18'],
close_low_18 = jsondata['close_low_18'],
close_upband_18 = jsondata['close_upband_18'],
close_lowband_18 = jsondata['close_lowband_18'],
close_midleband_18 = jsondata['close_midleband_18'],
close_jaw_18 = jsondata['close_jaw_18'],
close_lips_18 = jsondata['close_lips_18'],
close_teeth_18 = jsondata['close_teeth_18'],
close_volume_18 = jsondata['close_volume_18'],
close_close_19 = jsondata['close_close_19'],
close_open_19 = jsondata['close_open_19'],
close_high_19 = jsondata['close_high_19'],
close_low_19 = jsondata['close_low_19'],
close_upband_19 = jsondata['close_upband_19'],
close_lowband_19 = jsondata['close_lowband_19'],
close_midleband_19 = jsondata['close_midleband_19'],
close_jaw_19 = jsondata['close_jaw_19'],
close_lips_19 = jsondata['close_lips_19'],
close_teeth_19 = jsondata['close_teeth_19'],
close_volume_19 = jsondata['close_volume_19'],
close_close_20 = jsondata['close_close_20'],
close_open_20 = jsondata['close_open_20'],
close_high_20 = jsondata['close_high_20'],
close_low_20 = jsondata['close_low_20'],
close_upband_20 = jsondata['close_upband_20'],
close_lowband_20 = jsondata['close_lowband_20'],
close_midleband_20 = jsondata['close_midleband_20'],
close_jaw_20 = jsondata['close_jaw_20'],
close_lips_20 = jsondata['close_lips_20'],
close_teeth_20 = jsondata['close_teeth_20'],
close_volume_20 = jsondata['close_volume_20'],
close_close_21 = jsondata['close_close_21'],
close_open_21 = jsondata['close_open_21'],
close_high_21 = jsondata['close_high_21'],
close_low_21 = jsondata['close_low_21'],
close_upband_21 = jsondata['close_upband_21'],
close_lowband_21 = jsondata['close_lowband_21'],
close_midleband_21 = jsondata['close_midleband_21'],
close_jaw_21 = jsondata['close_jaw_21'],
close_lips_21 = jsondata['close_lips_21'],
close_teeth_21 = jsondata['close_teeth_21'],
close_volume_21 = jsondata['close_volume_21'],
close_close_22 = jsondata['close_close_22'],
close_open_22 = jsondata['close_open_22'],
close_high_22 = jsondata['close_high_22'],
close_low_22 = jsondata['close_low_22'],
close_upband_22 = jsondata['close_upband_22'],
close_lowband_22 = jsondata['close_lowband_22'],
close_midleband_22 = jsondata['close_midleband_22'],
close_jaw_22 = jsondata['close_jaw_22'],
close_lips_22 = jsondata['close_lips_22'],
close_teeth_22 = jsondata['close_teeth_22'],
close_volume_22 = jsondata['close_volume_22'],
close_close_23 = jsondata['close_close_23'],
close_open_23 = jsondata['close_open_23'],
close_high_23 = jsondata['close_high_23'],
close_low_23 = jsondata['close_low_23'],
close_upband_23 = jsondata['close_upband_23'],
close_lowband_23 = jsondata['close_lowband_23'],
close_midleband_23 = jsondata['close_midleband_23'],
close_jaw_23 = jsondata['close_jaw_23'],
close_lips_23 = jsondata['close_lips_23'],
close_teeth_23 = jsondata['close_teeth_23'],
close_volume_23 = jsondata['close_volume_23'],
close_close_24 = jsondata['close_close_24'],
close_open_24 = jsondata['close_open_24'],
close_high_24 = jsondata['close_high_24'],
close_low_24 = jsondata['close_low_24'],
close_upband_24 = jsondata['close_upband_24'],
close_lowband_24 = jsondata['close_lowband_24'],
close_midleband_24 = jsondata['close_midleband_24'],
close_jaw_24 = jsondata['close_jaw_24'],
close_lips_24 = jsondata['close_lips_24'],
close_teeth_24 = jsondata['close_teeth_24'],
close_volume_24 = jsondata['close_volume_24'],
close_result = jsondata['result'],
close_effectivnes = effectivnes,
close_neuron_name = jsondata['neuron_name'],
close_closeprice = jsondata['closeprice'],
close_time = DP.parse(jsondata['orderclosetime'])
)
| gpl-3.0 | 6,649,889,755,978,791,000 | 6,234,751,372,971,357,000 | 49.413008 | 135 | 0.546768 | false |
rodrigc/buildbot | master/buildbot/test/unit/util/test_state.py | 6 | 2671 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from twisted.internet import defer
from twisted.trial import unittest
from buildbot.test.fake import fakemaster
from buildbot.test.util.misc import TestReactorMixin
from buildbot.util import state
class FakeObject(state.StateMixin):
name = "fake-name"
def __init__(self, master):
self.master = master
class TestStateMixin(TestReactorMixin, unittest.TestCase):
OBJECTID = 19
def setUp(self):
self.setUpTestReactor()
self.master = fakemaster.make_master(self, wantDb=True)
self.object = FakeObject(self.master)
@defer.inlineCallbacks
def test_getState(self):
self.master.db.state.fakeState('fake-name', 'FakeObject',
fav_color=['red', 'purple'])
res = yield self.object.getState('fav_color')
self.assertEqual(res, ['red', 'purple'])
@defer.inlineCallbacks
def test_getState_default(self):
res = yield self.object.getState('fav_color', 'black')
self.assertEqual(res, 'black')
def test_getState_KeyError(self):
self.master.db.state.fakeState('fake-name', 'FakeObject',
fav_color=['red', 'purple'])
d = self.object.getState('fav_book')
def cb(_):
self.fail("should not succeed")
def check_exc(f):
f.trap(KeyError)
d.addCallbacks(cb, check_exc)
return d
@defer.inlineCallbacks
def test_setState(self):
yield self.object.setState('y', 14)
self.master.db.state.assertStateByClass('fake-name', 'FakeObject',
y=14)
@defer.inlineCallbacks
def test_setState_existing(self):
self.master.db.state.fakeState('fake-name', 'FakeObject', x=13)
yield self.object.setState('x', 14)
self.master.db.state.assertStateByClass('fake-name', 'FakeObject',
x=14)
| gpl-2.0 | -3,195,106,688,496,975,000 | 1,745,125,591,363,724,800 | 31.975309 | 79 | 0.643954 | false |
campbe13/openhatch | vendor/packages/Django/django/http/utils.py | 36 | 1499 | """
Functions that modify an HTTP request or response in some way.
"""
# This group of functions are run as part of the response handling, after
# everything else, including all response middleware. Think of them as
# "compulsory response middleware". Be careful about what goes here, because
# it's a little fiddly to override this behavior, so they should be truly
# universally applicable.
def fix_location_header(request, response):
"""
Ensures that we always use an absolute URI in any location header in the
response. This is required by RFC 2616, section 14.30.
Code constructing response objects is free to insert relative paths, as
this function converts them to absolute paths.
"""
if 'Location' in response and request.get_host():
response['Location'] = request.build_absolute_uri(response['Location'])
return response
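# Illustrative example (the host name is an assumption, nothing in this module
# defines it): if a view sets response['Location'] = '/next/' and the request
# was made to http://example.com, fix_location_header() rewrites the header to
# 'http://example.com/next/'.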
def conditional_content_removal(request, response):
"""
Removes the content of responses for HEAD requests, 1xx, 204 and 304
responses. Ensures compliance with RFC 2616, section 4.3.
"""
if 100 <= response.status_code < 200 or response.status_code in (204, 304):
if response.streaming:
response.streaming_content = []
else:
response.content = ''
response['Content-Length'] = '0'
if request.method == 'HEAD':
if response.streaming:
response.streaming_content = []
else:
response.content = ''
return response
| agpl-3.0 | 2,412,558,427,209,535,500 | -1,214,356,521,210,083,600 | 35.560976 | 79 | 0.682455 | false |
dustcloud/dustlink | SmartMeshSDK/IpMgrConnectorMux/IpMgrConnectorMuxInternal.py | 4 | 9593 | import threading
import socket
import select
import struct
import MuxMsg
from SmartMeshSDK import ApiException, \
ApiConnector
from SmartMeshSDK.ApiDefinition import IpMgrDefinition
class IpMgrConnectorMuxInternal(ApiConnector.ApiConnector ) :
'''
\ingroup ApiConnector
\brief Internal class for IP manager connector, through Serial Mux.
Members of class
acknowledgeBuf - binary payload of acknowledge
ackCmdId - command ID from acknowledge
sendSemaphor - semaphore to wait for acknowledgement of command (threading.Semaphore)
inputThread - thread for processing input packets (threading.Thread)
socket - TCP socket for connection with Serial Mux
'''
PARAM_HOST = 'host'
PARAM_PORT = 'port'
PARAM_ISSENDHELLO = 'isSendHello'
DEFAULT_PARAM_HOST = '127.0.0.1'
DEFAULT_PARAM_PORT = 9900
_RC_OK = 0
_RC_TIMEOUT = 5
def __init__(self, maxQSize = 100) :
ApiConnector.ApiConnector.__init__(self, maxQSize)
self.acknowledgeBuf = None
self.ackCmdId = -1
self.sendSemaphor = threading.BoundedSemaphore(1)
self.sendLock = threading.Lock()
self.socket = None
self.inputThread = None
self.muxMsg = MuxMsg.MuxMsg(self.processCmd)
self.apiDef = IpMgrDefinition.IpMgrDefinition()
self.notifIds = self.apiDef.getIds(self.apiDef.NOTIFICATION)
def connect(self, params = {}) :
'''
\brief Connect to device
\param params Dictionary of connection parameters:
- 'host' - IP address of Mux (default: '127.0.0.1')
- 'port' - port of Mux (default: 9900)
- 'isSendHello' - send Hello message after connection (default True)
'''
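        # Illustrative call (comment only; the literals simply repeat the
        # documented defaults, they are not required values):
        #   connector = IpMgrConnectorMuxInternal()
        #   connector.connect({'host': '127.0.0.1', 'port': 9900})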
host = self.DEFAULT_PARAM_HOST
port = self.DEFAULT_PARAM_PORT
isSendHello = True
if self.PARAM_HOST in params and params[self.PARAM_HOST] :
host = params[self.PARAM_HOST]
if self.PARAM_PORT in params and params[self.PARAM_PORT] :
port = int(params[self.PARAM_PORT])
if self.PARAM_ISSENDHELLO in params :
isSendHello = params[self.PARAM_ISSENDHELLO]
if self.inputThread : # Wait finish disconnect process
try :
self.inputThread.join(1.0)
if self.inputThread.isAlive() :
raise ApiException.ConnectionError("Already connected")
except RuntimeError :
pass # Ignore join error
self.inputThread = None
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.connect( (host, port) )
self.socket.setblocking(1)
except socket.error as ex:
raise ApiException.ConnectionError(str(ex))
self.sendSemaphor.acquire(False) # Clear semaphore
# Start thread for processing input stream
self.inputThread = threading.Thread(target = self.inputProcess)
self.inputThread.name = "IpMgrConnectorMuxInternal"
self.inputThread.start()
ApiConnector.ApiConnector.connect(self)
if isSendHello :
self.sendHelloCmd()
def disconnect(self, reason="") :
if not self.isConnected :
return
try :
self.socket.send("stop")
self.socket.shutdown(socket.SHUT_RD) # start disconnection
self.socket.close()
except socket.error :
pass # Ignore socket error
ApiConnector.ApiConnector.disconnect(self, reason)
def send(self, cmdNames, params) :
self.sendLock.acquire()
try :
if not self.isConnected :
raise ApiException.ConnectionError("Disconnected")
# Send data
ApiConnector.log.debug("IO OUT. {0} : {1}".format(cmdNames, params))
(cmdId, paramsBinList) = self.apiDef.serialize(cmdNames, params)
paramsBin = struct.pack('!'+str(len(paramsBinList))+'B', *paramsBinList)
ApiConnector.logDump(paramsBin, "RawIO OUT. Command ID: {0}".format(cmdId))
packet = self.muxMsg.build_message(cmdId, paramsBin)
self.acknowledgeBuf = None
self.ackCmdId = -1
try :
self.socket.sendall(packet)
except socket.error, way:
# Socket error. Disconnect from device. Stop command processing
reason = "IO output error [{0}] {1}".format(way.args[0], way.args[1])
self.disconnect(reason)
raise ApiException.ConnectionError(reason)
            # Wait for the acknowledgement
self.sendSemaphor.acquire()
            if not self.isConnected : # Disconnect happened while waiting for the ack.
raise ApiException.ConnectionError(self.disconnectReason)
# Process acknowledge
cmdId = self.apiDef.nameToId(self.apiDef.COMMAND, (cmdNames[0],))
if self.ackCmdId != cmdId :
reason = "Unexpected acknowledge {0} for command {1} ({2})".format(self.ackCmdId, cmdId, cmdNames)
self.disconnect(reason)
raise ApiException.ConnectionError(reason)
# Parse acknowledge
ackList = struct.unpack('!'+str(len(self.acknowledgeBuf))+'B', self.acknowledgeBuf)
(resCmdName, resParams) = self.apiDef.deserialize(self.apiDef.COMMAND, self.ackCmdId, ackList)
ApiConnector.log.debug("IO INP. {0} : {1}".format(resCmdName, resParams))
if self.apiDef.RC in resParams and resParams[self.apiDef.RC] != self._RC_OK :
if resParams[self.apiDef.RC] == self._RC_TIMEOUT :
raise ApiException.CommandTimeoutError(resCmdName)
try:
desc = '({0})\n{1}'.format(
self.apiDef.responseFieldValueToDesc(
resCmdName,
self.apiDef.RC,
resParams[self.apiDef.RC],
),
self.apiDef.rcToDescription(
resParams[self.apiDef.RC],
resCmdName,
),
)
except:
desc = None
raise ApiException.APIError(
cmd=resCmdName,
rc=resParams[self.apiDef.RC],
desc=desc
)
self.ackCmdId = -1
self.acknowledgeBuf = None
finally:
self.sendLock.release()
return resParams
def ackSignal(self):
'''
\brief Send signal 'Acknowledge received'
'''
try :
self.sendSemaphor.release()
except ValueError :
pass
def inputProcess(self):
'''
\brief Processing device input
'''
try :
while True :
select.select([self.socket], [], [self.socket])
buf = self.socket.recv(4096)
if not buf :
raise socket.error(0, "Connection close")
self.muxMsg.parse(buf)
except socket.error, way:
# Disconnect process -------------------------------------------------
            if way.args[0] == 9 : # errno 9 (EBADF): socket already closed locally
way = socket.error(0, "Connection close")
ApiConnector.ApiConnector.disconnect(self, "Disconnect. Reason: {0} [{1}]".format(way.args[1], way.args[0]))
self.acknowledgeBuf = None
self.ackCmdId = -1
self.ackSignal()
try :
self.socket.close()
except socket.error :
pass # Ignore socket error
def processCmd(self, reserved, cmdId, payload):
'''
\brief deserialize and process command
'''
ApiConnector.logDump(payload, "RawIO INP. Command ID: {0}".format(cmdId))
if cmdId in self.notifIds :
try :
payloadList = struct.unpack('!'+str(len(payload))+'B', payload)
(notifNames, params) = self.apiDef.deserialize(self.apiDef.NOTIFICATION, cmdId, payloadList)
ApiConnector.log.debug("IO INP. {0} : {1}".format(notifNames, params))
self.putNotification((notifNames, params))
except ApiException.ConnectionError as ex:
raise socket.error(0, ex.value) # Initiate disconnection
except Exception as ex :
ApiConnector.log.error("Deserialization command {0}. Error {1}".format(cmdId, ex))
else :
self.ackCmdId = cmdId
self.acknowledgeBuf = payload
self.ackSignal()
def sendHelloCmd(self):
'''
\brief Send Hello command
'''
res = self.send(["mux_hello"], {"version" : self.muxMsg.getVer(), "secret" : self.muxMsg.getAuth()})
return res
| bsd-3-clause | -8,735,074,126,666,874,000 | 136,916,129,993,505,020 | 39.89083 | 120 | 0.530699 | false |
diego-d5000/MisValesMd | env/lib/python2.7/site-packages/django/core/checks/model_checks.py | 1 | 2454 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import inspect
import types
from django.apps import apps
from django.core.checks import Error, Tags, register
@register(Tags.models)
def check_all_models(app_configs=None, **kwargs):
errors = []
for model in apps.get_models():
if app_configs is None or model._meta.app_config in app_configs:
if not inspect.ismethod(model.check):
errors.append(
Error(
"The '%s.check()' class method is "
"currently overridden by %r." % (
model.__name__, model.check),
hint=None,
obj=model,
id='models.E020'
)
)
else:
errors.extend(model.check(**kwargs))
return errors
@register(Tags.models, Tags.signals)
def check_model_signals(app_configs=None, **kwargs):
"""
Ensure lazily referenced model signals senders are installed.
"""
# Avoid circular import
from django.db import models
errors = []
for name in dir(models.signals):
obj = getattr(models.signals, name)
if isinstance(obj, models.signals.ModelSignal):
for reference, receivers in obj.unresolved_references.items():
for receiver, _, _ in receivers:
# The receiver is either a function or an instance of class
# defining a `__call__` method.
if isinstance(receiver, types.FunctionType):
description = "The '%s' function" % receiver.__name__
else:
description = "An instance of the '%s' class" % receiver.__class__.__name__
errors.append(
Error(
"%s was connected to the '%s' signal "
"with a lazy reference to the '%s' sender, "
"which has not been installed." % (
description, name, '.'.join(reference)
),
obj=receiver.__module__,
hint=None,
id='signals.E001'
)
)
return errors
| mit | -643,845,291,321,849,700 | -9,001,411,443,915,552,000 | 36.34375 | 99 | 0.467808 | false |
MM1nd/worldengine | worldengine/astar.py | 4 | 6331 | #!/usr/bin/env python
"""
A* works based on cost, the higher the cost the less likely it is to travel
that path. There are no hard limits, it works on minus infinity and
positive infinity.
It will take a starting position and and end position, then find the path
between the two with the lowest cost.
This is perfect for height maps for example, because you can use it to
find path through mountain/hills between villages.
usage: You can use the PathFinder.find(height_map, source, destination)
where height_map is any 2D array while source and destination are both
lists of two values [x, y].
author: Bret Curtis
"""
class Path:
""" A path object, containing the nodes and total cost."""
def __init__(self, nodes, total_cost):
self.nodes = nodes
self.totalCost = total_cost
def get_nodes(self):
return self.nodes
def get_total_movement_cost(self):
return self.totalCost
class Node:
""" The basic unit/pixel/location is a Node."""
def __init__(self, location, movement_cost, lid, parent=None):
self.location = location # where is this node located
self.mCost = movement_cost # total move cost to reach this node
self.parent = parent # parent node
self.score = 0 # calculated score for this node
self.lid = lid # location id unique for each location in the map
def __eq__(self, n):
if n.lid == self.lid:
return 1
else:
return 0
class AStar:
""" The "A* Star Search Algorithm" itself.
Have a read:
https://en.wikipedia.org/wiki/A*_search_algorithm
"""
def __init__(self, map_handler):
self.mh = map_handler
self.o = []
self.on = []
self.c = []
def _get_best_open_node(self):
best_node = None
for n in self.on:
if not best_node:
best_node = n
else:
if n.score <= best_node.score:
best_node = n
return best_node
@staticmethod
def _trace_path(n):
nodes = []
total_cost = n.mCost
p = n.parent
nodes.insert(0, n)
while 1:
if p.parent is None:
break
nodes.insert(0, p)
p = p.parent
return Path(nodes, total_cost)
def _handle_node(self, node, end):
i = self.o.index(node.lid)
self.on.pop(i)
self.o.pop(i)
self.c.append(node.lid)
nodes = self.mh.get_adjacent_nodes(node, end)
for n in nodes:
if n.location == end: # reached the destination
return n
elif n.lid in self.c: # already in close, skip this
continue
elif n.lid in self.o: # already in open, check if better score
i = self.o.index(n.lid)
on = self.on[i]
if n.mCost < on.mCost:
self.on.pop(i)
self.o.pop(i)
self.on.append(n)
self.o.append(n.lid)
else: # new node, append to open list
self.on.append(n)
self.o.append(n.lid)
return None
def find_path(self, from_location, to_location):
end = to_location
f_node = self.mh.get_node(from_location)
self.on.append(f_node)
self.o.append(f_node.lid)
next_node = f_node
counter = 0 # a bail-out counter
while next_node is not None:
if counter > 10000:
break # no path found under limit
finish = self._handle_node(next_node, end)
if finish:
return self._trace_path(finish)
next_node = self._get_best_open_node()
counter += 1
return None
class SQLocation:
"""A simple Square Map Location implementation"""
def __init__(self, x, y):
self.x = x
self.y = y
def __eq__(self, l):
if l.x == self.x and l.y == self.y:
return 1
else:
return 0
class SQMapHandler:
"""A simple Square Map implementation"""
def __init__(self, map_data, width, height):
self.m = map_data
self.w = width
self.h = height
def get_node(self, location):
x = location.x
y = location.y
if x < 0 or x >= self.w or y < 0 or y >= self.h:
return None
d = self.m[(y * self.w) + x]
return Node(location, d, ((y * self.w) + x))
def get_adjacent_nodes(self, cur_node, destination):
result = []
cl = cur_node.location
dl = destination
n = self._handle_node(cl.x + 1, cl.y, cur_node, dl.x, dl.y)
if n:
result.append(n)
n = self._handle_node(cl.x - 1, cl.y, cur_node, dl.x, dl.y)
if n:
result.append(n)
n = self._handle_node(cl.x, cl.y + 1, cur_node, dl.x, dl.y)
if n:
result.append(n)
n = self._handle_node(cl.x, cl.y - 1, cur_node, dl.x, dl.y)
if n:
result.append(n)
return result
def _handle_node(self, x, y, from_node, destination_x, destination_y):
n = self.get_node(SQLocation(x, y))
if n is not None:
dx = max(x, destination_x) - min(x, destination_x)
dy = max(y, destination_y) - min(y, destination_y)
em_cost = dx + dy
n.mCost += from_node.mCost
n.score = n.mCost + em_cost
n.parent = from_node
return n
return None
class PathFinder:
"""Using the a* algorithm we will try to find the best path between two
points.
"""
def __init__(self):
pass
@staticmethod
def find(height_map, source, destination):
sx, sy = source
dx, dy = destination
path = []
height, width = height_map.shape
graph = height_map.flatten('C') #flatten array (row-major)
pathfinder = AStar(SQMapHandler(graph, width, height))
start = SQLocation(sx, sy)
end = SQLocation(dx, dy)
p = pathfinder.find_path(start, end)
if not p:
return path
for node in p.nodes:
path.append([node.location.x, node.location.y])
return path
| mit | 711,988,068,682,645,500 | -3,517,189,522,458,651,600 | 26.767544 | 75 | 0.538304 | false |
napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes_/__init__.py | 1 | 42200 | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
from . import default_metric
from . import delay_metric
from . import expense_metric
from . import error_metric
class prefixes(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/ipv4-external-reachability/prefixes/prefixes. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: IPv4 external prefixes and reachability attributes.
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__state",
"__default_metric",
"__delay_metric",
"__expense_metric",
"__error_metric",
)
_yang_name = "prefixes"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__default_metric = YANGDynClass(
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__delay_metric = YANGDynClass(
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__expense_metric = YANGDynClass(
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__error_metric = YANGDynClass(
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"ipv4-external-reachability",
"prefixes",
"prefixes",
]
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
YANG Description: State parameters of IPv4 standard prefix.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of IPv4 standard prefix.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_default_metric(self):
"""
Getter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
YANG Description: This container defines ISIS Default Metric.
"""
return self.__default_metric
def _set_default_metric(self, v, load=False):
"""
Setter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_metric() directly.
YANG Description: This container defines ISIS Default Metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """default_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__default_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_default_metric(self):
self.__default_metric = YANGDynClass(
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_delay_metric(self):
"""
Getter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
YANG Description: This container defines the ISIS delay metric.
"""
return self.__delay_metric
def _set_delay_metric(self, v, load=False):
"""
Setter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_delay_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_delay_metric() directly.
YANG Description: This container defines the ISIS delay metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """delay_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__delay_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_delay_metric(self):
self.__delay_metric = YANGDynClass(
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_expense_metric(self):
"""
Getter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
YANG Description: This container defines the ISIS expense metric.
"""
return self.__expense_metric
def _set_expense_metric(self, v, load=False):
"""
Setter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_expense_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_expense_metric() directly.
YANG Description: This container defines the ISIS expense metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """expense_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__expense_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_expense_metric(self):
self.__expense_metric = YANGDynClass(
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_error_metric(self):
"""
Getter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
YANG Description: This container defines the ISIS error metric.
"""
return self.__error_metric
def _set_error_metric(self, v, load=False):
"""
Setter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_error_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_error_metric() directly.
YANG Description: This container defines the ISIS error metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """error_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__error_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_error_metric(self):
self.__error_metric = YANGDynClass(
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
state = __builtin__.property(_get_state)
default_metric = __builtin__.property(_get_default_metric)
delay_metric = __builtin__.property(_get_delay_metric)
expense_metric = __builtin__.property(_get_expense_metric)
error_metric = __builtin__.property(_get_error_metric)
_pyangbind_elements = OrderedDict(
[
("state", state),
("default_metric", default_metric),
("delay_metric", delay_metric),
("expense_metric", expense_metric),
("error_metric", error_metric),
]
)
from . import state
from . import default_metric
from . import delay_metric
from . import expense_metric
from . import error_metric
class prefixes(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/ipv4-external-reachability/prefixes/prefixes. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: IPv4 external prefixes and reachability attributes.
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__state",
"__default_metric",
"__delay_metric",
"__expense_metric",
"__error_metric",
)
_yang_name = "prefixes"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__default_metric = YANGDynClass(
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__delay_metric = YANGDynClass(
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__expense_metric = YANGDynClass(
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__error_metric = YANGDynClass(
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"ipv4-external-reachability",
"prefixes",
"prefixes",
]
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
YANG Description: State parameters of IPv4 standard prefix.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of IPv4 standard prefix.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_default_metric(self):
"""
Getter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
YANG Description: This container defines ISIS Default Metric.
"""
return self.__default_metric
def _set_default_metric(self, v, load=False):
"""
Setter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_metric() directly.
YANG Description: This container defines ISIS Default Metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """default_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__default_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_default_metric(self):
self.__default_metric = YANGDynClass(
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_delay_metric(self):
"""
Getter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
YANG Description: This container defines the ISIS delay metric.
"""
return self.__delay_metric
def _set_delay_metric(self, v, load=False):
"""
Setter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_delay_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_delay_metric() directly.
YANG Description: This container defines the ISIS delay metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """delay_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__delay_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_delay_metric(self):
self.__delay_metric = YANGDynClass(
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_expense_metric(self):
"""
Getter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
YANG Description: This container defines the ISIS expense metric.
"""
return self.__expense_metric
def _set_expense_metric(self, v, load=False):
"""
Setter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_expense_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_expense_metric() directly.
YANG Description: This container defines the ISIS expense metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """expense_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__expense_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_expense_metric(self):
self.__expense_metric = YANGDynClass(
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_error_metric(self):
"""
Getter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
YANG Description: This container defines the ISIS error metric.
"""
return self.__error_metric
def _set_error_metric(self, v, load=False):
"""
Setter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_error_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_error_metric() directly.
YANG Description: This container defines the ISIS error metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """error_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__error_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_error_metric(self):
self.__error_metric = YANGDynClass(
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
state = __builtin__.property(_get_state)
default_metric = __builtin__.property(_get_default_metric)
delay_metric = __builtin__.property(_get_delay_metric)
expense_metric = __builtin__.property(_get_expense_metric)
error_metric = __builtin__.property(_get_error_metric)
_pyangbind_elements = OrderedDict(
[
("state", state),
("default_metric", default_metric),
("delay_metric", delay_metric),
("expense_metric", expense_metric),
("error_metric", error_metric),
]
)
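    # Editor's note (not part of the generated file): these pyangbind containers
    # expose each YANG child as a read-only property, since every node here is
    # config:false operational state. Code that populates the tree is expected to
    # call the private setters directly, e.g. (illustrative only):
    #
    #   p = prefixes()
    #   p._path()                   # -> [..., 'ipv4-external-reachability', 'prefixes', 'prefixes']
    #   p._set_default_metric(dm)   # dm: a default_metric.default_metric instance
    #   p.default_metric            # read-only view of the container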
| apache-2.0 | 2,167,644,750,156,851,500 | 2,500,236,777,942,914,600 | 41.157842 | 402 | 0.593128 | false |
rohitwaghchaure/erpnext_develop | erpnext/patches/v4_0/create_price_list_if_missing.py | 119 | 1087 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils.nestedset import get_root_of
def execute():
# setup not complete
if not frappe.db.sql("""select name from tabCompany limit 1"""):
return
if "shopping_cart" in frappe.get_installed_apps():
frappe.reload_doc("shopping_cart", "doctype", "shopping_cart_settings")
if not frappe.db.sql("select name from `tabPrice List` where buying=1"):
create_price_list(_("Standard Buying"), buying=1)
if not frappe.db.sql("select name from `tabPrice List` where selling=1"):
create_price_list(_("Standard Selling"), selling=1)
def create_price_list(pl_name, buying=0, selling=0):
price_list = frappe.get_doc({
"doctype": "Price List",
"price_list_name": pl_name,
"enabled": 1,
"buying": buying,
"selling": selling,
"currency": frappe.db.get_default("currency"),
"territories": [{
"territory": get_root_of("Territory")
}]
})
price_list.insert()
| gpl-3.0 | -7,422,420,941,382,180,000 | -4,927,974,857,295,257,000 | 30.057143 | 74 | 0.703772 | false |
mnach/suds-py3k | suds/serviceproxy.py | 2 | 2974 | # This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( [email protected] )
"""
The service proxy provides access to web services.
Replaced by: L{client.Client}
"""
from logging import getLogger
from suds import *
from suds.client import Client
log = getLogger(__name__)
class ServiceProxy(object):
"""
A lightweight soap based web service proxy.
@ivar __client__: A client.
Everything is delegated to the 2nd generation API.
@type __client__: L{Client}
@note: Deprecated, replaced by L{Client}.
"""
def __init__(self, url, **kwargs):
"""
@param url: The URL for the WSDL.
@type url: str
@param kwargs: keyword arguments.
@keyword faults: Raise faults raised by server (default:True),
else return tuple from service method invocation as (http code, object).
@type faults: boolean
@keyword proxy: An http proxy to be specified on requests (default:{}).
The proxy is defined as {protocol:proxy,}
@type proxy: dict
"""
client = Client(url, **kwargs)
self.__client__ = client
def get_instance(self, name):
"""
Get an instance of a WSDL type by name
@param name: The name of a type defined in the WSDL.
@type name: str
@return: An instance on success, else None
@rtype: L{sudsobject.Object}
"""
return self.__client__.factory.create(name)
def get_enum(self, name):
"""
Get an instance of an enumeration defined in the WSDL by name.
@param name: The name of a enumeration defined in the WSDL.
@type name: str
@return: An instance on success, else None
@rtype: L{sudsobject.Object}
"""
return self.__client__.factory.create(name)
def __str__(self):
return str(self.__client__)
def __unicode__(self):
return str(self.__client__)
def __getattr__(self, name):
builtin = name.startswith('__') and name.endswith('__')
if builtin:
return self.__dict__[name]
else:
            return getattr(self.__client__.service, name)
| lgpl-3.0 | -5,556,814,001,815,650,000 | 8,415,065,050,782,345,000 | 33.593023 | 88 | 0.62811 | false |
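# Editor's sketch (not part of the original module): typical use of this
# deprecated facade. The WSDL URL and web-service method name are hypothetical.
#
#   from suds.serviceproxy import ServiceProxy
#
#   service = ServiceProxy('http://example.com/ws?wsdl', faults=False,
#                          proxy={'http': 'localhost:3128'})
#   person = service.get_instance('Person')    # build a WSDL-defined type by name
#   status, reply = service.getPersonById(7)   # unknown attributes resolve to web methods;
#                                               # with faults=False the result is (http code, object)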
JaDogg/__py_playground | reference/parsley/ometa/test/test_tube.py | 3 | 3004 | from __future__ import absolute_import, unicode_literals
import unittest
from ometa.grammar import OMeta
from ometa.tube import TrampolinedParser
def iterbytes(originalBytes):
for i in range(len(originalBytes)):
yield originalBytes[i:i+1]
class TrampolinedReceiver():
"""
Receive and store the passed in data.
"""
currentRule = 'initial'
def __init__(self):
self.received = []
def receive(self, data):
self.received.append(data)
class TrampolinedParserTestCase(unittest.TestCase):
"""
Tests for L{ometa.tube.TrampolinedParser}
"""
def _parseGrammar(self, grammar, name="Grammar"):
return OMeta(grammar).parseGrammar(name)
def setUp(self):
_grammar = r"""
delimiter = '\r\n'
initial = <(~delimiter anything)*>:val delimiter -> receiver.receive(val)
witharg :arg1 :arg2 = <(~delimiter anything)*>:a delimiter -> receiver.receive(arg1+arg2+a)
"""
self.grammar = self._parseGrammar(_grammar)
def test_dataNotFullyReceived(self):
"""
Since the initial rule inside the grammar is not matched, the receiver
shouldn't receive any byte.
"""
receiver = TrampolinedReceiver()
trampolinedParser = TrampolinedParser(self.grammar, receiver, {})
buf = 'foobarandnotreachdelimiter'
for c in iterbytes(buf):
trampolinedParser.receive(c)
self.assertEqual(receiver.received, [])
def test_dataFullyReceived(self):
"""
The receiver should receive the data according to the grammar.
"""
receiver = TrampolinedReceiver()
trampolinedParser = TrampolinedParser(self.grammar, receiver, {})
buf = '\r\n'.join(('foo', 'bar', 'foo', 'bar'))
for c in iterbytes(buf):
trampolinedParser.receive(c)
self.assertEqual(receiver.received, ['foo', 'bar', 'foo'])
trampolinedParser.receive('\r\n')
self.assertEqual(receiver.received, ['foo', 'bar', 'foo', 'bar'])
def test_bindings(self):
"""
The passed-in bindings should be accessible inside the grammar.
"""
receiver = TrampolinedReceiver()
grammar = r"""
initial = digit:d (-> int(d)+SMALL_INT):val -> receiver.receive(val)
"""
bindings = {'SMALL_INT': 3}
TrampolinedParser(self._parseGrammar(grammar), receiver, bindings).receive('0')
self.assertEqual(receiver.received, [3])
def test_currentRuleWithArgs(self):
"""
        TrampolinedParser should be able to invoke the current rule with args.
"""
receiver = TrampolinedReceiver()
receiver.currentRule = "witharg", "nice ", "day"
trampolinedParser = TrampolinedParser(self.grammar, receiver, {})
buf = ' oh yes\r\n'
for c in iterbytes(buf):
trampolinedParser.receive(c)
self.assertEqual(receiver.received, ["nice day oh yes"])
| mit | 243,262,195,913,948,260 | -275,580,688,192,211,000 | 30.957447 | 103 | 0.617843 | false |
box/ClusterRunner | app/master/time_based_atom_grouper.py | 4 | 11090 | from collections import OrderedDict
from app.master.atom_grouper import AtomGrouper
class TimeBasedAtomGrouper(object):
"""
This class implements the algorithm to best split & group atoms based on historic time values. This algorithm is
somewhat complicated, so I'm going to give a summary here.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Let N be the number of concurrent executors allocated for this job.
Let T be the aggregate serial time to execute all atoms on a single executor.
Both N and T are known values at the beginning of this algorithm.
In the ideal subjob atom-grouping, we would have exactly N subjobs, each allocated with T/N amount of work that
    would all end at the same time. However, in reality, there are a few factors that make this solution infeasible:
- There is a significant amount of variability in the times of running these atoms, so numbers are never exact.
    - Certain builds will introduce new tests (for which we don't have historical time data).
- Not all of the machines are exactly the same, so we can't expect identical performance.
We have two aims for this algorithm:
- Minimize the amount of framework overhead (time spent sending and retrieving subjobs) and maximize the amount of
time the slaves actually spend running the build.
- Don't overload any single executor with too much work--this will cause the whole build to wait on a single
executor. We want to try to get all of the executors to end as close to the same time as possible in order to
get rid of any inefficient use of slave machines.
In order to accomplish this, the algorithm implemented by this class tries to split up the majority of the atoms
into N buckets, and splits up the rest of the atoms into smaller buckets. Hopefully, the timeline graph of
executed subjobs for each of the executors would end up looking like this:
[========================================================================][===][==][==]
[===============================================================================][==]
[====================================================================][====][===][==][=]
[========================================================================][===][==][=]
[=====================================================================][====][==][==]
[==================================================================================][=]
[===================================================================][======][==][==]
The algorithm has two stages of subjob creation: the 'big chunk' stage and the 'small chunk' stage. The 'big chunk'
stage creates exactly N large subjob groupings that will consist of the majority of atoms (in terms of runtime).
The 'small chunk' stage creates ~2N short subjob groupings that will be used to fill in the gaps in order to aim for
having all of the executors end at similar times.
Notes:
    - For new atoms that we don't have historic times for, we will assign them the highest atom time value in order to
avoid underestimating the length of unknown atoms.
- We will have to try tweaking the percentage of T that we want to be allocated for the initial large batch of
big subjobs. Same goes for the number and size of the smaller buckets.
"""
BIG_CHUNK_FRACTION = 0.8
def __init__(self, atoms, max_executors, atom_time_map, project_directory):
"""
:param atoms: the list of atoms for this build
:type atoms: list[app.master.atom.Atom]
:param max_executors: the maximum number of executors for this build
:type max_executors: int
:param atom_time_map: a dictionary containing the historic times for atoms for this particular job
:type atom_time_map: dict[str, float]
:type project_directory: str
"""
self._atoms = atoms
self._max_executors = max_executors
self._atom_time_map = atom_time_map
self._project_directory = project_directory
def groupings(self):
"""
Group the atoms into subjobs using historic timing data.
:return: a list of lists of atoms
:rtype: list[list[app.master.atom.Atom]]
"""
# 1). Coalesce the atoms with historic atom times, and also get total estimated runtime
try:
total_estimated_runtime = self._set_expected_atom_times(
self._atoms, self._atom_time_map, self._project_directory)
except _AtomTimingDataError:
grouper = AtomGrouper(self._atoms, self._max_executors)
return grouper.groupings()
# 2). Sort them by decreasing time, and add them to an OrderedDict
atoms_by_decreasing_time = sorted(self._atoms, key=lambda atom: atom.expected_time, reverse=True)
sorted_atom_times_left = OrderedDict([(atom, atom.expected_time) for atom in atoms_by_decreasing_time])
# 3). Group them!
# Calculate what the target 'big subjob' time is going to be for each executor's initial subjob
big_subjob_time = (total_estimated_runtime * self.BIG_CHUNK_FRACTION) / self._max_executors
# Calculate what the target 'small subjob' time is going to be
small_subjob_time = (total_estimated_runtime * (1.0 - self.BIG_CHUNK_FRACTION)) / (2 * self._max_executors)
# _group_atoms_into_sized_buckets() will remove elements from sorted_atom_times_left.
subjobs = self._group_atoms_into_sized_buckets(sorted_atom_times_left, big_subjob_time, self._max_executors)
small_subjobs = self._group_atoms_into_sized_buckets(sorted_atom_times_left, small_subjob_time, None)
subjobs.extend(small_subjobs)
return subjobs
def _set_expected_atom_times(self, new_atoms, old_atoms_with_times, project_directory):
"""
Set the expected runtime (new_atom.expected_time) of each atom in new_atoms using historic timing data.
Additionally, return the total estimated serial-runtime for this build. Although this seems like an odd thing
for this method to return, it is done here for efficiency. There can be thousands of atoms, and iterating
through them multiple times seems inefficient.
:param new_atoms: the list of atoms that will be run in this build
:type new_atoms: list[app.master.atom.Atom]
:param old_atoms_with_times: a dictionary containing the historic times for atoms for this particular job
:type old_atoms_with_times: dict[str, float]
:type project_directory: str
:return: the total estimated runtime in seconds
:rtype: float
"""
atoms_without_timing_data = []
total_time = 0
max_atom_time = 0
# Generate list for atoms that have timing data
for new_atom in new_atoms:
if new_atom.command_string not in old_atoms_with_times:
atoms_without_timing_data.append(new_atom)
continue
new_atom.expected_time = old_atoms_with_times[new_atom.command_string]
# Discover largest single atom time to use as conservative estimates for atoms with unknown times
if max_atom_time < new_atom.expected_time:
max_atom_time = new_atom.expected_time
# We want to return the atom with the project directory still in it, as this data will directly be
# sent to the slave to be run.
total_time += new_atom.expected_time
# For the atoms without historic timing data, assign them the largest atom time we have
for new_atom in atoms_without_timing_data:
new_atom.expected_time = max_atom_time
if len(new_atoms) == len(atoms_without_timing_data):
raise _AtomTimingDataError
total_time += (max_atom_time * len(atoms_without_timing_data))
return total_time
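    # Editor's note, illustrative behaviour of _set_expected_atom_times (values
    # invented for the example): with historic times {a: 10s, b: 30s} and a new
    # atom c that has no data, c.expected_time is set to 30s (the largest known
    # time) and the method returns 10 + 30 + 30 = 70s. If *no* atom has historic
    # data, _AtomTimingDataError is raised and groupings() falls back to AtomGrouper.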
def _group_atoms_into_sized_buckets(self, sorted_atom_time_dict, target_group_time, max_groups_to_create):
"""
Given a sorted dictionary (Python FTW) of [atom, time] pairs in variable sorted_atom_time_dict, return a list
of lists of atoms that each are estimated to take target_group_time seconds. This method will generate at most
max_groups_to_create groupings, and will return once this limit is reached or when sorted_atom_time_dict is
empty.
Note, this method will modify sorted_atom_time_dict's state by removing elements as needed (often from the
middle of the collection).
        :param sorted_atom_time_dict: the sorted (longest first) OrderedDict containing [atom, time] pairs.
            This OrderedDict will have elements removed by this method.
:type sorted_atom_time_dict: OrderedDict[app.master.atom.Atom, float]
:param target_group_time: how long each subjob should approximately take
:type target_group_time: float
:param max_groups_to_create: the maximum number of subjobs to create. Once max_groups_to_create limit is
reached, this method will return the subjobs that have already been grouped. If set to None, then there
is no limit.
:type max_groups_to_create: int|None
:return: the groups of grouped atoms, with each group taking an estimated target_group_time
:rtype: list[list[app.master.atom.Atom]]
"""
subjobs = []
subjob_time_so_far = 0
subjob_atoms = []
while (max_groups_to_create is None or len(subjobs) < max_groups_to_create) and len(sorted_atom_time_dict) > 0:
            # Iterate over a snapshot so the pop() calls below cannot invalidate the iterator.
            for atom, time in list(sorted_atom_time_dict.items()):
if len(subjob_atoms) == 0 or (time + subjob_time_so_far) <= target_group_time:
subjob_time_so_far += time
subjob_atoms.append(atom)
sorted_atom_time_dict.pop(atom)
# If (number of subjobs created so far + atoms left) is less than or equal to the total number of
# subjobs we need to create, then have each remaining atom be a subjob and return.
# The "+ 1" is here to account for the current subjob being generated, but that hasn't been
# appended to subjobs yet.
if max_groups_to_create is not None and (len(subjobs) + len(sorted_atom_time_dict) + 1) <= max_groups_to_create:
subjobs.append(subjob_atoms)
                        for atom, _ in list(sorted_atom_time_dict.items()):
sorted_atom_time_dict.pop(atom)
subjobs.append([atom])
return subjobs
subjobs.append(subjob_atoms)
subjob_atoms = []
subjob_time_so_far = 0
return subjobs
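    # Editor's note, illustrative trace of the greedy pass above: with
    # target_group_time=20 and atom times [15, 10, 8, 3] (sorted descending),
    # the first pass over the dict takes 15 and then 3 (15 + 3 <= 20) into one
    # bucket; the next pass takes 10 and 8, yielding two ~18s subjobs.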
class _AtomTimingDataError(Exception):
"""
An exception to represent the case where the atom timing data is either not present or incorrect.
"""
| apache-2.0 | -6,812,826,231,849,662,000 | 5,528,387,585,956,611,000 | 53.097561 | 132 | 0.622813 | false |
bgxavier/nova | nova/tests/unit/virt/hyperv/test_networkutils.py | 68 | 3245 | # Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nova import test
from nova.virt.hyperv import networkutils
from nova.virt.hyperv import vmutils
class NetworkUtilsTestCase(test.NoDBTestCase):
"""Unit tests for the Hyper-V NetworkUtils class."""
_FAKE_PORT = {'Name': mock.sentinel.FAKE_PORT_NAME}
_FAKE_RET_VALUE = 0
_MSVM_VIRTUAL_SWITCH = 'Msvm_VirtualSwitch'
def setUp(self):
self._networkutils = networkutils.NetworkUtils()
self._networkutils._conn = mock.MagicMock()
super(NetworkUtilsTestCase, self).setUp()
def test_get_external_vswitch(self):
mock_vswitch = mock.MagicMock()
mock_vswitch.path_.return_value = mock.sentinel.FAKE_VSWITCH_PATH
getattr(self._networkutils._conn,
self._MSVM_VIRTUAL_SWITCH).return_value = [mock_vswitch]
switch_path = self._networkutils.get_external_vswitch(
mock.sentinel.FAKE_VSWITCH_NAME)
self.assertEqual(mock.sentinel.FAKE_VSWITCH_PATH, switch_path)
def test_get_external_vswitch_not_found(self):
self._networkutils._conn.Msvm_VirtualEthernetSwitch.return_value = []
self.assertRaises(vmutils.HyperVException,
self._networkutils.get_external_vswitch,
mock.sentinel.FAKE_VSWITCH_NAME)
def test_get_external_vswitch_no_name(self):
mock_vswitch = mock.MagicMock()
mock_vswitch.path_.return_value = mock.sentinel.FAKE_VSWITCH_PATH
mock_ext_port = self._networkutils._conn.Msvm_ExternalEthernetPort()[0]
self._prepare_external_port(mock_vswitch, mock_ext_port)
switch_path = self._networkutils.get_external_vswitch(None)
self.assertEqual(mock.sentinel.FAKE_VSWITCH_PATH, switch_path)
def _prepare_external_port(self, mock_vswitch, mock_ext_port):
mock_lep = mock_ext_port.associators()[0]
mock_lep.associators.return_value = [mock_vswitch]
def test_create_vswitch_port(self):
svc = self._networkutils._conn.Msvm_VirtualSwitchManagementService()[0]
svc.CreateSwitchPort.return_value = (
self._FAKE_PORT, self._FAKE_RET_VALUE)
port = self._networkutils.create_vswitch_port(
mock.sentinel.FAKE_VSWITCH_PATH, mock.sentinel.FAKE_PORT_NAME)
svc.CreateSwitchPort.assert_called_once_with(
Name=mock.ANY, FriendlyName=mock.sentinel.FAKE_PORT_NAME,
ScopeOfResidence="", VirtualSwitch=mock.sentinel.FAKE_VSWITCH_PATH)
self.assertEqual(self._FAKE_PORT, port)
def test_vswitch_port_needed(self):
self.assertTrue(self._networkutils.vswitch_port_needed())
| apache-2.0 | -6,777,010,906,554,174,000 | -486,861,760,068,570,940 | 38.573171 | 79 | 0.686595 | false |
NeCTAR-RC/nova | nova/tests/unit/api/openstack/compute/test_services.py | 9 | 39890 | # Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import datetime
import iso8601
import mock
from oslo_utils import fixture as utils_fixture
import webob.exc
from nova.api.openstack import api_version_request as api_version
from nova.api.openstack.compute.legacy_v2.contrib import services \
as services_v2
from nova.api.openstack.compute import services as services_v21
from nova.api.openstack import extensions
from nova.api.openstack import wsgi as os_wsgi
from nova import availability_zones
from nova.cells import utils as cells_utils
from nova.compute import cells_api
from nova import context
from nova import exception
from nova import objects
from nova.servicegroup.drivers import db as db_driver
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit.objects import test_service
fake_services_list = [
dict(test_service.fake_service,
binary='nova-scheduler',
host='host1',
id=1,
disabled=True,
topic='scheduler',
updated_at=datetime.datetime(2012, 10, 29, 13, 42, 2),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 27),
last_seen_up=datetime.datetime(2012, 10, 29, 13, 42, 2),
forced_down=False,
disabled_reason='test1'),
dict(test_service.fake_service,
binary='nova-compute',
host='host1',
id=2,
disabled=True,
topic='compute',
updated_at=datetime.datetime(2012, 10, 29, 13, 42, 5),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 27),
last_seen_up=datetime.datetime(2012, 10, 29, 13, 42, 5),
forced_down=False,
disabled_reason='test2'),
dict(test_service.fake_service,
binary='nova-scheduler',
host='host2',
id=3,
disabled=False,
topic='scheduler',
updated_at=datetime.datetime(2012, 9, 19, 6, 55, 34),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 28),
last_seen_up=datetime.datetime(2012, 9, 19, 6, 55, 34),
forced_down=False,
disabled_reason=None),
dict(test_service.fake_service,
binary='nova-compute',
host='host2',
id=4,
disabled=True,
topic='compute',
updated_at=datetime.datetime(2012, 9, 18, 8, 3, 38),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 28),
last_seen_up=datetime.datetime(2012, 9, 18, 8, 3, 38),
forced_down=False,
disabled_reason='test4'),
# NOTE(rpodolyaka): API services are special case and must be filtered out
dict(test_service.fake_service,
binary='nova-osapi_compute',
host='host2',
id=5,
disabled=False,
topic=None,
updated_at=None,
created_at=datetime.datetime(2012, 9, 18, 2, 46, 28),
last_seen_up=None,
forced_down=False,
disabled_reason=None),
dict(test_service.fake_service,
binary='nova-metadata',
host='host2',
id=6,
disabled=False,
topic=None,
updated_at=None,
created_at=datetime.datetime(2012, 9, 18, 2, 46, 28),
last_seen_up=None,
forced_down=False,
disabled_reason=None),
]
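# Note: with fake_utcnow() pinned to 2012-10-29 13:42:11, the two host1 services
# (updated seconds earlier) are reported as state 'up' in the tests below, while
# the two host2 services (last updated in September) are reported as 'down'. The
# nova-osapi_compute and nova-metadata rows exist only to check that API-level
# services are filtered out of the listings.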
class FakeRequest(object):
environ = {"nova.context": context.get_admin_context()}
GET = {}
def __init__(self, version=os_wsgi.DEFAULT_API_VERSION): # version='2.1'):
super(FakeRequest, self).__init__()
self.api_version_request = api_version.APIVersionRequest(version)
class FakeRequestWithService(FakeRequest):
GET = {"binary": "nova-compute"}
class FakeRequestWithHost(FakeRequest):
GET = {"host": "host1"}
class FakeRequestWithHostService(FakeRequest):
GET = {"host": "host1", "binary": "nova-compute"}
def fake_service_get_all(services):
def service_get_all(context, filters=None, set_zones=False):
if set_zones or 'availability_zone' in filters:
return availability_zones.set_availability_zones(context,
services)
return services
return service_get_all
def fake_db_api_service_get_all(context, disabled=None):
return fake_services_list
def fake_db_service_get_by_host_binary(services):
def service_get_by_host_binary(context, host, binary):
for service in services:
if service['host'] == host and service['binary'] == binary:
return service
raise exception.HostBinaryNotFound(host=host, binary=binary)
return service_get_by_host_binary
def fake_service_get_by_host_binary(context, host, binary):
fake = fake_db_service_get_by_host_binary(fake_services_list)
return fake(context, host, binary)
def _service_get_by_id(services, value):
for service in services:
if service['id'] == value:
return service
return None
def fake_db_service_update(services):
def service_update(context, service_id, values):
service = _service_get_by_id(services, service_id)
if service is None:
raise exception.ServiceNotFound(service_id=service_id)
service = copy.deepcopy(service)
service.update(values)
return service
return service_update
def fake_service_update(context, service_id, values):
fake = fake_db_service_update(fake_services_list)
return fake(context, service_id, values)
def fake_utcnow():
return datetime.datetime(2012, 10, 29, 13, 42, 11)
class ServicesTestV21(test.TestCase):
service_is_up_exc = webob.exc.HTTPInternalServerError
bad_request = exception.ValidationError
wsgi_api_version = os_wsgi.DEFAULT_API_VERSION
def _set_up_controller(self):
self.controller = services_v21.ServiceController()
def setUp(self):
super(ServicesTestV21, self).setUp()
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self._set_up_controller()
self.controller.host_api.service_get_all = (
mock.Mock(side_effect=fake_service_get_all(fake_services_list)))
self.useFixture(utils_fixture.TimeFixture(fake_utcnow()))
self.stub_out('nova.db.service_get_by_host_and_binary',
fake_db_service_get_by_host_binary(fake_services_list))
self.stub_out('nova.db.service_update',
fake_db_service_update(fake_services_list))
self.req = fakes.HTTPRequest.blank('')
def _process_output(self, services, has_disabled=False, has_id=False):
return services
def test_services_list(self):
req = FakeRequest()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'id': 1,
'state': 'up',
'disabled_reason': 'test1',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'disabled_reason': 'test2',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-scheduler',
'host': 'host2',
'zone': 'internal',
'id': 3,
'status': 'enabled',
'disabled_reason': None,
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'id': 4,
'status': 'disabled',
'disabled_reason': 'test4',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_host(self):
req = FakeRequestWithHost()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'disabled_reason': 'test1',
'id': 1,
'zone': 'internal',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'disabled_reason': 'test2',
'id': 2,
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_service(self):
req = FakeRequestWithService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'disabled_reason': 'test2',
'id': 2,
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'disabled_reason': 'test4',
'id': 4,
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_host_service(self):
req = FakeRequestWithHostService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'disabled_reason': 'test2',
'id': 2,
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_detail(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequest()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'id': 1,
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'disabled_reason': 'test1'},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'id': 2,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'},
{'binary': 'nova-scheduler',
'host': 'host2',
'zone': 'internal',
'status': 'enabled',
'id': 3,
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34),
'disabled_reason': None},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'id': 4,
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38),
'disabled_reason': 'test4'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_host(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithHost()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'id': 1,
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'disabled_reason': 'test1'},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_service(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'},
{'binary': 'nova-compute',
'host': 'host2',
'id': 4,
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38),
'disabled_reason': 'test4'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_host_service(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithHostService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'id': 2,
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
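    # With os-extended-services-delete enabled the service 'id' is also
    # expected in the output (has_id=True).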
def test_services_detail_with_delete_extension(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
req = FakeRequest()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'id': 1,
'zone': 'internal',
'disabled_reason': 'test1',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'id': 2,
'zone': 'nova',
'disabled_reason': 'test2',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-scheduler',
'host': 'host2',
'disabled_reason': None,
'id': 3,
'zone': 'internal',
'status': 'enabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
{'binary': 'nova-compute',
'host': 'host2',
'id': 4,
'disabled_reason': 'test4',
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response, has_id=True)
self.assertEqual(res_dict, response)
def test_services_enable(self):
def _service_update(context, service_id, values):
self.assertIsNone(values['disabled_reason'])
return dict(test_service.fake_service, id=service_id, **values)
self.stub_out('nova.db.service_update', _service_update)
body = {'host': 'host1', 'binary': 'nova-compute'}
res_dict = self.controller.update(self.req, "enable", body=body)
self.assertEqual(res_dict['service']['status'], 'enabled')
self.assertNotIn('disabled_reason', res_dict['service'])
def test_services_enable_with_invalid_host(self):
body = {'host': 'invalid', 'binary': 'nova-compute'}
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update,
self.req,
"enable",
body=body)
def test_services_enable_with_invalid_binary(self):
body = {'host': 'host1', 'binary': 'invalid'}
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update,
self.req,
"enable",
body=body)
def test_services_disable(self):
body = {'host': 'host1', 'binary': 'nova-compute'}
res_dict = self.controller.update(self.req, "disable", body=body)
self.assertEqual(res_dict['service']['status'], 'disabled')
self.assertNotIn('disabled_reason', res_dict['service'])
def test_services_disable_with_invalid_host(self):
body = {'host': 'invalid', 'binary': 'nova-compute'}
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update,
self.req,
"disable",
body=body)
def test_services_disable_with_invalid_binary(self):
body = {'host': 'host1', 'binary': 'invalid'}
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update,
self.req,
"disable",
body=body)
def test_services_disable_log_reason(self):
self.ext_mgr.extensions['os-extended-services'] = True
body = {'host': 'host1',
'binary': 'nova-compute',
'disabled_reason': 'test-reason',
}
res_dict = self.controller.update(self.req,
"disable-log-reason",
body=body)
self.assertEqual(res_dict['service']['status'], 'disabled')
self.assertEqual(res_dict['service']['disabled_reason'], 'test-reason')
def test_mandatory_reason_field(self):
self.ext_mgr.extensions['os-extended-services'] = True
body = {'host': 'host1',
'binary': 'nova-compute',
}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update, self.req, "disable-log-reason",
body=body)
def test_invalid_reason_field(self):
self.ext_mgr.extensions['os-extended-services'] = True
reason = 'a' * 256
body = {'host': 'host1',
'binary': 'nova-compute',
'disabled_reason': reason,
}
self.assertRaises(self.bad_request,
self.controller.update, self.req, "disable-log-reason",
body=body)
def test_services_delete(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
with mock.patch.object(self.controller.host_api,
'service_delete') as service_delete:
self.controller.delete(self.req, '1')
service_delete.assert_called_once_with(
self.req.environ['nova.context'], '1')
self.assertEqual(self.controller.delete.wsgi_code, 204)
def test_services_delete_not_found(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.delete, self.req, 1234)
def test_services_delete_bad_request(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.delete, self.req, 'abc')
    # This test only verifies that the servicegroup API is consulted when
    # the services listing is requested.
@mock.patch.object(db_driver.DbDriver, 'is_up', side_effect=KeyError)
def test_services_with_exception(self, mock_is_up):
req = FakeRequestWithHostService()
self.assertRaises(self.service_is_up_exc, self.controller.index, req)
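# Microversion 2.11 adds the 'forced_down' flag to service responses, so the
# expected payloads below repeat the 2.1 cases with that extra field.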
class ServicesTestV211(ServicesTestV21):
wsgi_api_version = '2.11'
def test_services_list(self):
req = FakeRequest(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'id': 1,
'state': 'up',
'forced_down': False,
'disabled_reason': 'test1',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'disabled_reason': 'test2',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-scheduler',
'host': 'host2',
'zone': 'internal',
'id': 3,
'status': 'enabled',
'disabled_reason': None,
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'id': 4,
'status': 'disabled',
'disabled_reason': 'test4',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_host(self):
req = FakeRequestWithHost(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'disabled_reason': 'test1',
'id': 1,
'zone': 'internal',
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'disabled_reason': 'test2',
'id': 2,
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_service(self):
req = FakeRequestWithService(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'disabled_reason': 'test2',
'id': 2,
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'disabled_reason': 'test4',
'id': 4,
'status': 'disabled',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_host_service(self):
req = FakeRequestWithHostService(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'disabled_reason': 'test2',
'id': 2,
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_detail(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequest(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'id': 1,
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'disabled_reason': 'test1'},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'id': 2,
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'},
{'binary': 'nova-scheduler',
'host': 'host2',
'zone': 'internal',
'status': 'enabled',
'id': 3,
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34),
'disabled_reason': None},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'id': 4,
'status': 'disabled',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38),
'disabled_reason': 'test4'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_host(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithHost(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'id': 1,
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'disabled_reason': 'test1'},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_service(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithService(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'},
{'binary': 'nova-compute',
'host': 'host2',
'id': 4,
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38),
'disabled_reason': 'test4'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_host_service(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithHostService(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'id': 2,
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_services_detail_with_delete_extension(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
req = FakeRequest(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'id': 1,
'zone': 'internal',
'disabled_reason': 'test1',
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'id': 2,
'zone': 'nova',
'disabled_reason': 'test2',
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-scheduler',
'host': 'host2',
'disabled_reason': None,
'id': 3,
'zone': 'internal',
'status': 'enabled',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
{'binary': 'nova-compute',
'host': 'host2',
'id': 4,
'disabled_reason': 'test4',
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response, has_id=True)
self.assertEqual(res_dict, response)
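# Re-run the scenarios above against the legacy v2 extension controller; the
# overridden class attributes reflect how that controller surfaces errors.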
class ServicesTestV20(ServicesTestV21):
service_is_up_exc = KeyError
bad_request = webob.exc.HTTPBadRequest
def setUp(self):
super(ServicesTestV20, self).setUp()
self.req = fakes.HTTPRequest.blank('', use_admin_context=True)
self.non_admin_req = fakes.HTTPRequest.blank('')
def _set_up_controller(self):
self.controller = services_v2.ServiceController(self.ext_mgr)
def test_services_delete_not_enabled(self):
self.assertRaises(webob.exc.HTTPMethodNotAllowed,
self.controller.delete, self.req, '300')
def _process_output(self, services, has_disabled=False, has_id=False):
for service in services['services']:
if not has_disabled:
service.pop('disabled_reason')
if not has_id:
service.pop('id')
return services
def test_update_with_non_admin(self):
self.assertRaises(exception.AdminRequired, self.controller.update,
self.non_admin_req, fakes.FAKE_UUID, body={})
def test_delete_with_non_admin(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
self.assertRaises(exception.AdminRequired, self.controller.delete,
self.non_admin_req, fakes.FAKE_UUID)
def test_index_with_non_admin(self):
self.assertRaises(exception.AdminRequired, self.controller.index,
self.non_admin_req)
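# With cells enabled, services are proxied through the cells HostAPI and their
# ids and hosts come back prefixed with the cell name (e.g. 'cell1@1').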
class ServicesCellsTestV21(test.TestCase):
def setUp(self):
super(ServicesCellsTestV21, self).setUp()
host_api = cells_api.HostAPI()
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self._set_up_controller()
self.controller.host_api = host_api
self.useFixture(utils_fixture.TimeFixture(fake_utcnow()))
services_list = []
for service in fake_services_list:
service = service.copy()
del service['version']
service_obj = objects.Service(**service)
service_proxy = cells_utils.ServiceProxy(service_obj, 'cell1')
services_list.append(service_proxy)
host_api.cells_rpcapi.service_get_all = (
mock.Mock(side_effect=fake_service_get_all(services_list)))
def _set_up_controller(self):
self.controller = services_v21.ServiceController()
def _process_out(self, res_dict):
for res in res_dict['services']:
res.pop('disabled_reason')
def test_services_detail(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
req = FakeRequest()
res_dict = self.controller.index(req)
utc = iso8601.iso8601.Utc()
response = {'services': [
{'id': 'cell1@1',
'binary': 'nova-scheduler',
'host': 'cell1@host1',
'zone': 'internal',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2,
tzinfo=utc)},
{'id': 'cell1@2',
'binary': 'nova-compute',
'host': 'cell1@host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5,
tzinfo=utc)},
{'id': 'cell1@3',
'binary': 'nova-scheduler',
'host': 'cell1@host2',
'zone': 'internal',
'status': 'enabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34,
tzinfo=utc)},
{'id': 'cell1@4',
'binary': 'nova-compute',
'host': 'cell1@host2',
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38,
tzinfo=utc)}]}
self._process_out(res_dict)
self.assertEqual(response, res_dict)
class ServicesCellsTestV20(ServicesCellsTestV21):
def _set_up_controller(self):
self.controller = services_v2.ServiceController(self.ext_mgr)
def _process_out(self, res_dict):
pass
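# Verify that the os_compute_api:os-services policy rule is enforced for the
# update, delete and index actions.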
class ServicesPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(ServicesPolicyEnforcementV21, self).setUp()
self.controller = services_v21.ServiceController()
self.req = fakes.HTTPRequest.blank('')
def test_update_policy_failed(self):
rule_name = "os_compute_api:os-services"
self.policy.set_rules({rule_name: "project_id:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.update, self.req, fakes.FAKE_UUID,
body={'host': 'host1',
'binary': 'nova-compute'})
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_delete_policy_failed(self):
rule_name = "os_compute_api:os-services"
self.policy.set_rules({rule_name: "project_id:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.delete, self.req, fakes.FAKE_UUID)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_index_policy_failed(self):
rule_name = "os_compute_api:os-services"
self.policy.set_rules({rule_name: "project_id:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.index, self.req)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
| apache-2.0 | -6,164,523,076,786,492,000 | -157,087,939,571,105,180 | 38.06954 | 79 | 0.493758 | false |
Ujjwal29/ansible | test/units/module_utils/test_database.py | 325 | 5737 | import collections
import mock
import os
import re
from nose.tools import eq_
try:
from nose.tools import assert_raises_regexp
except ImportError:
# Python < 2.7
def assert_raises_regexp(expected, regexp, callable, *a, **kw):
try:
callable(*a, **kw)
except expected as e:
if isinstance(regexp, basestring):
regexp = re.compile(regexp)
if not regexp.search(str(e)):
raise Exception('"%s" does not match "%s"' %
(regexp.pattern, str(e)))
else:
if hasattr(expected,'__name__'): excName = expected.__name__
else: excName = str(expected)
raise AssertionError("%s not raised" % excName)
from ansible.module_utils.database import (
pg_quote_identifier,
SQLParseError,
)
# Note: Using nose's generator test cases here so we can't inherit from
# unittest.TestCase
class TestQuotePgIdentifier(object):
# These are all valid strings
# The results are based on interpreting the identifier as a table name
valid = {
# User quoted
'"public.table"': '"public.table"',
'"public"."table"': '"public"."table"',
'"schema test"."table test"': '"schema test"."table test"',
# We quote part
'public.table': '"public"."table"',
'"public".table': '"public"."table"',
'public."table"': '"public"."table"',
'schema test.table test': '"schema test"."table test"',
'"schema test".table test': '"schema test"."table test"',
'schema test."table test"': '"schema test"."table test"',
# Embedded double quotes
'table "test"': '"table ""test"""',
'public."table ""test"""': '"public"."table ""test"""',
'public.table "test"': '"public"."table ""test"""',
'schema "test".table': '"schema ""test"""."table"',
'"schema ""test""".table': '"schema ""test"""."table"',
'"""wat"""."""test"""': '"""wat"""."""test"""',
# Sigh, handle these as well:
'"no end quote': '"""no end quote"',
'schema."table': '"schema"."""table"',
'"schema.table': '"""schema"."table"',
'schema."table.something': '"schema"."""table"."something"',
# Embedded dots
'"schema.test"."table.test"': '"schema.test"."table.test"',
'"schema.".table': '"schema."."table"',
'"schema."."table"': '"schema."."table"',
'schema.".table"': '"schema".".table"',
'"schema".".table"': '"schema".".table"',
'"schema.".".table"': '"schema.".".table"',
# These are valid but maybe not what the user intended
'."table"': '".""table"""',
'table.': '"table."',
}
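    # Each invalid case maps (identifier, id_type) to the error message that
    # pg_quote_identifier is expected to raise for it.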
invalid = {
('test.too.many.dots', 'table'): 'PostgreSQL does not support table with more than 3 dots',
('"test.too".many.dots', 'database'): 'PostgreSQL does not support database with more than 1 dots',
('test.too."many.dots"', 'database'): 'PostgreSQL does not support database with more than 1 dots',
('"test"."too"."many"."dots"', 'database'): "PostgreSQL does not support database with more than 1 dots",
('"test"."too"."many"."dots"', 'schema'): "PostgreSQL does not support schema with more than 2 dots",
('"test"."too"."many"."dots"', 'table'): "PostgreSQL does not support table with more than 3 dots",
('"test"."too"."many"."dots"."for"."column"', 'column'): "PostgreSQL does not support column with more than 4 dots",
('"table "invalid" double quote"', 'table'): 'User escaped identifiers must escape extra quotes',
('"schema "invalid"""."table "invalid"', 'table'): 'User escaped identifiers must escape extra quotes',
('"schema."table"','table'): 'User escaped identifiers must escape extra quotes',
('"schema".', 'table'): 'Identifier name unspecified or unquoted trailing dot',
}
def check_valid_quotes(self, identifier, quoted_identifier):
eq_(pg_quote_identifier(identifier, 'table'), quoted_identifier)
def test_valid_quotes(self):
for identifier in self.valid:
yield self.check_valid_quotes, identifier, self.valid[identifier]
def check_invalid_quotes(self, identifier, id_type, msg):
assert_raises_regexp(SQLParseError, msg, pg_quote_identifier, *(identifier, id_type))
def test_invalid_quotes(self):
for test in self.invalid:
yield self.check_invalid_quotes, test[0], test[1], self.invalid[test]
def test_how_many_dots(self):
eq_(pg_quote_identifier('role', 'role'), '"role"')
assert_raises_regexp(SQLParseError, "PostgreSQL does not support role with more than 1 dots", pg_quote_identifier, *('role.more', 'role'))
eq_(pg_quote_identifier('db', 'database'), '"db"')
assert_raises_regexp(SQLParseError, "PostgreSQL does not support database with more than 1 dots", pg_quote_identifier, *('db.more', 'database'))
eq_(pg_quote_identifier('db.schema', 'schema'), '"db"."schema"')
assert_raises_regexp(SQLParseError, "PostgreSQL does not support schema with more than 2 dots", pg_quote_identifier, *('db.schema.more', 'schema'))
eq_(pg_quote_identifier('db.schema.table', 'table'), '"db"."schema"."table"')
assert_raises_regexp(SQLParseError, "PostgreSQL does not support table with more than 3 dots", pg_quote_identifier, *('db.schema.table.more', 'table'))
eq_(pg_quote_identifier('db.schema.table.column', 'column'), '"db"."schema"."table"."column"')
assert_raises_regexp(SQLParseError, "PostgreSQL does not support column with more than 4 dots", pg_quote_identifier, *('db.schema.table.column.more', 'column'))
| gpl-3.0 | 1,932,165,982,291,614,000 | -7,225,917,246,298,614,000 | 47.618644 | 168 | 0.601883 | false |
Thraxis/pymedusa | sickbeard/server/web/config/notifications.py | 1 | 17493 | # coding=utf-8
"""
Configure notifications
"""
from __future__ import unicode_literals
import os
from tornado.routes import route
import sickbeard
from sickbeard import (
config, logger, ui,
)
from sickrage.helper.common import try_int
from sickrage.helper.encoding import ek
from sickbeard.server.web.core import PageTemplate
from sickbeard.server.web.config.handler import Config
@route('/config/notifications(/?.*)')
class ConfigNotifications(Config):
"""
Handler for notification configuration
"""
def __init__(self, *args, **kwargs):
super(ConfigNotifications, self).__init__(*args, **kwargs)
def index(self):
"""
Render the notification configuration page
"""
t = PageTemplate(rh=self, filename='config_notifications.mako')
return t.render(submenu=self.ConfigMenu(), title='Config - Notifications',
header='Notifications', topmenu='config',
controller='config', action='notifications')
def saveNotifications(self, use_kodi=None, kodi_always_on=None, kodi_notify_onsnatch=None,
kodi_notify_ondownload=None,
kodi_notify_onsubtitledownload=None, kodi_update_onlyfirst=None,
kodi_update_library=None, kodi_update_full=None, kodi_host=None, kodi_username=None,
kodi_password=None,
use_plex_server=None, plex_notify_onsnatch=None, plex_notify_ondownload=None,
plex_notify_onsubtitledownload=None, plex_update_library=None,
plex_server_host=None, plex_server_token=None, plex_client_host=None, plex_server_username=None, plex_server_password=None,
use_plex_client=None, plex_client_username=None, plex_client_password=None,
plex_server_https=None, use_emby=None, emby_host=None, emby_apikey=None,
use_growl=None, growl_notify_onsnatch=None, growl_notify_ondownload=None,
growl_notify_onsubtitledownload=None, growl_host=None, growl_password=None,
use_freemobile=None, freemobile_notify_onsnatch=None, freemobile_notify_ondownload=None,
freemobile_notify_onsubtitledownload=None, freemobile_id=None, freemobile_apikey=None,
use_telegram=None, telegram_notify_onsnatch=None, telegram_notify_ondownload=None,
telegram_notify_onsubtitledownload=None, telegram_id=None, telegram_apikey=None,
use_prowl=None, prowl_notify_onsnatch=None, prowl_notify_ondownload=None,
prowl_notify_onsubtitledownload=None, prowl_api=None, prowl_priority=0,
prowl_show_list=None, prowl_show=None, prowl_message_title=None,
use_twitter=None, twitter_notify_onsnatch=None, twitter_notify_ondownload=None,
twitter_notify_onsubtitledownload=None, twitter_usedm=None, twitter_dmto=None,
use_boxcar2=None, boxcar2_notify_onsnatch=None, boxcar2_notify_ondownload=None,
boxcar2_notify_onsubtitledownload=None, boxcar2_accesstoken=None,
use_pushover=None, pushover_notify_onsnatch=None, pushover_notify_ondownload=None,
pushover_notify_onsubtitledownload=None, pushover_userkey=None, pushover_apikey=None, pushover_device=None, pushover_sound=None,
use_libnotify=None, libnotify_notify_onsnatch=None, libnotify_notify_ondownload=None,
libnotify_notify_onsubtitledownload=None,
use_nmj=None, nmj_host=None, nmj_database=None, nmj_mount=None, use_synoindex=None,
use_nmjv2=None, nmjv2_host=None, nmjv2_dbloc=None, nmjv2_database=None,
use_trakt=None, trakt_username=None, trakt_pin=None,
trakt_remove_watchlist=None, trakt_sync_watchlist=None, trakt_remove_show_from_sickrage=None, trakt_method_add=None,
trakt_start_paused=None, trakt_use_recommended=None, trakt_sync=None, trakt_sync_remove=None,
trakt_default_indexer=None, trakt_remove_serieslist=None, trakt_timeout=None, trakt_blacklist_name=None,
use_synologynotifier=None, synologynotifier_notify_onsnatch=None,
synologynotifier_notify_ondownload=None, synologynotifier_notify_onsubtitledownload=None,
use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None,
pytivo_notify_onsubtitledownload=None, pytivo_update_library=None,
pytivo_host=None, pytivo_share_name=None, pytivo_tivo_name=None,
use_nma=None, nma_notify_onsnatch=None, nma_notify_ondownload=None,
nma_notify_onsubtitledownload=None, nma_api=None, nma_priority=0,
use_pushalot=None, pushalot_notify_onsnatch=None, pushalot_notify_ondownload=None,
pushalot_notify_onsubtitledownload=None, pushalot_authorizationtoken=None,
use_pushbullet=None, pushbullet_notify_onsnatch=None, pushbullet_notify_ondownload=None,
pushbullet_notify_onsubtitledownload=None, pushbullet_api=None, pushbullet_device=None,
pushbullet_device_list=None,
use_email=None, email_notify_onsnatch=None, email_notify_ondownload=None,
email_notify_onsubtitledownload=None, email_host=None, email_port=25, email_from=None,
email_tls=None, email_user=None, email_password=None, email_list=None, email_subject=None, email_show_list=None,
email_show=None):
"""
        Save notification-related settings
"""
results = []
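        # The assignments below copy the submitted form values straight onto
        # the sickbeard settings module; any errors collected in results are
        # logged and shown to the user before redirecting.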
sickbeard.USE_KODI = config.checkbox_to_value(use_kodi)
sickbeard.KODI_ALWAYS_ON = config.checkbox_to_value(kodi_always_on)
sickbeard.KODI_NOTIFY_ONSNATCH = config.checkbox_to_value(kodi_notify_onsnatch)
sickbeard.KODI_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(kodi_notify_ondownload)
sickbeard.KODI_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(kodi_notify_onsubtitledownload)
sickbeard.KODI_UPDATE_LIBRARY = config.checkbox_to_value(kodi_update_library)
sickbeard.KODI_UPDATE_FULL = config.checkbox_to_value(kodi_update_full)
sickbeard.KODI_UPDATE_ONLYFIRST = config.checkbox_to_value(kodi_update_onlyfirst)
sickbeard.KODI_HOST = config.clean_hosts(kodi_host)
sickbeard.KODI_USERNAME = kodi_username
sickbeard.KODI_PASSWORD = kodi_password
sickbeard.USE_PLEX_SERVER = config.checkbox_to_value(use_plex_server)
sickbeard.PLEX_NOTIFY_ONSNATCH = config.checkbox_to_value(plex_notify_onsnatch)
sickbeard.PLEX_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(plex_notify_ondownload)
sickbeard.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(plex_notify_onsubtitledownload)
sickbeard.PLEX_UPDATE_LIBRARY = config.checkbox_to_value(plex_update_library)
sickbeard.PLEX_CLIENT_HOST = config.clean_hosts(plex_client_host)
sickbeard.PLEX_SERVER_HOST = config.clean_hosts(plex_server_host)
sickbeard.PLEX_SERVER_TOKEN = config.clean_host(plex_server_token)
sickbeard.PLEX_SERVER_USERNAME = plex_server_username
if plex_server_password != '*' * len(sickbeard.PLEX_SERVER_PASSWORD):
sickbeard.PLEX_SERVER_PASSWORD = plex_server_password
sickbeard.USE_PLEX_CLIENT = config.checkbox_to_value(use_plex_client)
sickbeard.PLEX_CLIENT_USERNAME = plex_client_username
if plex_client_password != '*' * len(sickbeard.PLEX_CLIENT_PASSWORD):
sickbeard.PLEX_CLIENT_PASSWORD = plex_client_password
sickbeard.PLEX_SERVER_HTTPS = config.checkbox_to_value(plex_server_https)
sickbeard.USE_EMBY = config.checkbox_to_value(use_emby)
sickbeard.EMBY_HOST = config.clean_host(emby_host)
sickbeard.EMBY_APIKEY = emby_apikey
sickbeard.USE_GROWL = config.checkbox_to_value(use_growl)
sickbeard.GROWL_NOTIFY_ONSNATCH = config.checkbox_to_value(growl_notify_onsnatch)
sickbeard.GROWL_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(growl_notify_ondownload)
sickbeard.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(growl_notify_onsubtitledownload)
sickbeard.GROWL_HOST = config.clean_host(growl_host, default_port=23053)
sickbeard.GROWL_PASSWORD = growl_password
sickbeard.USE_FREEMOBILE = config.checkbox_to_value(use_freemobile)
sickbeard.FREEMOBILE_NOTIFY_ONSNATCH = config.checkbox_to_value(freemobile_notify_onsnatch)
sickbeard.FREEMOBILE_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(freemobile_notify_ondownload)
sickbeard.FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(freemobile_notify_onsubtitledownload)
sickbeard.FREEMOBILE_ID = freemobile_id
sickbeard.FREEMOBILE_APIKEY = freemobile_apikey
sickbeard.USE_TELEGRAM = config.checkbox_to_value(use_telegram)
sickbeard.TELEGRAM_NOTIFY_ONSNATCH = config.checkbox_to_value(telegram_notify_onsnatch)
sickbeard.TELEGRAM_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(telegram_notify_ondownload)
sickbeard.TELEGRAM_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(telegram_notify_onsubtitledownload)
sickbeard.TELEGRAM_ID = telegram_id
sickbeard.TELEGRAM_APIKEY = telegram_apikey
sickbeard.USE_PROWL = config.checkbox_to_value(use_prowl)
sickbeard.PROWL_NOTIFY_ONSNATCH = config.checkbox_to_value(prowl_notify_onsnatch)
sickbeard.PROWL_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(prowl_notify_ondownload)
sickbeard.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(prowl_notify_onsubtitledownload)
sickbeard.PROWL_API = prowl_api
sickbeard.PROWL_PRIORITY = prowl_priority
sickbeard.PROWL_MESSAGE_TITLE = prowl_message_title
sickbeard.USE_TWITTER = config.checkbox_to_value(use_twitter)
sickbeard.TWITTER_NOTIFY_ONSNATCH = config.checkbox_to_value(twitter_notify_onsnatch)
sickbeard.TWITTER_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(twitter_notify_ondownload)
sickbeard.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(twitter_notify_onsubtitledownload)
sickbeard.TWITTER_USEDM = config.checkbox_to_value(twitter_usedm)
sickbeard.TWITTER_DMTO = twitter_dmto
sickbeard.USE_BOXCAR2 = config.checkbox_to_value(use_boxcar2)
sickbeard.BOXCAR2_NOTIFY_ONSNATCH = config.checkbox_to_value(boxcar2_notify_onsnatch)
sickbeard.BOXCAR2_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(boxcar2_notify_ondownload)
sickbeard.BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(boxcar2_notify_onsubtitledownload)
sickbeard.BOXCAR2_ACCESSTOKEN = boxcar2_accesstoken
sickbeard.USE_PUSHOVER = config.checkbox_to_value(use_pushover)
sickbeard.PUSHOVER_NOTIFY_ONSNATCH = config.checkbox_to_value(pushover_notify_onsnatch)
sickbeard.PUSHOVER_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(pushover_notify_ondownload)
sickbeard.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(pushover_notify_onsubtitledownload)
sickbeard.PUSHOVER_USERKEY = pushover_userkey
sickbeard.PUSHOVER_APIKEY = pushover_apikey
sickbeard.PUSHOVER_DEVICE = pushover_device
sickbeard.PUSHOVER_SOUND = pushover_sound
sickbeard.USE_LIBNOTIFY = config.checkbox_to_value(use_libnotify)
sickbeard.LIBNOTIFY_NOTIFY_ONSNATCH = config.checkbox_to_value(libnotify_notify_onsnatch)
sickbeard.LIBNOTIFY_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(libnotify_notify_ondownload)
sickbeard.LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(libnotify_notify_onsubtitledownload)
sickbeard.USE_NMJ = config.checkbox_to_value(use_nmj)
sickbeard.NMJ_HOST = config.clean_host(nmj_host)
sickbeard.NMJ_DATABASE = nmj_database
sickbeard.NMJ_MOUNT = nmj_mount
sickbeard.USE_NMJv2 = config.checkbox_to_value(use_nmjv2)
sickbeard.NMJv2_HOST = config.clean_host(nmjv2_host)
sickbeard.NMJv2_DATABASE = nmjv2_database
sickbeard.NMJv2_DBLOC = nmjv2_dbloc
sickbeard.USE_SYNOINDEX = config.checkbox_to_value(use_synoindex)
sickbeard.USE_SYNOLOGYNOTIFIER = config.checkbox_to_value(use_synologynotifier)
sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = config.checkbox_to_value(synologynotifier_notify_onsnatch)
sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(synologynotifier_notify_ondownload)
sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(
synologynotifier_notify_onsubtitledownload)
config.change_USE_TRAKT(use_trakt)
sickbeard.TRAKT_USERNAME = trakt_username
sickbeard.TRAKT_REMOVE_WATCHLIST = config.checkbox_to_value(trakt_remove_watchlist)
sickbeard.TRAKT_REMOVE_SERIESLIST = config.checkbox_to_value(trakt_remove_serieslist)
sickbeard.TRAKT_REMOVE_SHOW_FROM_SICKRAGE = config.checkbox_to_value(trakt_remove_show_from_sickrage)
sickbeard.TRAKT_SYNC_WATCHLIST = config.checkbox_to_value(trakt_sync_watchlist)
sickbeard.TRAKT_METHOD_ADD = int(trakt_method_add)
sickbeard.TRAKT_START_PAUSED = config.checkbox_to_value(trakt_start_paused)
sickbeard.TRAKT_USE_RECOMMENDED = config.checkbox_to_value(trakt_use_recommended)
sickbeard.TRAKT_SYNC = config.checkbox_to_value(trakt_sync)
sickbeard.TRAKT_SYNC_REMOVE = config.checkbox_to_value(trakt_sync_remove)
sickbeard.TRAKT_DEFAULT_INDEXER = int(trakt_default_indexer)
sickbeard.TRAKT_TIMEOUT = int(trakt_timeout)
sickbeard.TRAKT_BLACKLIST_NAME = trakt_blacklist_name
sickbeard.USE_EMAIL = config.checkbox_to_value(use_email)
sickbeard.EMAIL_NOTIFY_ONSNATCH = config.checkbox_to_value(email_notify_onsnatch)
sickbeard.EMAIL_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(email_notify_ondownload)
sickbeard.EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(email_notify_onsubtitledownload)
sickbeard.EMAIL_HOST = config.clean_host(email_host)
sickbeard.EMAIL_PORT = try_int(email_port, 25)
sickbeard.EMAIL_FROM = email_from
sickbeard.EMAIL_TLS = config.checkbox_to_value(email_tls)
sickbeard.EMAIL_USER = email_user
sickbeard.EMAIL_PASSWORD = email_password
sickbeard.EMAIL_LIST = email_list
sickbeard.EMAIL_SUBJECT = email_subject
sickbeard.USE_PYTIVO = config.checkbox_to_value(use_pytivo)
sickbeard.PYTIVO_NOTIFY_ONSNATCH = config.checkbox_to_value(pytivo_notify_onsnatch)
sickbeard.PYTIVO_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(pytivo_notify_ondownload)
sickbeard.PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(pytivo_notify_onsubtitledownload)
sickbeard.PYTIVO_UPDATE_LIBRARY = config.checkbox_to_value(pytivo_update_library)
sickbeard.PYTIVO_HOST = config.clean_host(pytivo_host)
sickbeard.PYTIVO_SHARE_NAME = pytivo_share_name
sickbeard.PYTIVO_TIVO_NAME = pytivo_tivo_name
sickbeard.USE_NMA = config.checkbox_to_value(use_nma)
sickbeard.NMA_NOTIFY_ONSNATCH = config.checkbox_to_value(nma_notify_onsnatch)
sickbeard.NMA_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(nma_notify_ondownload)
sickbeard.NMA_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(nma_notify_onsubtitledownload)
sickbeard.NMA_API = nma_api
sickbeard.NMA_PRIORITY = nma_priority
sickbeard.USE_PUSHALOT = config.checkbox_to_value(use_pushalot)
sickbeard.PUSHALOT_NOTIFY_ONSNATCH = config.checkbox_to_value(pushalot_notify_onsnatch)
sickbeard.PUSHALOT_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(pushalot_notify_ondownload)
sickbeard.PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(pushalot_notify_onsubtitledownload)
sickbeard.PUSHALOT_AUTHORIZATIONTOKEN = pushalot_authorizationtoken
sickbeard.USE_PUSHBULLET = config.checkbox_to_value(use_pushbullet)
sickbeard.PUSHBULLET_NOTIFY_ONSNATCH = config.checkbox_to_value(pushbullet_notify_onsnatch)
sickbeard.PUSHBULLET_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(pushbullet_notify_ondownload)
sickbeard.PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(pushbullet_notify_onsubtitledownload)
sickbeard.PUSHBULLET_API = pushbullet_api
sickbeard.PUSHBULLET_DEVICE = pushbullet_device_list
sickbeard.save_config()
if results:
for x in results:
logger.log(x, logger.ERROR)
ui.notifications.error('Error(s) Saving Configuration',
'<br>\n'.join(results))
else:
ui.notifications.message('Configuration Saved', ek(os.path.join, sickbeard.CONFIG_FILE))
return self.redirect('/config/notifications/')
| gpl-3.0 | 8,004,438,097,058,878,000 | 3,388,531,765,848,241,700 | 64.02974 | 154 | 0.694449 | false |
chhao91/QGIS | python/plugins/processing/gui/CommanderWindow.py | 12 | 9065 | # -*- coding: utf-8 -*-
"""
***************************************************************************
CommanderWindow.py
---------------------
Date : April 2013
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'April 2013'
__copyright__ = '(C) 2013, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import types
import os
import imp
from PyQt4.QtCore import Qt, QSize
from PyQt4.QtGui import QDialog, QLabel, QSpacerItem, QHBoxLayout, QVBoxLayout, QSizePolicy, QComboBox, QCompleter, QSortFilterProxyModel
from qgis.utils import iface
from processing.core.Processing import Processing
from processing.gui.MessageDialog import MessageDialog
from processing.gui.AlgorithmDialog import AlgorithmDialog
from processing.tools.system import userFolder, mkdir
ITEMHEIGHT = 30
OFFSET = 20
HEIGHT = 60
class CommanderWindow(QDialog):
def __init__(self, parent, canvas):
self.canvas = canvas
QDialog.__init__(self, parent, Qt.FramelessWindowHint)
self.commands = imp.load_source('commands', self.commandsFile())
self.initGui()
def commandsFolder(self):
folder = unicode(os.path.join(userFolder(), 'commander'))
mkdir(folder)
return os.path.abspath(folder)
def commandsFile(self):
f = os.path.join(self.commandsFolder(), 'commands.py')
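        # On first use, seed commands.py with a couple of example commands
        # (removeall and load) that can be invoked from the commander.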
if not os.path.exists(f):
out = open(f, 'w')
out.write('from qgis.core import *\n')
out.write('import processing\n\n')
out.write('def removeall():\n')
out.write('\tmapreg = QgsMapLayerRegistry.instance()\n')
out.write('\tmapreg.removeAllMapLayers()\n\n')
out.write('def load(*args):\n')
out.write('\tprocessing.load(args[0])\n')
out.close()
return f
def algsListHasChanged(self):
self.fillCombo()
def initGui(self):
self.combo = ExtendedComboBox()
self.fillCombo()
self.combo.setEditable(True)
self.label = QLabel('Enter command:')
self.errorLabel = QLabel('Enter command:')
self.vlayout = QVBoxLayout()
self.vlayout.setSpacing(2)
self.vlayout.setMargin(0)
self.vlayout.addSpacerItem(QSpacerItem(0, OFFSET,
QSizePolicy.Maximum, QSizePolicy.Expanding))
self.hlayout = QHBoxLayout()
self.hlayout.addWidget(self.label)
self.vlayout.addLayout(self.hlayout)
self.hlayout2 = QHBoxLayout()
self.hlayout2.addWidget(self.combo)
self.vlayout.addLayout(self.hlayout2)
self.vlayout.addSpacerItem(QSpacerItem(0, OFFSET,
QSizePolicy.Maximum, QSizePolicy.Expanding))
self.setLayout(self.vlayout)
self.combo.lineEdit().returnPressed.connect(self.run)
self.prepareGui()
def fillCombo(self):
self.combo.clear()
# Add algorithms
for providerName in Processing.algs.keys():
provider = Processing.algs[providerName]
algs = provider.values()
for alg in algs:
self.combo.addItem('Processing algorithm: ' + alg.name)
# Add functions
for command in dir(self.commands):
if isinstance(self.commands.__dict__.get(command),
types.FunctionType):
self.combo.addItem('Command: ' + command)
#Add menu entries
menuActions = []
actions = iface.mainWindow().menuBar().actions()
for action in actions:
menuActions.extend(self.getActions(action))
for action in menuActions:
self.combo.addItem('Menu action: ' + unicode(action.text()))
def prepareGui(self):
self.combo.setEditText('')
self.combo.setMaximumSize(QSize(self.canvas.rect().width() - 2 * OFFSET, ITEMHEIGHT))
self.combo.view().setStyleSheet('min-height: 150px')
self.combo.setFocus(Qt.OtherFocusReason)
self.label.setMaximumSize(self.combo.maximumSize())
self.label.setVisible(False)
self.adjustSize()
pt = self.canvas.rect().topLeft()
absolutePt = self.canvas.mapToGlobal(pt)
self.move(absolutePt)
self.resize(self.canvas.rect().width(), HEIGHT)
self.setStyleSheet('CommanderWindow {background-color: #e7f5fe; \
border: 1px solid #b9cfe4;}')
def getActions(self, action):
menuActions = []
menu = action.menu()
if menu is None:
menuActions.append(action)
return menuActions
else:
actions = menu.actions()
for subaction in actions:
if subaction.menu() is not None:
menuActions.extend(self.getActions(subaction))
elif not subaction.isSeparator():
menuActions.append(subaction)
return menuActions
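    # Dispatch the typed text: entries prefixed with 'Processing algorithm:',
    # 'Command:' or 'Menu action:' are handled specially; anything else is
    # executed as a raw command expression.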
def run(self):
s = unicode(self.combo.currentText())
if s.startswith('Processing algorithm: '):
algName = s[len('Processing algorithm: '):]
alg = Processing.getAlgorithmFromFullName(algName)
if alg is not None:
self.close()
self.runAlgorithm(alg)
elif s.startswith("Command: "):
command = s[len("Command: "):]
try:
self.runCommand(command)
self.close()
except Exception as e:
self.label.setVisible(True)
self.label.setText('Error:' + unicode(e))
elif s.startswith('Menu action: '):
actionName = s[len('Menu action: '):]
menuActions = []
actions = iface.mainWindow().menuBar().actions()
for action in actions:
menuActions.extend(self.getActions(action))
for action in menuActions:
if action.text() == actionName:
self.close()
action.trigger()
return
else:
try:
self.runCommand(s)
self.close()
except Exception as e:
self.label.setVisible(True)
self.label.setText('Error:' + unicode(e))
def runCommand(self, command):
tokens = command.split(' ')
if len(tokens) == 1:
method = self.commands.__dict__.get(command)
if method is not None:
method()
else:
raise Exception('Wrong command')
else:
method = self.commands.__dict__.get(tokens[0])
if method is not None:
method(*tokens[1:])
else:
raise Exception('Wrong command')
def runAlgorithm(self, alg):
alg = alg.getCopy()
message = alg.checkBeforeOpeningParametersDialog()
if message:
dlg = MessageDialog()
dlg.setTitle(self.tr('Missing dependency'))
dlg.setMessage(message)
dlg.exec_()
return
dlg = alg.getCustomParametersDialog()
if not dlg:
dlg = AlgorithmDialog(alg)
canvas = iface.mapCanvas()
prevMapTool = canvas.mapTool()
dlg.show()
dlg.exec_()
if canvas.mapTool() != prevMapTool:
try:
canvas.mapTool().reset()
except:
pass
canvas.setMapTool(prevMapTool)
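# Combo box with case-insensitive, substring-filtered autocompletion, used as
# the command entry field above.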
class ExtendedComboBox(QComboBox):
def __init__(self, parent=None):
super(ExtendedComboBox, self).__init__(parent)
self.setFocusPolicy(Qt.StrongFocus)
self.setEditable(True)
self.pFilterModel = QSortFilterProxyModel(self)
self.pFilterModel.setFilterCaseSensitivity(Qt.CaseInsensitive)
self.pFilterModel.setSourceModel(self.model())
self.completer = QCompleter(self.pFilterModel, self)
self.completer.setCompletionMode(QCompleter.UnfilteredPopupCompletion)
self.completer.popup().setStyleSheet('min-height: 150px')
self.completer.popup().setAlternatingRowColors(True)
self.setCompleter(self.completer)
self.lineEdit().textEdited[unicode].connect(self.pFilterModel.setFilterFixedString)
| gpl-2.0 | -6,762,247,401,939,053,000 | 4,139,179,256,543,781,400 | 36.458678 | 137 | 0.559846 | false |
poliastro/poliastro | tests/test_bodies.py | 1 | 2126 | import pytest
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from poliastro.bodies import Body, Earth, Jupiter, Sun
def test_body_has_k_given_in_constructor():
k = 3.98e5 * u.km ** 3 / u.s ** 2
earth = Body(None, k, "")
assert earth.k == k
def test_body_from_parameters_raises_valueerror_if_k_units_not_correct():
wrong_k = 4902.8 * u.kg
_name = _symbol = ""
_R = 0
with pytest.raises(u.UnitsError) as excinfo:
Body.from_parameters(None, wrong_k, _name, _symbol, _R)
assert (
"UnitsError: Argument 'k' to function 'from_parameters' must be in units convertible to 'km3 / s2'."
in excinfo.exconly()
)
def test_body_from_parameters_returns_body_object():
k = 1.26712763e17 * u.m ** 3 / u.s ** 2
R = 71492000 * u.m
_name = _symbol = "jupiter"
jupiter = Body.from_parameters(Sun, k, _name, _symbol, Jupiter.R)
assert jupiter.k == k
assert jupiter.R == R
def test_body_printing_has_name_and_symbol():
name = "2 Pallas"
symbol = u"\u26b4"
k = 1.41e10 * u.m ** 3 / u.s ** 2
pallas2 = Body(None, k, name, symbol)
assert name in str(pallas2)
assert symbol in str(pallas2)
def test_earth_has_k_given_in_literature():
expected_k = 3.986004418e14 * u.m ** 3 / u.s ** 2
k = Earth.k
assert_quantity_allclose(k.decompose([u.km, u.s]), expected_k)
def test_earth_has_angular_velocity_given_in_literature():
expected_k = 7.292114e-5 * u.rad / u.s
k = Earth.angular_velocity
assert_quantity_allclose(k.decompose([u.rad, u.s]), expected_k)
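# Body.from_relative() takes k and R as multiples of the reference body's
# values, as the assertions against Earth below demonstrate.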
def test_from_relative():
TRAPPIST1 = Body.from_relative(
reference=Sun,
parent=None,
k=0.08, # Relative to the Sun
name="TRAPPIST",
symbol=None,
R=0.114,
) # Relative to the Sun
# Check values properly calculated
VALUECHECK = Body.from_relative(
reference=Earth,
parent=TRAPPIST1,
k=1,
name="VALUECHECK",
symbol=None,
R=1,
)
assert Earth.k == VALUECHECK.k
assert Earth.R == VALUECHECK.R
| mit | 1,188,030,122,400,297,700 | -315,680,634,841,359,500 | 26.61039 | 108 | 0.61571 | false |
cyx1231st/nova | nova/objects/build_request.py | 6 | 6724 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
import six
from nova.db.sqlalchemy import api as db
from nova.db.sqlalchemy import api_models
from nova import exception
from nova.i18n import _LE
from nova import objects
from nova.objects import base
from nova.objects import fields
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
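# Field groups that need special (de)serialization when converting between the
# BuildRequest object and its database representation.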
OBJECT_FIELDS = ['info_cache', 'security_groups']
JSON_FIELDS = ['instance_metadata']
IP_FIELDS = ['access_ip_v4', 'access_ip_v6']
@base.NovaObjectRegistry.register
class BuildRequest(base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'id': fields.IntegerField(),
'project_id': fields.StringField(),
'user_id': fields.StringField(),
'display_name': fields.StringField(nullable=True),
'instance_metadata': fields.DictOfStringsField(nullable=True),
'progress': fields.IntegerField(nullable=True),
'vm_state': fields.StringField(nullable=True),
'task_state': fields.StringField(nullable=True),
'image_ref': fields.StringField(nullable=True),
'access_ip_v4': fields.IPV4AddressField(nullable=True),
'access_ip_v6': fields.IPV6AddressField(nullable=True),
'info_cache': fields.ObjectField('InstanceInfoCache', nullable=True),
'security_groups': fields.ObjectField('SecurityGroupList'),
'config_drive': fields.BooleanField(default=False),
'key_name': fields.StringField(nullable=True),
'locked_by': fields.EnumField(['owner', 'admin'], nullable=True),
'request_spec': fields.ObjectField('RequestSpec'),
# NOTE(alaski): Normally these would come from the NovaPersistentObject
# mixin but they're being set explicitly because we only need
# created_at/updated_at. There is no soft delete for this object.
# These fields should be carried over to the instance when it is
# scheduled and created in a cell database.
'created_at': fields.DateTimeField(nullable=True),
'updated_at': fields.DateTimeField(nullable=True),
}
def _load_request_spec(self, db_spec):
self.request_spec = objects.RequestSpec._from_db_object(self._context,
objects.RequestSpec(), db_spec)
def _load_info_cache(self, db_info_cache):
self.info_cache = objects.InstanceInfoCache.obj_from_primitive(
jsonutils.loads(db_info_cache))
def _load_security_groups(self, db_sec_group):
self.security_groups = objects.SecurityGroupList.obj_from_primitive(
jsonutils.loads(db_sec_group))
@staticmethod
def _from_db_object(context, req, db_req):
for key in req.fields:
if isinstance(req.fields[key], fields.ObjectField):
try:
getattr(req, '_load_%s' % key)(db_req[key])
except AttributeError:
LOG.exception(_LE('No load handler for %s'), key)
elif key in JSON_FIELDS and db_req[key] is not None:
setattr(req, key, jsonutils.loads(db_req[key]))
else:
setattr(req, key, db_req[key])
req.obj_reset_changes()
req._context = context
return req
@staticmethod
@db.api_context_manager.reader
def _get_by_instance_uuid_from_db(context, instance_uuid):
db_req = (context.session.query(api_models.BuildRequest)
.join(api_models.RequestSpec)
.with_entities(api_models.BuildRequest,
api_models.RequestSpec)
.filter(
api_models.RequestSpec.instance_uuid == instance_uuid)
).first()
if not db_req:
raise exception.BuildRequestNotFound(uuid=instance_uuid)
# db_req is a tuple (api_models.BuildRequest, api_models.RequestSpect)
build_req = db_req[0]
build_req['request_spec'] = db_req[1]
return build_req
@base.remotable_classmethod
def get_by_instance_uuid(cls, context, instance_uuid):
db_req = cls._get_by_instance_uuid_from_db(context, instance_uuid)
return cls._from_db_object(context, cls(), db_req)
@staticmethod
@db.api_context_manager.writer
def _create_in_db(context, updates):
db_req = api_models.BuildRequest()
db_req.update(updates)
db_req.save(context.session)
# NOTE: This is done because a later access will trigger a lazy load
# outside of the db session so it will fail. We don't lazy load
# request_spec on the object later because we never need a BuildRequest
# without the RequestSpec.
db_req.request_spec
return db_req
def _get_update_primitives(self):
updates = self.obj_get_changes()
for key, value in six.iteritems(updates):
if key in OBJECT_FIELDS and value is not None:
updates[key] = jsonutils.dumps(value.obj_to_primitive())
elif key in JSON_FIELDS and value is not None:
updates[key] = jsonutils.dumps(value)
elif key in IP_FIELDS and value is not None:
# These are stored as a string in the db and must be converted
updates[key] = str(value)
req_spec_obj = updates.pop('request_spec', None)
if req_spec_obj:
updates['request_spec_id'] = req_spec_obj.id
return updates
@base.remotable
def create(self):
if self.obj_attr_is_set('id'):
raise exception.ObjectActionError(action='create',
reason='already created')
updates = self._get_update_primitives()
db_req = self._create_in_db(self._context, updates)
self._from_db_object(self._context, self, db_req)
@staticmethod
@db.api_context_manager.writer
def _destroy_in_db(context, id):
context.session.query(api_models.BuildRequest).filter_by(
id=id).delete()
@base.remotable
def destroy(self):
self._destroy_in_db(self._context, self.id)
| apache-2.0 | -8,048,832,191,466,337,000 | 1,053,935,824,634,759,600 | 40.506173 | 79 | 0.637567 | false |
eddyb/servo | tests/wpt/web-platform-tests/mathml/tools/stacks.py | 92 | 2243 | #!/usr/bin/python
from utils import mathfont
import fontforge
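# Each block below generates a test font that isolates one OpenType MATH
# constant used for stack layout: that constant is set to a distinct multiple
# of em while all the other stack parameters are zeroed.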
v = 7 * mathfont.em
f = mathfont.create("stack-axisheight%d" % v)
f.math.AxisHeight = v
f.math.StackBottomDisplayStyleShiftDown = 0
f.math.StackBottomShiftDown = 0
f.math.StackDisplayStyleGapMin = 0
f.math.StackGapMin = 0
f.math.StackTopDisplayStyleShiftUp = 0
f.math.StackTopShiftUp = 0
mathfont.save(f)
v = 5 * mathfont.em
f = mathfont.create("stack-bottomdisplaystyleshiftdown%d" % v)
f.math.AxisHeight = 0
f.math.StackBottomDisplayStyleShiftDown = v
f.math.StackBottomShiftDown = 0
f.math.StackDisplayStyleGapMin = 0
f.math.StackGapMin = 0
f.math.StackTopDisplayStyleShiftUp = 0
f.math.StackTopShiftUp = 0
mathfont.save(f)
v = 6 * mathfont.em
f = mathfont.create("stack-bottomshiftdown%d" % v)
f.math.AxisHeight = 0
f.math.StackBottomDisplayStyleShiftDown = 0
f.math.StackBottomShiftDown = v
f.math.StackDisplayStyleGapMin = 0
f.math.StackGapMin = 0
f.math.StackTopDisplayStyleShiftUp = 0
f.math.StackTopShiftUp = 0
mathfont.save(f)
v = 4 * mathfont.em
f = mathfont.create("stack-displaystylegapmin%d" % v)
f.math.AxisHeight = 0
f.math.StackBottomDisplayStyleShiftDown = 0
f.math.StackBottomShiftDown = 0
f.math.StackDisplayStyleGapMin = v
f.math.StackGapMin = 0
f.math.StackTopDisplayStyleShiftUp = 0
f.math.StackTopShiftUp = 0
mathfont.save(f)
v = 8 * mathfont.em
f = mathfont.create("stack-gapmin%d" % v)
f.math.AxisHeight = 0
f.math.StackBottomDisplayStyleShiftDown = 0
f.math.StackBottomShiftDown = 0
f.math.StackDisplayStyleGapMin = 0
f.math.StackGapMin = v
f.math.StackTopDisplayStyleShiftUp = 0
f.math.StackTopShiftUp = 0
mathfont.save(f)
v = 3 * mathfont.em
f = mathfont.create("stack-topdisplaystyleshiftup%d" % v)
f.math.AxisHeight = 0
f.math.StackBottomDisplayStyleShiftDown = 0
f.math.StackBottomShiftDown = 0
f.math.StackDisplayStyleGapMin = 0
f.math.StackGapMin = 0
f.math.StackTopDisplayStyleShiftUp = v
f.math.StackTopShiftUp = 0
mathfont.save(f)
v = 9 * mathfont.em
f = mathfont.create("stack-topshiftup%d" % v)
f.math.AxisHeight = 0
f.math.StackBottomDisplayStyleShiftDown = 0
f.math.StackBottomShiftDown = 0
f.math.StackDisplayStyleGapMin = 0
f.math.StackGapMin = 0
f.math.StackTopDisplayStyleShiftUp = 0
f.math.StackTopShiftUp = v
mathfont.save(f)
| mpl-2.0 | -2,216,101,626,400,674,300 | 4,806,325,379,290,747,000 | 26.691358 | 62 | 0.778868 | false |
lferr/charm | charm/test/schemes/dabenc_test.py | 1 | 3191 | from charm.schemes.dabe_aw11 import Dabe
from charm.adapters.dabenc_adapt_hybrid import HybridABEncMA
from charm.toolbox.pairinggroup import PairingGroup, GT
import unittest
debug = False
class DabeTest(unittest.TestCase):
def testDabe(self):
groupObj = PairingGroup('SS512')
dabe = Dabe(groupObj)
GP = dabe.setup()
#Setup an authority
        auth_attrs = ['ONE', 'TWO', 'THREE', 'FOUR']
(SK, PK) = dabe.authsetup(GP, auth_attrs)
if debug: print("Authority SK")
if debug: print(SK)
#Setup a user and give him some keys
gid, K = "bob", {}
usr_attrs = ['THREE', 'ONE', 'TWO']
for i in usr_attrs: dabe.keygen(GP, SK, i, gid, K)
if debug: print('User credential list: %s' % usr_attrs)
if debug: print("\nSecret key:")
if debug: groupObj.debug(K)
#Encrypt a random element in GT
m = groupObj.random(GT)
policy = '((one or three) and (TWO or FOUR))'
if debug: print('Acces Policy: %s' % policy)
CT = dabe.encrypt(PK, GP, m, policy)
if debug: print("\nCiphertext...")
if debug: groupObj.debug(CT)
orig_m = dabe.decrypt(GP, K, CT)
assert m == orig_m, 'FAILED Decryption!!!'
if debug: print('Successful Decryption!')
class HybridABEncMATest(unittest.TestCase):
def testHybridABEncMA(self):
groupObj = PairingGroup('SS512')
dabe = Dabe(groupObj)
hyb_abema = HybridABEncMA(dabe, groupObj)
#Setup global parameters for all new authorities
gp = hyb_abema.setup()
#Instantiate a few authorities
#Attribute names must be globally unique. HybridABEncMA
#Two authorities may not issue keys for the same attribute.
#Otherwise, the decryption algorithm will not know which private key to use
jhu_attributes = ['jhu.professor', 'jhu.staff', 'jhu.student']
jhmi_attributes = ['jhmi.doctor', 'jhmi.nurse', 'jhmi.staff', 'jhmi.researcher']
(jhuSK, jhuPK) = hyb_abema.authsetup(gp, jhu_attributes)
(jhmiSK, jhmiPK) = hyb_abema.authsetup(gp, jhmi_attributes)
allAuthPK = {}; allAuthPK.update(jhuPK); allAuthPK.update(jhmiPK)
#Setup a user with a few keys
bobs_gid = "20110615 [email protected] cryptokey"
K = {}
hyb_abema.keygen(gp, jhuSK,'jhu.professor', bobs_gid, K)
hyb_abema.keygen(gp, jhmiSK,'jhmi.researcher', bobs_gid, K)
msg = b'Hello World, I am a sensitive record!'
size = len(msg)
policy_str = "(jhmi.doctor or (jhmi.researcher and jhu.professor))"
ct = hyb_abema.encrypt(allAuthPK, gp, msg, policy_str)
if debug:
print("Ciphertext")
print("c1 =>", ct['c1'])
print("c2 =>", ct['c2'])
decrypted_msg = hyb_abema.decrypt(gp, K, ct)
if debug: print("Result =>", decrypted_msg)
assert decrypted_msg == msg, "Failed Decryption!!!"
if debug: print("Successful Decryption!!!")
del groupObj
if __name__ == "__main__":
unittest.main()
| lgpl-3.0 | -6,311,533,257,095,640,000 | 7,343,127,029,150,148,000 | 36.104651 | 88 | 0.587277 | false |
sem-geologist/hyperspy | hyperspy/samfire_utils/goodness_of_fit_tests/test_general.py | 6 | 1052 | # -*- coding: utf-8 -*-
# Copyright 2007-2016 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
class goodness_test(object):
_tolerance = None
@property
def tolerance(self):
return self._tolerance
@tolerance.setter
def tolerance(self, value):
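        # Tolerances are stored as non-negative values; None leaves the tolerance unset.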
if value is None:
self._tolerance = None
else:
self._tolerance = np.abs(value)
| gpl-3.0 | -1,689,254,147,218,531,600 | -4,997,122,668,598,886,000 | 29.057143 | 70 | 0.69962 | false |
rhdedgar/openshift-tools | openshift/installer/vendored/openshift-ansible-3.4.40/roles/lib_openshift/src/test/unit/test_oadm_manage_node.py | 17 | 10306 | '''
Unit tests for oadm_manage_node
'''
import os
import six
import sys
import unittest
import mock
# Removing invalid variable names for tests so that I can
# keep them brief
# pylint: disable=invalid-name,no-name-in-module
# Disable import-error b/c our libraries aren't loaded in jenkins
# pylint: disable=import-error
# place class in our python path
module_path = os.path.join('/'.join(os.path.realpath(__file__).split('/')[:-4]), 'library') # noqa: E501
sys.path.insert(0, module_path)
from oadm_manage_node import ManageNode, locate_oc_binary # noqa: E402
class ManageNodeTest(unittest.TestCase):
'''
Test class for oadm_manage_node
'''
@mock.patch('oadm_manage_node.Utils.create_tmpfile_copy')
@mock.patch('oadm_manage_node.ManageNode.openshift_cmd')
def test_list_pods(self, mock_openshift_cmd, mock_tmpfile_copy):
''' Testing a get '''
params = {'node': ['ip-172-31-49-140.ec2.internal'],
'schedulable': None,
'selector': None,
'pod_selector': None,
'list_pods': True,
'kubeconfig': '/etc/origin/master/admin.kubeconfig',
'evacuate': False,
'grace_period': False,
'dry_run': False,
'force': False}
pod_list = '''{
"metadata": {},
"items": [
{
"metadata": {
"name": "docker-registry-1-xuhik",
"generateName": "docker-registry-1-",
"namespace": "default",
"selfLink": "/api/v1/namespaces/default/pods/docker-registry-1-xuhik",
"uid": "ae2a25a2-e316-11e6-80eb-0ecdc51fcfc4",
"resourceVersion": "1501",
"creationTimestamp": "2017-01-25T15:55:23Z",
"labels": {
"deployment": "docker-registry-1",
"deploymentconfig": "docker-registry",
"docker-registry": "default"
},
"annotations": {
"openshift.io/deployment-config.latest-version": "1",
"openshift.io/deployment-config.name": "docker-registry",
"openshift.io/deployment.name": "docker-registry-1",
"openshift.io/scc": "restricted"
}
},
"spec": {}
},
{
"metadata": {
"name": "router-1-kp3m3",
"generateName": "router-1-",
"namespace": "default",
"selfLink": "/api/v1/namespaces/default/pods/router-1-kp3m3",
"uid": "9e71f4a5-e316-11e6-80eb-0ecdc51fcfc4",
"resourceVersion": "1456",
"creationTimestamp": "2017-01-25T15:54:56Z",
"labels": {
"deployment": "router-1",
"deploymentconfig": "router",
"router": "router"
},
"annotations": {
"openshift.io/deployment-config.latest-version": "1",
"openshift.io/deployment-config.name": "router",
"openshift.io/deployment.name": "router-1",
"openshift.io/scc": "hostnetwork"
}
},
"spec": {}
}]
}'''
mock_openshift_cmd.side_effect = [
{"cmd": "/usr/bin/oadm manage-node ip-172-31-49-140.ec2.internal --list-pods",
"results": pod_list,
"returncode": 0}
]
mock_tmpfile_copy.side_effect = [
'/tmp/mocked_kubeconfig',
]
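        # Run the module code against the mocked oc calls and inspect the returned pod listing.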
results = ManageNode.run_ansible(params, False)
# returned a single node
self.assertTrue(len(results['results']['nodes']) == 1)
# returned 2 pods
self.assertTrue(len(results['results']['nodes']['ip-172-31-49-140.ec2.internal']) == 2)
@mock.patch('oadm_manage_node.Utils.create_tmpfile_copy')
@mock.patch('oadm_manage_node.ManageNode.openshift_cmd')
def test_schedulable_false(self, mock_openshift_cmd, mock_tmpfile_copy):
        ''' Testing marking a node unschedulable '''
params = {'node': ['ip-172-31-49-140.ec2.internal'],
'schedulable': False,
'selector': None,
'pod_selector': None,
'list_pods': False,
'kubeconfig': '/etc/origin/master/admin.kubeconfig',
'evacuate': False,
'grace_period': False,
'dry_run': False,
'force': False}
node = [{
"apiVersion": "v1",
"kind": "Node",
"metadata": {
"creationTimestamp": "2017-01-26T14:34:43Z",
"labels": {
"beta.kubernetes.io/arch": "amd64",
"beta.kubernetes.io/instance-type": "m4.large",
"beta.kubernetes.io/os": "linux",
"failure-domain.beta.kubernetes.io/region": "us-east-1",
"failure-domain.beta.kubernetes.io/zone": "us-east-1c",
"hostname": "opstest-node-compute-0daaf",
"kubernetes.io/hostname": "ip-172-31-51-111.ec2.internal",
"ops_node": "old",
"region": "us-east-1",
"type": "compute"
},
"name": "ip-172-31-51-111.ec2.internal",
"resourceVersion": "6936",
"selfLink": "/api/v1/nodes/ip-172-31-51-111.ec2.internal",
"uid": "93d7fdfb-e3d4-11e6-a982-0e84250fc302"
},
"spec": {
"externalID": "i-06bb330e55c699b0f",
"providerID": "aws:///us-east-1c/i-06bb330e55c699b0f",
}}]
mock_openshift_cmd.side_effect = [
{"cmd": "/usr/bin/oc get node -o json ip-172-31-49-140.ec2.internal",
"results": node,
"returncode": 0},
{"cmd": "/usr/bin/oadm manage-node ip-172-31-49-140.ec2.internal --schedulable=False",
"results": "NAME STATUS AGE\n" +
"ip-172-31-49-140.ec2.internal Ready,SchedulingDisabled 5h\n",
"returncode": 0}]
mock_tmpfile_copy.side_effect = [
'/tmp/mocked_kubeconfig',
]
results = ManageNode.run_ansible(params, False)
self.assertTrue(results['changed'])
self.assertEqual(results['results']['nodes'][0]['name'], 'ip-172-31-49-140.ec2.internal')
self.assertEqual(results['results']['nodes'][0]['schedulable'], False)
@unittest.skipIf(six.PY3, 'py2 test only')
@mock.patch('os.path.exists')
@mock.patch('os.environ.get')
def test_binary_lookup_fallback(self, mock_env_get, mock_path_exists):
''' Testing binary lookup fallback '''
mock_env_get.side_effect = lambda _v, _d: ''
mock_path_exists.side_effect = lambda _: False
self.assertEqual(locate_oc_binary(), 'oc')
@unittest.skipIf(six.PY2, 'py3 test only')
@mock.patch('shutil.which')
@mock.patch('os.environ.get')
def test_binary_lookup_in_path_py3(self, mock_env_get, mock_shutil_which):
''' Testing binary lookup in path '''
oc_bin = '/usr/bin/oc'
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_shutil_which.side_effect = lambda _f, path=None: oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
@unittest.skipIf(six.PY3, 'py2 test only')
@mock.patch('os.path.exists')
@mock.patch('os.environ.get')
def test_binary_lookup_in_path(self, mock_env_get, mock_path_exists):
''' Testing binary lookup in path '''
oc_bin = '/usr/bin/oc'
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_path_exists.side_effect = lambda f: f == oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
@unittest.skipIf(six.PY3, 'py2 test only')
@mock.patch('os.path.exists')
@mock.patch('os.environ.get')
def test_binary_lookup_in_usr_local(self, mock_env_get, mock_path_exists):
''' Testing binary lookup in /usr/local/bin '''
oc_bin = '/usr/local/bin/oc'
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_path_exists.side_effect = lambda f: f == oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
@unittest.skipIf(six.PY3, 'py2 test only')
@mock.patch('os.path.exists')
@mock.patch('os.environ.get')
def test_binary_lookup_in_home(self, mock_env_get, mock_path_exists):
''' Testing binary lookup in ~/bin '''
oc_bin = os.path.expanduser('~/bin/oc')
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_path_exists.side_effect = lambda f: f == oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
@unittest.skipIf(six.PY2, 'py3 test only')
@mock.patch('shutil.which')
@mock.patch('os.environ.get')
def test_binary_lookup_fallback_py3(self, mock_env_get, mock_shutil_which):
''' Testing binary lookup fallback '''
mock_env_get.side_effect = lambda _v, _d: ''
mock_shutil_which.side_effect = lambda _f, path=None: None
self.assertEqual(locate_oc_binary(), 'oc')
@unittest.skipIf(six.PY2, 'py3 test only')
@mock.patch('shutil.which')
@mock.patch('os.environ.get')
def test_binary_lookup_in_usr_local_py3(self, mock_env_get, mock_shutil_which):
''' Testing binary lookup in /usr/local/bin '''
oc_bin = '/usr/local/bin/oc'
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_shutil_which.side_effect = lambda _f, path=None: oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
@unittest.skipIf(six.PY2, 'py3 test only')
@mock.patch('shutil.which')
@mock.patch('os.environ.get')
def test_binary_lookup_in_home_py3(self, mock_env_get, mock_shutil_which):
''' Testing binary lookup in ~/bin '''
oc_bin = os.path.expanduser('~/bin/oc')
mock_env_get.side_effect = lambda _v, _d: '/bin:/usr/bin'
mock_shutil_which.side_effect = lambda _f, path=None: oc_bin
self.assertEqual(locate_oc_binary(), oc_bin)
| apache-2.0 | -7,113,531,480,938,927,000 | -4,209,845,674,932,352,500 | 36.205776 | 105 | 0.540559 | false |
sankalpg/Essentia_tonicDebug_TEMP | test/src/unittest/sfx/test_aftermaxtobeforemaxenergyratio_streaming.py | 10 | 2179 | #!/usr/bin/env python
# Copyright (C) 2006-2013 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
from essentia_test import *
from essentia.streaming import AfterMaxToBeforeMaxEnergyRatio as \
sAfterMaxToBeforeMaxEnergyRatio
class TestAfterMaxToBeforeMaxEnergyRatio_Streaming(TestCase):
def testEmpty(self):
gen = VectorInput([])
strRatio = sAfterMaxToBeforeMaxEnergyRatio()
p = Pool()
gen.data >> strRatio.pitch
strRatio.afterMaxToBeforeMaxEnergyRatio >> (p, 'lowlevel.amtbmer')
run(gen)
self.assertRaises(KeyError, lambda: p['lowlevel.amtbmer'])
def testRegression(self):
        # this algorithm has a standard mode implementation which has been
        # tested through the unit tests in python. Therefore it's only tested
        # that for a certain input standard == streaming
pitch = readVector(join(filedir(), 'aftermaxtobeforemaxenergyratio', 'input.txt'))
p = Pool()
gen = VectorInput(pitch)
strRatio = sAfterMaxToBeforeMaxEnergyRatio()
gen.data >> strRatio.pitch
strRatio.afterMaxToBeforeMaxEnergyRatio >> (p, 'lowlevel.amtbmer')
run(gen)
stdResult = AfterMaxToBeforeMaxEnergyRatio()(pitch)
strResult = p['lowlevel.amtbmer']
self.assertAlmostEqual(strResult, stdResult, 5e-7)
suite = allTests(TestAfterMaxToBeforeMaxEnergyRatio_Streaming)
if __name__ == '__main__':
TextTestRunner(verbosity=2).run(suite)
| agpl-3.0 | -6,332,302,290,537,262,000 | 7,358,846,747,364,656,000 | 33.046875 | 90 | 0.712712 | false |
blankenberg/tools-iuc | data_managers/data_manager_diamond_database_builder/data_manager/data_manager_diamond_database_builder.py | 9 | 10627 | #!/usr/bin/env python
import bz2
import gzip
import json
import optparse
import os
import shutil
import subprocess
import sys
import tarfile
import tempfile
import urllib.error
import urllib.parse
import urllib.request
import zipfile
from ftplib import FTP
CHUNK_SIZE = 2**20 # 1mb
def cleanup_before_exit(tmp_dir):
if tmp_dir and os.path.exists(tmp_dir):
shutil.rmtree(tmp_dir)
def _get_files_in_ftp_path(ftp, path):
path_contents = []
ftp.retrlines('MLSD %s' % (path), path_contents.append)
return [line.split(';')[-1].lstrip() for line in path_contents]
def _get_stream_readers_for_tar(file_obj, tmp_dir):
fasta_tar = tarfile.open(fileobj=file_obj, mode='r:*')
return [fasta_tar.extractfile(member) for member in fasta_tar.getmembers()]
def _get_stream_readers_for_zip(file_obj, tmp_dir):
fasta_zip = zipfile.ZipFile(file_obj, 'r')
rval = []
for member in fasta_zip.namelist():
fasta_zip.extract(member, tmp_dir)
rval.append(open(os.path.join(tmp_dir, member), 'rb'))
return rval
def _get_stream_readers_for_gzip(file_obj, tmp_dir):
return [gzip.GzipFile(fileobj=file_obj, mode='rb')]
def _get_stream_readers_for_bz2(file_obj, tmp_dir):
return [bz2.BZ2File(file_obj.name, 'rb')]
def download_from_ncbi(data_manager_dict, params, target_directory,
database_id, database_name):
NCBI_FTP_SERVER = 'ftp.ncbi.nlm.nih.gov'
NCBI_DOWNLOAD_PATH = '/blast/db/FASTA/'
COMPRESSED_EXTENSIONS = [('.tar.gz', _get_stream_readers_for_tar),
('.tar.bz2', _get_stream_readers_for_tar),
('.zip', _get_stream_readers_for_zip),
('.gz', _get_stream_readers_for_gzip),
('.bz2', _get_stream_readers_for_bz2)]
ncbi_identifier = params['reference_source']['requested_identifier']
ftp = FTP(NCBI_FTP_SERVER)
ftp.login()
path_contents = _get_files_in_ftp_path(ftp, NCBI_DOWNLOAD_PATH)
ncbi_file_name = None
get_stream_reader = None
ext = None
for ext, get_stream_reader in COMPRESSED_EXTENSIONS:
if "%s%s" % (ncbi_identifier, ext) in path_contents:
ncbi_file_name = "%s%s%s" % (NCBI_DOWNLOAD_PATH, ncbi_identifier, ext)
break
if not ncbi_file_name:
raise Exception('Unable to determine filename for NCBI database for %s: %s' % (ncbi_identifier, path_contents))
tmp_dir = tempfile.mkdtemp(prefix='tmp-data-manager-ncbi-')
ncbi_fasta_filename = os.path.join(tmp_dir, "%s%s" % (ncbi_identifier, ext))
# fasta_base_filename = "%s.fa" % database_id
# fasta_filename = os.path.join(target_directory, fasta_base_filename)
# fasta_writer = open(fasta_filename, 'wb+')
tmp_extract_dir = os.path.join(tmp_dir, 'extracted_fasta')
os.mkdir(tmp_extract_dir)
tmp_fasta = open(ncbi_fasta_filename, 'wb+')
ftp.retrbinary('RETR %s' % ncbi_file_name, tmp_fasta.write)
tmp_fasta.flush()
tmp_fasta.seek(0)
fasta_readers = get_stream_reader(tmp_fasta, tmp_extract_dir)
data_table_entry = _stream_fasta_to_file(fasta_readers, target_directory, database_id, database_name, params)
_add_data_table_entry(data_manager_dict, data_table_entry)
for fasta_reader in fasta_readers:
fasta_reader.close()
tmp_fasta.close()
cleanup_before_exit(tmp_dir)
def download_from_url(data_manager_dict, params, target_directory, database_id, database_name):
# TODO: we should automatically do decompression here
urls = list(filter(bool, [x.strip() for x in params['reference_source']['user_url'].split('\n')]))
fasta_reader = [urllib.request.urlopen(url) for url in urls]
data_table_entry = _stream_fasta_to_file(fasta_reader, target_directory, database_id, database_name, params)
_add_data_table_entry(data_manager_dict, data_table_entry)
def download_from_history(data_manager_dict, params, target_directory, database_id, database_name):
# TODO: allow multiple FASTA input files
input_filename = params['reference_source']['input_fasta']
if isinstance(input_filename, list):
fasta_reader = [open(filename, 'rb') for filename in input_filename]
else:
fasta_reader = open(input_filename, 'rb')
data_table_entry = _stream_fasta_to_file(fasta_reader, target_directory, database_id, database_name, params)
_add_data_table_entry(data_manager_dict, data_table_entry)
def copy_from_directory(data_manager_dict, params, target_directory, database_id, database_name):
input_filename = params['reference_source']['fasta_filename']
create_symlink = params['reference_source']['create_symlink'] == 'create_symlink'
if create_symlink:
data_table_entry = _create_symlink(input_filename, target_directory, database_id, database_name)
else:
if isinstance(input_filename, list):
fasta_reader = [open(filename, 'rb') for filename in input_filename]
else:
            fasta_reader = open(input_filename, 'rb')
data_table_entry = _stream_fasta_to_file(fasta_reader, target_directory, database_id, database_name, params)
_add_data_table_entry(data_manager_dict, data_table_entry)
def _add_data_table_entry(data_manager_dict, data_table_entry):
data_manager_dict['data_tables'] = data_manager_dict.get('data_tables', {})
data_manager_dict['data_tables']['diamond_database'] = data_manager_dict['data_tables'].get('diamond_database', [])
data_manager_dict['data_tables']['diamond_database'].append(data_table_entry)
return data_manager_dict
def _stream_fasta_to_file(fasta_stream, target_directory, database_id,
database_name, params, close_stream=True):
fasta_base_filename = "%s.fa" % database_id
fasta_filename = os.path.join(target_directory, fasta_base_filename)
temp_fasta = tempfile.NamedTemporaryFile(delete=False, suffix=".fasta")
temp_fasta.close()
fasta_writer = open(temp_fasta.name, 'wb+')
if not isinstance(fasta_stream, list):
fasta_stream = [fasta_stream]
last_char = None
for fh in fasta_stream:
        if last_char not in (None, b'\n', b'\r'):
            fasta_writer.write(b'\n')
while True:
data = fh.read(CHUNK_SIZE)
if data:
fasta_writer.write(data)
                last_char = data[-1:]  # keep a bytes slice so the newline check above stays valid
else:
break
if close_stream:
fh.close()
fasta_writer.close()
args = ['diamond', 'makedb',
'--in', temp_fasta.name,
'--db', fasta_filename]
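    # When taxonomy support is requested, pass the NCBI mapping/nodes/names files on to "diamond makedb".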
if params['tax_cond']['tax_select'] == "history":
for i in ["taxonmap", "taxonnodes", "taxonnames"]:
args.extend(['--' + i, params['tax_cond'][i]])
elif params['tax_cond']['tax_select'] == "ncbi":
if os.path.isfile(os.path.join(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.FULL.gz')):
args.extend(['--taxonmap',
os.path.join(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.FULL.gz')])
elif os.path.isfile(os.path.join(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.FULL')):
args.extend(['--taxonmap',
os.path.join(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.FULL')])
elif os.path.isfile(os.path.join(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.gz')):
args.extend(['--taxonmap',
os.path.join(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid.gz')])
elif os.path.isfile(os.path.join(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid')):
args.extend(['--taxonmap',
os.path.join(params['tax_cond']['ncbi_tax'], 'prot.accession2taxid')])
else:
raise Exception('Unable to find prot.accession2taxid file in %s' % (params['tax_cond']['ncbi_tax']))
args.extend(['--taxonnodes',
os.path.join(params['tax_cond']['ncbi_tax'], 'nodes.dmp')])
args.extend(['--taxonnames',
os.path.join(params['tax_cond']['ncbi_tax'], 'names.dmp')])
tmp_stderr = tempfile.NamedTemporaryFile(prefix="tmp-data-manager-diamond-database-builder-stderr")
proc = subprocess.Popen(args=args, shell=False, cwd=target_directory,
stderr=tmp_stderr.fileno())
return_code = proc.wait()
if return_code:
tmp_stderr.flush()
tmp_stderr.seek(0)
print("Error building diamond database:", file=sys.stderr)
while True:
chunk = tmp_stderr.read(CHUNK_SIZE)
if not chunk:
break
sys.stderr.write(chunk.decode('utf-8'))
sys.exit(return_code)
tmp_stderr.close()
os.remove(temp_fasta.name)
return dict(value=database_id, name=database_name,
db_path="%s.dmnd" % fasta_base_filename)
def _create_symlink(input_filename, target_directory, database_id, database_name):
fasta_base_filename = "%s.fa" % database_id
fasta_filename = os.path.join(target_directory, fasta_base_filename)
os.symlink(input_filename, fasta_filename)
return dict(value=database_id, name=database_name, db_path=fasta_base_filename)
REFERENCE_SOURCE_TO_DOWNLOAD = dict(ncbi=download_from_ncbi,
url=download_from_url,
history=download_from_history,
directory=copy_from_directory)
def main():
# Parse Command Line
parser = optparse.OptionParser()
parser.add_option('-d', '--dbkey_description', dest='dbkey_description',
action='store', type="string", default=None,
help='dbkey_description')
(options, args) = parser.parse_args()
filename = args[0]
with open(filename) as fp:
params = json.load(fp)
target_directory = params['output_data'][0]['extra_files_path']
os.mkdir(target_directory)
data_manager_dict = {}
param_dict = params['param_dict']
database_id = param_dict['database_id']
database_name = param_dict['database_name']
if param_dict['tax_cond']['tax_select'] == "ncbi":
param_dict['tax_cond']['ncbi_tax'] = args[1]
# Fetch the FASTA
REFERENCE_SOURCE_TO_DOWNLOAD[param_dict['reference_source']['reference_source_selector']](data_manager_dict, param_dict, target_directory, database_id, database_name)
# save info to json file
open(filename, 'w').write(json.dumps(data_manager_dict, sort_keys=True))
if __name__ == "__main__":
main()
| mit | 5,089,912,025,152,225,000 | 6,560,234,805,302,307,000 | 38.505576 | 170 | 0.629529 | false |
yuewko/neutron | neutron/agent/ovsdb/impl_idl.py | 4 | 7372 | # Copyright (c) 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import queue as Queue
import time
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from ovs.db import idl
from neutron.agent.ovsdb import api
from neutron.agent.ovsdb.native import commands as cmd
from neutron.agent.ovsdb.native import connection
from neutron.agent.ovsdb.native import idlutils
from neutron.i18n import _LE
OPTS = [
cfg.StrOpt('ovsdb_connection',
default='tcp:127.0.0.1:6640',
help=_('The connection string for the native OVSDB backend')),
]
cfg.CONF.register_opts(OPTS, 'OVS')
# TODO(twilson) DEFAULT.ovs_vsctl_timeout should be OVS.vsctl_timeout
cfg.CONF.import_opt('ovs_vsctl_timeout', 'neutron.agent.common.ovs_lib')
LOG = logging.getLogger(__name__)
class Transaction(api.Transaction):
def __init__(self, api, ovsdb_connection, timeout,
check_error=False, log_errors=False):
self.api = api
self.check_error = check_error
self.log_errors = log_errors
self.commands = []
self.results = Queue.Queue(1)
self.ovsdb_connection = ovsdb_connection
self.timeout = timeout
def add(self, command):
"""Add a command to the transaction
returns The command passed as a convenience
"""
self.commands.append(command)
return command
def commit(self):
self.ovsdb_connection.queue_txn(self)
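        # Block until the connection thread has executed this transaction and posted its outcome.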
result = self.results.get()
if self.check_error:
if isinstance(result, idlutils.ExceptionResult):
if self.log_errors:
LOG.error(result.tb)
raise result.ex
return result
def do_commit(self):
start_time = time.time()
attempts = 0
while True:
elapsed_time = time.time() - start_time
if attempts > 0 and elapsed_time > self.timeout:
raise RuntimeError("OVS transaction timed out")
attempts += 1
# TODO(twilson) Make sure we don't loop longer than vsctl_timeout
txn = idl.Transaction(self.api.idl)
for i, command in enumerate(self.commands):
LOG.debug("Running txn command(idx=%(idx)s): %(cmd)s",
{'idx': i, 'cmd': command})
try:
command.run_idl(txn)
except Exception:
with excutils.save_and_reraise_exception() as ctx:
txn.abort()
if not self.check_error:
ctx.reraise = False
seqno = self.api.idl.change_seqno
status = txn.commit_block()
if status == txn.TRY_AGAIN:
LOG.debug("OVSDB transaction returned TRY_AGAIN, retrying")
if self.api.idl._session.rpc.status != 0:
LOG.debug("Lost connection to OVSDB, reconnecting!")
self.api.idl.force_reconnect()
idlutils.wait_for_change(
self.api.idl, self.timeout - elapsed_time,
seqno)
continue
elif status == txn.ERROR:
msg = _LE("OVSDB Error: %s") % txn.get_error()
if self.log_errors:
LOG.error(msg)
if self.check_error:
# For now, raise similar error to vsctl/utils.execute()
raise RuntimeError(msg)
return
elif status == txn.ABORTED:
LOG.debug("Transaction aborted")
return
elif status == txn.UNCHANGED:
LOG.debug("Transaction caused no change")
return [cmd.result for cmd in self.commands]
class OvsdbIdl(api.API):
ovsdb_connection = connection.Connection(cfg.CONF.OVS.ovsdb_connection,
cfg.CONF.ovs_vsctl_timeout,
'Open_vSwitch')
def __init__(self, context):
super(OvsdbIdl, self).__init__(context)
OvsdbIdl.ovsdb_connection.start()
self.idl = OvsdbIdl.ovsdb_connection.idl
@property
def _tables(self):
return self.idl.tables
@property
def _ovs(self):
return self._tables['Open_vSwitch'].rows.values()[0]
def transaction(self, check_error=False, log_errors=True, **kwargs):
return Transaction(self, OvsdbIdl.ovsdb_connection,
self.context.vsctl_timeout,
check_error, log_errors)
def add_br(self, name, may_exist=True):
return cmd.AddBridgeCommand(self, name, may_exist)
def del_br(self, name, if_exists=True):
return cmd.DelBridgeCommand(self, name, if_exists)
def br_exists(self, name):
return cmd.BridgeExistsCommand(self, name)
def port_to_br(self, name):
return cmd.PortToBridgeCommand(self, name)
def iface_to_br(self, name):
# For our purposes, ports and interfaces always have the same name
return cmd.PortToBridgeCommand(self, name)
def list_br(self):
return cmd.ListBridgesCommand(self)
def br_get_external_id(self, name, field):
return cmd.BrGetExternalIdCommand(self, name, field)
def br_set_external_id(self, name, field, value):
return cmd.BrSetExternalIdCommand(self, name, field, value)
def db_set(self, table, record, *col_values):
return cmd.DbSetCommand(self, table, record, *col_values)
def db_clear(self, table, record, column):
return cmd.DbClearCommand(self, table, record, column)
def db_get(self, table, record, column):
return cmd.DbGetCommand(self, table, record, column)
def db_list(self, table, records=None, columns=None, if_exists=False):
return cmd.DbListCommand(self, table, records, columns, if_exists)
def db_find(self, table, *conditions, **kwargs):
return cmd.DbFindCommand(self, table, *conditions, **kwargs)
def set_controller(self, bridge, controllers):
return cmd.SetControllerCommand(self, bridge, controllers)
def del_controller(self, bridge):
return cmd.DelControllerCommand(self, bridge)
def get_controller(self, bridge):
return cmd.GetControllerCommand(self, bridge)
def set_fail_mode(self, bridge, mode):
return cmd.SetFailModeCommand(self, bridge, mode)
def add_port(self, bridge, port, may_exist=True):
return cmd.AddPortCommand(self, bridge, port, may_exist)
def del_port(self, port, bridge=None, if_exists=True):
return cmd.DelPortCommand(self, port, bridge, if_exists)
def list_ports(self, bridge):
return cmd.ListPortsCommand(self, bridge)
| apache-2.0 | 2,267,540,957,012,170,200 | -1,139,315,471,074,807,800 | 35.315271 | 78 | 0.609061 | false |
theilmbh/pyoperant | pyoperant/panels.py | 3 | 1654 | ## Panel classes
class BasePanel(object):
"""Returns a panel instance.
This class should be subclassed to define a local panel configuration.
To build a panel, do the following in the __init__() method of your local
subclass:
1. add instances of the necessary interfaces to the 'interfaces' dict
attribute:
>>> self.interfaces['comedi'] = comedi.ComediInterface(device_name='/dev/comedi0')
2. add inputs and outputs to the 'inputs' and 'outputs' list attributes:
>>> for in_chan in range(4):
self.inputs.append(hwio.BooleanInput(interface=self.interfaces['comedi'],
params = {'subdevice': 2,
'channel': in_chan
},
)
3. add components constructed from your inputs and outputs:
>>> self.hopper = components.Hopper(IR=self.inputs[3],solenoid=self.outputs[4])
4. assign panel methods needed for operant behavior, such as 'reward':
>>> self.reward = self.hopper.reward
5. finally, define a reset() method that will set the entire panel to a
neutral state:
>>> def reset(self):
>>> for output in self.outputs:
>>> output.set(False)
>>> self.house_light.write(True)
>>> return True
"""
def __init__(self, *args,**kwargs):
self.interfaces = {}
self.inputs = []
self.outputs = []
def reset(self):
raise NotImplementedError
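# --- Editor's illustrative sketch (not part of the original pyoperant module) ---
# A minimal local panel written by following the five steps in the BasePanel
# docstring above. The import paths, channel numbers and component wiring are
# assumptions lifted from the docstring's own examples (a comedi interface,
# hwio.BooleanInput/BooleanOutput, components.Hopper); adapt them to your real
# hardware before use, which is why the sketch is left commented out.
#
# from pyoperant import hwio, components
# from pyoperant.interfaces import comedi
#
# class ExamplePanel(BasePanel):
#     def __init__(self, *args, **kwargs):
#         super(ExamplePanel, self).__init__(*args, **kwargs)
#         # 1. interfaces
#         self.interfaces['comedi'] = comedi.ComediInterface(device_name='/dev/comedi0')
#         # 2. inputs and outputs
#         for chan in range(4):
#             self.inputs.append(hwio.BooleanInput(
#                 interface=self.interfaces['comedi'],
#                 params={'subdevice': 2, 'channel': chan}))
#         for chan in range(8):
#             self.outputs.append(hwio.BooleanOutput(
#                 interface=self.interfaces['comedi'],
#                 params={'subdevice': 2, 'channel': chan}))
#         # 3. components built from the inputs/outputs
#         self.hopper = components.Hopper(IR=self.inputs[3], solenoid=self.outputs[4])
#         # 4. panel methods used by operant protocols
#         self.reward = self.hopper.reward
#
#     def reset(self):
#         # 5. return the panel to a neutral state
#         for output in self.outputs:
#             output.set(False)
#         return True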
| bsd-3-clause | -2,208,551,785,858,211,800 | -5,995,811,026,204,348,000 | 34.956522 | 90 | 0.537485 | false |
lattwood/phantomjs | src/breakpad/src/tools/gyp/tools/pretty_vcproj.py | 137 | 9479 | #!/usr/bin/python2.5
# Copyright 2009 Google Inc.
# All Rights Reserved.
"""Make the format of a vcproj really pretty.
This script normalize and sort an xml. It also fetches all the properties
inside linked vsprops and include them explicitly in the vcproj.
It outputs the resulting xml to stdout.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import os
import sys
from xml.dom.minidom import parse
from xml.dom.minidom import Node
REPLACEMENTS = dict()
ARGUMENTS = None
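# REPLACEMENTS holds key=value substitutions given on the command line; ARGUMENTS
# keeps the raw argv so helpers can resolve paths relative to the input vcproj.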
class CmpTuple:
"""Compare function between 2 tuple."""
def __call__(self, x, y):
(key1, value1) = x
(key2, value2) = y
return cmp(key1, key2)
class CmpNode:
"""Compare function between 2 xml nodes."""
def get_string(self, node):
node_string = "node"
node_string += node.nodeName
if node.nodeValue:
node_string += node.nodeValue
if node.attributes:
# We first sort by name, if present.
node_string += node.getAttribute("Name")
all_nodes = []
for (name, value) in node.attributes.items():
all_nodes.append((name, value))
all_nodes.sort(CmpTuple())
for (name, value) in all_nodes:
node_string += name
node_string += value
return node_string
def __call__(self, x, y):
return cmp(self.get_string(x), self.get_string(y))
def PrettyPrintNode(node, indent=0):
if node.nodeType == Node.TEXT_NODE:
if node.data.strip():
print '%s%s' % (' '*indent, node.data.strip())
return
if node.childNodes:
node.normalize()
# Get the number of attributes
attr_count = 0
if node.attributes:
attr_count = node.attributes.length
# Print the main tag
if attr_count == 0:
print '%s<%s>' % (' '*indent, node.nodeName)
else:
print '%s<%s' % (' '*indent, node.nodeName)
all_attributes = []
for (name, value) in node.attributes.items():
all_attributes.append((name, value))
all_attributes.sort(CmpTuple())
for (name, value) in all_attributes:
print '%s %s="%s"' % (' '*indent, name, value)
print '%s>' % (' '*indent)
if node.nodeValue:
print '%s %s' % (' '*indent, node.nodeValue)
for sub_node in node.childNodes:
PrettyPrintNode(sub_node, indent=indent+2)
print '%s</%s>' % (' '*indent, node.nodeName)
def FlattenFilter(node):
"""Returns a list of all the node and sub nodes."""
node_list = []
if (node.attributes and
node.getAttribute('Name') == '_excluded_files'):
# We don't add the "_excluded_files" filter.
return []
for current in node.childNodes:
if current.nodeName == 'Filter':
node_list.extend(FlattenFilter(current))
else:
node_list.append(current)
return node_list
def FixFilenames(filenames, current_directory):
new_list = []
for filename in filenames:
if filename:
for key in REPLACEMENTS:
filename = filename.replace(key, REPLACEMENTS[key])
os.chdir(current_directory)
filename = filename.strip('"\' ')
if filename.startswith('$'):
new_list.append(filename)
else:
new_list.append(os.path.abspath(filename))
return new_list
def AbsoluteNode(node):
# Make all the properties we know about in this node absolute.
if node.attributes:
for (name, value) in node.attributes.items():
if name in ['InheritedPropertySheets', 'RelativePath',
'AdditionalIncludeDirectories',
'IntermediateDirectory', 'OutputDirectory',
'AdditionalLibraryDirectories']:
# We want to fix up these paths
path_list = value.split(';')
new_list = FixFilenames(path_list, os.path.dirname(ARGUMENTS[1]))
node.setAttribute(name, ';'.join(new_list))
if not value:
node.removeAttribute(name)
def CleanupVcproj(node):
# For each sub node, we call recursively this function.
for sub_node in node.childNodes:
AbsoluteNode(sub_node)
CleanupVcproj(sub_node)
  # Normalize the node, and remove all extraneous whitespace.
for sub_node in node.childNodes:
if sub_node.nodeType == Node.TEXT_NODE:
sub_node.data = sub_node.data.replace("\r", "")
sub_node.data = sub_node.data.replace("\n", "")
sub_node.data = sub_node.data.rstrip()
# Fix all the semicolon separated attributes to be sorted, and we also
# remove the dups.
if node.attributes:
for (name, value) in node.attributes.items():
sorted_list = sorted(value.split(';'))
unique_list = []
[unique_list.append(i) for i in sorted_list if not unique_list.count(i)]
node.setAttribute(name, ';'.join(unique_list))
if not value:
node.removeAttribute(name)
if node.childNodes:
node.normalize()
# For each node, take a copy, and remove it from the list.
node_array = []
while node.childNodes and node.childNodes[0]:
# Take a copy of the node and remove it from the list.
current = node.childNodes[0]
node.removeChild(current)
# If the child is a filter, we want to append all its children
# to this same list.
if current.nodeName == 'Filter':
node_array.extend(FlattenFilter(current))
else:
node_array.append(current)
# Sort the list.
node_array.sort(CmpNode())
# Insert the nodes in the correct order.
for new_node in node_array:
# But don't append empty tool node.
if new_node.nodeName == 'Tool':
if new_node.attributes and new_node.attributes.length == 1:
# This one was empty.
continue
if new_node.nodeName == 'UserMacro':
continue
node.appendChild(new_node)
def GetConfiguationNodes(vcproj):
#TODO(nsylvain): Find a better way to navigate the xml.
nodes = []
for node in vcproj.childNodes:
if node.nodeName == "Configurations":
for sub_node in node.childNodes:
if sub_node.nodeName == "Configuration":
nodes.append(sub_node)
return nodes
def GetChildrenVsprops(filename):
dom = parse(filename)
if dom.documentElement.attributes:
vsprops = dom.documentElement.getAttribute('InheritedPropertySheets')
return FixFilenames(vsprops.split(';'), os.path.dirname(filename))
return []
def SeekToNode(node1, child2):
# A text node does not have properties.
if child2.nodeType == Node.TEXT_NODE:
return None
# Get the name of the current node.
current_name = child2.getAttribute("Name")
if not current_name:
# There is no name. We don't know how to merge.
return None
# Look through all the nodes to find a match.
for sub_node in node1.childNodes:
if sub_node.nodeName == child2.nodeName:
name = sub_node.getAttribute("Name")
if name == current_name:
return sub_node
# No match. We give up.
return None
def MergeAttributes(node1, node2):
# No attributes to merge?
if not node2.attributes:
return
for (name, value2) in node2.attributes.items():
# Don't merge the 'Name' attribute.
if name == 'Name':
continue
value1 = node1.getAttribute(name)
if value1:
# The attribute exist in the main node. If it's equal, we leave it
# untouched, otherwise we concatenate it.
if value1 != value2:
node1.setAttribute(name, ';'.join([value1, value2]))
else:
      # The attribute does not exist in the main node. We append this one.
node1.setAttribute(name, value2)
# If the attribute was a property sheet attributes, we remove it, since
# they are useless.
if name == 'InheritedPropertySheets':
node1.removeAttribute(name)
def MergeProperties(node1, node2):
MergeAttributes(node1, node2)
for child2 in node2.childNodes:
child1 = SeekToNode(node1, child2)
if child1:
MergeProperties(child1, child2)
else:
node1.appendChild(child2.cloneNode(True))
def main(argv):
global REPLACEMENTS
global ARGUMENTS
ARGUMENTS = argv
"""Main function of this vcproj prettifier."""
# check if we have exactly 1 parameter.
if len(argv) < 2:
print ('Usage: %s "c:\\path\\to\\vcproj.vcproj" [key1=value1] '
'[key2=value2]' % argv[0])
return
# Parse the keys
for i in range(2, len(argv)):
(key, value) = argv[i].split('=')
REPLACEMENTS[key] = value
# Open the vcproj and parse the xml.
dom = parse(argv[1])
# First thing we need to do is find the Configuration Node and merge them
# with the vsprops they include.
for configuration_node in GetConfiguationNodes(dom.documentElement):
# Get the property sheets associated with this configuration.
vsprops = configuration_node.getAttribute('InheritedPropertySheets')
# Fix the filenames to be absolute.
vsprops_list = FixFilenames(vsprops.strip().split(';'),
os.path.dirname(argv[1]))
# Extend the list of vsprops with all vsprops contained in the current
# vsprops.
for current_vsprops in vsprops_list:
vsprops_list.extend(GetChildrenVsprops(current_vsprops))
# Now that we have all the vsprops, we need to merge them.
for current_vsprops in vsprops_list:
MergeProperties(configuration_node,
parse(current_vsprops).documentElement)
# Now that everything is merged, we need to cleanup the xml.
CleanupVcproj(dom.documentElement)
  # Finally, we use the pretty xml function to print the vcproj back to the
# user.
#print dom.toprettyxml(newl="\n")
PrettyPrintNode(dom.documentElement)
if __name__ == '__main__':
main(sys.argv) | bsd-3-clause | -8,275,845,894,716,022,000 | 6,226,899,610,989,502,000 | 29.191083 | 78 | 0.656926 | false |
ahmetcemturan/SFACT | skeinforge_application/skeinforge_plugins/craft_plugins/limit.py | 1 | 8282 | #! /usr/bin/env python
"""
This page is in the table of contents.
This plugin limits the feed rate of the tool head, so that the stepper motors are not driven too fast and skip steps.
The limit manual page is at:
http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Limit
The maximum z feed rate is defined in speed.
==Operation==
The default 'Activate Limit' checkbox is on. When it is on, the functions described below will work, when it is off, nothing will be done.
==Settings==
===Maximum Initial Feed Rate===
Default is one millimeter per second.
Defines the maximum speed of the inital tool head move.
==Examples==
The following examples limit the file Screw Holder Bottom.stl. The examples are run in a terminal in the folder which contains Screw Holder Bottom.stl and limit.py.
> python limit.py
This brings up the limit dialog.
> python limit.py Screw Holder Bottom.stl
The limit tool is parsing the file:
Screw Holder Bottom.stl
..
The limit tool has created the file:
.. Screw Holder Bottom_limit.gcode
"""
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from datetime import date
from fabmetheus_utilities.fabmetheus_tools import fabmetheus_interpret
from fabmetheus_utilities.vector3 import Vector3
from fabmetheus_utilities import archive
from fabmetheus_utilities import euclidean
from fabmetheus_utilities import gcodec
from fabmetheus_utilities import intercircle
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_craft
from skeinforge_application.skeinforge_utilities import skeinforge_polyfile
from skeinforge_application.skeinforge_utilities import skeinforge_profile
import math
import os
import sys
__author__ = 'Enrique Perez ([email protected])'
__date__ = '$Date: 2008/28/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def getCraftedText(fileName, gcodeText='', repository=None):
'Limit a gcode file or text.'
return getCraftedTextFromText( archive.getTextIfEmpty(fileName, gcodeText), repository )
def getCraftedTextFromText(gcodeText, repository=None):
'Limit a gcode text.'
if gcodec.isProcedureDoneOrFileIsEmpty(gcodeText, 'limit'):
return gcodeText
if repository == None:
repository = settings.getReadRepository(LimitRepository())
if not repository.activateLimit.value:
return gcodeText
return LimitSkein().getCraftedGcode(gcodeText, repository)
def getNewRepository():
'Get new repository.'
return LimitRepository()
def writeOutput(fileName, shouldAnalyze=True):
'Limit a gcode file.'
skeinforge_craft.writeChainTextWithNounMessage(fileName, 'limit', shouldAnalyze)
class LimitRepository:
'A class to handle the limit settings.'
def __init__(self):
'Set the default settings, execute title & settings fileName.'
skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_plugins.craft_plugins.limit.html', self )
self.fileNameInput = settings.FileNameInput().getFromFileName( fabmetheus_interpret.getGNUTranslatorGcodeFileTypeTuples(), 'Open File for Limit', self, '')
self.openWikiManualHelpPage = settings.HelpPage().getOpenFromAbsolute('http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Limit')
self.activateLimit = settings.BooleanSetting().getFromValue('Activate Limit', self, False)
self.maximumInitialFeedRate = settings.FloatSpin().getFromValue(0.5, 'Maximum Initial Feed Rate (mm/s):', self, 10.0, 1.0)
self.executeTitle = 'Limit'
def execute(self):
'Limit button has been clicked.'
fileNames = skeinforge_polyfile.getFileOrDirectoryTypesUnmodifiedGcode(self.fileNameInput.value, fabmetheus_interpret.getImportPluginFileNames(), self.fileNameInput.wasCancelled)
for fileName in fileNames:
writeOutput(fileName)
class LimitSkein:
'A class to limit a skein of extrusions.'
def __init__(self):
self.distanceFeedRate = gcodec.DistanceFeedRate()
self.feedRateMinute = None
self.lineIndex = 0
self.maximumZDrillFeedRatePerSecond = 987654321.0
self.maximumZFeedRatePerSecond = 2.0
self.oldLocation = None
def getCraftedGcode(self, gcodeText, repository):
'Parse gcode text and store the limit gcode.'
self.repository = repository
self.lines = archive.getTextLines(gcodeText)
self.parseInitialization()
self.maximumZDrillFeedRatePerSecond = min(self.maximumZDrillFeedRatePerSecond, self.maximumZFeedRatePerSecond)
self.maximumZCurrentFeedRatePerSecond = self.maximumZFeedRatePerSecond
for lineIndex in xrange(self.lineIndex, len(self.lines)):
self.parseLine( lineIndex )
return self.distanceFeedRate.output.getvalue()
def getLimitedInitialMovement(self, line, splitLine):
'Get a limited linear movement.'
if self.oldLocation == None:
line = self.distanceFeedRate.getLineWithFeedRate(60.0 * self.repository.maximumInitialFeedRate.value, line, splitLine)
return line
def getZLimitedLine(self, deltaZ, distance, line, splitLine):
'Get a replaced z limited gcode movement line.'
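		# If the vertical component of this move exceeds the allowed z feed rate, scale the whole feed rate down proportionally.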
zFeedRateSecond = self.feedRateMinute * deltaZ / distance / 60.0
if zFeedRateSecond <= self.maximumZCurrentFeedRatePerSecond:
return line
limitedFeedRateMinute = self.feedRateMinute * self.maximumZCurrentFeedRatePerSecond / zFeedRateSecond
return self.distanceFeedRate.getLineWithFeedRate(limitedFeedRateMinute, line, splitLine)
def getZLimitedLineArc(self, line, splitLine):
'Get a replaced z limited gcode arc movement line.'
self.feedRateMinute = gcodec.getFeedRateMinute(self.feedRateMinute, splitLine)
if self.feedRateMinute == None or self.oldLocation == None:
return line
relativeLocation = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine)
self.oldLocation += relativeLocation
deltaZ = abs(relativeLocation.z)
distance = gcodec.getArcDistance(relativeLocation, splitLine)
return self.getZLimitedLine(deltaZ, distance, line, splitLine)
def getZLimitedLineLinear(self, line, location, splitLine):
'Get a replaced z limited gcode linear movement line.'
self.feedRateMinute = gcodec.getFeedRateMinute(self.feedRateMinute, splitLine)
if location == self.oldLocation:
return ''
if self.feedRateMinute == None or self.oldLocation == None:
return line
deltaZ = abs(location.z - self.oldLocation.z)
distance = abs(location - self.oldLocation)
return self.getZLimitedLine(deltaZ, distance, line, splitLine)
def parseInitialization(self):
'Parse gcode initialization and store the parameters.'
for self.lineIndex in xrange(len(self.lines)):
line = self.lines[self.lineIndex]
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
firstWord = gcodec.getFirstWord(splitLine)
self.distanceFeedRate.parseSplitLine(firstWord, splitLine)
if firstWord == '(</extruderInitialization>)':
self.distanceFeedRate.addTagBracketedProcedure('limit')
return
elif firstWord == '(<maximumZDrillFeedRatePerSecond>':
self.maximumZDrillFeedRatePerSecond = float(splitLine[1])
elif firstWord == '(<maximumZFeedRatePerSecond>':
self.maximumZFeedRatePerSecond = float(splitLine[1])
self.distanceFeedRate.addLine(line)
def parseLine( self, lineIndex ):
'Parse a gcode line and add it to the limit skein.'
line = self.lines[lineIndex].lstrip()
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
if len(splitLine) < 1:
return
firstWord = gcodec.getFirstWord(splitLine)
if firstWord == 'G1':
location = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine)
line = self.getLimitedInitialMovement(line, splitLine)
line = self.getZLimitedLineLinear(line, location, splitLine)
self.oldLocation = location
elif firstWord == 'G2' or firstWord == 'G3':
line = self.getZLimitedLineArc(line, splitLine)
elif firstWord == 'M101':
self.maximumZCurrentFeedRatePerSecond = self.maximumZDrillFeedRatePerSecond
elif firstWord == 'M103':
self.maximumZCurrentFeedRatePerSecond = self.maximumZFeedRatePerSecond
self.distanceFeedRate.addLine(line)
def main():
'Display the limit dialog.'
if len(sys.argv) > 1:
writeOutput(' '.join(sys.argv[1 :]))
else:
settings.startMainLoopFromConstructor(getNewRepository())
if __name__ == '__main__':
main()
| agpl-3.0 | -4,115,154,780,594,700,300 | -4,990,972,896,427,400,000 | 40 | 180 | 0.781574 | false |
K-Constantine/Amaraki | core/deps/gyp/test/mac/gyptest-debuginfo.py | 349 | 1152 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Tests things related to debug information generation.
"""
import TestGyp
import sys
if sys.platform == 'darwin':
test = TestGyp.TestGyp(formats=['ninja', 'make', 'xcode'])
test.run_gyp('test.gyp', chdir='debuginfo')
test.build('test.gyp', test.ALL, chdir='debuginfo')
test.built_file_must_exist('libnonbundle_shared_library.dylib.dSYM',
chdir='debuginfo')
test.built_file_must_exist('nonbundle_loadable_module.so.dSYM',
chdir='debuginfo')
test.built_file_must_exist('nonbundle_executable.dSYM',
chdir='debuginfo')
test.built_file_must_exist('bundle_shared_library.framework.dSYM',
chdir='debuginfo')
test.built_file_must_exist('bundle_loadable_module.bundle.dSYM',
chdir='debuginfo')
test.built_file_must_exist('My App.app.dSYM',
chdir='debuginfo')
test.pass_test()
| mit | 7,520,165,564,098,098,000 | -8,295,941,239,977,221,000 | 31 | 72 | 0.619792 | false |
mfnch/pyrtist | old/web/in/examples/create_example.py | 1 | 2754 | import sys, os, os.path, commands, re
usage = "USAGE: python create_example.py box.example"
if len(sys.argv) != 2:
raise "Expected one argument.\n" + usage
example_file = sys.argv[1]
print "Working on '%s'..." % example_file
# Default values for variables which may be changed inside example_file
in_directory = ".."
box = "box -l g"
convert = "convert"
convert_opts = ""
highlight = "%s/../katehighlight/bin/highlight" % in_directory
rst_skeleton = "skeleton"
rst_out = None
title = None
description = None
figure_caption = None
box_source = None
out_eps = None
out_png = None
_f = open(example_file)
exec(_f)
_f.close()
if title == None:
title = "Box example: %s" % crumb
print "Removing old figure if present..."
if out_eps and os.access(out_eps, os.W_OK):
try:
os.remove(out_eps)
except:
print "Failed to remove the figure: continuing anyway..."
print "Executing the Box program..."
print commands.getoutput("%s %s" % (box, box_source))
have_figure = False
if out_eps and os.access(out_eps, os.R_OK):
print "Adjusting eps figure..."
out_png = os.path.splitext(out_eps)[0] + ".png"
print commands.getoutput("%s %s %s %s" %
(convert, convert_opts, out_eps, out_png))
print out_png
have_figure = os.access(out_png, os.R_OK)
if not have_figure:
raise "The figure '%s' has not been produced: stopping here!" % out_png
print "Highlighting the Box source..."
highlighted_source = "/tmp/h.html"
print commands.getoutput("%s Box %s %s" % (highlight, box_source, highlighted_source))
f = open(highlighted_source, "r")
htmlized_box_program = f.read()
f.close()
print "Opening the skeleton..."
f = open(rst_skeleton, "r")
data_skeleton = f.read()
f.close()
vars_dict = {
'title': title,
'description': description,
'crumb': crumb,
'box_file':box_source,
'figure_caption':figure_caption,
'image': out_png,
'htmlized_box_program': htmlized_box_program
}
r = re.compile("[$][^$]*[$]")
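# Matches $placeholder$ tokens in the skeleton; substitutor() swaps in values from vars_dict.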
def substitutor(var):
try:
var_name = var.group(0)[1:-1]
except:
raise "Error when substituting variable."
if vars_dict.has_key(var_name):
return str(vars_dict[var_name])
print "WARNING: Variable '%s' not found!" % var_name
return var.group(0)
print "Filling the skeleton..."
out = re.sub(r, substitutor, data_skeleton)
f = open(rst_out, "w")
f.write(out)
f.close()
print "Output produced (%s)" % rst_out
print "Generating thumbnail..."
html_out = os.path.splitext(out_png)[0] + ".html"
out_thumb_png = "small_" + out_png
scale_opts = "-scale 100"
print commands.getoutput("%s %s %s %s"
% (convert, scale_opts, out_png, out_thumb_png))
f = open("thumbnails.dat", "a")
f.write("%s, %s\n" % (html_out, out_thumb_png))
f.close()
| lgpl-2.1 | -6,309,499,082,295,713,000 | -6,484,457,350,736,000,000 | 24.738318 | 86 | 0.649601 | false |
amir-qayyum-khan/edx-platform | cms/djangoapps/contentstore/management/commands/fix_not_found.py | 62 | 1152 | """
Script for fixing the item not found errors in a course
"""
from django.core.management.base import BaseCommand, CommandError
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore
from xmodule.modulestore import ModuleStoreEnum
# To run from command line: ./manage.py cms fix_not_found course-v1:org+course+run
class Command(BaseCommand):
"""Fix a course's item not found errors"""
help = "Fix a course's ItemNotFound errors"
def add_arguments(self, parser):
parser.add_argument('course_id')
def handle(self, *args, **options):
"""Execute the command"""
course_id = options.get('course_id', None)
course_key = CourseKey.from_string(course_id)
# for now only support on split mongo
# pylint: disable=protected-access
owning_store = modulestore()._get_modulestore_for_courselike(course_key)
if hasattr(owning_store, 'fix_not_found'):
owning_store.fix_not_found(course_key, ModuleStoreEnum.UserID.mgmt_command)
else:
raise CommandError("The owning modulestore does not support this command.")
| agpl-3.0 | 459,218,099,340,763,100 | 3,462,203,325,859,610,600 | 37.4 | 87 | 0.698785 | false |
Jgarcia-IAS/Fidelizacion_odoo | openerp/addons/project/res_partner.py | 334 | 1953 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields,osv
class res_partner(osv.osv):
def _task_count(self, cr, uid, ids, field_name, arg, context=None):
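        # Map each partner id to the number of project tasks linked to that partner.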
Task = self.pool['project.task']
return {
partner_id: Task.search_count(cr,uid, [('partner_id', '=', partner_id)], context=context)
for partner_id in ids
}
""" Inherits partner and adds Tasks information in the partner form """
_inherit = 'res.partner'
_columns = {
'task_ids': fields.one2many('project.task', 'partner_id', 'Tasks'),
'task_count': fields.function(_task_count, string='# Tasks', type='integer'),
}
def copy(self, cr, uid, record_id, default=None, context=None):
if default is None:
default = {}
default['task_ids'] = []
return super(res_partner, self).copy(
cr, uid, record_id, default=default, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,682,860,376,948,806,000 | 7,261,464,752,417,731,000 | 40.553191 | 101 | 0.600614 | false |
kohnle-lernmodule/exeLearningPlus1_04 | twisted/test/test_stateful.py | 20 | 1570 | # Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Test cases for twisted.protocols.stateful
"""
from twisted.test import test_protocols
from twisted.protocols.stateful import StatefulProtocol
from struct import pack, unpack
class MyInt32StringReceiver(StatefulProtocol):
MAX_LENGTH = 99999
def getInitialState(self):
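        # Start by expecting a 4-byte big-endian length prefix.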
return self._getHeader, 4
def _getHeader(self, msg):
length, = unpack("!i", msg)
if length > self.MAX_LENGTH:
self.transport.loseConnection()
return
return self._getString, length
def _getString(self, msg):
self.stringReceived(msg)
return self._getHeader, 4
def stringReceived(self, msg):
"""Override this.
"""
raise NotImplementedError
def sendString(self, data):
"""Send an int32-prefixed string to the other end of the connection.
"""
self.transport.write(pack("!i",len(data))+data)
class TestInt32(MyInt32StringReceiver):
def connectionMade(self):
self.received = []
def stringReceived(self, s):
self.received.append(s)
MAX_LENGTH = 50
closed = 0
def connectionLost(self, reason):
self.closed = 1
class Int32TestCase(test_protocols.Int32TestCase):
protocol = TestInt32
def testBigReceive(self):
r = self.getProtocol()
big = ""
for s in self.strings * 4:
big += pack("!i",len(s))+s
r.dataReceived(big)
self.assertEquals(r.received, self.strings * 4)
| gpl-2.0 | 478,055,204,393,950,300 | 8,518,234,622,106,604,000 | 23.920635 | 76 | 0.636306 | false |
PythonSanSebastian/epcon | p3/management/commands/attendify_speakers_xlsx.py | 2 | 6861 | # -*- coding: utf-8 -*-
""" Update an Attendify speakers XLSX file with the current list of
speakers.
Usage: manage.py attendify_speakers_xlsx ep2016 speakers.xlsx
Note that for Attendify you have to download the speakers before
running this script, since they add meta data to the downloaded
file which has to be kept around when uploading it again.
The script updates speakers.xlsx in place. Unfortunately, Attendify
currently has a bug in that it doesn't accept the file format
generated by openpyxl. Opening the file in LibreOffice and saving
it (without changes) fixes this as work-around.
Attendify Worksheet "Schedule" format
-------------------------------------
Row A4: First Name, Last Name, Company (Optional), Position
(Optional), Group (Optional). Profile (Optional), Email
(Optional), Phone (Optional), Twitter (Optional), Facebook
(Optional), LinkedIn (Optional), Google+ (Optional), UID (do not
delete)
Row A6: Start of data
"""
from django.core.management.base import BaseCommand, CommandError
from django.core import urlresolvers
from django.conf import settings
from django.utils.html import strip_tags
from conference import models as cmodels
from conference import utils
from p3 import models
import datetime
from collections import defaultdict
from optparse import make_option
import operator
import markdown2
import openpyxl
### Globals
# Debug output ?
_debug = 1
# These must match the talk .type or .admin_type
from accepted_talks import TYPE_NAMES
### Helpers
def profile_url(user):
return urlresolvers.reverse('conference-profile',
args=[user.attendeeprofile.slug])
def format_text(text, remove_tags=False, output_html=True):
# Remove whitespace
text = text.strip()
if not text:
return text
# Remove links, tags, etc.
if remove_tags:
text = strip_tags(text)
# Remove quotes
if text[0] == '"' and text[-1] == '"':
text = text[1:-1]
# Convert markdown markup to HTML
if output_html:
text = markdown2.markdown(text)
return text
def add_speaker(data, speaker):
# Get speaker profile
user = speaker.user
profile = cmodels.AttendeeProfile.objects.get(user=user)
p3profile = models.P3Profile.objects.get(profile=profile)
# Skip speakers without public profile. Speaker profiles must be
# public, but you never know. See conference/models.py
if profile.visibility != 'p':
return
# Collect data
first_name = speaker.user.first_name.title()
last_name = speaker.user.last_name.title()
company = profile.company
position = profile.job_title
profile_text = (u'<a href="%s%s">Profile on EuroPython Website</a>' %
(settings.DEFAULT_URL_PREFIX, profile_url(user)))
twitter = p3profile.twitter
if twitter.startswith(('https://twitter.com/', 'http://twitter.com/')):
twitter = twitter.split('/')[-1]
# Skip special entries
full_name = first_name + last_name
if first_name == 'To Be' and last_name == 'Announced':
return
# UID
uid = u''
data.append((
first_name,
last_name,
company,
position,
u'', # group
profile_text,
u'', # email: not published
u'', # phone: not published
twitter,
u'', # facebook
u'', # linkedin
u'', # google+
uid))
# Start row of data in spreadsheet (Python 0-based index)
SPEAKERS_WS_START_DATA = 5
# Column number of UID columns (Python 0-based index)
SPEAKERS_UID_COLUMN = 12
# Number of columns to make row unique (first, last, company)
SPEAKERS_UNIQUE_COLS = 3
def update_speakers(speakers_xlsx, new_data, updated_xlsx=None):
# Load workbook
wb = openpyxl.load_workbook(speakers_xlsx)
assert wb.sheetnames == [u'Instructions', u'Speakers', u'System']
ws = wb['Speakers']
# Extract data values
ws_data = list(ws.values)[SPEAKERS_WS_START_DATA:]
print ('read %i data lines' % len(ws_data))
print ('first line: %r' % ws_data[:1])
print ('last line: %r' % ws_data[-1:])
# Reconcile UIDs / talks
uids = {}
for line in ws_data:
uid = line[SPEAKERS_UID_COLUMN]
if not uid:
continue
uids[tuple(line[:SPEAKERS_UNIQUE_COLS])] = uid
# Add UID to new data
new_speakers = []
for line in new_data:
key = tuple(line[:SPEAKERS_UNIQUE_COLS])
if key not in uids:
print ('New speaker %s found' % (key,))
uid = u''
else:
uid = uids[key]
line = tuple(line[:SPEAKERS_UID_COLUMN]) + (uid,)
new_speakers.append(line)
new_data = new_speakers
# Replace old data with new data
old_data_rows = len(ws_data)
new_data_rows = len(new_data)
print ('new data: %i data lines' % new_data_rows)
offset = SPEAKERS_WS_START_DATA + 1
print ('new_data = %i rows' % len(new_data))
for j, row in enumerate(ws[offset: offset + new_data_rows - 1]):
new_row = new_data[j]
if _debug:
print ('updating row %i with %r' % (j, new_row))
if len(row) > len(new_row):
row = row[:len(new_row)]
for i, cell in enumerate(row):
cell.value = new_row[i]
# Overwrite unused cells with None
if new_data_rows < old_data_rows:
for j, row in enumerate(ws[offset + new_data_rows + 1:
offset + old_data_rows + 1]):
if _debug:
print ('clearing row %i' % (j,))
for i, cell in enumerate(row):
cell.value = None
# Write updated data
if updated_xlsx is None:
updated_xlsx = speakers_xlsx
wb.save(updated_xlsx)
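# Illustrative sketch (not part of the original command): update_speakers() can
# be exercised directly against a workbook downloaded from Attendify. The file
# names and the sample row below are hypothetical; the tuple layout matches
# add_speaker() above (12 data columns plus the trailing UID column).
def _example_update_speakers():
    sample_rows = [
        (u'Ada', u'Lovelace', u'', u'', u'',
         u'<a href="#">Profile on EuroPython Website</a>',
         u'', u'', u'adalovelace', u'', u'', u'', u''),
    ]
    update_speakers('speakers.xlsx', sample_rows,
                    updated_xlsx='speakers_updated.xlsx')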
###
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
# make_option('--option',
# action='store',
# dest='option_attr',
# default=0,
# type='int',
# help='Help text',
# ),
)
def handle(self, *args, **options):
try:
conference = args[0]
except IndexError:
raise CommandError('conference not specified')
try:
speakers_xlsx = args[1]
except IndexError:
raise CommandError('XLSX file not specified')
# Get speaker records
speakers = set()
talks = cmodels.Talk.objects.accepted(conference)
for t in talks:
speakers |= set(t.get_all_speakers())
# Collect profiles
data = []
for speaker in speakers:
add_speaker(data, speaker)
data.sort()
# Update spreadsheet with new data
update_speakers(speakers_xlsx, data)
| bsd-2-clause | 3,491,019,686,800,562,000 | 109,022,809,648,480,460 | 28.573276 | 75 | 0.603265 | false |
modulexcite/PTVS | Python/Tests/TestData/VirtualEnv/env/Lib/encodings/iso8859_5.py | 93 | 13578 | """ Python Character Mapping Codec iso8859_5 generated from 'MAPPINGS/ISO8859/8859-5.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-5',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\u0401' # 0xA1 -> CYRILLIC CAPITAL LETTER IO
u'\u0402' # 0xA2 -> CYRILLIC CAPITAL LETTER DJE
u'\u0403' # 0xA3 -> CYRILLIC CAPITAL LETTER GJE
u'\u0404' # 0xA4 -> CYRILLIC CAPITAL LETTER UKRAINIAN IE
u'\u0405' # 0xA5 -> CYRILLIC CAPITAL LETTER DZE
u'\u0406' # 0xA6 -> CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0407' # 0xA7 -> CYRILLIC CAPITAL LETTER YI
u'\u0408' # 0xA8 -> CYRILLIC CAPITAL LETTER JE
u'\u0409' # 0xA9 -> CYRILLIC CAPITAL LETTER LJE
u'\u040a' # 0xAA -> CYRILLIC CAPITAL LETTER NJE
u'\u040b' # 0xAB -> CYRILLIC CAPITAL LETTER TSHE
u'\u040c' # 0xAC -> CYRILLIC CAPITAL LETTER KJE
u'\xad' # 0xAD -> SOFT HYPHEN
u'\u040e' # 0xAE -> CYRILLIC CAPITAL LETTER SHORT U
u'\u040f' # 0xAF -> CYRILLIC CAPITAL LETTER DZHE
u'\u0410' # 0xB0 -> CYRILLIC CAPITAL LETTER A
u'\u0411' # 0xB1 -> CYRILLIC CAPITAL LETTER BE
u'\u0412' # 0xB2 -> CYRILLIC CAPITAL LETTER VE
u'\u0413' # 0xB3 -> CYRILLIC CAPITAL LETTER GHE
u'\u0414' # 0xB4 -> CYRILLIC CAPITAL LETTER DE
u'\u0415' # 0xB5 -> CYRILLIC CAPITAL LETTER IE
u'\u0416' # 0xB6 -> CYRILLIC CAPITAL LETTER ZHE
u'\u0417' # 0xB7 -> CYRILLIC CAPITAL LETTER ZE
u'\u0418' # 0xB8 -> CYRILLIC CAPITAL LETTER I
u'\u0419' # 0xB9 -> CYRILLIC CAPITAL LETTER SHORT I
u'\u041a' # 0xBA -> CYRILLIC CAPITAL LETTER KA
u'\u041b' # 0xBB -> CYRILLIC CAPITAL LETTER EL
u'\u041c' # 0xBC -> CYRILLIC CAPITAL LETTER EM
u'\u041d' # 0xBD -> CYRILLIC CAPITAL LETTER EN
u'\u041e' # 0xBE -> CYRILLIC CAPITAL LETTER O
u'\u041f' # 0xBF -> CYRILLIC CAPITAL LETTER PE
u'\u0420' # 0xC0 -> CYRILLIC CAPITAL LETTER ER
u'\u0421' # 0xC1 -> CYRILLIC CAPITAL LETTER ES
u'\u0422' # 0xC2 -> CYRILLIC CAPITAL LETTER TE
u'\u0423' # 0xC3 -> CYRILLIC CAPITAL LETTER U
u'\u0424' # 0xC4 -> CYRILLIC CAPITAL LETTER EF
u'\u0425' # 0xC5 -> CYRILLIC CAPITAL LETTER HA
u'\u0426' # 0xC6 -> CYRILLIC CAPITAL LETTER TSE
u'\u0427' # 0xC7 -> CYRILLIC CAPITAL LETTER CHE
u'\u0428' # 0xC8 -> CYRILLIC CAPITAL LETTER SHA
u'\u0429' # 0xC9 -> CYRILLIC CAPITAL LETTER SHCHA
u'\u042a' # 0xCA -> CYRILLIC CAPITAL LETTER HARD SIGN
u'\u042b' # 0xCB -> CYRILLIC CAPITAL LETTER YERU
u'\u042c' # 0xCC -> CYRILLIC CAPITAL LETTER SOFT SIGN
u'\u042d' # 0xCD -> CYRILLIC CAPITAL LETTER E
u'\u042e' # 0xCE -> CYRILLIC CAPITAL LETTER YU
u'\u042f' # 0xCF -> CYRILLIC CAPITAL LETTER YA
u'\u0430' # 0xD0 -> CYRILLIC SMALL LETTER A
u'\u0431' # 0xD1 -> CYRILLIC SMALL LETTER BE
u'\u0432' # 0xD2 -> CYRILLIC SMALL LETTER VE
u'\u0433' # 0xD3 -> CYRILLIC SMALL LETTER GHE
u'\u0434' # 0xD4 -> CYRILLIC SMALL LETTER DE
u'\u0435' # 0xD5 -> CYRILLIC SMALL LETTER IE
u'\u0436' # 0xD6 -> CYRILLIC SMALL LETTER ZHE
u'\u0437' # 0xD7 -> CYRILLIC SMALL LETTER ZE
u'\u0438' # 0xD8 -> CYRILLIC SMALL LETTER I
u'\u0439' # 0xD9 -> CYRILLIC SMALL LETTER SHORT I
u'\u043a' # 0xDA -> CYRILLIC SMALL LETTER KA
u'\u043b' # 0xDB -> CYRILLIC SMALL LETTER EL
u'\u043c' # 0xDC -> CYRILLIC SMALL LETTER EM
u'\u043d' # 0xDD -> CYRILLIC SMALL LETTER EN
u'\u043e' # 0xDE -> CYRILLIC SMALL LETTER O
u'\u043f' # 0xDF -> CYRILLIC SMALL LETTER PE
u'\u0440' # 0xE0 -> CYRILLIC SMALL LETTER ER
u'\u0441' # 0xE1 -> CYRILLIC SMALL LETTER ES
u'\u0442' # 0xE2 -> CYRILLIC SMALL LETTER TE
u'\u0443' # 0xE3 -> CYRILLIC SMALL LETTER U
u'\u0444' # 0xE4 -> CYRILLIC SMALL LETTER EF
u'\u0445' # 0xE5 -> CYRILLIC SMALL LETTER HA
u'\u0446' # 0xE6 -> CYRILLIC SMALL LETTER TSE
u'\u0447' # 0xE7 -> CYRILLIC SMALL LETTER CHE
u'\u0448' # 0xE8 -> CYRILLIC SMALL LETTER SHA
u'\u0449' # 0xE9 -> CYRILLIC SMALL LETTER SHCHA
u'\u044a' # 0xEA -> CYRILLIC SMALL LETTER HARD SIGN
u'\u044b' # 0xEB -> CYRILLIC SMALL LETTER YERU
u'\u044c' # 0xEC -> CYRILLIC SMALL LETTER SOFT SIGN
u'\u044d' # 0xED -> CYRILLIC SMALL LETTER E
u'\u044e' # 0xEE -> CYRILLIC SMALL LETTER YU
u'\u044f' # 0xEF -> CYRILLIC SMALL LETTER YA
u'\u2116' # 0xF0 -> NUMERO SIGN
u'\u0451' # 0xF1 -> CYRILLIC SMALL LETTER IO
u'\u0452' # 0xF2 -> CYRILLIC SMALL LETTER DJE
u'\u0453' # 0xF3 -> CYRILLIC SMALL LETTER GJE
u'\u0454' # 0xF4 -> CYRILLIC SMALL LETTER UKRAINIAN IE
u'\u0455' # 0xF5 -> CYRILLIC SMALL LETTER DZE
u'\u0456' # 0xF6 -> CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
u'\u0457' # 0xF7 -> CYRILLIC SMALL LETTER YI
u'\u0458' # 0xF8 -> CYRILLIC SMALL LETTER JE
u'\u0459' # 0xF9 -> CYRILLIC SMALL LETTER LJE
u'\u045a' # 0xFA -> CYRILLIC SMALL LETTER NJE
u'\u045b' # 0xFB -> CYRILLIC SMALL LETTER TSHE
u'\u045c' # 0xFC -> CYRILLIC SMALL LETTER KJE
u'\xa7' # 0xFD -> SECTION SIGN
u'\u045e' # 0xFE -> CYRILLIC SMALL LETTER SHORT U
u'\u045f' # 0xFF -> CYRILLIC SMALL LETTER DZHE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
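# Illustrative sketch (not part of the generated module): the tables above can
# be exercised directly through the Codec class; 0xB6 maps to CYRILLIC CAPITAL
# LETTER ZHE per the decoding table. The function name is hypothetical.
def _example_codec_roundtrip():
    encoded, _ = Codec().encode(u'\u0416')   # -> '\xb6'
    decoded, _ = Codec().decode(encoded)     # -> u'\u0416'
    return encoded, decoded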
| apache-2.0 | 5,462,717,361,390,391,000 | -8,841,477,517,486,255,000 | 42.228013 | 107 | 0.526734 | false |
Nitaco/ansible | test/units/modules/network/nxos/test_nxos_portchannel.py | 53 | 2843 | # (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.tests.mock import patch
from ansible.modules.network.nxos import _nxos_portchannel
from .nxos_module import TestNxosModule, set_module_args
class TestNxosPortchannelModule(TestNxosModule):
module = _nxos_portchannel
def setUp(self):
super(TestNxosPortchannelModule, self).setUp()
self.mock_run_commands = patch('ansible.modules.network.nxos._nxos_portchannel.run_commands')
self.run_commands = self.mock_run_commands.start()
self.mock_load_config = patch('ansible.modules.network.nxos._nxos_portchannel.load_config')
self.load_config = self.mock_load_config.start()
self.mock_get_config = patch('ansible.modules.network.nxos._nxos_portchannel.get_config')
self.get_config = self.mock_get_config.start()
self.mock_get_capabilities = patch('ansible.modules.network.nxos._nxos_portchannel.get_capabilities')
self.get_capabilities = self.mock_get_capabilities.start()
self.get_capabilities.return_value = {'network_api': 'cliconf'}
def tearDown(self):
super(TestNxosPortchannelModule, self).tearDown()
self.mock_run_commands.stop()
self.mock_load_config.stop()
self.mock_get_config.stop()
self.mock_get_capabilities.stop()
def load_fixtures(self, commands=None, device=''):
self.load_config.return_value = None
def test_nxos_portchannel(self):
set_module_args(dict(group='99',
members=['Ethernet2/1', 'Ethernet2/2'],
mode='active',
state='present'))
result = self.execute_module(changed=True)
self.assertEqual(result['commands'], ['interface port-channel99',
'interface Ethernet2/1',
'channel-group 99 mode active',
'interface Ethernet2/2',
'channel-group 99 mode active'])
| gpl-3.0 | -8,741,480,723,807,831,000 | -8,612,564,078,928,236,000 | 41.432836 | 109 | 0.647907 | false |
skidzo/sympy | sympy/physics/quantum/cartesian.py | 98 | 8766 | """Operators and states for 1D cartesian position and momentum.
TODO:
* Add 3D classes to mappings in operatorset.py
"""
from __future__ import print_function, division
from sympy import DiracDelta, exp, I, Interval, pi, S, sqrt
from sympy.core.compatibility import range
from sympy.physics.quantum.constants import hbar
from sympy.physics.quantum.hilbert import L2
from sympy.physics.quantum.operator import DifferentialOperator, HermitianOperator
from sympy.physics.quantum.state import Ket, Bra, State
__all__ = [
'XOp',
'YOp',
'ZOp',
'PxOp',
'X',
'Y',
'Z',
'Px',
'XKet',
'XBra',
'PxKet',
'PxBra',
'PositionState3D',
'PositionKet3D',
'PositionBra3D'
]
#-------------------------------------------------------------------------
# Position operators
#-------------------------------------------------------------------------
class XOp(HermitianOperator):
"""1D cartesian position operator."""
@classmethod
def default_args(self):
return ("X",)
@classmethod
def _eval_hilbert_space(self, args):
return L2(Interval(S.NegativeInfinity, S.Infinity))
def _eval_commutator_PxOp(self, other):
return I*hbar
def _apply_operator_XKet(self, ket):
return ket.position*ket
def _apply_operator_PositionKet3D(self, ket):
return ket.position_x*ket
def _represent_PxKet(self, basis, **options):
index = options.pop("index", 1)
states = basis._enumerate_state(2, start_index=index)
coord1 = states[0].momentum
coord2 = states[1].momentum
d = DifferentialOperator(coord1)
delta = DiracDelta(coord1 - coord2)
return I*hbar*(d*delta)
class YOp(HermitianOperator):
""" Y cartesian coordinate operator (for 2D or 3D systems) """
@classmethod
def default_args(self):
return ("Y",)
@classmethod
def _eval_hilbert_space(self, args):
return L2(Interval(S.NegativeInfinity, S.Infinity))
def _apply_operator_PositionKet3D(self, ket):
return ket.position_y*ket
class ZOp(HermitianOperator):
""" Z cartesian coordinate operator (for 3D systems) """
@classmethod
def default_args(self):
return ("Z",)
@classmethod
def _eval_hilbert_space(self, args):
return L2(Interval(S.NegativeInfinity, S.Infinity))
def _apply_operator_PositionKet3D(self, ket):
return ket.position_z*ket
#-------------------------------------------------------------------------
# Momentum operators
#-------------------------------------------------------------------------
class PxOp(HermitianOperator):
"""1D cartesian momentum operator."""
@classmethod
def default_args(self):
return ("Px",)
@classmethod
def _eval_hilbert_space(self, args):
return L2(Interval(S.NegativeInfinity, S.Infinity))
def _apply_operator_PxKet(self, ket):
return ket.momentum*ket
def _represent_XKet(self, basis, **options):
index = options.pop("index", 1)
states = basis._enumerate_state(2, start_index=index)
coord1 = states[0].position
coord2 = states[1].position
d = DifferentialOperator(coord1)
delta = DiracDelta(coord1 - coord2)
return -I*hbar*(d*delta)
X = XOp('X')
Y = YOp('Y')
Z = ZOp('Z')
Px = PxOp('Px')
#-------------------------------------------------------------------------
# Position eigenstates
#-------------------------------------------------------------------------
class XKet(Ket):
"""1D cartesian position eigenket."""
@classmethod
def _operators_to_state(self, op, **options):
return self.__new__(self, *_lowercase_labels(op), **options)
def _state_to_operators(self, op_class, **options):
return op_class.__new__(op_class,
*_uppercase_labels(self), **options)
@classmethod
def default_args(self):
return ("x",)
@classmethod
def dual_class(self):
return XBra
@property
def position(self):
"""The position of the state."""
return self.label[0]
def _enumerate_state(self, num_states, **options):
return _enumerate_continuous_1D(self, num_states, **options)
def _eval_innerproduct_XBra(self, bra, **hints):
return DiracDelta(self.position - bra.position)
def _eval_innerproduct_PxBra(self, bra, **hints):
return exp(-I*self.position*bra.momentum/hbar)/sqrt(2*pi*hbar)
class XBra(Bra):
"""1D cartesian position eigenbra."""
@classmethod
def default_args(self):
return ("x",)
@classmethod
def dual_class(self):
return XKet
@property
def position(self):
"""The position of the state."""
return self.label[0]
class PositionState3D(State):
""" Base class for 3D cartesian position eigenstates """
@classmethod
def _operators_to_state(self, op, **options):
return self.__new__(self, *_lowercase_labels(op), **options)
def _state_to_operators(self, op_class, **options):
return op_class.__new__(op_class,
*_uppercase_labels(self), **options)
@classmethod
def default_args(self):
return ("x", "y", "z")
@property
def position_x(self):
""" The x coordinate of the state """
return self.label[0]
@property
def position_y(self):
""" The y coordinate of the state """
return self.label[1]
@property
def position_z(self):
""" The z coordinate of the state """
return self.label[2]
class PositionKet3D(Ket, PositionState3D):
""" 3D cartesian position eigenket """
def _eval_innerproduct_PositionBra3D(self, bra, **options):
x_diff = self.position_x - bra.position_x
y_diff = self.position_y - bra.position_y
z_diff = self.position_z - bra.position_z
return DiracDelta(x_diff)*DiracDelta(y_diff)*DiracDelta(z_diff)
@classmethod
def dual_class(self):
return PositionBra3D
class PositionBra3D(Bra, PositionState3D):
""" 3D cartesian position eigenbra """
@classmethod
def dual_class(self):
return PositionKet3D
#-------------------------------------------------------------------------
# Momentum eigenstates
#-------------------------------------------------------------------------
class PxKet(Ket):
"""1D cartesian momentum eigenket."""
@classmethod
def _operators_to_state(self, op, **options):
return self.__new__(self, *_lowercase_labels(op), **options)
def _state_to_operators(self, op_class, **options):
return op_class.__new__(op_class,
*_uppercase_labels(self), **options)
@classmethod
def default_args(self):
return ("px",)
@classmethod
def dual_class(self):
return PxBra
@property
def momentum(self):
"""The momentum of the state."""
return self.label[0]
def _enumerate_state(self, *args, **options):
return _enumerate_continuous_1D(self, *args, **options)
def _eval_innerproduct_XBra(self, bra, **hints):
return exp(I*self.momentum*bra.position/hbar)/sqrt(2*pi*hbar)
def _eval_innerproduct_PxBra(self, bra, **hints):
return DiracDelta(self.momentum - bra.momentum)
class PxBra(Bra):
"""1D cartesian momentum eigenbra."""
@classmethod
def default_args(self):
return ("px",)
@classmethod
def dual_class(self):
return PxKet
@property
def momentum(self):
"""The momentum of the state."""
return self.label[0]
#-------------------------------------------------------------------------
# Global helper functions
#-------------------------------------------------------------------------
def _enumerate_continuous_1D(*args, **options):
state = args[0]
num_states = args[1]
state_class = state.__class__
index_list = options.pop('index_list', [])
if len(index_list) == 0:
start_index = options.pop('start_index', 1)
index_list = list(range(start_index, start_index + num_states))
enum_states = [0 for i in range(len(index_list))]
for i, ind in enumerate(index_list):
label = state.args[0]
enum_states[i] = state_class(str(label) + "_" + str(ind), **options)
return enum_states
def _lowercase_labels(ops):
if not isinstance(ops, set):
ops = [ops]
return [str(arg.label[0]).lower() for arg in ops]
def _uppercase_labels(ops):
if not isinstance(ops, set):
ops = [ops]
new_args = [str(arg.label[0])[0].upper() +
str(arg.label[0])[1:] for arg in ops]
return new_args
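# Illustrative sketch (not part of the original module): a quick use of the
# classes defined above. The function name is hypothetical; it applies the
# position operator to a position eigenket and evaluates the <px|x> overlap.
def _example_cartesian_usage():
    from sympy.physics.quantum.qapply import qapply
    from sympy.physics.quantum.innerproduct import InnerProduct
    ket = XKet('x')
    eigen = qapply(X*ket)                     # x*|x>, via _apply_operator_XKet
    overlap = InnerProduct(PxBra('px'), ket)  # <px|x>
    return eigen, overlap.doit()              # doit() gives the plane-wave kernel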
| bsd-3-clause | -1,574,032,976,420,027,600 | 3,752,450,210,134,690,000 | 24.782353 | 82 | 0.563199 | false |
pelodelfuego/word2vec-toolbox | toolbox/cpLib/test/testConcept.py | 1 | 2049 | #!/usr/bin/env python
# encoding: utf-8
import unittest
import cpLib.concept as cp
import cpLib.conceptDB as db
import numpy as np
class ConceptTest(unittest.TestCase):
def setUp(self):
self.d = db.DB('../data/voc/npy/googleNews_mini.npy')
def test_transform(self):
k = self.d.get('king')
norm = np.linalg.norm(k.vect)
k_p = k.polarVect()
k_a = k.angularVect()
for a, b in zip(np.concatenate(([norm], k_a)), k_p):
self.assertAlmostEquals(a, b, places=5)
# DISTANCE
def test_cosSim(self):
k = self.d.get('king')
q = self.d.get('queen')
self.assertAlmostEquals(cp.cosSim(k, q), cp.cosSim(q, k), places=5)
self.assertAlmostEquals(cp.cosSim(k, k), 1.0, places=5)
def test_euclDist(self):
k = self.d.get('king')
q = self.d.get('queen')
self.assertEqual(cp.euclDist(k, q), cp.euclDist(q, k))
self.assertAlmostEquals(cp.euclDist(k, k), 0.0, places=5)
def test_manaDist(self):
k = self.d.get('king')
q = self.d.get('queen')
self.assertEqual(cp.manaDist(k, q), cp.manaDist(q, k))
self.assertAlmostEquals(cp.manaDist(k, k), 0.0, places=5)
# OPERATION
def test_arith(self):
# k - m = q - w
k = self.d.get('king')
q = self.d.get('queen')
m = self.d.get('man')
w = self.d.get('woman')
v1 = cp.add(k, w)
v1 = cp.sub(v1, m)
v2 = cp.sub(k, m)
v2 = cp.add(v2, w)
v3 = cp.addSub([k, w], [m])
v4 = cp.sub(k.normalized(), m.normalized())
v4 = cp.add(v4, w.normalized())
self.assertAlmostEquals(cp.cosSim(v1, v2), 1.0, places=5)
self.assertAlmostEquals(cp.cosSim(v3, v4), 1.0, places=5)
self.assertEquals(self.d.find_cosSim(v1)[0][1], 'queen')
self.assertEquals(self.d.find_cosSim(v2)[0][1], 'queen')
self.assertEquals(self.d.find_cosSim(v3)[0][1], 'queen')
self.assertEquals(self.d.find_cosSim(v4)[0][1], 'queen')
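# Illustrative sketch (not part of the original tests): the cosine similarity
# exercised above is the normalized dot product; a minimal reference
# implementation for two numpy vectors, for comparison only.
def _cos_sim_reference(u, v):
    return np.dot(u, v) / (np.linalg.norm(u) * np.linalg.norm(v))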
| gpl-3.0 | 1,670,949,941,857,759,500 | -3,214,873,028,406,419,500 | 26.32 | 75 | 0.561249 | false |
ltilve/ChromiumGStreamerBackend | tools/telemetry/third_party/gsutilz/gslib/tests/test_cp.py | 11 | 93771 | # -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for cp command."""
from __future__ import absolute_import
import base64
import binascii
import datetime
import httplib
import logging
import os
import pickle
import pkgutil
import random
import re
import string
import sys
from apitools.base.py import exceptions as apitools_exceptions
import boto
from boto import storage_uri
from boto.exception import ResumableTransferDisposition
from boto.exception import ResumableUploadException
from boto.exception import StorageResponseError
from boto.storage_uri import BucketStorageUri
from gslib.cloud_api import ResumableDownloadException
from gslib.cloud_api import ResumableUploadException
from gslib.cloud_api import ResumableUploadStartOverException
from gslib.copy_helper import GetTrackerFilePath
from gslib.copy_helper import TrackerFileType
from gslib.cs_api_map import ApiSelector
from gslib.gcs_json_api import GcsJsonApi
from gslib.hashing_helper import CalculateMd5FromContents
from gslib.storage_url import StorageUrlFromString
import gslib.tests.testcase as testcase
from gslib.tests.testcase.base import NotParallelizable
from gslib.tests.testcase.integration_testcase import SkipForS3
from gslib.tests.util import GenerationFromURI as urigen
from gslib.tests.util import HAS_S3_CREDS
from gslib.tests.util import ObjectToURI as suri
from gslib.tests.util import PerformsFileToObjectUpload
from gslib.tests.util import SetBotoConfigForTest
from gslib.tests.util import unittest
from gslib.third_party.storage_apitools import storage_v1_messages as apitools_messages
from gslib.tracker_file import DeleteTrackerFile
from gslib.tracker_file import GetRewriteTrackerFilePath
from gslib.util import EIGHT_MIB
from gslib.util import IS_WINDOWS
from gslib.util import MakeHumanReadable
from gslib.util import ONE_KIB
from gslib.util import ONE_MIB
from gslib.util import Retry
from gslib.util import START_CALLBACK_PER_BYTES
from gslib.util import UTF8
# Custom test callbacks must be pickleable, and therefore at global scope.
class _HaltingCopyCallbackHandler(object):
"""Test callback handler for intentionally stopping a resumable transfer."""
def __init__(self, is_upload, halt_at_byte):
self._is_upload = is_upload
self._halt_at_byte = halt_at_byte
# pylint: disable=invalid-name
def call(self, total_bytes_transferred, total_size):
"""Forcibly exits if the transfer has passed the halting point."""
if total_bytes_transferred >= self._halt_at_byte:
sys.stderr.write(
'Halting transfer after byte %s. %s/%s transferred.\r\n' % (
self._halt_at_byte, MakeHumanReadable(total_bytes_transferred),
MakeHumanReadable(total_size)))
if self._is_upload:
raise ResumableUploadException('Artifically halting upload.')
else:
raise ResumableDownloadException('Artifically halting download.')
class _JSONForceHTTPErrorCopyCallbackHandler(object):
"""Test callback handler that raises an arbitrary HTTP error exception."""
def __init__(self, startover_at_byte, http_error_num):
self._startover_at_byte = startover_at_byte
self._http_error_num = http_error_num
self.started_over_once = False
# pylint: disable=invalid-name
def call(self, total_bytes_transferred, total_size):
"""Forcibly exits if the transfer has passed the halting point."""
if (total_bytes_transferred >= self._startover_at_byte
and not self.started_over_once):
sys.stderr.write(
'Forcing HTTP error %s after byte %s. '
'%s/%s transferred.\r\n' % (
self._http_error_num,
self._startover_at_byte,
MakeHumanReadable(total_bytes_transferred),
MakeHumanReadable(total_size)))
self.started_over_once = True
raise apitools_exceptions.HttpError(
{'status': self._http_error_num}, None, None)
class _XMLResumableUploadStartOverCopyCallbackHandler(object):
"""Test callback handler that raises start-over exception during upload."""
def __init__(self, startover_at_byte):
self._startover_at_byte = startover_at_byte
self.started_over_once = False
# pylint: disable=invalid-name
def call(self, total_bytes_transferred, total_size):
"""Forcibly exits if the transfer has passed the halting point."""
if (total_bytes_transferred >= self._startover_at_byte
and not self.started_over_once):
sys.stderr.write(
'Forcing ResumableUpload start over error after byte %s. '
'%s/%s transferred.\r\n' % (
self._startover_at_byte,
MakeHumanReadable(total_bytes_transferred),
MakeHumanReadable(total_size)))
self.started_over_once = True
raise boto.exception.ResumableUploadException(
'Forcing upload start over',
ResumableTransferDisposition.START_OVER)
class _DeleteBucketThenStartOverCopyCallbackHandler(object):
"""Test callback handler that deletes bucket then raises start-over."""
def __init__(self, startover_at_byte, bucket_uri):
self._startover_at_byte = startover_at_byte
self._bucket_uri = bucket_uri
self.started_over_once = False
# pylint: disable=invalid-name
def call(self, total_bytes_transferred, total_size):
"""Forcibly exits if the transfer has passed the halting point."""
if (total_bytes_transferred >= self._startover_at_byte
and not self.started_over_once):
      sys.stderr.write('Deleting bucket (%s)' % (self._bucket_uri.bucket_name))
@Retry(StorageResponseError, tries=5, timeout_secs=1)
def DeleteBucket():
bucket_list = list(self._bucket_uri.list_bucket(all_versions=True))
for k in bucket_list:
self._bucket_uri.get_bucket().delete_key(k.name,
version_id=k.version_id)
self._bucket_uri.delete_bucket()
DeleteBucket()
sys.stderr.write(
'Forcing ResumableUpload start over error after byte %s. '
'%s/%s transferred.\r\n' % (
self._startover_at_byte,
MakeHumanReadable(total_bytes_transferred),
MakeHumanReadable(total_size)))
self.started_over_once = True
raise ResumableUploadStartOverException(
'Artificially forcing start-over')
class _RewriteHaltException(Exception):
pass
class _HaltingRewriteCallbackHandler(object):
"""Test callback handler for intentionally stopping a rewrite operation."""
def __init__(self, halt_at_byte):
self._halt_at_byte = halt_at_byte
# pylint: disable=invalid-name
def call(self, total_bytes_rewritten, unused_total_size):
"""Forcibly exits if the operation has passed the halting point."""
if total_bytes_rewritten >= self._halt_at_byte:
raise _RewriteHaltException('Artificially halting rewrite')
class _EnsureRewriteResumeCallbackHandler(object):
"""Test callback handler for ensuring a rewrite operation resumed."""
def __init__(self, required_byte):
self._required_byte = required_byte
# pylint: disable=invalid-name
def call(self, total_bytes_rewritten, unused_total_size):
"""Forcibly exits if the operation has passed the halting point."""
if total_bytes_rewritten <= self._required_byte:
raise _RewriteHaltException(
'Rewrite did not resume; %s bytes written, but %s bytes should '
'have already been written.' % (total_bytes_rewritten,
self._required_byte))
class _ResumableUploadRetryHandler(object):
"""Test callback handler for causing retries during a resumable transfer."""
def __init__(self, retry_at_byte, exception_to_raise, exc_args,
num_retries=1):
self._retry_at_byte = retry_at_byte
self._exception_to_raise = exception_to_raise
self._exception_args = exc_args
self._num_retries = num_retries
self._retries_made = 0
# pylint: disable=invalid-name
def call(self, total_bytes_transferred, unused_total_size):
"""Cause a single retry at the retry point."""
if (total_bytes_transferred >= self._retry_at_byte
and self._retries_made < self._num_retries):
self._retries_made += 1
raise self._exception_to_raise(*self._exception_args)
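# Illustrative sketch (not part of the original tests): the handlers above are
# handed to gsutil through its --testcallbackfile flag by pickling an instance
# into a temporary file, as the tests below do inline. The helper name is
# hypothetical.
def _write_callback_handler_file(handler, path):
  """Pickles a test callback handler to path and returns the path."""
  with open(path, 'wb') as fp:
    fp.write(pickle.dumps(handler))
  return path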
class TestCp(testcase.GsUtilIntegrationTestCase):
"""Integration tests for cp command."""
# For tests that artificially halt, we need to ensure at least one callback
# occurs.
halt_size = START_CALLBACK_PER_BYTES * 2
def _get_test_file(self, name):
contents = pkgutil.get_data('gslib', 'tests/test_data/%s' % name)
return self.CreateTempFile(file_name=name, contents=contents)
@PerformsFileToObjectUpload
def test_noclobber(self):
key_uri = self.CreateObject(contents='foo')
fpath = self.CreateTempFile(contents='bar')
stderr = self.RunGsUtil(['cp', '-n', fpath, suri(key_uri)],
return_stderr=True)
self.assertIn('Skipping existing item: %s' % suri(key_uri), stderr)
self.assertEqual(key_uri.get_contents_as_string(), 'foo')
stderr = self.RunGsUtil(['cp', '-n', suri(key_uri), fpath],
return_stderr=True)
with open(fpath, 'r') as f:
self.assertIn('Skipping existing item: %s' % suri(f), stderr)
self.assertEqual(f.read(), 'bar')
def test_dest_bucket_not_exist(self):
fpath = self.CreateTempFile(contents='foo')
invalid_bucket_uri = (
'%s://%s' % (self.default_provider, self.nonexistent_bucket_name))
stderr = self.RunGsUtil(['cp', fpath, invalid_bucket_uri],
expected_status=1, return_stderr=True)
self.assertIn('does not exist.', stderr)
def test_copy_in_cloud_noclobber(self):
bucket1_uri = self.CreateBucket()
bucket2_uri = self.CreateBucket()
key_uri = self.CreateObject(bucket_uri=bucket1_uri, contents='foo')
stderr = self.RunGsUtil(['cp', suri(key_uri), suri(bucket2_uri)],
return_stderr=True)
# Rewrite API may output an additional 'Copying' progress notification.
self.assertGreaterEqual(stderr.count('Copying'), 1)
self.assertLessEqual(stderr.count('Copying'), 2)
stderr = self.RunGsUtil(['cp', '-n', suri(key_uri), suri(bucket2_uri)],
return_stderr=True)
self.assertIn('Skipping existing item: %s' %
suri(bucket2_uri, key_uri.object_name), stderr)
@PerformsFileToObjectUpload
def test_streaming(self):
bucket_uri = self.CreateBucket()
stderr = self.RunGsUtil(['cp', '-', '%s' % suri(bucket_uri, 'foo')],
stdin='bar', return_stderr=True)
self.assertIn('Copying from <STDIN>', stderr)
key_uri = bucket_uri.clone_replace_name('foo')
self.assertEqual(key_uri.get_contents_as_string(), 'bar')
def test_streaming_multiple_arguments(self):
bucket_uri = self.CreateBucket()
stderr = self.RunGsUtil(['cp', '-', '-', suri(bucket_uri)],
stdin='bar', return_stderr=True, expected_status=1)
self.assertIn('Multiple URL strings are not supported with streaming',
stderr)
# TODO: Implement a way to test both with and without using magic file.
@PerformsFileToObjectUpload
def test_detect_content_type(self):
"""Tests local detection of content type."""
bucket_uri = self.CreateBucket()
dsturi = suri(bucket_uri, 'foo')
self.RunGsUtil(['cp', self._get_test_file('test.mp3'), dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check1():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
if IS_WINDOWS:
self.assertTrue(
re.search(r'Content-Type:\s+audio/x-mpg', stdout) or
re.search(r'Content-Type:\s+audio/mpeg', stdout))
else:
self.assertRegexpMatches(stdout, r'Content-Type:\s+audio/mpeg')
_Check1()
self.RunGsUtil(['cp', self._get_test_file('test.gif'), dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check2():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
self.assertRegexpMatches(stdout, r'Content-Type:\s+image/gif')
_Check2()
def test_content_type_override_default(self):
"""Tests overriding content type with the default value."""
bucket_uri = self.CreateBucket()
dsturi = suri(bucket_uri, 'foo')
self.RunGsUtil(['-h', 'Content-Type:', 'cp',
self._get_test_file('test.mp3'), dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check1():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
self.assertRegexpMatches(stdout,
r'Content-Type:\s+application/octet-stream')
_Check1()
self.RunGsUtil(['-h', 'Content-Type:', 'cp',
self._get_test_file('test.gif'), dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check2():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
self.assertRegexpMatches(stdout,
r'Content-Type:\s+application/octet-stream')
_Check2()
def test_content_type_override(self):
"""Tests overriding content type with a value."""
bucket_uri = self.CreateBucket()
dsturi = suri(bucket_uri, 'foo')
self.RunGsUtil(['-h', 'Content-Type:text/plain', 'cp',
self._get_test_file('test.mp3'), dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check1():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
self.assertRegexpMatches(stdout, r'Content-Type:\s+text/plain')
_Check1()
self.RunGsUtil(['-h', 'Content-Type:text/plain', 'cp',
self._get_test_file('test.gif'), dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check2():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
self.assertRegexpMatches(stdout, r'Content-Type:\s+text/plain')
_Check2()
@unittest.skipIf(IS_WINDOWS, 'magicfile is not available on Windows.')
@PerformsFileToObjectUpload
def test_magicfile_override(self):
"""Tests content type override with magicfile value."""
bucket_uri = self.CreateBucket()
dsturi = suri(bucket_uri, 'foo')
fpath = self.CreateTempFile(contents='foo/bar\n')
self.RunGsUtil(['cp', fpath, dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check1():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
use_magicfile = boto.config.getbool('GSUtil', 'use_magicfile', False)
content_type = ('text/plain' if use_magicfile
else 'application/octet-stream')
self.assertRegexpMatches(stdout, r'Content-Type:\s+%s' % content_type)
_Check1()
@PerformsFileToObjectUpload
def test_content_type_mismatches(self):
"""Tests overriding content type when it does not match the file type."""
bucket_uri = self.CreateBucket()
dsturi = suri(bucket_uri, 'foo')
fpath = self.CreateTempFile(contents='foo/bar\n')
self.RunGsUtil(['-h', 'Content-Type:image/gif', 'cp',
self._get_test_file('test.mp3'), dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check1():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
self.assertRegexpMatches(stdout, r'Content-Type:\s+image/gif')
_Check1()
self.RunGsUtil(['-h', 'Content-Type:image/gif', 'cp',
self._get_test_file('test.gif'), dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check2():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
self.assertRegexpMatches(stdout, r'Content-Type:\s+image/gif')
_Check2()
self.RunGsUtil(['-h', 'Content-Type:image/gif', 'cp', fpath, dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check3():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
self.assertRegexpMatches(stdout, r'Content-Type:\s+image/gif')
_Check3()
@PerformsFileToObjectUpload
def test_content_type_header_case_insensitive(self):
"""Tests that content type header is treated with case insensitivity."""
bucket_uri = self.CreateBucket()
dsturi = suri(bucket_uri, 'foo')
fpath = self._get_test_file('test.gif')
self.RunGsUtil(['-h', 'content-Type:text/plain', 'cp',
fpath, dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check1():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
self.assertRegexpMatches(stdout, r'Content-Type:\s+text/plain')
self.assertNotRegexpMatches(stdout, r'image/gif')
_Check1()
self.RunGsUtil(['-h', 'CONTENT-TYPE:image/gif',
'-h', 'content-type:image/gif',
'cp', fpath, dsturi])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check2():
stdout = self.RunGsUtil(['ls', '-L', dsturi], return_stdout=True)
self.assertRegexpMatches(stdout, r'Content-Type:\s+image/gif')
self.assertNotRegexpMatches(stdout, r'image/gif,\s*image/gif')
_Check2()
@PerformsFileToObjectUpload
def test_other_headers(self):
"""Tests that non-content-type headers are applied successfully on copy."""
bucket_uri = self.CreateBucket()
dst_uri = suri(bucket_uri, 'foo')
fpath = self._get_test_file('test.gif')
self.RunGsUtil(['-h', 'Cache-Control:public,max-age=12',
'-h', 'x-%s-meta-1:abcd' % self.provider_custom_meta, 'cp',
fpath, dst_uri])
stdout = self.RunGsUtil(['ls', '-L', dst_uri], return_stdout=True)
self.assertRegexpMatches(stdout, r'Cache-Control\s*:\s*public,max-age=12')
self.assertRegexpMatches(stdout, r'Metadata:\s*1:\s*abcd')
dst_uri2 = suri(bucket_uri, 'bar')
self.RunGsUtil(['cp', dst_uri, dst_uri2])
# Ensure metadata was preserved across copy.
stdout = self.RunGsUtil(['ls', '-L', dst_uri2], return_stdout=True)
self.assertRegexpMatches(stdout, r'Cache-Control\s*:\s*public,max-age=12')
self.assertRegexpMatches(stdout, r'Metadata:\s*1:\s*abcd')
@PerformsFileToObjectUpload
def test_versioning(self):
"""Tests copy with versioning."""
bucket_uri = self.CreateVersionedBucket()
k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data2')
k2_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1')
g1 = urigen(k2_uri)
self.RunGsUtil(['cp', suri(k1_uri), suri(k2_uri)])
k2_uri = bucket_uri.clone_replace_name(k2_uri.object_name)
k2_uri = bucket_uri.clone_replace_key(k2_uri.get_key())
g2 = urigen(k2_uri)
k2_uri.set_contents_from_string('data3')
g3 = urigen(k2_uri)
fpath = self.CreateTempFile()
# Check to make sure current version is data3.
self.RunGsUtil(['cp', k2_uri.versionless_uri, fpath])
with open(fpath, 'r') as f:
self.assertEqual(f.read(), 'data3')
# Check contents of all three versions
self.RunGsUtil(['cp', '%s#%s' % (k2_uri.versionless_uri, g1), fpath])
with open(fpath, 'r') as f:
self.assertEqual(f.read(), 'data1')
self.RunGsUtil(['cp', '%s#%s' % (k2_uri.versionless_uri, g2), fpath])
with open(fpath, 'r') as f:
self.assertEqual(f.read(), 'data2')
self.RunGsUtil(['cp', '%s#%s' % (k2_uri.versionless_uri, g3), fpath])
with open(fpath, 'r') as f:
self.assertEqual(f.read(), 'data3')
# Copy first version to current and verify.
self.RunGsUtil(['cp', '%s#%s' % (k2_uri.versionless_uri, g1),
k2_uri.versionless_uri])
self.RunGsUtil(['cp', k2_uri.versionless_uri, fpath])
with open(fpath, 'r') as f:
self.assertEqual(f.read(), 'data1')
# Attempt to specify a version-specific URI for destination.
stderr = self.RunGsUtil(['cp', fpath, k2_uri.uri], return_stderr=True,
expected_status=1)
self.assertIn('cannot be the destination for gsutil cp', stderr)
@SkipForS3('S3 lists versioned objects in reverse timestamp order.')
def test_recursive_copying_versioned_bucket(self):
"""Tests that cp -R with versioned buckets copies all versions in order."""
bucket1_uri = self.CreateVersionedBucket()
bucket2_uri = self.CreateVersionedBucket()
# Write two versions of an object to the bucket1.
self.CreateObject(bucket_uri=bucket1_uri, object_name='k', contents='data0')
self.CreateObject(bucket_uri=bucket1_uri, object_name='k',
contents='longer_data1')
self.AssertNObjectsInBucket(bucket1_uri, 2, versioned=True)
self.AssertNObjectsInBucket(bucket2_uri, 0, versioned=True)
# Recursively copy to second versioned bucket.
self.RunGsUtil(['cp', '-R', suri(bucket1_uri, '*'), suri(bucket2_uri)])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check2():
"""Validates the results of the cp -R."""
listing1 = self.RunGsUtil(['ls', '-la', suri(bucket1_uri)],
return_stdout=True).split('\n')
listing2 = self.RunGsUtil(['ls', '-la', suri(bucket2_uri)],
return_stdout=True).split('\n')
# 2 lines of listing output, 1 summary line, 1 empty line from \n split.
self.assertEquals(len(listing1), 4)
self.assertEquals(len(listing2), 4)
# First object in each bucket should match in size and version-less name.
size1, _, uri_str1, _ = listing1[0].split()
self.assertEquals(size1, str(len('data0')))
self.assertEquals(storage_uri(uri_str1).object_name, 'k')
size2, _, uri_str2, _ = listing2[0].split()
self.assertEquals(size2, str(len('data0')))
self.assertEquals(storage_uri(uri_str2).object_name, 'k')
# Similarly for second object in each bucket.
size1, _, uri_str1, _ = listing1[1].split()
self.assertEquals(size1, str(len('longer_data1')))
self.assertEquals(storage_uri(uri_str1).object_name, 'k')
size2, _, uri_str2, _ = listing2[1].split()
self.assertEquals(size2, str(len('longer_data1')))
self.assertEquals(storage_uri(uri_str2).object_name, 'k')
_Check2()
@PerformsFileToObjectUpload
@SkipForS3('Preconditions not supported for S3.')
def test_cp_generation_zero_match(self):
"""Tests that cp handles an object-not-exists precondition header."""
bucket_uri = self.CreateBucket()
fpath1 = self.CreateTempFile(contents='data1')
# Match 0 means only write the object if it doesn't already exist.
gen_match_header = 'x-goog-if-generation-match:0'
# First copy should succeed.
# TODO: This can fail (rarely) if the server returns a 5xx but actually
# commits the bytes. If we add restarts on small uploads, handle this
# case.
self.RunGsUtil(['-h', gen_match_header, 'cp', fpath1, suri(bucket_uri)])
# Second copy should fail with a precondition error.
stderr = self.RunGsUtil(['-h', gen_match_header, 'cp', fpath1,
suri(bucket_uri)],
return_stderr=True, expected_status=1)
self.assertIn('PreconditionException', stderr)
@PerformsFileToObjectUpload
@SkipForS3('Preconditions not supported for S3.')
def test_cp_v_generation_match(self):
"""Tests that cp -v option handles the if-generation-match header."""
bucket_uri = self.CreateVersionedBucket()
k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1')
g1 = k1_uri.generation
tmpdir = self.CreateTempDir()
fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data2')
gen_match_header = 'x-goog-if-generation-match:%s' % g1
# First copy should succeed.
self.RunGsUtil(['-h', gen_match_header, 'cp', fpath1, suri(k1_uri)])
# Second copy should fail the precondition.
stderr = self.RunGsUtil(['-h', gen_match_header, 'cp', fpath1,
suri(k1_uri)],
return_stderr=True, expected_status=1)
self.assertIn('PreconditionException', stderr)
    # Specifying a generation with -n should fail before the request hits the
# server.
stderr = self.RunGsUtil(['-h', gen_match_header, 'cp', '-n', fpath1,
suri(k1_uri)],
return_stderr=True, expected_status=1)
self.assertIn('ArgumentException', stderr)
self.assertIn('Specifying x-goog-if-generation-match is not supported '
'with cp -n', stderr)
@PerformsFileToObjectUpload
def test_cp_nv(self):
"""Tests that cp -nv works when skipping existing file."""
bucket_uri = self.CreateVersionedBucket()
k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1')
tmpdir = self.CreateTempDir()
fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data2')
# First copy should succeed.
self.RunGsUtil(['cp', '-nv', fpath1, suri(k1_uri)])
# Second copy should skip copying.
stderr = self.RunGsUtil(['cp', '-nv', fpath1, suri(k1_uri)],
return_stderr=True)
self.assertIn('Skipping existing item:', stderr)
@PerformsFileToObjectUpload
@SkipForS3('S3 lists versioned objects in reverse timestamp order.')
def test_cp_v_option(self):
""""Tests that cp -v returns the created object's version-specific URI."""
bucket_uri = self.CreateVersionedBucket()
k1_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data1')
k2_uri = self.CreateObject(bucket_uri=bucket_uri, contents='data2')
# Case 1: Upload file to object using one-shot PUT.
tmpdir = self.CreateTempDir()
fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data1')
self._run_cp_minus_v_test('-v', fpath1, k2_uri.uri)
# Case 2: Upload file to object using resumable upload.
size_threshold = ONE_KIB
boto_config_for_test = ('GSUtil', 'resumable_threshold',
str(size_threshold))
with SetBotoConfigForTest([boto_config_for_test]):
file_as_string = os.urandom(size_threshold)
tmpdir = self.CreateTempDir()
fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents=file_as_string)
self._run_cp_minus_v_test('-v', fpath1, k2_uri.uri)
# Case 3: Upload stream to object.
self._run_cp_minus_v_test('-v', '-', k2_uri.uri)
# Case 4: Download object to file. For this case we just expect output of
# gsutil cp -v to be the URI of the file.
tmpdir = self.CreateTempDir()
fpath1 = self.CreateTempFile(tmpdir=tmpdir)
dst_uri = storage_uri(fpath1)
stderr = self.RunGsUtil(['cp', '-v', suri(k1_uri), suri(dst_uri)],
return_stderr=True)
self.assertIn('Created: %s' % dst_uri.uri, stderr.split('\n')[-2])
# Case 5: Daisy-chain from object to object.
self._run_cp_minus_v_test('-Dv', k1_uri.uri, k2_uri.uri)
# Case 6: Copy object to object in-the-cloud.
self._run_cp_minus_v_test('-v', k1_uri.uri, k2_uri.uri)
def _run_cp_minus_v_test(self, opt, src_str, dst_str):
"""Runs cp -v with the options and validates the results."""
stderr = self.RunGsUtil(['cp', opt, src_str, dst_str], return_stderr=True)
match = re.search(r'Created: (.*)\n', stderr)
self.assertIsNotNone(match)
created_uri = match.group(1)
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check1():
stdout = self.RunGsUtil(['ls', '-a', dst_str], return_stdout=True)
lines = stdout.split('\n')
# Final (most recent) object should match the "Created:" URI. This is
# in second-to-last line (last line is '\n').
self.assertGreater(len(lines), 2)
self.assertEqual(created_uri, lines[-2])
_Check1()
@PerformsFileToObjectUpload
def test_stdin_args(self):
"""Tests cp with the -I option."""
tmpdir = self.CreateTempDir()
fpath1 = self.CreateTempFile(tmpdir=tmpdir, contents='data1')
fpath2 = self.CreateTempFile(tmpdir=tmpdir, contents='data2')
bucket_uri = self.CreateBucket()
self.RunGsUtil(['cp', '-I', suri(bucket_uri)],
stdin='\n'.join((fpath1, fpath2)))
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check1():
stdout = self.RunGsUtil(['ls', suri(bucket_uri)], return_stdout=True)
self.assertIn(os.path.basename(fpath1), stdout)
self.assertIn(os.path.basename(fpath2), stdout)
self.assertNumLines(stdout, 2)
_Check1()
def test_cross_storage_class_cloud_cp(self):
bucket1_uri = self.CreateBucket(storage_class='STANDARD')
bucket2_uri = self.CreateBucket(
storage_class='DURABLE_REDUCED_AVAILABILITY')
key_uri = self.CreateObject(bucket_uri=bucket1_uri, contents='foo')
# Server now allows copy-in-the-cloud across storage classes.
self.RunGsUtil(['cp', suri(key_uri), suri(bucket2_uri)])
@unittest.skipUnless(HAS_S3_CREDS, 'Test requires both S3 and GS credentials')
def test_cross_provider_cp(self):
s3_bucket = self.CreateBucket(provider='s3')
gs_bucket = self.CreateBucket(provider='gs')
s3_key = self.CreateObject(bucket_uri=s3_bucket, contents='foo')
gs_key = self.CreateObject(bucket_uri=gs_bucket, contents='bar')
self.RunGsUtil(['cp', suri(s3_key), suri(gs_bucket)])
self.RunGsUtil(['cp', suri(gs_key), suri(s3_bucket)])
@unittest.skipUnless(HAS_S3_CREDS, 'Test requires both S3 and GS credentials')
@unittest.skip('This test performs a large copy but remains here for '
'debugging purposes.')
def test_cross_provider_large_cp(self):
s3_bucket = self.CreateBucket(provider='s3')
gs_bucket = self.CreateBucket(provider='gs')
s3_key = self.CreateObject(bucket_uri=s3_bucket, contents='f'*1024*1024)
gs_key = self.CreateObject(bucket_uri=gs_bucket, contents='b'*1024*1024)
self.RunGsUtil(['cp', suri(s3_key), suri(gs_bucket)])
self.RunGsUtil(['cp', suri(gs_key), suri(s3_bucket)])
with SetBotoConfigForTest([
('GSUtil', 'resumable_threshold', str(ONE_KIB)),
('GSUtil', 'json_resumable_chunk_size', str(ONE_KIB * 256))]):
# Ensure copy also works across json upload chunk boundaries.
self.RunGsUtil(['cp', suri(s3_key), suri(gs_bucket)])
@unittest.skip('This test is slow due to creating many objects, '
'but remains here for debugging purposes.')
def test_daisy_chain_cp_file_sizes(self):
"""Ensure daisy chain cp works with a wide of file sizes."""
bucket_uri = self.CreateBucket()
bucket2_uri = self.CreateBucket()
exponent_cap = 28 # Up to 256 MiB in size.
for i in range(exponent_cap):
one_byte_smaller = 2**i - 1
normal = 2**i
one_byte_larger = 2**i + 1
self.CreateObject(bucket_uri=bucket_uri, contents='a'*one_byte_smaller)
self.CreateObject(bucket_uri=bucket_uri, contents='b'*normal)
self.CreateObject(bucket_uri=bucket_uri, contents='c'*one_byte_larger)
self.AssertNObjectsInBucket(bucket_uri, exponent_cap*3)
self.RunGsUtil(['-m', 'cp', '-D', suri(bucket_uri, '**'),
suri(bucket2_uri)])
self.AssertNObjectsInBucket(bucket2_uri, exponent_cap*3)
def test_daisy_chain_cp(self):
"""Tests cp with the -D option."""
bucket1_uri = self.CreateBucket(storage_class='STANDARD')
bucket2_uri = self.CreateBucket(
storage_class='DURABLE_REDUCED_AVAILABILITY')
key_uri = self.CreateObject(bucket_uri=bucket1_uri, contents='foo')
# Set some headers on source object so we can verify that headers are
# presereved by daisy-chain copy.
self.RunGsUtil(['setmeta', '-h', 'Cache-Control:public,max-age=12',
'-h', 'Content-Type:image/gif',
'-h', 'x-%s-meta-1:abcd' % self.provider_custom_meta,
suri(key_uri)])
# Set public-read (non-default) ACL so we can verify that cp -D -p works.
self.RunGsUtil(['acl', 'set', 'public-read', suri(key_uri)])
acl_json = self.RunGsUtil(['acl', 'get', suri(key_uri)], return_stdout=True)
# Perform daisy-chain copy and verify that source object headers and ACL
# were preserved. Also specify -n option to test that gsutil correctly
# removes the x-goog-if-generation-match:0 header that was set at uploading
# time when updating the ACL.
stderr = self.RunGsUtil(['cp', '-Dpn', suri(key_uri), suri(bucket2_uri)],
return_stderr=True)
self.assertNotIn('Copy-in-the-cloud disallowed', stderr)
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check():
uri = suri(bucket2_uri, key_uri.object_name)
stdout = self.RunGsUtil(['ls', '-L', uri], return_stdout=True)
self.assertRegexpMatches(stdout, r'Cache-Control:\s+public,max-age=12')
self.assertRegexpMatches(stdout, r'Content-Type:\s+image/gif')
self.assertRegexpMatches(stdout, r'Metadata:\s+1:\s+abcd')
new_acl_json = self.RunGsUtil(['acl', 'get', uri], return_stdout=True)
self.assertEqual(acl_json, new_acl_json)
_Check()
def test_daisy_chain_cp_download_failure(self):
"""Tests cp with the -D option when the download thread dies."""
bucket1_uri = self.CreateBucket()
bucket2_uri = self.CreateBucket()
key_uri = self.CreateObject(bucket_uri=bucket1_uri,
contents='a' * self.halt_size)
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
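    # The pickled handler is passed to gsutil via the test-only
    # --testcallbackfile flag; gsutil unpickles it and uses it to halt the
    # transfer partway through, simulating a broken connection.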
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5)))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
'-D', suri(key_uri), suri(bucket2_uri)],
expected_status=1, return_stderr=True)
# Should have two exception traces; one from the download thread and
# one from the upload thread.
self.assertEqual(stderr.count(
'ResumableDownloadException: Artifically halting download'), 2)
def test_canned_acl_cp(self):
"""Tests copying with a canned ACL."""
bucket1_uri = self.CreateBucket()
bucket2_uri = self.CreateBucket()
key_uri = self.CreateObject(bucket_uri=bucket1_uri, contents='foo')
self.RunGsUtil(['cp', '-a', 'public-read', suri(key_uri),
suri(bucket2_uri)])
# Set public-read on the original key after the copy so we can compare
# the ACLs.
self.RunGsUtil(['acl', 'set', 'public-read', suri(key_uri)])
public_read_acl = self.RunGsUtil(['acl', 'get', suri(key_uri)],
return_stdout=True)
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check():
uri = suri(bucket2_uri, key_uri.object_name)
new_acl_json = self.RunGsUtil(['acl', 'get', uri], return_stdout=True)
self.assertEqual(public_read_acl, new_acl_json)
_Check()
@PerformsFileToObjectUpload
def test_canned_acl_upload(self):
"""Tests uploading a file with a canned ACL."""
bucket1_uri = self.CreateBucket()
key_uri = self.CreateObject(bucket_uri=bucket1_uri, contents='foo')
# Set public-read on the object so we can compare the ACLs.
self.RunGsUtil(['acl', 'set', 'public-read', suri(key_uri)])
public_read_acl = self.RunGsUtil(['acl', 'get', suri(key_uri)],
return_stdout=True)
file_name = 'bar'
fpath = self.CreateTempFile(file_name=file_name, contents='foo')
self.RunGsUtil(['cp', '-a', 'public-read', fpath, suri(bucket1_uri)])
new_acl_json = self.RunGsUtil(['acl', 'get', suri(bucket1_uri, file_name)],
return_stdout=True)
self.assertEqual(public_read_acl, new_acl_json)
resumable_size = ONE_KIB
boto_config_for_test = ('GSUtil', 'resumable_threshold',
str(resumable_size))
with SetBotoConfigForTest([boto_config_for_test]):
resumable_file_name = 'resumable_bar'
resumable_contents = os.urandom(resumable_size)
resumable_fpath = self.CreateTempFile(
file_name=resumable_file_name, contents=resumable_contents)
self.RunGsUtil(['cp', '-a', 'public-read', resumable_fpath,
suri(bucket1_uri)])
new_resumable_acl_json = self.RunGsUtil(
['acl', 'get', suri(bucket1_uri, resumable_file_name)],
return_stdout=True)
self.assertEqual(public_read_acl, new_resumable_acl_json)
def test_cp_key_to_local_stream(self):
bucket_uri = self.CreateBucket()
contents = 'foo'
key_uri = self.CreateObject(bucket_uri=bucket_uri, contents=contents)
stdout = self.RunGsUtil(['cp', suri(key_uri), '-'], return_stdout=True)
self.assertIn(contents, stdout)
def test_cp_local_file_to_local_stream(self):
contents = 'content'
fpath = self.CreateTempFile(contents=contents)
stdout = self.RunGsUtil(['cp', fpath, '-'], return_stdout=True)
self.assertIn(contents, stdout)
@PerformsFileToObjectUpload
def test_cp_zero_byte_file(self):
dst_bucket_uri = self.CreateBucket()
src_dir = self.CreateTempDir()
fpath = os.path.join(src_dir, 'zero_byte')
with open(fpath, 'w') as unused_out_file:
pass # Write a zero byte file
self.RunGsUtil(['cp', fpath, suri(dst_bucket_uri)])
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check1():
stdout = self.RunGsUtil(['ls', suri(dst_bucket_uri)], return_stdout=True)
self.assertIn(os.path.basename(fpath), stdout)
_Check1()
download_path = os.path.join(src_dir, 'zero_byte_download')
self.RunGsUtil(['cp', suri(dst_bucket_uri, 'zero_byte'), download_path])
self.assertTrue(os.stat(download_path))
def test_copy_bucket_to_bucket(self):
"""Tests that recursively copying from bucket to bucket.
This should produce identically named objects (and not, in particular,
destination objects named by the version-specific URI from source objects).
"""
src_bucket_uri = self.CreateVersionedBucket()
dst_bucket_uri = self.CreateVersionedBucket()
self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj0',
contents='abc')
self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj1',
contents='def')
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _CopyAndCheck():
self.RunGsUtil(['cp', '-R', suri(src_bucket_uri),
suri(dst_bucket_uri)])
stdout = self.RunGsUtil(['ls', '-R', dst_bucket_uri.uri],
return_stdout=True)
self.assertIn('%s%s/obj0\n' % (dst_bucket_uri,
src_bucket_uri.bucket_name), stdout)
self.assertIn('%s%s/obj1\n' % (dst_bucket_uri,
src_bucket_uri.bucket_name), stdout)
_CopyAndCheck()
def test_copy_bucket_to_dir(self):
"""Tests recursively copying from bucket to a directory.
This should produce identically named objects (and not, in particular,
    destination objects named by the version-specific URI from source objects).
"""
src_bucket_uri = self.CreateBucket()
dst_dir = self.CreateTempDir()
self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj0',
contents='abc')
self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj1',
contents='def')
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _CopyAndCheck():
"""Copies the bucket recursively and validates the results."""
self.RunGsUtil(['cp', '-R', suri(src_bucket_uri), dst_dir])
dir_list = []
for dirname, _, filenames in os.walk(dst_dir):
for filename in filenames:
dir_list.append(os.path.join(dirname, filename))
dir_list = sorted(dir_list)
self.assertEqual(len(dir_list), 2)
self.assertEqual(os.path.join(dst_dir, src_bucket_uri.bucket_name,
'obj0'), dir_list[0])
self.assertEqual(os.path.join(dst_dir, src_bucket_uri.bucket_name,
'obj1'), dir_list[1])
_CopyAndCheck()
def test_recursive_download_with_leftover_dir_placeholder(self):
"""Tests that we correctly handle leftover dir placeholders."""
src_bucket_uri = self.CreateBucket()
dst_dir = self.CreateTempDir()
self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj0',
contents='abc')
self.CreateObject(bucket_uri=src_bucket_uri, object_name='obj1',
contents='def')
# Create a placeholder like what can be left over by web GUI tools.
key_uri = src_bucket_uri.clone_replace_name('/')
key_uri.set_contents_from_string('')
self.AssertNObjectsInBucket(src_bucket_uri, 3)
stderr = self.RunGsUtil(['cp', '-R', suri(src_bucket_uri), dst_dir],
return_stderr=True)
self.assertIn('Skipping cloud sub-directory placeholder object', stderr)
dir_list = []
for dirname, _, filenames in os.walk(dst_dir):
for filename in filenames:
dir_list.append(os.path.join(dirname, filename))
dir_list = sorted(dir_list)
self.assertEqual(len(dir_list), 2)
self.assertEqual(os.path.join(dst_dir, src_bucket_uri.bucket_name,
'obj0'), dir_list[0])
self.assertEqual(os.path.join(dst_dir, src_bucket_uri.bucket_name,
'obj1'), dir_list[1])
def test_copy_quiet(self):
bucket_uri = self.CreateBucket()
key_uri = self.CreateObject(bucket_uri=bucket_uri, contents='foo')
stderr = self.RunGsUtil(['-q', 'cp', suri(key_uri),
suri(bucket_uri.clone_replace_name('o2'))],
return_stderr=True)
self.assertEqual(stderr.count('Copying '), 0)
def test_cp_md5_match(self):
"""Tests that the uploaded object has the expected MD5.
    Note that while this does perform a file to object upload, MD5s are not
    supported for composite objects, so we don't use the decorator in this
    case.
"""
bucket_uri = self.CreateBucket()
fpath = self.CreateTempFile(contents='bar')
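    # gsutil reports MD5s as the base64 encoding of the raw digest, so convert
    # the hex digest from CalculateMd5FromContents into that form for comparison.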
with open(fpath, 'r') as f_in:
file_md5 = base64.encodestring(binascii.unhexlify(
CalculateMd5FromContents(f_in))).rstrip('\n')
self.RunGsUtil(['cp', fpath, suri(bucket_uri)])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _Check1():
stdout = self.RunGsUtil(['ls', '-L', suri(bucket_uri)],
return_stdout=True)
self.assertRegexpMatches(stdout,
r'Hash\s+\(md5\):\s+%s' % re.escape(file_md5))
_Check1()
@unittest.skipIf(IS_WINDOWS,
'Unicode handling on Windows requires mods to site-packages')
@PerformsFileToObjectUpload
def test_cp_manifest_upload_unicode(self):
return self._ManifestUpload('foo-unicöde', 'bar-unicöde',
'manifest-unicöde')
@PerformsFileToObjectUpload
def test_cp_manifest_upload(self):
"""Tests uploading with a mnifest file."""
return self._ManifestUpload('foo', 'bar', 'manifest')
def _ManifestUpload(self, file_name, object_name, manifest_name):
"""Tests uploading with a manifest file."""
bucket_uri = self.CreateBucket()
dsturi = suri(bucket_uri, object_name)
fpath = self.CreateTempFile(file_name=file_name, contents='bar')
logpath = self.CreateTempFile(file_name=manifest_name, contents='')
# Ensure the file is empty.
open(logpath, 'w').close()
self.RunGsUtil(['cp', '-L', logpath, fpath, dsturi])
with open(logpath, 'r') as f:
lines = f.readlines()
self.assertEqual(len(lines), 2)
expected_headers = ['Source', 'Destination', 'Start', 'End', 'Md5',
'UploadId', 'Source Size', 'Bytes Transferred',
'Result', 'Description']
self.assertEqual(expected_headers, lines[0].strip().split(','))
results = lines[1].strip().split(',')
self.assertEqual(results[0][:7], 'file://') # source
self.assertEqual(results[1][:5], '%s://' %
self.default_provider) # destination
date_format = '%Y-%m-%dT%H:%M:%S.%fZ'
start_date = datetime.datetime.strptime(results[2], date_format)
end_date = datetime.datetime.strptime(results[3], date_format)
self.assertEqual(end_date > start_date, True)
if self.RunGsUtil == testcase.GsUtilIntegrationTestCase.RunGsUtil:
# Check that we didn't do automatic parallel uploads - compose doesn't
      # calculate the MD5 hash. Since RunGsUtil is overridden in
# TestCpParallelUploads to force parallel uploads, we can check which
# method was used.
self.assertEqual(results[4], 'rL0Y20zC+Fzt72VPzMSk2A==') # md5
self.assertEqual(int(results[6]), 3) # Source Size
self.assertEqual(int(results[7]), 3) # Bytes Transferred
self.assertEqual(results[8], 'OK') # Result
@PerformsFileToObjectUpload
def test_cp_manifest_download(self):
"""Tests downloading with a manifest file."""
key_uri = self.CreateObject(contents='foo')
fpath = self.CreateTempFile(contents='')
logpath = self.CreateTempFile(contents='')
# Ensure the file is empty.
open(logpath, 'w').close()
self.RunGsUtil(['cp', '-L', logpath, suri(key_uri), fpath],
return_stdout=True)
with open(logpath, 'r') as f:
lines = f.readlines()
self.assertEqual(len(lines), 2)
expected_headers = ['Source', 'Destination', 'Start', 'End', 'Md5',
'UploadId', 'Source Size', 'Bytes Transferred',
'Result', 'Description']
self.assertEqual(expected_headers, lines[0].strip().split(','))
results = lines[1].strip().split(',')
self.assertEqual(results[0][:5], '%s://' %
self.default_provider) # source
self.assertEqual(results[1][:7], 'file://') # destination
date_format = '%Y-%m-%dT%H:%M:%S.%fZ'
start_date = datetime.datetime.strptime(results[2], date_format)
end_date = datetime.datetime.strptime(results[3], date_format)
self.assertEqual(end_date > start_date, True)
self.assertEqual(results[4], 'rL0Y20zC+Fzt72VPzMSk2A==') # md5
self.assertEqual(int(results[6]), 3) # Source Size
# Bytes transferred might be more than 3 if the file was gzipped, since
# the minimum gzip header is 10 bytes.
self.assertGreaterEqual(int(results[7]), 3) # Bytes Transferred
self.assertEqual(results[8], 'OK') # Result
@PerformsFileToObjectUpload
def test_copy_unicode_non_ascii_filename(self):
key_uri = self.CreateObject(contents='foo')
# Make file large enough to cause a resumable upload (which hashes filename
# to construct tracker filename).
fpath = self.CreateTempFile(file_name=u'Аудиоархив',
contents='x' * 3 * 1024 * 1024)
fpath_bytes = fpath.encode(UTF8)
stderr = self.RunGsUtil(['cp', fpath_bytes, suri(key_uri)],
return_stderr=True)
self.assertIn('Copying file:', stderr)
    # Note: We once implemented a test (test_copy_invalid_unicode_filename)
    # verifying that invalid unicode filenames were skipped, but it turns out
    # os.walk() on MacOS doesn't have problems with such files (so that test
    # failed). Given that, we decided to remove the test.
def test_gzip_upload_and_download(self):
bucket_uri = self.CreateBucket()
contents = 'x' * 10000
tmpdir = self.CreateTempDir()
self.CreateTempFile(file_name='test.html', tmpdir=tmpdir, contents=contents)
self.CreateTempFile(file_name='test.js', tmpdir=tmpdir, contents=contents)
self.CreateTempFile(file_name='test.txt', tmpdir=tmpdir, contents=contents)
    # Test that copying while specifying only 2 of the 3 extensions gzips the
    # correct files, and that including whitespace in the extension list works.
self.RunGsUtil(['cp', '-z', 'js, html',
os.path.join(tmpdir, 'test.*'), suri(bucket_uri)])
self.AssertNObjectsInBucket(bucket_uri, 3)
uri1 = suri(bucket_uri, 'test.html')
uri2 = suri(bucket_uri, 'test.js')
uri3 = suri(bucket_uri, 'test.txt')
stdout = self.RunGsUtil(['stat', uri1], return_stdout=True)
self.assertRegexpMatches(stdout, r'Content-Encoding:\s+gzip')
stdout = self.RunGsUtil(['stat', uri2], return_stdout=True)
self.assertRegexpMatches(stdout, r'Content-Encoding:\s+gzip')
stdout = self.RunGsUtil(['stat', uri3], return_stdout=True)
self.assertNotRegexpMatches(stdout, r'Content-Encoding:\s+gzip')
fpath4 = self.CreateTempFile()
for uri in (uri1, uri2, uri3):
self.RunGsUtil(['cp', uri, suri(fpath4)])
with open(fpath4, 'r') as f:
self.assertEqual(f.read(), contents)
def test_upload_with_subdir_and_unexpanded_wildcard(self):
fpath1 = self.CreateTempFile(file_name=('tmp', 'x', 'y', 'z'))
bucket_uri = self.CreateBucket()
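    # fpath1 ends in .../tmp/x/y/z; stripping the last five characters and
    # appending '*' yields a wildcard over the 'tmp' directory, so cp -R must
    # recurse into the matched subdirectory.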
wildcard_uri = '%s*' % fpath1[:-5]
stderr = self.RunGsUtil(['cp', '-R', wildcard_uri, suri(bucket_uri)],
return_stderr=True)
self.assertIn('Copying file:', stderr)
self.AssertNObjectsInBucket(bucket_uri, 1)
def test_cp_object_ending_with_slash(self):
"""Tests that cp works with object names ending with slash."""
tmpdir = self.CreateTempDir()
bucket_uri = self.CreateBucket()
self.CreateObject(bucket_uri=bucket_uri,
object_name='abc/',
contents='dir')
self.CreateObject(bucket_uri=bucket_uri,
object_name='abc/def',
contents='def')
self.AssertNObjectsInBucket(bucket_uri, 2)
self.RunGsUtil(['cp', '-R', suri(bucket_uri), tmpdir])
# Check that files in the subdir got copied even though subdir object
# download was skipped.
with open(os.path.join(tmpdir, bucket_uri.bucket_name, 'abc', 'def')) as f:
self.assertEquals('def', '\n'.join(f.readlines()))
def test_cp_without_read_access(self):
"""Tests that cp fails without read access to the object."""
# TODO: With 401's triggering retries in apitools, this test will take
# a long time. Ideally, make apitools accept a num_retries config for this
# until we stop retrying the 401's.
bucket_uri = self.CreateBucket()
object_uri = self.CreateObject(bucket_uri=bucket_uri, contents='foo')
# Use @Retry as hedge against bucket listing eventual consistency.
self.AssertNObjectsInBucket(bucket_uri, 1)
with self.SetAnonymousBotoCreds():
stderr = self.RunGsUtil(['cp', suri(object_uri), 'foo'],
return_stderr=True, expected_status=1)
self.assertIn('AccessDenied', stderr)
@unittest.skipIf(IS_WINDOWS, 'os.symlink() is not available on Windows.')
def test_cp_minus_e(self):
fpath_dir = self.CreateTempDir()
fpath1 = self.CreateTempFile(tmpdir=fpath_dir)
fpath2 = os.path.join(fpath_dir, 'cp_minus_e')
bucket_uri = self.CreateBucket()
os.symlink(fpath1, fpath2)
stderr = self.RunGsUtil(
['cp', '-e', '%s%s*' % (fpath_dir, os.path.sep),
suri(bucket_uri, 'files')],
return_stderr=True)
self.assertIn('Copying file', stderr)
self.assertIn('Skipping symbolic link file', stderr)
def test_cp_multithreaded_wildcard(self):
"""Tests that cp -m works with a wildcard."""
num_test_files = 5
tmp_dir = self.CreateTempDir(test_files=num_test_files)
bucket_uri = self.CreateBucket()
wildcard_uri = '%s%s*' % (tmp_dir, os.sep)
self.RunGsUtil(['-m', 'cp', wildcard_uri, suri(bucket_uri)])
self.AssertNObjectsInBucket(bucket_uri, num_test_files)
def test_cp_duplicate_source_args(self):
"""Tests that cp -m works when a source argument is provided twice."""
object_contents = 'edge'
object_uri = self.CreateObject(object_name='foo', contents=object_contents)
tmp_dir = self.CreateTempDir()
self.RunGsUtil(['-m', 'cp', suri(object_uri), suri(object_uri), tmp_dir])
with open(os.path.join(tmp_dir, 'foo'), 'r') as in_fp:
contents = in_fp.read()
# Contents should be not duplicated.
self.assertEqual(contents, object_contents)
@SkipForS3('No resumable upload support for S3.')
def test_cp_resumable_upload_break(self):
"""Tests that an upload can be resumed after a connection break."""
bucket_uri = self.CreateBucket()
fpath = self.CreateTempFile(contents='a' * self.halt_size)
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_HaltingCopyCallbackHandler(True, 5)))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
fpath, suri(bucket_uri)],
expected_status=1, return_stderr=True)
self.assertIn('Artifically halting upload', stderr)
stderr = self.RunGsUtil(['cp', fpath, suri(bucket_uri)],
return_stderr=True)
self.assertIn('Resuming upload', stderr)
@SkipForS3('No resumable upload support for S3.')
def test_cp_resumable_upload_retry(self):
"""Tests that a resumable upload completes with one retry."""
bucket_uri = self.CreateBucket()
fpath = self.CreateTempFile(contents='a' * self.halt_size)
# TODO: Raising an httplib or socket error blocks bucket teardown
# in JSON for 60-120s on a multiprocessing lock acquire. Figure out why;
# until then, raise an apitools retryable exception.
if self.test_api == ApiSelector.XML:
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_ResumableUploadRetryHandler(
5, httplib.BadStatusLine, ('unused',))))
else:
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_ResumableUploadRetryHandler(
5, apitools_exceptions.BadStatusCodeError,
('unused', 'unused', 'unused'))))
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['-D', 'cp', '--testcallbackfile',
test_callback_file, fpath, suri(bucket_uri)],
return_stderr=1)
if self.test_api == ApiSelector.XML:
self.assertIn('Got retryable failure', stderr)
else:
self.assertIn('Retrying', stderr)
@SkipForS3('No resumable upload support for S3.')
def test_cp_resumable_streaming_upload_retry(self):
"""Tests that a streaming resumable upload completes with one retry."""
if self.test_api == ApiSelector.XML:
return unittest.skip('XML does not support resumable streaming uploads.')
bucket_uri = self.CreateBucket()
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_ResumableUploadRetryHandler(
5, apitools_exceptions.BadStatusCodeError,
('unused', 'unused', 'unused'))))
# Need to reduce the JSON chunk size since streaming uploads buffer a
# full chunk.
boto_configs_for_test = [('GSUtil', 'json_resumable_chunk_size',
str(256 * ONE_KIB)),
('Boto', 'num_retries', '2')]
with SetBotoConfigForTest(boto_configs_for_test):
stderr = self.RunGsUtil(
['-D', 'cp', '--testcallbackfile', test_callback_file, '-',
suri(bucket_uri, 'foo')],
stdin='a' * 512 * ONE_KIB, return_stderr=1)
self.assertIn('Retrying', stderr)
@SkipForS3('No resumable upload support for S3.')
def test_cp_resumable_upload(self):
"""Tests that a basic resumable upload completes successfully."""
bucket_uri = self.CreateBucket()
fpath = self.CreateTempFile(contents='a' * self.halt_size)
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
self.RunGsUtil(['cp', fpath, suri(bucket_uri)])
@SkipForS3('No resumable upload support for S3.')
def test_resumable_upload_break_leaves_tracker(self):
"""Tests that a tracker file is created with a resumable upload."""
bucket_uri = self.CreateBucket()
fpath = self.CreateTempFile(file_name='foo',
contents='a' * self.halt_size)
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
tracker_filename = GetTrackerFilePath(
StorageUrlFromString(suri(bucket_uri, 'foo')),
TrackerFileType.UPLOAD, self.test_api)
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_HaltingCopyCallbackHandler(True, 5)))
try:
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
fpath, suri(bucket_uri, 'foo')],
expected_status=1, return_stderr=True)
self.assertIn('Artifically halting upload', stderr)
self.assertTrue(os.path.exists(tracker_filename),
'Tracker file %s not present.' % tracker_filename)
finally:
if os.path.exists(tracker_filename):
os.unlink(tracker_filename)
@SkipForS3('No resumable upload support for S3.')
def test_cp_resumable_upload_break_file_size_change(self):
"""Tests a resumable upload where the uploaded file changes size.
This should fail when we read the tracker data.
"""
bucket_uri = self.CreateBucket()
tmp_dir = self.CreateTempDir()
fpath = self.CreateTempFile(file_name='foo', tmpdir=tmp_dir,
contents='a' * self.halt_size)
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_HaltingCopyCallbackHandler(True, 5)))
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
fpath, suri(bucket_uri)],
expected_status=1, return_stderr=True)
self.assertIn('Artifically halting upload', stderr)
fpath = self.CreateTempFile(file_name='foo', tmpdir=tmp_dir,
contents='a' * self.halt_size * 2)
stderr = self.RunGsUtil(['cp', fpath, suri(bucket_uri)],
expected_status=1, return_stderr=True)
self.assertIn('ResumableUploadAbortException', stderr)
@SkipForS3('No resumable upload support for S3.')
def test_cp_resumable_upload_break_file_content_change(self):
"""Tests a resumable upload where the uploaded file changes content."""
if self.test_api == ApiSelector.XML:
return unittest.skip(
'XML doesn\'t make separate HTTP calls at fixed-size boundaries for '
'resumable uploads, so we can\'t guarantee that the server saves a '
'specific part of the upload.')
bucket_uri = self.CreateBucket()
tmp_dir = self.CreateTempDir()
fpath = self.CreateTempFile(file_name='foo', tmpdir=tmp_dir,
contents='a' * ONE_KIB * 512)
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_HaltingCopyCallbackHandler(True,
int(ONE_KIB) * 384)))
resumable_threshold_for_test = (
'GSUtil', 'resumable_threshold', str(ONE_KIB))
resumable_chunk_size_for_test = (
'GSUtil', 'json_resumable_chunk_size', str(ONE_KIB * 256))
with SetBotoConfigForTest([resumable_threshold_for_test,
resumable_chunk_size_for_test]):
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
fpath, suri(bucket_uri)],
expected_status=1, return_stderr=True)
self.assertIn('Artifically halting upload', stderr)
fpath = self.CreateTempFile(file_name='foo', tmpdir=tmp_dir,
contents='b' * ONE_KIB * 512)
stderr = self.RunGsUtil(['cp', fpath, suri(bucket_uri)],
expected_status=1, return_stderr=True)
self.assertIn('doesn\'t match cloud-supplied digest', stderr)
@SkipForS3('No resumable upload support for S3.')
def test_cp_resumable_upload_break_file_smaller_size(self):
"""Tests a resumable upload where the uploaded file changes content.
This should fail hash validation.
"""
bucket_uri = self.CreateBucket()
tmp_dir = self.CreateTempDir()
fpath = self.CreateTempFile(file_name='foo', tmpdir=tmp_dir,
contents='a' * ONE_KIB * 512)
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_HaltingCopyCallbackHandler(True,
int(ONE_KIB) * 384)))
resumable_threshold_for_test = (
'GSUtil', 'resumable_threshold', str(ONE_KIB))
resumable_chunk_size_for_test = (
'GSUtil', 'json_resumable_chunk_size', str(ONE_KIB * 256))
with SetBotoConfigForTest([resumable_threshold_for_test,
resumable_chunk_size_for_test]):
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
fpath, suri(bucket_uri)],
expected_status=1, return_stderr=True)
self.assertIn('Artifically halting upload', stderr)
fpath = self.CreateTempFile(file_name='foo', tmpdir=tmp_dir,
contents='a' * ONE_KIB)
stderr = self.RunGsUtil(['cp', fpath, suri(bucket_uri)],
expected_status=1, return_stderr=True)
self.assertIn('ResumableUploadAbortException', stderr)
  # This temporarily makes the tracker directory unwritable, which interferes
  # with any parallel-running tests that use the tracker directory.
@NotParallelizable
@SkipForS3('No resumable upload support for S3.')
@unittest.skipIf(IS_WINDOWS, 'chmod on dir unsupported on Windows.')
@PerformsFileToObjectUpload
def test_cp_unwritable_tracker_file(self):
"""Tests a resumable upload with an unwritable tracker file."""
bucket_uri = self.CreateBucket()
tracker_filename = GetTrackerFilePath(
StorageUrlFromString(suri(bucket_uri, 'foo')),
TrackerFileType.UPLOAD, self.test_api)
tracker_dir = os.path.dirname(tracker_filename)
fpath = self.CreateTempFile(file_name='foo', contents='a' * ONE_KIB)
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
save_mod = os.stat(tracker_dir).st_mode
try:
os.chmod(tracker_dir, 0)
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', fpath, suri(bucket_uri)],
expected_status=1, return_stderr=True)
self.assertIn('Couldn\'t write tracker file', stderr)
finally:
os.chmod(tracker_dir, save_mod)
if os.path.exists(tracker_filename):
os.unlink(tracker_filename)
  # This temporarily makes the tracker directory unwritable, which interferes
  # with any parallel-running tests that use the tracker directory.
@NotParallelizable
@unittest.skipIf(IS_WINDOWS, 'chmod on dir unsupported on Windows.')
def test_cp_unwritable_tracker_file_download(self):
"""Tests downloads with an unwritable tracker file."""
object_uri = self.CreateObject(contents='foo' * ONE_KIB)
tracker_filename = GetTrackerFilePath(
StorageUrlFromString(suri(object_uri)),
TrackerFileType.DOWNLOAD, self.test_api)
tracker_dir = os.path.dirname(tracker_filename)
fpath = self.CreateTempFile()
save_mod = os.stat(tracker_dir).st_mode
try:
os.chmod(tracker_dir, 0)
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(EIGHT_MIB))
with SetBotoConfigForTest([boto_config_for_test]):
# Should succeed because we are below the threshold.
self.RunGsUtil(['cp', suri(object_uri), fpath])
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', suri(object_uri), fpath],
expected_status=1, return_stderr=True)
self.assertIn('Couldn\'t write tracker file', stderr)
finally:
os.chmod(tracker_dir, save_mod)
if os.path.exists(tracker_filename):
os.unlink(tracker_filename)
def test_cp_resumable_download_break(self):
"""Tests that a download can be resumed after a connection break."""
bucket_uri = self.CreateBucket()
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents='a' * self.halt_size)
fpath = self.CreateTempFile()
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5)))
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
suri(object_uri), fpath],
expected_status=1, return_stderr=True)
self.assertIn('Artifically halting download.', stderr)
tracker_filename = GetTrackerFilePath(
StorageUrlFromString(fpath), TrackerFileType.DOWNLOAD, self.test_api)
self.assertTrue(os.path.isfile(tracker_filename))
stderr = self.RunGsUtil(['cp', suri(object_uri), fpath],
return_stderr=True)
self.assertIn('Resuming download', stderr)
def test_cp_resumable_download_etag_differs(self):
"""Tests that download restarts the file when the source object changes.
This causes the etag not to match.
"""
bucket_uri = self.CreateBucket()
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents='a' * self.halt_size)
fpath = self.CreateTempFile()
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5)))
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
# This will create a tracker file with an ETag.
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
suri(object_uri), fpath],
expected_status=1, return_stderr=True)
self.assertIn('Artifically halting download.', stderr)
# Create a new object with different contents - it should have a
# different ETag since the content has changed.
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents='b' * self.halt_size)
stderr = self.RunGsUtil(['cp', suri(object_uri), fpath],
return_stderr=True)
self.assertNotIn('Resuming download', stderr)
def test_cp_resumable_download_file_larger(self):
"""Tests download deletes the tracker file when existing file is larger."""
bucket_uri = self.CreateBucket()
fpath = self.CreateTempFile()
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents='a' * self.halt_size)
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5)))
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
suri(object_uri), fpath],
expected_status=1, return_stderr=True)
self.assertIn('Artifically halting download.', stderr)
with open(fpath, 'w') as larger_file:
for _ in range(self.halt_size * 2):
larger_file.write('a')
stderr = self.RunGsUtil(['cp', suri(object_uri), fpath],
expected_status=1, return_stderr=True)
self.assertNotIn('Resuming download', stderr)
self.assertIn('is larger', stderr)
self.assertIn('Deleting tracker file', stderr)
def test_cp_resumable_download_content_differs(self):
"""Tests that we do not re-download when tracker file matches existing file.
We only compare size, not contents, so re-download should not occur even
though the contents are technically different. However, hash validation on
    the file should still occur, and we will then delete the file because
    the hashes differ.
"""
bucket_uri = self.CreateBucket()
tmp_dir = self.CreateTempDir()
fpath = self.CreateTempFile(tmpdir=tmp_dir, contents='abcd' * ONE_KIB)
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents='efgh' * ONE_KIB)
stdout = self.RunGsUtil(['ls', '-L', suri(object_uri)], return_stdout=True)
etag_match = re.search(r'\s*ETag:\s*(.*)', stdout)
self.assertIsNotNone(etag_match, 'Could not get object ETag')
self.assertEqual(len(etag_match.groups()), 1,
'Did not match expected single ETag')
etag = etag_match.group(1)
tracker_filename = GetTrackerFilePath(
StorageUrlFromString(fpath), TrackerFileType.DOWNLOAD, self.test_api)
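    # Write the object's real ETag into a download tracker file so gsutil
    # believes a resumable download of this object was already in progress.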
try:
with open(tracker_filename, 'w') as tracker_fp:
tracker_fp.write(etag)
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', suri(object_uri), fpath],
return_stderr=True, expected_status=1)
self.assertIn('Download already complete for file', stderr)
self.assertIn('doesn\'t match cloud-supplied digest', stderr)
# File and tracker file should be deleted.
self.assertFalse(os.path.isfile(fpath))
self.assertFalse(os.path.isfile(tracker_filename))
finally:
if os.path.exists(tracker_filename):
os.unlink(tracker_filename)
def test_cp_resumable_download_content_matches(self):
"""Tests download no-ops when tracker file matches existing file."""
bucket_uri = self.CreateBucket()
tmp_dir = self.CreateTempDir()
matching_contents = 'abcd' * ONE_KIB
fpath = self.CreateTempFile(tmpdir=tmp_dir, contents=matching_contents)
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents=matching_contents)
stdout = self.RunGsUtil(['ls', '-L', suri(object_uri)], return_stdout=True)
etag_match = re.search(r'\s*ETag:\s*(.*)', stdout)
self.assertIsNotNone(etag_match, 'Could not get object ETag')
self.assertEqual(len(etag_match.groups()), 1,
'Did not match expected single ETag')
etag = etag_match.group(1)
tracker_filename = GetTrackerFilePath(
StorageUrlFromString(fpath), TrackerFileType.DOWNLOAD, self.test_api)
with open(tracker_filename, 'w') as tracker_fp:
tracker_fp.write(etag)
try:
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', suri(object_uri), fpath],
return_stderr=True)
self.assertIn('Download already complete for file', stderr)
# Tracker file should be removed after successful hash validation.
self.assertFalse(os.path.isfile(tracker_filename))
finally:
if os.path.exists(tracker_filename):
os.unlink(tracker_filename)
def test_cp_resumable_download_tracker_file_not_matches(self):
"""Tests that download overwrites when tracker file etag does not match."""
bucket_uri = self.CreateBucket()
tmp_dir = self.CreateTempDir()
fpath = self.CreateTempFile(tmpdir=tmp_dir, contents='abcd' * ONE_KIB)
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents='efgh' * ONE_KIB)
stdout = self.RunGsUtil(['ls', '-L', suri(object_uri)], return_stdout=True)
etag_match = re.search(r'\s*ETag:\s*(.*)', stdout)
self.assertIsNotNone(etag_match, 'Could not get object ETag')
self.assertEqual(len(etag_match.groups()), 1,
'Did not match regex for exactly one object ETag')
etag = etag_match.group(1)
etag += 'nonmatching'
tracker_filename = GetTrackerFilePath(
StorageUrlFromString(fpath), TrackerFileType.DOWNLOAD, self.test_api)
with open(tracker_filename, 'w') as tracker_fp:
tracker_fp.write(etag)
try:
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', suri(object_uri), fpath],
return_stderr=True)
self.assertNotIn('Resuming download', stderr)
# Ensure the file was overwritten.
with open(fpath, 'r') as in_fp:
contents = in_fp.read()
self.assertEqual(contents, 'efgh' * ONE_KIB,
'File not overwritten when it should have been '
'due to a non-matching tracker file.')
self.assertFalse(os.path.isfile(tracker_filename))
finally:
if os.path.exists(tracker_filename):
os.unlink(tracker_filename)
def test_cp_resumable_download_gzip(self):
"""Tests that download can be resumed successfully with a gzipped file."""
    # Generate some reasonably incompressible data. This compresses to roughly
    # 128K in practice, but we assert specifically below that it is
# larger than self.halt_size to guarantee that we can halt the download
# partway through.
object_uri = self.CreateObject()
random.seed(0)
contents = str([random.choice(string.ascii_letters)
for _ in xrange(ONE_KIB * 128)])
random.seed() # Reset the seed for any other tests.
fpath1 = self.CreateTempFile(file_name='unzipped.txt', contents=contents)
self.RunGsUtil(['cp', '-z', 'txt', suri(fpath1), suri(object_uri)])
# Use @Retry as hedge against bucket listing eventual consistency.
@Retry(AssertionError, tries=3, timeout_secs=1)
def _GetObjectSize():
stdout = self.RunGsUtil(['du', suri(object_uri)], return_stdout=True)
size_match = re.search(r'(\d+)\s+.*', stdout)
self.assertIsNotNone(size_match, 'Could not get object size')
self.assertEqual(len(size_match.groups()), 1,
'Did not match regex for exactly one object size.')
return long(size_match.group(1))
object_size = _GetObjectSize()
self.assertGreaterEqual(object_size, self.halt_size,
                            'Compressed object size was not large enough to '
'allow for a halted download, so the test results '
'would be invalid. Please increase the compressed '
'object size in the test.')
fpath2 = self.CreateTempFile()
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_HaltingCopyCallbackHandler(False, 5)))
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
suri(object_uri), suri(fpath2)],
return_stderr=True, expected_status=1)
self.assertIn('Artifically halting download.', stderr)
tracker_filename = GetTrackerFilePath(
StorageUrlFromString(fpath2), TrackerFileType.DOWNLOAD, self.test_api)
self.assertTrue(os.path.isfile(tracker_filename))
self.assertIn('Downloading to temp gzip filename', stderr)
# We should have a temporary gzipped file, a tracker file, and no
# final file yet.
self.assertTrue(os.path.isfile('%s_.gztmp' % fpath2))
stderr = self.RunGsUtil(['cp', suri(object_uri), suri(fpath2)],
return_stderr=True)
self.assertIn('Resuming download', stderr)
with open(fpath2, 'r') as f:
self.assertEqual(f.read(), contents, 'File contents did not match.')
self.assertFalse(os.path.isfile(tracker_filename))
self.assertFalse(os.path.isfile('%s_.gztmp' % fpath2))
@SkipForS3('No resumable upload support for S3.')
def test_cp_resumable_upload_bucket_deleted(self):
"""Tests that a not found exception is raised if bucket no longer exists."""
bucket_uri = self.CreateBucket()
fpath = self.CreateTempFile(contents='a' * 2 * ONE_KIB)
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(
_DeleteBucketThenStartOverCopyCallbackHandler(5, bucket_uri)))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
fpath, suri(bucket_uri)], return_stderr=True,
expected_status=1)
self.assertIn('Deleting bucket', stderr)
self.assertIn('bucket does not exist', stderr)
@SkipForS3('No resumable upload support for S3.')
def test_cp_resumable_upload_start_over_http_error(self):
for start_over_error in (404, 410):
self.start_over_error_test_helper(start_over_error)
def start_over_error_test_helper(self, http_error_num):
bucket_uri = self.CreateBucket()
fpath = self.CreateTempFile(contents='a' * 2 * ONE_KIB)
boto_config_for_test = ('GSUtil', 'resumable_threshold', str(ONE_KIB))
if self.test_api == ApiSelector.JSON:
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(_JSONForceHTTPErrorCopyCallbackHandler(5, 404)))
elif self.test_api == ApiSelector.XML:
test_callback_file = self.CreateTempFile(
contents=pickle.dumps(
_XMLResumableUploadStartOverCopyCallbackHandler(5)))
with SetBotoConfigForTest([boto_config_for_test]):
stderr = self.RunGsUtil(['cp', '--testcallbackfile', test_callback_file,
fpath, suri(bucket_uri)], return_stderr=True)
self.assertIn('Restarting upload from scratch', stderr)
def test_cp_minus_c(self):
bucket_uri = self.CreateBucket()
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents='foo')
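    # The first source URL (foo2) does not exist; with -c (continue on error)
    # the existing object is still copied even though gsutil exits non-zero.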
self.RunGsUtil(
['cp', '-c', suri(bucket_uri) + '/foo2', suri(object_uri),
suri(bucket_uri) + '/dir/'],
expected_status=1)
self.RunGsUtil(['stat', '%s/dir/foo' % suri(bucket_uri)])
def test_rewrite_cp(self):
"""Tests the JSON Rewrite API."""
if self.test_api == ApiSelector.XML:
return unittest.skip('Rewrite API is only supported in JSON.')
bucket_uri = self.CreateBucket()
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents='bar')
gsutil_api = GcsJsonApi(BucketStorageUri, logging.getLogger(),
self.default_provider)
key = object_uri.get_key()
src_obj_metadata = apitools_messages.Object(
name=key.name, bucket=key.bucket.name, contentType=key.content_type)
dst_obj_metadata = apitools_messages.Object(
bucket=src_obj_metadata.bucket,
name=self.MakeTempName('object'),
contentType=src_obj_metadata.contentType)
gsutil_api.CopyObject(src_obj_metadata, dst_obj_metadata)
self.assertEqual(
gsutil_api.GetObjectMetadata(src_obj_metadata.bucket,
src_obj_metadata.name,
fields=['md5Hash']).md5Hash,
gsutil_api.GetObjectMetadata(dst_obj_metadata.bucket,
dst_obj_metadata.name,
fields=['md5Hash']).md5Hash,
'Error: Rewritten object\'s hash doesn\'t match source object.')
def test_rewrite_cp_resume(self):
"""Tests the JSON Rewrite API, breaking and resuming via a tracker file."""
if self.test_api == ApiSelector.XML:
return unittest.skip('Rewrite API is only supported in JSON.')
bucket_uri = self.CreateBucket()
# Second bucket needs to be a different storage class so the service
# actually rewrites the bytes.
bucket_uri2 = self.CreateBucket(
storage_class='DURABLE_REDUCED_AVAILABILITY')
# maxBytesPerCall must be >= 1 MiB, so create an object > 2 MiB because we
    # need 2 responses from the service: 1 success, 1 failure prior to
# completion.
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents=('12'*ONE_MIB) + 'bar',
prefer_json_api=True)
gsutil_api = GcsJsonApi(BucketStorageUri, logging.getLogger(),
self.default_provider)
key = object_uri.get_key()
src_obj_metadata = apitools_messages.Object(
name=key.name, bucket=key.bucket.name, contentType=key.content_type,
etag=key.etag.strip('"\''))
dst_obj_name = self.MakeTempName('object')
dst_obj_metadata = apitools_messages.Object(
bucket=bucket_uri2.bucket_name,
name=dst_obj_name,
contentType=src_obj_metadata.contentType)
tracker_file_name = GetRewriteTrackerFilePath(
src_obj_metadata.bucket, src_obj_metadata.name,
dst_obj_metadata.bucket, dst_obj_metadata.name, self.test_api)
try:
try:
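        # The halting handler raises after 2 MiB has been rewritten, leaving a
        # rewrite tracker file behind so the next CopyObject call can resume.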
gsutil_api.CopyObject(
src_obj_metadata, dst_obj_metadata,
progress_callback=_HaltingRewriteCallbackHandler(ONE_MIB*2).call,
max_bytes_per_call=ONE_MIB)
self.fail('Expected _RewriteHaltException.')
except _RewriteHaltException:
pass
# Tracker file should be left over.
self.assertTrue(os.path.exists(tracker_file_name))
# Now resume. Callback ensures we didn't start over.
gsutil_api.CopyObject(
src_obj_metadata, dst_obj_metadata,
progress_callback=_EnsureRewriteResumeCallbackHandler(ONE_MIB*2).call,
max_bytes_per_call=ONE_MIB)
# Copy completed; tracker file should be deleted.
self.assertFalse(os.path.exists(tracker_file_name))
self.assertEqual(
gsutil_api.GetObjectMetadata(src_obj_metadata.bucket,
src_obj_metadata.name,
fields=['md5Hash']).md5Hash,
gsutil_api.GetObjectMetadata(dst_obj_metadata.bucket,
dst_obj_metadata.name,
fields=['md5Hash']).md5Hash,
'Error: Rewritten object\'s hash doesn\'t match source object.')
finally:
# Clean up if something went wrong.
DeleteTrackerFile(tracker_file_name)
def test_rewrite_cp_resume_source_changed(self):
"""Tests that Rewrite starts over when the source object has changed."""
if self.test_api == ApiSelector.XML:
return unittest.skip('Rewrite API is only supported in JSON.')
bucket_uri = self.CreateBucket()
# Second bucket needs to be a different storage class so the service
# actually rewrites the bytes.
bucket_uri2 = self.CreateBucket(
storage_class='DURABLE_REDUCED_AVAILABILITY')
# maxBytesPerCall must be >= 1 MiB, so create an object > 2 MiB because we
    # need 2 responses from the service: 1 success, 1 failure prior to
# completion.
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents=('12'*ONE_MIB) + 'bar',
prefer_json_api=True)
gsutil_api = GcsJsonApi(BucketStorageUri, logging.getLogger(),
self.default_provider)
key = object_uri.get_key()
src_obj_metadata = apitools_messages.Object(
name=key.name, bucket=key.bucket.name, contentType=key.content_type,
etag=key.etag.strip('"\''))
dst_obj_name = self.MakeTempName('object')
dst_obj_metadata = apitools_messages.Object(
bucket=bucket_uri2.bucket_name,
name=dst_obj_name,
contentType=src_obj_metadata.contentType)
tracker_file_name = GetRewriteTrackerFilePath(
src_obj_metadata.bucket, src_obj_metadata.name,
dst_obj_metadata.bucket, dst_obj_metadata.name, self.test_api)
try:
try:
gsutil_api.CopyObject(
src_obj_metadata, dst_obj_metadata,
progress_callback=_HaltingRewriteCallbackHandler(ONE_MIB*2).call,
max_bytes_per_call=ONE_MIB)
self.fail('Expected _RewriteHaltException.')
except _RewriteHaltException:
pass
# Overwrite the original object.
object_uri2 = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents='bar', prefer_json_api=True)
key2 = object_uri2.get_key()
src_obj_metadata2 = apitools_messages.Object(
name=key2.name, bucket=key2.bucket.name,
contentType=key2.content_type, etag=key2.etag.strip('"\''))
# Tracker file for original object should still exist.
self.assertTrue(os.path.exists(tracker_file_name))
# Copy the new object.
gsutil_api.CopyObject(src_obj_metadata2, dst_obj_metadata,
max_bytes_per_call=ONE_MIB)
# Copy completed; original tracker file should be deleted.
self.assertFalse(os.path.exists(tracker_file_name))
self.assertEqual(
gsutil_api.GetObjectMetadata(src_obj_metadata2.bucket,
src_obj_metadata2.name,
fields=['md5Hash']).md5Hash,
gsutil_api.GetObjectMetadata(dst_obj_metadata.bucket,
dst_obj_metadata.name,
fields=['md5Hash']).md5Hash,
'Error: Rewritten object\'s hash doesn\'t match source object.')
finally:
# Clean up if something went wrong.
DeleteTrackerFile(tracker_file_name)
def test_rewrite_cp_resume_command_changed(self):
"""Tests that Rewrite starts over when the arguments changed."""
if self.test_api == ApiSelector.XML:
return unittest.skip('Rewrite API is only supported in JSON.')
bucket_uri = self.CreateBucket()
# Second bucket needs to be a different storage class so the service
# actually rewrites the bytes.
bucket_uri2 = self.CreateBucket(
storage_class='DURABLE_REDUCED_AVAILABILITY')
# maxBytesPerCall must be >= 1 MiB, so create an object > 2 MiB because we
    # need 2 responses from the service: 1 success, 1 failure prior to
# completion.
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents=('12'*ONE_MIB) + 'bar',
prefer_json_api=True)
gsutil_api = GcsJsonApi(BucketStorageUri, logging.getLogger(),
self.default_provider)
key = object_uri.get_key()
src_obj_metadata = apitools_messages.Object(
name=key.name, bucket=key.bucket.name, contentType=key.content_type,
etag=key.etag.strip('"\''))
dst_obj_name = self.MakeTempName('object')
dst_obj_metadata = apitools_messages.Object(
bucket=bucket_uri2.bucket_name,
name=dst_obj_name,
contentType=src_obj_metadata.contentType)
tracker_file_name = GetRewriteTrackerFilePath(
src_obj_metadata.bucket, src_obj_metadata.name,
dst_obj_metadata.bucket, dst_obj_metadata.name, self.test_api)
try:
try:
gsutil_api.CopyObject(
src_obj_metadata, dst_obj_metadata, canned_acl='private',
progress_callback=_HaltingRewriteCallbackHandler(ONE_MIB*2).call,
max_bytes_per_call=ONE_MIB)
self.fail('Expected _RewriteHaltException.')
except _RewriteHaltException:
pass
# Tracker file for original object should still exist.
self.assertTrue(os.path.exists(tracker_file_name))
# Copy the same object but with different call parameters.
gsutil_api.CopyObject(src_obj_metadata, dst_obj_metadata,
canned_acl='public-read',
max_bytes_per_call=ONE_MIB)
# Copy completed; original tracker file should be deleted.
self.assertFalse(os.path.exists(tracker_file_name))
new_obj_metadata = gsutil_api.GetObjectMetadata(
dst_obj_metadata.bucket, dst_obj_metadata.name,
fields=['acl,md5Hash'])
self.assertEqual(
gsutil_api.GetObjectMetadata(src_obj_metadata.bucket,
src_obj_metadata.name,
fields=['md5Hash']).md5Hash,
new_obj_metadata.md5Hash,
'Error: Rewritten object\'s hash doesn\'t match source object.')
# New object should have a public-read ACL from the second command.
found_public_acl = False
for acl_entry in new_obj_metadata.acl:
if acl_entry.entity == 'allUsers':
found_public_acl = True
self.assertTrue(found_public_acl,
'New object was not written with a public ACL.')
finally:
# Clean up if something went wrong.
DeleteTrackerFile(tracker_file_name)
class TestCpUnitTests(testcase.GsUtilUnitTestCase):
"""Unit tests for gsutil cp."""
def testDownloadWithNoHashAvailable(self):
"""Tests a download with no valid server-supplied hash."""
# S3 should have a special message for non-MD5 etags.
bucket_uri = self.CreateBucket(provider='s3')
object_uri = self.CreateObject(bucket_uri=bucket_uri, contents='foo')
object_uri.get_key().etag = '12345' # Not an MD5
dst_dir = self.CreateTempDir()
log_handler = self.RunCommand(
'cp', [suri(object_uri), dst_dir], return_log_handler=True)
warning_messages = log_handler.messages['warning']
self.assertEquals(2, len(warning_messages))
self.assertRegexpMatches(
warning_messages[0],
r'Non-MD5 etag \(12345\) present for key .*, '
r'data integrity checks are not possible')
self.assertIn('Integrity cannot be assured', warning_messages[1])
def test_object_and_prefix_same_name(self):
bucket_uri = self.CreateBucket()
object_uri = self.CreateObject(bucket_uri=bucket_uri, object_name='foo',
contents='foo')
self.CreateObject(bucket_uri=bucket_uri,
object_name='foo/bar', contents='bar')
fpath = self.CreateTempFile()
# MockKey doesn't support hash_algs, so the MD5 will not match.
with SetBotoConfigForTest([('GSUtil', 'check_hashes', 'never')]):
self.RunCommand('cp', [suri(object_uri), fpath])
with open(fpath, 'r') as f:
self.assertEqual(f.read(), 'foo')
def test_cp_upload_respects_no_hashes(self):
bucket_uri = self.CreateBucket()
fpath = self.CreateTempFile(contents='abcd')
with SetBotoConfigForTest([('GSUtil', 'check_hashes', 'never')]):
log_handler = self.RunCommand('cp', [fpath, suri(bucket_uri)],
return_log_handler=True)
warning_messages = log_handler.messages['warning']
self.assertEquals(1, len(warning_messages))
self.assertIn('Found no hashes to validate object upload',
warning_messages[0])
| bsd-3-clause | -5,402,575,778,516,591,000 | 6,583,022,846,036,247,000 | 45.391885 | 87 | 0.644894 | false |
sestrella/ansible | lib/ansible/modules/network/f5/bigip_firewall_log_profile.py | 23 | 28678 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2019, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_firewall_log_profile
short_description: Manages AFM logging profiles configured in the system
description:
- Manages AFM logging profiles configured in the system along with basic information about each profile.
version_added: 2.9
options:
name:
description:
- Specifies the name of the log profile.
type: str
required: True
description:
description:
- Description of the log profile.
type: str
dos_protection:
description:
- Configures DoS related settings of the log profile.
suboptions:
dns_publisher:
description:
- Specifies the name of the log publisher used for DNS DoS events.
- To specify the log_publisher on a different partition from the AFM log profile, specify the name in fullpath
format, e.g. C(/Foobar/log-publisher), otherwise the partition for log publisher
is inferred from C(partition) module parameter.
type: str
sip_publisher:
description:
- Specifies the name of the log publisher used for SIP DoS events.
- To specify the log_publisher on a different partition from the AFM log profile, specify the name in fullpath
format, e.g. C(/Foobar/log-publisher), otherwise the partition for log publisher
is inferred from C(partition) module parameter.
type: str
network_publisher:
description:
- Specifies the name of the log publisher used for DoS Network events.
- To specify the log_publisher on a different partition from the AFM log profile, specify the name in fullpath
format, e.g. C(/Foobar/log-publisher), otherwise the partition for log publisher
is inferred from C(partition) module parameter.
type: str
type: dict
ip_intelligence:
description:
- Configures IP Intelligence related settings of the log profile.
suboptions:
log_publisher:
description:
- Specifies the name of the log publisher used for IP Intelligence events.
- To specify the log_publisher on a different partition from the AFM log profile, specify the name in fullpath
format, e.g. C(/Foobar/log-publisher), otherwise the partition for log publisher
is inferred from C(partition) module parameter.
type: str
rate_limit:
description:
- Defines a rate limit for all combined IP intelligence log messages per second. Beyond this rate limit,
            log messages are not logged until the message rate drops below the specified limit.
- To specify an indefinite rate, use the value C(indefinite).
- If specifying a numeric rate, the value must be between C(1) and C(4294967295).
type: str
log_rtbh:
description:
- Specifies, when C(yes), that remotely triggered blackholing events are logged.
type: bool
log_shun:
description:
- Specifies, when C(yes), that IP Intelligence shun list events are logged.
- This option can only be set on C(global-network) built-in profile
type: bool
log_translation_fields:
description:
          - This option is used to enable or disable the logging of translated (i.e. server side) fields in IP
Intelligence log messages.
- Translated fields include (but are not limited to) source address/port, destination address/port,
IP protocol, route domain, and VLAN.
type: bool
type: dict
port_misuse:
description:
- Port Misuse log configuration.
suboptions:
log_publisher:
description:
- Specifies the name of the log publisher used for Port Misuse events.
- To specify the log_publisher on a different partition from the AFM log profile, specify the name in fullpath
format, e.g. C(/Foobar/log-publisher), otherwise the partition for log publisher
is inferred from C(partition) module parameter.
type: str
rate_limit:
description:
- Defines a rate limit for all combined port misuse log messages per second. Beyond this rate limit,
            log messages are not logged until the message rate drops below the specified limit.
- To specify an indefinite rate, use the value C(indefinite).
- If specifying a numeric rate, the value must be between C(1) and C(4294967295).
type: str
type: dict
partition:
description:
- Device partition to create log profile on.
- Parameter also used when specifying names for log publishers, unless log publisher names are in fullpath format.
type: str
default: Common
state:
description:
- When C(state) is C(present), ensures the resource exists.
- When C(state) is C(absent), ensures that resource is removed. Attempts to remove built-in system profiles are
ignored and no change is returned.
type: str
choices:
- present
- absent
default: present
extends_documentation_fragment: f5
author:
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a basic log profile with port misuse
bigip_firewall_log_profile:
name: barbaz
port_misuse:
rate_limit: 30000
log_publisher: local-db-pub
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Change ip_intelligence settings, publisher on different partition, remove port misuse
bigip_firewall_log_profile:
name: barbaz
ip_intelligence:
rate_limit: 400000
log_translation_fields: yes
log_rtbh: yes
log_publisher: "/foobar/non-local-db"
port_misuse:
log_publisher: ""
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Create a log profile with dos protection, different partition
bigip_firewall_log_profile:
name: foobar
partition: foobar
dos_protection:
dns_publisher: "/Common/local-db-pub"
sip_publisher: "non-local-db"
network_publisher: "/Common/local-db-pub"
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
- name: Remove log profile
bigip_firewall_log_profile:
name: barbaz
partition: Common
state: absent
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
description:
description: New description of the AFM log profile.
returned: changed
type: str
sample: This is my description
dos_protection:
description: Log publishers used in DoS related settings of the log profile.
type: complex
returned: changed
contains:
dns_publisher:
description: The name of the log publisher used for DNS DoS events.
returned: changed
type: str
sample: "/Common/local-db-publisher"
sip_publisher:
description: The name of the log publisher used for SIP DoS events.
returned: changed
type: str
sample: "/Common/local-db-publisher"
network_publisher:
description: The name of the log publisher used for DoS Network events.
returned: changed
type: str
sample: "/Common/local-db-publisher"
sample: hash/dictionary of values
ip_intelligence:
description: IP Intelligence related settings of the log profile.
type: complex
returned: changed
contains:
log_publisher:
description: The name of the log publisher used for IP Intelligence events.
returned: changed
type: str
sample: "/Common/local-db-publisher"
rate_limit:
description: The rate limit for all combined IP intelligence log messages per second.
returned: changed
type: str
sample: "indefinite"
log_rtbh:
description: Logging of remotely triggered blackholing events.
returned: changed
type: bool
sample: yes
log_shun:
description: Logging of IP Intelligence shun list events.
returned: changed
type: bool
sample: no
log_translation_fields:
description: Logging of translated fields in IP Intelligence log messages.
returned: changed
type: bool
sample: no
sample: hash/dictionary of values
port_misuse:
description: Port Misuse related settings of the log profile.
type: complex
returned: changed
contains:
log_publisher:
description: The name of the log publisher used for Port Misuse events.
returned: changed
type: str
sample: "/Common/local-db-publisher"
rate_limit:
description: The rate limit for all combined Port Misuse log messages per second.
returned: changed
type: str
sample: "indefinite"
sample: hash/dictionary of values
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import flatten_boolean
from library.module_utils.network.f5.compare import compare_dictionary
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import flatten_boolean
from ansible.module_utils.network.f5.compare import compare_dictionary
class Parameters(AnsibleF5Parameters):
api_map = {
'ipIntelligence': 'ip_intelligence',
'portMisuse': 'port_misuse',
'protocolDnsDosPublisher': 'dns_publisher',
'protocolSipDosPublisher': 'sip_publisher',
'dosNetworkPublisher': 'network_publisher',
}
api_attributes = [
'description',
'ipIntelligence',
'portMisuse',
'dosNetworkPublisher',
'protocolDnsDosPublisher',
'protocolSipDosPublisher',
]
returnables = [
'ip_intelligence',
'dns_publisher',
'sip_publisher',
'network_publisher',
'port_misuse',
'description',
'ip_log_publisher',
'ip_rate_limit',
'ip_log_rthb',
'ip_log_shun',
'ip_log_translation_fields',
'port_rate_limit',
'port_log_publisher',
]
updatables = [
'dns_publisher',
'sip_publisher',
'network_publisher',
'description',
'ip_log_publisher',
'ip_rate_limit',
'ip_log_rthb',
'ip_log_shun',
'ip_log_translation_fields',
'port_rate_limit',
'port_log_publisher',
]
class ApiParameters(Parameters):
@property
def ip_log_publisher(self):
result = self._values['ip_intelligence'].get('logPublisher', None)
return result
@property
def ip_rate_limit(self):
return self._values['ip_intelligence']['aggregateRate']
@property
def port_rate_limit(self):
return self._values['port_misuse']['aggregateRate']
@property
def port_log_publisher(self):
result = self._values['port_misuse'].get('logPublisher', None)
return result
@property
def ip_log_rtbh(self):
return self._values['ip_intelligence']['logRtbh']
@property
def ip_log_shun(self):
if self._values['name'] != 'global-network':
return None
return self._values['ip_intelligence']['logShun']
@property
def ip_log_translation_fields(self):
return self._values['ip_intelligence']['logTranslationFields']
class ModuleParameters(Parameters):
def _transform_log_publisher(self, log_publisher):
if log_publisher is None:
return None
if log_publisher in ['', 'none']:
return {}
return fq_name(self.partition, log_publisher)
def _validate_rate_limit(self, rate_limit):
if rate_limit is None:
return None
if rate_limit == 'indefinite':
return 4294967295
if 0 <= int(rate_limit) <= 4294967295:
return int(rate_limit)
raise F5ModuleError(
"Valid 'maximum_age' must be in range 0 - 4294967295, or 'indefinite'."
)
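    # Illustrative only (not part of the upstream module): how the helper
    # above maps user-supplied rate limits, assuming ``p`` is a
    # ModuleParameters instance:
    #
    #   p._validate_rate_limit('indefinite')   # -> 4294967295
    #   p._validate_rate_limit('30000')        # -> 30000
    #   p._validate_rate_limit('9999999999')   # -> raises F5ModuleError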
@property
def ip_log_rtbh(self):
if self._values['ip_intelligence'] is None:
return None
result = flatten_boolean(self._values['ip_intelligence']['log_rtbh'])
if result == 'yes':
return 'enabled'
if result == 'no':
return 'disabled'
return result
@property
def ip_log_shun(self):
if self._values['ip_intelligence'] is None:
return None
if 'global-network' not in self._values['name']:
return None
result = flatten_boolean(self._values['ip_intelligence']['log_shun'])
if result == 'yes':
return 'enabled'
if result == 'no':
return 'disabled'
return result
@property
def ip_log_translation_fields(self):
if self._values['ip_intelligence'] is None:
return None
result = flatten_boolean(self._values['ip_intelligence']['log_translation_fields'])
if result == 'yes':
return 'enabled'
if result == 'no':
return 'disabled'
return result
@property
def ip_log_publisher(self):
if self._values['ip_intelligence'] is None:
return None
result = self._transform_log_publisher(self._values['ip_intelligence']['log_publisher'])
return result
@property
def ip_rate_limit(self):
if self._values['ip_intelligence'] is None:
return None
return self._validate_rate_limit(self._values['ip_intelligence']['rate_limit'])
@property
def port_rate_limit(self):
if self._values['port_misuse'] is None:
return None
return self._validate_rate_limit(self._values['port_misuse']['rate_limit'])
@property
def port_log_publisher(self):
if self._values['port_misuse'] is None:
return None
result = self._transform_log_publisher(self._values['port_misuse']['log_publisher'])
return result
@property
def dns_publisher(self):
if self._values['dos_protection'] is None:
return None
result = self._transform_log_publisher(self._values['dos_protection']['dns_publisher'])
return result
@property
def sip_publisher(self):
if self._values['dos_protection'] is None:
return None
result = self._transform_log_publisher(self._values['dos_protection']['sip_publisher'])
return result
@property
def network_publisher(self):
if self._values['dos_protection'] is None:
return None
result = self._transform_log_publisher(self._values['dos_protection']['network_publisher'])
return result
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
@property
def ip_intelligence(self):
to_filter = dict(
logPublisher=self._values['ip_log_publisher'],
aggregateRate=self._values['ip_rate_limit'],
logRtbh=self._values['ip_log_rtbh'],
logShun=self._values['ip_log_shun'],
logTranslationFields=self._values['ip_log_translation_fields']
)
result = self._filter_params(to_filter)
if result:
return result
@property
def port_misuse(self):
to_filter = dict(
logPublisher=self._values['port_log_publisher'],
aggregateRate=self._values['port_rate_limit']
)
result = self._filter_params(to_filter)
if result:
return result
class ReportableChanges(Changes):
returnables = [
'ip_intelligence',
'port_misuse',
'description',
'dos_protection',
]
def _change_rate_limit_value(self, value):
if value == 4294967295:
return 'indefinite'
else:
return value
@property
def ip_log_rthb(self):
result = flatten_boolean(self._values['ip_log_rtbh'])
return result
@property
def ip_log_shun(self):
result = flatten_boolean(self._values['ip_log_shun'])
return result
@property
def ip_log_translation_fields(self):
result = flatten_boolean(self._values['ip_log_translation_fields'])
return result
@property
def ip_intelligence(self):
if self._values['ip_intelligence'] is None:
return None
to_filter = dict(
log_publisher=self._values['ip_log_publisher'],
rate_limit=self._change_rate_limit_value(self._values['ip_rate_limit']),
log_rtbh=self.ip_log_rtbh,
log_shun=self.ip_log_shun,
log_translation_fields=self.ip_log_translation_fields
)
result = self._filter_params(to_filter)
if result:
return result
@property
def port_misuse(self):
if self._values['port_misuse'] is None:
return None
to_filter = dict(
log_publisher=self._values['port_log_publisher'],
rate_limit=self._change_rate_limit_value(self._values['port_rate_limit']),
)
result = self._filter_params(to_filter)
if result:
return result
@property
def dos_protection(self):
to_filter = dict(
dns_publisher=self._values['dns_publisher'],
sip_publisher=self._values['sip_publisher'],
network_publisher=self._values['network_publisher'],
)
result = self._filter_params(to_filter)
return result
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def ip_log_publisher(self):
result = compare_dictionary(self.want.ip_log_publisher, self.have.ip_log_publisher)
return result
@property
def port_log_publisher(self):
result = compare_dictionary(self.want.port_log_publisher, self.have.port_log_publisher)
return result
@property
def dns_publisher(self):
result = compare_dictionary(self.want.dns_publisher, self.have.dns_publisher)
return result
@property
def sip_publisher(self):
result = compare_dictionary(self.want.sip_publisher, self.have.sip_publisher)
return result
@property
def network_publisher(self):
result = compare_dictionary(self.want.network_publisher, self.have.network_publisher)
return result
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
# Built-in profiles cannot be removed
built_ins = [
'Log all requests', 'Log illegal requests',
'global-network', 'local-dos'
]
if self.want.name in built_ins:
return False
if self.exists():
return self.remove()
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/security/log/profile/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/security/log/profile/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 404, 409]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return True
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/security/log/profile/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 404, 409]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/security/log/profile/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/security/log/profile/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 404, 409]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(
required=True
),
description=dict(),
dos_protection=dict(
type='dict',
options=dict(
dns_publisher=dict(),
sip_publisher=dict(),
network_publisher=dict()
)
),
ip_intelligence=dict(
type='dict',
options=dict(
log_publisher=dict(),
log_translation_fields=dict(type='bool'),
rate_limit=dict(),
log_rtbh=dict(type='bool'),
log_shun=dict(type='bool')
)
),
port_misuse=dict(
type='dict',
options=dict(
log_publisher=dict(),
rate_limit=dict()
)
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
),
state=dict(
default='present',
choices=['present', 'absent']
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| gpl-3.0 | -7,391,854,630,368,684,000 | -7,570,650,060,362,638,000 | 31.588636 | 120 | 0.606249 | false |
priyaganti/rockstor-core | src/rockstor/system/iscsi.py | 2 | 2084 | """
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from osi import run_command
TGTADM_BIN = '/usr/sbin/tgtadm'
DD_BIN = '/bin/dd'
def create_target_device(tid, tname):
cmd = [TGTADM_BIN, '--lld', 'iscsi', '--mode', 'target', '--op', 'new',
'--tid', tid, '--targetname', tname]
return run_command(cmd)
def add_logical_unit(tid, lun, dev_name):
cmd = [TGTADM_BIN, '--lld', 'iscsi', '--mode', 'logicalunit', '--op',
'new', '--tid', tid, '--lun', lun, '-b', dev_name]
return run_command(cmd)
def ip_restrict(tid):
"""
no restrictions at all
"""
cmd = [TGTADM_BIN, '--lld', 'iscsi', '--mode', 'target', '--op', 'bind',
'--tid', tid, '-I', 'ALL']
return run_command(cmd)
def create_lun_file(dev_name, size):
"""
size in MB
"""
of = ('of=%s' % dev_name)
count = ('count=%d' % size)
cmd = [DD_BIN, 'if=/dev/zero', of, 'bs=1M', count]
return run_command(cmd)
def export_iscsi(tid, tname, lun, dev_name, size):
"""
    main method that does everything to a share to make it available as an iscsi
device. this should be called from the api view
1. create the dev_name file with the given size using dd
2. create target device
3. add logical unit
4. authentication??
"""
create_lun_file(dev_name, size)
create_target_device(tid, tname)
add_logical_unit(tid, lun, dev_name)
ip_restrict(tid)
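# A hypothetical usage sketch (not part of the original module; the target
# name, backing file path and size are illustrative only):
#
#   export_iscsi(tid='1', tname='iqn.2015-01.com.example:share1', lun='1',
#                dev_name='/mnt/pool/share1/lun1.img', size=1024)
#
# This would create a 1 GiB backing file with dd, create the tgtd target,
# attach the file as LUN 1 and bind the target to all initiator addresses.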
| gpl-3.0 | 1,116,484,804,988,205,000 | -5,999,330,902,662,648,000 | 29.202899 | 79 | 0.644434 | false |
tkwon/dj-stripe | djstripe/migrations/0025_auto_20170322_0428.py | 1 | 3906 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-22 04:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('djstripe', '0024_auto_20170308_0757'),
]
operations = [
migrations.AlterField(
model_name='account',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='account',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='charge',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='charge',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='customer',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='customer',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='event',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='event',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='eventprocessingexception',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='eventprocessingexception',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='invoice',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='invoice',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='invoiceitem',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='invoiceitem',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='plan',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='plan',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='stripesource',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='stripesource',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='subscription',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='subscription',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='transfer',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='transfer',
name='modified',
field=models.DateTimeField(auto_now=True),
),
]
| mit | -1,351,102,869,000,782,800 | 6,655,449,933,985,655,000 | 30.248 | 58 | 0.536354 | false |
eqcorrscan/ci.testing | eqcorrscan/utils/stacking.py | 1 | 6254 | """
Utility module of the EQcorrscan package to allow for different methods of \
stacking of seismic signal in one place.
:copyright:
EQcorrscan developers.
:license:
GNU Lesser General Public License, Version 3
(https://www.gnu.org/copyleft/lesser.html)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from scipy.signal import hilbert
from copy import deepcopy
from eqcorrscan.core.match_filter import normxcorr2
def linstack(streams, normalize=True):
"""
Compute the linear stack of a series of seismic streams of \
multiplexed data.
:type streams: list
:param streams: List of streams to stack
:type normalize: bool
:param normalize: Normalize traces before stacking, normalizes by the RMS \
amplitude.
:returns: stacked data
:rtype: :class:`obspy.core.stream.Stream`
"""
stack = streams[np.argmax([len(stream) for stream in streams])].copy()
if normalize:
for tr in stack:
tr.data = tr.data / np.sqrt(np.mean(np.square(tr.data)))
tr.data = np.nan_to_num(tr.data)
for i in range(1, len(streams)):
for tr in stack:
matchtr = streams[i].select(station=tr.stats.station,
channel=tr.stats.channel)
if matchtr:
# Normalize the data before stacking
if normalize:
norm = matchtr[0].data /\
np.sqrt(np.mean(np.square(matchtr[0].data)))
norm = np.nan_to_num(norm)
else:
norm = matchtr[0].data
tr.data = np.sum((norm, tr.data), axis=0)
return stack
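# A minimal usage sketch (illustrative only, assuming the obspy example data
# is acceptable as input):
#
#   from obspy import read
#   streams = [read() for _ in range(3)]   # three copies of the demo stream
#   stack = linstack(streams, normalize=True)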
def PWS_stack(streams, weight=2, normalize=True):
"""
Compute the phase weighted stack of a series of streams.
.. note:: It is recommended to align the traces before stacking.
:type streams: list
:param streams: List of :class:`obspy.core.stream.Stream` to stack.
:type weight: float
:param weight: Exponent to the phase stack used for weighting.
:type normalize: bool
:param normalize: Normalize traces before stacking.
:return: Stacked stream.
:rtype: :class:`obspy.core.stream.Stream`
"""
# First get the linear stack which we will weight by the phase stack
Linstack = linstack(streams)
# Compute the instantaneous phase
instaphases = []
print("Computing instantaneous phase")
for stream in streams:
instaphase = stream.copy()
for tr in instaphase:
analytic = hilbert(tr.data)
envelope = np.sqrt(np.sum((np.square(analytic),
np.square(tr.data)), axis=0))
tr.data = analytic / envelope
instaphases.append(instaphase)
# Compute the phase stack
print("Computing the phase stack")
Phasestack = linstack(instaphases, normalize=normalize)
# Compute the phase-weighted stack
for tr in Phasestack:
tr.data = Linstack.select(station=tr.stats.station)[0].data *\
np.abs(tr.data ** weight)
return Phasestack
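# A phase-weighted stack of the same (already aligned) streams, sketched under
# the same assumptions as the linstack example above:
#
#   pws = PWS_stack(streams, weight=2, normalize=True)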
def align_traces(trace_list, shift_len, master=False, positive=False,
plot=False):
"""
Align traces relative to each other based on their cross-correlation value.
Uses the :func:`obspy.signal.cross_correlation.xcorr` function to find the
optimum shift to align traces relative to a master event. Either uses a
given master to align traces, or uses the first trace in the list.
.. Note::
The cross-correlation function may yield an error/warning
about shift_len being too large: this is raised by the
:func:`obspy.signal.cross_correlation.xcorr` routine when the shift_len
        is greater than half the length of either master or a trace; in that case
the correlation will not be robust. We may switch to a different
correlation routine later.
:type trace_list: list
:param trace_list: List of traces to align
:type shift_len: int
    :param shift_len: Length to allow shifting within, in samples
:type master: obspy.core.trace.Trace
:param master: Master trace to align to, if set to False will align to \
the largest amplitude trace (default)
:type positive: bool
:param positive: Return the maximum positive cross-correlation, or the \
absolute maximum, defaults to False (absolute maximum).
:type plot: bool
:param plot: If true, will plot each trace aligned with the master.
:returns: list of shifts and correlations for best alignment in seconds.
:rtype: list
"""
from eqcorrscan.utils.plotting import xcorr_plot
traces = deepcopy(trace_list)
if not master:
# Use trace with largest MAD amplitude as master
master = traces[0]
MAD_master = np.median(np.abs(master.data))
for i in range(1, len(traces)):
if np.median(np.abs(traces[i])) > MAD_master:
master = traces[i]
MAD_master = np.median(np.abs(master.data))
else:
print('Using master given by user')
shifts = []
ccs = []
for i in range(len(traces)):
if not master.stats.sampling_rate == traces[i].stats.sampling_rate:
raise ValueError('Sampling rates not the same')
cc_vec = normxcorr2(template=traces[i].data.
astype(np.float32)[shift_len:-shift_len],
image=master.data.astype(np.float32))
cc_vec = cc_vec[0]
shift = np.abs(cc_vec).argmax()
cc = cc_vec[shift]
if plot:
xcorr_plot(template=traces[i].data.
astype(np.float32)[shift_len:-shift_len],
image=master.data.astype(np.float32), shift=shift,
cc=cc)
shift -= shift_len
if cc < 0 and positive:
cc = cc_vec.max()
shift = cc_vec.argmax() - shift_len
shifts.append(shift / master.stats.sampling_rate)
ccs.append(cc)
return shifts, ccs
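# A sketch of measuring alignment shifts (illustrative only; ``streams`` is
# assumed from the examples above, shift_len is in samples and the returned
# shifts are in seconds):
#
#   traces = [st[0] for st in streams]
#   shifts, ccs = align_traces(traces, shift_len=25)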
if __name__ == "__main__":
import doctest
doctest.testmod()
| lgpl-3.0 | 1,382,012,111,241,483,300 | -2,261,465,630,878,862,600 | 35.573099 | 79 | 0.624081 | false |
ArtRand/signalAlign | externalTools/lastz-distrib-1.03.54/tools/any_to_qdna.py | 6 | 3214 | #!/usr/bin/env python
"""
Convert any file to a LASTZ quantum dna file, just by prepending qdna headers.
Qdna file format is shown below (omitting "named properties", which we don't
use). We simply create all the headers and copy the file as the "data
sequence".
offset 0x00: C4 B4 71 97 big endian magic number (97 71 B4 C4 => little endian)
offset 0x04: 00 00 02 00 version 2.0 (fourth byte is sub version)
offset 0x08: 00 00 00 14 header length (in bytes, including this field)
offset 0x0C: xx xx xx xx S, offset (from file start) to data sequence
offset 0x10: xx xx xx xx N, offset to name, 0 indicates no name
offset 0x14: xx xx xx xx length of data sequence (counted in 'items')
offset 0x18: 00 00 00 00 (offset to named properties, not used)
offset N: ... name (zero-terminated string)
offset S: ... data sequence
:Author: Bob Harris ([email protected])
"""
from sys import argv,stdin,stdout,exit
def usage(s=None):
message = """any_to_qdna [options] < any_file > qdna_file
Convert any file to a LASTZ quantum dna file.
options:
--name=<string> the name of the sequence
(by default, the sequence is unnamed)
--striplinebreaks strip line breaks from the file
(default is to include line breaks in the qdna file)
--simple create an "old-style" qdna file
(default is to create a version 2 qda file)"""
if (s == None): exit (message)
else: exit ("%s\n%s" % (s,message))
def main():
qdnaOldMagic = 0xF656659EL # big endian magic number for older qdna files
qdnaMagic = 0xC4B47197L # big endian magic number for qdna files
qdnaVersion = 0x00000200L
# parse args
name = None
strip = False
simple = False
for arg in argv[1:]:
if (arg.startswith("--name=")):
name = arg.split("=",1)[1]
elif (arg == "--striplinebreaks") or (arg == "--strip"):
strip = True
elif (arg == "--simple") or (arg == "--old"):
simple = True
elif (arg.startswith("--")):
usage("can't understand %s" % arg)
else:
usage("can't understand %s" % arg)
if (simple) and (name != None):
		usage("simple qdna file cannot carry a sequence name")
# === read the input file ===
seq = []
for line in stdin:
if (strip): line = line.rstrip()
seq += [line]
seq = "".join(seq)
# === write the qdna file ===
if (not simple):
headerLen = 20
if (name == None):
nameOffset = 0
seqOffset = headerLen + 8;
else:
nameOffset = headerLen + 8;
seqOffset = nameOffset + len(name) + 1
# prepend magic number
if (simple): write_4(stdout,qdnaOldMagic)
else: write_4(stdout,qdnaMagic)
# write the rest of the header
if (not simple):
write_4(stdout,qdnaVersion)
write_4(stdout,headerLen)
write_4(stdout,seqOffset)
write_4(stdout,nameOffset)
write_4(stdout,len(seq))
write_4(stdout,0)
if (name != None):
stdout.write(name)
stdout.write(chr(0))
# write the sequence
stdout.write(seq)
def write_4(f,val):
f.write (chr((val >> 24) & 0xFF))
f.write (chr((val >> 16) & 0xFF))
f.write (chr((val >> 8) & 0xFF))
f.write (chr( val & 0xFF))
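# Sketch of reading a generated header back for checking (not part of the
# original tool; "out.qdna" is a hypothetical file name):
#
#   import struct
#   with open("out.qdna", "rb") as f:
#       (magic, version, headerLen, seqOffset,
#        nameOffset, seqLen) = struct.unpack(">6L", f.read(24))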
if __name__ == "__main__": main()
| mit | 8,727,818,812,743,589,000 | 3,139,617,523,226,503,700 | 26.237288 | 82 | 0.630367 | false |
gspilio/nova | nova/network/quantumv2/api.py | 1 | 41934 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved
# Copyright (c) 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import time
from oslo.config import cfg
from nova.compute import instance_types
from nova import conductor
from nova import context
from nova.db import base
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova.network import quantumv2
from nova.network.security_group import openstack_driver
from nova.openstack.common import excutils
from nova.openstack.common import log as logging
from nova.openstack.common import uuidutils
quantum_opts = [
cfg.StrOpt('quantum_url',
default='http://127.0.0.1:9696',
help='URL for connecting to quantum'),
cfg.IntOpt('quantum_url_timeout',
default=30,
help='timeout value for connecting to quantum in seconds'),
cfg.StrOpt('quantum_admin_username',
help='username for connecting to quantum in admin context'),
cfg.StrOpt('quantum_admin_password',
help='password for connecting to quantum in admin context',
secret=True),
cfg.StrOpt('quantum_admin_tenant_name',
help='tenant name for connecting to quantum in admin context'),
cfg.StrOpt('quantum_region_name',
help='region name for connecting to quantum in admin context'),
cfg.StrOpt('quantum_admin_auth_url',
default='http://localhost:5000/v2.0',
help='auth url for connecting to quantum in admin context'),
cfg.BoolOpt('quantum_api_insecure',
default=False,
help='if set, ignore any SSL validation issues'),
cfg.StrOpt('quantum_auth_strategy',
default='keystone',
help='auth strategy for connecting to '
'quantum in admin context'),
# TODO(berrange) temporary hack until Quantum can pass over the
# name of the OVS bridge it is configured with
cfg.StrOpt('quantum_ovs_bridge',
default='br-int',
help='Name of Integration Bridge used by Open vSwitch'),
cfg.IntOpt('quantum_extension_sync_interval',
default=600,
help='Number of seconds before querying quantum for'
' extensions'),
]
CONF = cfg.CONF
CONF.register_opts(quantum_opts)
CONF.import_opt('default_floating_pool', 'nova.network.floating_ips')
CONF.import_opt('flat_injected', 'nova.network.manager')
LOG = logging.getLogger(__name__)
NET_EXTERNAL = 'router:external'
refresh_cache = network_api.refresh_cache
update_instance_info_cache = network_api.update_instance_cache_with_nw_info
class API(base.Base):
"""API for interacting with the quantum 2.x API."""
conductor_api = conductor.API()
security_group_api = openstack_driver.get_openstack_security_group_driver()
def __init__(self):
super(API, self).__init__()
self.last_quantum_extension_sync = None
self.extensions = {}
def setup_networks_on_host(self, context, instance, host=None,
teardown=False):
"""Setup or teardown the network structures."""
def _get_available_networks(self, context, project_id,
net_ids=None):
"""Return a network list available for the tenant.
The list contains networks owned by the tenant and public networks.
        If net_ids is specified, it searches networks with the requested IDs only.
"""
quantum = quantumv2.get_client(context)
# If user has specified to attach instance only to specific
# networks, add them to **search_opts
# (1) Retrieve non-public network list owned by the tenant.
search_opts = {"tenant_id": project_id, 'shared': False}
if net_ids:
search_opts['id'] = net_ids
nets = quantum.list_networks(**search_opts).get('networks', [])
# (2) Retrieve public network list.
search_opts = {'shared': True}
if net_ids:
search_opts['id'] = net_ids
nets += quantum.list_networks(**search_opts).get('networks', [])
_ensure_requested_network_ordering(
lambda x: x['id'],
nets,
net_ids)
return nets
@refresh_cache
def allocate_for_instance(self, context, instance, **kwargs):
"""Allocate network resources for the instance.
TODO(someone): document the rest of these parameters.
:param macs: None or a set of MAC addresses that the instance
should use. macs is supplied by the hypervisor driver (contrast
with requested_networks which is user supplied).
NB: QuantumV2 currently assigns hypervisor supplied MAC addresses
to arbitrary networks, which requires openflow switches to
function correctly if more than one network is being used with
the bare metal hypervisor (which is the only one known to limit
MAC addresses).
"""
hypervisor_macs = kwargs.get('macs', None)
available_macs = None
if hypervisor_macs is not None:
# Make a copy we can mutate: records macs that have not been used
# to create a port on a network. If we find a mac with a
# pre-allocated port we also remove it from this set.
available_macs = set(hypervisor_macs)
quantum = quantumv2.get_client(context)
LOG.debug(_('allocate_for_instance() for %s'),
instance['display_name'])
if not instance['project_id']:
msg = _('empty project id for instance %s')
raise exception.InvalidInput(
reason=msg % instance['display_name'])
requested_networks = kwargs.get('requested_networks')
ports = {}
fixed_ips = {}
net_ids = []
if requested_networks:
for network_id, fixed_ip, port_id in requested_networks:
if port_id:
port = quantum.show_port(port_id)['port']
if hypervisor_macs is not None:
if port['mac_address'] not in hypervisor_macs:
raise exception.PortNotUsable(port_id=port_id,
instance=instance['display_name'])
else:
# Don't try to use this MAC if we need to create a
# port on the fly later. Identical MACs may be
# configured by users into multiple ports so we
# discard rather than popping.
available_macs.discard(port['mac_address'])
network_id = port['network_id']
ports[network_id] = port
elif fixed_ip and network_id:
fixed_ips[network_id] = fixed_ip
if network_id:
net_ids.append(network_id)
nets = self._get_available_networks(context, instance['project_id'],
net_ids)
security_groups = kwargs.get('security_groups', [])
security_group_ids = []
# TODO(arosen) Should optimize more to do direct query for security
# group if len(security_groups) == 1
if len(security_groups):
search_opts = {'tenant_id': instance['project_id']}
user_security_groups = quantum.list_security_groups(
**search_opts).get('security_groups')
for security_group in security_groups:
name_match = None
uuid_match = None
for user_security_group in user_security_groups:
if user_security_group['name'] == security_group:
if name_match:
msg = (_("Multiple security groups found matching"
" '%s'. Use an ID to be more specific."),
security_group)
raise exception.NoUniqueMatch(msg)
name_match = user_security_group['id']
if user_security_group['id'] == security_group:
uuid_match = user_security_group['id']
# If a user names the security group the same as
# another's security groups uuid, the name takes priority.
if not name_match and not uuid_match:
raise exception.SecurityGroupNotFound(
security_group_id=security_group)
                elif name_match:
                    security_group_ids.append(name_match)
                elif uuid_match:
                    security_group_ids.append(uuid_match)
touched_port_ids = []
created_port_ids = []
for network in nets:
# If security groups are requested on an instance then the
            # network must have a subnet associated with it. Some plugins
# implement the port-security extension which requires
# 'port_security_enabled' to be True for security groups.
# That is why True is returned if 'port_security_enabled'
# is not found.
if (security_groups and not (
network['subnets']
and network.get('port_security_enabled', True))):
raise exception.SecurityGroupCannotBeApplied()
network_id = network['id']
zone = 'compute:%s' % instance['availability_zone']
port_req_body = {'port': {'device_id': instance['uuid'],
'device_owner': zone}}
try:
port = ports.get(network_id)
if port:
quantum.update_port(port['id'], port_req_body)
touched_port_ids.append(port['id'])
else:
fixed_ip = fixed_ips.get(network_id)
if fixed_ip:
port_req_body['port']['fixed_ips'] = [{'ip_address':
fixed_ip}]
port_req_body['port']['network_id'] = network_id
port_req_body['port']['admin_state_up'] = True
port_req_body['port']['tenant_id'] = instance['project_id']
if security_group_ids:
port_req_body['port']['security_groups'] = (
security_group_ids)
if available_macs is not None:
if not available_macs:
raise exception.PortNotFree(
instance=instance['display_name'])
mac_address = available_macs.pop()
port_req_body['port']['mac_address'] = mac_address
self._populate_quantum_extension_values(instance,
port_req_body)
created_port_ids.append(
quantum.create_port(port_req_body)['port']['id'])
except Exception:
with excutils.save_and_reraise_exception():
for port_id in touched_port_ids:
port_in_server = quantum.show_port(port_id).get('port')
if not port_in_server:
raise Exception(_('Port not found'))
port_req_body = {'port': {'device_id': None}}
quantum.update_port(port_id, port_req_body)
for port_id in created_port_ids:
try:
quantum.delete_port(port_id)
except Exception as ex:
msg = _("Fail to delete port %(portid)s with"
" failure: %(exception)s")
LOG.debug(msg, {'portid': port_id,
'exception': ex})
self.trigger_security_group_members_refresh(context, instance)
self.trigger_instance_add_security_group_refresh(context, instance)
nw_info = self._get_instance_nw_info(context, instance, networks=nets)
# NOTE(danms): Only return info about ports we created in this run.
# In the initial allocation case, this will be everything we created,
# and in later runs will only be what was created that time. Thus,
# this only affects the attach case, not the original use for this
# method.
return network_model.NetworkInfo([port for port in nw_info
if port['id'] in created_port_ids +
touched_port_ids])
def _refresh_quantum_extensions_cache(self):
if (not self.last_quantum_extension_sync or
((time.time() - self.last_quantum_extension_sync)
>= CONF.quantum_extension_sync_interval)):
quantum = quantumv2.get_client(context.get_admin_context())
extensions_list = quantum.list_extensions()['extensions']
self.last_quantum_extension_sync = time.time()
self.extensions.clear()
self.extensions = dict((ext['name'], ext)
for ext in extensions_list)
def _populate_quantum_extension_values(self, instance, port_req_body):
self._refresh_quantum_extensions_cache()
if 'nvp-qos' in self.extensions:
instance_type = instance_types.extract_instance_type(instance)
rxtx_factor = instance_type.get('rxtx_factor')
port_req_body['port']['rxtx_factor'] = rxtx_factor
def deallocate_for_instance(self, context, instance, **kwargs):
"""Deallocate all network resources related to the instance."""
LOG.debug(_('deallocate_for_instance() for %s'),
instance['display_name'])
search_opts = {'device_id': instance['uuid']}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data.get('ports', [])
for port in ports:
try:
quantumv2.get_client(context).delete_port(port['id'])
except Exception as ex:
LOG.exception(_("Failed to delete quantum port %(portid)s ")
% {'portid': port['id']})
self.trigger_security_group_members_refresh(context, instance)
self.trigger_instance_remove_security_group_refresh(context, instance)
@refresh_cache
def allocate_port_for_instance(self, context, instance, port_id,
network_id=None, requested_ip=None,
conductor_api=None):
return self.allocate_for_instance(context, instance,
requested_networks=[(network_id, requested_ip, port_id)],
conductor_api=conductor_api)
@refresh_cache
def deallocate_port_for_instance(self, context, instance, port_id,
conductor_api=None):
try:
quantumv2.get_client(context).delete_port(port_id)
except Exception as ex:
LOG.exception(_("Failed to delete quantum port %(port_id)s ") %
locals())
self.trigger_security_group_members_refresh(context, instance)
self.trigger_instance_remove_security_group_refresh(context, instance)
return self._get_instance_nw_info(context, instance)
def list_ports(self, context, **search_opts):
return quantumv2.get_client(context).list_ports(**search_opts)
def show_port(self, context, port_id):
return quantumv2.get_client(context).show_port(port_id)
def get_instance_nw_info(self, context, instance, conductor_api=None,
networks=None):
result = self._get_instance_nw_info(context, instance, networks)
update_instance_info_cache(self, context, instance, result,
conductor_api)
return result
def _get_instance_nw_info(self, context, instance, networks=None):
LOG.debug(_('get_instance_nw_info() for %s'),
instance['display_name'])
nw_info = self._build_network_info_model(context, instance, networks)
return network_model.NetworkInfo.hydrate(nw_info)
@refresh_cache
def add_fixed_ip_to_instance(self, context, instance, network_id,
conductor_api=None):
"""Add a fixed ip to the instance from specified network."""
search_opts = {'network_id': network_id}
data = quantumv2.get_client(context).list_subnets(**search_opts)
ipam_subnets = data.get('subnets', [])
if not ipam_subnets:
raise exception.NetworkNotFoundForInstance(
instance_id=instance['uuid'])
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone,
'network_id': network_id}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data['ports']
for p in ports:
for subnet in ipam_subnets:
fixed_ips = p['fixed_ips']
fixed_ips.append({'subnet_id': subnet['id']})
port_req_body = {'port': {'fixed_ips': fixed_ips}}
try:
quantumv2.get_client(context).update_port(p['id'],
port_req_body)
return
except Exception as ex:
msg = _("Unable to update port %(portid)s on subnet "
"%(subnet_id)s with failure: %(exception)s")
LOG.debug(msg, {'portid': p['id'],
'subnet_id': subnet['id'],
'exception': ex})
raise exception.NetworkNotFoundForInstance(
instance_id=instance['uuid'])
@refresh_cache
def remove_fixed_ip_from_instance(self, context, instance, address,
conductor_api=None):
"""Remove a fixed ip from the instance."""
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone,
'fixed_ips': 'ip_address=%s' % address}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data['ports']
for p in ports:
fixed_ips = p['fixed_ips']
new_fixed_ips = []
for fixed_ip in fixed_ips:
if fixed_ip['ip_address'] != address:
new_fixed_ips.append(fixed_ip)
port_req_body = {'port': {'fixed_ips': new_fixed_ips}}
try:
quantumv2.get_client(context).update_port(p['id'],
port_req_body)
except Exception as ex:
msg = _("Unable to update port %(portid)s with"
" failure: %(exception)s")
LOG.debug(msg, {'portid': p['id'], 'exception': ex})
return
raise exception.FixedIpNotFoundForSpecificInstance(
instance_uuid=instance['uuid'], ip=address)
def validate_networks(self, context, requested_networks):
"""Validate that the tenant can use the requested networks."""
LOG.debug(_('validate_networks() for %s'),
requested_networks)
if not requested_networks:
return
net_ids = []
for (net_id, _i, port_id) in requested_networks:
if not port_id:
net_ids.append(net_id)
continue
port = quantumv2.get_client(context).show_port(port_id).get('port')
if not port:
raise exception.PortNotFound(port_id=port_id)
if port.get('device_id', None):
raise exception.PortInUse(port_id=port_id)
net_id = port['network_id']
if net_id in net_ids:
raise exception.NetworkDuplicated(network_id=net_id)
net_ids.append(net_id)
nets = self._get_available_networks(context, context.project_id,
net_ids)
if len(nets) != len(net_ids):
            requested_netid_set = set(net_ids)
            returned_netid_set = set([net['id'] for net in nets])
            lostid_set = requested_netid_set - returned_netid_set
id_str = ''
for _id in lostid_set:
id_str = id_str and id_str + ', ' + _id or _id
raise exception.NetworkNotFound(network_id=id_str)
def _get_instance_uuids_by_ip(self, context, address):
"""Retrieve instance uuids associated with the given ip address.
:returns: A list of dicts containing the uuids keyed by 'instance_uuid'
e.g. [{'instance_uuid': uuid}, ...]
"""
search_opts = {"fixed_ips": 'ip_address=%s' % address}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data.get('ports', [])
return [{'instance_uuid': port['device_id']} for port in ports
if port['device_id']]
def get_instance_uuids_by_ip_filter(self, context, filters):
"""Return a list of dicts in the form of
[{'instance_uuid': uuid}] that matched the ip filter.
"""
# filters['ip'] is composed as '^%s$' % fixed_ip.replace('.', '\\.')
ip = filters.get('ip')
        # we remove ^$\ in the ip filter
if ip[0] == '^':
ip = ip[1:]
if ip[-1] == '$':
ip = ip[:-1]
ip = ip.replace('\\.', '.')
return self._get_instance_uuids_by_ip(context, ip)
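    # Illustrative only (not part of the original code): the regex-style
    # filter is reduced to a plain address before the lookup above, e.g.
    #
    #   filters = {'ip': '^10\\.0\\.0\\.2$'}
    #   # stripping '^' and '$' and unescaping '\\.' yields '10.0.0.2'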
def trigger_instance_add_security_group_refresh(self, context,
instance_ref):
admin_context = context.elevated()
for group in instance_ref['security_groups']:
self.conductor_api.security_groups_trigger_handler(context,
'instance_add_security_group', instance_ref, group['name'])
def trigger_instance_remove_security_group_refresh(self, context,
instance_ref):
admin_context = context.elevated()
for group in instance_ref['security_groups']:
self.conductor_api.security_groups_trigger_handler(context,
'instance_remove_security_group', instance_ref, group['name'])
def trigger_security_group_members_refresh(self, context, instance_ref):
admin_context = context.elevated()
group_ids = [group['id'] for group in instance_ref['security_groups']]
self.conductor_api.security_groups_trigger_members_refresh(
admin_context, group_ids)
self.conductor_api.security_groups_trigger_handler(admin_context,
'security_group_members', group_ids)
def _get_port_id_by_fixed_address(self, client,
instance, address):
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone}
data = client.list_ports(**search_opts)
ports = data['ports']
port_id = None
for p in ports:
for ip in p['fixed_ips']:
if ip['ip_address'] == address:
port_id = p['id']
break
if not port_id:
raise exception.FixedIpNotFoundForAddress(address=address)
return port_id
@refresh_cache
def associate_floating_ip(self, context, instance,
floating_address, fixed_address,
affect_auto_assigned=False):
"""Associate a floating ip with a fixed ip."""
# Note(amotoki): 'affect_auto_assigned' is not respected
# since it is not used anywhere in nova code and I could
        # not find why this parameter exists.
client = quantumv2.get_client(context)
port_id = self._get_port_id_by_fixed_address(client, instance,
fixed_address)
fip = self._get_floating_ip_by_address(client, floating_address)
param = {'port_id': port_id,
'fixed_ip_address': fixed_address}
client.update_floatingip(fip['id'], {'floatingip': param})
def get_all(self, context):
client = quantumv2.get_client(context)
networks = client.list_networks().get('networks') or {}
for network in networks:
network['label'] = network['name']
return networks
def get(self, context, network_uuid):
client = quantumv2.get_client(context)
network = client.show_network(network_uuid).get('network') or {}
network['label'] = network['name']
return network
def delete(self, context, network_uuid):
raise NotImplementedError()
def disassociate(self, context, network_uuid):
raise NotImplementedError()
def get_fixed_ip(self, context, id):
raise NotImplementedError()
def get_fixed_ip_by_address(self, context, address):
uuid_maps = self._get_instance_uuids_by_ip(context, address)
if len(uuid_maps) == 1:
return uuid_maps[0]
elif not uuid_maps:
raise exception.FixedIpNotFoundForAddress(address=address)
else:
raise exception.FixedIpAssociatedWithMultipleInstances(
address=address)
def _setup_net_dict(self, client, network_id):
if not network_id:
return {}
pool = client.show_network(network_id)['network']
return {pool['id']: pool}
def _setup_port_dict(self, client, port_id):
if not port_id:
return {}
port = client.show_port(port_id)['port']
return {port['id']: port}
def _setup_pools_dict(self, client):
pools = self._get_floating_ip_pools(client)
return dict([(i['id'], i) for i in pools])
def _setup_ports_dict(self, client, project_id=None):
search_opts = {'tenant_id': project_id} if project_id else {}
ports = client.list_ports(**search_opts)['ports']
return dict([(p['id'], p) for p in ports])
def get_floating_ip(self, context, id):
client = quantumv2.get_client(context)
fip = client.show_floatingip(id)['floatingip']
pool_dict = self._setup_net_dict(client,
fip['floating_network_id'])
port_dict = self._setup_port_dict(client, fip['port_id'])
return self._format_floating_ip_model(fip, pool_dict, port_dict)
def _get_floating_ip_pools(self, client, project_id=None):
search_opts = {NET_EXTERNAL: True}
if project_id:
search_opts.update({'tenant_id': project_id})
data = client.list_networks(**search_opts)
return data['networks']
def get_floating_ip_pools(self, context):
client = quantumv2.get_client(context)
pools = self._get_floating_ip_pools(client)
return [{'name': n['name'] or n['id']} for n in pools]
def _format_floating_ip_model(self, fip, pool_dict, port_dict):
pool = pool_dict[fip['floating_network_id']]
result = {'id': fip['id'],
'address': fip['floating_ip_address'],
'pool': pool['name'] or pool['id'],
'project_id': fip['tenant_id'],
# In Quantum v2, an exact fixed_ip_id does not exist.
'fixed_ip_id': fip['port_id'],
}
# In Quantum v2 API fixed_ip_address and instance uuid
# (= device_id) are known here, so pass it as a result.
result['fixed_ip'] = {'address': fip['fixed_ip_address']}
if fip['port_id']:
instance_uuid = port_dict[fip['port_id']]['device_id']
result['instance'] = {'uuid': instance_uuid}
else:
result['instance'] = None
return result
def get_floating_ip_by_address(self, context, address):
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
pool_dict = self._setup_net_dict(client,
fip['floating_network_id'])
port_dict = self._setup_port_dict(client, fip['port_id'])
return self._format_floating_ip_model(fip, pool_dict, port_dict)
def get_floating_ips_by_project(self, context):
client = quantumv2.get_client(context)
project_id = context.project_id
fips = client.list_floatingips(tenant_id=project_id)['floatingips']
pool_dict = self._setup_pools_dict(client)
port_dict = self._setup_ports_dict(client, project_id)
return [self._format_floating_ip_model(fip, pool_dict, port_dict)
for fip in fips]
def get_floating_ips_by_fixed_address(self, context, fixed_address):
return []
def get_instance_id_by_floating_address(self, context, address):
"""Returns the instance id a floating ip's fixed ip is allocated to."""
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
if not fip['port_id']:
return None
port = client.show_port(fip['port_id'])['port']
return port['device_id']
def get_vifs_by_instance(self, context, instance):
raise NotImplementedError()
def get_vif_by_mac_address(self, context, mac_address):
raise NotImplementedError()
def _get_floating_ip_pool_id_by_name_or_id(self, client, name_or_id):
search_opts = {NET_EXTERNAL: True, 'fields': 'id'}
if uuidutils.is_uuid_like(name_or_id):
search_opts.update({'id': name_or_id})
else:
search_opts.update({'name': name_or_id})
data = client.list_networks(**search_opts)
nets = data['networks']
if len(nets) == 1:
return nets[0]['id']
elif len(nets) == 0:
raise exception.FloatingIpPoolNotFound()
else:
msg = (_("Multiple floating IP pools matches found for name '%s'")
% name_or_id)
raise exception.NovaException(message=msg)
def allocate_floating_ip(self, context, pool=None):
"""Add a floating ip to a project from a pool."""
client = quantumv2.get_client(context)
pool = pool or CONF.default_floating_pool
pool_id = self._get_floating_ip_pool_id_by_name_or_id(client, pool)
# TODO(amotoki): handle exception during create_floatingip()
# At this timing it is ensured that a network for pool exists.
# quota error may be returned.
param = {'floatingip': {'floating_network_id': pool_id}}
fip = client.create_floatingip(param)
return fip['floatingip']['floating_ip_address']
def _get_floating_ip_by_address(self, client, address):
"""Get floatingip from floating ip address."""
data = client.list_floatingips(floating_ip_address=address)
fips = data['floatingips']
if len(fips) == 0:
raise exception.FloatingIpNotFoundForAddress(address=address)
elif len(fips) > 1:
raise exception.FloatingIpMultipleFoundForAddress(address=address)
return fips[0]
def _get_floating_ips_by_fixed_and_port(self, client, fixed_ip, port):
"""Get floatingips from fixed ip and port."""
data = client.list_floatingips(fixed_ip_address=fixed_ip, port_id=port)
return data['floatingips']
def release_floating_ip(self, context, address,
affect_auto_assigned=False):
"""Remove a floating ip with the given address from a project."""
# Note(amotoki): We cannot handle a case where multiple pools
# have overlapping IP address range. In this case we cannot use
# 'address' as a unique key.
# This is a limitation of the current nova.
# Note(amotoki): 'affect_auto_assigned' is not respected
# since it is not used anywhere in nova code and I could
        # not find why this parameter exists.
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
if fip['port_id']:
raise exception.FloatingIpAssociated(address=address)
client.delete_floatingip(fip['id'])
@refresh_cache
def disassociate_floating_ip(self, context, instance, address,
affect_auto_assigned=False):
"""Disassociate a floating ip from the instance."""
# Note(amotoki): 'affect_auto_assigned' is not respected
# since it is not used anywhere in nova code and I could
        # not find why this parameter exists.
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
client.update_floatingip(fip['id'], {'floatingip': {'port_id': None}})
def migrate_instance_start(self, context, instance, migration):
"""Start to migrate the network of an instance."""
        # NOTE(wenjianhn): just pass so that migrate instance doesn't
        # raise for now.
pass
def migrate_instance_finish(self, context, instance, migration):
"""Finish migrating the network of an instance."""
        # NOTE(wenjianhn): just pass so that migrate instance doesn't
        # raise for now.
pass
def add_network_to_project(self, context, project_id, network_uuid=None):
"""Force add a network to the project."""
raise NotImplementedError()
def _build_network_info_model(self, context, instance, networks=None):
search_opts = {'tenant_id': instance['project_id'],
'device_id': instance['uuid'], }
client = quantumv2.get_client(context, admin=True)
data = client.list_ports(**search_opts)
ports = data.get('ports', [])
if networks is None:
networks = self._get_available_networks(context,
instance['project_id'])
else:
# ensure ports are in preferred network order
_ensure_requested_network_ordering(
lambda x: x['network_id'],
ports,
[n['id'] for n in networks])
nw_info = network_model.NetworkInfo()
for port in ports:
network_name = None
for net in networks:
if port['network_id'] == net['id']:
network_name = net['name']
break
if network_name is None:
raise exception.NotFound(_('Network %(net)s for '
'port %(port_id)s not found!') %
{'net': port['network_id'],
'port': port['id']})
network_IPs = []
for fixed_ip in port['fixed_ips']:
fixed = network_model.FixedIP(address=fixed_ip['ip_address'])
floats = self._get_floating_ips_by_fixed_and_port(
client, fixed_ip['ip_address'], port['id'])
for ip in floats:
fip = network_model.IP(address=ip['floating_ip_address'],
type='floating')
fixed.add_floating_ip(fip)
network_IPs.append(fixed)
subnets = self._get_subnets_from_port(context, port)
for subnet in subnets:
subnet['ips'] = [fixed_ip for fixed_ip in network_IPs
if fixed_ip.is_in_subnet(subnet)]
bridge = None
ovs_interfaceid = None
# Network model metadata
should_create_bridge = None
vif_type = port.get('binding:vif_type')
# TODO(berrange) Quantum should pass the bridge name
# in another binding metadata field
if vif_type == network_model.VIF_TYPE_OVS:
bridge = CONF.quantum_ovs_bridge
ovs_interfaceid = port['id']
elif vif_type == network_model.VIF_TYPE_BRIDGE:
bridge = "brq" + port['network_id']
should_create_bridge = True
if bridge is not None:
bridge = bridge[:network_model.NIC_NAME_LEN]
devname = "tap" + port['id']
devname = devname[:network_model.NIC_NAME_LEN]
network = network_model.Network(
id=port['network_id'],
bridge=bridge,
injected=CONF.flat_injected,
label=network_name,
tenant_id=net['tenant_id']
)
network['subnets'] = subnets
if should_create_bridge is not None:
network['should_create_bridge'] = should_create_bridge
nw_info.append(network_model.VIF(
id=port['id'],
address=port['mac_address'],
network=network,
type=port.get('binding:vif_type'),
ovs_interfaceid=ovs_interfaceid,
devname=devname))
return nw_info
def _get_subnets_from_port(self, context, port):
"""Return the subnets for a given port."""
fixed_ips = port['fixed_ips']
# No fixed_ips for the port means there is no subnet associated
# with the network the port is created on.
# Since list_subnets(id=[]) returns all subnets visible for the
# current tenant, returned subnets may contain subnets which is not
# related to the port. To avoid this, the method returns here.
if not fixed_ips:
return []
search_opts = {'id': [ip['subnet_id'] for ip in fixed_ips]}
data = quantumv2.get_client(context).list_subnets(**search_opts)
ipam_subnets = data.get('subnets', [])
subnets = []
for subnet in ipam_subnets:
subnet_dict = {'cidr': subnet['cidr'],
'gateway': network_model.IP(
address=subnet['gateway_ip'],
type='gateway'),
}
# attempt to populate DHCP server field
search_opts = {'network_id': subnet['network_id'],
'device_owner': 'network:dhcp'}
data = quantumv2.get_client(context).list_ports(**search_opts)
dhcp_ports = data.get('ports', [])
for p in dhcp_ports:
for ip_pair in p['fixed_ips']:
if ip_pair['subnet_id'] == subnet['id']:
subnet_dict['dhcp_server'] = ip_pair['ip_address']
break
subnet_object = network_model.Subnet(**subnet_dict)
for dns in subnet.get('dns_nameservers', []):
subnet_object.add_dns(
network_model.IP(address=dns, type='dns'))
# TODO(gongysh) get the routes for this subnet
subnets.append(subnet_object)
return subnets
def get_dns_domains(self, context):
"""Return a list of available dns domains.
These can be used to create DNS entries for floating ips.
"""
raise NotImplementedError()
def add_dns_entry(self, context, address, name, dns_type, domain):
"""Create specified DNS entry for address."""
raise NotImplementedError()
def modify_dns_entry(self, context, name, address, domain):
"""Create specified DNS entry for address."""
raise NotImplementedError()
def delete_dns_entry(self, context, name, domain):
"""Delete the specified dns entry."""
raise NotImplementedError()
def delete_dns_domain(self, context, domain):
"""Delete the specified dns domain."""
raise NotImplementedError()
def get_dns_entries_by_address(self, context, address, domain):
"""Get entries for address and domain."""
raise NotImplementedError()
def get_dns_entries_by_name(self, context, name, domain):
"""Get entries for name and domain."""
raise NotImplementedError()
def create_private_dns_domain(self, context, domain, availability_zone):
"""Create a private DNS domain with nova availability zone."""
raise NotImplementedError()
def create_public_dns_domain(self, context, domain, project=None):
"""Create a private DNS domain with optional nova project."""
raise NotImplementedError()
def _ensure_requested_network_ordering(accessor, unordered, preferred):
"""Sort a list with respect to the preferred network ordering."""
if preferred:
unordered.sort(key=lambda i: preferred.index(accessor(i)))
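# Illustrative sketch (not part of the original module): a worked example of
# _ensure_requested_network_ordering. The minimal port dicts below are
# hypothetical stand-ins for Quantum port records.
def _example_requested_network_ordering():
    ports = [{'network_id': 'net-b'}, {'network_id': 'net-a'}]
    _ensure_requested_network_ordering(
        lambda port: port['network_id'], ports, ['net-a', 'net-b'])
    # ports is now [{'network_id': 'net-a'}, {'network_id': 'net-b'}]
    return ports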
| apache-2.0 | 6,301,656,186,177,334,000 | 3,656,243,572,526,163,000 | 43.374603 | 79 | 0.563147 | false |
kangkot/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/site-packages/pythonwin/pywin/Demos/dibdemo.py | 17 | 1930 | # A demo which creates a view and a frame which displays a PPM format bitmap
#
# This hasn't been run in a while, as I don't have many files of that format around!
import win32ui
import win32con
import win32api
import string
class DIBView:
def __init__(self, doc, dib):
self.dib = dib
self.view = win32ui.CreateView(doc)
self.width = self.height = 0
# set up message handlers
# self.view.OnPrepareDC = self.OnPrepareDC
self.view.HookMessage (self.OnSize, win32con.WM_SIZE)
def OnSize (self, params):
lParam = params[3]
self.width = win32api.LOWORD(lParam)
self.height = win32api.HIWORD(lParam)
def OnDraw (self, ob, dc):
# set sizes used for "non strecth" mode.
self.view.SetScrollSizes(win32con.MM_TEXT, self.dib.GetSize())
dibSize = self.dib.GetSize()
dibRect = (0,0,dibSize[0], dibSize[1])
# stretch BMP.
#self.dib.Paint(dc, (0,0,self.width, self.height),dibRect)
# non stretch.
self.dib.Paint(dc)
class DIBDemo:
def __init__(self, filename, * bPBM):
# init data members
f = open(filename, 'rb')
dib=win32ui.CreateDIBitmap()
if len(bPBM)>0:
magic=f.readline()
            if magic != "P6\n":
                print "The file is not a P6 PPM format file"
                raise ValueError("not a P6 PPM format file")
# check magic?
rowcollist=string.split(f.readline())
cols=string.atoi(rowcollist[0])
rows=string.atoi(rowcollist[1])
f.readline() # whats this one?
dib.LoadPBMData(f,(cols,rows))
else:
dib.LoadWindowsFormatFile(f)
f.close()
# create doc/view
self.doc = win32ui.CreateDoc()
self.dibView = DIBView( self.doc, dib )
self.frame = win32ui.CreateMDIFrame()
self.frame.LoadFrame() # this will force OnCreateClient
self.doc.SetTitle ('DIB Demo')
self.frame.ShowWindow()
# display the sucka
self.frame.ActivateFrame()
def OnCreateClient( self, createparams, context ):
self.dibView.view.CreateWindow(self.frame)
return 1
if __name__=='__main__':
import demoutils
demoutils.NotAScript() | apache-2.0 | 4,893,682,773,342,061,000 | 6,453,116,194,810,711,000 | 26.985507 | 77 | 0.695337 | false |
sentient-energy/emsw-oe-mirror | contrib/mtnpatch.py | 45 | 2048 | #!/usr/bin/env python
import sys, os, string, getopt, re
mtncmd = "mtn"
def main(argv = None):
if argv is None:
argv = sys.argv
opts, list = getopt.getopt(sys.argv[1:], ':R')
if len(list) < 1:
print "You must specify a file"
return 2
reverse = False
for o, a in opts:
if o == "-R":
reverse = True
if os.path.exists(list[0]):
input = open(list[0], 'r')
renameFrom = ""
cmd = ""
for line in input:
if len(line) > 0:
if line[0] == '#':
matches = re.search("#\s+(\w+)\s+\"(.*)\"", line)
if matches is not None:
cmd = matches.group(1)
fileName = matches.group(2)
if cmd == "delete":
if reverse:
print "%s add %s" % (mtncmd, fileName)
else:
print "%s drop -e %s" % (mtncmd, fileName)
elif cmd == "add" or cmd == "add_file" or cmd == "add_dir":
if reverse:
print "%s drop -e %s" % (mtncmd, fileName)
else:
print "%s add %s" % (mtncmd, fileName)
elif cmd == "rename":
renameFrom = fileName
elif cmd == "to" and renameFrom != "":
if reverse:
print "%s rename -e %s %s" % (mtncmd, fileName, renameFrom)
else:
print "%s rename -e %s %s" % (mtncmd, renameFrom, fileName)
renameFrom = ""
else:
cmd = ""
if reverse:
print "patch -R -p0 < %s" % list[0]
else:
print "patch -p0 < %s" % list[0]
if __name__ == "__main__":
sys.exit(main())
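# Illustrative usage note (an assumption, not part of the original script): the
# script reads monotone patch headers such as
#   #   add_file "src/new_module.c"
#   #   rename "old_name.c"
#   #   to "new_name.c"
# and prints the matching "mtn add" / "mtn drop -e" / "mtn rename -e" commands,
# ending with "patch -p0 < patchfile". A typical (hypothetical) invocation:
#   python mtnpatch.py changes.patch | sh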
| mit | 4,798,557,175,133,122,000 | -712,660,776,898,535,700 | 36.925926 | 91 | 0.367188 | false |
hlt-mt/tensorflow | tensorflow/python/framework/function.py | 4 | 15235 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Python front-end supports for functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import inspect
import re
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import function_pb2
from tensorflow.core.framework import op_def_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
def _make_argname_from_tensor_name(name):
return re.sub(":0$", "", name).replace(":", "_o")
def _tensor_to_argdef(t):
arg = op_def_pb2.OpDef.ArgDef()
arg.name = _make_argname_from_tensor_name(t.name)
arg.type = t.dtype.as_datatype_enum
return arg
def _get_node_def_attr(op):
# pylint: disable=protected-access
return op._node_def.attr
# pylint: enable=protected-access
def _add_input_array(op, start, limit, dtype, func):
"""Adds a _ListToArray node in the func for op.inputs[start:limit]."""
node = function_pb2.FunctionDef.Node()
node.op = "_ListToArray"
ret_name = op.name + "_L2A_" + str(start)
node.ret.extend([ret_name])
node.arg.extend([_make_argname_from_tensor_name(x.name)
for x in op.inputs[start:limit]])
num = limit - start
node.attr["Tin"].CopyFrom(attr_value_pb2.AttrValue(
list=attr_value_pb2.AttrValue.ListValue(type=[dtype] * num)))
node.attr["T"].CopyFrom(attr_value_pb2.AttrValue(type=dtype))
node.attr["N"].CopyFrom(attr_value_pb2.AttrValue(i=num))
func.node.extend([node])
return ret_name
def _add_output_array(op, start, limit, dtype, func):
"""Adds a _ArrayToList node in the func for op.outputs[start:limit]."""
dtype_proto = attr_value_pb2.AttrValue(type=dtype)
# A node converting N*T to list(T)
node = function_pb2.FunctionDef.Node()
node.op = "_ArrayToList"
arg_name = op.name + "_A2L_" + str(start)
ret_name = arg_name + "_out"
node.ret.append(ret_name)
node.arg.append(arg_name)
node.attr["T"].CopyFrom(dtype_proto)
num = limit - start
node.attr["N"].CopyFrom(attr_value_pb2.AttrValue(i=num))
node.attr["out_types"].CopyFrom(attr_value_pb2.AttrValue(
list=attr_value_pb2.AttrValue.ListValue(type=[dtype] * num)))
func.node.extend([node])
num = limit - start
# Adds an identity node for each element in the array N*T so that
  # uses of each element can be added easily later. These Identity nodes
  # will be eliminated before graph execution.
for i in xrange(num):
node = function_pb2.FunctionDef.Node()
node.op = "Identity"
node.arg.append(ret_name + ":" + str(i))
node.ret.append(_make_argname_from_tensor_name(op.outputs[i].name))
node.attr["T"].CopyFrom(dtype_proto)
func.node.extend([node])
return arg_name
def _add_output_list(op, start, limit, dtype_lst, func):
"""Adds a _ArrayToList node in the func for op.outputs[start:limit]."""
ret_name = op.name + "_Lst_" + str(start) + "_" + str(limit)
num = limit - start
assert len(dtype_lst) == num
# Adds an identity node for each element in the array N*T so that
  # uses of each element can be added easily later. These Identity nodes
  # will be eliminated before graph execution.
for i in xrange(num):
node = function_pb2.FunctionDef.Node()
node.op = "Identity"
node.arg.append(ret_name + ":" + str(i))
node.ret.append(_make_argname_from_tensor_name(op.outputs[i].name))
node.attr["T"].CopyFrom(attr_value_pb2.AttrValue(type=dtype_lst[i]))
func.node.extend([node])
return ret_name
def _add_op_node(graph, op, func):
"""Converts an op to a function def node and add it to `func`."""
node = function_pb2.FunctionDef.Node()
node.op = op.type
# pylint: disable=protected-access
if graph._is_function(op.type):
op_def = graph._get_function(op.type).signature
else:
op_def = op_def_registry.get_registered_ops()[op.type]
# pylint: enable=protected-access
attrs = _get_node_def_attr(op)
out_index = 0
for arg_def in op_def.output_arg:
if arg_def.number_attr:
dtype = arg_def.type or attrs[arg_def.type_attr].type
num = attrs[arg_def.number_attr].i
node.ret.append(_add_output_array(op, out_index, out_index + num, dtype,
func))
out_index += num
elif arg_def.type_list_attr:
dtype_lst = attrs[arg_def.type_list_attr].list.type
num = len(dtype_lst)
node.ret.append(_add_output_list(op, out_index, out_index + num,
dtype_lst, func))
out_index += num
else:
node.ret.append(_make_argname_from_tensor_name(op.outputs[
out_index].name))
out_index += 1
inp_index = 0
for arg_def in op_def.input_arg:
if arg_def.number_attr:
dtype = arg_def.type or attrs[arg_def.type_attr].type
num = attrs[arg_def.number_attr].i
node.arg.append(_add_input_array(op, inp_index, inp_index + num, dtype,
func))
inp_index += num
elif arg_def.type_list_attr:
num = len(attrs[arg_def.type_list_attr].list.type)
node.arg.extend([_make_argname_from_tensor_name(op.inputs[i].name)
for i in range(inp_index, inp_index + num)])
inp_index += num
else:
node.arg.append(_make_argname_from_tensor_name(op.inputs[inp_index].name))
inp_index += 1
node.dep.extend([_make_argname_from_tensor_name(x.name)
for x in op.control_inputs])
for k, v in _get_node_def_attr(op).iteritems():
node.attr[k].CopyFrom(v)
func.node.extend([node])
# pylint: disable=line-too-long
def graph_to_function_def(graph, name, inputs, outputs):
"""Returns `graph` as a `FunctionDef` protocol buffer.
This method creates a [`FunctionDef`](
https://www.tensorflow.org/code/tensorflow/core/framework/function.proto)
protocol buffer that contains all the ops present in the graph. The
graph effectively becomes the body of the function.
The arguments `inputs` and `outputs` will be listed as the inputs
and outputs tensors of the function. They must be lists of
tensors present in the graph. The lists can optionally be empty.
The returned protocol buffer can be passed to the
[`Graph.add_function()`](#Graph.add_function) method of a
different graph to make it available there.
Args:
graph: GraphDef proto.
name: string. The name to use for the function.
inputs: List of tensors. Inputs to the function.
outputs: List of tensors. Outputs of the function.
Returns:
A FunctionDef protocol buffer.
"""
# pylint: enable=line-too-long
func = function_pb2.FunctionDef()
func.signature.name = name
func.signature.input_arg.extend([_tensor_to_argdef(graph.get_tensor_by_name(
i.name)) for i in inputs])
func.signature.output_arg.extend([_tensor_to_argdef(graph.get_tensor_by_name(
o.name)) for o in outputs])
func_arg_placeholders = set([i.name for i in inputs])
g = ops.get_default_graph()
for op in graph.get_operations():
tensor_name = op.values()[0].name
if tensor_name not in func_arg_placeholders:
_add_op_node(g, op, func)
return func
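# Illustrative sketch (an assumption, not part of the original module): build a
# tiny graph and convert it into a FunctionDef with graph_to_function_def.
# The tensor names and the "ExampleAdd" function name are hypothetical.
def _example_graph_to_function_def():
  temp_graph = ops.Graph()
  with temp_graph.as_default():
    x = array_ops.placeholder(dtypes.float32, name="x")
    y = array_ops.placeholder(dtypes.float32, name="y")
    out = x + y
    func_def = graph_to_function_def(temp_graph, "ExampleAdd", [x, y], [out])
  # The resulting FunctionDef can be added to another graph's function library
  # and invoked through call_function(func_def, a, b).
  return func_def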
def call_function(func_def, *inputs, **kwargs):
"""Calls the function described by `func_def`.
This adds a `call` op to the default graph that calls the function described
by `func_def` with the tensors listed in `inputs` as arguments. It returns
the outputs of the call, which are one or more tensors.
`func_def` is a
[`FunctionDef`](
https://www.tensorflow.org/code/tensorflow/core/framework/function.proto)
  protocol buffer describing a
TensorFlow function. See [`define_function()`](#define_function) for an
easy way to create one from a Python function.
You can pass an optional keyword parameters `name=string` to name the
added operation.
`func_def` is automatically added to the function library of the graph if
needed.
Args:
func_def: A `FunctionDef` protocol buffer.
*inputs: A list of tensors
**kwargs: Optional keyword arguments. Can only contain 'name'.
Returns:
A list of tensors representing the outputs of the call to `func_def`.
Raises:
ValueError: if the arguments are invalid.
"""
name = kwargs.pop("name", None)
if kwargs:
raise ValueError("Unknown keyword arguments: %s" % kwargs.keys())
func_name = func_def.signature.name
with ops.op_scope(inputs, name, func_name) as name:
if len(inputs) != len(func_def.signature.input_arg):
raise ValueError("Expected number of arguments: %d" %
len(func_def.signature.input_arg))
output_types = [dtypes.DType(x.type) for x in func_def.signature.output_arg]
# TODO(touts): Pass compute_shapes as "try if function exists"
g = ops.get_default_graph()
op = g.create_op(func_name,
list(inputs),
output_types,
name=name,
compute_shapes=False)
if op.outputs:
if len(op.outputs) == 1:
return op.outputs[0]
else:
return tuple(op.outputs)
else:
return op
def define_function(func, input_types):
"""Creates a `FunctionDef` for a python function.
`func` is a Python function that receives zero or more tensors and returns at
least one tensor. It should add ops to the default graph the usual way by
calling TensorFlow functions such as `tf.constant()`, `tf.matmul()`, etc.
`input_types` is a dictionary of strings to `tf.Dtype` objects. Keys are
  names of arguments to `func`. The values indicate the type of tensor expected
by the function.
The returned `FunctionDef` protocol buffer is also added to the
default graph library. After it has been added you can add calls to
the function by passing it to `tf.call_function()`, together with a
list of tensors to use as inputs for the function.
Notes:
* `func` is called once, with `placeholder` tensors of the types specified in
`input_types` as arguments.
* Values returned by `func` must be tensors and they are recorded as being
the output of the function def.
  * While `func` is called, an empty graph is temporarily pushed as the
default graph. All ops added by `func` to that graph are part of the body
of the returned function def.
Example, but also see the [How To on functions](link_needed).
```python
# A function that receives two tensors x, y and returns their
# sum and difference.
def my_func(x, y):
return x + y, x - y
  # Create a FunctionDef for 'my_func'. (This does not change the
  # default graph.)
my_func_def = tf.define_function(my_func, {'x': tf.float32, 'y': tf.float32})
# Build the graph, calling the function.
a = tf.constant([1.0])
b = tf.constant([2.0])
c, d = tf.call_function(my_func_def, a, b, name='mycall')
```
Args:
func: a Python function.
input_types: dict. Keys are the names of the arguments of `func`, values
are their expected `tf.DType`.
Returns:
A FunctionDef protocol buffer.
Raises:
ValueError: if the arguments are invalid.
"""
# TODO(touts): Lift the limitation that func can only receive Tensor args.
if inspect.isfunction(func):
func_name = func.__name__
elif inspect.ismethod(func):
func_name = func.im_self.__name__ + "." + func.__name__
else:
raise ValueError("Argument must be a function")
argspec = inspect.getargspec(func)
if argspec.varargs or argspec.keywords or argspec.defaults:
raise ValueError("Only functions with plain arglists are supported.")
if inspect.isfunction(func):
if len(argspec.args) != len(input_types):
raise ValueError("The function must have the same number of arguments "
"as the number of specified input types.")
args = argspec.args
elif inspect.ismethod(func):
if len(argspec.args) != 1 + len(input_types):
raise ValueError(
"The class function must have the same number of arguments "
"as the number of specified input types.")
args = argspec.args[1:] # 1st argument is the "class" type.
# Create the func_def object.
temp_graph = ops.Graph()
with temp_graph.as_default():
# List of placeholders for the function_def.
inputs = []
# Arglist to call 'func'
kwargs = {}
for argname in args:
if argname not in input_types:
raise ValueError("Missing type for argument: " + argname)
argholder = array_ops.placeholder(input_types[argname], name=argname)
inputs.append(argholder)
kwargs[argname] = argholder
# Call func and gather the output tensors.
outputs = func(**kwargs)
if not outputs:
raise ValueError("Function must return at least one tensor")
# Convenience: if func only returned one value, make it a tuple.
if not isinstance(outputs, (list, tuple)):
outputs = (outputs,)
# Build the FunctionDef
func_def = graph_to_function_def(temp_graph, func_name, inputs, outputs)
g = ops.get_default_graph()
g._add_function(func_def) # pylint: disable=protected-access
return func_def
class Defun(object):
"""Decorator used to define TensorFlow functions.
Use this decorator to make a Python function usable directly as a TensorFlow
function.
The decorated function must add ops to the default graph and return zero or
more `Tensor` objects. Call the decorator with named arguments, one for each
argument of the function to decorate, with the expected type of the argument
as value.
For example if the function to decorate accepts to `tf.float32` arguments
named `x` and `y`, call the decorator with:
@Defun(x=tf.float32, y=tf.float32)
def foo(x, y):
...
When you call the decorated function it will add `call` ops to the graph.
Example, but also see the [How To on functions](link_needed).
```python
# Defining the function.
@tf.Defun(x=tf.float32, y=tf.float32)
def MyFunc(x, y):
return x + y, x - y
# Building the graph.
  a = tf.constant([1.0])
  b = tf.constant([2.0])
c, d = MyFunc(a, b, name='mycall')
```
@@__init__
"""
def __init__(self, **input_types):
"""Create a `Defun` decorator.
Args:
**input_types: Dict mapping string with `tf.DType`
One key for each argument of the function to decorate.
"""
self._input_types = input_types
def __call__(self, f):
func_def = define_function(f, self._input_types)
return lambda *args, **kwargs: call_function(func_def, *args, **kwargs)
| apache-2.0 | 3,135,151,159,115,315,700 | -4,387,985,237,028,706,300 | 35.447368 | 80 | 0.671874 | false |
Chealion/yycbike | archive/weatherLoad.py | 1 | 6271 | #! /usr/bin/python
# :set tabstop=4 shiftwidth=4 expandtab
# Downloads Environment Canada data and sends it to Graphite. Additionally logs the data to a file we can use to import later.
import csv
import time
import graphitesend
import urllib2
from datetime import date, timedelta
import datetime
graphitesend.init(graphite_server='localhost',prefix='yycbike',system_name='')
metriclog = open('/home/ubuntu/devmetriclog.log', 'a')
# Watch out for timezones - this script fails to function past 5 PM MST.
yesterday = date.today() - timedelta(1)
year = yesterday.strftime('%Y')
month = yesterday.strftime('%m')
day = yesterday.strftime('%d')
#Installations
# URLs per ftp://ftp.tor.ec.gc.ca/Pub/Get_More_Data_Plus_de_donnees/Readme.txt
HOURLY_URL='http://climate.weather.gc.ca/climate_data/bulk_data_e.html?format=csv&stationID=50430&Year=' + year + '&Month=' + month + '&Day=' + day + '&submit=Download+Data&timeframe=1'
DAILY_URL= 'http://climate.weather.gc.ca/climate_data/bulk_data_e.html?format=csv&stationID=50430&Year=' + year + '&Month=' + month + '&Day=' + day + '&submit=Download+Data&timeframe=2'
## HOURLY
url = HOURLY_URL
print 'Loading Hourly Weather Data...'
response = urllib2.urlopen(url)
csv_data = response.read()
# Delete first 17 lines - up to and inlcuding header line
cleaned_data = '\n'.join(csv_data.split('\n')[17:])
# split into list, and use non unicode field names
csv_reader = csv.DictReader(cleaned_data.split('\n'), fieldnames=['Date', 'Year', 'Month', 'Day', 'Time', 'Quality', 'Temp', 'TempFlag', 'DewPoint', 'DewPointFlag', 'Humidity', 'HumFlag', 'WindDir', 'WindFlag', 'WindSpd', 'WindFlg', 'Visbility', 'VisFlag', 'Pressure', 'PressFlag', 'Humidex', 'HmdxFlag', 'WindChill', 'WindChillFlag', 'Weather'])
for row in csv_reader:
#Create timestamp
timestamp = time.mktime(datetime.datetime.strptime(row['Date'], "%Y-%m-%d %H:%M").timetuple())
yesterday_timestamp = float(yesterday.strftime('%s'))
#Ignore any data "newer" than yesterday. Data that doesn't exist yet.
if timestamp > yesterday_timestamp:
break
else:
timestamp = str(int(timestamp))
#print row
# Data Cleaning - Wind Chill or Humidex - merge
if row['Temp'] is None or row['Temp'] == '':
continue
if row['Humidex'] == '' and row['WindChill'] == '':
feelslike = row['Temp']
elif row['Humidex'] == '':
feelslike = row['WindChill']
else:
feelslike = row['Humidex']
if row['WindSpd'] == '':
row['WindSpd'] = 0
if row['WindDir'] == '':
row['WindDir'] = 0
metric_string = 'weather.hourly.temp ' + str(row['Temp']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.temp', str(row['Temp']), timestamp)
metric_string = 'weather.hourly.windspeed ' + str(row['WindSpd']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.windspeed', str(row['WindSpd']), timestamp)
metric_string = 'weather.hourly.winddir ' + str(row['WindDir']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.winddir', str(row['WindDir']), timestamp)
metric_string = 'weather.hourly.humidity ' + str(row['Humidity']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.humidity', str(row['Humidity']), timestamp)
metric_string = 'weather.hourly.feelslike ' + str(feelslike) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.feelslike', str(feelslike), timestamp)
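# Illustrative note (hypothetical values, not part of the original script): each
# hourly row above yields plaintext metric lines such as
#   weather.hourly.temp -3.2 1385535600
#   weather.hourly.feelslike -8.0 1385535600
# which are appended to the log file and pushed to Graphite via graphitesend.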
## DAILY
url = DAILY_URL
print 'Loading Daily Weather Data...'
response = urllib2.urlopen(url)
csv_data = response.read()
# Delete first 26 lines - up to and including header line
cleaned_data = '\n'.join(csv_data.split('\n')[26:])
# split into list, and use non unicode field names
csv_reader = csv.DictReader(cleaned_data.split('\n'), fieldnames=['Date', 'Year', 'Month', 'Day', 'Quality', 'Max', 'MaxFlag', 'Min', 'MinFlag', 'Mean', 'MeanFlag', 'Heat1', 'Heat2', 'Heat3', 'Heat4', 'Rain', 'RainFlag', 'Snow', 'SnowFlag', 'TotalPrecip', 'PrecipFlag', 'SnowonGround', 'SnowonGroundFlag', 'Wind1', 'Wind2', 'Wind3', 'Wind4'])
for row in csv_reader:
#Create timestamp
timestamp = time.mktime(datetime.datetime.strptime(row['Date'], "%Y-%m-%d").timetuple())
yesterday_timestamp = float(yesterday.strftime('%s'))
#Ignore any data "newer" than yesterday. Data that doesn't exist yet.
if timestamp > yesterday_timestamp:
break
else:
timestamp = str(int(timestamp))
#print row
if row['Max'] is None or row['Max'] == '' or row['Min'] == '':
continue
metric_string = 'weather.daily.high ' + str(row['Max']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.high', str(row['Max']), timestamp)
metric_string = 'weather.daily.low ' + str(row['Min']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.low', str(row['Min']), timestamp)
metric_string = 'weather.daily.mean ' + str(row['Mean']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.mean', str(row['Mean']), timestamp)
# Data Cleaning
if row['TotalPrecip'] == '':
row['TotalPrecip'] = 0
metric_string = 'weather.daily.precip ' + str(row['TotalPrecip']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.precip', str(row['TotalPrecip']), timestamp)
# Data Cleaning
if row['SnowonGround'] == '':
row['SnowonGround'] = 0
metric_string = 'weather.daily.snowamt ' + str(row['SnowonGround']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.snowamt', str(row['SnowonGround']), timestamp)
# OUTPUT FORMAT:
# <metric path> <metric value> <metric timestamp>
# yycbike.peacebridge.north.trips 5 123456789
metriclog.close()
print 'Done.'
| mit | 8,163,162,519,795,310,000 | -7,492,954,743,064,431,000 | 39.986928 | 346 | 0.635784 | false |
gmimano/commcaretest | corehq/apps/api/util.py | 2 | 1330 | from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from couchdbkit.exceptions import ResourceNotFound
def get_object_or_not_exist(cls, doc_id, domain, additional_doc_types=None):
"""
Given a Document class, id, and domain, get that object or raise
an ObjectDoesNotExist exception if it's not found, not the right
type, or doesn't belong to the domain.
"""
additional_doc_types = additional_doc_types or []
doc_type = getattr(cls, '_doc_type', cls.__name__)
additional_doc_types.append(doc_type)
try:
doc = cls.get(doc_id)
if doc and doc.domain == domain and doc.doc_type in additional_doc_types:
return doc
except ResourceNotFound:
pass # covered by the below
except AttributeError:
# there's a weird edge case if you reference a form with a case id
# that explodes on the "version" property. might as well swallow that
# too.
pass
raise object_does_not_exist(doc_type, doc_id)
def object_does_not_exist(doc_type, doc_id):
"""
Builds a 404 error message with standard, translated, verbiage
"""
return ObjectDoesNotExist(_("Could not find %(doc_type)s with id %(id)s") % \
{"doc_type": doc_type, "id": doc_id})
| bsd-3-clause | -5,438,594,499,145,403,000 | -506,003,358,115,091,460 | 38.117647 | 81 | 0.658647 | false |
Ziqi-Li/bknqgis | bokeh/bokeh/server/server.py | 1 | 10467 | ''' Provides a Server which instantiates Application instances as clients connect
'''
from __future__ import absolute_import, print_function
import atexit
import logging
log = logging.getLogger(__name__)
import signal
import tornado
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado import netutil
from .tornado import BokehTornado
from bokeh import __version__
from bokeh.application import Application
from bokeh.resources import DEFAULT_SERVER_PORT
def _create_hosts_whitelist(host_list, port):
if not host_list:
return ['localhost:' + str(port)]
hosts = []
for host in host_list:
if '*' in host:
log.warning(
"Host wildcard %r will allow websocket connections originating "
"from multiple (or possibly all) hostnames or IPs. Use non-wildcard "
"values to restrict access explicitly", host)
if host == '*':
# do not append the :80 port suffix in that case: any port is
# accepted
hosts.append(host)
continue
parts = host.split(':')
if len(parts) == 1:
if parts[0] == "":
raise ValueError("Empty host value")
hosts.append(host+":80")
elif len(parts) == 2:
try:
int(parts[1])
except ValueError:
raise ValueError("Invalid port in host value: %s" % host)
if parts[0] == "":
raise ValueError("Empty host value")
hosts.append(host)
else:
raise ValueError("Invalid host value: %s" % host)
return hosts
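# Worked example (illustrative, not part of the original module): with port=5006,
# _create_hosts_whitelist(['foo.com', 'bar.com:8080', '*'], 5006) returns
# ['foo.com:80', 'bar.com:8080', '*'], while an empty or None host list falls
# back to ['localhost:5006'].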
def _bind_sockets(address, port):
'''Like tornado.netutil.bind_sockets(), but also returns the
assigned port number.
'''
ss = netutil.bind_sockets(port=port or 0, address=address)
assert len(ss)
ports = {s.getsockname()[1] for s in ss}
assert len(ports) == 1, "Multiple ports assigned??"
actual_port = ports.pop()
if port:
assert actual_port == port
return ss, actual_port
class Server(object):
''' A Server which creates a new Session for each connection, using an Application to initialize each Session.
Args:
applications (dict of str: bokeh.application.Application) or bokeh.application.Application:
mapping from URL paths to Application instances, or a single Application to put at the root URL
The Application is a factory for Document, with a new Document initialized for each Session.
Each application should be identified by a path meant to go in a URL, like "/" or "/foo"
Kwargs:
        num_procs (int):
            Number of worker processes for an app. Defaults to 1. Using 0 will autodetect the number of cores.
tornado_server_kwargs (dict):
Additional arguments passed to tornado.httpserver.HTTPServer. E.g. max_buffer_size to
specify the maximum upload size. More details can be found at:
http://www.tornadoweb.org/en/stable/httpserver.html#http-server
'''
def __init__(self, applications, io_loop=None, tornado_server_kwargs=None, **kwargs):
log.info("Starting Bokeh server version %s (running on Tornado %s)" % (__version__, tornado.version))
if isinstance(applications, Application):
self._applications = { '/' : applications }
else:
self._applications = applications
tornado_kwargs = { key: kwargs[key] for key in ['extra_patterns',
'secret_key',
'sign_sessions',
'generate_session_ids',
'keep_alive_milliseconds',
'check_unused_sessions_milliseconds',
'unused_session_lifetime_milliseconds',
'stats_log_frequency_milliseconds',
]
if key in kwargs }
prefix = kwargs.get('prefix')
if prefix is None:
prefix = ""
prefix = prefix.strip("/")
if prefix:
prefix = "/" + prefix
self._prefix = prefix
self._started = False
self._stopped = False
port = kwargs.get('port', DEFAULT_SERVER_PORT)
self._address = kwargs.get('address') or None
if tornado_server_kwargs is None:
tornado_server_kwargs = {}
tornado_server_kwargs.setdefault('xheaders', kwargs.get('use_xheaders', False))
self._num_procs = kwargs.get('num_procs', 1)
if self._num_procs != 1:
assert all(app.safe_to_fork for app in self._applications.values()), (
'User code has ran before attempting to run multiple '
'processes. This is considered an unsafe operation.')
sockets, self._port = _bind_sockets(self._address, port)
try:
tornado_kwargs['extra_websocket_origins'] = _create_hosts_whitelist(kwargs.get('allow_websocket_origin'), self._port)
tornado_kwargs['use_index'] = kwargs.get('use_index', True)
tornado_kwargs['redirect_root'] = kwargs.get('redirect_root', True)
self._tornado = BokehTornado(self._applications, self.prefix, **tornado_kwargs)
self._http = HTTPServer(self._tornado, **tornado_server_kwargs)
self._http.start(self._num_procs)
self._http.add_sockets(sockets)
except Exception:
for s in sockets:
s.close()
raise
# Can only instantiate the IO loop after HTTPServer.start() was
# called because of `num_procs`, see issue #5524
if io_loop is None:
io_loop = IOLoop.current()
self._loop = io_loop
self._tornado.initialize(io_loop=io_loop, **tornado_kwargs)
@property
def port(self):
'''The actual port number the server is listening on for HTTP
requests.
'''
return self._port
@property
def address(self):
'''The address the server is listening on for HTTP requests
(may be empty or None).
'''
return self._address
@property
def prefix(self):
return self._prefix
@property
def io_loop(self):
return self._loop
def start(self):
''' Start the Bokeh Server and its background tasks.
Notes:
This method does not block and does not affect the state of
the Tornado I/O loop. You must start and stop the loop yourself.
'''
assert not self._started, "Already started"
self._started = True
self._tornado.start()
def stop(self, wait=True):
''' Stop the Bokeh Server.
Args:
            wait (boolean): whether to wait for orderly cleanup (default: True)
Returns:
None
'''
assert not self._stopped, "Already stopped"
self._stopped = True
self._tornado.stop(wait)
self._http.stop()
def run_until_shutdown(self):
''' Run the Bokeh Server until shutdown is requested by the user,
either via a Keyboard interrupt (Ctrl-C) or SIGTERM.
'''
if not self._started:
self.start()
# Install shutdown hooks
atexit.register(self._atexit)
signal.signal(signal.SIGTERM, self._sigterm)
try:
self._loop.start()
except KeyboardInterrupt:
print("\nInterrupted, shutting down")
self.stop()
_atexit_ran = False
def _atexit(self):
if self._atexit_ran:
return
self._atexit_ran = True
log.debug("Shutdown: cleaning up")
if not self._stopped:
self.stop(wait=False)
def _sigterm(self, signum, frame):
print("Received signal %d, shutting down" % (signum,))
# Tell self._loop.start() to return.
self._loop.add_callback_from_signal(self._loop.stop)
def unlisten(self):
'''Stop listening on ports (Server will no longer be usable after calling this)
Returns:
None
'''
self._http.close_all_connections()
self._http.stop()
def get_session(self, app_path, session_id):
'''Gets a session by name (session must already exist)'''
return self._tornado.get_session(app_path, session_id)
def get_sessions(self, app_path=None):
'''Gets all live sessions for an application.'''
if app_path is not None:
return self._tornado.get_sessions(app_path)
all_sessions = []
for path in self._tornado.app_paths:
all_sessions += self._tornado.get_sessions(path)
return all_sessions
def show(self, app_path, browser=None, new='tab'):
''' Opens an app in a browser window or tab.
        Useful for testing server applications on your local desktop but
        should not be called when running bokeh-server on an actual server.
Args:
app_path (str) : the app path to open
The part of the URL after the hostname:port, with leading slash.
browser (str, optional) : browser to show with (default: None)
For systems that support it, the **browser** argument allows
specifying which browser to display in, e.g. "safari", "firefox",
"opera", "windows-default" (see the ``webbrowser`` module
documentation in the standard lib for more details).
new (str, optional) : window or tab (default: "tab")
If ``new`` is 'tab', then opens a new tab.
If ``new`` is 'window', then opens a new window.
Returns:
None
'''
if not app_path.startswith("/"):
raise ValueError("app_path must start with a /")
address_string = 'localhost'
if self.address is not None and self.address != '':
address_string = self.address
url = "http://%s:%d%s%s" % (address_string, self.port, self.prefix, app_path)
from bokeh.util.browser import view
view(url, browser=browser, new=new)
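# Minimal usage sketch (an assumption, not part of the original module): serve a
# single trivial Bokeh app on the default port. The FunctionHandler/figure
# imports and the app body are illustrative and are never executed here.
def _example_serve_single_app():
    from bokeh.application.handlers import FunctionHandler
    from bokeh.plotting import figure
    def make_doc(doc):
        fig = figure(title="example")
        fig.line([1, 2, 3], [4, 6, 5])
        doc.add_root(fig)
    server = Server({'/': Application(FunctionHandler(make_doc))}, port=5006)
    server.start()
    server.run_until_shutdown()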
| gpl-2.0 | -110,495,552,372,560,670 | -7,754,970,697,576,903,000 | 35.217993 | 129 | 0.572179 | false |
uwdata/termite-data-server | web2py/applications-original/admin/controllers/mercurial.py | 34 | 2545 | from gluon.fileutils import read_file, write_file
if DEMO_MODE or MULTI_USER_MODE:
session.flash = T('disabled in demo mode')
redirect(URL('default', 'site'))
if not have_mercurial:
session.flash = T("Sorry, could not find mercurial installed")
redirect(URL('default', 'design', args=request.args(0)))
_hgignore_content = """\
syntax: glob
*~
*.pyc
*.pyo
*.bak
*.bak2
cache/*
private/*
uploads/*
databases/*
sessions/*
errors/*
"""
def hg_repo(path):
import os
uio = ui.ui()
uio.quiet = True
if not os.environ.get('HGUSER') and not uio.config("ui", "username"):
os.environ['HGUSER'] = 'web2py@localhost'
try:
repo = hg.repository(ui=uio, path=path)
except:
repo = hg.repository(ui=uio, path=path, create=True)
hgignore = os.path.join(path, '.hgignore')
if not os.path.exists(hgignore):
write_file(hgignore, _hgignore_content)
return repo
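# Usage sketch (illustrative, not part of the original controller): hg_repo()
# opens (or creates) a Mercurial repository over an application folder and seeds
# a default .hgignore, e.g. repo = hg_repo(apath('welcome', r=request)).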
def commit():
app = request.args(0)
path = apath(app, r=request)
repo = hg_repo(path)
form = FORM(T('Comment:'), INPUT(_name='comment', requires=IS_NOT_EMPTY()),
INPUT(_type='submit', _value=T('Commit')))
if form.accepts(request.vars, session):
oldid = repo[repo.lookup('.')]
addremove(repo)
repo.commit(text=form.vars.comment)
if repo[repo.lookup('.')] == oldid:
response.flash = T('no changes')
try:
files = TABLE(*[TR(file) for file in repo[repo.lookup('.')].files()])
changes = TABLE(TR(TH('revision'), TH('description')))
for change in repo.changelog:
ctx = repo.changectx(change)
revision, description = ctx.rev(), ctx.description()
changes.append(TR(A(revision, _href=URL('revision',
args=(app, revision))),
description))
except:
files = []
changes = []
return dict(form=form, files=files, changes=changes, repo=repo)
def revision():
app = request.args(0)
path = apath(app, r=request)
repo = hg_repo(path)
revision = request.args(1)
ctx = repo.changectx(revision)
form = FORM(INPUT(_type='submit', _value=T('Revert')))
if form.accepts(request.vars):
hg.update(repo, revision)
session.flash = T("reverted to revision %s") % ctx.rev()
redirect(URL('default', 'design', args=app))
return dict(
files=ctx.files(),
rev=str(ctx.rev()),
desc=ctx.description(),
form=form
)
| bsd-3-clause | -4,812,649,575,118,469,000 | 9,094,478,775,980,833,000 | 28.941176 | 79 | 0.582711 | false |
JamesDickenson/aima-python | submissions/Dickenson/vacuum2Runner.py | 18 | 6345 | import agents as ag
import envgui as gui
# change this line ONLY to refer to your project
import submissions.Dickenson.vacuum2 as v2
# ______________________________________________________________________________
# Vacuum environment
class Dirt(ag.Thing):
pass
class VacuumEnvironment(ag.XYEnvironment):
"""The environment of [Ex. 2.12]. Agent perceives dirty or clean,
and bump (into obstacle) or not; 2D discrete world of unknown size;
performance measure is 100 for each dirt cleaned, and -1 for
each turn taken."""
def __init__(self, width=4, height=3):
super(VacuumEnvironment, self).__init__(width, height)
self.add_walls()
def thing_classes(self):
return [ag.Wall, Dirt,
# ReflexVacuumAgent, RandomVacuumAgent,
# TableDrivenVacuumAgent, ModelBasedVacuumAgent
]
def percept(self, agent):
"""The percept is a tuple of ('Dirty' or 'Clean', 'Bump' or 'None').
Unlike the TrivialVacuumEnvironment, location is NOT perceived."""
status = ('Dirty' if self.some_things_at(
agent.location, Dirt) else 'Clean')
        bump = ('Bump' if agent.bump else 'None')
return (bump, status)
def execute_action(self, agent, action):
if action == 'Suck':
dirt_list = self.list_things_at(agent.location, Dirt)
if dirt_list != []:
dirt = dirt_list[0]
agent.performance += 100
self.delete_thing(dirt)
else:
super(VacuumEnvironment, self).execute_action(agent, action)
if action != 'NoOp':
agent.performance -= 1
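# Illustrative note (an assumption, not part of the original runner): the HW2Agent
# program receives percepts of the form ('Bump' or 'None', 'Dirty' or 'Clean') and
# should answer with an action XYEnvironment understands, e.g. 'Suck', 'Forward',
# 'TurnLeft', 'TurnRight' or 'NoOp'.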
# # Launch a Text-Based Environment
# print('Two Cells, Agent on Left:')
# v = VacuumEnvironment(4, 3)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (2, 1))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (1, 1))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
#
# # Repeat, but put Agent on the Right
# print('Two Cells, Agent on Right:')
# v = VacuumEnvironment(4, 3)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (2, 1))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (2, 1))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
#
# # Repeat, but put Agent on the Right
# print('Two Cells, Agent on Top:')
# v = VacuumEnvironment(3, 4)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (1, 2))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (1, 1))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
#
# # Repeat, but put Agent on the Right
# print('Two Cells, Agent on Bottom:')
# v = VacuumEnvironment(3, 4)
# v.add_thing(Dirt(), (1, 1))
# v.add_thing(Dirt(), (1, 2))
# a = v2.HW2Agent()
# a = ag.TraceAgent(a)
# v.add_thing(a, (1, 2))
# t = gui.EnvTUI(v)
# t.mapImageNames({
# ag.Wall: '#',
# Dirt: '@',
# ag.Agent: 'V',
# })
# t.step(0)
# t.list_things(Dirt)
# t.step(4)
# if len(t.env.get_things(Dirt)) > 0:
# t.list_things(Dirt)
# else:
# print('All clean!')
#
# # Check to continue
# if input('Do you want to continue [y/N]? ') != 'y':
# exit(0)
# else:
# print('----------------------------------------')
def testVacuum(label, w=4, h=3,
dloc=[(1,1),(2,1)],
vloc=(1,1),
limit=6):
print(label)
v = VacuumEnvironment(w, h)
for loc in dloc:
v.add_thing(Dirt(), loc)
a = v2.HW2Agent()
a = ag.TraceAgent(a)
v.add_thing(a, vloc)
t = gui.EnvTUI(v)
t.mapImageNames({
ag.Wall: '#',
Dirt: '@',
ag.Agent: 'V',
})
t.step(0)
t.list_things(Dirt)
t.step(limit)
if len(t.env.get_things(Dirt)) > 0:
t.list_things(Dirt)
else:
print('All clean!')
# Check to continue
if input('Do you want to continue [Y/n]? ') == 'n':
exit(0)
else:
print('----------------------------------------')
testVacuum('Two Cells, Agent on Left:')
testVacuum('Two Cells, Agent on Right:', vloc=(2,1))
testVacuum('Two Cells, Agent on Top:', w=3, h=4,
dloc=[(1,1), (1,2)], vloc=(1,1) )
testVacuum('Two Cells, Agent on Bottom:', w=3, h=4,
dloc=[(1,1), (1,2)], vloc=(1,2) )
testVacuum('Five Cells, Agent on Left:', w=7, h=3,
dloc=[(2,1), (4,1)], vloc=(1,1), limit=12)
testVacuum('Five Cells, Agent near Right:', w=7, h=3,
dloc=[(2,1), (3,1)], vloc=(4,1), limit=12)
testVacuum('Five Cells, Agent on Top:', w=3, h=7,
dloc=[(1,2), (1,4)], vloc=(1,1), limit=12 )
testVacuum('Five Cells, Agent Near Bottom:', w=3, h=7,
dloc=[(1,2), (1,3)], vloc=(1,4), limit=12 )
testVacuum('5x4 Grid, Agent in Top Left:', w=7, h=6,
dloc=[(1,4), (2,2), (3, 3), (4,1), (5,2)],
vloc=(1,1), limit=46 )
testVacuum('5x4 Grid, Agent near Bottom Right:', w=7, h=6,
dloc=[(1,3), (2,2), (3, 4), (4,1), (5,2)],
vloc=(4, 3), limit=46 )
v = VacuumEnvironment(6, 3)
a = v2.HW2Agent()
a = ag.TraceAgent(a)
loc = v.random_location_inbounds()
v.add_thing(a, location=loc)
v.scatter_things(Dirt)
g = gui.EnvGUI(v, 'Vaccuum')
c = g.getCanvas()
c.mapImageNames({
ag.Wall: 'images/wall.jpg',
# Floor: 'images/floor.png',
Dirt: 'images/dirt.png',
ag.Agent: 'images/vacuum.png',
})
c.update()
g.mainloop() | mit | 7,146,894,426,262,654,000 | -2,969,652,747,950,904,300 | 26.71179 | 80 | 0.527502 | false |
jelly/calibre | src/calibre/db/cli/cmd_catalog.py | 2 | 3866 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
# License: GPLv3 Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from calibre.customize.ui import available_catalog_formats, plugin_for_catalog_format
from calibre.db.cli import integers_from_string
readonly = True
version = 0 # change this if you change signature of implementation()
needs_srv_ctx = True
no_remote = True
def implementation(db, notify_changes, ctx):
raise NotImplementedError()
def option_parser(get_parser, args): # {{{
def add_plugin_parser_options(fmt, parser):
# Fetch the extension-specific CLI options from the plugin
# library.catalogs.<format>.py
plugin = plugin_for_catalog_format(fmt)
p = parser.add_option_group(_('{} OPTIONS').format(fmt.upper()))
for option in plugin.cli_options:
if option.action:
p.add_option(
option.option,
default=option.default,
dest=option.dest,
action=option.action,
help=option.help
)
else:
p.add_option(
option.option,
default=option.default,
dest=option.dest,
help=option.help
)
# Entry point
parser = get_parser(
_(
'''\
%prog catalog /path/to/destination.(csv|epub|mobi|xml...) [options]
Export a catalog in format specified by path/to/destination extension.
Options control how entries are displayed in the generated catalog output.
Note that different catalog formats support different sets of options.
'''
)
)
# Add options common to all catalog plugins
parser.add_option(
'-i',
'--ids',
default=None,
dest='ids',
help=_(
"Comma-separated list of database IDs to catalog.\n"
"If declared, --search is ignored.\n"
"Default: all"
)
)
parser.add_option(
'-s',
'--search',
default=None,
dest='search_text',
help=_(
"Filter the results by the search query. "
"For the format of the search query, please see "
"the search-related documentation in the User Manual.\n"
"Default: no filtering"
)
)
parser.add_option(
'-v',
'--verbose',
default=False,
action='store_true',
dest='verbose',
help=_('Show detailed output information. Useful for debugging')
)
fmt = 'epub'
if args and '.' in args[0]:
fmt = args[0].rpartition('.')[-1].lower()
if fmt not in available_catalog_formats():
fmt = 'epub'
# Add options specific to fmt plugin
add_plugin_parser_options(fmt, parser)
return parser
# }}}
def main(opts, args, dbctx):
if len(args) < 1:
raise SystemExit(_('You must specify a catalog output file'))
if opts.ids:
opts.ids = list(integers_from_string(opts.ids))
fmt = args[0].rpartition('.')[-1]
if fmt not in available_catalog_formats():
raise SystemExit(
_('Cannot generate a catalog in the {} format').format(fmt.upper())
)
# No support for connected device in CLI environment
# Parallel initialization in calibre.gui2.tools:generate_catalog()
opts.connected_device = {
'is_device_connected': False,
'kind': None,
'name': None,
'save_template': None,
'serial': None,
'storage': None,
}
dest = os.path.abspath(os.path.expanduser(args[0]))
plugin = plugin_for_catalog_format(fmt)
with plugin:
plugin.run(dest, opts, dbctx.db)
return 0
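# Illustrative command-line usage (hypothetical paths and queries), as this
# subcommand is normally reached through calibredb:
#   calibredb catalog /tmp/catalog.epub --search "tag:fiction" --verbose
#   calibredb catalog /tmp/catalog.csv --ids 1,2,3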
| gpl-3.0 | 4,541,491,892,372,772,000 | -1,248,499,655,351,036,400 | 28.51145 | 85 | 0.579152 | false |