repo_name (string, 5-100 chars) | path (string, 4-254 chars) | copies (string, 1-5 chars) | size (string, 4-7 chars) | content (string, 681-1M chars) | license (15 classes) | hash (int64) | line_mean (float64, 3.5-100) | line_max (int64, 15-1k) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class) | ratio (float64, 1.5-8.15) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
mattmelachrinos/Creative-Programming | MelachrinosMatthew_Bot/TwitterBot.py | 1 | 2392 |
import random
import twitter
import json

players = []
teams = []

with open('player_names', 'r') as player_file:
    for player in player_file:
        players.append(player)

with open('football_teams', 'r') as teams_file:
    for team in teams_file:
        teams.append(team)

random_team = random.choice(teams)
random_player = random.choice(players)

#Keys and Tokens
Consumer_Key = "MsB3P0A9c8DPsbLYCyEVcmAA9"
Consumer_Secret = "gstX2eUuBOte0Zpow8mHPLujt7r5yRndzgLMq4ofV1ASLPiR4O"
Access_Token = "851599589878771712-AAB4jMmz8RoZRm08rVH8WNKISc4kuJe"
Access_Token_Secret = "uACJNnJYF5fG12KcUesPXSDHMwZiKfABdTnkKSVFNYo6N"

# connect to Twitter with our OAuth settings
api = twitter.Api(consumer_key = Consumer_Key, consumer_secret = Consumer_Secret, access_token_key = Access_Token, access_token_secret = Access_Token_Secret)

#Twitter Query
query = "https://api.twitter.com/1.1/search/tweets.json?q=%nfl&since_id=24012619984051000&result_type=mixed&count=15"

def generate_tweet(text):
    for team in teams:
        if team.strip() in text:
            index = text.find(team.strip())
            text = text[:index] + random_team.strip() + text[index+len(team)-1:]
            break
    for player in players:
        if player.strip() in text:
            index = text.find(player.strip())
            text = text[:index] + random_player.strip() + text[index+len(player)-1:]
            break
    return text

def main():
    # search_results = api.GetSearch(raw_query="q=nfl%20&result_type=recent&since=2014-07-19&count=1")
    # print search_results
    # search_results = json.dumps(search_results)
    # tweet_list = []
    # for line in search_results:
    #     tweet_list.append(json.loads(line))
    #
    # print tweet_list
    incoming_tweet = '''Seahawks GM says team has listened to trade offers regarding cornerback Richard Sherman http://apne.ws/2nKxQda'''
    tweet = generate_tweet(incoming_tweet)
    if len(tweet) > 140:
        tweet = tweet[:140]
    try:
        status = api.PostUpdate(tweet) # try posting
        print '- success!'
        with open("Tweets.txt", "a") as tweets_file:
            tweets_file.write("\n")
            tweets_file.write(incoming_tweet + "\n")
            tweets_file.write(tweet + "\n")
    except twitter.TwitterError, e: # if an error, let us know
        print '- error posting!'
        print e

if __name__ == "__main__":
    main()
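# --- Editor's usage note (illustrative, not part of the original file). ---
# generate_tweet() swaps the first team or player name it finds in the text
# for the randomly chosen replacement, e.g. with random_team = 'Patriots\n'
# (hypothetical value):
#   generate_tweet('Seahawks GM says ...')  ->  'Patriots GM says ...'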
| mit | -2,408,954,984,201,946,600 | 30.893333 | 157 | 0.661371 | false | 3.058824 | false | false | false |
carthagecollege/django-djdoop | djdoop/bin/get_ens.py | 1 | 1541 |
# -*- coding: utf-8 -*-
import os, sys

# env
sys.path.append('/usr/local/lib/python2.7/dist-packages/')
sys.path.append('/usr/lib/python2.7/dist-packages/')
sys.path.append('/usr/lib/python2.7/')
sys.path.append('/data2/django_projects/')
sys.path.append('/data2/django_third/')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djdoop.settings")

from djzbar.utils.informix import do_sql
from optparse import OptionParser

"""
Fetch data from a MSSQL database
"""

# set up command-line options
desc = """
Accepts as input a college ID
"""

parser = OptionParser(description=desc)
parser.add_option(
    "-i", "--cid",
    help="Please provide a college ID.",
    dest="cid"
)

FIELDS = ['aa', 'beg_date', 'end_date', 'line1', 'line2', 'line3',
          'phone', 'phone_ext', 'cell_carrier', 'opt_out']
CODES = ['MIS1', 'MIS2', 'ICE', 'ICE2', 'ENS']

def main():
    """
    main method
    """
    for c in CODES:
        print "++%s++++++++++++++++++++++" % c
        sql = "SELECT * FROM aa_rec WHERE aa = '%s' AND id='%s'" % (c, cid)
        result = do_sql(sql).fetchone()
        for f in FIELDS:
            if result[f]:
                print "%s = %s" % (f, result[f])

######################
# shell command line
######################

if __name__ == "__main__":
    (options, args) = parser.parse_args()
    cid = options.cid
    mandatories = ['cid', ]
    for m in mandatories:
        if not options.__dict__[m]:
            print "mandatory option is missing: %s\n" % m
            parser.print_help()
            exit(-1)
    sys.exit(main())
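# --- Editor's usage note (illustrative, not part of the original file). ---
# Typical invocation (the college ID here is hypothetical):
#   python get_ens.py -i 123456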
| bsd-3-clause | -1,375,509,877,524,543,000 | 23.078125 | 74 | 0.565217 | false | 3.157787 | false | false | false |
jackcrowe/bike-tools | bikesizecalculator.py | 1 | 1881 |
"""
bikesizecalculator:: a module for calculating the bike size appropriate for a person.
"""
from math import *

# globals to store categorization of bike types
mountain_geometry = "MTN"
road_geometry = "ROAD"
stepthrough_geometry = "STEP"

# dictionary for bike type to geometry categorization
bike_type_categories = {
    'Touring' : road_geometry,
    'Commuter' : road_geometry,
    'Track' : road_geometry,
    'Road' : road_geometry,
    'Mixte' : stepthrough_geometry,
    'Hardtail' : mountain_geometry,
    'XC' : mountain_geometry }

""" calculates the correct bike size for the given bike type and person's inseam """
def calculate_bike_size(bike_type, inseam):
    category = get_geometry_categorization(bike_type)
    if category == road_geometry:
        return get_road_size(inseam)
    else:
        return get_mountain_size(inseam)

""" generates a craigslist query given an array of bike types and a person's inseam """
def generate_craigslist_query(bike_types, inseam):
    if len(bike_types) == 0:
        return ''
    query = ''
    for bike_type in bike_types:
        bike_size = int(calculate_bike_size(bike_type, inseam))
        query += '"' + bike_type + ' ' + str(bike_size) + '"|'
    location = 'http://chicago.craigslist.org/'
    category = 'bik'
    search_type = 'T'
    search_url = '%ssearch/%s?query=%s&srchType=%s' % (
        location, category, query, search_type)
    return search_url

""" looks up the category of geometry for a bike type """
def get_geometry_categorization(bike_type):
    return bike_type_categories[bike_type]

""" returns the appropriate road bike size for a person with the given inseam """
def get_road_size(inseam):
    return floor(1.72 * float(inseam) - 0.68)

""" returns the appropriate mountain bike size for a person with the given inseam """
def get_mountain_size(inseam):
    return inseam - 10
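# --- Editor's illustrative sketch (not part of the original file). ---
# Worked example of the road sizing formula for a 32" inseam:
#   floor(1.72 * 32 - 0.68) = floor(54.36) = 54
def _demo_sizes():
    return (calculate_bike_size('Road', 32),           # -> 54.0
            generate_craigslist_query(['Road'], 32))   # embeds '"Road 54"|'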
| apache-2.0 | 5,846,033,641,118,190,000 | 32.607143 | 86 | 0.672515 | false | 3.19898 | false | false | false |
WinHeapExplorer/WinHeap-Explorer | IDAscripts/dll_parser.py | 1 | 11768 |
'''
BSD 2-Clause License
Copyright (c) 2013-2016,
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
'''
''' This script is used to perform system dlls parsing to get a list of potentially
dangerous library calls and their instructions
'''

import os
import sys
import idc
import idaapi
import idautils
from time import strftime

''' banned functions MSDN SDLC '''
list_of_banned_functions = ["strcpy", "strcpyA", "strcpyW", "wcscpy", "_tcscpy",
                            "_mbscpy", "StrCpy", "StrCpyA", "StrCpyW", "lstrcpy", "lstrcpyA",
                            "lstrcpyW", "_tccpy", "_mbccpy", "_ftcscpy", "strncpy", "wcsncpy",
                            "_tcsncpy", "_mbsncpy", "_mbsnbcpy", "StrCpyN", "StrCpyNA",
                            "StrCpyNW", "StrNCpy", "strcpynA", "StrNCpyA", "StrNCpyW",
                            "lstrcpyn", "lstrcpynA", "lstrcpynW"]
list_of_banned_functions += ["strcat", "strcatA", "strcatW", "wcscat", "_tcscat",
                             "_mbscat", "StrCat", "StrCatA", "StrCatW", "lstrcat",
                             "lstrcatA", "lstrcatW", "StrCatBuff", "StrCatBuffA",
                             "StrCatBuffW", "StrCatChainW", "_tccat", "_mbccat",
                             "_ftcscat", "strncat", "wcsncat", "_tcsncat", "_mbsncat",
                             "_mbsnbcat", "StrCatN", "StrCatNA", "StrCatNW", "StrNCat",
                             "StrNCatA", "StrNCatW", "lstrncat", "lstrcatnA",
                             "lstrcatnW", "lstrcatn"]
list_of_banned_functions += ["sprintfW", "sprintfA", "wsprintf", "wsprintfW",
                             "wsprintfA", "sprintf", "swprintf", "_stprintf",
                             "wvsprintf", "wvsprintfA", "wvsprintfW", "vsprintf",
                             "_vstprintf", "vswprintf"]
list_of_banned_functions += ["wvsprintf", "wvsprintfA", "wvsprintfW", "vsprintf",
                             "_vstprintf", "vswprintf"]
list_of_banned_functions += ["_fstrncpy", " _fstrncat", "gets", "_getts", "_gettws"]
list_of_banned_functions += ["IsBadWritePtr", "IsBadHugeWritePtr", "IsBadReadPtr",
                             "IsBadHugeReadPtr", "IsBadCodePtr", "IsBadStringPtr"]
list_of_banned_functions += ["memcpy", "RtlCopyMemory", "CopyMemory", "wmemcpy"]

''' not recommended functions MSDN SDLC '''
list_of_not_recommended_functions = ["scanf", "wscanf", "_tscanf", "sscanf", "swscanf",
                                     "_stscanf"]
list_of_not_recommended_functions += ["wnsprintf", "wnsprintfA", "wnsprintfW",
                                      "_snwprintf", "snprintf", "sntprintf _vsnprintf",
                                      "vsnprintf", "_vsnwprintf", "_vsntprintf",
                                      "wvnsprintf", "wvnsprintfA", "wvnsprintfW"]
list_of_not_recommended_functions += ["_snwprintf", "_snprintf", "_sntprintf", "nsprintf"]
list_of_not_recommended_functions += ["_vsnprintf", "_vsnwprintf", "_vsntprintf",
                                      "wvnsprintf", "wvnsprintfA", "wvnsprintfW"]
list_of_not_recommended_functions += ["strtok", "_tcstok", "wcstok", "_mbstok"]
list_of_not_recommended_functions += ["makepath", "_tmakepath", "_makepath", "_wmakepath"]
list_of_not_recommended_functions += ["_splitpath", "_tsplitpath", "_wsplitpath"]
list_of_not_recommended_functions += ["snscanf", "snwscanf", "_sntscanf"]
list_of_not_recommended_functions += ["_itoa", "_itow", "_i64toa", "_i64tow",
                                      "_ui64toa", "_ui64tot", "_ui64tow", "_ultoa",
                                      "_ultot", "_ultow"]
list_of_not_recommended_functions += ["CharToOem", "CharToOemA", "CharToOemW",
                                      "OemToChar", "OemToCharA", "OemToCharW",
                                      "CharToOemBuffA", "CharToOemBuffW"]
list_of_not_recommended_functions += ["alloca", "_alloca"]
list_of_not_recommended_functions += ["strlen", "wcslen", "_mbslen", "_mbstrlen",
                                      "StrLen", "lstrlen"]
list_of_not_recommended_functions += ["ChangeWindowMessageFilter"]

WINHE_RESULTS_DIR = None

def enumerate_function_chunks(f_start):
    '''
    The function gets a list of chunks for the function.
    @f_start - first address of the function
    @return - list of chunks
    '''
    # Enumerate all chunks in the function
    chunks = list()
    first_chunk = idc.FirstFuncFchunk(f_start)
    chunks.append((first_chunk, idc.GetFchunkAttr(first_chunk, idc.FUNCATTR_END)))
    next_chunk = first_chunk
    while next_chunk != 0xffffffffL:
        next_chunk = idc.NextFuncFchunk(f_start, next_chunk)
        if next_chunk != 0xffffffffL:
            chunks.append((next_chunk, idc.GetFchunkAttr(next_chunk, idc.FUNCATTR_END)))
    return chunks

def get_list_of_function_instr(addr):
    '''
    The function returns a list of instructions from a function
    @addr - is function entry point
    @return - list of instruction's addresses
    '''
    f_start = addr
    f_end = idc.FindFuncEnd(addr)
    chunks = enumerate_function_chunks(f_start)
    list_of_addr = list()
    image_base = idaapi.get_imagebase(addr)
    for chunk in chunks:
        for head in idautils.Heads(chunk[0], chunk[1]):
            # If the element is an instruction
            if head == hex(0xffffffffL):
                raise Exception("Invalid head for parsing")
            if idc.isCode(idc.GetFlags(head)):
                head = head - image_base
                head = str(hex(head))
                head = head.replace("L", "")
                head = head.replace("0x", "")
                list_of_addr.append(head)
    return list_of_addr

def enumerate_function_names():
    '''
    The function enumerates all functions in a dll.
    @return - dictionary {function_name : list of corresponded instructions}
    '''
    func_name = dict()
    for seg_ea in idautils.Segments():
        # For each of the functions
        function_ea = seg_ea
        while function_ea != 0xffffffffL:
            function_name = idc.GetFunctionName(function_ea)
            # if already analyzed
            if func_name.get(function_name, None) != None:
                function_ea = idc.NextFunction(function_ea)
                continue
            image_base = idaapi.get_imagebase(function_ea)
            addr = function_ea - image_base
            addr = str(hex(addr))
            addr = addr.replace("L", "")
            addr = addr.replace("0x", "")
            func_name[function_name] = get_list_of_function_instr(function_ea)
            function_ea = idc.NextFunction(function_ea)
    return func_name

def search_dangerous_functions():
    ''' The function searches for all potentially dangerous library calls in a module
    @ return - tuple<a list of instructions from a list of potentially dangerous libcalls,
               a list of potentially dangerous libcalls found in a module
    '''
    global list_of_banned_functions, list_of_not_recommended_functions
    ''' key - name, value - list of (instructions - module offset) '''
    func_names = dict()
    list_of_instrs = list()
    list_of_func_names = list()
    func_names = enumerate_function_names()
    for banned_function in list_of_banned_functions:
        if banned_function in func_names:
            list_of_instrs.append(func_names[banned_function])
            print 'Found banned function ', banned_function
            list_of_func_names.append(banned_function)
            continue
        elif ("_" + banned_function) in func_names:
            list_of_instrs.append(func_names["_" + banned_function])
            print 'Found banned function ', "_" + banned_function
            list_of_func_names.append("_" + banned_function)
            continue
    for not_recommended_func in list_of_not_recommended_functions:
        if not_recommended_func in func_names:
            list_of_instrs.append(func_names[not_recommended_func])
            print 'Found not recommended function ', not_recommended_func
            list_of_func_names.append(not_recommended_func)
            continue
        elif ("_" + not_recommended_func) in func_names:
            list_of_instrs.append(func_names["_" + not_recommended_func])
            print 'Found not recommended function ', "_" + not_recommended_func
            list_of_func_names.append("_" + not_recommended_func)
            continue
    return list_of_instrs, list_of_func_names

def get_unique(lists_of_instr):
    ''' The function returns a list of unique instructions from the list of instructions
    @list_of_instr - a list of instructions
    @return a list of unique instructions
    '''
    result_list = list()
    for list_of_instr in lists_of_instr:
        for instr in list_of_instr:
            if instr not in result_list:
                result_list.append(instr)
    return result_list

def save_results(lists_of_instr, list_of_func_names):
    ''' The function saves results in a file
    @list_of_instr - a list of instructions to save_results
    @list_of_func_name - a list of functions names to save
    '''
    one_file = "sysdlls_instr_to_instrument.txt"
    analyzed_file = idc.GetInputFile()
    analyzed_file = analyzed_file.replace(".", "_")
    current_time = strftime("%Y-%m-%d_%H-%M-%S")
    file_name = WINHE_RESULTS_DIR + "\\" + one_file
    file_log = WINHE_RESULTS_DIR + "\\" + analyzed_file + "_" + current_time + ".txt"
    file = open(file_name, 'a')
    log = open(file_log, 'w')
    analyzed_file = analyzed_file.lower()
    list_of_instr = get_unique(lists_of_instr)
    for instr in list_of_instr:
        file.write(idaapi.get_input_file_path().lower() + "!" + str(instr) + "\n")
    log.write(str(len(list_of_func_names)) + "\n")
    for name in list_of_func_names:
        log.write(name + "\n")
    file.close()
    log.close()

def init_analysis():
    results = search_dangerous_functions()
    save_results(results[0], results[1])

def main():
    global WINHE_RESULTS_DIR
    print "Start analysis"
    idc.Wait() #wait while ida finish analysis
    DEPTH_LEVEL = os.getenv('DEPTH_LEVEL')
    auto_mode = 0
    # set WINHE_RESULTS_DIR variable in the cmd in case if you want to run IDA in the
    # silent mode.
    WINHE_RESULTS_DIR = os.getenv('WINHE_RESULTS_DIR')
    if WINHE_RESULTS_DIR == None:
        WINHE_RESULTS_DIR = os.getcwd()
    else:
        auto_mode = 1
    print "saving results in ", WINHE_RESULTS_DIR
    init_analysis()
    if auto_mode == 1:
        Exit(0)

if __name__ == "__main__":
    main()
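# --- Editor's usage note (illustrative, not part of the original file). ---
# The script is meant to run inside IDA Pro; a typical batch invocation might
# look like the following (the -A autonomous and -S script flags are believed
# correct for IDA, but verify against your IDA version; paths are examples):
#   set WINHE_RESULTS_DIR=C:\results
#   idaw.exe -A -Sdll_parser.py C:\Windows\system32\kernel32.dll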
| bsd-2-clause | -7,655,870,875,429,818,000 | 44.612403 | 91 | 0.611404 | false | 3.554213 | false | false | false |
Answeror/lit | pywingui/dialog.py | 1 | 14777 |
## Copyright (c) 2003 Henk Punt

## Permission is hereby granted, free of charge, to any person obtaining
## a copy of this software and associated documentation files (the
## "Software"), to deal in the Software without restriction, including
## without limitation the rights to use, copy, modify, merge, publish,
## distribute, sublicense, and/or sell copies of the Software, and to
## permit persons to whom the Software is furnished to do so, subject to
## the following conditions:

## The above copyright notice and this permission notice shall be
## included in all copies or substantial portions of the Software.

## THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
## EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
## MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
## NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
## LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
## OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
## WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE

## Thanx to Brad Clements for this contribution!

from .version_microsoft import WINVER
from types import IntType, LongType

from ctypes import *
from .windows import *
from .wtl_core import *
from .comctl import *

memcpy = cdll.msvcrt.memcpy

# Dialog Box Template Styles
DS_ABSALIGN = 0x01
DS_SYSMODAL = 0x02
DS_LOCALEDIT = 0x20 # Edit items get Local storage
DS_SETFONT = 0x40 # User specified font for Dlg controls
DS_MODALFRAME = 0x80 # Can be combined with WS_CAPTION
DS_NOIDLEMSG = 0x100 # WM_ENTERIDLE message will not be sent
DS_SETFOREGROUND = 0x200 # not in win3.1

if WINVER >= 0x0400:
    DS_3DLOOK = 0x0004
    DS_FIXEDSYS = 0x0008
    DS_NOFAILCREATE = 0x0010
    DS_CONTROL = 0x0400
    DS_CENTER = 0x0800
    DS_CENTERMOUSE = 0x1000
    DS_CONTEXTHELP = 0x2000
    DS_SHELLFONT = DS_SETFONT | DS_FIXEDSYS

#if(_WIN32_WCE >= 0x0500)
#DS_USEPIXELS = 0x8000L

# Dialog Codes
DLGC_WANTARROWS = 0x0001 # Control wants arrow keys
DLGC_WANTTAB = 0x0002 # Control wants tab keys
DLGC_WANTALLKEYS = 0x0004 # Control wants all keys
DLGC_WANTMESSAGE = 0x0004 # Pass message to control
DLGC_HASSETSEL = 0x0008 # Understands EM_SETSEL message
DLGC_DEFPUSHBUTTON = 0x0010 # Default pushbutton
DLGC_UNDEFPUSHBUTTON = 0x0020 # Non-default pushbutton
DLGC_RADIOBUTTON = 0x0040 # Radio button
DLGC_WANTCHARS = 0x0080 # Want WM_CHAR messages
DLGC_STATIC = 0x0100 # Static item: don't include
DLGC_BUTTON = 0x2000 # Button item: can be checked

class StringOrOrd:
    """Pack up a string or ordinal"""
    def __init__(self, value):
        if value is None or value == "":
            self.value = c_ushort(0)
        elif type(value) in (IntType, LongType):
            # treat as an atom
            if not value:
                self.value = c_ushort(0) # 0 is not a valid atom
            else:
                ordinaltype = c_ushort * 2
                ordinal = ordinaltype(0xffff, value)
                self.value = ordinal
        else:
            value = str(value)
            mbLen = MultiByteToWideChar(CP_ACP, 0, value, -1, 0, 0)
            if mbLen < 1:
                raise RuntimeError("Could not determine multibyte string length for %s" % \
                                   repr(value))
            #this does not work for me:, why needed?
            #if (mbLen % 2):
            #    mbLen += 1 # round up to next word in size
            stringtype = c_ushort * mbLen
            string = stringtype()
            result = MultiByteToWideChar(CP_ACP, 0, value, -1, addressof(string), sizeof(string))
            if result < 1:
                raise RuntimeError("could not convert multibyte string %s" % repr(value))
            self.value = string

    def __len__(self):
        return sizeof(self.value)

class DialogTemplate(WindowsObject):
    __dispose__ = GlobalFree

    _window_class_ = None
    _window_style_ = WS_CHILD
    _window_style_ex_ = 0
    _class_font_size_ = 8
    _class_font_name_ = "MS Sans Serif"

    def __init__(self,
                 wclass = None, # the window class
                 title = "",
                 menu=None,
                 style = None,
                 exStyle = None,
                 fontSize=None,
                 fontName=None,
                 rcPos = RCDEFAULT,
                 orStyle = None,
                 orExStyle = None,
                 nandStyle = None,
                 nandExStyle = None,
                 items=[]):
        if wclass is not None:
            wclass = StringOrOrd(wclass)
        else:
            wclass = StringOrOrd(self._window_class_)
        title = StringOrOrd(title)
        menu = StringOrOrd(menu)
        if style is None:
            style = self._window_style_
        if exStyle is None:
            exStyle = self._window_style_ex_
        if orStyle:
            style |= orStyle
        if orExStyle:
            exStyle |= orExStyle
        if nandStyle:
            style &= ~nandStyle
        if rcPos.left == CW_USEDEFAULT:
            cx = 50
            x = 0
        else:
            cx = rcPos.right
            x = rcPos.left
        if rcPos.top == CW_USEDEFAULT:
            cy = 50
            y = 0
        else:
            cy = rcPos.bottom
            y = rcPos.top
        if style & DS_SETFONT:
            if fontSize is None:
                fontSize = self._class_font_size_
            if fontName is None:
                fontName = StringOrOrd(self._class_font_name_)
        else:
            fontSize = None
            fontName = None
        header = DLGTEMPLATE()
        byteCount = sizeof(header)
        byteCount += len(wclass) + len(title) + len(menu)
        if fontName or fontSize:
            byteCount += 2 + len(fontName)
        d, rem = divmod(byteCount, 4) # align on dword
        byteCount += rem
        itemOffset = byteCount # remember this for later
        for i in items:
            byteCount += len(i)
        valuetype = c_ubyte * byteCount
        value = valuetype()
        header = DLGTEMPLATE.from_address(addressof(value))
        # header is overlayed on value
        header.exStyle = exStyle
        header.style = style
        header.cDlgItems = len(items)
        header.x = x
        header.y = y
        header.cx = cx
        header.cy = cy
        offset = sizeof(header)
        # now, memcpy over the menu
        memcpy(addressof(value)+offset, addressof(menu.value), len(menu)) # len really returns sizeof menu.value
        offset += len(menu)
        # and the window class
        memcpy(addressof(value)+offset, addressof(wclass.value), len(wclass)) # len really returns sizeof wclass.value
        offset += len(wclass)
        # now copy the title
        memcpy(addressof(value)+offset, addressof(title.value), len(title))
        offset += len(title)
        if fontSize or fontName:
            fsPtr = c_ushort.from_address(addressof(value)+offset)
            fsPtr.value = fontSize
            offset += 2
            # now copy the fontname
            memcpy(addressof(value)+offset, addressof(fontName.value), len(fontName))
            offset += len(fontName)
        # and now the items
        assert offset <= itemOffset, "offset %d beyond items %d" % (offset, itemOffset)
        offset = itemOffset
        for item in items:
            memcpy(addressof(value)+offset, addressof(item.value), len(item))
            offset += len(item)
            assert (offset % 4) == 0, "Offset not dword aligned for item"
        self.m_handle = GlobalAlloc(0, sizeof(value))
        memcpy(self.m_handle, addressof(value), sizeof(value))
        self.value = value

    def __len__(self):
        return sizeof(self.value)

class DialogItemTemplate(object):
    _window_class_ = None
    _window_style_ = WS_CHILD|WS_VISIBLE
    _window_style_ex_ = 0

    def __init__(self,
                 wclass = None, # the window class
                 id = 0, # the control id
                 title = "",
                 style = None,
                 exStyle = None,
                 rcPos = RCDEFAULT,
                 orStyle = None,
                 orExStyle = None,
                 nandStyle = None,
                 nandExStyle = None):
        if not self._window_class_ and not wclass:
            raise ValueError("A window class must be specified")
        if wclass is not None:
            wclass = StringOrOrd(wclass)
        else:
            wclass = StringOrOrd(self._window_class_)
        title = StringOrOrd(title)
        if style is None:
            style = self._window_style_
        if exStyle is None:
            exStyle = self._window_style_ex_
        if orStyle:
            style |= orStyle
        if orExStyle:
            exStyle |= orExStyle
        if nandStyle:
            style &= ~nandStyle
        if rcPos.left == CW_USEDEFAULT:
            cx = 50
            x = 0
        else:
            cx = rcPos.right
            x = rcPos.left
        if rcPos.top == CW_USEDEFAULT:
            cy = 50
            y = 0
        else:
            cy = rcPos.bottom
            y = rcPos.top
        header = DLGITEMTEMPLATE()
        byteCount = sizeof(header)
        byteCount += 2 # two bytes for extraCount
        byteCount += len(wclass) + len(title)
        d, rem = divmod(byteCount, 4)
        byteCount += rem # must be a dword multiple
        valuetype = c_ubyte * byteCount
        value = valuetype()
        header = DLGITEMTEMPLATE.from_address(addressof(value))
        # header is overlayed on value
        header.exStyle = exStyle
        header.style = style
        header.x = x
        header.y = y
        header.cx = cx
        header.cy = cy
        header.id = id
        # now, memcpy over the window class
        offset = sizeof(header)
        memcpy(addressof(value)+offset, addressof(wclass.value), len(wclass))
        # len really returns sizeof wclass.value
        offset += len(wclass)
        # now copy the title
        memcpy(addressof(value)+offset, addressof(title.value), len(title))
        offset += len(title)
        extraCount = c_ushort.from_address(addressof(value)+offset)
        extraCount.value = 0
        self.value = value

    def __len__(self):
        return sizeof(self.value)

PUSHBUTTON = 0x80
EDITTEXT = 0x81
LTEXT = 0x82
LISTBOX = 0x83
SCROLLBAR = 0x84
COMBOBOX = 0x85

class PushButton(DialogItemTemplate):
    _window_class_ = PUSHBUTTON
    _window_style_ = WS_CHILD|WS_VISIBLE|WS_TABSTOP

class DefPushButton(DialogItemTemplate):
    _window_class_ = PUSHBUTTON
    _window_style_ = WS_CHILD|WS_VISIBLE|WS_TABSTOP|BS_DEFPUSHBUTTON

class GroupBox(DialogItemTemplate):
    _window_class_ = PUSHBUTTON
    _window_style_ = WS_CHILD|WS_VISIBLE|BS_GROUPBOX

class EditText(DialogItemTemplate):
    _window_class_ = EDITTEXT
    _window_style_ = WS_CHILD|WS_VISIBLE|WS_BORDER|WS_TABSTOP

class StaticText(DialogItemTemplate):
    _window_class_ = LTEXT
    _window_style_ = WS_CHILD|WS_VISIBLE|WS_GROUP

class ListBox(DialogItemTemplate):
    _window_class_ = LISTBOX
    _window_style_ = LBS_STANDARD

class ScrollBar(DialogItemTemplate):
    _window_class_ = SCROLLBAR
    _window_style_ = WS_CHILD|WS_VISIBLE|WS_TABSTOP|SBS_VERT|SBS_RIGHTALIGN

class ComboBox(DialogItemTemplate):
    _window_class_ = COMBOBOX
    _window_style_ = WS_VISIBLE|WS_CHILD|WS_OVERLAPPED|WS_VSCROLL|WS_TABSTOP|CBS_DROPDOWNLIST

class RadioButton(DialogItemTemplate):
    _window_class_ = PUSHBUTTON
    _window_style_ = WS_CHILD|WS_VISIBLE|WS_GROUP|WS_TABSTOP|BS_RADIOBUTTON

class AutoRadioButton(DialogItemTemplate):
    _window_class_ = PUSHBUTTON
    _window_style_ = WS_CHILD|WS_VISIBLE|WS_GROUP|WS_TABSTOP|BS_AUTORADIOBUTTON

class CheckBox(DialogItemTemplate):
    _window_class_ = PUSHBUTTON
    _window_style_ = WS_CHILD|WS_VISIBLE|WS_GROUP|WS_TABSTOP|BS_CHECKBOX

class AutoCheckBox(DialogItemTemplate):
    _window_class_ = PUSHBUTTON
    _window_style_ = WS_CHILD|WS_VISIBLE|WS_GROUP|WS_TABSTOP|BS_AUTOCHECKBOX

class Dialog(Window):
    """supports _dialog_id_ and _dialog_module_ class properties or
    use _dialog_template_"""
    _dialog_template_ = None
    _dialog_module_ = None
    _dialog_id_ = None

    def __init__(self, template = None, id = None, module = None):
        """module and dlgid can be passed as parameters or be given as class properties"""
        self.module = None
        self.id = None
        self.template = None
        if template or self._dialog_template_:
            self.template = template or self._dialog_template_
        elif module or self._dialog_module_:
            self.module = module or self._dialog_module_
            self.id = id or self._dialog_id_
        if self.module and type(self.module) == type(''): #module is given as path name
            self.module = LoadLibrary(self.module)
        self.m_handle = 0 #filled in on init dialog

    def DoModal(self, parent = 0, center = 1):
        self.center = center
        if self.template:
            return DialogBoxIndirectParam(self.module,
                                          self.template.handle,
                                          handle(parent),
                                          DialogProc(self.DlgProc),
                                          0)
        else:
            return DialogBoxParam(self.module, self.id, handle(parent),
                                  DialogProc(self.DlgProc), 0)

    def DlgProc(self, hwnd, uMsg, wParam, lParam):
        handled, result = self._msg_map_.Dispatch(self, hwnd, uMsg, wParam, lParam)
        return result

    def GetDlgItem(self, nIDDlgItem, windowClass = None):
        """specify window class to get a 'Venster' wrapped control"""
        hWnd = GetDlgItem(self.handle, nIDDlgItem)
        if hWnd and windowClass:
            return windowClass(hWnd = hWnd)
        else:
            return hWnd

    def EndDialog(self, exitCode):
        EndDialog(self.handle, exitCode)

    def OnOK(self, event):
        self.EndDialog(IDOK)

    def OnCancel(self, event):
        self.EndDialog(IDCANCEL)

    def OnInitDialog(self, event):
        self.m_handle = event.handle
        if self.center: self.CenterWindow()
        return 0

    _msg_map_ = MSG_MAP([MSG_HANDLER(WM_INITDIALOG, OnInitDialog),
                         CMD_ID_HANDLER(IDOK, OnOK),
                         CMD_ID_HANDLER(IDCANCEL, OnCancel)])
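# --- Editor's illustrative sketch (not part of the original file). ---
# A minimal in-memory dialog template built from the classes above. Assumes
# RECT is exported by pywingui.windows; note that this module reads rcPos.right
# and rcPos.bottom as the *width* and *height* (in dialog units).
def _demo_template():
    return DialogTemplate(title="Example",
                          style=WS_CAPTION | DS_MODALFRAME | DS_SETFONT,
                          rcPos=RECT(0, 0, 180, 60),
                          items=[EditText(id=1001, rcPos=RECT(10, 10, 160, 12)),
                                 DefPushButton(id=IDOK, title="OK",
                                               rcPos=RECT(120, 30, 50, 14))])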
| mit | -6,103,811,622,903,813,000 | 31.264192 | 121 | 0.582662 | false | 3.771567 | false | false | false |
atareao/cpu-g | src/upower.py | 1 | 7755 |
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
#
# CPU-G is a program that displays information about your CPU,
# RAM, Motherboard and some general information about your System.
#
# Copyright © 2009 Fotis Tsamis <ftsamis at gmail dot com>.
# Copyright © 2016-2019 Lorenzo Carbonell (aka atareao)
# <lorenzo.carbonell.cerezo at gmail dot com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import dbus
from collections import namedtuple
from functools import partial
from comun import _


def convert(dbus_obj):
    """Converts dbus_obj from dbus type to python type.
    :param dbus_obj: dbus object.
    :returns: dbus_obj in python type.
    """
    _isinstance = partial(isinstance, dbus_obj)
    ConvertType = namedtuple('ConvertType', 'pytype dbustypes')

    pyint = ConvertType(int, (dbus.Byte, dbus.Int16, dbus.Int32, dbus.Int64,
                              dbus.UInt16, dbus.UInt32, dbus.UInt64))
    pybool = ConvertType(bool, (dbus.Boolean, ))
    pyfloat = ConvertType(float, (dbus.Double, ))
    pylist = ConvertType(lambda _obj: list(map(convert, dbus_obj)),
                         (dbus.Array, ))
    pytuple = ConvertType(lambda _obj: tuple(map(convert, dbus_obj)),
                          (dbus.Struct, ))
    types_str = (dbus.ObjectPath, dbus.Signature, dbus.String)
    pystr = ConvertType(str, types_str)
    pydict = ConvertType(
        lambda _obj: dict(zip(map(convert, dbus_obj.keys()),
                              map(convert, dbus_obj.values())
                              )
                          ),
        (dbus.Dictionary, )
    )

    for conv in (pyint, pybool, pyfloat, pylist, pytuple, pystr, pydict):
        if any(map(_isinstance, conv.dbustypes)):
            return conv.pytype(dbus_obj)
    return dbus_obj


class BatteryDriver():

    def __init__(self):
        bus = dbus.SystemBus()
        bat0_object = bus.get_object(
            'org.freedesktop.UPower',
            '/org/freedesktop/UPower/devices/battery_BAT0')
        self.__statistics = bat0_object.get_dbus_method(
            'GetStatistics',
            'org.freedesktop.UPower.Device')
        self.__history = bat0_object.get_dbus_method(
            'GetHistory',
            'org.freedesktop.UPower.Device')
        self.bat0 = dbus.Interface(bat0_object,
                                   'org.freedesktop.DBus.Properties')

    def __get(self, parameter):
        return self.bat0.Get('org.freedesktop.UPower.Device', parameter)

    def get_native_path(self):
        return self.__get('NativePath')

    def get_vendor(self):
        return self.__get('Vendor')

    def get_model(self):
        return self.__get('Model')

    def get_serial(self):
        return self.__get('Serial')

    def get_update_time(self):
        return self.__get('UpdateTime')

    def get_type(self):
        ans = self.__get('Type')
        if ans == 0:
            return _('Unknown')
        elif ans == 1:
            return _('Line Power')
        elif ans == 2:
            return _('Battery')
        elif ans == 3:
            return _('Ups')
        elif ans == 4:
            return _('Monitor')
        elif ans == 5:
            return _('Mouse')
        elif ans == 6:
            return _('Keyboard')
        elif ans == 7:
            return _('Pda')
        elif ans == 8:
            return _('Phone')
        return _('Unknown')

    def get_power_supply(self):
        return convert(self.__get('PowerSupply'))

    def get_has_history(self):
        return convert(self.__get('HasHistory'))

    def get_online(self):
        return convert(self.__get('Online'))

    def get_energy(self):
        return convert(self.__get('Energy'))  # Wh

    def get_energy_empty(self):
        return self.__get('EnergyEmpty')

    def get_energy_full(self):
        return self.__get('EnergyFull')

    def get_energy_full_design(self):
        return self.__get('EnergyFullDesign')

    def get_energy_rate(self):
        return self.__get('EnergyRate')

    def get_voltage(self):  # v
        return self.__get('Voltage')

    def get_time_to_empty(self):  # s
        return self.__get('TimeToEmpty')

    def get_time_to_full(self):  # s
        return self.__get('TimeToFull')

    def get_percentage(self):
        return self.__get('Percentage')

    def get_is_present(self):
        return convert(self.__get('IsPresent'))

    def get_state(self):
        ans = self.__get('State')
        if ans == 0:
            return _('Unknown')
        elif ans == 1:
            return _('Charging')
        elif ans == 2:
            return _('Discharging')
        elif ans == 3:
            return _('Empty')
        elif ans == 4:
            return _('Fully charged')
        elif ans == 5:
            return _('Pending charge')
        elif ans == 6:
            return _('Pending discharge')
        return _('Unknown')

    def get_capacity(self):  # < 75% renew battery
        return self.__get('Capacity')

    def get_technology(self):
        ans = self.__get('Technology')
        if ans == 0:
            return _('Unknown')
        elif ans == 1:
            return _('Lithium ion')
        elif ans == 2:
            return _('Lithium polymer')
        elif ans == 3:
            return _('Lithium iron phosphate')
        elif ans == 4:
            return _('Lead acid')
        elif ans == 5:
            return _('Nickel cadmium')
        elif ans == 6:
            return _('Nickel metal hydride')
        return _('Unknown')

    def get_statistics_discharging(self):
        return convert(self.__statistics('discharging'))

    def get_statistics_charging(self):
        return convert(self.__statistics('charging'))

    def get_history_rate(self, ndata=1000):
        '''
        time: The time value in seconds from the gettimeofday() method.
        value: the rate in W.
        state: The state of the device, for instance charging or discharging.
        '''
        return convert(self.__history('rate', 0, ndata))

    def get_history_charge(self, ndata=1000):
        '''
        time: The time value in seconds from the gettimeofday() method.
        value: the charge in %.
        state: The state of the device, for instance charging or discharging.
        '''
        return convert(self.__history('charge', 0, ndata))


if __name__ == '__main__':
    bd = BatteryDriver()
    print(bd.get_native_path())
    print(bd.get_vendor())
    print(bd.get_model())
    print(bd.get_serial())
    print(bd.get_update_time())
    print(bd.get_type())
    print(bd.get_power_supply())
    print(bd.get_has_history())
    print(bd.get_online())
    print(bd.get_energy())
    print(bd.get_energy_empty())
    print(bd.get_energy_full())
    print(bd.get_energy_full_design())
    print(bd.get_energy_rate())
    print(bd.get_voltage())
    print(bd.get_time_to_empty())
    print(bd.get_time_to_full())
    print(bd.get_percentage())
    print(bd.get_is_present())
    print(bd.get_state())
    print(bd.get_capacity())
    print(bd.get_technology())
    print(bd.get_statistics_discharging())
    print(bd.get_statistics_charging())
    print(bd.get_history_rate())
    print(bd.get_history_charge())
| gpl-3.0 | -4,603,124,261,284,974,600 | 30.644898 | 77 | 0.58042 | false | 3.758119 | false | false | false |
maikelwever/gtkuttle | appindicator_replacement.py | 1 | 2820 |
#=========================
#
# AppIndicator for GTK
# drop-in replacement
#
# Copyright 2010
# Nathan Osman
#
#=========================
#
# Original source unknown.
# I downloaded this from:
# https://github.com/captn3m0/hackertray
# If you made this gem, please let me know.
#
# They hardcoded the icon file path in here,
# so i'll do the same.
#
#=========================

# We require PyGTK
import gtk
import gobject

# We also need os and sys
import os

# Types
CATEGORY_APPLICATION_STATUS = 0

# Status
STATUS_ACTIVE = 0
STATUS_ATTENTION = 1

# Locations to search for the given icon
def get_icon_filename(icon_name):
    # Determine where the icon is
    return os.path.abspath(os.path.join(os.path.dirname(__file__), 'icons', 'gtkuttle_{0}.png'.format(icon_name)))

# The main class
class Indicator:

    # Constructor
    def __init__(self, unknown, icon, category):
        # Store the settings
        self.inactive_icon = get_icon_filename("down")
        self.active_icon = get_icon_filename("down")
        # Create the status icon
        self.icon = gtk.StatusIcon()
        # Initialize to the default icon
        self.icon.set_from_file(self.inactive_icon)
        # Set the rest of the vars
        self.menu = None # We have no menu yet

    def set_menu(self, menu):
        # Save a copy of the menu
        self.menu = menu
        # Now attach the icon's signal
        # to the menu so that it becomes displayed
        # whenever the user clicks it
        self.icon.connect("activate", self.show_menu)

    def set_status(self, status):
        # Status defines whether the active or inactive
        # icon should be displayed.
        if status == STATUS_ACTIVE:
            self.icon.set_from_file(self.inactive_icon)
        else:
            self.icon.set_from_file(self.active_icon)

    def set_label(self, label):
        self.icon.set_title(label)
        return

    def set_icon(self, icon):
        # Set the new icon
        self.icon.set_from_file(get_icon_filename(icon))

    def set_attention_icon(self, icon):
        # Set the icon filename as the attention icon
        self.active_icon = get_icon_filename(icon)

    def show_menu(self, widget):
        # Show the menu
        self.menu.popup(None, None, None, 0, 0)
        # Get the location and size of the window
        mouse_rect = self.menu.get_window().get_frame_extents()
        self.x = mouse_rect.x
        self.y = mouse_rect.y
        self.right = self.x + mouse_rect.width
        self.bottom = self.y + mouse_rect.height
        # Set a timer to poll the menu
        self.timer = gobject.timeout_add(100, self.check_mouse)

    def check_mouse(self):
        if not self.menu.get_window().is_visible():
            return
        # Now check the global mouse coords
        root = self.menu.get_screen().get_root_window()
        x, y, z = root.get_pointer()
        if x < self.x or x > self.right or y < self.y or y > self.bottom:
            self.hide_menu()
        else:
            return True

    def hide_menu(self):
        self.menu.popdown()
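# --- Editor's illustrative sketch (not part of the original file). ---
# Mirrors the libappindicator API this module replaces; assumes a PyGTK menu.
def _demo_indicator():
    ind = Indicator("gtkuttle", "down", CATEGORY_APPLICATION_STATUS)
    menu = gtk.Menu()
    menu.append(gtk.MenuItem("Quit"))
    menu.show_all()
    ind.set_menu(menu)
    return ind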
| gpl-3.0 | 7,842,098,730,820,717,000 | 21.204724 | 111 | 0.658511 | false | 3.065217 | false | false | false |
JohnReid/biopsy | Python/biopsy/gapped_pssms/__init__.py | 1 | 3770 |
#
# Copyright John Reid 2006
#

import numpy, numpy.random, scipy.special, math

from _maths import *
from _generate import *
from _generate_2 import *
from _variational import *
from gapped_pssms import *
from weblogo import *

#
# Try to import C++ part of module if installed.
#
try:
    from _gapped_pssms import *

    #
    # The c implementation does not hold the data as numpy arrays
    # so provide some functions to create numpy arrays from the data
    #
    def _gapped_pssm_alpha_array( model ):
        """Beta prior parameters for gamma: the likelihood of a gap"""
        return numpy.array(
            [
                model.alpha( i )
                for i in xrange( 2 )
            ],
            dtype = numpy.float64
        )
    VariationalModel_C.alpha_array = _gapped_pssm_alpha_array

    def _gapped_pssm_varphi_array( model ):
        "Dirichlet prior parameters for background distribution"
        return numpy.array(
            [
                model.varphi( i )
                for i in xrange( 4 )
            ],
            dtype = numpy.float64
        )
    VariationalModel_C.varphi_array = _gapped_pssm_varphi_array

    def _gapped_pssm_phi_array( model ):
        "Dirichlet prior parameters for pssm distribution"
        return numpy.array(
            [
                model.phi( i )
                for i in xrange( 4 )
            ],
            dtype = numpy.float64
        )
    VariationalModel_C.phi_array = _gapped_pssm_phi_array

    def _gapped_pssm_lambda_array( model ):
        "Variational parameter for gamma"
        return numpy.array(
            [
                model.lambda_( i )
                for i in xrange( 2 )
            ],
            dtype = numpy.float64
        )
    VariationalModel_C.lambda_array = _gapped_pssm_lambda_array

    def _gapped_pssm_eta_array( model ):
        "Variational parameter for location of the gap"
        return numpy.array(
            [
                model.eta( i )
                for i in xrange( model.K - 1 )
            ],
            dtype = numpy.float64
        )
    VariationalModel_C.eta_array = _gapped_pssm_eta_array

    def _gapped_pssm_mu_array( model ):
        "Variational parameter for g: has_gap variable"
        return numpy.array(
            [
                model.mu( i )
                for i in xrange( model.N )
            ],
            dtype = numpy.float64
        )
    VariationalModel_C.mu_array = _gapped_pssm_mu_array

    def _gapped_pssm_omega_array( model ):
        "Variational parameters for background and pss distributions"
        return numpy.array(
            [
                [
                    model.omega( r, x )
                    for x in xrange( 4 )
                ]
                for r in xrange( model.K+1 )
            ],
            dtype = numpy.float64
        )
    VariationalModel_C.omega_array = _gapped_pssm_omega_array

    def _gapped_pssm_nu_sequence( model ):
        "Variational parameters for start positions of sites"
        return [
            numpy.array(
                [
                    model.nu( n, i )
                    for i in xrange( 2 * (model.sequence_length( n ) - model.K) )
                ],
                dtype = numpy.float64
            )
            for n in xrange( model.N )
        ]
    VariationalModel_C.nu_sequence = _gapped_pssm_nu_sequence

except ImportError:
    import warnings
    warnings.warn('Could not import C++ gapped PSSM module')
| mit | -4,675,622,362,759,512,000 | 30.416667 | 93 | 0.491777 | false | 4.269536 | false | false | false |
jpurplefox/PokeMovesetEvaluator | moves.py | 1 | 1267 |
class Move():
    def __init__(self, name, power, cooldown, energy):
        self.name = name
        self.power = power
        self.cooldown = cooldown
        self.energy = energy

    def get_total_power(self):
        return self.get_atacks_count() * self.power

    def get_total_cooldown(self):
        return self.get_atacks_count() * self.cooldown

    def __str__(self):
        return self.name

class FastMove(Move):
    def get_atacks_count(self):
        count = 100 / self.energy
        rest = 100 % self.energy
        if rest:
            count += 1
        return count

class ChargeMove(Move):
    def get_atacks_count(self):
        return 100 / self.energy

BUBBLE = FastMove('Bubble', 31.25, 2.3, 15)
MUD_SHOT = FastMove('Mud Shot', 6, 0.55, 7)
WATER_GUN = FastMove('Water Gun', 7.5, 0.5, 7)
TACKLE = FastMove('Tackle', 12, 1.1, 7)
HYDRO_PUMP = ChargeMove('Hydro Pump', 112.5, 3.8, 100)
ICE_PUNCH = ChargeMove('Ice Punch', 45, 3.5, 33)
SUBMISSION = ChargeMove('Submission', 37.5, 2.1, 33)
AQUA_TAIL = ChargeMove('Aqua Tail', 56.25, 2.35, 50)
WATER_PULSE = ChargeMove('Water Pulse', 43.75, 3.3, 25)
POWER_GEM = ChargeMove('Power Gem', 40, 2.9, 33)
PSYCHIC = ChargeMove('Psychic', 68.75, 2.8, 50)
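# --- Editor's illustrative sketch (not part of the original file). ---
# get_atacks_count() counts how many uses reach 100 energy, rounding up with
# integer division (Python 2). E.g. WATER_GUN (energy 7): 100/7 = 14 remainder
# 2, so 15 attacks, for a total power of 15 * 7.5 = 112.5.
def _demo_moveset_totals():
    fast, charge = WATER_GUN, HYDRO_PUMP
    return (fast.get_total_power() + charge.get_total_power(),
            fast.get_total_cooldown() + charge.get_total_cooldown())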
| gpl-3.0 | -593,390,082,307,868,000 | 30.675 | 55 | 0.590371 | false | 2.853604 | false | false | false |
dls-controls/scanpointgenerator | scanpointgenerator/generators/zipgenerator.py | 1 | 1984 |
from annotypes import Anno, deserialize_object, Array, Sequence, Union

from scanpointgenerator.core import Generator, AAlternate

with Anno("List of Generators to zip"):
    AGenerators = Array[Generator]
UGenerators = Union[AGenerators, Sequence[Generator], Generator]


@Generator.register_subclass(
    "scanpointgenerator:generator/ZipGenerator:1.0")
class ZipGenerator(Generator):
    """ Zip generators together, combining all generators into one """

    def __init__(self, generators, alternate=False):
        # type: (UGenerators, AAlternate) -> None
        self.generators = AGenerators([deserialize_object(g, Generator)
                                       for g in generators])
        assert len(self.generators), "At least one generator needed"

        units = []
        axes = []
        size = self.generators[0].size
        for generator in self.generators:
            assert generator.axes not in axes, "You cannot zip generators " \
                                               "on the same axes"
            assert generator.size == size, "You cannot zip generators " \
                                           "of different sizes"
            assert not generator.alternate, \
                "Alternate should not be set on the component generators of a" \
                "zip generator. Set it on the top level ZipGenerator only."
            axes += generator.axes
            units += generator.units

        super(ZipGenerator, self).__init__(axes=axes,
                                           size=size,
                                           units=units,
                                           alternate=alternate)

    def prepare_arrays(self, index_array):
        # The ZipGenerator gets its positions from its sub-generators
        zipped_arrays = {}
        for generator in self.generators:
            arrays = generator.prepare_arrays(index_array)
            zipped_arrays.update(arrays)
        return zipped_arrays
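# --- Editor's illustrative sketch (not part of the original file). ---
# Assumes scanpointgenerator's LineGenerator(axis, units, start, stop, size)
# is available; a ZipGenerator pairs same-size generators on distinct axes
# so the zipped scan visits 5 (x, y) points in lockstep.
def _demo_zip():
    from scanpointgenerator import LineGenerator
    xs = LineGenerator("x", "mm", 0.0, 1.0, 5)
    ys = LineGenerator("y", "mm", 2.0, 3.0, 5)
    return ZipGenerator([xs, ys])  # size 5, axes ["x", "y"]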
| apache-2.0 | 8,799,549,726,361,614,000 | 38.68 | 80 | 0.580141 | false | 4.984925 | false | false | false |
alurin/alurinium-image-processing | alurinium/image/image.py | 1 | 1292 |
from urllib.parse import urljoin
from django.conf import settings
from PIL import Image
import os


class ImageDescriptor(object):
    name = None
    url = None
    fullname = None
    width = None
    height = None

    @classmethod
    def create_from_file(cls, filename, parse=True):
        image = cls()
        image.name = filename
        image.url = urljoin(settings.MEDIA_URL, 'thumbs/' + filename)
        image.fullname = os.path.join(settings.MEDIA_ROOT, 'thumbs', filename)
        # get image size
        # TODO: Add exception handling
        if parse:
            image.update()
        # return result image
        return image

    @classmethod
    def create_from_image(cls, fullname, result_image):
        image = cls()
        image.fullname = fullname
        image.name = os.path.basename(fullname)
        image.url = urljoin(settings.MEDIA_URL, 'thumbs/' + image.name)
        # get image size
        # TODO: Add exception handling
        image.update(result_image)
        # return result image
        return image

    def update(self, image=None):
        if not image:
            image = Image.open(self.fullname)
        # store the dimensions on the descriptor (not on the PIL image)
        self.width, self.height = image.size

    def __str__(self):
        return "%s: %sx%s" % (self.url, self.width, self.height)
return "%s: %sx%s" % (self.url, self.width, self.height) | mit | 797,458,417,655,209,200 | 24.86 | 78 | 0.607585 | false | 4.11465 | false | false | false |
google/jax-cfd | jax_cfd/ml/model_utils.py | 1 | 3026 |
"""Helper methods for constructing trajectory functions in model_builder.py."""

import functools

from jax_cfd.base import array_utils


def with_preprocessing(fn, preprocess_fn):
  """Generates a function that computes `fn` on `preprocess_fn(x)`."""
  @functools.wraps(fn)
  def apply_fn(x, *args, **kwargs):
    return fn(preprocess_fn(x), *args, **kwargs)

  return apply_fn


def with_post_processing(fn, post_process_fn):
  """Generates a function that applies `post_process_fn` to outputs of `fn`."""
  @functools.wraps(fn)
  def apply_fn(*args, **kwargs):
    return post_process_fn(*fn(*args, **kwargs))

  return apply_fn


def with_split_input(fn, split_index, time_axis=0):
  """Decorates `fn` to be evaluated on first `split_index` time slices.

  The returned function is a generalization to pytrees of the function:
  `fn(x[:split_index], *args, **kwargs)`

  Args:
    fn: function to be transformed.
    split_index: number of input elements along the time axis to use.
    time_axis: axis corresponding to time dimension in `x` to decorated `fn`.

  Returns:
    decorated `fn` that is evaluated on only `split_index` first time slices of
    provided inputs.
  """
  @functools.wraps(fn)
  def apply_fn(x, *args, **kwargs):
    init, _ = array_utils.split_along_axis(x, split_index, axis=time_axis)
    return fn(init, *args, **kwargs)

  return apply_fn


def with_input_included(trajectory_fn, time_axis=0):
  """Returns a `trajectory_fn` that concatenates inputs `x` to trajectory."""
  @functools.wraps(trajectory_fn)
  def _trajectory(x, *args, **kwargs):
    final, unroll = trajectory_fn(x, *args, **kwargs)
    return final, array_utils.concat_along_axis([x, unroll], time_axis)

  return _trajectory


def decoded_trajectory_with_inputs(model, num_init_frames):
  """Returns trajectory_fn operating on decoded data.

  The returned function uses `num_init_frames` of the physics space trajectory
  provided as an input to initialize the model state, unrolls the trajectory of
  specified length that is decoded to the physics space using `model.decode_fn`.

  Args:
    model: model of a dynamical system used to obtain the trajectory.
    num_init_frames: number of time frames used from the physics trajectory to
      initialize the model state.

  Returns:
    Trajectory function that operates on physics space trajectories and returns
    unrolls in physics space.
  """
  def _trajectory_fn(x, steps, repeated_length=1):
    trajectory_fn = functools.partial(
        model.trajectory, post_process_fn=model.decode)
    # add preprocessing to convert data to model state.
    trajectory_fn = with_preprocessing(trajectory_fn, model.encode)
    # concatenate input trajectory to output trajectory for easier comparison.
    trajectory_fn = with_input_included(trajectory_fn)
    # make trajectories operate on full examples by splitting the init.
    trajectory_fn = with_split_input(trajectory_fn, num_init_frames)
    return trajectory_fn(x, steps, repeated_length)

  return _trajectory_fn
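# --- Editor's illustrative sketch (not part of the original file). ---
# Shows with_split_input on a plain array; assumes array_utils.split_along_axis
# accepts numpy/jax arrays as pytree leaves.
def _demo_split_input():
  import numpy as np
  # Sum only the first two entries along the leading (time) axis.
  head_sum = with_split_input(lambda x: x.sum(), split_index=2, time_axis=0)
  return head_sum(np.arange(4))  # -> 0 + 1 = 1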
| apache-2.0 | 106,992,124,681,665,070 | 34.6 | 80 | 0.71844 | false | 3.74042 | false | false | false |
axeleratio/CurlySMILESpy | csm_aliases.py | 1 | 13621 |
"""
This file: csm_aliases.py
Last modified: October 21, 2010
Package: CurlySMILES Version 1.0.1
Author: Axel Drefahl
E-mail: [email protected]
Internet: http://www.axeleratio.com/csm/proj/main.htm

Python module csm_aliases manages primary and secondary
aliases, which are replaced by a component notation (SFN,
Composite, SMILES or annotated SMILES code), when a user
notation is turned into a work notation.

The central method is compnt_notation(self, sAlias)
to get the component notation for an alias.

Copyright (C) 2010 Axel Drefahl

This file is part of the CurlySMILES package.

The CurlySMILES package is free software: you can redistribute it
and/or modify it under the terms of the GNU General Public License
as published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.

The CurlySMILES package is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with the CurlySMILES package.
If not, see <http://www.gnu.org/licenses/>.
"""
import sys, os

class AliasNotations:

    def __init__(self, sCsmpyDir, assignDict=1):
        """
        dictionaries with aliases:
           dictPrimAliases with primary aliases
           dictSecAliases with secondary aliases (for a secondary alias
                          a corresponding primary one must exist)
           dictClientAliases with client-provided aliases
        """
        self.sCsmpyDir = sCsmpyDir
        self.dictPrimAliases = None
        self.dictSecAliases = None
        if assignDict == 1:
            self.initDict()

    def initDict(self):
        self.dictPrimAliases = self.load_prim_aliases()
        self.dictSecAliases = self.load_sec_aliases()

    #===================================================================#
    # LOAD aliases                                                      #
    #===================================================================#
    """------------------------------------------------------------------
    load_prim_aliases:
       load primary aliases from dictionaries in python modules
       located in directory aliases (relative to directory of this
       module, which is expected to be in subdirectory csm/py);
    return: dictAliases = {sAliasGroup: dictGroup,...}
            sAliasGroup = alias group name such as neutral,
                          cation1p, anion1p, etc., equal to
                          module name in directory aliases;
            dictGroup = {sPrimAlias:sSmiles,...}
    """
    def load_prim_aliases(self):
        (sDirAliases, lstPyMod) = self.get_module_paths('aliases')

        # get all dictionaries with primary aliases
        dictPrimAliases = {}
        code0 = "sys.path.append('%s')" % sDirAliases
        exec code0
        for sPyMod in lstPyMod:
            lstParts = sPyMod.split(os.sep)
            sAliasGroup = lstParts[-1][0:-3]
            code1 = "import %s" % sAliasGroup
            exec code1
            sClassName = sAliasGroup[0].upper() + sAliasGroup[1:]
            sClassName = 'Alias' + sClassName
            code2 = "oDict = %s.%s()" % (sAliasGroup, sClassName)
            exec code2
            dictPrimAliases[sAliasGroup] = oDict.getDict()
            del oDict
        return dictPrimAliases

    """------------------------------------------------------------------
    load_sec_aliases:
       load secondary aliases from dictionaries in python modules
       located in directory secalia (relative to directory of this
       this module, which is expected to be in subdirectory csm/py);
    return: dictAliases = {sAliasGroup: dictGroup,...}
            sAliasGroup = alias group name such as neutral,
                          cation1p, anion1p, etc., equal to
                          module name in directory aliases;
            dictGroup = {sSecAlias:sPrimAlias,...}
    """
    def load_sec_aliases(self):
        (sDirAliases, lstPyMod) = self.get_module_paths('secalia')

        # get all dictionaries with secondary aliases
        dictSecAliases = {}
        code0 = "sys.path.append('%s')" % sDirAliases
        exec code0
        for sPyMod in lstPyMod:
            lstParts = sPyMod.split(os.sep)
            sAliasGroup = lstParts[-1][0:-3]
            # take only modules having a name starting with 'sec_'
            if cmp(sAliasGroup[0:4], 'sec_') == 0:
                sAliasGroup = sAliasGroup[4:]
            else:
                continue
            code1 = "import sec_%s" % sAliasGroup
            exec code1
            sClassName = sAliasGroup[0].upper() + sAliasGroup[1:]
            sClassName = 'Alias' + sClassName
            code2 = "oDict = sec_%s.%s()" % (sAliasGroup, sClassName)
            exec code2
            dictSecAliases[sAliasGroup] = oDict.getDict()
            del oDict
        return dictSecAliases

    """------------------------------------------------------------------
    get_module_paths: find and list absolute path for
                      each alias module either in sub-directory
                      sSubdir = 'aliases' or 'secalia'
    return: (sDirAliases,lstPyMod), where
            sDirAliases is absolute path to subdirectory, and
            lstPyMod is list of absolute paths to module files
    """
    def get_module_paths(self, sSubdir):
        # absolute path to aliases directory
        sDirAliases = self.sCsmpyDir + os.sep + sSubdir

        # get names of aliases modules
        lstPyMod = []
        lstFiles = []
        lstFiles = os.listdir(sDirAliases)
        for sFile in lstFiles:
            if len(sFile) < 6 or sFile[-3:] != '.py':
                continue
            sCompletePath = sDirAliases + os.sep + sFile
            if os.path.isfile(sCompletePath):
                lstPyMod.append(sCompletePath)
        return (sDirAliases, lstPyMod)

    #===================================================================#
    # LOOK-UP alias (and group-id)                                      #
    #===================================================================#
    """------------------------------------------------------------------
    compnt_notation: look up alias
    return: string with component notation; or
            None, if alias not found
    """
    def compnt_notation(self, sAlias):
        (sCompntNotation, sGroupID) = \
            self.compnt_notation_and_groupid(sAlias)
        return sCompntNotation

    """------------------------------------------------------------------
    compnt_notation_and_groupid: look up alias
    return: (sCompntNotation,sGroupId) or (None,None), if not found
            sCompntNotation = notation for alias replacement
            sGroupId = alias group name such as neutral, cation1p,
                       anion1p,etc.
    """
    def compnt_notation_and_groupid(self, sAlias):
        sCompntNotation = None
        sGroupId = None
        # Primary alias first ...
        (sCompntNotation, sGroupId) = self.lookup_as_prim_alias(sAlias)
        # ... if not found ...
        if sCompntNotation == None: # look up as secondary alias
            (sPrimAlias, sCompntNotation, sGroupId) = \
                self.lookup_as_sec_alias(sAlias)
        return (sCompntNotation, sGroupId)

    """------------------------------------------------------------------
    lookup_as_prim_alias:
    return: (sCompntNotation,sGroupId) for primary alias or
            (None,None) if not found
    """
    def lookup_as_prim_alias(self, sPrimAlias):
        for sGroupId in self.dictPrimAliases:
            dict = self.dictPrimAliases[sGroupId]
            if dict.has_key(sPrimAlias):
                return (dict[sPrimAlias], sGroupId)
        return (None, None)

    """------------------------------------------------------------------
    lookup_as_prim_alias_by_groupid:
    return: sCompntNotation for primary alias or None if not found
    """
    def lookup_as_prim_alias_by_groupid(self, sPrimAlias, sGroupId):
        if self.dictPrimAliases.has_key(sGroupId):
            dict = self.dictPrimAliases[sGroupId]
            if dict.has_key(sPrimAlias):
                return dict[sPrimAlias]
        else:
            return None

    """------------------------------------------------------------------
    lookup_as_sec_alias:
    return: (sPrimAlias, sCompntNotation,sGroupId) for secondary alias
            or (None,None,None) if not found
    """
    def lookup_as_sec_alias(self, sSecAlias):
        for sGroupId in self.dictSecAliases:
            dict = self.dictSecAliases[sGroupId]
            if dict.has_key(sSecAlias):
                sPrimAlias = dict[sSecAlias]
                sCompntNotation = \
                    self.lookup_as_prim_alias_by_groupid(sPrimAlias, sGroupId)
                if sCompntNotation != None:
                    return (sPrimAlias, sCompntNotation, sGroupId)
        return (None, None, None)

    #===================================================================#
    # MAKE alias dictionary containing primary and secondary aliases    #
    #===================================================================#
    """------------------------------------------------------------------
    makeAliasDict: check consistency of alias-alias and alias-groupid
                   relations and make dictionary that has both
                   primary and secondary aliases as key, while value
                   is the corresponding primary alias (if key is a
                   primary alias then key and value are the same)
                   NOTE: this method is for use during development
                         and extension of alias dictionaries
    return: (lstAmbig,dictAliases)
            lstAmbig = list of lines, each line reporting an
                       ambiguity (multiply used alias name)
                       empty if no ambiguities
            dictAliases: {sAlias: sPrimAlias,...}
                  sAlias = primary or secondary alias
                  sPrimAlias = primary alias corresponding
                               to sAlias
    Note: client aliases are not considered here
    """
    def makeAliasDict(self):
        lstAmbig = []
        dictAliases = {}

        # primary aliases
        dictPrimGroupId = {} # dict with first encountered group id
        for sPrimGroupId in self.dictPrimAliases:
            dictPrim = self.dictPrimAliases[sPrimGroupId]
            for sPrimAlias in dictPrim:
                if dictAliases.has_key(sPrimAlias):
                    sLine = '"%s" with two group ids: "%s" and "%s"' % \
                            (sPrimAlias, sPrimGroupId, dictPrimGroupId[sPrimAlias])
                    sLine += ' (both for primary alias)'
                    lstAmbig.append(sLine)
                else:
                    dictPrimGroupId[sPrimAlias] = sPrimGroupId
                    dictAliases[sPrimAlias] = sPrimAlias

        # secondary aliases
        dictSecGroupId = {} # dict with first encountered group id
        for sSecGroupId in self.dictSecAliases:
            dictSec = self.dictSecAliases[sSecGroupId]
            for sSecAlias in dictSec:
                sPrimAliasCorresp = dictSec[sSecAlias]
                # first, check if sec. alias was already used as prim. alias
                if dictAliases.has_key(sSecAlias):
                    sLine = 'sec. alias "%s" ' % sSecAlias
                    sLine += 'with group id "%s" conflicts ' % sSecGroupId
                    sLine += 'with same-name prim. alias of group "%s"' % \
                             dictPrimGroupId[sSecAlias]
                    lstAmbig.append(sLine)
                    continue
                # also make sure the corresp. prim. alias exists
                elif not dictAliases.has_key(sPrimAliasCorresp):
                    sLine = 'sec. alias "%s" ' % sSecAlias
                    sLine += 'with group id "%s" ' % sSecGroupId
                    sLine += 'has no corresponding prim. alias '
                    sLine += 'named "%s"' % sPrimAliasCorresp
                    lstAmbig.append(sLine)
                    continue
                else:
                    # also make sure prim. and sec. share same group id
                    (sSmiles, sGroupIdCorresp) = \
                        self.lookup_as_prim_alias(sPrimAliasCorresp)
                    if cmp(sSecGroupId, sGroupIdCorresp) != 0:
                        sLine = 'group id mismatch for sec. alias '
                        sLine += '"%s" in group "%s": ' % \
                                 (sSecAlias, sSecGroupId)
                        sLine += 'corresp. prim. alias "%s" ' % \
                                 sPrimAliasCorresp
                        sLine += 'is in group "%s"' % sGroupIdCorresp
                        lstAmbig.append(sLine)
                        continue
                # check if sec. alias is used twice
                if dictSecGroupId.has_key(sSecAlias):
                    sLine = '"%s" with two group ids: "%s" and "%s"' % \
                            (sSecAlias, sSecGroupId, dictSecGroupId[sSecAlias])
                    sLine += ' (both for secondary alias)'
                    lstAmbig.append(sLine)
                else:
                    dictSecGroupId[sSecAlias] = sSecGroupId
                    dictAliases[sSecAlias] = sPrimAliasCorresp
        return (lstAmbig, dictAliases)
| gpl-3.0 | -908,742,521,821,812,900 | 41.170279 | 79 | 0.532046 | false | 4.236703 | false | false | false |
DevHugo/zds-site | zds/mp/validators.py | 2 | 3770 | # -*- coding: utf-8 -*-
from django.http import Http404
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from zds.api.validators import Validator
from zds.member.models import Profile
class ParticipantsUserValidator(Validator):
can_be_empty = False
def validate_participants(self, value):
msg = None
if value or self.can_be_empty:
for participant in value:
if participant.username == self.get_current_user().username:
msg = _(u'Vous ne pouvez pas vous écrire à vous-même !')
try:
current = get_object_or_404(Profile, user__username=participant)
if not Profile.objects.contactable_members().filter(pk=current.pk).exists():
msg = _(u'Vous avez tenté d\'ajouter un utilisateur injoignable.')
except Http404:
msg = _(u'Un des participants saisi est introuvable')
else:
msg = _(u'Vous devez spécifier des participants.')
if msg is not None:
self.throw_error('participants', msg)
return value
def get_current_user(self):
raise NotImplementedError('`get_current_user()` must be implemented.')
class ParticipantsStringValidator(Validator):
"""
Validates participants field of a MP.
"""
def validate_participants(self, value, username):
"""
Checks about participants.
:param value: participants value
:return: participants value
"""
msg = None
if value:
participants = value.strip()
if participants != '':
if len(participants) == 1 and participants[0].strip() == ',':
msg = _(u'Vous devez spécfier des participants valides')
for participant in participants.split(','):
participant = participant.strip()
if participant == '':
continue
if participant.strip().lower() == username.lower():
msg = _(u'Vous ne pouvez pas vous écrire à vous-même !')
try:
current = get_object_or_404(Profile, user__username=participant)
if not Profile.objects.contactable_members().filter(pk=current.pk).exists():
msg = _(u'Vous avez tenté d\'ajouter un utilisateur injoignable.')
except Http404:
msg = _(u'Un des participants saisi est introuvable')
else:
msg = _(u'Le champ participants ne peut être vide')
if msg is not None:
self.throw_error('participants', msg)
return value
class TitleValidator(Validator):
"""
Validates title field of a MP.
"""
def validate_title(self, value):
"""
Checks about title.
:param value: title value
:return: title value
"""
msg = None
if value:
if value.strip() == '':
msg = _(u'Le champ titre ne peut être vide')
if msg is not None:
self.throw_error('title', msg)
return value
class TextValidator(Validator):
"""
Validates text field of a MP.
"""
def validate_text(self, value):
"""
Checks about text.
:param value: text value
:return: text value
"""
msg = None
if value:
if value.strip() == '':
msg = _(u'Le champ text ne peut être vide')
if msg is not None:
self.throw_error('text', msg)
return value
| gpl-3.0 | -8,151,883,243,706,463,000 | 33.154545 | 100 | 0.542188 | false | 4.298627 | false | false | false |
odrotleff/ROOTPWA | pyInterface/package/utils/_treeUtils.py | 3 | 5804 |
import sys
import pyRootPwa
import pyRootPwa.utils
_geantParticleCodes = {}
_geantParticleCodes[0] = "unknown"
_geantParticleCodes[1] = "gamma"
_geantParticleCodes[2] = "e"
_geantParticleCodes[3] = "e"
_geantParticleCodes[7] = "pi0"
_geantParticleCodes[8] = "pi"
_geantParticleCodes[9] = "pi"
_geantParticleCodes[11] = "K"
_geantParticleCodes[12] = "K"
_geantParticleCodes[13] = "n"
_geantParticleCodes[14] = "p"
_geantParticleCodes[15] = "pbar"
_geantParticleCodes[16] = "K0"
_geantParticleCodes[17] = "eta"
_geantParticleCodes[18] = "lambda"
_geantParticleCodes[57] = "rho(770)"
_geantParticleCodes[58] = "rho(770)"
_geantParticleCodes[59] = "rho(770)"
_geantParticleCodes[60] = "omega(782)"
_geantParticleCodes[61] = "eta'(958)"
_geantParticleCodes[62] = "phi(1020)"
_geantParticleCodes[45] = "d"
class _EventFile:
evtfile = None
lineCounter = 0
nLines = 0
def __init__(self, infile):
self.evtfile = infile
beginning = self.evtfile.tell()
nLinesPerParticle = int(self.evtfile.readline()[:-1]) + 1
i = 1
while self.evtfile.readline() != "":
i += 1
self.nLines = int(i / nLinesPerParticle)
self.evtfile.seek(beginning)
def __iter__(self):
return self
def __len__(self):
return self.nLines
def next(self):
n_lines_to_read = self.evtfile.readline()[:-1]
if n_lines_to_read == "":
raise StopIteration()
lines = [n_lines_to_read]
for i in range(0, int(n_lines_to_read)):
lines.append(self.evtfile.readline()[:-1])
if lines[-1] == "":
pyRootPwa.utils.printErr("Unexpected end of event file. Aborting...")
sys.exit(1)
return _Event(lines)
def writeEvent(self, event):
for line in event.lines:
self.evtfile.write(line + "\n")
class _Event:
lines = []
particleNames = []
physicsEvent = []
def __init__(self, lines):
self.lines = lines
def sort(self):
new_lines = self.lines[2:]
new_lines = sorted(new_lines, key=lambda entry: int(entry.split()[0]))
self.lines = self.lines[0:2] + new_lines
self.physicsEvent = []
self.particleNames = []
def __convertLineToPartProps(self, line):
part = line.split(' ')
(part[0], part[1]) = (int(part[0]), int(part[1]))
for j in range(2, 6):
part[j] = float(part[j])
partname = _geantParticleCodes[part[0]]
if part[1] > 0:
partname += "+"
elif part[1] < 0:
partname += "-"
else:
partname += "0"
part[0] = partname
part.pop(1)
part.pop(len(part)-1)
return part
def getPhysicsEvent(self):
if self.physicsEvent:
return self.physicsEvent
nmbParticles = int(self.lines[0])
part = self.__convertLineToPartProps(self.lines[1])
self.physicsEvent.append(pyRootPwa.ROOT.TVector3(part[1], part[2], part[3]))
fillPN = False
if self.particleNames == []:
fillPN = True
self.particleNames.append(part[0])
for i in range(2, nmbParticles + 1):
part = self.__convertLineToPartProps(self.lines[i])
if fillPN:
self.particleNames.append(part[0])
self.physicsEvent.append(pyRootPwa.ROOT.TVector3(part[1], part[2], part[3]))
return self.physicsEvent
def getParticleNames(self):
if not self.particleNames:
self.getPhysicsEvent()
return self.particleNames
def __str__(self):
retval = ""
for line in self.lines:
retval += line + '\n'
return retval[:-1]
def getTreeFromEvtFile(filename, treename = ""):
if pyRootPwa.config is None:
raise pyRootPwa.rootPwaException("pyRootPwa configuration not initialized")
if treename == "":
treename = str(hash(filename))
outTree = pyRootPwa.ROOT.TTree(treename, treename)
prodKinMomenta = pyRootPwa.ROOT.TClonesArray("TVector3")
decayKinMomenta = pyRootPwa.ROOT.TClonesArray("TVector3")
prodKinPartName = pyRootPwa.ROOT.TClonesArray("TObjString")
decayKinPartName = pyRootPwa.ROOT.TClonesArray("TObjString")
prodKinMomentaLeafName = pyRootPwa.config.prodKinMomentaLeafName
decayKinMomentaLeafName= pyRootPwa.config.decayKinMomentaLeafName
outTree.Branch(prodKinMomentaLeafName, "TClonesArray", prodKinMomenta)
outTree.Branch(decayKinMomentaLeafName, "TClonesArray", decayKinMomenta)
pyRootPwa.utils.printInfo('Converting "' + filename + '" to memory residing TTree...')
with open(filename, 'r') as infile:
inEventFile = _EventFile(infile)
index = 0
events = len(inEventFile)
progressbar = pyRootPwa.utils.progressBar(0, events)
progressbar.start()
try:
first = True
for event in inEventFile:
event.sort()
physicsVectors = event.getPhysicsEvent()
# check for the correct ordering of the names
particleNames = event.getParticleNames()
if first:
prodKinPartName[0] = pyRootPwa.ROOT.TObjString(particleNames[0])
for i in range(1, len(particleNames)):
decayKinPartName[i-1] = pyRootPwa.ROOT.TObjString(particleNames[i])
else:
if len(particleNames) != prodKinPartName.GetEntriesFast() + decayKinPartName.GetEntriesFast():
progressbar.cancel()
raise pyRootPwa.rootPwaException("Mismatch between number of particle names in TClonesArray and number of particles in event")
if prodKinPartName[0].GetString() != particleNames[0]:
progressbar.cancel()
raise pyRootPwa.rootPwaException("Inconsistent production particle types")
for i in range(1, len(particleNames)):
if decayKinPartName[i-1].GetString() != particleNames[i]:
progressbar.cancel()
raise pyRootPwa.rootPwaException("Inconsistent decay particle types")
# set the physics vectors in the tree
prodKinMomenta[0] = physicsVectors[0]
for i in range(len(physicsVectors[1:])):
decayKinMomenta[i] = physicsVectors[i+1]
outTree.Fill()
index += 1
progressbar.update(index)
except:
progressbar.cancel()
raise
pyRootPwa.utils.printSucc('Successfully created TTree with ' + str(events) + ' events.')
return (prodKinPartName, decayKinPartName, outTree)
| gpl-3.0 | -3,600,636,337,452,054,000 | 27.732673 | 132 | 0.699345 | false | 2.799807 | false | false | false |
jawaidss/halalar-web | halalar/api/tests/models.py | 1 | 4684 | from datetime import datetime
import mailchimp
from django.conf import settings
from django.contrib.auth.models import User
from django.core import mail
from django.test import TestCase
from . import TEST_DATA, BODY, create_user, create_profile, create_message
from ..models import Profile
class ProfileTestCase(TestCase):
m = mailchimp.Mailchimp()
def tearDown(self):
self.m.lists.batch_unsubscribe(settings.MAILCHIMP_LIST_ID,
[{'email': TEST_DATA[0]['email']}],
delete_member=True,
send_goodbye=False)
def test_save(self):
# tests that a token is generated on save
# if it is not given
user = User.objects.create_user('user1')
profile = Profile(user=user, age=0)
profile.save()
self.assertEqual(len(profile.token), 40)
# if it is null
user = User.objects.create_user('user2')
profile = Profile(user=user, age=0, token=None)
profile.save()
self.assertEqual(len(profile.token), 40)
# if it is blank
user = User.objects.create_user('user3')
profile = Profile(user=user, age=0, token='')
profile.save()
self.assertEqual(len(profile.token), 40)
old_token = profile.token
# tests that the token does not change on save
profile.save()
new_token = profile.token
self.assertEqual(old_token, new_token)
# tests that a given token is not overridden on save
user = User.objects.create_user('user4')
profile = Profile(user=user, age=0, token='token')
profile.save()
self.assertEqual(profile.token, 'token')
def test_serialize(self):
user = create_user()
profile = create_profile(user)
expected = {'age': TEST_DATA[0]['age'],
'career': TEST_DATA[0]['career'],
'city': TEST_DATA[0]['city'],
'community': TEST_DATA[0]['community'],
'country': TEST_DATA[0]['country'],
'email': TEST_DATA[0]['email'],
'family': TEST_DATA[0]['family'],
'gender': TEST_DATA[0]['gender'],
'photo': None,
'religion': TEST_DATA[0]['religion'],
'self': TEST_DATA[0]['self'],
'username': TEST_DATA[0]['username']}
self.assertEqual(profile.serialize(), expected)
del expected['email']
self.assertEqual(profile.serialize(False), expected)
def test_send_delayed_welcome_email(self):
user = create_user()
profile = create_profile(user)
profile.send_delayed_welcome_email()
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
self.assertTrue(email.subject)
self.assertTrue(email.message)
self.assertTrue(email.from_email)
self.assertTrue(len(email.to), 1)
self.assertEqual(email.to[0], user.email)
self.assertTrue(86399 <= (email.send_at - datetime.now()).seconds <= 86400)
def test_send_signup_notification_email(self):
user = create_user()
profile = create_profile(user)
profile.send_signup_notification_email()
self.assertEqual(len(mail.outbox), 1)
email = mail.outbox[0]
self.assertTrue(email.subject)
self.assertTrue(email.message)
self.assertTrue(email.from_email)
self.assertTrue(len(email.to), 1)
self.assertEqual(email.to[0], settings.ASANA_EMAIL)
def test_subscribe_to_mailchimp_list(self):
user = create_user()
profile = create_profile(user)
profile.subscribe_to_mailchimp_list()
self.assertEqual(self.m.lists.member_info(settings.MAILCHIMP_LIST_ID,
[{'email': TEST_DATA[0]['email']}])['success_count'], 1)
class MessageTestCase(TestCase):
def test_serialize(self):
sender = create_profile(create_user())
recipient = create_profile(create_user(1), 1)
message = create_message(sender, recipient)
expected = {'sender': TEST_DATA[0]['username'],
'recipient': TEST_DATA[1]['username'],
'timestamp': 'now',
'body': BODY}
self.assertEqual(message.serialize(), expected)
def test_send_push_notification(self):
sender = create_profile(create_user())
recipient = create_profile(create_user(1), 1)
message = create_message(sender, recipient)
message.send_push_notification() | mit | -4,581,687,343,895,741,000 | 35.317829 | 106 | 0.58006 | false | 4.083697 | true | false | false |
danianr/NINJa | jobqueue.py | 1 | 9587 | from Tkinter import *
import os
import posix
from joblist import JobList
import cups
import re
from collections import deque
import time
import sys
class Job(object):
def __init__(self, conn=None, jobId=None, maxsize=2147483647):
print >> sys.stderr, time.time(), 'Entry into Job(jobId=%s)' % (jobId,)
self.jobId = jobId
self.authenticated = False
printerUri = conn.getJobAttributes(jobId).get('printer-uri')
# This will raise an IPPError if the job has not finished transferring
# in the form of IPPError: (1030, 'client-error-not-found')
doc = conn.getDocument(printerUri, jobId, 1)
print >> sys.stderr, time.time(), 'After getDocument() for jobId:', jobId
self.size = os.stat(doc['file']).st_size
if self.size > maxsize:
print >> sys.stderr, time.time(), 'Document size is larger than accepted:', self.size
os.remove(doc['file'])
self.error = 'Document PostScript is too large to be printed;\ntry printing from a Mac'
self.pages = 0
self.sha512 = 'NA'
else:
# Note that the getDocument command must be issued prior to requesting
# detailed job attributes such as document-format, job-originating-host-name
# and job-originating-user-name, otherwise these attributes will be blank
digest_cmd = '/usr/bin/nice /usr/bin/openssl dgst -sha512 %s' % ( doc['file'] )
pagecount_cmd = './pagecount.sh %s %s' % ( doc['document-format'], doc['file'] )
sha512 = os.popen(digest_cmd).read()
print >> sys.stderr, time.time(), 'After the digest for jobId:', jobId
pagecount = os.popen(pagecount_cmd).read()
print >> sys.stderr, time.time(), 'After the pagecount for jobId:', jobId
try:
self.pages = int(pagecount)
self.error = None
except ValueError:
self.pages = 1
self.error = 'Unable to determine pagecount, you will be charged for actual usage'
self.sha512 = sha512[-129:-1]
self.docFormat = doc['document-format']
attr = conn.getJobAttributes(jobId)
self.uuid = attr['job-uuid']
self.creation = attr['time-at-creation']
self.username = attr['job-originating-user-name'].encode('ascii','ignore')
self.hostname = attr['job-originating-host-name'].encode('ascii','ignore')
self.title = attr['job-name'].encode('ascii','replace')
self.displayTitle = self.title[:47]
self.jobState = attr['job-state']
self.remote = printerUri.endswith('/remote')
# There is no need to keep the tmpfile around for remote jobs
if self.remote and doc['file'] != "":
os.remove(doc['file'])
self.tmpfile = None
elif self.size > maxsize:
self.tmpfile = None
else:
self.tmpfile = doc['file']
if ( attr.has_key('Duplex') and attr['Duplex'] != u'None' ):
self.duplex = True
self.pages = ( self.pages % 2 + self.pages ) / 2
else:
self.duplex = False
# Use the initially supplied jobId for the returned hash value
# defined using a lambda with a closure to make value immutable
self.__hash__ = lambda : jobId
def __cmp__(self, other):
if self.creation < other.creation:
return -1
elif self.creation > other.creation:
return 1
else:
return 0
def __repr__(self):
return '<jobId: %d, uuid: \'%s\', creation: %d, username: \'%s\', hostname: \'%s\', title:\'%s\', pages: %d, jobState: %d, duplex: %s>' \
% ( self.jobId, self.uuid, self.creation, self.username, self.hostname, self.title, self.pages, self.jobState, self.duplex )
def __str__(self):
return '%4d %-12s %-18s %-48s %6s' % ( self.jobId, self.username, self.hostname[:18], self.displayTitle[:48], self.pages )
def removeTmpFile(self):
if self.tmpfile is not None and self.tmpfile != "":
os.remove(self.tmpfile)
class JobMapping(object):
# Takes a sequence of Job objects, produces an iterator
# suitable for supplying to a a listbox (textual description)
# and allows direct access to Job objects based on their
# position. Also takes a list of positions and returns
# a tuple of Job objects associated with each
def __init__(self, iterable, username):
self.timestamp = time.time()
self.internal = list()
self.internal.extend(iterable)
self.username = username
self.dirty = False
def isDirty(self):
return self.dirty
def setDirty(self):
self.dirty = True
def map(self, iterable):
return map(lambda i: self.internal[int(i)], iterable)
# Only define getter accessors since this is technically
# a read-only snapshot
def __getitem__(self, x):
return self.internal[x]
def __getslice__(self, x, y):
return self.internal[x:y]
def __len__(self):
return len(self.internal)
def __iter__(self):
return iter(map(lambda j: j.__str__(), self.internal))
class JobQueue(object):
def __init__(self, unipattern, conn, multicastHandler=None, cloudAdapter=None, maxsize=2147483647):
self.unipattern = unipattern
self.conn = conn
self.mcast = multicastHandler
self.cloud = cloudAdapter
self.jobs = dict()
self.claimed = dict()
self.unclaimed = deque()
self.refreshReq = deque()
self.claimedMapFrame = None
self.unclaimedMapFrame = None
self.delay = 23 # seconds
self.maxsize = maxsize
self.processing = None
def getMapping(self, username=None):
self.refresh()
if username is None:
if self.unclaimedMapFrame is None or \
self.unclaimedMapFrame.isDirty():
self.unclaimedMapFrame = JobMapping(self.unclaimed, None)
return self.unclaimedMapFrame
else:
if self.claimedMapFrame is None or \
self.claimedMapFrame.isDirty() or \
self.claimedMapFrame.username != username:
if self.claimed.has_key(username):
self.claimedMapFrame = JobMapping(self.claimed[username], username)
else:
self.claimedMapFrame = JobMapping([], username)
return self.claimedMapFrame
def refresh(self, event=None, interjobHook=None, force=False):
if self.processing is not None:
return
now = time.time()
self.refreshReq.append(now)
for req in self.refreshReq:
if force or (req + self.delay) < now:
self.processing = now
break
else:
return
incompleteJobs = self.conn.getJobs(which_jobs='not-completed')
self.remove( filter( lambda x: not incompleteJobs.has_key(x), self.jobs.keys()) )
for jobId in filter( lambda x: not self.jobs.has_key(x), incompleteJobs.keys()):
try:
j = Job(self.conn, jobId, self.maxsize)
if not j.remote:
self.add(j)
except cups.IPPError as e:
print("caught an IPPError",e)
continue
if interjobHook is not None:
interjobHook()
self.refreshReq.clear()
rettime = time.time()
print >> sys.stderr, rettime, 'Total elapsed time for jobqueue.refresh():', rettime - now
self.processing = None
def add(self, job):
# updates the main index
self.jobs[job.jobId] = job
if self.unipattern.match(job.username):
if job.username not in self.claimed:
self.claimed[job.username] = deque()
self.claimed[job.username].appendleft(job)
if self.claimedMapFrame is not None and \
self.claimedMapFrame.username == job.username:
self.claimedMapFrame.setDirty()
if self.cloud is not None and self.mcast is not None and job.size <= self.cloud.maxsize:
self.mcast.advertise(job)
self.cloud.storeJob(job)
else:
self.unclaimed.appendleft(job)
if self.unclaimedMapFrame is not None:
self.unclaimedMapFrame.setDirty()
def remove(self, removedJobs):
for id in filter( lambda x: self.jobs.has_key(x), removedJobs):
j = self.jobs[id]
if j in self.unclaimed:
self.unclaimed.remove(j)
if self.unclaimedMapFrame is not None:
self.unclaimedMapFrame.setDirty()
else:
username=j.username
if self.claimed.has_key(username):
self.claimed[username].remove(j)
if ( len(self.claimed[username]) == 0 ):
del self.claimed[username]
if self.claimedMapFrame is not None and \
self.claimedMapFrame.username == username:
self.claimedMapFrame.setDirty()
del self.jobs[id]
def getClaimedUuids(self, username):
uuids = []
if username in self.claimed:
for j in self.claimed[username]:
urnuuid = j.uuid
uuids.append(urnuuid[9:])
return uuids
def __getitem__(self,x):
if x in self.jobs:
return self.jobs[x]
incompleteJobs = self.conn.getJobs(which_jobs='not-completed')
if incompleteJobs.has_key(x):
return Job(self.conn, x)
else:
return None
| mit | 4,192,363,589,211,776,000 | 36.303502 | 145 | 0.593303 | false | 3.917859 | false | false | false |
XIMDEX/ximdex | public_xmd/vendors/kupu/tools/compress.py | 1 | 1657 | #!/usr/bin/env python
"""Remove comments, newlines and redundant whitespace from JavaScript code
This reads all paths that were passed in as arguments on the command-line
and removes everything that is ignored by JavaScript. This makes that
the source isn't readable anymore (which I personally consider bad),
but also that less bytes have to be served by the server, scripts are
loaded faster and also that they're executed faster.
WARNING: This script converts files in place! Original files will be
overwritten. Do *not* run this on a development version of your code,
since you won't be able to get them back into the original state. This
should be ran only by system administrators if they want to speed up
their setups.
"""
import sys, re
one_line_comment = re.compile(r'^\s*//.*$', re.M)
trailing_comment = re.compile(r'//(\w|\s)*$', re.M)
multi_line_comment = re.compile(r'^\s*/\*.*?\*/', re.M | re.S)
whitespace_after_separator = re.compile(r';\s*', re.M | re.S)
whitespace_after_opening_bracket = re.compile(r'{\s*', re.M | re.S)
starting_whitespace = re.compile(r'^\s*', re.M | re.S)
def strip(data):
"""Processes the data, removing comments and unecessary whitespace."""
data = one_line_comment.sub('', data)
data = trailing_comment.sub('', data)
data = multi_line_comment.sub('', data)
data = whitespace_after_separator.sub(';', data)
data = whitespace_after_opening_bracket.sub('{', data)
data = starting_whitespace.sub('', data)
return data.strip()
for file in sys.argv[1:]:
data = open(file).read()
data = strip(data)
open(file, 'w').write(data)
| agpl-3.0 | -4,272,553,897,930,234,400 | 40.425 | 77 | 0.683162 | false | 3.633772 | false | false | false |
fish2000/django-delegate | setup.py | 1 | 1992 | #/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
import sys
import os
sys.path.append(os.getcwd())
import version
if 'sdist' in sys.argv and 'upload' in sys.argv:
import commands
finder = "/usr/bin/find %s \( -name \*.pyc -or -name .DS_Store \) -delete"
theplace = os.getcwd()
if theplace not in (".", "/"):
print("+ Deleting crapola from %s..." % theplace)
print("$ %s" % finder % theplace)
commands.getstatusoutput(finder % theplace)
print("")
setup(
name='django-delegate',
version='%s.%s.%s' % version.__version__,
description=version.__doc__,
long_description=version.__doc__,
author=version.__author__,
author_email=version.__email__,
maintainer=version.__author__,
maintainer_email=version.__email__,
license='BSD',
url='http://github.com/fish2000/django-delegate/',
download_url='https://github.com/fish2000/django-delegate/zipball/master',
keywords=[
'django',
'delegate',
'queryset',
'manager',
'method',
'dispatch',
'syntax-sugar'],
packages=[
'delegate'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Environment :: Other Environment',
'Environment :: Plugins',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
'Programming Language :: Python :: Implementation :: Jython',
'Topic :: Database',
'Topic :: Utilities']
)
| bsd-3-clause | 102,548,618,383,134,560 | 29.646154 | 78 | 0.5999 | false | 4.167364 | false | true | false |
ijmarshall/robotreviewer3 | robotreviewer/formatting.py | 1 | 1789 | """
formatting.py
functions for displaying RobotReviewer internal data in useful ways
"""
from robotreviewer.app import app
import logging
log = logging.getLogger(__name__)
def format_authors(author_list, max_authors=1):
et_al = False
if len(author_list) > max_authors:
et_al = True
author_list = author_list[:max_authors]
authors = u", ".join([u"{lastname} {initials}".format(**a) for a in author_list])
if et_al:
authors += " et al"
return authors
@app.context_processor
def short_citation_fn():
def short_citation(article):
try:
return u"{} {}, {}".format(article['authors'][0]['lastname'], article['authors'][0]['initials'], article.get('year', '[unknown year]'))
except Exception as e:
log.debug("Fallback: {} raised".format(e))
return article['filename']
return dict(short_citation=short_citation)
@app.context_processor
def long_citation_fn():
def long_citation(article):
try:
bracket_issue = u"({})".format(article['issue']) if article.get('issue') else u""
return u"{}. {} {} {}. {}{}; {}".format(format_authors(article['authors']), article['title'], article.get('journal_abbr', article['journal']), article.get('year', '[unknown year]'), article.get('volume', '?'), bracket_issue, article.get('pages', '?'))
except Exception as e:
log.debug("Fallback: {} raised".format(e))
return u"Unable to extract citation information for file {}".format(article['filename'])
return dict(long_citation=long_citation)
@app.context_processor
def not_rcts_fn():
def not_rcts(articles):
return [r for r in articles if r.get('rct', {}).get('is_rct', True) == False]
return dict(not_rcts=not_rcts)
| gpl-3.0 | 5,898,080,818,070,069,000 | 36.270833 | 263 | 0.624371 | false | 3.719335 | false | false | false |
lambdalisue/django-permission | src/permission/logics/staff.py | 1 | 5221 | # coding=utf-8
"""
Permission logic module for author based permission system
"""
from permission.conf import settings
from permission.logics.base import PermissionLogic
from permission.compat import is_authenticated
class StaffPermissionLogic(PermissionLogic):
"""
Permission logic class for is_staff authority based permission system
"""
def __init__(self,
any_permission=None,
add_permission=None,
change_permission=None,
delete_permission=None):
"""
Constructor
Parameters
----------
any_permission : boolean
True for give any permission of the specified object to the staff
user. Default value will be taken from
``PERMISSION_DEFAULT_SPL_ANY_PERMISSION`` in
settings.
add_permission : boolean
True for give change permission of the specified object to the
staff user.
It will be ignored if :attr:`any_permission` is True.
Default value will be taken from
``PERMISSION_DEFAULT_SPL_ADD_PERMISSION`` in
settings.
change_permission : boolean
True for give change permission of the specified object to the
staff user.
It will be ignored if :attr:`any_permission` is True.
Default value will be taken from
``PERMISSION_DEFAULT_SPL_CHANGE_PERMISSION`` in
settings.
delete_permission : boolean
True for give delete permission of the specified object to the
staff user.
It will be ignored if :attr:`any_permission` is True.
Default value will be taken from
``PERMISSION_DEFAULT_SPL_DELETE_PERMISSION`` in
settings.
"""
self.any_permission = any_permission
self.add_permission = add_permission
self.change_permission = change_permission
self.delete_permission = delete_permission
if self.any_permission is None:
self.any_permission = \
settings.PERMISSION_DEFAULT_SPL_ANY_PERMISSION
if self.add_permission is None:
self.add_permission = \
settings.PERMISSION_DEFAULT_SPL_ADD_PERMISSION
if self.change_permission is None:
self.change_permission = \
settings.PERMISSION_DEFAULT_SPL_CHANGE_PERMISSION
if self.delete_permission is None:
self.delete_permission = \
settings.PERMISSION_DEFAULT_SPL_DELETE_PERMISSION
def has_perm(self, user_obj, perm, obj=None):
"""
Check if user have permission (of object)
If the user_obj is not authenticated, it return ``False``.
If no object is specified, it return ``True`` when the corresponding
permission was specified to ``True`` (changed from v0.7.0).
This behavior is based on the django system.
https://code.djangoproject.com/wiki/RowLevelPermissions
If an object is specified, it will return ``True`` if the user is
staff. The staff can add, change or delete the object (you can change
this behavior to set ``any_permission``, ``add_permission``,
``change_permission``, or ``delete_permission`` attributes of this
instance).
Parameters
----------
user_obj : django user model instance
A django user model instance which be checked
perm : string
`app_label.codename` formatted permission string
obj : None or django model instance
None or django model instance for object permission
Returns
-------
boolean
Weather the specified user have specified permission (of specified
object).
"""
if not is_authenticated(user_obj):
return False
# construct the permission full name
add_permission = self.get_full_permission_string('add')
change_permission = self.get_full_permission_string('change')
delete_permission = self.get_full_permission_string('delete')
if obj is None:
if user_obj.is_staff:
if self.add_permission and perm == add_permission:
return True
if self.change_permission and perm == change_permission:
return True
if self.delete_permission and perm == delete_permission:
return True
return self.any_permission
return False
elif user_obj.is_active:
if user_obj.is_staff:
if self.any_permission:
# have any kind of permissions to the obj
return True
if (self.add_permission and
perm == add_permission):
return True
if (self.change_permission and
perm == change_permission):
return True
if (self.delete_permission and
perm == delete_permission):
return True
return False
| mit | -2,682,839,995,101,287,000 | 38.854962 | 78 | 0.586861 | false | 5.133727 | false | false | false |
socialplanning/opencore | opencore/listen/featurelet.py | 1 | 2026 | import logging
from opencore.featurelets.interfaces import IListenContainer
from opencore.featurelets.interfaces import IListenFeatureletInstalled
from opencore.feed.interfaces import ICanFeed
from opencore.interfaces import IProject
from opencore.interfaces.event import ListenFeatureletCreatedEvent
from opencore.listen.mailinglist import OpenMailingList
from Products.CMFCore.utils import getToolByName
from Products.listen.interfaces import IListLookup
from topp.featurelets.base import BaseFeaturelet
from topp.featurelets.interfaces import IFeaturelet
from topp.featurelets.interfaces import IFeatureletSupporter
from zope.component import getMultiAdapter
from zope.component import getUtility
from zope.interface import Interface
from zope.interface import alsoProvides
from zope.interface import implements
from zope.event import notify
log = logging.getLogger('opencore.featurelets.listen')
class ListenFeaturelet(BaseFeaturelet):
"""
A featurelet that installs a folder for managing listen based
mailing lists.
"""
implements(IFeaturelet)
id = "listen"
title = "Mailing lists"
#config_view = "listen_config"
installed_marker = IListenFeatureletInstalled
_info = {'content': ({'id': 'lists', 'title': 'Mailing lists',
'portal_type': 'Folder'},),
'menu_items': ({'title': u'Mailing lists',
'description': u'Mailing lists',
'action': u'lists',
'order': 0,
},
),
}
def deliverPackage(self, obj):
"""
See IFeaturelet.
"""
BaseFeaturelet.deliverPackage(self, obj)
container = obj._getOb(self._info['content'][0]['id'])
container.setLayout('mailing_lists')
alsoProvides(container, IListenContainer)
alsoProvides(container, ICanFeed)
notify(ListenFeatureletCreatedEvent(obj))
return self._info
| gpl-3.0 | -6,922,858,965,203,851,000 | 35.836364 | 70 | 0.678677 | false | 4.472406 | false | false | false |
h-hirokawa/swampdragon | swampdragon/tests/test_serializer_tools.py | 1 | 3469 | from .dragon_test_case import DragonTestCase
from ..serializers.model_serializer import ModelSerializer
from ..serializers import serializer_tools
from django.db import models
from swampdragon.tests.models import SDModel
class ReverseM2M(SDModel):
number = models.IntegerField()
class M2M(SDModel):
name = models.CharField(max_length=10)
many = models.ManyToManyField(ReverseM2M)
class ReverseFk(SDModel):
name = models.CharField(max_length=10)
class Fk(SDModel):
number = models.IntegerField()
reverse_fk = models.ForeignKey(ReverseFk)
class ReverseM2MSerializer(ModelSerializer):
m2m_set = 'M2MSerializer'
class Meta:
model = ReverseM2M
publish_fields = ('number', 'm2m_set')
class ReverseO2O(SDModel):
bar = models.CharField(max_length=100)
class O2O(SDModel):
foo = models.CharField(max_length=100)
reverse_o2o = models.OneToOneField(ReverseO2O)
class M2MSerializer(ModelSerializer):
many = ReverseM2MSerializer
class Meta:
model = M2M
class FKSerializer(ModelSerializer):
reverse_fk = 'ReverseFKSerializer'
class Meta:
model = Fk
publish_fields = ('reverse_fk', )
class ReverseFKSerializer(ModelSerializer):
fk_set = FKSerializer
class Meta:
model = ReverseFk
publish_fields = ('fk_set', )
class O2OSerializer(ModelSerializer):
reverse_o2o = 'ReverseO2OSerializer'
class Meta:
model = O2O
class ReverseO2OSerializer(ModelSerializer):
o2o = O2OSerializer
class Meta:
model = ReverseO2O
fields = ('o2o')
class TestSerializerTools(DragonTestCase):
def test_m2m(self):
reverse_m2m = ReverseM2M.objects.create(number=12)
m2m = M2M.objects.create(name='test')
m2m.many.add(reverse_m2m)
mapping = serializer_tools.get_id_mappings(M2MSerializer(instance=m2m))
self.assertEqual(list(mapping['many']), [reverse_m2m.pk])
def test_reverse_m2m(self):
reverse_m2m = ReverseM2M.objects.create(number=12)
m2m = M2M.objects.create(name='test')
m2m.many.add(reverse_m2m)
mapping = serializer_tools.get_id_mappings(ReverseM2MSerializer(instance=reverse_m2m))
self.assertEqual(list(mapping['m2m_set']), [m2m.pk])
def test_fk(self):
reverse_fk = ReverseFk.objects.create(name='test')
fk = Fk.objects.create(number=99, reverse_fk=reverse_fk)
mapping = serializer_tools.get_id_mappings(FKSerializer(instance=fk))
self.assertEqual(mapping['reverse_fk'], reverse_fk.pk)
def test_reverse_fk(self):
reverse_fk = ReverseFk.objects.create(name='test')
fk = Fk.objects.create(number=99, reverse_fk=reverse_fk)
mapping = serializer_tools.get_id_mappings(ReverseFKSerializer(instance=reverse_fk))
self.assertEqual(list(mapping['fk_set']), [fk.pk])
def test_one2one(self):
ro2o = ReverseO2O.objects.create(bar='another test')
o2o = O2O.objects.create(foo='test', reverse_o2o=ro2o)
mapping = serializer_tools.get_id_mappings(O2OSerializer(instance=o2o))
self.assertEqual(mapping['reverse_o2o'], ro2o.pk)
def test_reverse_one2one(self):
ro2o = ReverseO2O.objects.create(bar='another test')
o2o = O2O.objects.create(foo='test', reverse_o2o=ro2o)
mapping = serializer_tools.get_id_mappings(ReverseO2OSerializer(instance=ro2o))
self.assertEqual(mapping['o2o'], o2o.pk)
| bsd-3-clause | -2,531,740,457,972,185,600 | 28.398305 | 94 | 0.686077 | false | 3.278828 | true | false | false |
hydroshare/hydroshare | hs_rest_api/resources/file_metadata.py | 1 | 8812 | import os
from django.core.exceptions import ObjectDoesNotExist
from rest_framework.response import Response
from rest_framework import generics
from rest_framework import serializers
from rest_framework.exceptions import APIException, NotFound
from hs_core.models import ResourceFile
from hs_rest_api.permissions import CanViewOrEditResourceMetadata
from hs_core import hydroshare
# TODO: Once we upgrade past Django Rest Framework 3.3, this won't be necessary
class JSONSerializerField(serializers.Field):
""" Serializer for JSONField -- required to make field writable"""
def to_internal_value(self, data):
return data
def to_representation(self, value):
return value
class FileMetaDataSerializer(serializers.Serializer):
title = serializers.CharField(required=False)
keywords = JSONSerializerField(required=False)
spatial_coverage = JSONSerializerField(required=False)
extra_metadata = JSONSerializerField(required=False)
temporal_coverage = JSONSerializerField(required=False)
logical_file = JSONSerializerField(required=False)
class FileMetaDataRetrieveUpdateDestroy(generics.RetrieveUpdateDestroyAPIView):
serializer_class = FileMetaDataSerializer
allowed_methods = ('GET', 'PUT',)
permission_classes = (CanViewOrEditResourceMetadata,)
def get(self, request, pk, pathname):
"""
Get a resource file's metadata.
## Parameters
* `id` - alphanumeric uuid of the resource, i.e. cde01b3898c94cdab78a2318330cf795
* `pathname` - The pathname of the file
to get these
## Returns
```
{
"keywords": [
"keyword1",
"keyword2"
],
"spatial_coverage": {
"units": "Decimal degrees",
"east": -84.0465,
"north": 49.6791,
"name": "12232",
"projection": "WGS 84 EPSG:4326"
},
"extra_metadata": {
"extended1": "one"
},
"temporal_coverage": {
"start": "2018-02-22",
"end": "2018-02-24"
},
"title": "File Metadata Title",
"logical_file": {}
}
```
"""
try:
resource_file = hydroshare.get_resource_file(pk, pathname)
logical_file = resource_file.logical_file
metadata = resource_file.metadata
except ObjectDoesNotExist:
# Backwards compatibility for file_id
try:
resource_file = ResourceFile.objects.get(id=pathname)
logical_file = resource_file.logical_file
metadata = resource_file.metadata
except Exception:
# is it a folder?
resource = hydroshare.get_resource_by_shortkey(pk)
dir_path = pk + os.path.join("/data/contents/", pathname)
logical_file = resource.get_folder_aggregation_object(dir_path)
metadata = None
title = logical_file.dataset_name \
if logical_file else ""
keywords = metadata.keywords \
if metadata else []
spatial_coverage = metadata.spatial_coverage.value \
if metadata and metadata.spatial_coverage else {}
extra_metadata = metadata.extra_metadata \
if metadata else {}
temporal_coverage = metadata.temporal_coverage.value if \
metadata and metadata.temporal_coverage else {}
extra_data = logical_file.metadata.dict() \
if logical_file else {}
# TODO: How to leverage serializer for this?
return Response({
"title": title,
"keywords": keywords,
"spatial_coverage": spatial_coverage,
"extra_metadata": extra_metadata,
"temporal_coverage": temporal_coverage,
"logical_file": extra_data
})
def put(self, request, pk, pathname):
"""
Update a resource file's metadata
Accepts application/json encoding.
## Parameters
* `id` - alphanumeric uuid of the resource, i.e. cde01b3898c94cdab78a2318330cf795
* `pathname` - The pathname of the file
* `data` - see the "returns" section for formatting
## Returns
```
{
"keywords": [
"keyword1",
"keyword2"
],
"spatial_coverage": {
"units": "Decimal degrees",
"east": -84.0465,
"north": 49.6791,
"name": "12232",
"projection": "WGS 84 EPSG:4326"
},
"extra_metadata": {
"extended1": "one"
},
"temporal_coverage": {
"start": "2018-02-22",
"end": "2018-02-24"
},
"title": "File Metadata Title"
}
```
"""
file_serializer = FileMetaDataSerializer(request.data)
try:
title = file_serializer.data.pop("title", "")
try:
resource_file = hydroshare.get_resource_file(pk, pathname)
except ObjectDoesNotExist:
# Backwards compatibility for file_id
resource_file = ResourceFile.objects.get(id=pathname)
if resource_file is None:
raise NotFound("File {} in resource {} does not exist".format(pathname, pk))
resource_file.metadata.logical_file.dataset_name = title
resource_file.metadata.logical_file.save()
spatial_coverage = file_serializer.data.pop("spatial_coverage", None)
if spatial_coverage is not None:
# defaulting to point if not provided for backwards compatibility
type = spatial_coverage["type"] if "type" in spatial_coverage else "point"
if resource_file.metadata.spatial_coverage is not None:
cov_id = resource_file.metadata.spatial_coverage.id
resource_file.metadata.update_element('coverage',
cov_id,
type=type,
value=spatial_coverage)
elif resource_file.metadata.spatial_coverage is None:
resource_file.metadata.create_element('coverage', type=type,
value=spatial_coverage)
temporal_coverage = file_serializer.data.pop("temporal_coverage", None)
if temporal_coverage is not None:
if resource_file.metadata.temporal_coverage is not None:
cov_id = resource_file.metadata.temporal_coverage.id
resource_file.metadata.update_element('coverage',
cov_id,
type='period',
value=temporal_coverage)
elif resource_file.metadata.temporal_coverage is None:
resource_file.metadata.create_element('coverage', type="period",
value=temporal_coverage)
keywords = file_serializer.data.pop("keywords", None)
if keywords is not None:
resource_file.metadata.keywords = keywords
extra_metadata = file_serializer.data.pop("extra_metadata", None)
if extra_metadata is not None:
resource_file.metadata.extra_metadata = extra_metadata
resource_file.metadata.save()
except Exception as e:
raise APIException(e)
# TODO: How to leverage serializer for this?
title = resource_file.metadata.logical_file.dataset_name \
if resource_file.metadata.logical_file else ""
keywords = resource_file.metadata.keywords \
if resource_file.metadata else []
spatial_coverage = resource_file.metadata.spatial_coverage.value \
if resource_file.metadata.spatial_coverage else {}
extra_metadata = resource_file.metadata.extra_metadata \
if resource_file.metadata else {}
temporal_coverage = resource_file.metadata.temporal_coverage.value if \
resource_file.metadata.temporal_coverage else {}
return Response({
"title": title,
"keywords": keywords,
"spatial_coverage": spatial_coverage,
"extra_metadata": extra_metadata,
"temporal_coverage": temporal_coverage
})
| bsd-3-clause | -4,462,537,700,914,126,300 | 38.515695 | 92 | 0.556627 | false | 4.847085 | false | false | false |
swift-lang/swift-e-lab | parsl/tests/sites/test_multinode_mpi.py | 1 | 1473 | import argparse
import pytest
import parsl
from parsl.app.app import App
from parsl.tests.configs.cori_ipp_multinode import config
from parsl.tests.conftest import load_dfk
parsl.clear()
parsl.load(config)
parsl.set_stream_logger()
@App("python")
def python_app_slow(duration):
import platform
import time
time.sleep(duration)
return "Hello from {0}".format(platform.uname())
@pytest.mark.skip('not asserting anything')
def test_python_remote(count=10):
"""Run with no delay"""
fus = []
for i in range(0, count):
fu = python_app_slow(0)
fus.extend([fu])
for fu in fus:
print(fu.result())
@pytest.mark.skip('not asserting anything')
def test_python_remote_slow(count=20):
fus = []
for i in range(0, count):
fu = python_app_slow(count)
fus.extend([fu])
for fu in fus:
print(fu.result())
@App("bash")
def bash_mpi_app(stdout=None, stderr=None):
return """ls -thor
mpi_hello
"""
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--config", default='local',
help="Path to configuration file to run")
args = parser.parse_args()
load_dfk(args.config)
items = []
for i in range(0, 4):
x = bash_mpi_app(stdout="parsl.{0}.out".format(i),
stderr="parsl.{0}.err".format(i))
items.extend([x])
for i in items:
print(i.result())
| apache-2.0 | -7,146,927,764,417,135,000 | 20.347826 | 65 | 0.604888 | false | 3.295302 | true | false | false |
1905410/Misago | misago/readtracker/tests/test_readtracker.py | 1 | 9625 | from datetime import timedelta
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.utils import timezone
from misago.acl import add_acl
from misago.categories.models import Category
from misago.threads import testutils
from misago.users.models import AnonymousUser
from .. import categoriestracker, threadstracker
class ReadTrackerTests(TestCase):
def setUp(self):
self.categories = list(Category.objects.all_categories()[:1])
self.category = self.categories[0]
User = get_user_model()
self.user = User.objects.create_user("Bob", "[email protected]", "Pass.123")
self.anon = AnonymousUser()
def post_thread(self, datetime):
return testutils.post_thread(
category=self.category,
started_on=datetime
)
class CategorysTrackerTests(ReadTrackerTests):
def test_anon_empty_category_read(self):
"""anon users content is always read"""
categoriestracker.make_read_aware(self.anon, self.categories)
self.assertIsNone(self.category.last_post_on)
self.assertTrue(self.category.is_read)
def test_anon_category_with_recent_reply_read(self):
"""anon users content is always read"""
categoriestracker.make_read_aware(self.anon, self.categories)
self.category.last_post_on = timezone.now()
self.assertTrue(self.category.is_read)
def test_empty_category_is_read(self):
"""empty category is read for signed in user"""
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
def test_make_read_aware_sets_read_flag_for_empty_category(self):
"""make_read_aware sets read flag on empty category"""
categoriestracker.make_read_aware(self.anon, self.categories)
self.assertTrue(self.category.is_read)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
def test_make_read_aware_sets_read_flag_for_category_with_old_thread(self):
"""make_read_aware sets read flag on category with old thread"""
self.category.last_post_on = self.user.joined_on - timedelta(days=1)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
def test_make_read_aware_sets_unread_flag_for_category_with_new_thread(self):
"""make_read_aware sets unread flag on category with new thread"""
self.category.last_post_on = self.user.joined_on + timedelta(days=1)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
def test_sync_record_for_empty_category(self):
"""sync_record sets read flag on empty category"""
add_acl(self.user, self.categories)
categoriestracker.sync_record(self.user, self.category)
self.user.categoryread_set.get(category=self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
def test_sync_record_for_category_with_old_thread_and_reply(self):
"""
sync_record sets read flag on category with old thread,
then changes flag to unread when new reply is posted
"""
self.post_thread(self.user.joined_on - timedelta(days=1))
add_acl(self.user, self.categories)
categoriestracker.sync_record(self.user, self.category)
self.user.categoryread_set.get(category=self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
thread = self.post_thread(self.user.joined_on + timedelta(days=1))
categoriestracker.sync_record(self.user, self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
def test_sync_record_for_category_with_new_thread(self):
"""
sync_record sets read flag on category with old thread,
then keeps flag to unread when new reply is posted
"""
self.post_thread(self.user.joined_on + timedelta(days=1))
add_acl(self.user, self.categories)
categoriestracker.sync_record(self.user, self.category)
self.user.categoryread_set.get(category=self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
self.post_thread(self.user.joined_on + timedelta(days=1))
categoriestracker.sync_record(self.user, self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
def test_sync_record_for_category_with_deleted_threads(self):
"""unread category reverts to read after its emptied"""
self.post_thread(self.user.joined_on + timedelta(days=1))
self.post_thread(self.user.joined_on + timedelta(days=1))
self.post_thread(self.user.joined_on + timedelta(days=1))
add_acl(self.user, self.categories)
categoriestracker.sync_record(self.user, self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
self.category.thread_set.all().delete()
self.category.synchronize()
self.category.save()
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
def test_sync_record_for_category_with_many_threads(self):
"""sync_record sets unread flag on category with many threads"""
self.post_thread(self.user.joined_on + timedelta(days=1))
self.post_thread(self.user.joined_on - timedelta(days=1))
self.post_thread(self.user.joined_on + timedelta(days=1))
self.post_thread(self.user.joined_on - timedelta(days=1))
add_acl(self.user, self.categories)
categoriestracker.sync_record(self.user, self.category)
self.user.categoryread_set.get(category=self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
self.post_thread(self.user.joined_on + timedelta(days=1))
categoriestracker.sync_record(self.user, self.category)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
def test_read_leaf_category(self):
"""read_category reads leaf category for user"""
categoriestracker.read_category(self.user, self.category)
self.assertTrue(self.user.categoryread_set.get(category=self.category))
def test_read_root_category(self):
"""read_category reads its subcategories for user"""
root_category = Category.objects.root_category()
categoriestracker.read_category(self.user, root_category)
child_read = self.user.categoryread_set.get(category=self.category)
self.assertTrue(child_read.last_read_on > timezone.now() -timedelta(seconds=3))
class ThreadsTrackerTests(ReadTrackerTests):
def setUp(self):
super(ThreadsTrackerTests, self).setUp()
self.thread = self.post_thread(timezone.now() - timedelta(days=10))
def reply_thread(self, is_hidden=False, is_unapproved=False):
self.post = testutils.reply_thread(
thread=self.thread,
is_hidden=is_hidden,
is_unapproved=is_unapproved,
posted_on=timezone.now()
)
return self.post
def test_thread_read_for_guest(self):
"""threads are always read for guests"""
threadstracker.make_read_aware(self.anon, self.thread)
self.assertTrue(self.thread.is_read)
self.reply_thread()
threadstracker.make_read_aware(self.anon, [self.thread])
self.assertTrue(self.thread.is_read)
def test_thread_read_for_user(self):
"""thread is read for user"""
threadstracker.make_read_aware(self.user, self.thread)
self.assertTrue(self.thread.is_read)
def test_thread_replied_unread_for_user(self):
"""replied thread is unread for user"""
self.reply_thread()
threadstracker.make_read_aware(self.user, self.thread)
self.assertFalse(self.thread.is_read)
def _test_thread_read(self):
"""thread read flag is set for user, then its set as unread by reply"""
self.reply_thread()
add_acl(self.user, self.categories)
threadstracker.make_read_aware(self.user, self.thread)
self.assertFalse(self.thread.is_read)
threadstracker.read_thread(self.user, self.thread, self.post)
threadstracker.make_read_aware(self.user, self.thread)
self.assertTrue(self.thread.is_read)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertTrue(self.category.is_read)
self.thread.last_post_on = timezone.now()
self.thread.save()
self.category.synchronize()
self.category.save()
self.reply_thread()
threadstracker.make_read_aware(self.user, self.thread)
self.assertFalse(self.thread.is_read)
categoriestracker.make_read_aware(self.user, self.categories)
self.assertFalse(self.category.is_read)
posts = [post for post in self.thread.post_set.order_by('id')]
threadstracker.make_posts_read_aware(self.user, self.thread, posts)
for post in posts[:-1]:
self.assertTrue(post.is_read)
self.assertFalse(posts[-1].is_read)
| gpl-2.0 | -4,961,946,293,181,603,000 | 39.783898 | 87 | 0.682494 | false | 3.576737 | true | false | false |
pakpoomton/CellmodellerShadow | CellModeller/Biophysics/BacterialModels/CLBacterium.py | 1 | 54289 | import sys
import math
import numpy
import pyopencl as cl
import pyopencl.array as cl_array
from pyopencl.array import vec
from pyopencl.elementwise import ElementwiseKernel
from pyopencl.reduction import ReductionKernel
import random
ct_map = {}
class CLBacterium:
"""A rigid body model of bacterial growth implemented using
OpenCL.
"""
def __init__(self, simulator,
max_substeps=8,
max_cells=2**15,
max_contacts=32,
max_planes=4,
max_sqs=64**2,
grid_spacing=5.0,
muA=1.0,
gamma=10.0,
cgs_tol=1e-3,
reg_param=0.2,
jitter_z=True,
alternate_divisions=False):
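        """Set up parameters and allocate model state.

        A sketch of the parameters (descriptions inferred from usage below;
        the physical readings of muA and gamma in particular are assumptions):
        max_substeps  -- max integration substeps per simulation step
        max_cells     -- capacity of the per-cell device buffers
        max_contacts  -- max contacts stored per cell
        max_planes    -- max constraint planes
        max_sqs       -- max grid squares for the broad-phase contact search
        grid_spacing  -- side length of those grid squares
        muA           -- drag (viscosity-like) coefficient
        gamma         -- stiffness coefficient coupling growth and contacts
        cgs_tol       -- tolerance of the conjugate-gradient contact solve
        reg_param     -- regularisation added to the contact solve
        jitter_z      -- give daughter cells a small random z perturbation
        alternate_divisions -- alternate the division axis between generations
        """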
self.frame_no = 0
self.simulator = simulator
self.regulator = None
self.max_cells = max_cells
self.max_contacts = max_contacts
self.max_planes = max_planes
self.max_sqs = max_sqs
self.grid_spacing = grid_spacing
self.muA = muA
self.gamma = gamma
self.cgs_tol = cgs_tol
self.reg_param = numpy.float32(reg_param)
self.max_substeps = max_substeps
self.n_cells = 0
self.n_cts = 0
self.n_planes = 0
self.next_id = 0
self.grid_x_min = 0
self.grid_x_max = 0
self.grid_y_min = 0
self.grid_y_max = 0
self.n_sqs = 0
self.init_cl()
self.init_kernels()
self.init_data()
self.parents = {}
self.jitter_z = jitter_z
self.alternate_divisions = alternate_divisions
self.maxVel = 1.0
# Biophysical Model interface
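    # (the methods in this section form the interface the CellModeller
    #  simulator drives: reset, setRegulator, addCell, addPlane, divide, ...)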
    def reset(self):
        self.n_cells = 0
        self.n_cts = 0
        self.n_planes = 0
def setRegulator(self, regulator):
self.regulator = regulator
    def addCell(self, cellState, pos=(0,0,0), dir=(1,0,0), len=4.0, rad=0.5):
        # NB. 'dir' and 'len' shadow Python builtins; the names are kept so
        # existing callers that pass them as keyword arguments keep working
        i = cellState.idx
        self.n_cells += 1
        cid = cellState.id
        # pad the 3-component pos/dir out to float4 for the device buffers
        self.cell_centers[i] = tuple(pos+(0,))
        self.cell_dirs[i] = tuple(dir+(0,))
self.cell_lens[i] = len
self.cell_rads[i] = rad
self.initCellState(cellState)
self.set_cells()
self.calc_cell_geom() # cell needs a volume
def addPlane(self, pt, norm, coeff):
pidx = self.n_planes
self.n_planes += 1
self.plane_pts[pidx] = tuple(pt)+(0,)
self.plane_norms[pidx] = tuple(norm) + (0,)
self.plane_coeffs[pidx] = coeff
self.set_planes()
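    # Example (a sketch): confine the colony to the z>=0 half-space with a
    # plane through the origin; the exact role of coeff is defined in the
    # contact kernels:
    #   model.addPlane((0,0,0), (0,0,1), 1.0)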
    def hasNeighbours(self):
        # this model does not maintain explicit neighbour lists
        return False
def divide(self, parentState, daughter1State, daughter2State, *args, **kwargs):
self.divide_cell(parentState.idx, daughter1State.idx, daughter2State.idx)
# Initialise cellState data
self.initCellState(daughter1State)
self.initCellState(daughter2State)
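    # (divide_cell, defined elsewhere in this class, performs the geometric
    #  split of the parent's buffer slot into the two daughters' slots)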
    def init_cl(self):
        # the OpenCL context and queue are owned by the simulator; without
        # one, init_kernels/init_data below cannot run
        if self.simulator:
            (self.context, self.queue) = self.simulator.getOpenCL()
    def init_kernels(self):
        """Set up the OpenCL kernels."""
        # NB. the kernel source path is relative to the working directory
        # (assumed to be the CellModeller root)
        with open('CellModeller/Biophysics/BacterialModels/CLBacterium.cl', 'r') as f:
            kernel_src = f.read()
        # cache_dir=False disables pyopencl's kernel binary cache, so edits
        # to the .cl source always take effect
        self.program = cl.Program(self.context, kernel_src).build(cache_dir=False)
# Some kernels that seem like they should be built into pyopencl...
self.vclearf = ElementwiseKernel(self.context, "float8 *v", "v[i]=0.0", "vecclearf")
self.vcleari = ElementwiseKernel(self.context, "int *v", "v[i]=0", "veccleari")
self.vadd = ElementwiseKernel(self.context, "float8 *res, const float8 *in1, const float8 *in2",
"res[i] = in1[i] + in2[i]", "vecadd")
self.vsub = ElementwiseKernel(self.context, "float8 *res, const float8 *in1, const float8 *in2",
"res[i] = in1[i] - in2[i]", "vecsub")
self.vaddkx = ElementwiseKernel(self.context,
"float8 *res, const float k, const float8 *in1, const float8 *in2",
"res[i] = in1[i] + k*in2[i]", "vecaddkx")
self.vsubkx = ElementwiseKernel(self.context,
"float8 *res, const float k, const float8 *in1, const float8 *in2",
"res[i] = in1[i] - k*in2[i]", "vecsubkx")
self.vmax = ReductionKernel(self.context, numpy.float32, neutral="0",
reduce_expr="a>b ? a : b", map_expr="length(x[i])",
arguments="__global float4 *x")
# cell geometry kernels
self.calc_cell_area = ElementwiseKernel(self.context, "float* res, float* r, float* l",
"res[i] = 2.f*3.1415927f*r[i]*(2.f*r[i]+l[i])", "cell_area_kern")
self.calc_cell_vol = ElementwiseKernel(self.context, "float* res, float* r, float* l",
"res[i] = 3.1415927f*r[i]*r[i]*(2.f*r[i]+l[i])", "cell_vol_kern")
        # A dot product as a sum of float4 dot products -
        # i.e. like flattening the float8 vectors into long float vectors
        # and then computing the dot product
        # NB. some OpenCL implementations do not provide dot(float8,float8),
        # so split into float4s
self.vdot = ReductionKernel(self.context, numpy.float32, neutral="0",
reduce_expr="a+b", map_expr="dot(x[i].s0123,y[i].s0123)+dot(x[i].s4567,y[i].s4567)",
arguments="__global float8 *x, __global float8 *y")
def init_data(self):
"""Set up the data OpenCL will store on the device."""
# cell data
cell_geom = (self.max_cells,)
self.cell_centers = numpy.zeros(cell_geom, vec.float4)
self.cell_centers_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.cell_dirs = numpy.zeros(cell_geom, vec.float4)
self.cell_dirs_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.cell_lens = numpy.zeros(cell_geom, numpy.float32)
self.cell_lens_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.pred_cell_centers = numpy.zeros(cell_geom, vec.float4)
self.pred_cell_centers_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.pred_cell_dirs = numpy.zeros(cell_geom, vec.float4)
self.pred_cell_dirs_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.pred_cell_lens = numpy.zeros(cell_geom, numpy.float32)
self.pred_cell_lens_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_rads = numpy.zeros(cell_geom, numpy.float32)
self.cell_rads_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_sqs = numpy.zeros(cell_geom, numpy.int32)
self.cell_sqs_dev = cl_array.zeros(self.queue, cell_geom, numpy.int32)
self.cell_n_cts = numpy.zeros(cell_geom, numpy.int32)
self.cell_n_cts_dev = cl_array.zeros(self.queue, cell_geom, numpy.int32)
self.cell_dcenters = numpy.zeros(cell_geom, vec.float4)
self.cell_dcenters_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.cell_dangs = numpy.zeros(cell_geom, vec.float4)
self.cell_dangs_dev = cl_array.zeros(self.queue, cell_geom, vec.float4)
self.cell_dlens = numpy.zeros(cell_geom, numpy.float32)
self.cell_dlens_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_target_dlens_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_growth_rates = numpy.zeros(cell_geom, numpy.float32)
# cell geometry calculated from l and r
self.cell_areas_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_vols_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
self.cell_old_vols_dev = cl_array.zeros(self.queue, cell_geom, numpy.float32)
# gridding
self.sq_inds = numpy.zeros((self.max_sqs,), numpy.int32)
self.sq_inds_dev = cl_array.zeros(self.queue, (self.max_sqs,), numpy.int32)
self.sorted_ids = numpy.zeros(cell_geom, numpy.int32)
self.sorted_ids_dev = cl_array.zeros(self.queue, cell_geom, numpy.int32)
# constraint planes
plane_geom = (self.max_planes,)
self.plane_pts = numpy.zeros(plane_geom, vec.float4)
self.plane_pts_dev = cl_array.zeros(self.queue, plane_geom, vec.float4)
self.plane_norms = numpy.zeros(plane_geom, vec.float4)
self.plane_norms_dev = cl_array.zeros(self.queue, plane_geom, vec.float4)
self.plane_coeffs = numpy.zeros(plane_geom, numpy.float32)
self.plane_coeffs_dev = cl_array.zeros(self.queue, plane_geom, numpy.float32)
# contact data
ct_geom = (self.max_cells, self.max_contacts)
self.ct_frs = numpy.zeros(ct_geom, numpy.int32)
self.ct_frs_dev = cl_array.zeros(self.queue, ct_geom, numpy.int32)
self.ct_tos = numpy.zeros(ct_geom, numpy.int32)
self.ct_tos_dev = cl_array.zeros(self.queue, ct_geom, numpy.int32)
self.ct_dists = numpy.zeros(ct_geom, numpy.float32)
self.ct_dists_dev = cl_array.zeros(self.queue, ct_geom, numpy.float32)
self.ct_pts = numpy.zeros(ct_geom, vec.float4)
self.ct_pts_dev = cl_array.zeros(self.queue, ct_geom, vec.float4)
self.ct_norms = numpy.zeros(ct_geom, vec.float4)
self.ct_norms_dev = cl_array.zeros(self.queue, ct_geom, vec.float4)
self.ct_stiff_dev = cl_array.zeros(self.queue, ct_geom, numpy.float32)
# where the contacts pointing to this cell are collected
self.cell_tos = numpy.zeros(ct_geom, numpy.int32)
self.cell_tos_dev = cl_array.zeros(self.queue, ct_geom, numpy.int32)
self.n_cell_tos = numpy.zeros(cell_geom, numpy.int32)
self.n_cell_tos_dev = cl_array.zeros(self.queue, cell_geom, numpy.int32)
# the constructed 'matrix'
mat_geom = (self.max_cells*self.max_contacts,)
self.ct_inds = numpy.zeros(mat_geom, numpy.int32)
self.ct_inds_dev = cl_array.zeros(self.queue, mat_geom, numpy.int32)
self.ct_reldists = numpy.zeros(mat_geom, numpy.float32)
self.ct_reldists_dev = cl_array.zeros(self.queue, mat_geom, numpy.float32)
self.fr_ents = numpy.zeros(mat_geom, vec.float8)
self.fr_ents_dev = cl_array.zeros(self.queue, mat_geom, vec.float8)
self.to_ents = numpy.zeros(mat_geom, vec.float8)
self.to_ents_dev = cl_array.zeros(self.queue, mat_geom, vec.float8)
# vectors and intermediates
self.deltap = numpy.zeros(cell_geom, vec.float8)
self.deltap_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
self.Mx = numpy.zeros(mat_geom, numpy.float32)
self.Mx_dev = cl_array.zeros(self.queue, mat_geom, numpy.float32)
self.MTMx = numpy.zeros(cell_geom, vec.float8)
self.MTMx_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
self.Minvx_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
# CGS intermediates
self.p_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
self.Ap_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
self.res_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
self.rhs_dev = cl_array.zeros(self.queue, cell_geom, vec.float8)
def load_from_cellstates(self, cell_states):
for (cid,cs) in cell_states.items():
i = cs.idx
self.cell_centers[i] = tuple(cs.pos)+(0,)
self.cell_dirs[i] = tuple(cs.dir)+(0,)
self.cell_rads[i] = cs.radius
self.cell_lens[i] = cs.length
self.n_cells = len(cell_states)
self.set_cells()
def load_test_data(self):
import CellModeller.Biophysics.BacterialModels.CLData as data
self.cell_centers.put(range(len(data.pos)), data.pos)
self.cell_dirs.put(range(len(data.dirs)), data.dirs)
self.cell_lens.put(range(len(data.lens)), data.lens)
self.cell_rads.put(range(len(data.rads)), data.rads)
self.n_cells = data.n_cells
self.set_cells()
def load_1_cell(self):
self.cell_centers.put([0], [(0,0,0,0)])
self.cell_dirs.put([0], [(1,0,0,0)])
self.cell_lens.put([0], [2.0])
self.cell_rads.put([0], [0.5])
self.n_cells = 1
self.set_cells()
def load_2_cells(self):
root2 = numpy.sqrt(2.0)
self.cell_centers.put([0,1], [(-root2-0.5, 0, 0, 0), (root2+0.5, 0, 0, 0)])
self.cell_dirs.put([0,1], [(root2/2.0, root2/2.0, 0, 0), (-root2/2.0, root2/2.0, 0, 0)])
self.cell_lens.put([0,1], [4.0, 4.0])
self.cell_rads.put([0,1], [0.5, 0.5])
self.n_cells = 2
self.set_cells()
def load_3_cells(self):
root2 = numpy.sqrt(2.0)
self.cell_centers.put([0,1,2], [(-root2-0.5, 0, 0, 0), (root2+0.5, 0, 0, 0), (root2+0.5+3.3, 0, 0, 0)])
self.cell_dirs.put([0,1,2], [(root2/2.0, root2/2.0, 0, 0), (-root2/2.0, root2/2.0, 0, 0), (1, 0, 0, 0)])
self.cell_lens.put([0,1,2], [3.0, 3.0, 3.0])
self.cell_rads.put([0,1,2], [0.5, 0.5, 0.5])
self.n_cells = 3
self.set_cells()
def load_3_cells_1_plane(self):
root2 = numpy.sqrt(2.0)
self.cell_centers.put([0,1,2], [(-root2-0.5, 0, 0, 0), (root2+0.5, 0, 0, 0), (root2+0.5+3.3, 0, 0, 0)])
self.cell_dirs.put([0,1,2], [(root2/2.0, root2/2.0, 0, 0), (-root2/2.0, root2/2.0, 0, 0), (1, 0, 0, 0)])
self.cell_lens.put([0,1,2], [3.0, 3.0, 3.0])
self.cell_rads.put([0,1,2], [0.5, 0.5, 0.5])
self.n_cells = 3
self.set_cells()
self.n_planes = 1
self.plane_pts.put([0], [(0, 0, -0.5, 0)])
self.plane_norms.put([0], [(0, 0, 1, 0)])
self.plane_coeffs.put([0], [0.5])
self.set_planes()
def load_3_cells_2_planes(self):
root2 = numpy.sqrt(2.0)
self.cell_centers.put([0,1,2], [(-root2-0.5, 0, 0, 0), (root2+0.5, 0, 0, 0), (root2+0.5+3.3, 0, 0, 0)])
self.cell_dirs.put([0,1,2], [(root2/2.0, root2/2.0, 0, 0), (-root2/2.0, root2/2.0, 0, 0), (1, 0, 0, 0)])
self.cell_lens.put([0,1,2], [3.0, 3.0, 3.0])
self.cell_rads.put([0,1,2], [0.5, 0.5, 0.5])
self.n_cells = 3
self.set_cells()
self.n_planes = 2
self.plane_pts.put([0,1], [(0, 0, -0.5, 0), (0, 0, 0.5, 0)])
self.plane_norms.put([0,1], [(0, 0, 1, 0), (0, 0, -1, 0)])
self.plane_coeffs.put([0,1], [0.5, 0.1])
self.set_planes()
def load_1_cell_1_plane(self):
self.cell_centers.put([0], [(0,0,0,0)])
self.cell_dirs.put([0], [(1,0,0,0)])
self.cell_lens.put([0], [3.0])
self.cell_rads.put([0], [0.5])
self.n_cells = 1
self.set_cells()
self.plane_pts.put([0], [(4, 0, 0, 0)])
self.plane_norms.put([0], [(-1, 0, 0, 0)])
self.plane_coeffs.put([0], [0.5])
self.n_planes = 1
self.set_planes()
def load_1024_cells(self):
d = 32
for i in range(-d/2,d/2):
for j in range(-d/2,d/2):
n = (i+d/2)*d + (j+d/2)
x = i*3.5 + random.uniform(-0.05,0.05)
y = j*2.0 + random.uniform(-0.05,0.05)
th = random.uniform(-0.15, 0.15)
dir_x = math.cos(th)
dir_y = math.sin(th)
self.cell_centers.put([n], [(x, y, 0, 0)])
self.cell_dirs.put([n], [(dir_x, dir_y, 0, 0)])
self.cell_lens.put([n], [2])
                self.cell_rads.put([n], [0.5])
self.n_cells = d*d
self.set_cells()
    # NB: this second definition overrides load_from_cellstates defined above;
    # it fills the host arrays via numpy.put instead of direct indexing.
    def load_from_cellstates(self, cell_states):
for (id, cs) in cell_states.items():
self.cell_centers.put([cs.idx], [tuple(cs.pos)+(0,)])
self.cell_dirs.put([cs.idx], [tuple(cs.dir)+(0,)])
self.cell_lens.put([cs.idx], [cs.length])
            self.cell_rads.put([cs.idx], [cs.radius])
self.n_cells = len(cell_states)
self.set_cells()
def get_cells(self):
"""Copy cell centers, dirs, lens, and rads from the device."""
self.cell_centers = self.cell_centers_dev.get()
self.cell_dirs = self.cell_dirs_dev.get()
self.cell_lens = self.cell_lens_dev.get()
self.cell_rads = self.cell_rads_dev.get()
self.cell_dlens = self.cell_dlens_dev.get()
self.cell_dcenters = self.cell_dcenters_dev.get()
self.cell_dangs = self.cell_dangs_dev.get()
def set_cells(self):
"""Copy cell centers, dirs, lens, and rads to the device from local."""
self.cell_centers_dev.set(self.cell_centers)
self.cell_dirs_dev.set(self.cell_dirs)
self.cell_lens_dev.set(self.cell_lens)
self.cell_rads_dev.set(self.cell_rads)
self.cell_dlens_dev.set(self.cell_dlens)
self.cell_dcenters_dev.set(self.cell_dcenters)
self.cell_dangs_dev.set(self.cell_dangs)
def set_planes(self):
"""Copy plane pts, norms, and coeffs to the device from local."""
self.plane_pts_dev.set(self.plane_pts)
self.plane_norms_dev.set(self.plane_norms)
self.plane_coeffs_dev.set(self.plane_coeffs)
def get_cts(self):
"""Copy contact froms, tos, dists, pts, and norms from the device."""
self.ct_frs = self.ct_frs_dev.get()
self.ct_tos = self.ct_tos_dev.get()
self.ct_dists = self.ct_dists_dev.get()
self.ct_pts = self.ct_pts_dev.get()
self.ct_norms = self.ct_norms_dev.get()
self.cell_n_cts = self.cell_n_cts_dev.get()
def matrixTest(self):
x_dev = cl_array.zeros(self.queue, (self.n_cells,), vec.float8)
Ax_dev = cl_array.zeros(self.queue, (self.n_cells,), vec.float8)
opstring = ''
for i in range(self.n_cells):
x = numpy.zeros((self.n_cells,), vec.float8)
for j in range(7):
if j>0:
x[i][j-1]=0.0
x[i][j]=1.0
x_dev.set(x)
self.calculate_Ax(Ax_dev, x_dev)
Ax = Ax_dev.get()
for ii in range(self.n_cells):
for jj in range(7):
opstring += str(Ax[ii][jj])
if ii!=self.n_cells-1 or jj!=6:
opstring = opstring + '\t'
opstring = opstring + '\n'
print "MTM"
print opstring
open('CellModeller/Biophysics/BacterialModels/matrix.mat', 'w').write(opstring)
def dump_cell_data(self, n):
import cPickle
filename = 'data/data-%04i.pickle'%n
outfile = open(filename, 'wb')
        data = (self.n_cells,
                self.cell_centers_dev.get(),
                self.cell_dirs_dev.get(),
                self.cell_lens_dev.get(),
                self.cell_rads_dev.get(),
                self.parents)  # no trailing comma: pickle the tuple itself
        cPickle.dump(data, outfile, protocol=-1)
def step(self, dt):
"""Step forward dt units of time.
Assumes that:
cell_centers is up to date when it starts.
"""
self.set_cells()
# Take dt/10 because this was what worked with EdgeDetector, need to
# make timescales consistent at some point
dt = dt*0.1
# Choose good time-step for biophysics to work nicely, then do multiple
# ticks to integrate over dt
#delta_t = max(0.05, 0.25/max(self.maxVel,1.0)) #0.1/math.sqrt(self.n_cells)
#delta_t = 0.7/math.sqrt(self.n_cells)
#delta_t = 5*0.1/self.n_cells
delta_t = 0.005
n_ticks = int(math.ceil(dt/delta_t))
actual_dt = dt / float(n_ticks)
#print 'delta_t %f nticks %f actual_dt %f'%(delta_t,n_ticks,actual_dt)
for i in range(n_ticks):
self.tick(actual_dt)
self.frame_no += 1
if self.frame_no % 10 == 0:
#self.dump_cell_data(frame_no/100)
print '% 8i % 8i cells % 8i contacts' % (self.frame_no, self.n_cells, self.n_cts)
# pull cells from the device and update simulator
if self.simulator:
self.get_cells()
for state in self.simulator.cellStates.values():
self.updateCellState(state)
def tick(self, dt):
# set target dlens (taken from growth rates set by updateCellStates)
#self.cell_target_dlens_dev.set(dt*self.cell_growth_rates)
#self.cell_dlens_dev.set(dt*self.cell_dlens)
self.cell_dlens_dev.set(dt*self.cell_growth_rates)
# redefine gridding based on the range of cell positions
self.cell_centers = self.cell_centers_dev.get()
self.update_grid() # we assume local cell_centers is current
# get each cell into the correct sq and retrieve from the device
self.bin_cells()
# sort cells and find sq index starts in the list
self.cell_sqs = self.cell_sqs_dev.get() # get updated cell sqs
self.sort_cells()
self.sorted_ids_dev.set(self.sorted_ids) # push changes to the device
self.sq_inds_dev.set(self.sq_inds)
new_cts = 1
self.n_cts = 0
self.vcleari(self.cell_n_cts_dev) # clear the accumulated contact count
i=0
while new_cts>0 and i<self.max_substeps:
old_n_cts = self.n_cts
self.predict()
# find all contacts
self.find_contacts()
# place 'backward' contacts in cells
self.collect_tos()
new_cts = self.n_cts - old_n_cts
if new_cts>0 or i==0:
self.build_matrix() # Calculate entries of the matrix
#print "max cell contacts = %i"%cl_array.max(self.cell_n_cts_dev).get()
self.CGSSolve() # invert MTMx to find deltap
self.add_impulse()
i += 1
# Calculate estimated max cell velocity
#self.maxVel = self.vmax(self.cell_dcenters_dev).get() + cl_array.max(self.cell_dlens_dev).get()
#print "maxVel = " + str(self.maxVel)
self.integrate()
self.calc_cell_geom()
def initCellState(self, state):
cid = state.id
i = state.idx
state.pos = [self.cell_centers[i][j] for j in range(3)]
state.dir = [self.cell_dirs[i][j] for j in range(3)]
state.radius = self.cell_rads[i]
state.length = self.cell_lens[i]
state.volume = state.length # TO DO: do something better here
pa = numpy.array(state.pos)
da = numpy.array(state.dir)
state.ends = (pa-da*state.length*0.5, pa+da*state.length*0.5)
state.strainRate = state.growthRate/state.length
self.cell_dlens[i] = state.growthRate
state.startVol = state.volume
def updateCellState(self, state):
cid = state.id
i = state.idx
state.strainRate = self.cell_dlens[i]/state.length
state.pos = [self.cell_centers[i][j] for j in range(3)]
state.dir = [self.cell_dirs[i][j] for j in range(3)]
state.radius = self.cell_rads[i]
state.length = self.cell_lens[i]
state.volume = state.length # TO DO: do something better here
pa = numpy.array(state.pos)
da = numpy.array(state.dir)
state.ends = (pa-da*state.length*0.5, pa+da*state.length*0.5)
# Length vel is linearisation of exponential growth
self.cell_growth_rates[i] = state.growthRate*state.length
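        # Sketch of the linearisation: exponential growth dl/dt = g*l is
        # integrated per tick as dl ~= (g*l)*dt, hence the stored rate g*l.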
def update_grid(self):
"""Update our grid_(x,y)_min, grid_(x,y)_max, and n_sqs.
Assumes that our copy of cell_centers is current.
"""
coords = self.cell_centers.view(numpy.float32).reshape((self.max_cells, 4))
x_coords = coords[:,0]
min_x_coord = x_coords.min()
max_x_coord = x_coords.max()
self.grid_x_min = int(math.floor(min_x_coord / self.grid_spacing))
self.grid_x_max = int(math.ceil(max_x_coord / self.grid_spacing))
if self.grid_x_min == self.grid_x_max:
self.grid_x_max += 1
y_coords = coords[:,1]
min_y_coord = y_coords.min()
max_y_coord = y_coords.max()
self.grid_y_min = int(math.floor(min_y_coord / self.grid_spacing))
self.grid_y_max = int(math.ceil(max_y_coord / self.grid_spacing))
if self.grid_y_min == self.grid_y_max:
self.grid_y_max += 1
self.n_sqs = (self.grid_x_max-self.grid_x_min)*(self.grid_y_max-self.grid_y_min)
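        # Worked example (hypothetical values): spacing 5.0 with x in [-7, 12]
        # gives grid_x_(min, max) = (-2, 3); y in [0, 4] gives (0, 1);
        # so n_sqs = (3 - -2) * (1 - 0) = 5 grid squares.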
def bin_cells(self):
"""Call the bin_cells kernel.
Assumes cell_centers is current on the device.
Calculates cell_sqs.
"""
self.program.bin_cells(self.queue,
(self.n_cells,),
None,
numpy.int32(self.grid_x_min),
numpy.int32(self.grid_x_max),
numpy.int32(self.grid_y_min),
numpy.int32(self.grid_y_max),
numpy.float32(self.grid_spacing),
self.cell_centers_dev.data,
self.cell_sqs_dev.data).wait()
def sort_cells(self):
"""Sort the cells by grid square and find the start of each
grid square's cells in that list.
Assumes that the local copy of cell_sqs is current.
Calculates local sorted_ids and sq_inds.
"""
self.sorted_ids.put(numpy.arange(self.n_cells), numpy.argsort(self.cell_sqs[:self.n_cells]))
self.sorted_ids_dev.set(self.sorted_ids)
# find the start of each sq in the list of sorted cell ids and send to the device
sorted_sqs = numpy.sort(self.cell_sqs[:self.n_cells])
self.sq_inds.put(numpy.arange(self.n_sqs), numpy.searchsorted(sorted_sqs, numpy.arange(self.n_sqs), side='left'))
self.sq_inds_dev.set(self.sq_inds)
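        # Worked example (hypothetical): cell_sqs = [2, 0, 2, 1] sorts to
        # sorted_ids = [1, 3, 0, 2]; with n_sqs = 3, searchsorted yields
        # sq_inds = [0, 1, 2], the start of each square's run in sorted_ids.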
def find_contacts(self, predict=True):
"""Call the find_contacts kernel.
Assumes that cell_centers, cell_dirs, cell_lens, cell_rads,
cell_sqs, cell_dcenters, cell_dlens, cell_dangs,
sorted_ids, and sq_inds are current on the device.
Calculates cell_n_cts, ct_frs, ct_tos, ct_dists, ct_pts,
ct_norms, ct_reldists, and n_cts.
"""
if predict:
centers = self.pred_cell_centers_dev
dirs = self.pred_cell_dirs_dev
lens = self.pred_cell_lens_dev
else:
centers = self.cell_centers_dev
dirs = self.cell_dirs_dev
lens = self.cell_lens_dev
self.program.find_plane_contacts(self.queue,
(self.n_cells,),
None,
numpy.int32(self.max_cells),
numpy.int32(self.max_contacts),
numpy.int32(self.n_planes),
self.plane_pts_dev.data,
self.plane_norms_dev.data,
self.plane_coeffs_dev.data,
centers.data,
dirs.data,
lens.data,
self.cell_rads_dev.data,
self.cell_n_cts_dev.data,
self.ct_frs_dev.data,
self.ct_tos_dev.data,
self.ct_dists_dev.data,
self.ct_pts_dev.data,
self.ct_norms_dev.data,
self.ct_reldists_dev.data,
self.ct_stiff_dev.data).wait()
self.program.find_contacts(self.queue,
(self.n_cells,),
None,
numpy.int32(self.max_cells),
numpy.int32(self.n_cells),
numpy.int32(self.grid_x_min),
numpy.int32(self.grid_x_max),
numpy.int32(self.grid_y_min),
numpy.int32(self.grid_y_max),
numpy.int32(self.n_sqs),
numpy.int32(self.max_contacts),
centers.data,
dirs.data,
lens.data,
self.cell_rads_dev.data,
self.cell_sqs_dev.data,
self.sorted_ids_dev.data,
self.sq_inds_dev.data,
self.cell_n_cts_dev.data,
self.ct_frs_dev.data,
self.ct_tos_dev.data,
self.ct_dists_dev.data,
self.ct_pts_dev.data,
self.ct_norms_dev.data,
self.ct_reldists_dev.data,
self.ct_stiff_dev.data).wait()
# set dtype to int32 so we don't overflow the int32 when summing
#self.n_cts = self.cell_n_cts_dev.get().sum(dtype=numpy.int32)
self.n_cts = cl_array.sum(self.cell_n_cts_dev).get()
def collect_tos(self):
"""Call the collect_tos kernel.
Assumes that cell_sqs, sorted_ids, sq_inds, cell_n_cts,
ct_frs, and ct_tos are current on the device.
Calculates cell_tos and n_cell_tos.
"""
self.program.collect_tos(self.queue,
(self.n_cells,),
None,
numpy.int32(self.max_cells),
numpy.int32(self.n_cells),
numpy.int32(self.grid_x_min),
numpy.int32(self.grid_x_max),
numpy.int32(self.grid_y_min),
numpy.int32(self.grid_y_max),
numpy.int32(self.n_sqs),
numpy.int32(self.max_contacts),
self.cell_sqs_dev.data,
self.sorted_ids_dev.data,
self.sq_inds_dev.data,
self.cell_n_cts_dev.data,
self.ct_frs_dev.data,
self.ct_tos_dev.data,
self.cell_tos_dev.data,
self.n_cell_tos_dev.data).wait()
def build_matrix(self):
"""Build the matrix so we can calculate M^TMx = Ax.
Assumes cell_centers, cell_dirs, cell_lens, cell_rads,
ct_inds, ct_frs, ct_tos, ct_dists, and ct_norms are current on
the device.
Calculates fr_ents and to_ents.
"""
self.program.build_matrix(self.queue,
(self.n_cells, self.max_contacts),
None,
numpy.int32(self.max_contacts),
numpy.float32(self.muA),
numpy.float32(self.gamma),
self.pred_cell_centers_dev.data,
self.pred_cell_dirs_dev.data,
self.pred_cell_lens_dev.data,
self.cell_rads_dev.data,
self.cell_n_cts_dev.data,
self.ct_frs_dev.data,
self.ct_tos_dev.data,
self.ct_dists_dev.data,
self.ct_pts_dev.data,
self.ct_norms_dev.data,
self.fr_ents_dev.data,
self.to_ents_dev.data,
self.ct_stiff_dev.data).wait()
def calculate_Ax(self, Ax, x):
self.program.calculate_Mx(self.queue,
(self.n_cells, self.max_contacts),
None,
numpy.int32(self.max_contacts),
self.ct_frs_dev.data,
self.ct_tos_dev.data,
self.fr_ents_dev.data,
self.to_ents_dev.data,
x.data,
self.Mx_dev.data).wait()
self.program.calculate_MTMx(self.queue,
(self.n_cells,),
None,
numpy.int32(self.max_contacts),
self.cell_n_cts_dev.data,
self.n_cell_tos_dev.data,
self.cell_tos_dev.data,
self.fr_ents_dev.data,
self.to_ents_dev.data,
self.Mx_dev.data,
Ax.data).wait()
# Tikhonov test
#self.vaddkx(Ax, numpy.float32(0.01), Ax, x)
# Energy minimizing regularization
self.program.calculate_Minv_x(self.queue,
(self.n_cells,),
None,
numpy.float32(self.muA),
numpy.float32(self.gamma),
self.cell_dirs_dev.data,
self.cell_lens_dev.data,
self.cell_rads_dev.data,
x.data,
self.Minvx_dev.data).wait()
self.vaddkx(Ax, self.reg_param/math.sqrt(self.n_cells), Ax, self.Minvx_dev).wait()
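        # Net effect of the kernels above: Ax = M^T M x + (reg_param/sqrt(n)) M^-1 x,
        # i.e. the normal-equation operator plus an energy-minimising
        # regulariser (the plain Tikhonov variant is left commented out).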
def CGSSolve(self):
# Solve A^TA\deltap=A^Tb (Ax=b)
# There must be a way to do this using built in pyopencl - what
# is it?!
self.vclearf(self.deltap_dev)
self.vclearf(self.rhs_dev)
# put M^T n^Tv_rel in rhs (b)
self.program.calculate_MTMx(self.queue,
(self.n_cells,),
None,
numpy.int32(self.max_contacts),
self.cell_n_cts_dev.data,
self.n_cell_tos_dev.data,
self.cell_tos_dev.data,
self.fr_ents_dev.data,
self.to_ents_dev.data,
self.ct_reldists_dev.data,
self.rhs_dev.data).wait()
self.calculate_Ax(self.MTMx_dev, self.deltap_dev)
# res = b-Ax
self.vsub(self.res_dev, self.rhs_dev, self.MTMx_dev)
# p = res
cl.enqueue_copy(self.queue, self.p_dev.data, self.res_dev.data)
# rsold = l2norm(res)
rsold = self.vdot(self.res_dev, self.res_dev).get()
if math.sqrt(rsold/self.n_cells) < self.cgs_tol:
return (0.0, rsold)
# iterate
# max iters = matrix dimension = 7 (dofs) * num cells
#dying=False
max_iters = self.n_cells*7
for iter in range(max_iters):
# Ap
self.calculate_Ax(self.Ap_dev, self.p_dev)
# p^TAp
pAp = self.vdot(self.p_dev, self.Ap_dev).get()
# alpha = rsold/p^TAp
alpha = numpy.float32(rsold/pAp)
# x = x + alpha*p, x=self.disp
self.vaddkx(self.deltap_dev, alpha, self.deltap_dev, self.p_dev)
# res = res - alpha*Ap
self.vsubkx(self.res_dev, alpha, self.res_dev, self.Ap_dev)
# rsnew = l2norm(res)
rsnew = self.vdot(self.res_dev, self.res_dev).get()
# Test for convergence
            # NB: normalised by n_cts here but by n_cells in the pre-loop check
            if math.sqrt(rsnew/self.n_cts) < self.cgs_tol:
break
# Stopped converging -> terminate
#if rsnew/rsold>2.0:
# break
# p = res + rsnew/rsold *p
self.vaddkx(self.p_dev, numpy.float32(rsnew/rsold), self.res_dev, self.p_dev)
rsold = rsnew
#print ' ',iter,rsold
if self.frame_no%100==0:
print '% 5i'%self.frame_no + '% 6i cells % 6i cts % 6i iterations residual = %f' % (self.n_cells, self.n_cts, iter+1, rsnew)
return (iter+1, rsnew)
def predict(self):
"""Predict cell centers, dirs, lens for a timestep dt based
on the current velocities.
Assumes cell_centers, cell_dirs, cell_lens, cell_rads, and
cell_dcenters, cell_dangs, cell_dlens are current on the device.
Calculates new pred_cell_centers, pred_cell_dirs, pred_cell_lens.
"""
self.program.predict(self.queue,
(self.n_cells,),
None,
self.cell_centers_dev.data,
self.cell_dirs_dev.data,
self.cell_lens_dev.data,
self.cell_dcenters_dev.data,
self.cell_dangs_dev.data,
self.cell_dlens_dev.data,
self.pred_cell_centers_dev.data,
self.pred_cell_dirs_dev.data,
self.pred_cell_lens_dev.data).wait()
def integrate(self):
"""Integrates cell centers, dirs, lens for a timestep dt based
on the current deltap.
Assumes cell_centers, cell_dirs, cell_lens, cell_rads, and
deltap are current on the device.
Calculates new cell_centers, cell_dirs, cell_lens.
"""
self.program.integrate(self.queue,
(self.n_cells,),
None,
self.cell_centers_dev.data,
self.cell_dirs_dev.data,
self.cell_lens_dev.data,
self.cell_dcenters_dev.data,
self.cell_dangs_dev.data,
self.cell_dlens_dev.data).wait()
def add_impulse(self):
self.program.add_impulse(self.queue, (self.n_cells,), None,
numpy.float32(self.muA),
numpy.float32(self.gamma),
self.deltap_dev.data,
self.cell_dirs_dev.data,
self.cell_lens_dev.data,
self.cell_rads_dev.data,
self.cell_dcenters_dev.data,
self.cell_dangs_dev.data,
self.cell_target_dlens_dev.data,
self.cell_dlens_dev.data).wait()
def divide_cell(self, i, d1i, d2i):
"""Divide a cell into two equal sized daughter cells.
Fails silently if we're out of cells.
Assumes our local copy of cells is current.
Calculates new cell_centers, cell_dirs, cell_lens, and cell_rads.
"""
if self.n_cells >= self.max_cells:
return
# idxs of the two new cells
a = d1i
b = d2i
# seems to be making shallow copies without the tuple calls
parent_center = tuple(self.cell_centers[i])
parent_dir = tuple(self.cell_dirs[i])
parent_rad = self.cell_rads[i]
parent_len = self.cell_lens[i]
daughter_len = parent_len/2.0 - parent_rad #- 0.025
daughter_offset = daughter_len/2.0 + parent_rad
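        # Geometry sketch: each daughter is (l/2 - r) long and its centre sits
        # (daughter_len/2 + r) from the parent centre along the parent axis,
        # so the daughters' inner end caps just touch at the division point.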
center_offset = tuple([parent_dir[k]*daughter_offset for k in range(4)])
self.cell_centers[a] = tuple([(parent_center[k] - center_offset[k]) for k in range(4)])
self.cell_centers[b] = tuple([(parent_center[k] + center_offset[k]) for k in range(4)])
if not self.alternate_divisions:
cdir = numpy.array(parent_dir)
jitter = numpy.random.uniform(-0.001,0.001,3)
if not self.jitter_z: jitter[2] = 0.0
cdir[0:3] += jitter
cdir /= numpy.linalg.norm(cdir)
self.cell_dirs[a] = cdir
cdir = numpy.array(parent_dir)
jitter = numpy.random.uniform(-0.001,0.001,3)
if not self.jitter_z: jitter[2] = 0.0
cdir[0:3] += jitter
cdir /= numpy.linalg.norm(cdir)
self.cell_dirs[b] = cdir
else:
cdir = numpy.array(parent_dir)
tmp = cdir[0]
cdir[0] = -cdir[1]
cdir[1] = tmp
self.cell_dirs[a] = cdir
self.cell_dirs[b] = cdir
self.cell_lens[a] = daughter_len
self.cell_lens[b] = daughter_len
self.cell_rads[a] = parent_rad
self.cell_rads[b] = parent_rad
self.n_cells += 1
self.parents[b] = a
vols = self.cell_vols_dev.get()
daughter_vol = vols[i] / 2.0
vols[a] = daughter_vol
vols[b] = daughter_vol
self.cell_vols_dev.set(vols)
# Inherit velocities from parent (conserve momentum)
parent_dlin = self.cell_dcenters[i]
self.cell_dcenters[a] = parent_dlin
self.cell_dcenters[b] = parent_dlin
parent_dang = self.cell_dangs[i]
self.cell_dangs[a] = parent_dang
self.cell_dangs[b] = parent_dang
#return indices of daughter cells
return (a,b)
def calc_cell_geom(self):
"""Calculate cell geometry using lens/rads on card."""
# swap cell vols and cell_vols old
tmp = self.cell_old_vols_dev
self.cell_old_vols_dev = self.cell_vols_dev
self.cell_vols_dev = tmp
# update geometry
self.calc_cell_area(self.cell_areas_dev, self.cell_rads_dev, self.cell_lens_dev)
self.calc_cell_vol(self.cell_vols_dev, self.cell_rads_dev, self.cell_lens_dev)
def profileGrid(self):
if self.n_cts==0:
return
import time
t1 = time.clock()
for i in range(1000):
# redefine gridding based on the range of cell positions
self.cell_centers = self.cell_centers_dev.get()
self.update_grid() # we assume local cell_centers is current
# get each cell into the correct sq and retrieve from the device
self.bin_cells()
# sort cells and find sq index starts in the list
self.cell_sqs = self.cell_sqs_dev.get() # get updated cell sqs
self.sort_cells()
self.sorted_ids_dev.set(self.sorted_ids) # push changes to the device
self.sq_inds_dev.set(self.sq_inds)
t2 = time.clock()
print "Grid stuff timing for 1000 calls, time per call (s) = %f"%((t2-t1)*0.001)
open("grid_prof","a").write( "%i, %i, %f\n"%(self.n_cells,self.n_cts,(t2-t1)*0.001) )
def profileFindCts(self):
if self.n_cts==0:
return
import time
t1 = time.clock()
dt = 0.005
for i in range(1000):
self.n_cts = 0
self.vcleari(self.cell_n_cts_dev) # clear the accumulated contact count
self.predict()
# find all contacts
self.find_contacts()
# place 'backward' contacts in cells
#self.collect_tos()
# compact the contacts so we can dispatch only enough threads
# to deal with each
#self.ct_frs = self.ct_frs_dev.get()
#self.ct_tos = self.ct_tos_dev.get()
#self.ct_inds_dev.set(self.ct_inds)
t2 = time.clock()
print "Find contacts timing for 1000 calls, time per call (s) = %f"%((t2-t1)*0.001)
open("findcts_prof","a").write( "%i, %i, %f\n"%(self.n_cells,self.n_cts,(t2-t1)*0.001) )
def profileFindCts2(self):
if self.n_cts==0:
return
import time
t1 = time.clock()
dt = 0.005
for i in range(1000):
self.n_cts = 0
self.vcleari(self.cell_n_cts_dev) # clear the accumulated contact count
self.predict()
# find all contacts
self.find_contacts()
# place 'backward' contacts in cells
self.collect_tos()
# compact the contacts so we can dispatch only enough threads
# to deal with each
#self.ct_frs = self.ct_frs_dev.get()
#self.ct_tos = self.ct_tos_dev.get()
#self.ct_inds_dev.set(self.ct_inds)
t2 = time.clock()
print "Find contacts timing for 1000 calls, time per call (s) = %f"%((t2-t1)*0.001)
open("findcts_prof","a").write( "%i, %i, %f\n"%(self.n_cells,self.n_cts,(t2-t1)*0.001) )
def profileCGS(self):
if self.n_cts==0:
return
import time
t1 = time.clock()
dt = 0.005
for i in range(1000):
self.build_matrix() # Calculate entries of the matrix
(iters, res) = self.CGSSolve()
print "cgs prof: iters=%i, res=%f"%(iters,res)
t2 = time.clock()
print "CGS timing for 1000 calls, time per call (s) = %f"%((t2-t1)*0.001)
open("cgs_prof","a").write( "%i, %i, %i, %f\n"%(self.n_cells,self.n_cts,iters,(t2-t1)*0.001) )
circ_pts = [(math.cos(math.radians(th)), math.sin(math.radians(th))) for th in range(-80,90,20)]
def display_grid(spacing, x_lo, x_hi, y_lo, y_hi):
glBegin(GL_LINES)
for i in range(x_lo, x_hi+1):
glVertex3f(i*spacing, y_lo*spacing, 0)
glVertex3f(i*spacing, y_hi*spacing, 0)
for i in range(y_lo, y_hi+1):
glVertex3f(x_lo*spacing, i*spacing, 0)
glVertex3f(x_hi*spacing, i*spacing, 0)
glEnd()
def display_cell(p, d, l, r):
global quad
pa = numpy.array([p[i] for i in range(3)])
da = numpy.array([d[i] for i in range(3)])
e1 = pa - da*l*0.5
e2 = pa + da*l*0.5
glEnable(GL_DEPTH_TEST)
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
glTranslatef(e1[0],e1[1],e1[2])
zaxis = numpy.array([0,0,1])
rotaxis = numpy.cross(da, zaxis)
ang = numpy.arccos(numpy.dot(da, zaxis))
glRotatef(-ang*180.0/math.pi, rotaxis[0], rotaxis[1], rotaxis[2])
#glRotatef(90.0, 1, 0, 0)
gluCylinder(quad, r, r , l, 8, 1)
gluSphere(quad, r, 8, 8)
glPopMatrix()
glPushMatrix()
glTranslatef(e2[0],e2[1],e2[2])
gluSphere(quad, r, 8, 8)
glPopMatrix()
glDisable(GL_DEPTH_TEST)
'''
def display_cell(p, d, l, r):
glEnable(GL_DEPTH_TEST)
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
ang = math.atan2(d[1], d[0]) * 360.0 / (2.0*3.141593)
glTranslatef(p[0], p[1], 0.0)
glRotatef(ang, 0.0, 0.0, 1.0)
glBegin(GL_POLYGON)
glVertex3f(-l/2.0, -r, 0)
glVertex3f(l/2.0, -r, 0)
for x,y in circ_pts:
glVertex3f(l/2.0 + x*r, y*r, 0.0)
glVertex3f(l/2.0, r, 0)
glVertex3f(-l/2.0, r, 0)
for x,y in circ_pts:
glVertex3f(-l/2.0 -x*r, -y*r, 0.0)
glEnd()
glPopMatrix()
glDisable(GL_DEPTH_TEST)
'''
def display_cell_name(p, name):
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
glTranslatef(p[0], p[1], p[2])
glScalef(0.006, 0.006, 0.006)
display_string(name)
glPopMatrix()
def display_ct(pt, norm, fr_Lz):
glMatrixMode(GL_MODELVIEW)
glPushMatrix()
glTranslatef(pt[0], pt[1], pt[2])
glBegin(GL_POINTS)
glVertex3f(0.0, 0.0, 0.0)
glEnd()
glPushMatrix()
glTranslatef(0.1, 0.1, 0.0)
glScalef(0.004, 0.004, 0.004)
display_string(fr_Lz)
glPopMatrix()
xaxis = numpy.array([1,0,0])
norma = numpy.array([norm[i] for i in range(3)])
rotaxis = numpy.cross(norma, xaxis)
ang = numpy.arccos(numpy.dot(norma, xaxis))
glRotatef(-ang*180.0/math.pi, rotaxis[0], rotaxis[1], rotaxis[2])
# ang = math.atan2(norm[1], norm[0]) * 360.0 / (2.0*3.141593)
# glRotatef(ang, 0.0, 0.0, 1.0)
glBegin(GL_LINES)
glVertex3f(0.0, 0.0, 0.0)
glVertex3f(1.0, 0.0, 0.0)
glEnd()
glBegin(GL_TRIANGLES)
glVertex3f(1.0, 0.0, 0.0)
glVertex3f(0.8, 0.2, 0.0)
glVertex3f(0.8, -0.2, 0.0)
glEnd()
glPopMatrix()
def display_string(s):
for ch in s:
glutStrokeCharacter(GLUT_STROKE_ROMAN, ord(ch))
def cell_color(i):
global founders
while i not in founders:
i = model.parents[i]
return founders[i]
def display():
global view_x, view_y, view_z, view_ang
glEnable(GL_LINE_SMOOTH)
glEnable(GL_POLYGON_SMOOTH)
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glClearColor(0.7, 0.7, 0.7, 0.7)
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
gluPerspective(60.0, 1.0, 0.1, 1000.0)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glTranslatef(view_x, view_y, -view_z)
glRotatef(view_ang, 1,0,0)
glColor3f(0, 0, 0)
glLineWidth(0.5)
display_grid(model.grid_spacing, model.grid_x_min, model.grid_x_max, model.grid_y_min, model.grid_y_max)
model.get_cells()
for i in range(model.n_cells):
#glColor3f(0.5, 0.5, 0.5)
rr,gg,bb = cell_color(i)
glColor3f(rr, gg, bb)
#glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
glPolygonMode(GL_FRONT, GL_FILL)
display_cell(model.cell_centers[i], model.cell_dirs[i], model.cell_lens[i], model.cell_rads[i])
glColor3f(0.0, 0.0, 0.0)
#glPolygonMode(GL_FRONT_AND_BACK, GL_LINE)
glPolygonMode(GL_FRONT, GL_LINE)
glLineWidth(2.0)
display_cell(model.cell_centers[i], model.cell_dirs[i], model.cell_lens[i], model.cell_rads[i])
# glColor3f(0.0, 0.0, 0.0)
# glLineWidth(1.0)
# display_cell_name(model.cell_centers[i], str(i))
glColor3f(0.1, 0.2, 0.4)
glPolygonMode(GL_FRONT_AND_BACK, GL_FILL)
glPointSize(1.0)
glLineWidth(1.0)
global ct_map
new_ct_map = {}
model.get_cts()
for i in range(model.n_cells):
for j in range(model.cell_n_cts[i]):
other = model.ct_tos[i][j]
new_ct_map[i,other] = (model.ct_pts[i][j], model.ct_norms[i][j], '% .4f'%model.ct_dists[i][j])
if other<0:
glColor3f(0.5,0.5,0.1)
elif (i,other) in ct_map:
glColor3f(0.1, 0.4, 0.2)
else:
glColor3f(0.6, 0.1, 0.1)
if other<0:
display_ct(model.ct_pts[i][j], model.ct_norms[i][j], '% .4f'% model.ct_dists[i][j])
dead_cts_keys = set(ct_map.keys()) - set(new_ct_map.keys())
for key in dead_cts_keys:
pt, norm, dist = ct_map[key]
glColor3f(0.1, 0.1, 0.6)
display_ct(pt, norm, dist)
ct_map = new_ct_map
glFlush()
glutSwapBuffers()
def reshape(w, h):
l = min(w, h)
glViewport(0, 0, l, l)
glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glutPostRedisplay()
from OpenGL.GL import *
from OpenGL.GLU import *
from OpenGL.GLUT import *
display_flag = False
quad = gluNewQuadric()
def idle():
global frame_no
global display_flag
model.tick(0.01)
model.get_cells()
if model.frame_no % 100 == 0:
#self.dump_cell_data(frame_no/100)
print '% 8i % 8i cells % 8i contacts' % (model.frame_no, model.n_cells, model.n_cts)
if model.frame_no %100 == 0:
for i in range(model.n_cells):
if model.cell_lens[i] > 3.0+random.uniform(0.0,1.0):
                # divide_cell needs parent and two daughter indices; reuse the
                # parent slot and the next free slot (assumed convention)
                model.divide_cell(i, i, model.n_cells)
model.set_cells()
if model.frame_no % 500 == 0 or display_flag:
display()
display_flag = False
if model.frame_no % 1001 == 0:
model.profileCGS()
model.profileFindCts()
model.profileGrid()
model.frame_no += 1
view_x = 0
view_y = 0
view_z = 50
view_ang = 45.0
def key_pressed(*args):
global view_x, view_y, view_z, view_ang, display_flag
if args[0] == 'j':
view_x += 2
elif args[0] == 'l':
view_x -= 2
elif args[0] == 'i':
view_y -= 2
elif args[0] == 'k':
view_y += 2
elif args[0] == 'e':
view_z -= 2
elif args[0] == 'd':
view_z += 2
elif args[0] == 'z':
view_ang += 2
elif args[0] == 'x':
view_ang -= 2
elif args[0] == '\x1b':
exit()
elif args[0] == 'f':
display_flag = True
import time
class state:
pass
if __name__ == '__main__':
numpy.set_printoptions(precision=8,
threshold=10000,
linewidth=180)
ct_map = {}
glutInit(sys.argv)
glutInitWindowSize(1400, 1400)
glutInitWindowPosition(0, 0)
glutCreateWindow('CLBacterium')
glutDisplayFunc(display)
glutReshapeFunc(reshape)
glutKeyboardFunc(key_pressed)
glutIdleFunc(idle)
from CellModeller.Simulator import Simulator
sim = Simulator(None, 0.01)
model = CLBacterium(sim, max_cells=2**15, max_contacts=32, max_sqs=64*16, jitter_z=False, reg_param=2, gamma=5.0)
model.addPlane((0,-16,0), (0,1,0), 1)
model.addPlane((0,16,0), (0,-1,0), 1)
#model = CLBacterium(None)
#model.load_test_data()
#model.load_3_cells_2_planes()
#model.load_1024_cells()
#model.load_3_cells()
cs = state()
cs.id=0
cs.idx=0
cs.growthRate = 0.5
model.addCell(cs)
founders = {0:(0.5, 0.3, 0.3),
1:(0.3, 0.5, 0.3),
2:(0.3, 0.3, 0.5)}
#model.load_3_cells_2_planes()
#model.load_1024_cells()
model.load_3_cells()
glutMainLoop()
| bsd-3-clause | 6,362,412,420,733,977,000 | 37.502837 | 139 | 0.525208 | false | 3.224387 | false | false | false |
Bjay1435/capstone | rootfs/usr/share/dh-python/dhpython/build/plugin_distutils.py | 1 | 4638 | # Copyright © 2012-2013 Piotr Ożarowski <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import logging
from glob import glob1
from os import remove
from os.path import exists, isdir, join
from shutil import rmtree
from dhpython.build.base import Base, shell_command
log = logging.getLogger('dhpython')
_setup_tpl = 'setup.py|setup-3.py'
def create_pydistutils_cfg(func):
"""distutils doesn't have sane command-line API - this decorator creates
.pydistutils.cfg file to workaround it
hint: if you think this is plain stupid, please don't read
distutils/setuptools/distribute sources
"""
def wrapped_func(self, context, args, *oargs, **kwargs):
fpath = join(args['home_dir'], '.pydistutils.cfg')
if not exists(fpath):
with open(fpath, 'w', encoding='utf-8') as fp:
fp.writelines(['[clean]\n',
'all=1\n',
'[build]\n',
'build-lib={}\n'.format(args['build_dir']),
'[install]\n',
'install-layout=deb\n',
'install-scripts=/usr/bin\n',
'install-lib={}\n'.format(args['install_dir'])])
context['ENV']['HOME'] = args['home_dir']
return func(self, context, args, *oargs, **kwargs)
wrapped_func.__name__ = func.__name__
return wrapped_func
class BuildSystem(Base):
DESCRIPTION = 'Distutils build system'
SUPPORTED_INTERPRETERS = {'python', 'python3', 'python{version}',
'python-dbg', 'python3-dbg', 'python{version}-dbg',
'pypy'}
REQUIRED_FILES = [_setup_tpl]
OPTIONAL_FILES = {'setup.cfg': 1,
'requirements.txt': 1,
'PKG-INFO': 10,
'*.egg-info': 10}
def detect(self, context):
result = super(BuildSystem, self).detect(context)
if _setup_tpl in self.DETECTED_REQUIRED_FILES:
context['args']['setup_py'] = self.DETECTED_REQUIRED_FILES[_setup_tpl][0]
else:
context['args']['setup_py'] = 'setup.py'
return result
@shell_command
@create_pydistutils_cfg
def clean(self, context, args):
super(BuildSystem, self).clean(context, args)
dpath = join(context['dir'], 'build')
isdir(dpath) and rmtree(dpath)
for fname in glob1(context['dir'], '*.egg-info'):
fpath = join(context['dir'], fname)
rmtree(fpath) if isdir(fpath) else remove(fpath)
return '{interpreter} {setup_py} clean {args}'
@shell_command
@create_pydistutils_cfg
def configure(self, context, args):
return '{interpreter} {setup_py} config {args}'
@shell_command
@create_pydistutils_cfg
def build(self, context, args):
return '{interpreter.binary_dv} {setup_py} build {args}'
@shell_command
@create_pydistutils_cfg
def install(self, context, args):
return '{interpreter.binary_dv} {setup_py} install --root {destdir} {args}'
@shell_command
@create_pydistutils_cfg
def test(self, context, args):
if not self.cfg.custom_tests:
fpath = join(args['dir'], args['setup_py'])
with open(fpath, 'rb') as fp:
if fp.read().find(b'test_suite') > 0:
# TODO: is that enough to detect if test target is available?
return '{interpreter} {setup_py} test {args}'
return super(BuildSystem, self).test(context, args)
| mit | 8,160,499,538,414,738,000 | 40.026549 | 85 | 0.615401 | false | 4.063103 | true | false | false |
atmantree/post-introduccion-zodb | todo_v1/todo.py | 1 | 1684 | # -*- coding: utf-8 -*-
from ZODB import (DB, FileStorage)
import transaction
import argparse
class ToDo:
def __init__(self):
self.store = FileStorage.FileStorage("ToDo.fs")
self.database = DB(self.store)
self.connection = self.database.open()
self.root = self.connection.root()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
        # abort anything left uncommitted before closing the connection
        transaction.abort()
self.connection.close()
self.database.close()
self.store.close()
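    # Usage sketch: the class is a context manager, so callers can write
    #   with ToDo() as tasks:
    #       tasks.add('milk', 'buy 2 litres')
    # and the ZODB connection and storage are closed even on error.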
def add(self, key, value):
if key != "":
self.root[key] = value
transaction.commit()
print("New task added..")
else:
print("A task must have a name")
def list(self):
print("Tasks To Do..")
for k in self.root.keys():
print("%s\t%s" % (k, self.root[k]))
def delete(self, key):
if key in self.root.keys():
del(self.root[key])
transaction.commit()
print("Task deleted..")
else:
print("There is no task '%s'.." % key)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('-a', '--add', nargs=2, help="add a tast to the ToDo list")
parser.add_argument('-d', '--delete', nargs=1, help="delete a task from the ToDo list")
args = parser.parse_args()
    # use the context manager so the ZODB storage is always closed
    with ToDo() as tasks:
        if args.add:
            tasks.add(args.add[0], args.add[1])
        elif args.delete:
            tasks.delete(args.delete[0])
        else:
            tasks.list() | bsd-3-clause | 5,546,271,761,760,480,000 | 27.068966 | 91 | 0.513064 | false | 3.907193 | false | false | false |
IL2HorusTeam/django-il2-domain | il2_domain/locations/migrations/0001_initial.py | 1 | 15133 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Location'
db.create_table(u'locations_location', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name_en', self.gf('django.db.models.fields.CharField')(unique=True, max_length=255)),
('name_ru', self.gf('django.db.models.fields.CharField')(max_length=255, unique=True, null=True, blank=True)),
('name_uk', self.gf('django.db.models.fields.CharField')(max_length=255, unique=True, null=True, blank=True)),
('slug', self.gf('django.db.models.fields.SlugField')(unique=True, max_length=50)),
('width', self.gf('django.db.models.fields.IntegerField')(default=12800)),
('height', self.gf('django.db.models.fields.IntegerField')(default=12800)),
('zoom_min', self.gf('django.db.models.fields.IntegerField')(default=0)),
('zoom_max', self.gf('django.db.models.fields.IntegerField')(default=0)),
('zoom_dflt', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal(u'locations', ['Location'])
# Adding model 'Locality'
db.create_table(u'locations_locality', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Location'])),
('pos', self.gf('django.contrib.gis.db.models.fields.PointField')()),
('zoom_min', self.gf('django.db.models.fields.IntegerField')(default=0)),
('name_en', self.gf('django.db.models.fields.CharField')(max_length=255)),
('name_ru', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('name_uk', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('area', self.gf('django.contrib.gis.db.models.fields.PolygonField')()),
))
db.send_create_signal(u'locations', ['Locality'])
# Adding model 'GeographicalFeature'
db.create_table(u'locations_geographicalfeature', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Location'])),
('pos', self.gf('django.contrib.gis.db.models.fields.PointField')()),
('zoom_min', self.gf('django.db.models.fields.IntegerField')(default=0)),
('name_en', self.gf('django.db.models.fields.CharField')(max_length=255)),
('name_ru', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('name_uk', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
))
db.send_create_signal(u'locations', ['GeographicalFeature'])
# Adding model 'Aerodrome'
db.create_table(u'locations_aerodrome', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Location'])),
('pos', self.gf('django.contrib.gis.db.models.fields.PointField')()),
('zoom_min', self.gf('django.db.models.fields.IntegerField')(default=0)),
('name_en', self.gf('django.db.models.fields.CharField')(max_length=255)),
('name_ru', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('name_uk', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('amsl', self.gf('django.db.models.fields.IntegerField')(default=0)),
))
db.send_create_signal(u'locations', ['Aerodrome'])
# Adding model 'Runway'
db.create_table(u'locations_runway', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('aerodrome', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Aerodrome'])),
('name', self.gf('django.db.models.fields.CharField')(default=u'09/27', max_length=255)),
('is_bidirectional', self.gf('django.db.models.fields.BooleanField')(default=True)),
('beginning', self.gf('django.contrib.gis.db.models.fields.PointField')()),
('end', self.gf('django.contrib.gis.db.models.fields.PointField')()),
('width', self.gf('django.db.models.fields.IntegerField')(default=30)),
('pavement_type', self.gf('django.db.models.fields.CharField')(max_length=3)),
))
db.send_create_signal(u'locations', ['Runway'])
# Adding model 'LocationVariant'
db.create_table(u'locations_locationvariant', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name_en', self.gf('django.db.models.fields.CharField')(max_length=255)),
('name_ru', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('name_uk', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)),
('location', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['locations.Location'])),
('loader', self.gf('django.db.models.fields.CharField')(default=u'location/load.ini', max_length=255)),
('season', self.gf('django.db.models.fields.CharField')(max_length=3)),
('month_number', self.gf('django.db.models.fields.IntegerField')(default=1)),
('atmospheric_pressure', self.gf('django.db.models.fields.IntegerField')(default=745)),
('air_temperature', self.gf('django.db.models.fields.IntegerField')(default=25)),
))
db.send_create_signal(u'locations', ['LocationVariant'])
# Adding M2M table for field localities on 'LocationVariant'
m2m_table_name = db.shorten_name(u'locations_locationvariant_localities')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('locationvariant', models.ForeignKey(orm[u'locations.locationvariant'], null=False)),
('locality', models.ForeignKey(orm[u'locations.locality'], null=False))
))
db.create_unique(m2m_table_name, ['locationvariant_id', 'locality_id'])
# Adding M2M table for field aerodromes on 'LocationVariant'
m2m_table_name = db.shorten_name(u'locations_locationvariant_aerodromes')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('locationvariant', models.ForeignKey(orm[u'locations.locationvariant'], null=False)),
('aerodrome', models.ForeignKey(orm[u'locations.aerodrome'], null=False))
))
db.create_unique(m2m_table_name, ['locationvariant_id', 'aerodrome_id'])
# Adding M2M table for field geographical_features on 'LocationVariant'
m2m_table_name = db.shorten_name(u'locations_locationvariant_geographical_features')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('locationvariant', models.ForeignKey(orm[u'locations.locationvariant'], null=False)),
('geographicalfeature', models.ForeignKey(orm[u'locations.geographicalfeature'], null=False))
))
db.create_unique(m2m_table_name, ['locationvariant_id', 'geographicalfeature_id'])
def backwards(self, orm):
# Deleting model 'Location'
db.delete_table(u'locations_location')
# Deleting model 'Locality'
db.delete_table(u'locations_locality')
# Deleting model 'GeographicalFeature'
db.delete_table(u'locations_geographicalfeature')
# Deleting model 'Aerodrome'
db.delete_table(u'locations_aerodrome')
# Deleting model 'Runway'
db.delete_table(u'locations_runway')
# Deleting model 'LocationVariant'
db.delete_table(u'locations_locationvariant')
# Removing M2M table for field localities on 'LocationVariant'
db.delete_table(db.shorten_name(u'locations_locationvariant_localities'))
# Removing M2M table for field aerodromes on 'LocationVariant'
db.delete_table(db.shorten_name(u'locations_locationvariant_aerodromes'))
# Removing M2M table for field geographical_features on 'LocationVariant'
db.delete_table(db.shorten_name(u'locations_locationvariant_geographical_features'))
models = {
u'locations.aerodrome': {
'Meta': {'object_name': 'Aerodrome'},
'amsl': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']"}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_ru': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name_uk': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pos': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'zoom_min': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'locations.geographicalfeature': {
'Meta': {'object_name': 'GeographicalFeature'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']"}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_ru': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name_uk': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pos': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'zoom_min': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'locations.locality': {
'Meta': {'object_name': 'Locality'},
'area': ('django.contrib.gis.db.models.fields.PolygonField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']"}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_ru': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name_uk': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'pos': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'zoom_min': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'locations.location': {
'Meta': {'ordering': "(u'slug',)", 'object_name': 'Location'},
'height': ('django.db.models.fields.IntegerField', [], {'default': '12800'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name_en': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'name_ru': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'name_uk': ('django.db.models.fields.CharField', [], {'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'width': ('django.db.models.fields.IntegerField', [], {'default': '12800'}),
'zoom_dflt': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'zoom_max': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'zoom_min': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'locations.locationvariant': {
'Meta': {'object_name': 'LocationVariant'},
'aerodromes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['locations.Aerodrome']", 'null': 'True', 'blank': 'True'}),
'air_temperature': ('django.db.models.fields.IntegerField', [], {'default': '25'}),
'atmospheric_pressure': ('django.db.models.fields.IntegerField', [], {'default': '745'}),
'geographical_features': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['locations.GeographicalFeature']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'loader': ('django.db.models.fields.CharField', [], {'default': "u'location/load.ini'", 'max_length': '255'}),
'localities': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['locations.Locality']", 'null': 'True', 'blank': 'True'}),
'location': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Location']"}),
'month_number': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'name_en': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name_ru': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'name_uk': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'season': ('django.db.models.fields.CharField', [], {'max_length': '3'})
},
u'locations.runway': {
'Meta': {'object_name': 'Runway'},
'aerodrome': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['locations.Aerodrome']"}),
'beginning': ('django.contrib.gis.db.models.fields.PointField', [], {}),
'end': ('django.contrib.gis.db.models.fields.PointField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_bidirectional': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "u'09/27'", 'max_length': '255'}),
'pavement_type': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'width': ('django.db.models.fields.IntegerField', [], {'default': '30'})
}
}
complete_apps = ['locations'] | mit | -6,195,351,885,397,270,000 | 66.5625 | 199 | 0.600211 | false | 3.544028 | false | false | false |
AndreySibiryakov/tools | smooth_curve_gaussian_class_api_2_w_smooth.py | 1 | 7752 | import scipy.ndimage as sp
import maya.cmds as cmds
import maya.OpenMaya as om
import maya.OpenMayaAnim as oma
import numpy as np
import scipy.interpolate as si
def bspline(cv, n=100, degree=3, periodic=False):
""" Calculate n samples on a bspline
        cv : Array of control vertices
n : Number of samples to return
degree: Curve degree
periodic: True - Curve is closed
False - Curve is open
"""
# If periodic, extend the point array by count+degree+1
cv = np.asarray(cv)
count = len(cv)
if periodic:
factor, fraction = divmod(count + degree + 1, count)
cv = np.concatenate((cv,) * factor + (cv[:fraction],))
count = len(cv)
degree = np.clip(degree, 1, degree)
# If opened, prevent degree from exceeding count-1
else:
degree = np.clip(degree, 1, count - 1)
# Calculate knot vector
kv = None
if periodic:
kv = np.arange(0 - degree, count + degree + degree - 1)
else:
kv = np.clip(np.arange(count + degree + 1) - degree, 0, count - degree)
# Calculate query range
u = np.linspace(periodic, (count - degree), n)
# Calculate result
return np.array(si.splev(u, (kv, cv.T, degree))).T
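# Example (hypothetical control points): sample 50 points on an open cubic
# spline through four 2D CVs:
#   cv = np.array([[0.0, 0.0], [1.0, 2.0], [2.0, -1.0], [3.0, 0.0]])
#   pts = bspline(cv, n=50, degree=3)   # -> (50, 2) array of samples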
class Gaussian(object):
def __init__(self, blend=10):
self.blend = blend
self.curves = []
self.frame_data = {}
self.value_data = {}
self.id_data = {}
def normalize_value(self, v, min_v, max_v):
'''Normalizes single curve value.
Used for processing "Stress" internal facefx curve.
Args:
v (float): curve value
min_v (float): minimum value
max_v (float): maximum value
Returns:
float:
'''
        # map v linearly from [min_v, max_v] onto [0, 1]
        return (v - min_v) / (max_v - min_v)
def normalize_data(self, data):
min_v = min(data) if min(data) > 0 else 0
max_v = max(data)
return [self.normalize_value(d, min_v, max_v) for d in data]
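        # Example: [2.0, 4.0, 6.0] -> [0.0, 0.5, 1.0]; a negative minimum is
        # clamped to 0, so negative values map below 0 rather than rescaling.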
def restore_normalized_value(self, v, min_v, max_v):
return min_v + v * (max_v - min_v)
def restore_normalized_data(self, src_data, norm_data):
min_v = min(src_data) if min(src_data) > 0 else 0
max_v = max(src_data)
return [self.restore_normalized_value(d, min_v, max_v) for d in norm_data]
def add_keys(self, plugName, times, values, changeCache=None):
# Get the plug to be animated.
sel = om.MSelectionList()
sel.add(plugName)
plug = om.MPlug()
sel.getPlug(0, plug)
# Create the animCurve.
animfn = oma.MFnAnimCurve(plug)
timeArray = om.MTimeArray()
valueArray = om.MDoubleArray()
for i in range(len(times)):
timeArray.append(om.MTime(times[i], om.MTime.uiUnit()))
valueArray.append(values[i])
# Add the keys to the animCurve.
animfn.addKeys(
timeArray,
valueArray,
oma.MFnAnimCurve.kTangentGlobal,
oma.MFnAnimCurve.kTangentGlobal,
False,
changeCache
)
def delete_node(self, node):
try:
cmds.delete(cmds.listConnections(node)[0])
except:
return
def calc_blend_val(self, orig_val, proc_val, multi):
diff_val = (orig_val - proc_val) * multi
return orig_val - diff_val
def set_blend(self, init_values, filt_values):
        # Clamp the blend length to at most half of the list length
if len(init_values) / 2 < self.blend:
self.blend = len(init_values) / 2
gradient_range = [p / float(self.blend)
for p in range(0, self.blend)][1:]
for i, multi in enumerate(gradient_range):
rev_i = -(i + 1)
filt_values[i] = self.calc_blend_val(
init_values[i], filt_values[i], multi)
filt_values[rev_i] = self.calc_blend_val(
init_values[rev_i], filt_values[rev_i], multi)
return filt_values
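    # Worked example of the blend above (values made up): with blend=3,
    # gradient_range == [1/3., 2/3.]; for orig=10.0, filt=4.0 and multi=1/3.
    # calc_blend_val gives 10.0 - (10.0 - 4.0) * (1/3.) == 8.0, so the
    # smoothed values are eased back toward the originals at both ends of
    # the selection.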
    @staticmethod
    def group_by_increasing(data):
        # Groups consecutive integers into runs, e.g. [1, 2, 3, 7, 8] ->
        # [[1, 2, 3], [7, 8]].
res = [[data[0]]]
for i in range(1, len(data)):
if data[i - 1] + 1 == data[i]:
res[-1].append(data[i])
else:
res.append([data[i]])
return res
def get_data(self):
self.curves = cmds.keyframe(query=True, name=True)
for curve in self.curves:
frame_data = cmds.keyframe(curve, sl=True, query=True)
if not frame_data:
frame_data = cmds.keyframe(curve, query=True)
self.frame_data[curve] = frame_data
value_data = cmds.keyframe(
curve, sl=True, valueChange=True, query=True)
if not value_data:
value_data = cmds.keyframe(
curve, valueChange=True, query=True)
self.value_data[curve] = value_data
# Resets slider value to default
cmds.floatSlider(power_sl, v=0, edit=True)
def process_curve(self):
# self.get_data()
power = cmds.floatSlider(power_sl, value=True, query=True)
        # Reverses the input range, since it is not possible to do in the GUI
for curve in self.curves:
if cmds.checkBox(b_spline, v=True, q=True):
filtered_values = bspline(self.value_data[curve], n=len(
self.value_data[curve]), degree=int(power))
else:
filtered_values = self.gaussian(self.value_data[curve], power)
filtered_values = [float(v) for v in filtered_values]
filtered_values = self.set_blend(
self.value_data[curve], filtered_values)
if cmds.checkBox(cbx, v=True, q=True):
filtered_values = self.normalize_data(filtered_values)
filtered_values = self.restore_normalized_data(
self.value_data[curve], filtered_values)
attr = cmds.listConnections(curve, p=True)[0]
self.add_keys(attr, self.frame_data[curve], filtered_values, None)
cmds.keyTangent(itt='auto', ott='auto')
def normalize_only(self):
self.get_data()
        # Reverses the input range, since it is not possible to do in the GUI
for curve in self.curves:
filtered_values = self.normalize_data(self.value_data[curve])
attr = cmds.listConnections(curve, p=True)[0]
self.add_keys(attr, self.frame_data[curve], filtered_values, None)
cmds.keyTangent(itt='auto', ott='auto')
def gaussian(self, data, power):
return sp.filters.gaussian_filter1d(data, power)
window_name = 'Gaussian'
if cmds.window(window_name, exists=True):
cmds.deleteUI(window_name)
cmds.window(window_name)
column = cmds.columnLayout(adjustableColumn=True)
# cmds.label('Power')
# text = cmds.text(label='Size', h=30)
cbx = cmds.checkBox(label='Normalize',
value=False,
ann='Normalyze curves or not')
b_spline = cmds.checkBox(label='B-spline',
value=False,
ann='Simplify curve with B-spline')
power_sl = cmds.floatSlider(
min=0, max=20, step=1, w=250, h=30)
cmds.button(label='Normalize Only',
command='gg.normalize_only()')
gg = Gaussian()
cmds.floatSlider(power_sl, dc='gg.process_curve()',
dragCallback='gg.get_data()', edit=True)
cmds.showWindow()
| mit | 8,784,526,235,841,071,000 | 32.300885 | 82 | 0.551987 | false | 3.660057 | false | false | false |
tomdoel/pyxnatbrowser | browser/progresslistbox.py | 1 | 10205 | # https://github.com/tomdoel/pyxnatbrowser
# Author: Tom Doel www.tomdoel.com
# Distributed under the Simplified BSD License.
from enum import Enum
from sys import stdout
from tkinter import Frame, Scrollbar, VERTICAL, Label, Listbox, EXTENDED, RIGHT, Y, LEFT, BOTH, END, Checkbutton, Text, \
IntVar
from tkinter.ttk import Progressbar
from database.observable import Observable
from database.xnatdatabase import ProgressStatus
class LabeledProgressListBox(Frame):
def __init__(self, parent, list_model_factory, label_text):
Frame.__init__(self, parent)
scrollbar = Scrollbar(self, orient=VERTICAL)
Label(self, text=label_text).pack()
self.check_list_box = ProgressListBox(self, scrollbar, list_model_factory)
scrollbar.config(command=self.check_list_box.yview)
scrollbar.pack(side=RIGHT, fill=Y)
self.check_list_box.pack(side=LEFT, fill=BOTH, expand=1)
class ProgressListBox(Text):
def __init__(self, parent, scrollbar, list_model_factory):
Text.__init__(self, parent, yscrollcommand=scrollbar.set)
self._list_model_factory = list_model_factory
self._list_model = list_model_factory.get_list_model()
self._list_model.add_listener(self._list_items_changed)
def _list_items_changed(self):
self.delete(1.0, END) # Clears the list entries
for list_item in self._list_model.get_list_items():
list_item_model = self._list_model_factory.get_list_item_model(list_item)
new_checkbutton = ProgressListBoxItem(self, list_item_model)
self.window_create("end", window=new_checkbutton)
self.insert("end", "\n")
class ProgressListBoxModelFactory:
def __init__(self):
self._list_model = ProgressListBoxModel()
def get_list_model(self):
return self._list_model
def get_list_item_model(self, list_item):
return ProgressListBoxItemModel(list_item.get_label())
class ProgressListBoxModel(Observable):
def __init__(self):
Observable.__init__(self)
self._list_items = []
def get_list_items(self):
return self._list_items
def set_list_items(self, list_items):
self._list_items = list_items
self._notify()
class ProgressListBoxItemModel(Observable):
def __init__(self, label):
Observable.__init__(self)
self._label = label
self._progress_status = ProgressStatus.undefined
self._check_status = False
def get_progress_status(self):
return self._progress_status
def get_check_status(self):
return self._check_status
def get_label(self):
return self._label
def set_progress_status(self, progress_status):
if self._progress_status is not progress_status:
self._progress_status = progress_status
self._notify()
def set_check_status(self, check_status):
if self._check_status is not check_status:
self._check_status = check_status
self._notify()
def manual_set_checked(self, check_status):
self.set_check_status(check_status)
class ProgressListBoxItem(Frame, Observable):
def __init__(self, parent, model):
Frame.__init__(self, parent)
Observable.__init__(self)
self._model = model
# Create variables and initialise to zero
self._checked_var = IntVar()
self._progress_var = IntVar()
self._checked_var.set(0)
self._progress_var.set(0)
self._current_gui_checked_state = CheckStatus.undefined
self._current_gui_progress_state = ProgressStatus.undefined
self.check_button = Checkbutton(self, text=model.get_label(), variable=self._checked_var, command=self._user_check_changed)
self.progress_bar = Progressbar(self, orient='horizontal', mode='indeterminate', variable=self._progress_var)
self.check_button.pack(side=LEFT, fill="both", expand=True)
self._update()
self._model.add_listener(self._model_changed)
    def _model_changed(self):
        # Refresh this widget from the model; call our own _update(), not
        # Tkinter's Frame.update(), which only processes pending idle tasks.
        self._update()
def _update(self):
# Update check status
model_check_state = self._model.get_check_status()
if model_check_state is not self._current_gui_checked_state:
self._current_gui_checked_state = model_check_state
# if self.status_model.is_checked_force_reload():
if model_check_state:
self.check_button.select()
else:
self.check_button.deselect()
# Update progress status
        model_progress_state = self._model.get_progress_status()
if not model_progress_state == self._current_gui_progress_state:
self._current_gui_progress_state = model_progress_state
if model_progress_state == ProgressStatus.in_progress:
self.progress_bar.pack(side=RIGHT, fill="both", expand=True)
else:
self.progress_bar.pack_forget()
def _user_check_changed(self):
new_checked = self._checked_var.get()
if new_checked is not self._model.get_check_status():
self._model.manual_set_checked(new_checked)
# self.model.set_checked(new_checked)
# if new_checked is not self.status_model.is_checked():
# self._notify(self.index, new_checked)
# def update_list(self, scan_records):
# self.delete(1.0, END) # Clears the list entries
#
# self.list_objects = []
# self.check_buttons = {}
# self.next_index = 0
# self.checked_indices = None
# self.unchecked_indices = None
# for scan_record in scan_records:
# self.list_objects.append(scan_record.scan)
# node_checkbox_model = ProgressCheckButtonModel(scan_record.label)
# new_checkbutton = ProgressCheckButton(self, node_checkbox_model, self.next_index, scan_record)
# self.window_create("end", window=new_checkbutton)
# self.insert("end", "\n")
# self.check_buttons[self.next_index] = new_checkbutton
# new_checkbutton.add_listener(self._checkbox_changed)
# self.next_index += 1
#
# self._populate_cache()
#
# def refresh_checks(self):
# for index, checkbutton in self.check_buttons.items():
# checkbutton.refresh_check()
# self._populate_cache()
#
# def _populate_cache(self):
# self.checked_indices = []
# self.unchecked_indices = []
# for index, checkbutton in self.check_buttons.items():
# if checkbutton.is_checked():
# self.checked_indices.append(index)
# else:
# self.unchecked_indices.append(index)
#
# def _checkbox_changed(self, index, value):
# self._populate_cache()
# selected_items = [self.list_objects[int(index)] for index in self.checked_indices]
# unselected_items = [self.list_objects[int(index)] for index in self.unchecked_indices]
#
# # Update the selection models - these will trigger notifications via their setter methods
# self.selected_items_model.selected_items = selected_items
# self.unselected_items_model.selected_items = unselected_items
class ProgressCheckButtonModel(Observable):
    def __init__(self, label, status_model):
        Observable.__init__(self)
        self.label = label
        self.status_model = status_model
        self.status_model.model.add_listener(self._progress_status_changed)
def get_label(self):
return self.label
def get_checked(self):
return self.status_model.is_checked()
def set_checked(self, checked):
return self.label
def _progress_status_changed(self, new_status):
self._notify(self.index, new_status)
class ProgressCheckButton(Frame, Observable):
def __init__(self, parent, model, index, status_model):
Frame.__init__(self, parent)
Observable.__init__(self)
self.model = model
self.index = index
self.status_model = status_model
self.var = IntVar()
self.var.set(model.get_checked())
self.progress_var = IntVar()
self.progress_status = ProgressStatus.undefined
        self.check_button = Checkbutton(self, text=model.get_label(), variable=self.var, command=self._check_changed)
self.progress_bar = Progressbar(self, orient='horizontal', mode='indeterminate', variable=self.progress_var)
self.check_button.pack(side=LEFT, fill="both", expand=True)
self.model.add_listener(self._model_changed)
def _model_changed(self, new_status):
model_state = self.model.get_checked()
gui_state = self.var.get()
if model_state is not gui_state:
self.model.set_checked(gui_state)
def refresh_check(self):
if self.status_model.is_checked_force_reload():
self.check_button.select()
else:
self.check_button.deselect()
def is_checked(self):
return self.var.get()
def _progress_status_changed(self, new_status):
self._refresh_progress()
def _refresh_progress(self):
status = self.status_model.get_status()
if not status == self.progress_status:
if status == ProgressStatus.in_progress:
self.progress_bar.pack(side=RIGHT, fill="both", expand=True)
else:
self.progress_bar.pack_forget()
def _check_changed(self):
new_checked = self.var.get()
if new_checked is not self.model.get_checked():
self.model.set_checked(new_checked)
if new_checked is not self.status_model.is_checked():
self._notify(self.index, new_checked)
class SelectedItems(Observable):
def __init__(self):
Observable.__init__(self)
self._selected_items = []
@property
def selected_items(self):
return self._selected_items
@selected_items.setter
def selected_items(self, value):
if self.selected_items != value:
self._selected_items = value
self._notify(value)
class CheckStatus(Enum):
off = 0
on = 1
undefined = 2
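# Minimal usage sketch (illustrative only; this function is not called by the
# module). The surrounding Tk setup is assumed.
def _example_usage():
    from tkinter import Tk
    root = Tk()
    factory = ProgressListBoxModelFactory()
    box = LabeledProgressListBox(root, factory, "Scans")
    box.pack()
    # Filling the model notifies the list box, which rebuilds its rows.
    factory.get_list_model().set_list_items(
        [ProgressListBoxItemModel("scan-1"), ProgressListBoxItemModel("scan-2")])
    root.mainloop()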
| bsd-2-clause | 8,188,460,140,480,932,000 | 34.933099 | 131 | 0.630475 | false | 3.769856 | false | false | false |
itfootman/hackrepo | gitc_utils.py | 1 | 2668 | #
# Copyright (C) 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import os
import sys
import git_command
import git_config
# TODO (sbasi) - Remove this constant and fetch manifest dir from /gitc/.config
GITC_MANIFEST_DIR = '/usr/local/google/gitc/'
GITC_FS_ROOT_DIR = '/gitc/manifest-rw/'
NUM_BATCH_RETRIEVE_REVISIONID = 300
def _set_project_revisions(projects):
"""Sets the revisionExpr for a list of projects.
Because of the limit of open file descriptors allowed, length of projects
should not be overly large. Recommend calling this function multiple times
with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.
  @param projects: List of project objects to set the revisionExpr for.
"""
  # Retrieve the commit id for each project based on its current
  # revisionExpr, if it is not already a commit id.
project_gitcmds = [(
project, git_command.GitCommand(None,
['ls-remote',
project.remote.url,
project.revisionExpr],
capture_stdout=True, cwd='/tmp'))
for project in projects if not git_config.IsId(project.revisionExpr)]
for proj, gitcmd in project_gitcmds:
if gitcmd.Wait():
print('FATAL: Failed to retrieve revisionExpr for %s' % proj)
sys.exit(1)
proj.revisionExpr = gitcmd.stdout.split('\t')[0]
def generate_gitc_manifest(client_dir, manifest):
"""Generate a manifest for shafsd to use for this GITC client.
@param client_dir: GITC client directory to install the .manifest file in.
@param manifest: XmlManifest object representing the repo manifest.
"""
print('Generating GITC Manifest by fetching revision SHAs for each '
'project.')
index = 0
while index < len(manifest.projects):
_set_project_revisions(
manifest.projects[index:(index+NUM_BATCH_RETRIEVE_REVISIONID)])
index += NUM_BATCH_RETRIEVE_REVISIONID
# Save the manifest.
with open(os.path.join(client_dir, '.manifest'), 'w') as f:
manifest.Save(f)
| apache-2.0 | 6,277,069,641,981,856,000 | 38.235294 | 79 | 0.691904 | false | 3.923529 | false | false | false |
feist/pcs | pcs/lib/cib/test/test_constraint.py | 1 | 12376 | from functools import partial
from unittest import mock, TestCase
from lxml import etree
from pcs.common import report_codes
from pcs.lib.cib.constraint import constraint
from pcs.lib.errors import ReportItemSeverity as severities
from pcs.test.tools.assertions import(
assert_raise_library_error,
assert_xml_equal,
)
from pcs.test.tools.custom_mock import MockLibraryReportProcessor
from pcs.test.tools.assertions import (
assert_report_item_list_equal,
)
# pylint: disable=no-self-use, redundant-keyword-arg
def fixture_element(tag, _id):
element = mock.MagicMock()
element.tag = tag
element.attrib = {"id": _id}
return element
@mock.patch("pcs.lib.cib.constraint.constraint.find_parent")
@mock.patch("pcs.lib.cib.constraint.constraint.find_element_by_tag_and_id")
class FindValidResourceId(TestCase):
def setUp(self):
self.cib = "cib"
self.report_processor = MockLibraryReportProcessor()
self.find = partial(
constraint.find_valid_resource_id,
self.report_processor,
self.cib,
in_clone_allowed=False,
)
@staticmethod
def fixture_error_multiinstance(parent_type, parent_id):
return (
severities.ERROR,
report_codes.RESOURCE_FOR_CONSTRAINT_IS_MULTIINSTANCE,
{
"resource_id": "resourceA",
"parent_type": parent_type,
"parent_id": parent_id,
},
report_codes.FORCE_CONSTRAINT_MULTIINSTANCE_RESOURCE
)
@staticmethod
def fixture_warning_multiinstance(parent_type, parent_id):
return (
severities.WARNING,
report_codes.RESOURCE_FOR_CONSTRAINT_IS_MULTIINSTANCE,
{
"resource_id": "resourceA",
"parent_type": parent_type,
"parent_id": parent_id,
},
None
)
def test_return_same_id_when_resource_is_clone(self, mock_find_by_id, _):
mock_find_by_id.return_value = fixture_element("clone", "resourceA")
self.assertEqual("resourceA", self.find(_id="resourceA"))
def test_return_same_id_when_resource_is_master(self, mock_find_by_id, _):
mock_find_by_id.return_value = fixture_element("master", "resourceA")
self.assertEqual("resourceA", self.find(_id="resourceA"))
def test_return_same_id_when_resource_is_bundle(self, mock_find_by_id, _):
mock_find_by_id.return_value = fixture_element("bundle", "resourceA")
self.assertEqual("resourceA", self.find(_id="resourceA"))
def test_return_same_id_when_resource_is_standalone_primitive(
self, mock_find_by_id, mock_find_parent
):
mock_find_by_id.return_value = fixture_element("primitive", "resourceA")
mock_find_parent.return_value = None
self.assertEqual("resourceA", self.find(_id="resourceA"))
def test_refuse_when_resource_is_in_clone(
self, mock_find_by_id, mock_find_parent
):
mock_find_by_id.return_value = fixture_element("primitive", "resourceA")
mock_find_parent.return_value = fixture_element("clone", "clone_id")
assert_raise_library_error(
lambda: self.find(_id="resourceA"),
self.fixture_error_multiinstance("clone", "clone_id"),
)
def test_refuse_when_resource_is_in_master(
self, mock_find_by_id, mock_find_parent
):
mock_find_by_id.return_value = fixture_element("primitive", "resourceA")
mock_find_parent.return_value = fixture_element("master", "master_id")
assert_raise_library_error(
lambda: self.find(_id="resourceA"),
self.fixture_error_multiinstance("clone", "master_id"),
)
def test_refuse_when_resource_is_in_bundle(
self, mock_find_by_id, mock_find_parent
):
mock_find_by_id.return_value = fixture_element("primitive", "resourceA")
mock_find_parent.return_value = fixture_element("bundle", "bundle_id")
assert_raise_library_error(
lambda: self.find(_id="resourceA"),
self.fixture_error_multiinstance("bundle", "bundle_id"),
)
def test_return_resource_id_when_in_clone_allowed(
self, mock_find_by_id, mock_find_parent
):
mock_find_by_id.return_value = fixture_element("primitive", "resourceA")
mock_find_parent.return_value = fixture_element("clone", "clone_id")
self.assertEqual(
"resourceA",
self.find(in_clone_allowed=True, _id="resourceA")
)
assert_report_item_list_equal(
self.report_processor.report_item_list,
[
self.fixture_warning_multiinstance("clone", "clone_id"),
]
)
def test_return_resource_id_when_in_master_allowed(
self, mock_find_by_id, mock_find_parent
):
mock_find_by_id.return_value = fixture_element("primitive", "resourceA")
mock_find_parent.return_value = fixture_element("master", "master_id")
self.assertEqual(
"resourceA",
self.find(in_clone_allowed=True, _id="resourceA")
)
assert_report_item_list_equal(
self.report_processor.report_item_list,
[
self.fixture_warning_multiinstance("clone", "master_id"),
]
)
def test_return_resource_id_when_in_bundle_allowed(
self, mock_find_by_id, mock_find_parent
):
mock_find_by_id.return_value = fixture_element("primitive", "resourceA")
mock_find_parent.return_value = fixture_element("bundle", "bundle_id")
self.assertEqual(
"resourceA",
self.find(in_clone_allowed=True, _id="resourceA")
)
assert_report_item_list_equal(
self.report_processor.report_item_list,
[
self.fixture_warning_multiinstance("bundle", "bundle_id"),
]
)
class PrepareOptionsTest(TestCase):
def test_refuse_unknown_option(self):
assert_raise_library_error(
lambda: constraint.prepare_options(
("a", ), {"b": "c"}, mock.MagicMock(), mock.MagicMock()
),
(
severities.ERROR,
report_codes.INVALID_OPTIONS,
{
"option_names": ["b"],
"option_type": None,
"allowed": ["a", "id"],
"allowed_patterns": [],
}
),
)
def test_complete_id(self):
mock_create_id = mock.MagicMock()
mock_create_id.return_value = "new-id"
self.assertEqual({"id": "new-id"}, constraint.prepare_options(
("a",), {}, mock_create_id, mock.MagicMock()
))
    def test_has_no_side_effect_on_input_options(self):
mock_create_id = mock.MagicMock()
mock_create_id.return_value = "new-id"
options = {"a": "b"}
self.assertEqual(
{"id": "new-id", "a": "b"},
constraint.prepare_options(
("a",),
options,
mock_create_id, mock.MagicMock()
)
)
self.assertEqual({"a": "b"}, options)
def test_refuse_invalid_id(self):
class SomeException(Exception):
pass
mock_validate_id = mock.MagicMock()
mock_validate_id.side_effect = SomeException()
self.assertRaises(
SomeException,
lambda: constraint.prepare_options(
("a", ), {"id": "invalid"}, mock.MagicMock(), mock_validate_id
),
)
mock_validate_id.assert_called_once_with("invalid")
class CreateIdTest(TestCase):
@mock.patch(
"pcs.lib.cib.constraint.constraint.resource_set.extract_id_set_list"
)
@mock.patch("pcs.lib.cib.constraint.constraint.find_unique_id")
def test_create_id_from_resource_set_list(self, mock_find_id, mock_extract):
mock_extract.return_value = [["A", "B"], ["C"]]
mock_find_id.return_value = "some_id"
self.assertEqual(
"some_id",
constraint.create_id("cib", "PREFIX", "resource_set_list")
)
mock_extract.assert_called_once_with("resource_set_list")
mock_find_id.assert_called_once_with("cib", "pcs_PREFIX_set_A_B_set_C")
def fixture_constraint_section(return_value):
constraint_section = mock.MagicMock()
constraint_section.findall = mock.MagicMock()
constraint_section.findall.return_value = return_value
return constraint_section
@mock.patch("pcs.lib.cib.constraint.constraint.export_with_set")
class CheckIsWithoutDuplicationTest(TestCase):
def test_raises_when_duplicate_element_found(self, export_with_set):
export_with_set.return_value = "exported_duplicate_element"
element = mock.MagicMock()
element.tag = "constraint_type"
report_processor = MockLibraryReportProcessor()
assert_raise_library_error(
lambda: constraint.check_is_without_duplication(
report_processor,
fixture_constraint_section(["duplicate_element"]), element,
are_duplicate=lambda e1, e2: True,
export_element=constraint.export_with_set,
),
(
severities.ERROR,
report_codes.DUPLICATE_CONSTRAINTS_EXIST,
{
'constraint_info_list': ['exported_duplicate_element'],
'constraint_type': 'constraint_type'
},
report_codes.FORCE_CONSTRAINT_DUPLICATE
),
)
def test_success_when_no_duplication_found(self, export_with_set):
export_with_set.return_value = "exported_duplicate_element"
element = mock.MagicMock()
element.tag = "constraint_type"
#no exception raised
report_processor = MockLibraryReportProcessor()
constraint.check_is_without_duplication(
report_processor, fixture_constraint_section([]), element,
are_duplicate=lambda e1, e2: True,
export_element=constraint.export_with_set,
)
def test_report_when_duplication_allowed(self, export_with_set):
export_with_set.return_value = "exported_duplicate_element"
element = mock.MagicMock()
element.tag = "constraint_type"
report_processor = MockLibraryReportProcessor()
constraint.check_is_without_duplication(
report_processor,
fixture_constraint_section(["duplicate_element"]), element,
are_duplicate=lambda e1, e2: True,
export_element=constraint.export_with_set,
duplication_alowed=True,
)
assert_report_item_list_equal(
report_processor.report_item_list,
[
(
severities.WARNING,
report_codes.DUPLICATE_CONSTRAINTS_EXIST,
{
'constraint_info_list': ['exported_duplicate_element'],
'constraint_type': 'constraint_type'
},
)
]
)
class CreateWithSetTest(TestCase):
def test_put_new_constraint_to_constraint_section(self):
constraint_section = etree.Element("constraints")
constraint.create_with_set(
constraint_section,
"ticket",
{"a": "b"},
[{"ids": ["A", "B"], "options": {"c": "d"}}]
)
assert_xml_equal(etree.tostring(constraint_section).decode(), """
<constraints>
<ticket a="b">
<resource_set c="d" id="pcs_rsc_set_A_B">
<resource_ref id="A"/>
<resource_ref id="B"/>
</resource_set>
</ticket>
</constraints>
""")
def test_refuse_empty_resource_set_list(self):
constraint_section = etree.Element("constraints")
assert_raise_library_error(
lambda: constraint.create_with_set(
constraint_section,
"ticket",
{"a": "b"},
[]
),
(severities.ERROR, report_codes.EMPTY_RESOURCE_SET_LIST, {})
)
| gpl-2.0 | 6,365,796,591,236,348,000 | 36.165165 | 80 | 0.578378 | false | 3.912741 | true | false | false |
iw3hxn/LibrERP | account_vat_period_end_statement/wizard/vat_settlement.py | 1 | 17171 | # flake8: noqa
# -*- coding: utf-8 -*-
# Copyright 2017-19 Didotech srl (<http://www.didotech.com>)
# Andrei Levin <[email protected]>
# Antonio M. Vigliotti <[email protected]>
# Odoo-Italia.org Community
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from openerp.osv import fields, orm
import base64
import logging
import datetime
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT
from openerp.addons.l10n_it_ade.bindings.vat_settlement_v_18 import (
CTD_ANON, # Intestazione,; Comunicazione,
Comunicazione_IVP_Type,
DatiContabili_IVP_Type,
Fornitura,
Frontespizio_IVP_Type,
Intestazione_IVP_Type
)
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.DEBUG)
codice_fornitura = 'IVP18'
identificativo_software = 'Odoo.6.1.4.0.0'
class WizardVatSettlement(orm.TransientModel):
_name = "wizard.vat.settlement"
_columns = {
'data': fields.binary("File", readonly=True),
'name': fields.char('Filename', 32, readonly=True),
'state': fields.selection((
('create', 'create'), # choose
('get', 'get'), # get the file
)),
}
_defaults = {
'state': lambda *a: 'create',
}
def get_date_start_stop(self, statement, context=None):
date_start = False
date_stop = False
periods = statement.e_period_ids or statement.period_ids
for period in periods: # todo passare come parametro
if not date_start:
date_start = period.date_start
else:
if period.date_start < date_start:
date_start = period.date_start
if not date_stop:
date_stop = period.date_stop
else:
if period.date_stop > date_stop:
date_stop = period.date_stop
date_start = datetime.datetime.strptime(date_start,
DEFAULT_SERVER_DATE_FORMAT)
date_stop = datetime.datetime.strptime(date_stop,
DEFAULT_SERVER_DATE_FORMAT)
return date_start, date_stop
def get_taxable(self, cr, uid, statement, type, context=None):
"""
:param cr:
:param uid:
:param statement:
:param type: 'credit' or 'debit'
:param context:
:return: amount_taxable
"""
base_amount = 0.0
if type == 'credit':
credit_line_pool = self.pool.get('statement.credit.account.line')
for credit_line in statement.credit_vat_account_line_ids:
# if credit_line.amount != 0.0:
base_amount += credit_line.base_amount
elif type == 'debit':
debit_line_pool = self.pool.get('statement.debit.account.line')
for debit_line in statement.debit_vat_account_line_ids:
# if debit_line.amount != 0.0:
base_amount += debit_line.base_amount
return base_amount
@staticmethod
def italian_number(number):
return '{:.2f}'.format(number).replace('.', ',')
@staticmethod
def italian_date(dt):
if len(dt) == 8:
return dt[-2:] + dt[4:6] + dt[0:4]
elif len(dt) == 10:
return dt[-2:] + dt[5:7] + dt[0:4]
else:
return ''
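    # Examples (illustrative): italian_number(1234.5) -> '1234,50';
    # italian_date('2017-03-31') -> '31032017';
    # italian_date('20170331')   -> '31032017'.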
def export_vat_settlemet(self, cr, uid, ids, context=None):
# TODO: insert period verification
context = context or {}
model_data_obj = self.pool['ir.model.data']
statement_debit_account_line_obj = \
self.pool['statement.debit.account.line']
statement_credit_account_line_obj = \
self.pool['statement.credit.account.line']
trimestre = {
'3': '1',
'6': '2',
'9': '3',
'12': '4'
}
module_pool = self.pool.get('ir.module.module')
company_pool = self.pool.get('res.company')
ids = module_pool.search(
cr, uid, [('name', '=', 'account_vat_period_end_statement')])
if len(ids) == 0:
_logger.info('Invalid software signature.')
_logger.info('Please contact [email protected] '
'to obtain free valid software')
identificativo_software = ''
else:
ver = module_pool.browse(cr, uid,
ids[0]).installed_version
identificativo_software = 'Odoo' + ver
identificativo_software = identificativo_software.upper()
statement_pool = self.pool.get('account.vat.period.end.statement')
statement_ids = context.get('active_ids', False)
for statement in statement_pool.browse(cr,
uid,
statement_ids,
context=context):
progressivo_telematico = statement_pool.set_progressivo_telematico(
cr, uid, statement, context)
company_id = statement.company_id.id
company = company_pool.browse(cr, uid, company_id, context=context)
if company.partner_id.vat[:2].lower() == 'it':
vat = company.partner_id.vat[2:]
else:
vat = company.partner_id.vat
settlement = Fornitura()
settlement.Intestazione = (Intestazione_IVP_Type())
settlement.Intestazione.CodiceFornitura = codice_fornitura
_logger.debug(settlement.Intestazione.toDOM().toprettyxml(
encoding="UTF-8"))
if statement.type[0:3] != 'xml':
_logger.info('No electronic statement type!')
raise orm.except_orm(
'Error!',
'No electronic statement type!')
if not (statement.period_ids or statement.e_period_ids):
_logger.info('No period defined!')
raise orm.except_orm(
'Error!',
'No period defined!')
if not statement.soggetto_codice_fiscale:
_logger.info(
'Manca CF del contribuente!')
raise orm.except_orm(
'Errore!',
'Manca CF del contribuente!')
if len(statement.soggetto_codice_fiscale) != 11:
_logger.info(
'Il CF del dichiarante deve essere una PI di 11 cifre!')
raise orm.except_orm(
'Errore!',
'Il CF del dichiarante deve essere una PI di 11 cifre!')
if statement.soggetto_codice_fiscale != \
company.partner_id.vat[2:] and \
statement.soggetto_codice_fiscale != \
company.partner_id.fiscalcode:
_logger.info(
                    'CF contribuente diverso da CF azienda!')
raise orm.except_orm(
'Errore!',
                    'CF contribuente diverso da CF azienda!')
if not statement.dichiarante_codice_fiscale:
_logger.info(
'Manca CF del dichiarante!')
raise orm.except_orm(
'Errore!',
'Manca CF del dichiarante!')
if len(statement.dichiarante_codice_fiscale) != 16:
_logger.info(
'Il dichiarante deve essere PF con CF di 16 caratteri!')
raise orm.except_orm(
'Errore!',
'Il dichiarante deve essere PF con CF di 16 caratteri!!')
if not statement.codice_carica:
_logger.info(
'Manca codice carica del dichiarante!')
raise orm.except_orm(
'Errore!',
'Manca codice carica del dichiarante!')
if not statement.incaricato_trasmissione_codice_fiscale or \
not statement.incaricato_trasmissione_data_impegno:
_logger.info(
'Manca CF o data impegno incaricato alla trasmissione!')
raise orm.except_orm(
'Errore!',
'Manca CF o data impegno incaricato alla trasmissione!')
settlement.Comunicazione = (Comunicazione_IVP_Type())
settlement.Comunicazione.Frontespizio = (Frontespizio_IVP_Type())
settlement.Comunicazione.Frontespizio.FirmaDichiarazione = "1"
settlement.Comunicazione.Frontespizio.CodiceFiscale = \
statement.soggetto_codice_fiscale
settlement.Comunicazione.Frontespizio.CFIntermediario = \
statement.incaricato_trasmissione_codice_fiscale
if statement.incaricato_trasmissione_data_impegno:
settlement.Comunicazione.Frontespizio.DataImpegno = \
self.italian_date(
statement.incaricato_trasmissione_data_impegno)
settlement.Comunicazione.Frontespizio.FirmaIntermediario = "1"
settlement.Comunicazione.Frontespizio.ImpegnoPresentazione = "1"
if statement.dichiarante_codice_fiscale:
settlement.Comunicazione.Frontespizio.CFDichiarante = \
statement.dichiarante_codice_fiscale
if statement.codice_carica:
settlement.Comunicazione.Frontespizio.CodiceCaricaDichiarante = \
statement.codice_carica.code
date_start, date_stop = self.get_date_start_stop(statement, context=context)
settlement.Comunicazione.Frontespizio.AnnoImposta = str(
date_stop.year)
settlement.Comunicazione.Frontespizio.PartitaIVA = \
statement.soggetto_codice_fiscale
# settlement.Comunicazione.Frontespizio.PIVAControllante
# settlement.Comunicazione.Frontespizio.UltimoMese = str(date_period_end.month)
# settlement.Comunicazione.Frontespizio.LiquidazioneGruppo
# settlement.Comunicazione.Frontespizio.CodiceFiscaleSocieta
# settlement.Comunicazione.Frontespizio.FlagConferma
if identificativo_software:
settlement.Comunicazione.Frontespizio.\
IdentificativoProdSoftware = identificativo_software
_logger.debug(
settlement.Comunicazione.Frontespizio.toDOM().toprettyxml(
encoding="UTF-8"))
settlement.Comunicazione.DatiContabili = (DatiContabili_IVP_Type())
# We may have more than one modulo, but do we want it?
# modulo_period_end = datetime.datetime.strptime(statement.date,
# DEFAULT_SERVER_DATE_FORMAT)
modulo = CTD_ANON()
modulo.NumeroModulo = '1' # 1, 2, 3, 4, 5
            # <<<<< quarter_vat_period does not exist in 7.0 >>>>>
# if statement.period_ids[0].fiscalyear_id.quarter_vat_period:
# # trimestrale
# modulo.Trimestre = trimestre[str(modulo_period_end.month)]
# else:
# # mensile
# modulo.Mese = str(modulo_period_end.month)
if date_start.month == date_stop.month:
modulo.Mese = str(date_stop.month)
else:
if date_start.month in (1, 4, 7, 10) and \
date_stop.month in (3, 6, 9, 12):
modulo.Trimestre = trimestre[str(date_stop.month)]
else:
_logger.info(
'Undetermined quarter/month!')
raise orm.except_orm(
'Error!',
"Undetermined quarter/month!")
            # TODO: for companies subject to anti-mafia checks (the subcontractors); not set for now
# modulo.Subfornitura = "0"
            # TODO: optional: earthquake victims; not set for now
# modulo.EventiEccezionali =
modulo.TotaleOperazioniAttive = self.italian_number(
self.get_taxable(cr, uid, statement, 'debit', context)
)
modulo.TotaleOperazioniPassive = self.italian_number(
self.get_taxable(cr, uid, statement, 'credit', context)
)
iva_esigibile = 0
debit_account_line_ids = statement_debit_account_line_obj.search(
cr, uid, [('statement_id', '=', statement.id)])
for debit_account_line in statement_debit_account_line_obj.browse(
cr, uid, debit_account_line_ids, context):
iva_esigibile += debit_account_line.amount
            # NOTE: numeric format;
            # decimals must be separated with the ',' (comma) character
modulo.IvaEsigibile = self.italian_number(iva_esigibile)
iva_detratta = 0
credit_account_line_ids = statement_credit_account_line_obj.search(
cr, uid, [('statement_id', '=', statement.id)])
for credit_account_line in statement_credit_account_line_obj.\
browse(cr, uid, credit_account_line_ids, context):
iva_detratta += credit_account_line.amount
            # NOTE: numeric format;
            # decimals must be separated with the ',' (comma) character
modulo.IvaDetratta = self.italian_number(iva_detratta)
if iva_esigibile > iva_detratta:
iva_dovuta = iva_esigibile - iva_detratta
modulo.IvaDovuta = self.italian_number(iva_dovuta)
else:
iva_credito = iva_detratta - iva_esigibile
modulo.IvaCredito = self.italian_number(iva_credito)
            # TODO: left for later
# modulo.IvaDetratta = self.italian_number(iva_detratta)
# modulo.IvaCredito =
previous_debit = statement.previous_debit_vat_amount
if previous_debit:
modulo.DebitoPrecedente = self.italian_number(previous_debit)
previous_credit = statement.previous_credit_vat_amount
if previous_credit:
if date_start.month == 1:
modulo.CreditoAnnoPrecedente = self.italian_number(previous_credit)
else:
modulo.CreditoPeriodoPrecedente = self.italian_number(previous_credit)
            # Ask the user
# modulo.CreditoAnnoPrecedente
            # TODO: left for later
# modulo.VersamentiAutoUE
# modulo.CreditiImposta
# modulo.InteressiDovuti
# modulo.Acconto
if statement.authority_vat_amount > 0:
                # NOTE: numeric format; decimals must be separated from the integer part with the ',' (comma) character
modulo.ImportoDaVersare = self.italian_number(statement.authority_vat_amount)
elif statement.authority_vat_amount < 0:
                # NOTE: numeric format; decimals must be separated from the integer part with the ',' (comma) character
modulo.ImportoACredito = self.italian_number(-statement.authority_vat_amount)
settlement.Comunicazione.DatiContabili.Modulo.append(modulo)
_logger.debug(settlement.Comunicazione.DatiContabili.toDOM().toprettyxml(encoding="UTF-8"))
settlement.Comunicazione.identificativo = \
"%05d" % progressivo_telematico
vat_settlement_xml = settlement.toDOM().toprettyxml(encoding="UTF-8")
fn_name = 'IT%s_LI_%05d.xml' % (statement.soggetto_codice_fiscale,
progressivo_telematico)
attach_vals = {
'name': fn_name,
'datas_fname': fn_name,
'datas': base64.encodestring(vat_settlement_xml),
'res_model': 'account.vat.period.end.statement',
'res_id': statement.id
}
statement_pool.write(cr, uid, [statement.id],
{'progressivo_telematico': progressivo_telematico})
vat_settlement_attachment_out_id = self.pool[
'account.vat.settlement.attachment'].create(cr,
uid, attach_vals, context={})
view_rec = model_data_obj.get_object_reference(
cr, uid, 'account_vat_period_end_statement',
'view_vat_settlement_attachment_form')
if view_rec:
view_id = view_rec and view_rec[1] or False
return {
'view_type': 'form',
'name': "Export Liquidazione IVA",
'view_id': [view_id],
'res_id': vat_settlement_attachment_out_id,
'view_mode': 'form',
'res_model': 'account.vat.settlement.attachment',
'type': 'ir.actions.act_window',
'context': context
}
| agpl-3.0 | 7,779,389,733,822,867,000 | 42.915601 | 113 | 0.552152 | false | 3.731204 | true | false | false |
sunfall/giles | giles/games/seat.py | 1 | 1691 | # Giles: seat.py
# Copyright 2012 Phil Bordelon
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from giles.utils import Struct
class Seat(object):
"""A seat at a game. Seats can be named, be active or inactive, and
have players or not.
"""
def __init__(self, name):
self.display_name = name
self.name = name.lower()
self.active = False
self.player = None
self.player_name = "Empty!"
self.data = Struct()
def __repr__(self):
return self.display_name
def sit(self, player, activate=True):
# By default, sitting a player down in a seat activates that
# seat. That can be overridden.
if not self.player:
self.player = player
self.player_name = repr(player)
if activate:
self.active = True
return True
return False
def stand(self):
if self.player:
self.player_name = repr(self.player) + " (absentee)"
self.player = None
return True
return False
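# Minimal usage sketch (illustrative; 'player' would normally be a Giles
# player object rather than a string):
if __name__ == "__main__":
    seat = Seat("North")
    seat.sit("alice")   # seats the player; activates the seat by default
    assert seat.active
    seat.stand()        # the seat remembers the player as an absentee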
| agpl-3.0 | -5,881,379,406,293,452,000 | 29.745455 | 74 | 0.640449 | false | 4.104369 | false | false | false |
MaxTyutyunnikov/lino | lino/core/auth.py | 1 | 11109 | # -*- coding: UTF-8 -*-
## Copyright 2010-2013 Luc Saffre
## This file is part of the Lino project.
## Lino is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 3 of the License, or
## (at your option) any later version.
## Lino is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
## You should have received a copy of the GNU General Public License
## along with Lino; if not, see <http://www.gnu.org/licenses/>.
"""
Lino's authentification middleware
"""
from __future__ import unicode_literals
import os
import logging
logger = logging.getLogger(__name__)
from django.utils.importlib import import_module
from django.utils.translation import ugettext_lazy as _
from django.core import exceptions
from django.utils import translation
from django.conf import settings
from django import http
from lino.core import constants
from lino.core.perms import AnonymousUser
class AuthMiddleWareBase(object):
"""
Common base class for
:class:`RemoteUserMiddleware`,
:class:`SessionsUserMiddleware`
and
:class:`NoUserMiddleware`.
"""
def get_user_from_request(self, request):
raise NotImplementedError
def process_request(self, request):
#~ print 20130313, request.session.get('username')
settings.SITE.startup()
"""
first request will trigger site startup to load UserProfiles
"""
user = self.get_user_from_request(request)
self.on_login(request,user)
class NOT_NEEDED:
pass
#~ @classmethod
def authenticate(cls, username, password=NOT_NEEDED):
#~ logger.info("20130923 authenticate %s,%s" % (username,password))
if not username:
return AnonymousUser.instance()
"""
20120110 : Alicia once managed to add a space char in front of
her username log in the login dialog.
Apache let her in as " alicia".
"""
username = username.strip()
try:
user = settings.SITE.user_model.objects.get(username=username)
if user.profile is None:
logger.info("Could not authenticate %s : user has no profile",username)
return None
if password != cls.NOT_NEEDED:
if not user.check_password(password):
logger.info("Could not authenticate %s : password mismatch",username)
return None
#~ logger.info("20130923 good password for %s",username)
#~ else:
#~ logger.info("20130923 no password needed for %s",username)
return user
except settings.SITE.user_model.DoesNotExist,e:
logger.info("Could not authenticate %s : no such user",username)
return None
def on_login(self,request,user):
"""
The method which is applied when the user has been determined.
On multilingual sites,
if URL_PARAM_USER_LANGUAGE is present it overrides user.language.
"""
#~ logger.info("20130923 on_login(%s)" % user)
request.user = user
user_language = user.language or settings.SITE.get_default_language()
if request.method == 'GET':
rqdata = request.GET
elif request.method in ('PUT','DELETE'):
rqdata = http.QueryDict(request.body) # raw_post_data before Django 1.4
elif request.method == 'POST':
rqdata = request.POST
else:
# e.g. OPTIONS, HEAD
if len(settings.SITE.languages) > 1:
translation.activate(user_language)
request.LANGUAGE_CODE = translation.get_language()
#~ logger.info("20121205 on_login %r",translation.get_language())
request.requesting_panel = None
request.subst_user = None
return
#~ else: # DELETE
#~ request.subst_user = None
#~ request.requesting_panel = None
#~ return
if len(settings.SITE.languages) > 1:
user_language = rqdata.get(constants.URL_PARAM_USER_LANGUAGE,user_language)
translation.activate(user_language)
request.LANGUAGE_CODE = translation.get_language()
su = rqdata.get(constants.URL_PARAM_SUBST_USER,None)
if su is not None:
if su:
try:
su = settings.SITE.user_model.objects.get(id=int(su))
#~ logger.info("20120714 su is %s",su.username)
except settings.SITE.user_model.DoesNotExist, e:
su = None
else:
su = None # e.g. when it was an empty string "su="
request.subst_user = su
request.requesting_panel = rqdata.get(constants.URL_PARAM_REQUESTING_PANEL,None)
#~ logger.info("20121228 subst_user is %r",request.subst_user)
#~ if request.subst_user is not None and not isinstance(request.subst_user,settings.SITE.user_model):
#~ raise Exception("20121228")
class RemoteUserMiddleware(AuthMiddleWareBase):
"""
Middleware automatically installed by
:meth:`get_middleware_classes <lino.site.Site.get_middleware_classes>`
when both
:setting:`remote_user_header` and :setting:`user_model`
are not empty.
This does the same as
`django.contrib.auth.middleware.RemoteUserMiddleware`,
but in a simplified manner and without using Sessions.
It also activates the User's language, if that field is not empty.
Since it will run *after*
`django.contrib.auth.middleware.RemoteUserMiddleware`
(at least if you didn't change :meth:`lino.Lino.get_middleware_classes`),
it will override any browser setting.
"""
def get_user_from_request(self, request):
username = request.META.get(
settings.SITE.remote_user_header,settings.SITE.default_user)
if not username:
#~ msg = "Using remote authentication, but no user credentials found."
#~ raise exceptions.PermissionDenied(msg)
raise Exception("Using remote authentication, but no user credentials found.")
user = self.authenticate(username)
if user is None:
#~ logger.info("20130514 Unknown username %s from request %s",username, request)
#~ raise Exception(
#~ raise exceptions.PermissionDenied("Unknown or inactive username %r. Please contact your system administrator."
#~ logger.info("Unknown or inactive username %r.",username)
raise exceptions.PermissionDenied()
return user
class NoUserMiddleware(AuthMiddleWareBase):
"""
Middleware automatically installed by
:meth:`get_middleware_classes <lino.site.Site.get_middleware_classes>`
when :setting:`user_model` is None.
"""
def get_user_from_request(self, request):
return AnonymousUser.instance()
class SessionUserMiddleware(AuthMiddleWareBase):
"""
Middleware automatically installed by
:meth:`get_middleware_classes <lino.site.Site.get_middleware_classes>`
when
:setting:`remote_user_header` is None
and :setting:`user_model` not.
"""
def get_user_from_request(self, request):
#~ logger.info("20130923 get_user_from_request(%s)" % request.session.items())
user = self.authenticate(request.session.get('username'),
request.session.get('password'))
if user is None:
#~ logger.info("20130923 Login failed from session %s", request.session)
user = AnonymousUser.instance()
return user
class LDAPAuthMiddleware(SessionUserMiddleware):
"""
Middleware automatically installed by
:meth:`get_middleware_classes <lino.site.Site.get_middleware_classes>`
when
- :setting:`user_model` is not None
- :setting:`remote_user_header` is None
- :setting:`ldap_auth_server` is not None
Using this requires
`activedirectory <https://github.com/theatlantic/python-active-directory>`_.
Thanks to Josef Kejzlar for the initial implementation.
"""
def __init__(self):
from activedirectory import Client, Creds
from activedirectory.core.exception import Error
server_spec = settings.SITE.ldap_auth_server
if isinstance(server_spec,basestring):
server_spec = server_spec.split()
self.domain = server_spec[0]
self.server = server_spec[1]
#~ domain = 'DOMAIN_NAME'
#~ server = 'SERVER_DNS'
        self.creds = Creds(self.domain)
def check_password(self,username, password):
try:
self.creds.acquire(username, password, server=self.server)
return True
except Exception as e:
pass
return False
    def authenticate(self, username, password=SessionUserMiddleware.NOT_NEEDED, from_session=False):
        if not from_session and username and password != SessionUserMiddleware.NOT_NEEDED:
            if not self.check_password(username, password):
                return None
        return SessionUserMiddleware.authenticate(self, username, SessionUserMiddleware.NOT_NEEDED)
def get_user_from_request(self, request):
user = self.authenticate(request.session.get('username'),
request.session.get('password'), True)
if user is None:
logger.debug("Login failed from session %s", request.session)
user = AnonymousUser.instance()
return user
def get_auth_middleware():
if settings.SITE.auth_middleware is None:
return AuthMiddleWareBase
module, obj = settings.SITE.auth_middleware.rsplit('.', 1)
module = import_module(module)
return getattr(module, obj)
def authenticate(*args, **kwargs):
"""
Needed by the ``/auth`` view (:class:`lino.ui.views.Authenticate`).
Called when the Login window of the web interface is confirmed.
"""
middleware = get_auth_middleware()
return middleware().authenticate(*args, **kwargs)
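# Usage sketch (illustrative only): the /auth view is assumed to do roughly
#
#   user = authenticate(username, password)
#   if user is None:
#       ... report login failure ...
#   else:
#       request.session['username'] = username
#       request.session['password'] = password
#
# after which SessionUserMiddleware picks the credentials up from the session
# on every subsequent request.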
| gpl-3.0 | 8,390,691,697,609,771,000 | 33.155063 | 126 | 0.596543 | false | 4.499392 | false | false | false |
nicoechaniz/IPP | bdd/features/steps/completar_muestra.py | 1 | 3130 | # -*- coding: utf-8 -*-
# IPP, web platform for the Índice de Precios Popular
# Copyright (c) 2016 Nicolás Echániz and contributors.
#
# This file is part of IPP
#
# IPP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from time import sleep
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from behave import *
from utils import seleccionar_primera_opcion
from ipp.relevamiento.constants import RELEVADOR
from ipp.relevamiento.factories import JerarquizacionMarcaFactory
from ipp.relevamiento.models import Muestra, PlanillaDeRelevamiento, Perfil, ProductoConMarca
@given('los productos en la planilla tienen marcas establecidas')
def impl(context):
planilla = PlanillaDeRelevamiento.objects.last()
producto_ids = [p.id for p in planilla.planilla_modelo.productos.all()]
for p_id in producto_ids:
producto_con_marca = ProductoConMarca.objects.get(producto_generico__id=p_id,
marca=context.MARCA_POR_DEFECTO)
JerarquizacionMarcaFactory(tipo_marca="economica",
planilla_de_relevamiento=planilla,
producto_con_marca=producto_con_marca)
@when('selecciono la Muestra')
def impl(context):
muestra = Muestra.objects.last()
url = reverse("relevamiento:editar_muestra",
kwargs={"muestra_id": muestra.id})
context.browser.click_link_by_href(url)
@when('establezco el precio para un producto')
def impl(context):
context.browser.find_by_css('span.glyphicon').first.click()
    # when behave_browser is a real browser, the animation that shows the modal takes a moment
sleep(1)
context.browser.fill('precio', 112)
context.browser.find_by_name('guardar_precio').first.click()
@then('la planilla refleja el precio cargado')
def impl(context):
ocurrencia = context.browser.find_by_css('td.success')[1].html.find("112")
assert ocurrencia >= 0
@then('si edito el precio cargado')
def impl(context):
context.browser.find_by_css('span.glyphicon').first.click()
    # when behave_browser is a real browser, the animation that shows the modal takes a moment
sleep(1)
context.browser.fill('precio', 116)
context.browser.find_by_name('guardar_precio').first.click()
@then('la planilla refleja el nuevo precio')
def impl(context):
ocurrencia = context.browser.find_by_css('td.success')[1].html.find("116")
assert ocurrencia >= 0
| agpl-3.0 | 8,816,208,493,711,927,000 | 40.666667 | 93 | 0.70336 | false | 3.310381 | false | false | false |
repleo/bounca | api/serializers.py | 1 | 5784 | """Serializers for Certificate API"""
import uuid
from django.contrib.auth import password_validation
from rest_framework import serializers
from x509_pki.models import Certificate, DistinguishedName
class DistinguishedNameSerializer(serializers.ModelSerializer):
class Meta:
fields = (
'commonName',
'countryName',
'stateOrProvinceName',
'localityName',
'organizationName',
'organizationalUnitName',
'emailAddress',
'subjectAltNames')
model = DistinguishedName
class CertificateSerializer(serializers.ModelSerializer):
dn = DistinguishedNameSerializer()
passphrase_in = serializers.CharField(
max_length=200,
required=False,
allow_null=True,
allow_blank=True)
passphrase_out = serializers.CharField(
max_length=200,
required=False,
allow_null=True,
allow_blank=True)
passphrase_out_confirmation = serializers.CharField(
max_length=200, required=False, allow_null=True, allow_blank=True)
class Meta:
fields = (
'id',
'owner',
'shortname',
'name',
'parent',
'cert_path',
'type',
'dn',
'created_at',
'expires_at',
'revoked_at',
'days_valid',
'expired',
'revoked',
'crl_distribution_url',
'ocsp_distribution_host',
'passphrase_in',
'passphrase_out',
'passphrase_out_confirmation')
model = Certificate
extra_kwargs = {
'passphrase_out': {
'write_only': True}, 'passphrase_out_confirmation': {
'write_only': True}, 'passphrase_in': {
'write_only': True}}
def validate_passphrase_out(self, passphrase_out):
if passphrase_out:
password_validation.validate_password(
passphrase_out, self.instance)
return passphrase_out
return None
def validate_passphrase_in(self, passphrase_in):
if passphrase_in:
if not self.initial_data.get('parent'):
raise serializers.ValidationError(
"You should provide a parent certificate if you provide a passphrase in")
parent = Certificate.objects.get(
pk=self.initial_data.get('parent'))
parent.passphrase_in = passphrase_in
if not parent.is_passphrase_valid():
raise serializers.ValidationError(
"Passphrase incorrect. Not allowed to sign your certificate")
return passphrase_in
return None
def validate_passphrase_out_confirmation(
self, passphrase_out_confirmation):
if passphrase_out_confirmation:
passphrase_out = self.initial_data.get("passphrase_out")
if passphrase_out and passphrase_out_confirmation and passphrase_out != passphrase_out_confirmation:
raise serializers.ValidationError(
"The two passphrase fields didn't match.")
password_validation.validate_password(
passphrase_out_confirmation, self.instance)
return passphrase_out_confirmation
return None
def validate(self, data):
shortname = data.get("shortname")
cert_type = data.get("type")
if Certificate.objects.filter(
shortname=shortname,
type=cert_type,
revoked_uuid=0).count() > 0:
raise serializers.ValidationError(
dict(
Certificate.TYPES)[cert_type] +
" \"" +
shortname +
"\" already exists.")
return data
def create(self, validated_data):
dn_data = validated_data.pop('dn')
dn = DistinguishedName.objects.create(**dn_data)
certificate = Certificate.objects.create(dn=dn, **validated_data)
return certificate
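# Example input for CertificateSerializer (illustrative only; values and the
# concrete 'type' choice are made up -- see Certificate.TYPES for real ones):
#
#   {
#     "shortname": "rootca",
#     "name": "Example Root CA",
#     "type": "<one of Certificate.TYPES>",
#     "dn": {"commonName": "Example Root CA", "countryName": "NL",
#            "organizationName": "Example Org"},
#     "passphrase_out": "...",
#     "passphrase_out_confirmation": "..."
#   }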
class CertificateRevokeSerializer(serializers.ModelSerializer):
passphrase_in = serializers.CharField(max_length=200, required=True)
class Meta:
fields = ('passphrase_in',)
model = Certificate
extra_kwargs = {'passphrase_in': {'write_only': True}}
def validate_passphrase_in(self, passphrase_in):
if passphrase_in:
self.instance.parent.passphrase_in = passphrase_in
if not self.instance.parent.is_passphrase_valid():
raise serializers.ValidationError(
"Passphrase incorrect. Not allowed to revoke your certificate")
return passphrase_in
return None
def update(self, instance, validated_data):
instance.passphrase_in = validated_data['passphrase_in']
instance.delete()
return instance
class CertificateCRLSerializer(serializers.ModelSerializer):
passphrase_in = serializers.CharField(max_length=200, required=True)
class Meta:
fields = ('passphrase_in',)
model = Certificate
extra_kwargs = {'passphrase_in': {'write_only': True}}
def validate_passphrase_in(self, passphrase_in):
if passphrase_in:
self.instance.passphrase_in = passphrase_in
if not self.instance.is_passphrase_valid():
raise serializers.ValidationError(
"Passphrase incorrect. No permission to create CRL File")
return passphrase_in
return None
def update(self, instance, validated_data):
instance.passphrase_in = validated_data['passphrase_in']
instance.generate_crl()
return instance
| apache-2.0 | -8,043,028,781,522,893,000 | 33.023529 | 112 | 0.592669 | false | 4.772277 | false | false | false |
exepulveda/swfc | python/clustering_pca_ds4.py | 1 | 2204 | import numpy as np
import pickle
import logging
import argparse
import csv
import matplotlib as mpl
mpl.use('agg')
from sklearn.preprocessing import StandardScaler
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA
from sklearn.metrics import silhouette_score
from cluster_utils import create_clusters_dict, recode_categorical_values
from plotting import scatter_clusters
import matplotlib.pyplot as plt
import clusteringlib as cl
from case_study_2d import attributes,setup_case_study,setup_distances
if __name__ == "__main__":
filename = 'ds4'
X = np.loadtxt("../data/{dataset}.csv".format(dataset=filename),skiprows=1,delimiter=",")
locations = X[:,0:2]
    data = X[:,2:6] # 0,1,2,5 are continuous
true_clusters = X[:,6]
N,ND = data.shape
    # now all are continuous variables
var_types = np.ones(ND)
seed = 1634120
np.random.seed(seed)
standadize = StandardScaler()
data_scaled = standadize.fit_transform(data)
scale = standadize.scale_
ND_PCA = 2
pca = PCA(n_components=ND_PCA,whiten=True)
pca_X = pca.fit_transform(data_scaled)
data_F = np.asfortranarray(data,dtype=np.float32)
for NC in range(2,11):
clustering_pca = KMeans(n_clusters=NC)
clusters_pca = clustering_pca.fit_predict(pca_X)
#print("Calculating centroids")
centroids_F = np.asfortranarray(np.empty((NC,ND)),dtype=np.float32)
for k in range(NC):
indices = np.where(clusters_pca == k)[0]
centroids_F[k,:] = np.mean(data[indices,:],axis=0)
#print(k,len(indices)) #,centroids_F[k,:])
#PCA
cl.distances.sk_setup(np.asfortranarray(np.float32(scale)))
cl.distances.set_variables(np.asfortranarray(np.int32(var_types)),False)
clusters = np.asfortranarray(clusters_pca,dtype=np.int8)
weights = np.asfortranarray(np.ones((NC,ND),dtype=np.float32)/ ND)
ret_pca = cl.clustering.dbi_index(centroids_F,data_F,clusters,weights)
ret_sill= cl.clustering.silhouette_index(data_F,clusters,weights)
print('2D PCA',NC,ret_pca,ret_sill,sep=',')
cl.distances.reset()
| gpl-3.0 | -6,894,974,080,485,650,000 | 30.042254 | 93 | 0.663793 | false | 3.299401 | false | false | false |
gooofy/HTMLTerminal | HTMLTerminal.py | 1 | 10940 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2015 Guenter Bartsch
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
# OpenGL ES based HTML terminal for RaspberryPi
#
import ctypes
import time
import math
import os
import sys
import platform
import datetime
from base64 import b64decode
import traceback
import threading
import cairo
import zmq
import json
import ConfigParser
from os.path import expanduser
from Platform import pi_version
from temperature import measure_temperatures
from logger import ldebug, linfo, lerror, set_loglevel, LOG_DEBUG, LOG_INFO
import robinson
LED_UPDATE = 50
TEMP_UPDATE = 100
def hal_comm (socket, cmd, arg):
reply = None
try:
rq = json.dumps ([cmd, arg])
ldebug ("hal_comm: sending %s" % rq)
socket.send (rq)
# Get the reply.
message = socket.recv()
reply = json.loads(message)
except:
traceback.print_exc()
return reply
def _load_resource (resourcefn):
global socket
return b64decode(hal_comm (socket, 'LOAD_RESOURCE', resourcefn))
DRAW_SPEED = 32
SCMD_SET_SOURCE_RGBA = 1
SCMD_PAINT = 2
SCMD_SELECT_FONT_FACE = 3
SCMD_SET_FONT_SIZE = 4
SCMD_MOVE_TO = 5
SCMD_SHOW_TEXT = 6
SCMD_REL_LINE_TO = 7
SCMD_CLOSE_PATH = 8
SCMD_FILL = 9
SCMD_SET_LINE_WIDTH = 10
SCMD_SAVE = 11
SCMD_RESTORE = 12
SCMD_SET_SOURCE = 13
SCMD_CLIP = 14
SCMD_SET_SOURCE_SURFACE = 15
def text_extents(self, font_face, font_size, text):
self.ctx.select_font_face (font_face)
self.ctx.set_font_size (font_size)
return self.ctx.text_extents (text)
def font_extents(self, font_face, font_size):
self.ctx.select_font_face (font_face)
self.ctx.set_font_size (font_size)
return self.ctx.font_extents ()
class HAL(object):
def __init__(self, gfx):
self.gfx = gfx
self.ctx = gfx.get_cairo_ctx()
self.width = gfx.width
self.height = gfx.height
self.scene = []
self.coffset = 0
print "HAL.__init__() done."
#
# anim scene support stuff in a cairo context lookalike way
#
def scene_reset(self, counter):
self.scene = []
self.coffset = counter
def set_source_rgba (self, r, g, b, a):
self.scene.append ( (SCMD_SET_SOURCE_RGBA, r, g, b, a) )
def paint (self):
self.scene.append ( (SCMD_PAINT, ) )
def select_font_face (self, font_face):
self.ctx.select_font_face (font_face)
self.scene.append ( (SCMD_SELECT_FONT_FACE, font_face) )
def set_font_size (self, font_size):
self.ctx.set_font_size (font_size)
self.scene.append ( (SCMD_SET_FONT_SIZE, font_size) )
def set_line_width (self, w):
self.scene.append ( (SCMD_SET_LINE_WIDTH, w) )
def move_to (self, x, y):
self.scene.append ( (SCMD_MOVE_TO, x, y) )
def show_text (self, txt):
self.scene.append ( (SCMD_SHOW_TEXT, txt) )
def rel_line_to (self, x, y):
self.scene.append ( (SCMD_REL_LINE_TO, x, y) )
def close_path (self):
self.scene.append ( (SCMD_CLOSE_PATH,) )
def fill (self):
self.scene.append ( (SCMD_FILL,) )
def rectangle (self, x, y, w, h):
self.move_to (x, y)
self.rel_line_to (w, 0)
self.rel_line_to (0, h)
self.rel_line_to (-w, 0)
self.close_path()
def set_source (self, img):
self.scene.append ( (SCMD_SET_SOURCE, img) )
def set_source_surface (self, img, x, y):
self.scene.append ( (SCMD_SET_SOURCE_SURFACE, img, x, y) )
def clip (self):
self.scene.append ( (SCMD_CLIP,) )
def font_extents(self):
return self.ctx.font_extents()
def scene_html (self, html, css):
html = robinson.html(html, css, self.width, _load_resource, text_extents, font_extents, self)
html.render (self)
def scene_draw(self, counter):
#
# cairo
#
self.ctx.set_operator (cairo.OPERATOR_OVER)
drawlimit = (counter - self.coffset) * DRAW_SPEED
# render scene by executing commands
for t in self.scene:
drawlimit -= 1
if drawlimit <= 0:
break
#print "SCMD: %s" % repr(t)
scmd = t[0]
if scmd == SCMD_SET_SOURCE_RGBA:
self.ctx.set_source_rgba (t[1], t[2], t[3], t[4])
elif scmd == SCMD_PAINT:
self.ctx.paint()
elif scmd == SCMD_SELECT_FONT_FACE:
self.ctx.select_font_face (t[1])
elif scmd == SCMD_SET_FONT_SIZE:
self.ctx.set_font_size (t[1])
elif scmd == SCMD_MOVE_TO:
self.ctx.move_to (t[1], t[2])
elif scmd == SCMD_SHOW_TEXT:
self.ctx.show_text (t[1][:drawlimit])
drawlimit -= len(t[1])
elif scmd == SCMD_REL_LINE_TO:
self.ctx.rel_line_to (t[1], t[2])
elif scmd == SCMD_CLOSE_PATH:
self.ctx.close_path()
elif scmd == SCMD_FILL:
self.ctx.fill()
elif scmd == SCMD_SET_LINE_WIDTH:
self.ctx.set_line_width (t[1])
elif scmd == SCMD_SAVE:
self.ctx.save()
elif scmd == SCMD_RESTORE:
self.ctx.restore()
elif scmd == SCMD_SET_SOURCE:
self.ctx.set_source(t[1])
elif scmd == SCMD_CLIP:
self.ctx.clip()
elif scmd == SCMD_SET_SOURCE_SURFACE:
self.ctx.set_source_surface(t[1], t[2], t[3])
self.gfx.swap_buffers()
def update_led():
if USE_X11:
return
dt = datetime.datetime.now()
led.led_write (dt.strftime("%H%M"))
class input_handler (object):
def _process_events(self):
try:
key = self.inp.process_events()
if key is not None:
hal_comm (self.socket, 'KEYPRESS', key)
return True
except:
traceback.print_exc()
lerror("Input handler: EXCEPTION CAUGHT: %s" % traceback.format_exc())
return False
def _input_loop(self):
while True:
ldebug ("Input handler: _linput_loop iter")
if not self._process_events():
time.sleep(0.1)
else:
ldebug ("Input handler: INPUT EVENT HANDLED")
def process_events(self):
"""public function to be called regularly, in effect on non-threaded X11 only"""
global USE_X11
if not USE_X11:
return False
return self._process_events()
def __init__(self, inp):
global USE_X11
self.inp = inp
linfo("Input handler: connecting to server...")
self.context = zmq.Context()
self.socket = self.context.socket(zmq.REQ)
self.socket.connect ("tcp://%s:%s" % (host_getty, port_getty))
if USE_X11:
return
# on rpi we handle input in separate thread for low latency
linfo("Input handler: running on pi -> starting input thread")
self.thread = threading.Thread (target=self._input_loop)
self.thread.setDaemon(True)
self.thread.start()
#
# main
#
USE_X11 = pi_version() == None
linfo ("Using X11: %s " % repr(USE_X11))
#
# load config, set up global variables
#
home_path = expanduser("~")
config = ConfigParser.RawConfigParser()
config.read("%s/%s" % (home_path, ".halrc"))
host_getty = config.get("zmq", "host")
port_getty = config.get("zmq", "port_getty")
port_gettyp = config.get("zmq", "port_gettyp")
sensor_inside = config.get("term", "sensor_inside")
sensor_outside = config.get("term", "sensor_outside")
term_location = config.get("term", "location")
# command line
if len(sys.argv) == 2 and sys.argv[1] == '-d':
set_loglevel(LOG_DEBUG)
else:
set_loglevel(LOG_INFO)
if not USE_X11:
import led
from PiGraphics import PiGraphics
from PiInput import PiInput
gfx = PiGraphics ()
inp = PiInput ()
else:
from X11Graphics import X11Graphics
from X11Input import X11Input
gfx = X11Graphics (name = "HAL 9000")
inp = X11Input (gfx.xDisplay)
#
# zmq connection to getty
#
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect ("tcp://%s:%s" % (host_getty, port_getty))
# subscribe to broadcasts
socket_sub = context.socket(zmq.SUB)
socket_sub.connect ("tcp://%s:%s" % (host_getty, port_gettyp))
# messages we're interested in
socket_sub.setsockopt(zmq.SUBSCRIBE, 'DISPLAY_HTML')
# set up poller so we can do timeouts when waiting for messages
poller = zmq.Poller()
poller.register(socket_sub, zmq.POLLIN)
#
# setup rendering engine + display
#
linfo("Setup rendering engine + display ...")
hal = HAL(gfx)
hal_comm (socket, 'TERM_BOOT', measure_temperatures(term_location, sensor_inside, sensor_outside))
update_led()
#
# input handler
#
linfo("Launching input handler...")
inp_handler = input_handler(inp)
#
# main loop
#
linfo("Starting main loop.")
quit = False
counter = 0
while not quit:
if not inp_handler.process_events():
# check for broadcast messages
socks = poller.poll(10)
if len(socks) > 0:
for s,e in socks:
cmd, data = s.recv().split(' ', 1)
data = json.loads(data)
ldebug("CMD is %s" % cmd)
if cmd == 'DISPLAY_HTML':
ldebug("display html, counter=%d" % counter)
job_html, job_css, job_effect = data
try:
hal.scene_reset (0)
counter = 0 if job_effect == 1 else 32768
hal.scene_html (job_html, job_css)
except:
traceback.print_exc()
hal.scene_draw (counter)
counter += 1
if counter % LED_UPDATE == 0:
update_led()
if counter % TEMP_UPDATE == 0:
hal_comm (socket, 'TEMPERATURE', measure_temperatures(term_location, sensor_inside, sensor_outside))
| lgpl-3.0 | -8,675,675,495,578,616,000 | 24.560748 | 112 | 0.574497 | false | 3.382808 | true | false | false |
jawsper/modularirc | src/modularirc/modules/base.py | 1 | 1738 | import logging
class BaseModule(object):
def __init__(self, manager, has_commands=True, admin_only=False):
self.bot = manager.bot
self.has_commands = has_commands
self.admin_only = admin_only
self.module_name = self.__module__.split('.')[-1]
logging.info('Module {0} __init__'.format(self.module_name))
self.start()
def __del__(self):
logging.info('Module {0} __del__'.format(self.module_name))
self.stop()
def enable(self):
self.start()
def disable(self):
self.stop()
def start(self):
pass
def stop(self):
pass
def get_cmd_list(self, prefix='cmd_'):
return ['!{0}'.format(cmd[len(prefix):]) for cmd in dir(self) if cmd.startswith(prefix)]
def has_cmd(self, cmd, prefix='cmd_'):
return hasattr(self, '{}{}'.format(prefix, cmd))
def get_cmd(self, cmd, prefix='cmd_'):
return getattr(self, '{}{}'.format(prefix, cmd))
def get_admin_cmd_list(self):
return self.get_cmd_list(prefix='admin_cmd_')
def has_admin_cmd(self, cmd):
return self.has_cmd(cmd, prefix='admin_cmd_')
def get_admin_cmd(self, cmd):
return self.get_cmd(cmd, prefix='admin_cmd_')
# methods that directly call the bot
def notice(self, target, message):
self.bot.notice(target, message)
def privmsg(self, target, message):
self.bot.privmsg(target, message)
def get_config(self, key, default=None):
return self.bot.get_config(self.module_name, key, default)
def set_config(self, key, value):
self.bot.set_config(self.module_name, key, value)
def get_module(self, name):
return self.bot.get_module(name)
| mit | 6,261,652,994,644,733,000 | 27.491803 | 96 | 0.601266 | false | 3.511111 | false | false | false |
vitriolik/Asteroids2 | asteroids.py | 1 | 3039 | import asteroid
import math
import pygame
from pygame.locals import *
import random
import ship
import sys
'''Pygame constants'''
SCR_WIDTH, SCR_HEIGHT = 640, 480
FPS = 30
'''Misc stff'''
starfield = []
NUM_STARS = 45
asteroids = []
NUM_ASTEROIDS = 3
'''Pygame init'''
pygame.init()
fps_timer = pygame.time.Clock()
screen = pygame.display.set_mode((SCR_WIDTH, SCR_HEIGHT))
player = ship.Ship(SCR_WIDTH, SCR_HEIGHT)
def init_starfield():
global starfield
for i in range(NUM_STARS):
x = random.random() * SCR_WIDTH
y = random.random() * SCR_HEIGHT
starfield.insert(i, (x,y))
init_starfield()
def init_asteroids():
for i in range(NUM_ASTEROIDS):
asteroids.append(asteroid.Asteroid(SCR_WIDTH, SCR_HEIGHT))
init_asteroids()
first_pass = True
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if event.type == KEYDOWN:
if event.key == K_RIGHT or event.key == K_d:
player.rotate_right = True
elif event.key == K_LEFT or event.key == K_a:
player.rotate_left = True
if event.key == K_UP or event.key == K_w:
player.thrusting = True
if event.key == K_SPACE:
player.fire = True
if event.type == KEYUP:
if event.key == K_RIGHT or event.key == K_d:
player.rotate_right = False
if event.key == K_LEFT or event.key == K_a:
player.rotate_left = False
if event.key == K_UP or event.key == K_w:
player.thrusting = False
if event.key == K_SPACE:
player.fire = False
if player.rotate_right:
player.angle += player.ROTATE_SPEED
elif player.rotate_left:
player.angle -= player.ROTATE_SPEED
if player.thrusting:
vel = player.thrust(player.angle)
player.xvel += vel[0]
player.yvel += vel[1]
if math.fabs(player.xvel) > player.MAX_VEL:
player.xvel = math.copysign(player.MAX_VEL, player.xvel)
if math.fabs(player.yvel) > player.MAX_VEL:
player.yvel = math.copysign(player.MAX_VEL, player.yvel)
else:
if math.fabs(player.xvel) > 0.0:
player.xvel += -(math.copysign(player.FRICTION, player.xvel))
else:
player.xvel = 0.0
if math.fabs(player.yvel) > 0.0:
player.yvel += -(math.copysign(player.FRICTION, player.yvel))
else:
player.yvel = 0.0
if player.fire:
player.fire_bullet(player.angle, player.points[0][0], player.points[0][1])
player.fire = False
if len(player.bullets) > 0:
player.update_bullets()
player.rotate(player.centx, player.centy)
player.trans()
player.centx += player.xvel
player.centy += player.yvel
centroid = player.wrap()
player.centx = centroid[0]
player.centy = centroid[1]
# print('xvel = ' + str(xvel) + ', yvel = ' + str(yvel) + ', angle = ' + str(angle))
screen.fill((32,32,32))
for star in starfield:
pygame.draw.rect(screen, (255,255,255), (star[0], star[1], 2, 2))
for bullet in player.bullets:
pygame.draw.rect(screen, (255, 255, 0), (bullet[1], bullet[2], 2, 2))
for each_asteroid in asteroids:
each_asteroid.move()
each_asteroid.render(screen)
player.render(screen)
pygame.display.flip()
fps_timer.tick(FPS)
| gpl-2.0 | 4,837,477,248,523,590,000 | 24.754237 | 85 | 0.670944 | false | 2.622088 | false | false | false |
Starbase/StarinetPythonLogger | analogue/readadc.py | 1 | 1579 | import Adafruit_BBIO.ADC as ADC
import logging
## initialise logger
logger = logging.getLogger('analogue')
try:
ADC.setup()
except Exception as e:
print 'Adc failed - did you start as root?', e
pass
try:
ADC.read("P9_40")
except Exception as e:
print 'failed to read adc - did you start as root?', e
pass
def read():
logger.debug("Analogue.readadc called")
_reading = None
try:
x0 = ADC.read("AIN0")
x0 = ADC.read("AIN0")
x1 = ADC.read("AIN1")
x1 = ADC.read("AIN1")
x2 = ADC.read("AIN2")
x2 = ADC.read("AIN2")
x3 = ADC.read("AIN3")
x3 = ADC.read("AIN3")
# print "Reading are 0 = ", x0
# print "Reading are 1 = ", x1
# print "Reading are 2 = ", x2
# print "Reading are 3 = ", x3
b0 = int(x0 * 1800)
b1 = int(x1 * 1800)
b2 = int(x2 * 1800)
b3 = int(x3 * 1800)
# print "Reading are 0 = ", b0
# print "Reading are 1 = ", b1
# print "Reading are 2 = ", b2
# print "Reading are 3 = ", b3
r0 = "{0:04d}".format(b0)
r1 = "{0:04d}".format(b1)
r2 = "{0:04d}".format(b2)
r3 = "{0:04d}".format(b3)
except IOError:
_reading = '0000', '0000', '0000', '0000'
logger.debug("%s %s", "adc IO Error ", e)
except RuntimeError:
_reading = '0000', '0000', '0000', '0000'
logger.debug("%s %s", "adc RuntimeError ", e)
else:
_reading = r0, r1, r2, r3
return _reading
if __name__ == "__main__":
print read()
| gpl-2.0 | -8,467,573,453,376,623,000 | 21.884058 | 58 | 0.51045 | false | 2.929499 | false | false | false |
benjello/liam2 | liam2/exprmisc.py | 1 | 22638 | # encoding: utf-8
from __future__ import print_function
from itertools import izip, chain
import os
import random
import numpy as np
import config
from expr import (Variable, UnaryOp, BinaryOp, ComparisonOp, DivisionOp,
LogicalOp, getdtype, coerce_types, expr_eval, as_simple_expr,
as_string, collect_variables,
get_default_array, get_default_vector, FunctionExpr,
always, firstarg_dtype, expr_cache)
from exprbases import (FilteredExpression, CompoundExpression, NumexprFunction,
TableExpression, NumpyChangeArray)
from context import context_length
from importer import load_ndarray, load_table
from utils import PrettyTable, argspec
# TODO: implement functions in expr to generate "Expr" nodes at the python level
# less painful
class Min(CompoundExpression):
def build_expr(self, context, *args):
assert len(args) >= 2
expr1, expr2 = args[:2]
expr = Where(ComparisonOp('<', expr1, expr2), expr1, expr2)
for arg in args[2:]:
expr = Where(ComparisonOp('<', expr, arg), expr, arg)
# args = [Symbol('x%d' % i) for i in range(len(self.args))]
# ctx = {'__entity__': 'x',
# 'x': {'x%d' % i: a for i, a in enumerate(self.args)}}
# where = Symbol('where')
# expr = where(a < b, a, b)
# for arg in self.args[2:]:
# expr = where(expr < arg, expr, arg)
# expr = expr.to_ast(ctx)
# expr1, expr2 = self.args[:2]
# expr = parse('if(a < b, a, b)',
# {'__entity__': 'x', 'x': {'a': expr1, 'b': expr2}})
# for arg in self.args[2:]:
# expr = parse('if(a < b, a, b)',
# {'__entity__': 'x', 'x': {'a': expr, 'b': arg}})
# expr = Where(expr1 < expr2, expr1, expr2)
# for arg in self.args[2:]:
# expr = Where(expr < arg, expr, arg)
# Where(Where(expr1 < expr2, expr1, expr2) < expr3,
# Where(expr1 < expr2, expr1, expr2),
# expr3)
# 3 where, 3 comparisons = 6 op (or 4 if optimised)
#
# Where(Where(Where(expr1 < expr2, expr1, expr2) < expr3,
# Where(expr1 < expr2, expr1, expr2),
# expr3) < expr4,
# Where(Where(expr1 < expr2, expr1, expr2) < expr3,
# Where(expr1 < expr2, expr1, expr2),
# expr3),
# expr4)
# 7 where, 7 comp = 14 op (or 6 if optimised)
# this version scales better in theory (but in practice, it will depend
# if numexpr factorize the common subexpression in the above version
# or not)
# Where(expr1 < expr2 & expr1 < expr3,
# expr1,
# Where(expr2 < expr3, expr2, expr3))
# 2 where, 3 comparisons, 1 and = 6 op
#
# Where(expr1 < expr2 & expr1 < expr3 & expr1 < expr4,
# expr1,
# Where(expr2 < expr3 & expr2 < expr4,
# expr2
# Where(expr3 < expr4,
# expr3,
# expr4)))
# 3 where, 6 comp, 3 and = 12 op
return expr
class Max(CompoundExpression):
def build_expr(self, context, *args):
assert len(args) >= 2
expr1, expr2 = args[:2]
# if(x > y, x, y)
expr = Where(ComparisonOp('>', expr1, expr2), expr1, expr2)
for arg in args[2:]:
# if(e > z, e, z)
expr = Where(ComparisonOp('>', expr, arg), expr, arg)
return expr
class Logit(CompoundExpression):
def build_expr(self, context, expr):
# log(x / (1 - x))
return Log(DivisionOp('/', expr, BinaryOp('-', 1.0, expr)))
class Logistic(CompoundExpression):
def build_expr(self, context, expr):
# 1 / (1 + exp(-x))
return DivisionOp('/', 1.0,
BinaryOp('+', 1.0, Exp(UnaryOp('-', expr))))
class ZeroClip(CompoundExpression):
def build_expr(self, context, expr, expr_min, expr_max):
# if(minv <= x <= maxv, x, 0)
return Where(LogicalOp('&', ComparisonOp('>=', expr, expr_min),
ComparisonOp('<=', expr, expr_max)), expr,
0)
# We do not have to coerce with self.expr_min & expr_max because they
# are only used in the comparisons, not in the result.
dtype = firstarg_dtype
# >>> mi = 1
# >>> ma = 10
# >>> a = np.arange(1e7)
#
# >>> timeit np.clip(a, mi, ma)
# 10 loops, best of 3: 127 ms per loop
# >>> timeit np.clip(a, mi, ma, a)
# 10 loops, best of 3: 26.2 ms per loop
# >>> timeit ne.evaluate('where(a < mi, mi, where(a > ma, ma, a))')
# 10 loops, best of 3: 94.1 ms per loop
class Clip(NumpyChangeArray):
np_func = np.clip
class Sort(NumpyChangeArray):
np_func = np.sort
# ------------------------------------
class Round(NumpyChangeArray):
np_func = np.round
dtype = firstarg_dtype
class Trunc(FunctionExpr):
# TODO: check that the dtype is correct at compilation time (__init__ is too
# early since we do not have the context yet)
# assert getdtype(self.args[0], context) == float
def compute(self, context, expr):
if isinstance(expr, np.ndarray):
return expr.astype(int)
else:
return int(expr)
dtype = always(int)
# ------------------------------------
class Abs(NumexprFunction):
argspec = argspec('expr')
dtype = always(float)
class Log(NumexprFunction):
argspec = argspec('expr')
dtype = always(float)
class Exp(NumexprFunction):
argspec = argspec('expr')
dtype = always(float)
def add_individuals(target_context, children):
target_entity = target_context.entity
id_to_rownum = target_entity.id_to_rownum
array = target_entity.array
num_rows = len(array)
num_birth = len(children)
if config.log_level == "processes":
print("%d new %s(s) (%d -> %d)" % (num_birth, target_entity.name,
num_rows, num_rows + num_birth),
end=' ')
target_entity.array.append(children)
temp_variables = target_entity.temp_variables
for name, temp_value in temp_variables.iteritems():
# FIXME: OUCH, this is getting ugly, I'll need a better way to
# differentiate nd-arrays from "entity" variables
# I guess having the context contain all entities and a separate
# globals namespace should fix this problem. Well, no it would not
# fix the problem by itself, as this would only move the problem
# to the "store" part of Assignment processes which would need to be
# able to differentiate between an "entity temp" and a global temp.
# I think this can be done by inspecting the expressions that generate
# them: no non-aggregated entity var => global temp. It would be nice
# to further distinguish between aggregated entity var and other global
# temporaries to store them in the entity somewhere, but I am unsure
# whether it is possible.
if (isinstance(temp_value, np.ndarray) and
temp_value.shape == (num_rows,)):
extra = get_default_vector(num_birth, temp_value.dtype)
temp_variables[name] = np.concatenate((temp_value, extra))
extra_variables = target_context.entity_data.extra
for name, temp_value in extra_variables.iteritems():
if name == '__globals__':
continue
if isinstance(temp_value, np.ndarray) and temp_value.shape:
extra = get_default_vector(num_birth, temp_value.dtype)
extra_variables[name] = np.concatenate((temp_value, extra))
id_to_rownum_tail = np.arange(num_rows, num_rows + num_birth)
target_entity.id_to_rownum = np.concatenate(
(id_to_rownum, id_to_rownum_tail))
class New(FilteredExpression):
no_eval = ('filter', 'kwargs')
def _initial_values(self, array, to_give_birth, num_birth, default_values):
return get_default_array(num_birth, array.dtype, default_values)
@classmethod
def _collect_kwargs_variables(cls, kwargs):
used_variables = set()
# kwargs are stored as a list of (k, v) pairs
for k, v in kwargs.iteritems():
used_variables.update(collect_variables(v))
return used_variables
def compute(self, context, entity_name=None, filter=None, number=None,
**kwargs):
if filter is not None and number is not None:
# Having neither is allowed, though, as there can be a contextual
# filter. Also, there is no reason to prevent the whole
# population giving birth, even though the usefulness of such
# usage seem dubious.
raise ValueError("new() 'filter' and 'number' arguments are "
"mutually exclusive")
source_entity = context.entity
if entity_name is None:
target_entity = source_entity
else:
target_entity = context.entities[entity_name]
# target context is the context where the new individuals will be
# created
if target_entity is source_entity:
target_context = context
else:
# we do need to copy the data (.extra) because we will insert into
# the entity.array anyway => fresh_data=True
target_context = context.clone(fresh_data=True,
entity_name=target_entity.name)
filter_expr = self._getfilter(context, filter)
if filter_expr is not None:
to_give_birth = expr_eval(filter_expr, context)
num_birth = to_give_birth.sum()
elif number is not None:
to_give_birth = None
num_birth = number
else:
to_give_birth = np.ones(len(context), dtype=bool)
num_birth = len(context)
array = target_entity.array
default_values = target_entity.fields.default_values
id_to_rownum = target_entity.id_to_rownum
num_individuals = len(id_to_rownum)
children = self._initial_values(array, to_give_birth, num_birth,
default_values)
if num_birth:
children['id'] = np.arange(num_individuals,
num_individuals + num_birth)
children['period'] = context.period
used_variables = [v.name for v in
self._collect_kwargs_variables(kwargs)]
if to_give_birth is None:
assert not used_variables
child_context = context.empty(num_birth)
else:
child_context = context.subset(to_give_birth, used_variables,
filter_expr)
for k, v in kwargs.iteritems():
if k not in array.dtype.names:
print("WARNING: {} is unknown, ignoring it!".format(k))
continue
children[k] = expr_eval(v, child_context)
add_individuals(target_context, children)
expr_cache.invalidate(context.period, context.entity_name)
# result is the ids of the new individuals corresponding to the source
# entity
if to_give_birth is not None:
result = np.full(context_length(context), -1, dtype=int)
if source_entity is target_entity:
extra_bools = np.zeros(num_birth, dtype=bool)
to_give_birth = np.concatenate((to_give_birth, extra_bools))
# Note that np.place is a bit faster, but is currently buggy when
# working with columns of structured arrays.
# See https://github.com/numpy/numpy/issues/2462
result[to_give_birth] = children['id']
return result
else:
return None
dtype = always(int)
class Clone(New):
def __init__(self, filter=None, **kwargs):
New.__init__(self, None, filter, None, **kwargs)
def _initial_values(self, array, to_give_birth, num_birth, default_values):
return array[to_give_birth]
class Dump(TableExpression):
no_eval = ('args',)
kwonlyargs = {'filter': None, 'missing': None, 'header': True,
'limit': None}
def compute(self, context, *args, **kwargs):
filter_value = kwargs.pop('filter', None)
missing = kwargs.pop('missing', None)
# periods = kwargs.pop('periods', None)
header = kwargs.pop('header', True)
limit = kwargs.pop('limit', None)
entity = context.entity
if args:
expressions = list(args)
else:
# extra=False because we don't want globals nor "system" variables
# (nan, period, __xxx__)
# FIXME: we should also somehow "traverse" expressions in this case
# too (args is ()) => all keys in the current context
expressions = [Variable(entity, name)
for name in context.keys(extra=False)]
str_expressions = [str(e) for e in expressions]
if 'id' not in str_expressions:
str_expressions.insert(0, 'id')
expressions.insert(0, Variable(entity, 'id'))
id_pos = 0
else:
id_pos = str_expressions.index('id')
# if (self.periods is not None and len(self.periods) and
# 'period' not in str_expressions):
# str_expressions.insert(0, 'period')
# expressions.insert(0, Variable('period'))
# id_pos += 1
columns = []
for expr in expressions:
if filter_value is False:
# dtype does not matter much
expr_value = np.empty(0)
else:
expr_value = expr_eval(expr, context)
if (filter_value is not None and
isinstance(expr_value, np.ndarray) and
expr_value.shape):
expr_value = expr_value[filter_value]
columns.append(expr_value)
ids = columns[id_pos]
if isinstance(ids, np.ndarray) and ids.shape:
numrows = len(ids)
else:
# FIXME: we need a test for this case (no idea how this can happen)
numrows = 1
# expand scalar columns to full columns in memory
# TODO: handle or explicitly reject columns wh ndim > 1
for idx, col in enumerate(columns):
dtype = None
if not isinstance(col, np.ndarray):
dtype = type(col)
elif not col.shape:
dtype = col.dtype.type
if dtype is not None:
# TODO: try using itertools.repeat instead as it seems to be a
# bit faster and would consume less memory (however, it might
# not play very well with Pandas.to_csv)
newcol = np.full(numrows, col, dtype=dtype)
columns[idx] = newcol
if limit is not None:
assert isinstance(limit, (int, long))
columns = [col[:limit] for col in columns]
data = izip(*columns)
table = chain([str_expressions], data) if header else data
return PrettyTable(table, missing)
dtype = always(None)
class Where(NumexprFunction):
funcname = 'if'
argspec = argspec('cond, iftrue, iffalse')
@property
def cond(self):
return self.args[0]
@property
def iftrue(self):
return self.args[1]
@property
def iffalse(self):
return self.args[2]
def as_simple_expr(self, context):
cond = as_simple_expr(self.cond, context)
# filter is stored as an unevaluated expression
context_filter = context.filter_expr
local_ctx = context.clone()
if context_filter is None:
local_ctx.filter_expr = self.cond
else:
# filter = filter and cond
local_ctx.filter_expr = LogicalOp('&', context_filter, self.cond)
iftrue = as_simple_expr(self.iftrue, local_ctx)
if context_filter is None:
local_ctx.filter_expr = UnaryOp('~', self.cond)
else:
# filter = filter and not cond
local_ctx.filter_expr = LogicalOp('&', context_filter,
UnaryOp('~', self.cond))
iffalse = as_simple_expr(self.iffalse, local_ctx)
return Where(cond, iftrue, iffalse)
def as_string(self):
args = as_string((self.cond, self.iftrue, self.iffalse))
return 'where(%s)' % self.format_args_str(args, [])
def dtype(self, context):
assert getdtype(self.cond, context) == bool
return coerce_types(context, self.iftrue, self.iffalse)
def _plus(a, b):
return BinaryOp('+', a, b)
def _mul(a, b):
return BinaryOp('*', a, b)
class ExtExpr(CompoundExpression):
def __init__(self, fname):
data = load_ndarray(os.path.join(config.input_directory, fname))
# TODO: handle more dimensions. For that we need to evaluate a
# different expr depending on the values for the other dimensions
# we will need to either
# 1) create awful expressions with lots of nested if() (X*Y*Z)
# OR
# 2) use groupby (or partition_nd)
# the problem with groupby is that once we have one vector of values
# for each group, we have to recombine them into a single vector
# result = np.empty(context_length(context), dtype=expr.dtype)
# groups = partition_nd(filtered_columns, True, possible_values)
# if not groups:
# return
# contexts = [filtered_context.subset(indices, expr_vars, not_hashable)
# for indices in groups]
# data = [expr_eval(expr, c) for c in contexts]
# for group_indices, group_values in zip(groups, data):
# result[group_indices] = group_values
# 3) use a lookup for each individual & coef (we can only envision
# this during the evaluation of the larger expression if done via numba,
# otherwise it will be too slow
# expr = age * AGECOEF[gender, xyz] + eduach * EDUCOEF[gender, xyz]
# 4) compute the coefs separately
# 4a) via nested if()
# AGECOEF = if(gender, if(workstate == 1, a, if(workstate == 2, b, c)
# if(workstate == 1, a, if(workstate == 2, b, c))
# EDUCOEF = ...
# expr = age * AGECOEF + eduach * EDUCOEF
# 4b) via lookup
# AGECOEF = AGECOEFS[gender, workstate]
# EDUCOEF = EDUCOEFS[gender, workstate]
# expr = age * AGECOEF + eduach * EDUCOEF
# Note, in general, we could make
# EDUCOEFS (sans rien) equivalent to EDUCOEFS[:, :, period] s'il y a
# une dimension period en 3eme position
# et non à EDUCOEFS[gender, workstate, period] car ca pose probleme
# pour l'alignement (on a pas besoin d'une valeur par personne)
# in general, we could let user tell explicitly which fields they want
# to index by (autoindex: period) for periodic
fields_dim = data.dim_names.index('fields')
fields_axis = data.axes[fields_dim]
self.names = list(fields_axis.labels)
self.coefs = list(data)
# needed for compatibility with CompoundExpression
self.args = []
self.kwargs = []
def build_expr(self, context):
res = None
for name, coef in zip(self.names, self.coefs):
# XXX: parse expressions instead of only simple Variable?
if name != 'constant':
# cond_dims = self.cond_dims
# cond_exprs = [Variable(context.entity, d) for d in cond_dims]
# coef = GlobalArray('__xyz')[name, *cond_exprs]
term = _mul(Variable(context.entity, name), coef)
else:
term = coef
if res is None:
res = term
else:
res = _plus(res, term)
return res
class Seed(FunctionExpr):
def compute(self, context, seed=None):
if seed is not None:
seed = long(seed)
print("using fixed random seed: %d" % seed)
else:
print("resetting random seed")
random.seed(seed)
np.random.seed(seed)
class Array(FunctionExpr):
def compute(self, context, expr):
return np.array(expr)
# XXX: is this correct?
dtype = firstarg_dtype
class Load(FunctionExpr):
def compute(self, context, fname, type=None, fields=None):
# TODO: move those checks to __init__
if type is None and fields is None:
raise ValueError("type or fields must be specified")
if type is not None and fields is not None:
raise ValueError("cannot specify both type and fields")
if type is not None:
return load_ndarray(os.path.join(config.input_directory, fname), type)
elif fields is not None:
return load_table(os.path.join(config.input_directory, fname), fields)
functions = {
# element-wise functions
# Min and Max are in aggregates.py.functions (because of the dispatcher)
'abs': Abs,
'clip': Clip,
'zeroclip': ZeroClip,
'round': Round,
'trunc': Trunc,
'exp': Exp,
'log': Log,
'logit': Logit,
'logistic': Logistic,
'where': Where,
# misc
'sort': Sort,
'new': New,
'clone': Clone,
'dump': Dump,
'extexpr': ExtExpr,
'seed': Seed,
'array': Array,
'load': Load,
}
| gpl-3.0 | -134,677,495,349,826,080 | 36.045378 | 82 | 0.546009 | false | 4.004422 | false | false | false |
hfaran/slack-export-viewer | slackviewer/formatter.py | 1 | 5541 | import logging
import re
import sys
import emoji
import markdown2
from slackviewer.user import User
# Workaround for ASCII encoding error in Python 2.7
# See https://github.com/hfaran/slack-export-viewer/issues/81
if sys.version_info[0] == 2:
reload(sys)
sys.setdefaultencoding('utf8')
class SlackFormatter(object):
"This formats messages and provides access to workspace-wide data (user and channel metadata)."
# Class-level constants for precompilation of frequently-reused regular expressions
# URL detection relies on http://stackoverflow.com/a/1547940/1798683
_LINK_PAT = re.compile(r"<(https|http|mailto):[A-Za-z0-9_\.\-\/\?\,\=\#\:\@]+\|[^>]+>")
_MENTION_PAT = re.compile(r"<((?:#C|@[UB])\w+)(?:\|([A-Za-z0-9.-_]+))?>")
_HASHTAG_PAT = re.compile(r"(^| )#[A-Za-z][\w\.\-\_]+( |$)")
def __init__(self, USER_DATA, CHANNEL_DATA):
self.__USER_DATA = USER_DATA
self.__CHANNEL_DATA = CHANNEL_DATA
def find_user(self, message):
if message.get("subtype", "").startswith("bot_") and "bot_id" in message and message["bot_id"] not in self.__USER_DATA:
bot_id = message["bot_id"]
logging.debug("bot addition for %s", bot_id)
if "bot_link" in message:
(bot_url, bot_name) = message["bot_link"].strip("<>").split("|", 1)
elif "username" in message:
bot_name = message["username"]
bot_url = None
else:
bot_name = None
bot_url = None
self.__USER_DATA[bot_id] = User({
"user": bot_id,
"real_name": bot_name,
"bot_url": bot_url,
"is_bot": True,
"is_app_user": True
})
user_id = message.get("user") or message.get("bot_id")
if user_id in self.__USER_DATA:
return self.__USER_DATA.get(user_id)
logging.error("unable to find user in %s", message)
def render_text(self, message, process_markdown=True):
message = message.replace("<!channel>", "@channel")
message = message.replace("<!channel|@channel>", "@channel")
message = message.replace("<!here>", "@here")
message = message.replace("<!here|@here>", "@here")
message = message.replace("<!everyone>", "@everyone")
message = message.replace("<!everyone|@everyone>", "@everyone")
# Handle mentions of users, channels and bots (e.g "<@U0BM1CGQY|calvinchanubc> has joined the channel")
message = self._MENTION_PAT.sub(self._sub_annotated_mention, message)
# Handle links
message = self._LINK_PAT.sub(self._sub_hyperlink, message)
# Handle hashtags (that are meant to be hashtags and not headings)
message = self._HASHTAG_PAT.sub(self._sub_hashtag, message)
# Introduce unicode emoji
message = self.slack_to_accepted_emoji(message)
message = emoji.emojize(message, use_aliases=True)
if process_markdown:
# Handle bold (convert * * to ** **)
message = re.sub(r'\*', "**", message)
message = markdown2.markdown(
message,
extras=[
"cuddled-lists",
# This gives us <pre> and <code> tags for ```-fenced blocks
"fenced-code-blocks",
"pyshell"
]
).strip()
# Special handling cases for lists
message = message.replace("\n\n<ul>", "<ul>")
message = message.replace("\n<li>", "<li>")
return message
def slack_to_accepted_emoji(self, message):
"""Convert some Slack emoji shortcodes to more universal versions"""
# Convert -'s to _'s except for the 1st char (preserve things like :-1:)
# For example, Slack's ":woman-shrugging:" is converted to ":woman_shrugging:"
message = re.sub(
r":([^ <>/:])([^ <>/:]+):",
lambda x: ":{}{}:".format(x.group(1), x.group(2).replace("-", "_")),
message
)
# https://github.com/Ranks/emojione/issues/114
message = message.replace(":simple_smile:", ":slightly_smiling_face:")
return message
def _sub_annotated_mention(self, matchobj):
ref_id = matchobj.group(1)[1:] # drop #/@ from the start, we don't care
annotation = matchobj.group(2)
if ref_id.startswith('C'):
mention_format = "<b>#{}</b>"
if not annotation:
channel = self.__CHANNEL_DATA.get(ref_id)
annotation = channel["name"] if channel else ref_id
else:
mention_format = "@{}"
if not annotation:
user = self.__USER_DATA.get(ref_id)
annotation = user.display_name if user else ref_id
return mention_format.format(annotation)
def _sub_hyperlink(self, matchobj):
compound = matchobj.group(0)[1:-1]
if len(compound.split("|")) == 2:
url, title = compound.split("|")
else:
url, title = compound, compound
result = "<a href=\"{url}\">{title}</a>".format(url=url, title=title)
return result
def _sub_hashtag(self, matchobj):
text = matchobj.group(0)
starting_space = " " if text[0] == " " else ""
ending_space = " " if text[-1] == " " else ""
return "{}<b>{}</b>{}".format(
starting_space,
text.strip(),
ending_space
)
| mit | 8,813,842,781,377,737,000 | 38.297872 | 127 | 0.548096 | false | 3.696464 | false | false | false |
mruwnik/magnolia | src/magnolia/math/geometry.py | 1 | 11252 | import math
from typing import List, Tuple, Iterable
class FrontError(ValueError):
"""Raised when a valid front can't be constructed."""
class Sphere(object):
"""Represent a sphere in cylindrical coords."""
def __init__(self, angle=0, height=0, radius=1, scale=3, **kwargs):
"""
Initialise the sphere.
:param float angle: the angle by which the sphere is rotated around the cylinder
:param float height: the height of the sphere on the cylinder
:param float radius: the radius of the cylinder
:param float scale: the radius of the sphere
"""
self.angle = angle
self.height = height
self.radius = radius
self.scale = scale
super().__init__(**kwargs)
@staticmethod
def cyl_to_cart(angle, height, radius):
"""Convert the given cylinderic point to a cartesian one."""
x = math.sin(angle) * radius
z = math.cos(angle) * radius
return (x, height, z)
@property
def offset(self):
"""Calculate the buds offset from the meristems origin.
The bud is positioned on a simple circle on the XZ axis, so
simple trigonometry does the trick.
"""
return self.cyl_to_cart(self.angle, self.height, self.radius)
@staticmethod
def norm_angle(angle):
"""Normalize the given angle (wrapping around π)."""
return norm_angle(angle)
def angle2x(self, angle):
"""Return the given angle in pseudo 2D coordinates.
In these coordinates, x is the bud's angle, while y is its height. To make calculations
work, the angle has to be scaled by the radius. Otherwise 2 buds with the same angle would
have the same x value, regardless of their radius. This would mean that there would be no way
to e.g. check which is wider.
"""
return self.norm_angle(angle) * self.radius
def distance(self, bud):
"""Calculate the distance between this bud and the provided one."""
return math.sqrt(self.angle2x(self.angle - bud.angle)**2 + (self.height - bud.height)**2)
def opposite(self, b1, b2):
"""Check whether the given buds are on the opposite sides of this bud.
This checks to a precision of 1% of the radius.
"""
angles_diff = abs(self.angle2x(b1.angle - self.angle) + self.angle2x(b2.angle - self.angle))
height_diff = abs(abs(b1.height + b2.height)/2 - abs(self.height))
return angles_diff < self.radius / 100 and height_diff < self.radius / 100
def bounds_test(self, angle, h, offset):
"""Check whether the provided point lies in this bud.
This is a 2D test, for use when a meristem is rolled out.
"""
dist = self.angle2x(angle / self.radius - offset[0] - self.angle)**2 + (h - self.height)**2
if dist < self.scale**2:
return math.sqrt(dist)
return -1
def __repr__(self):
return '<Sphere (angle=%s, height=%s, radius=%s, scale=%s)' % (self.angle, self.height, self.radius, self.scale)
def by_height(circles: List[Sphere], reversed=True):
"""Return the given circles sorted by height."""
return sorted(circles, key=lambda c: c.height, reverse=reversed)
def by_angle(circles: List[Sphere], reversed=True):
"""Return the given circles sorted by angle."""
return sorted(circles, key=lambda c: c.angle, reverse=reversed)
def approx_equal(a: float, b: float, diff=0.001) -> bool:
"""Check whether the 2 values are appropriately equal."""
return abs(a - b) < diff
def norm_angle(angle):
"""Normalize the given angle (wrapping around π)."""
return ((angle + math.pi) % (2 * math.pi) - math.pi)
def dot_product(v1: Iterable, v2: Iterable) -> float:
"""Calculate the dot products of the provided vectors.
If the vectors have different lengths, the extra values will be discarded from the longer one.
"""
return sum(i*j for i, j in zip(v1, v2))
def vect_diff(v1: Iterable, v2: Iterable) -> List[float]:
"""Subtract the provided vectors from each other.
If the vectors have different lengths, the extra values will be discarded from the longer one.
"""
return [i - j for i, j in zip(v1, v2)]
def vect_mul(v: Iterable, scalar: float) -> List[float]:
"""Multiply the vector by the scalar."""
return [i * scalar for i in v]
def cross_product(v1, v2):
"""Return the cross product of the provided 3D vectors."""
ax, ay, az = v1
bx, by, bz = v2
i = ay * bz - az * by
j = az * bx - ax * bz
k = ax * by - ay * bx
return (i, j, k)
def length(vector: Iterable) -> float:
"""Return the length of the provided vector."""
return math.sqrt(sum(i**2 for i in vector))
def cylin_distance(p1: Tuple[float, float], p2: Tuple[float, float]) -> float:
"""Calculate the distance between the given points, in cylinder coords."""
return length((norm_angle(p1[0] - p2[0]), p1[1] - p2[1]))
def in_cone_checker(tip, dir_vec, r, h):
"""
Return a function that checks whether a bud is in the provided cone.
The `r` and `h` params describe a sample base - in reality the cone is assumed to be
infinite. For use in occlusion checks, `tip` should be where the inner tangents of the
checked bud meet, `dir_vec` should be the vector between them, while `r` and `h` should
be the scale and height (respectably) of the occluding bud.
:param tuple tip: the tip of the cone
:param tuple dir_vec: the direction vector of the cone
:param float r: a radius at h that describes the cone
:param float h: a height along the axis which along with `r` describes the cone
"""
tx, ty, tz = tip
def in_cone(bud):
"""Return whether the given bud totally fits in the cone."""
diff = (norm_angle(bud.angle - tx), bud.height - ty, bud.radius - tz)
cone_dist = dot_product(diff, dir_vec)
if cone_dist < 0:
return False
radius = r * cone_dist / h
orth_dist = length(vect_diff(diff, vect_mul(dir_vec, cone_dist)))
return orth_dist < radius
return in_cone
def first_gap(circles: List[Sphere], radius: float) -> Tuple[float, float]:
"""
Return the first available gap that will fit a circle of the given radius.
This simply loops around the circles, sorted by x, and whenever the distance between
2 circles is larger than 2*radius it deems that it's found a hole and returns the (x,y) that lies
between the 2 circles.
"""
circles = by_angle(circles)
for c1, c2 in zip(circles, circles[1:] + [circles[0]]):
dist = abs(norm_angle(c1.angle - c2.angle))
if c1.scale + c2.scale + 2*radius < dist:
return norm_angle(c1.angle - dist/2), 0
def flat_circle_overlap(
b1: Tuple[float, float, float], b2: Tuple[float, float, float], r: float) -> Tuple[float, float]:
"""Return the higher overlap of 2 circles that are on the same height."""
x1, y1, r1 = b1
x2, y2, r2 = b2
# there are 2 possible intersections, both with the same x, but with different ys
x3 = -((r + r1)**2 - (r + r2)**2)/(2 * (x1 + x2))
y3 = math.sqrt((r + r1)**2 - (x3 - x1))
return norm_angle(x3), max(y1 + y3, y1 - y3)
def are_intersecting(c1: Sphere, c2: Sphere) -> bool:
"""Check whether the 2 provided circles intersect,"""
return c1.distance(c2) < c1.scale + c2.scale - 0.0000001
def check_collisions(circle: Sphere, to_check: List[Sphere]) -> bool:
"""Check whether the given circle overlaps with any in the provided list."""
return any(are_intersecting(circle, c) for c in to_check)
def closest_circle(b1: Sphere, b2: Sphere, radius: float) -> Sphere:
"""
Return the angle and height of a bud with the given radius as close a possible to the given buds.
n *
/ \
/ phi \
n_b1 / \ n_b2
/ \
/ \
b1 * -------------------------* b2
b1_b2
This can be reduced to the intersection of 2 circles at b1 and b2, with radiuses of
b1,radius + radius and b2.radius + radius
"""
x1, y1, r1 = b1.angle, b1.height, b1.scale
x2, y2, r2 = b2.angle, b2.height, b2.scale
n_b1 = r1 + radius
n_b2 = r2 + radius
# the dist between the 2 buds should be r1 + r2, but do it manually just in case
b1_b2 = b1.distance(b2)
# check if the circles are in the same place
if approx_equal(b1_b2, 0):
return None
a = (n_b1**2 - n_b2**2 + b1_b2**2) / (2 * b1_b2)
if n_b1 < abs(a):
h = 0
else:
h = math.sqrt(n_b1**2 - a**2)
midx = x1 + a * norm_angle(x2 - x1)/b1_b2
midy = y1 + a * (y2 - y1)/b1_b2
x3_1 = midx + h*(y2 - y1)/b1_b2
y3_1 = midy - h*norm_angle(x2 - x1)/b1_b2
x3_2 = midx - h*(y2 - y1)/b1_b2
y3_2 = midy + h*norm_angle(x2 - x1)/b1_b2
if y3_1 > y3_2:
return Sphere(norm_angle(x3_1), y3_1, scale=radius)
return Sphere(norm_angle(x3_2), y3_2, scale=radius)
def highest_left(circles: List[Sphere], checked: Sphere) -> Sphere:
for c in circles:
if norm_angle(c.angle - checked.angle) > 0:
return c
raise FrontError
def touching(circle: Sphere, circles: Iterable[Sphere], precision: float=0.1) -> List[Sphere]:
"""Return all circles that are touching the provided one."""
return [c for c in circles if circle.distance(c) < c.scale + circle.scale + precision and c != circle]
def front(circles: List[Sphere]) -> List[Sphere]:
"""
Given a list of circles, return their current front.
From https://doi.org/10.5586/asbp.3533: "a front is a zigzagging ring of
primordia encircling the cylinder, each primordium being tangent to one on its left and
one on its right. Moreover, any primordium above the front must be higher than any
primordium of the front."
:param list circles: the collection of circles to be checked
:returns: the front
"""
if not circles:
return []
# sort the circles by height
circles = by_height(circles)
highest = circles[0]
seen = set()
def left(checked):
neighbours = touching(checked, circles)
c = highest_left(neighbours, checked)
if c and c != highest and c not in seen:
# sometimes a proper front can't be constructed coz a bud has no left neighbours
# so to stop infinite recursions, stop when a bud is found more than once
seen.add(c)
return [checked] + left(c)
return [checked]
try:
return left(highest)
except FrontError:
return None
def cycle_ring(ring: List[Sphere], n: int) -> List[Sphere]:
"""
Rotate the given ring of circles by n circles.
This function assumes that the ring is sorted by angle.
"""
if n > 1:
ring = cycle_ring(ring, n - 1)
last = ring[-1]
first = ring[0]
if abs(last.angle - first.angle) > math.pi:
first = Sphere(last.angle - 2 * math.pi, last.height, scale=last.scale)
else:
first = last
return [first] + ring[:-1]
| gpl-3.0 | -7,572,957,586,726,389,000 | 32.885542 | 120 | 0.614311 | false | 3.365241 | false | false | false |
j127/caster | caster/asynch/auto_com/language.py | 1 | 1206 | from caster.lib import utilities, settings, ccr, control
AUTO_ENABLED_LANGUAGE = None
LAST_EXTENSION = None
def toggle_language():
global AUTO_ENABLED_LANGUAGE, LAST_EXTENSION
filename, folders, title = utilities.get_window_title_info()
extension = None
if filename != None:
extension = "." + filename.split(".")[-1]
if LAST_EXTENSION != extension:
message=None
if extension != None and extension in settings.SETTINGS["ccr"]["registered_extensions"]:
chosen_extension=settings.SETTINGS["ccr"]["registered_extensions"][extension]
ccr.set_active_command(1, chosen_extension)
AUTO_ENABLED_LANGUAGE = chosen_extension
LAST_EXTENSION = extension
message="Enabled '"+chosen_extension+"'"
elif AUTO_ENABLED_LANGUAGE != None:
message="Disabled '"+AUTO_ENABLED_LANGUAGE+"'"
ccr.set_active_command(0, AUTO_ENABLED_LANGUAGE)
AUTO_ENABLED_LANGUAGE = None
if message!=None:
if settings.SETTINGS["miscellaneous"]["status_window_enabled"]:
control.nexus().comm.get_com("status").text(message)
LAST_EXTENSION = extension
| lgpl-3.0 | -2,507,564,527,367,156,000 | 39.2 | 96 | 0.639303 | false | 4.369565 | false | false | false |
minghuadev/hulahop | python/webview.py | 1 | 11188 | # Copyright (C) 2007, Red Hat, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import logging
import gobject
import gtk
from hulahop import _hulahop
import xpcom
from xpcom import components
from xpcom.components import interfaces
from xpcom.nsError import *
class _Chrome:
_com_interfaces_ = interfaces.nsIWebBrowserChrome, \
interfaces.nsIWebBrowserChrome2, \
interfaces.nsIEmbeddingSiteWindow, \
interfaces.nsIWebProgressListener, \
interfaces.nsIWindowProvider, \
interfaces.nsIInterfaceRequestor
def __init__(self, web_view):
self.web_view = web_view
self.title = ''
self._modal = False
self._chrome_flags = interfaces.nsIWebBrowserChrome.CHROME_ALL
self._visible = False
def provideWindow(self, parent, flags, position_specified,
size_specified, uri, name, features):
if name == "_blank":
return parent, False
else:
return None, False
# nsIWebBrowserChrome
def destroyBrowserWindow(self):
logging.debug("nsIWebBrowserChrome.destroyBrowserWindow")
if self._modal:
self.exitModalEventLoop(0)
self.web_view.get_toplevel().destroy()
def exitModalEventLoop(self, status):
logging.debug("nsIWebBrowserChrome.exitModalEventLoop: %r" % status)
"""
if self._continue_modal_loop:
self.enable_parent(True)
"""
if self._modal:
self._continue_modal_loop = False
self._modal = False
self._modal_status = status
#self.web_view.get_toplevel().grab_remove()
def isWindowModal(self):
logging.debug("nsIWebBrowserChrome.isWindowModal")
return self._modal
def setStatus(self, statusType, status):
#logging.debug("nsIWebBrowserChrome.setStatus")
self.web_view._set_status(status.encode('utf-8'))
def showAsModal(self):
logging.debug("nsIWebBrowserChrome.showAsModal")
self._modal = True
self._continue_modal_loop = True
self._modal_status = None
#EnableParent(PR_FALSE);
#self.web_view.get_toplevel().grab_add()
cls = components.classes["@mozilla.org/thread-manager;1"]
thread_manager = cls.getService(interfaces.nsIThreadManager)
current_thread = thread_manager.currentThread
self.web_view.push_js_context()
while self._continue_modal_loop:
processed = current_thread.processNextEvent(True)
if not processed:
break
self.web_view.pop_js_context()
self._modal = False
self._continue_modal_loop = False
return self._modal_status
def sizeBrowserTo(self, cx, cy):
logging.debug("nsIWebBrowserChrome.sizeBrowserTo: %r %r" % (cx, cy))
self.web_view.get_toplevel().resize(cx, cy)
self.web_view.type = WebView.TYPE_POPUP
# nsIWebBrowserChrome2
def setStatusWithContext(self, statusType, statusText, statusContext):
self.web_view._set_status(statusText.encode('utf-8'))
# nsIEmbeddingSiteWindow
def getDimensions(self, flags):
logging.debug("nsIEmbeddingSiteWindow.getDimensions: %r" % flags)
base_window = self.web_view.browser.queryInterface(interfaces.nsIBaseWindow)
if (flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_POSITION) and \
((flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_SIZE_INNER) or \
(flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_SIZE_OUTER)):
return base_window.getPositionAndSize()
elif flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_POSITION:
x, y = base_window.getPosition()
return (x, y, 0, 0)
elif (flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_SIZE_INNER) or \
(flags & interfaces.nsIEmbeddingSiteWindow.DIM_FLAGS_SIZE_OUTER):
width, height = base_window.getSize()
return (0, 0, width, height)
else:
raise xpcom.Exception('Invalid flags: %r' % flags)
def setDimensions(self, flags, x, y, cx, cy):
logging.debug("nsIEmbeddingSiteWindow.setDimensions: %r" % flags)
def setFocus(self):
logging.debug("nsIEmbeddingSiteWindow.setFocus")
base_window = self.web_view.browser.queryInterface(interfaces.nsIBaseWindow)
base_window.setFocus()
def get_title(self):
logging.debug("nsIEmbeddingSiteWindow.get_title: %r" % self.title)
return self.title
def set_title(self, title):
logging.debug("nsIEmbeddingSiteWindow.set_title: %r" % title)
self.title = title
self.web_view._notify_title_changed()
def get_webBrowser(self):
return self.web_view.browser
def get_chromeFlags(self):
return self._chrome_flags
def set_chromeFlags(self, flags):
self._chrome_flags = flags
def get_visibility(self):
logging.debug("nsIEmbeddingSiteWindow.get_visibility: %r" % self._visible)
# See bug https://bugzilla.mozilla.org/show_bug.cgi?id=312998
# Work around the problem that sometimes the window is already visible
# even though mVisibility isn't true yet.
visibility = self.web_view.props.visibility
mapped = self.web_view.flags() & gtk.MAPPED
return visibility or (not self.web_view.is_chrome and mapped)
def set_visibility(self, visibility):
logging.debug("nsIEmbeddingSiteWindow.set_visibility: %r" % visibility)
if visibility == self.web_view.props.visibility:
return
self.web_view.props.visibility = visibility
# nsIWebProgressListener
def onStateChange(self, web_progress, request, state_flags, status):
if (state_flags & interfaces.nsIWebProgressListener.STATE_STOP) and \
(state_flags & interfaces.nsIWebProgressListener.STATE_IS_NETWORK):
if self.web_view.is_chrome:
self.web_view.dom_window.sizeToContent()
def onStatusChange(self, web_progress, request, status, message): pass
def onSecurityChange(self, web_progress, request, state): pass
def onProgressChange(self, web_progress, request, cur_self_progress, max_self_progress, cur_total_progress, max_total_progress): pass
def onLocationChange(self, web_progress, request, location): pass
# nsIInterfaceRequestor
def queryInterface(self, uuid):
if uuid == interfaces.nsIDOMWindow:
return self.web_view.dom_window
if not uuid in self._com_interfaces_:
# Components.returnCode = Cr.NS_ERROR_NO_INTERFACE;
logging.warning('Interface %s not implemented by this instance: %r' % (uuid, self))
return None
return xpcom.server.WrapObject(self, uuid)
def getInterface(self, uuid):
result = self.queryInterface(uuid)
if not result:
# delegate to the nsIWebBrowser
requestor = self.web_view.browser.queryInterface(interfaces.nsIInterfaceRequestor)
try:
result = requestor.getInterface(uuid)
except xpcom.Exception:
logging.warning('Interface %s not implemented by this instance: %r' % (uuid, self.web_view.browser))
result = None
return result
class WebView(_hulahop.WebView):
TYPE_WINDOW = 0
TYPE_POPUP = 1
__gproperties__ = {
'title' : (str, None, None, None,
gobject.PARAM_READABLE),
'status' : (str, None, None, None,
gobject.PARAM_READABLE),
'visibility' : (bool, None, None, False,
gobject.PARAM_READWRITE)
}
def __init__(self):
_hulahop.WebView.__init__(self)
self.type = WebView.TYPE_WINDOW
self.is_chrome = False
chrome = _Chrome(self)
self._chrome = xpcom.server.WrapObject(chrome, interfaces.nsIEmbeddingSiteWindow)
weak_ref = xpcom.client.WeakReference(self._chrome)
self.browser.containerWindow = self._chrome
listener = xpcom.server.WrapObject(chrome, interfaces.nsIWebProgressListener)
weak_ref2 = xpcom.client.WeakReference(listener)
# FIXME: weak_ref2._comobj_ looks quite a bit ugly.
self.browser.addWebBrowserListener(weak_ref2._comobj_,
interfaces.nsIWebProgressListener)
self._status = ''
self._first_uri = None
self._visibility = False
def do_setup(self):
_hulahop.WebView.do_setup(self)
if self._first_uri:
self.load_uri(self._first_uri)
def _notify_title_changed(self):
self.notify('title')
def _set_status(self, status):
self._status = status
self.notify('status')
def do_get_property(self, pspec):
if pspec.name == 'title':
return self._chrome.title
elif pspec.name == 'status':
return self._status
elif pspec.name == 'visibility':
return self._visibility
def do_set_property(self, pspec, value):
if pspec.name == 'visibility':
self._visibility = value
def get_window_root(self):
return _hulahop.WebView.get_window_root(self)
def get_browser(self):
return _hulahop.WebView.get_browser(self)
def get_doc_shell(self):
requestor = self.browser.queryInterface(interfaces.nsIInterfaceRequestor)
return requestor.getInterface(interfaces.nsIDocShell)
def get_web_progress(self):
return self.doc_shell.queryInterface(interfaces.nsIWebProgress)
def get_web_navigation(self):
return self.browser.queryInterface(interfaces.nsIWebNavigation)
def get_dom_window(self):
return self.browser.contentDOMWindow
def load_uri(self, uri):
try:
self.web_navigation.loadURI(
uri, interfaces.nsIWebNavigation.LOAD_FLAGS_NONE,
None, None, None)
except xpcom.Exception:
self._first_uri = uri
dom_window = property(get_dom_window)
browser = property(get_browser)
window_root = property(get_window_root)
doc_shell = property(get_doc_shell)
web_progress = property(get_web_progress)
web_navigation = property(get_web_navigation)
| lgpl-2.1 | 6,456,294,321,182,294,000 | 36.169435 | 137 | 0.639524 | false | 3.93805 | false | false | false |
josircg/raizcidadanista | raizcidadanista/BruteBuster/decorators.py | 1 | 2284 | # BruteBuster by Cyber Security Consulting(www.csc.bg)
"""Decorators used by BruteBuster"""
from BruteBuster.models import FailedAttempt
from BruteBuster.middleware import get_request
def protect_and_serve(auth_func):
"""
This is the main code of the application. It is meant to replace the
authentication() function, with one that records failed login attempts and
blocks logins, if a threshold is reached
"""
if hasattr(auth_func, '__BB_PROTECTED__'):
# avoiding multiple decorations
return auth_func
def decor(*args, **kwargs):
"""
This is the wrapper that gets installed around the default
authentication function.
"""
user = kwargs.get('username', '')
if not user:
raise ValueError('BruteBuster cannot work with authenticate functions that do not include "username" as an argument')
request = get_request()
if request:
# try to get the remote address from thread locals
IP_ADDR = request.META.get('HTTP_X_FORWARDED_FOR', request.META.get('REMOTE_ADDR', '')).split(', ')[0]
else:
IP_ADDR = None
try:
fa = FailedAttempt.objects.filter(username=user, IP=IP_ADDR)[0]
if fa.recent_failure():
if fa.too_many_failures():
# we block the authentication attempt because
# of too many recent failures
fa.failures += 1
fa.save()
return None
else:
# the block interval is over, so let's start
# with a clean sheet
fa.failures = 0
fa.save()
except IndexError:
# No previous failed attempts
fa = None
result = auth_func(*args, **kwargs)
if result:
# the authentication was successful - we do nothing
# special
return result
# the authentication was kaput, we should record this
fa = fa or FailedAttempt(username=user, IP=IP_ADDR, failures=0)
fa.failures += 1
fa.save()
# return with unsuccessful auth
return None
decor.__BB_PROTECTED__ = True
return decor
| gpl-3.0 | -5,943,375,566,235,557,000 | 33.089552 | 129 | 0.57662 | false | 4.586345 | false | false | false |
tcyb/nextgen4b | nextgen4b/process/filter.py | 1 | 14126 | """
nextgen4b.process.filter
TC, 8/11/16
A collection of functions that read, filter, and output sequence data from
next-generation sequencing experiments.
"""
import gzip
import logging
import os
import re
import sys
import time
import uuid
import numpy as np
import yaml
from Bio import AlignIO, SeqIO
from Bio.Emboss.Applications import NeedleCommandline
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from tqdm import tqdm
__all__ = ['filter_sample', 'run_all_experiments']
#####################
# File Management
#####################
def load_ngs_file(fpath, ftype='fastq'):
"""
Load a .fastq file to a SeqIO iterator, un-gzip if necessary.
"""
if fpath.endswith('.gz'):
seq_f = gzip.open(fpath, 'rt')
elif fpath.endswith('.fastq'):
seq_f = open(fpath, 'rt')
else:
raise ValueError('File does not end in .gz or .fastq; confirm file type.')
f_iter = SeqIO.parse(seq_f, ftype)
return f_iter
#####################
# Main Filter Code
#####################
def filter_sample(f_name, pe_name, bcs, templates, f_filt_seqs, r_filt_seqs):
"""
Output filtered sequences as dictionary, indexed by barcode.
Sequences will be aligned to the provided template.
Parts of the template not represented will be '-'
"""
# setup loggers
text_logger = logging.getLogger(__name__+'.text_logger')
csv_logger = logging.getLogger(__name__+'.csv_logger')
text_logger.info('Started filtering routine for %s', f_name)
# Compile regexes
f_res = compile_res(f_filt_seqs)
pe_res = compile_res(r_filt_seqs)
# Load as generators, then filter
text_logger.info('Loading Files')
f_seqs = load_ngs_file(f_name)
for regex in f_res:
f_seqs = filter_seqs(f_seqs, regex)
pe_seqs = load_ngs_file(pe_name)
for regex in pe_res:
pe_seqs = filter_seqs(pe_seqs, regex)
# Barcode Filtering/Demux
bc_seqs = barcodeDemux(f_seqs, bcs)
# Sequence-based filtering
for expt in bcs.keys():
text_logger.info('Starting post-demux filtering for expt ID %s', expt)
csv_data = [expt, len(bc_seqs[expt])]
# Filter based on PE matches, only return the copied sequence
# Assumes the first RE in f_res will terminate the copied sequence
# copiedFuncGenerator's output should return all sequence before the adapter
seqs = filter_pe_mismatch(bc_seqs[expt], pe_seqs,
gen_copied_seq_function(f_res))
csv_data.append(len(seqs))
seqs = [trim_lig_adapter(s, f_res) for s in seqs] # Trim CS2 before filtering on quality (bad Qs at end of seqs)
# Quality filter
if len(seqs) > 0:
seqs = quality_filter(seqs) # Quality Filtering (needs to only have copied sequence)
else:
text_logger.info("""No sequences left, skipped quality score
filtering for expt ID %s.""", expt)
bc_seqs[expt] = seqs
csv_data.append(len(seqs))
# Align filter
if len(seqs) > 0:
# Do alignment-based filtering
full_template = '{}{}'.format(bcs[expt], templates[expt])
seqs = alignment_filter(seqs, full_template) # Do alignment-based filtering
else:
text_logger.info("""No sequences left, skipped align filtering for
expt ID %s.***""", expt)
bc_seqs[expt] = seqs
csv_data.append(len(seqs))
# Length filtering
if len(seqs) > 0:
seqs = len_filter(seqs, l_barcode=len(bcs[expt])) # Length Filtering
else:
text_logger.info("""No sequences left, skipped length filtering for
expt ID %s***""", expt)
bc_seqs[expt] = seqs
csv_data.append(len(seqs))
csv_logger.info(','.join([str(n) for n in csv_data]))
bc_seqs[expt] = seqs
return bc_seqs
#####################
# F/R Regex Filtering
#####################
def filter_seqs(seqs, q_re):
"""
Filter an iterator based on whether items match a regex object.
"""
# sIO_iterator should be generated by Bio.SeqIO.parse
# q_re should be a regex object generated by re.compile()
# Outputs a list of Seq objects that have the desired sequence in them.
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started regex filter: %s', q_re.pattern)
out_l = [s for s in seqs if q_re.search(str(s.seq))]
text_logger.info('Finished regex filter. Kept %i sequences.', len(out_l))
return out_l
def compile_res(seqs):
"""
Compile regex for each string in a list, return list of regex objects.
"""
# Takes a list of sequences you want to filter for
# Outputs a list of regex objects that you can iterate over
return [re.compile(s) for s in seqs]
#####################
# Barcode Filtering
#####################
def barcodeDemux(seqs, bcs):
"""
Takes lists of sequence objects, dict of barcodes (indexed by expt. ID)
Demuxes based on the barcode the sequences start with
Discards sequences that don't start with a barcode exact match
Assumes forward read -> sequences start with a barcode
"""
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started barcode demuxing.')
n_seqs = 0
bc_filtered_data = {}
for expt in bcs.keys():
bc_filtered_data[expt] = [s for s in seqs if str(s.seq).startswith(bcs[expt])]
n_seqs += len(bc_filtered_data[expt])
text_logger.info('Finished barcode demuxing. Kept %i of %i sequences.', n_seqs, len(seqs))
return bc_filtered_data
#####################
# Paired End Match Filtering
#####################
def get_coords(s):
return ':'.join(s.description.split(' ')[0].split(':')[3:])
def get_sense(s):
return s.description.split(' ')[1].split(':')[0]
def get_copied_seq(s, f_res):
return s[f_res[0].search(str(s.seq)).end():list(f_res[1].finditer(str(s.seq)))[-1].start()]
def trim_lig_adapter(s, f_res):
return s[:list(f_res[1].finditer(str(s.seq)))[-1].start()]
def gen_copied_seq_function(f_res):
return lambda s: get_copied_seq(s, f_res)
def filter_pe_mismatch(f_seqs, pe_seqs, copied_func):
"""
Args:
f_seqs - sequences from forward reads. Presumably filtered for the
required adatper(s).
pe_seqs - the paired end sequences of f_seqs. Also presumably filtered
for the required adapter(s).
copied_func - takes a sequence, should ouptut the DNA that we expect
to have been copied, i.e. that should be on the paired
end read.
Outputs a list of forward sequences that pass two filters:
* Have a coordinate match in the paired end reads
* That coordinate match has the same sequence.
"""
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started Paired-End Filtering')
# Some housekeeping stuff
proc_ct = 0 # number of sequences processed
co_ct = 0 # number of sequences with coordinate matches
aln_ct = 0 # number of sequences that have paired end sequence matches
matched_seq_list = []
# Get coordinate list
pe_coord_list = [get_coords(s) for s in pe_seqs]
for s in f_seqs:
if get_coords(s) in pe_coord_list: # Filter based on paired-end presence
co_ct += 1
copied = copied_func(s) # Get the part of the sequence that was actually copied
if str(pe_seqs[0].reverse_complement().seq).find(str(copied.seq)): # Filter on PE match
aln_ct += 1
matched_seq_list.append(s)
proc_ct += 1
if not (proc_ct % 5000):
text_logger.info("Processed %i out of %i", proc_ct, len(f_seqs))
text_logger.info("Finished Paired-End Filtering")
text_logger.info("""Kept %i of %i forward sequences after coordinate
filtering""", co_ct, len(f_seqs))
text_logger.info("""Kept %i of %i forward sequences after paired-end sequence
matching""", aln_ct, co_ct)
return matched_seq_list
#####################
# Q-score Filtering
#####################
def quality_filter(seqs, q_cutoff=20):
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started Quality Score Filtering')
out_l = [s for s in seqs
if not any(s.letter_annotations['phred_quality']
< np.ones(len(s.letter_annotations['phred_quality']))*q_cutoff)]
text_logger.info('Finished Quality Score Filtering. Kept %i of %i sequences.',
len(out_l), len(seqs))
return out_l
#####################
# Length Filtering
#####################
def len_filter(seqs, l_cutoff=70, u_cutoff=150, l_barcode=0):
"""
Return only sequence objects that have length between l_cutoff and
u_cutoff
"""
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started Length Filtering')
out_l = [s for s in seqs if (len(s.seq) >= (l_cutoff + l_barcode)) and
(len(s.seq) <= (u_cutoff + l_barcode))]
text_logger.info('Finished Length Filtering. Kept %i of %i sequences.',
len(out_l), len(seqs))
return out_l
#####################
# Alignment Filtering
#####################
def alignment_filter(seqs, template, gapopen=10, gapextend=0.5, lo_cutoff=300,
hi_cutoff=1000, cleanup=True):
text_logger = logging.getLogger(__name__+'.text_logger')
text_logger.info('Started alignment-based filtering')
start_n_seqs = len(seqs)
# Save the template and sequences as temporary fasta files
# Probably some hacking that can be done in the NeedleCommandline stuff
seqs_f_name = 'tempseq.fa'
with open(seqs_f_name, 'w') as sh:
SeqIO.write(seqs, sh, 'fastq')
# Generate alignment command, run the alignment
text_logger.info("""Began EMBOSS needle routine with settings:\ngapopen:
%i\ngapextend: %i\nlo_cutoff: %i\nhi_cutoff: %i""",
gapopen, gapextend, lo_cutoff, hi_cutoff)
ofilen = 'temp_'+str(uuid.uuid4())+'.needle'
needle_cline = NeedleCommandline(asequence='asis::{}'.format(template),
bsequence=seqs_f_name, gapopen=gapopen,
gapextend=gapextend, outfile=ofilen)
needle_cline()
text_logger.info('Finished EMBOSS needle routine')
aln_data = AlignIO.parse(open(ofilen), "emboss")
new_seqs = cull_alignments(aln_data, lo_cutoff=lo_cutoff,
hi_cutoff=hi_cutoff)
# Exit routine
if cleanup:
text_logger.info('Cleaning up temp files')
os.remove(seqs_f_name)
os.remove(ofilen)
text_logger.info("""Finished alignment-based filtering. Kept %i of %i
sequences.""", len(new_seqs), start_n_seqs)
return new_seqs
def cull_alignments(aln_data, lo_cutoff=300, hi_cutoff=650):
new_seqs = []
for alignment in aln_data:
if (alignment.annotations['score'] > lo_cutoff) \
and (alignment.annotations['score'] < hi_cutoff):
# Template should have no gaps, and should contain the whole
# non-template sequence
if not str(alignment[0].seq).count('-') > 0:
new_seqs.append(alignment[1])
new_seqs[-1].annotations['alnscore'] = alignment.annotations['score']
return new_seqs
#####################
# Main Routines
#####################
def setup_logger(name, file_name, log_format, level=logging.INFO):
logger = logging.getLogger(name)
logger.setLevel(level)
handler = logging.FileHandler(file_name)
formatter = logging.Formatter(log_format)
handler.setFormatter(formatter)
logger.addHandler(handler)
return logger
def run_all_experiments(yf_name, save_intermediates=True):
"""
Filters all sequences noted in the passed YAML file.
"""
# Setup text_logger
text_logger = setup_logger(__name__+'.text_logger',
'ngs_%s.log' % time.strftime("%Y%m%d-%H%M%S"),
'%(asctime)s %(message)s')
csv_logger = setup_logger(__name__+'.csv_logger',
'ngs_filter_%s.csv' % time.strftime("%Y%m%d-%H%M%S"),
'%(message)s')
# Load YAML file
with open(yf_name) as expt_f:
expt_yaml = yaml.load(expt_f) # Should probably make this a class at some point...
text_logger.info('Loaded YAML experiment file '+yf_name)
runs = expt_yaml['ngsruns']
text_logger.info('Found NGS Runs: '+', '.join(runs))
for run in tqdm(runs.keys()):
text_logger.info('Performing routine for NGS Run '+run)
expts = runs[run]['experiments']
text_logger.info('Found experiments '+', '.join(expts))
# Get barcodes, templates for all experiments in the run
bcs = {}
templates = {}
for expt in expts:
bcs[expt] = expt_yaml['experiments'][expt]['barcode']
templates[expt] = expt_yaml['experiments'][expt]['template_seq']
# Do filtering
text_logger.info('Starting filtering for run %s', run)
aln_seqs = filter_sample(runs[run]['f_read_name'],
runs[run]['pe_read_name'],
bcs, templates,
runs[run]['filter_seqs']['forward'],
runs[run]['filter_seqs']['reverse'])
if save_intermediates:
for expt in aln_seqs.keys():
with open('aln_seqs_%s_%s.fa' % (run, expt), 'w') as out_f:
SeqIO.write(aln_seqs[expt], out_f, 'fasta')
text_logger.info('Finished filtering for run %s', run)
if __name__ == '__main__':
if len(sys.argv) > 1:
yaml_name = sys.argv[1]
else:
yaml_name = 'samples.yaml'
run_all_experiments(yaml_name, save_intermediates=True)
| mit | -3,774,632,956,410,515,500 | 35.313625 | 120 | 0.593162 | false | 3.631362 | false | false | false |
anton-golubkov/Garland | src/ipf/ipfblock/findchessboard.py | 1 | 2238 | #-------------------------------------------------------------------------------
# Copyright (c) 2011 Anton Golubkov.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the GNU Lesser Public License v2.1
# which accompanies this distribution, and is available at
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.html
#
# Contributors:
# Anton Golubkov - initial API and implementation
#-------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
import cv
import ipfblock
import ioport
import ipf.ipfblock.processing
from property import Property
from ipf.ipftype.ipfimage1ctype import IPFImage1cType
from ipf.ipftype.ipfimage3ctype import IPFImage3cType
from ipf.ipftype.ipfarraytype import IPFArrayType
from ipf.ipftype.ipfinttype import IPFIntType
from ipf.ipftype.ipffindchessboardtype import IPFFindChessboardType
class FindChessboard(ipfblock.IPFBlock):
""" Find chess board corners block
"""
type = "FindChessboard"
category = "Feature detection"
is_abstract_block = False
def __init__(self):
super(FindChessboard, self).__init__()
self.input_ports["input_image"] = ioport.IPort(self, IPFImage1cType)
self.output_ports["output_array"] = ioport.OPort(self, IPFArrayType)
self.properties["type"] = Property(IPFFindChessboardType)
self.properties["width"] = Property(IPFIntType, 3, 20)
self.properties["height"] = Property(IPFIntType, 3, 20)
self.processing_function = ipf.ipfblock.processing.find_chessboard
def get_preview_image(self):
corners = self.output_ports["output_array"]._value
if len(corners) == 0:
return self.input_ports["input_image"]._value
output_image = IPFImage3cType.convert(self.input_ports["input_image"]._value)
width = self.properties["width"].get_value()
height = self.properties["height"].get_value()
cv.DrawChessboardCorners(output_image,
(width, height),
corners,
1)
return output_image
| lgpl-2.1 | -891,357,427,570,049,300 | 36.3 | 85 | 0.615728 | false | 3.819113 | false | false | false |
demisto/content | Packs/UnifiVideoNVR/Integrations/UnifiVideo/UnifiVideo.py | 1 | 8478 | import cv2
import demistomock as demisto # noqa: F401
from CommonServerPython import * # noqa: F401
from unifi_video import UnifiVideoAPI
import dateparser
import json
demisto_format = '%Y-%m-%dT%H:%M:%SZ'
params = demisto.params()
args = demisto.args()
api_key = params.get('api_key')
address = params.get('addr')
port = params.get('port')
schema = params.get('schema')
fetch_limit = params.get('fetch_limit')
verify_cert = params.get('verify_cert')
FETCH_TIME = params.get('fetch_time')
if demisto.command() == 'test-module':
# This is the call made when pressing the integration test button.
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
demisto.results('ok')
if demisto.command() == 'unifivideo-get-camera-list':
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
context_output = []
for camera in uva.cameras:
context_output.append(camera.name)
results = [
CommandResults(
outputs_prefix='UnifiVideo.Cameras',
readable_output=tableToMarkdown("Camera list", context_output, headers=["Camera name"], removeNull=False),
outputs=context_output
)]
return_results(results)
if demisto.command() == 'unifivideo-get-snapshot':
camera_name = args.get('camera_name')
output = bytes()
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.get_camera(camera_name).snapshot("/tmp/snapshot.png")
f = open("/tmp/snapshot.png", "rb")
output = f.read()
filename = "snapshot.png"
file = fileResult(filename=filename, data=output)
file['Type'] = entryTypes['image']
demisto.results(file)
if demisto.command() == 'unifivideo-set-recording-settings':
camera_name = args.get('camera_name')
rec_set = args.get('rec_set')
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.get_camera(camera_name).set_recording_settings(rec_set)
demisto.results(camera_name + ": " + rec_set)
if demisto.command() == 'unifivideo-ir-leds':
camera_name = args.get('camera_name')
ir_leds = args.get('ir_leds')
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.get_camera(camera_name).ir_leds(ir_leds)
demisto.results(camera_name + ": " + ir_leds)
if demisto.command() == 'unifivideo-get-recording':
recording_id = args.get('recording_id')
recording_file_name = 'recording-' + recording_id + '.mp4'
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.refresh_recordings(0)
uva.recordings[recording_id].download('/tmp/recording.mp4')
f = open("/tmp/recording.mp4", "rb")
output = f.read()
filename = recording_file_name
file = fileResult(filename=filename, data=output, file_type=EntryType.ENTRY_INFO_FILE)
demisto.results(file)
if demisto.command() == 'unifivideo-get-recording-motion-snapshot':
recording_id = args.get('recording_id')
snapshot_file_name = 'snapshot-motion-' + recording_id + '.jpg'
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.refresh_recordings(0)
uva.recordings[recording_id].motion('/tmp/snapshot.png')
f = open("/tmp/snapshot.png", "rb")
output = f.read()
filename = snapshot_file_name
file = fileResult(filename=filename, data=output)
file['Type'] = entryTypes['image']
demisto.results(file)
if demisto.command() == 'unifivideo-get-recording-snapshot':
recording_id = args.get('recording_id')
snapshot_file_name = 'snapshot-' + recording_id + '-' + args.get('frame') + '.jpg'
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
uva.refresh_recordings(0)
uva.recordings[recording_id].download('/tmp/recording.mp4')
if "frame" in args:
vc = cv2.VideoCapture('/tmp/recording.mp4') # pylint: disable=E1101
c = 1
if vc.isOpened():
rval, frame = vc.read()
else:
rval = False
while rval:
rval, frame = vc.read()
c = c + 1
if c == int(args.get('frame')):
cv2.imwrite("/tmp/" + snapshot_file_name, frame) # pylint: disable=E1101
break
vc.release()
f = open("/tmp/" + snapshot_file_name, "rb")
output = f.read()
filename = snapshot_file_name
file = fileResult(filename=filename, data=output)
file['Type'] = entryTypes['image']
demisto.results(file)
if demisto.command() == 'unifivideo-get-recording-list':
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
recordings = []
for rec in uva.get_recordings():
rec_tmp = {}
rec_tmp['id'] = rec._id
rec_tmp['rec_type'] = rec.rec_type
rec_tmp['start_time'] = rec.start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
rec_tmp['end_time'] = rec.start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
recordings.append(rec_tmp)
results = [
CommandResults(
outputs_prefix='UnifiVideo.Recordings',
readable_output=tableToMarkdown("Recording list", recordings, headers=["id", "rec_type", "start_time", "end_time"]),
outputs_key_field=['id'],
outputs=recordings
)]
return_results(results)
if demisto.command() == 'unifivideo-get-snapshot-at-frame':
entry_id = demisto.args().get('entryid')
snapshot_file_name = 'snapshot-' + entry_id + '-' + args.get('frame') + '.jpg'
try:
file_result = demisto.getFilePath(entry_id)
except Exception as ex:
return_error("Failed to load file entry with entryid: {}. Error: {}".format(entry_id, ex))
video_path = file_result.get("path") # pylint: disable=E1101
vc = cv2.VideoCapture(video_path) # pylint: disable=E1101
c = 1
if vc.isOpened():
rval, frame = vc.read()
else:
rval = False
while rval:
rval, frame = vc.read()
c = c + 1
if c == int(args.get('frame')):
cv2.imwrite("/tmp/" + snapshot_file_name, frame) # pylint: disable=E1101
break
vc.release()
f = open("/tmp/" + snapshot_file_name, "rb")
output = f.read()
filename = snapshot_file_name
file = fileResult(filename=filename, data=output)
file['Type'] = entryTypes['image']
demisto.results(file)
if demisto.command() == 'fetch-incidents':
start_time_of_int = str(datetime.now())
uva = UnifiVideoAPI(api_key=api_key, addr=address, port=port, schema=schema, verify_cert=verify_cert)
# And retrieve it for use later:
last_run = demisto.getLastRun()
# lastRun is a dictionary, with value "now" for key "time".
# JSON of the incident type created by this integration
inc = []
start_time = dateparser.parse(FETCH_TIME)
if last_run:
start_time = last_run.get('start_time')
if not isinstance(start_time, datetime):
start_time = datetime.strptime(str(start_time), '%Y-%m-%d %H:%M:%S.%f')
uva.refresh_recordings()
for rec in uva.get_recordings(limit=fetch_limit, start_time=start_time, order='desc'):
incident = {}
datetime_object = datetime.strptime(str(rec.start_time), '%Y-%m-%d %H:%M:%S')
for camera in uva.cameras:
cam_id = uva.get_camera(camera.name)
if cam_id._id in rec.cameras:
camera_name = camera.name
try:
if datetime_object > start_time:
incident = {
'name': rec.rec_type,
'occurred': datetime_object.strftime('%Y-%m-%dT%H:%M:%SZ'),
'rawJSON': json.dumps({"event": rec.rec_type, "ubnt_id": rec._id, "camera_name": camera_name,
"integration_lastrun": str(start_time), "start_time": str(rec.start_time),
"stop_time": str(rec.end_time)})
}
inc.append(incident)
except Exception as e:
raise Exception("Problem comparing: " + str(datetime_object) + ' ' + str(start_time) + " Exception: " + str(e))
demisto.incidents(inc)
demisto.setLastRun({'start_time': start_time_of_int})
| mit | 6,209,920,336,024,969,000 | 40.763547 | 128 | 0.623378 | false | 3.307842 | false | false | false |
helgefmi/Easy | src/easy/lexer.py | 1 | 4113 | import re
class Token(object):
def __init__(self, lineno, token_type, token_value=None):
self._type = token_type
self._value = token_value
self._lineno = lineno
@property
def type(self):
return self._type
@property
def value(self):
return self._value
@property
def lineno(self):
return self._lineno
def __str__(self):
if self.type == 'tok_string':
return '"%s"' % self.value
if self.value is None:
return self.type
else:
return str(self.value)
class Lexer(object):
KEYWORDS = (
'def', 'do', 'end',
'if', 'then', 'else',
'return',
)
SYMBOLS = (
('>=', 'tok_binary_op'),
('<=', 'tok_binary_op'),
('==', 'tok_binary_op'),
('!=', 'tok_binary_op'),
('<', 'tok_binary_op'),
('>', 'tok_binary_op'),
('*', 'tok_binary_op'),
('-', 'tok_binary_op'),
('/', 'tok_binary_op'),
('+', 'tok_binary_op'),
('(', 'tok_paren_start'),
(')', 'tok_paren_end'),
(';', 'tok_semicolon'),
)
def __init__(self, input, filename=None):
self.input = input
self._tokens = []
self._lineno = 1
def _append(self, type, value=None):
token = Token(self._lineno, type, value)
self._tokens.append(token)
def _strip_whitespace(self):
for char in self.input:
if not char.isspace():
break
if char == '\n':
self._lineno += 1
self.input = self.input.lstrip()
def _assert(self, cond, error, lineno=None):
lineno = lineno or self._lineno
if not cond:
print error
print 'At line %d' % lineno
print 'input[:10] = %s' % repr(self.input[:10])
exit(1)
def lex(self):
while True:
self._strip_whitespace()
if not self.input:
break
result = (self.lex_identifier() or self.lex_number() or
self.lex_symbol() or self.lex_string() or
self.lex_type())
self._assert(result, 'Unexpected input')
return self._tokens
def lex_string(self):
if self.input[0] != '"':
return False
self.input = self.input[1:]
start_lineno = self._lineno
last = None
for i, char in enumerate(self.input):
if char == '\n':
self._lineno += 1
if char == '"' and last != '\\':
break
last = char
else:
self._assert(False, 'Unterminated string literal; expecting "',
start_lineno)
string, self.input = self.input[:i], self.input[i + 1:]
self._append('tok_string', string)
return True
def lex_identifier(self):
match = re.match(r'[a-z][a-zA-Z0-9_]*', self.input)
if not match:
return False
id = match.group()
self.input = self.input[match.end():]
if id in self.KEYWORDS:
self._append('tok_%s' % id)
else:
self._append('tok_identifier', id)
return True
def lex_type(self):
match = re.match(r'[A-Z][a-zA-Z0-9_]*', self.input)
if not match:
return False
name = match.group()
self.input = self.input[match.end():]
self._append('tok_type', name)
return True
def lex_symbol(self):
for symbol, token in self.SYMBOLS:
if self.input.startswith(symbol):
self.input = self.input[len(symbol):]
self._append(token, symbol)
return True
return False
def lex_number(self):
for i, char in enumerate(self.input):
if not char.isdigit():
break
if i == 0:
return False
number, self.input = self.input[:i], self.input[i:]
self._append('tok_number', int(number))
return True
| mit | -6,500,820,321,542,905,000 | 26.42 | 75 | 0.480671 | false | 3.924618 | false | false | false |
Azure/azure-sdk-for-python | sdk/appconfiguration/azure-mgmt-appconfiguration/azure/mgmt/appconfiguration/models/_app_configuration_management_client_enums.py | 1 | 3123 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum, EnumMeta
from six import with_metaclass
class _CaseInsensitiveEnumMeta(EnumMeta):
def __getitem__(self, name):
return super().__getitem__(name.upper())
def __getattr__(cls, name):
"""Return the enum member matching `name`
We use __getattr__ instead of descriptors or inserting into the enum
class' __dict__ in order to support `name` and `value` being both
properties for enum members (which live in the class' __dict__) and
enum members themselves.
"""
try:
return cls._member_map_[name.upper()]
except KeyError:
raise AttributeError(name)
class ActionsRequired(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Any action that is required beyond basic workflow (approve/ reject/ disconnect)
"""
NONE = "None"
RECREATE = "Recreate"
class ConfigurationResourceType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The resource type to check for name availability.
"""
MICROSOFT_APP_CONFIGURATION_CONFIGURATION_STORES = "Microsoft.AppConfiguration/configurationStores"
class ConnectionStatus(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The private link service connection status.
"""
PENDING = "Pending"
APPROVED = "Approved"
REJECTED = "Rejected"
DISCONNECTED = "Disconnected"
class CreatedByType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The type of identity that created the resource.
"""
USER = "User"
APPLICATION = "Application"
MANAGED_IDENTITY = "ManagedIdentity"
KEY = "Key"
class IdentityType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The type of managed identity used. The type 'SystemAssigned, UserAssigned' includes both an
implicitly created identity and a set of user-assigned identities. The type 'None' will remove
any identities.
"""
NONE = "None"
SYSTEM_ASSIGNED = "SystemAssigned"
USER_ASSIGNED = "UserAssigned"
SYSTEM_ASSIGNED_USER_ASSIGNED = "SystemAssigned, UserAssigned"
class ProvisioningState(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The provisioning state of the configuration store.
"""
CREATING = "Creating"
UPDATING = "Updating"
DELETING = "Deleting"
SUCCEEDED = "Succeeded"
FAILED = "Failed"
CANCELED = "Canceled"
class PublicNetworkAccess(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Control permission for data plane traffic coming from public networks while private endpoint is
enabled.
"""
ENABLED = "Enabled"
DISABLED = "Disabled"
| mit | -6,880,166,865,497,613,000 | 34.488636 | 103 | 0.666346 | false | 4.367832 | true | false | false |
m1trix/Tetris-Wars | tetris_wars/sdl2/test/hints_test.py | 1 | 3881 | import sys
import unittest
from ctypes import cast, c_char_p
from .. import SDL_Init, SDL_Quit, SDL_QuitSubSystem, SDL_INIT_EVERYTHING
from .. import hints
class SDLHintsTest(unittest.TestCase):
__tags__ = ["sdl"]
def setUp(self):
SDL_Init(SDL_INIT_EVERYTHING)
def tearDown(self):
SDL_QuitSubSystem(SDL_INIT_EVERYTHING)
SDL_Quit()
def test_SDL_ClearHints(self):
self.assertEqual(hints.SDL_SetHint(b"TEST", b"32"), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"32")
hints.SDL_ClearHints()
self.assertEqual(hints.SDL_GetHint(b"TEST"), None)
def test_SDL_GetHint(self):
self.assertEqual(hints.SDL_SetHint(b"TEST", b"32"), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"32")
self.assertEqual(hints.SDL_SetHint(hints.SDL_HINT_RENDER_DRIVER,
b"dummy"), 1)
self.assertEqual(hints.SDL_GetHint(hints.SDL_HINT_RENDER_DRIVER),
b"dummy")
def test_SDL_SetHint(self):
self.assertEqual(hints.SDL_SetHint(b"TEST", b"32"), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"32")
self.assertEqual(hints.SDL_SetHint(b"TEST", b"abcdef"), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"abcdef")
if sys.platform != "cli":
# TODO: Check on next IronPython version (>2.7.4)
self.assertEqual(hints.SDL_SetHint(b"", b""), 1)
self.assertEqual(hints.SDL_GetHint(b""), b"")
def test_SDL_SetHintWithPriority(self):
self.assertEqual(hints.SDL_SetHintWithPriority
(b"TEST", b"32", hints.SDL_HINT_DEFAULT), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"32")
self.assertEqual(hints.SDL_SetHintWithPriority
(b"TEST", b"abcdef", hints.SDL_HINT_NORMAL), 1)
self.assertEqual(hints.SDL_GetHint(b"TEST"), b"abcdef")
if sys.platform != "cli":
# TODO: Check on next IronPython version (>2.7.4)
self.assertEqual(hints.SDL_SetHintWithPriority
(b"", b"", hints.SDL_HINT_OVERRIDE), 1)
self.assertEqual(hints.SDL_GetHint(b""), b"")
# self.assertRaises(ValueError, hints.SDL_SetHintWithPriority,
# "TEST", "123456789", 12)
# self.assertRaises(ValueError, hints.SDL_SetHintWithPriority,
# "TEST", "123456789", -78)
# self.assertRaises(ValueError, hints.SDL_SetHintWithPriority,
# "TEST", "123456789", None)
# self.assertRaises(ValueError, hints.SDL_SetHintWithPriority,
# "TEST", "123456789", "bananas")
def test_SDL_AddDelHintCallback(self):
calls = []
def callback(userdata, name, oldval, newval):
data = cast(userdata, c_char_p)
calls.append((data.value, name, oldval, newval))
hintcb = hints.SDL_HintCallback(callback)
udata = c_char_p(b"banana")
hints.SDL_AddHintCallback(hints.SDL_HINT_ALLOW_TOPMOST, hintcb,
udata)
# SDL_AddHintCallback invokes the callback once.
self.assertEqual(len(calls), 1)
self.assertEqual(calls[0], (b"banana", hints.SDL_HINT_ALLOW_TOPMOST,
None, None))
hints.SDL_SetHint(hints.SDL_HINT_ALLOW_TOPMOST, b"true")
self.assertEqual(len(calls), 2)
self.assertEqual(calls[1], (b"banana", hints.SDL_HINT_ALLOW_TOPMOST,
None, b"true"))
hints.SDL_DelHintCallback(hints.SDL_HINT_ALLOW_TOPMOST, hintcb,
udata)
hints.SDL_SetHint(hints.SDL_HINT_ALLOW_TOPMOST, b"false")
self.assertEqual(len(calls), 2)
if __name__ == '__main__':
sys.exit(unittest.main())
| gpl-2.0 | 3,688,641,344,245,164,000 | 42.606742 | 76 | 0.582324 | false | 3.531392 | true | false | false |
endlessm/chromium-browser | third_party/catapult/telemetry/telemetry/core/platform.py | 1 | 14924 | # Copyright 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging as real_logging
import os
import sys
import time
from telemetry.core import local_server
from telemetry.core import memory_cache_http_server
from telemetry.core import network_controller
from telemetry.core import tracing_controller
from telemetry.core import util
from telemetry.internal.platform import (platform_backend as
platform_backend_module)
from py_utils import discover
_HOST_PLATFORM = None
# Remote platform is a dictionary from device ids to remote platform instances.
_REMOTE_PLATFORMS = {}
def _InitHostPlatformIfNeeded():
global _HOST_PLATFORM # pylint: disable=global-statement
if _HOST_PLATFORM:
return
backend = None
backends = _IterAllPlatformBackendClasses()
for platform_backend_class in backends:
if platform_backend_class.IsPlatformBackendForHost():
backend = platform_backend_class()
break
if not backend:
raise NotImplementedError()
_HOST_PLATFORM = Platform(backend)
def GetHostPlatform():
_InitHostPlatformIfNeeded()
return _HOST_PLATFORM
def _IterAllPlatformBackendClasses():
platform_dir = os.path.dirname(os.path.realpath(
platform_backend_module.__file__))
return discover.DiscoverClasses(
platform_dir, util.GetTelemetryDir(),
platform_backend_module.PlatformBackend).itervalues()
def GetPlatformForDevice(device, finder_options, logging=real_logging):
""" Returns a platform instance for the device.
Args:
device: a device.Device instance.
"""
if device.guid in _REMOTE_PLATFORMS:
return _REMOTE_PLATFORMS[device.guid]
try:
for platform_backend_class in _IterAllPlatformBackendClasses():
if platform_backend_class.SupportsDevice(device):
_REMOTE_PLATFORMS[device.guid] = (
platform_backend_class.CreatePlatformForDevice(device,
finder_options))
return _REMOTE_PLATFORMS[device.guid]
return None
except Exception:
current_exception = sys.exc_info()
logging.error('Fail to create platform instance for %s.', device.name)
raise current_exception[0], current_exception[1], current_exception[2]
class Platform(object):
"""The platform that the target browser is running on.
Provides a limited interface to interact with the platform itself, where
possible. It's important to note that platforms may not provide a specific
API, so check with IsFooBar() for availability.
"""
def __init__(self, platform_backend):
self._platform_backend = platform_backend
self._platform_backend.InitPlatformBackend()
self._platform_backend.SetPlatform(self)
self._network_controller = network_controller.NetworkController(
self._platform_backend.network_controller_backend)
self._tracing_controller = tracing_controller.TracingController(
self._platform_backend.tracing_controller_backend)
self._local_server_controller = local_server.LocalServerController(
self._platform_backend)
self._forwarder = None
@property
def is_host_platform(self):
return self == GetHostPlatform()
@property
def network_controller(self):
"""Control network settings and servers to simulate the Web."""
return self._network_controller
@property
def tracing_controller(self):
return self._tracing_controller
def Initialize(self):
pass
def CanMonitorThermalThrottling(self):
"""Platforms may be able to detect thermal throttling.
Some fan-less computers go into a reduced performance mode when their heat
exceeds a certain threshold. Performance tests in particular should use this
API to detect if this has happened and interpret results accordingly.
"""
return self._platform_backend.CanMonitorThermalThrottling()
def GetSystemLog(self):
return self._platform_backend.GetSystemLog()
def IsThermallyThrottled(self):
"""Returns True if the device is currently thermally throttled."""
return self._platform_backend.IsThermallyThrottled()
def HasBeenThermallyThrottled(self):
"""Returns True if the device has been thermally throttled."""
return self._platform_backend.HasBeenThermallyThrottled()
def GetDeviceTypeName(self):
"""Returns a string description of the Platform device, or None.
Examples: Nexus 7, Nexus 6, Desktop"""
return self._platform_backend.GetDeviceTypeName()
def GetArchName(self):
"""Returns a string description of the Platform architecture.
Examples: x86_64 (posix), AMD64 (win), armeabi-v7a, x86"""
return self._platform_backend.GetArchName()
def GetOSName(self):
"""Returns a string description of the Platform OS.
Examples: WIN, MAC, LINUX, CHROMEOS"""
return self._platform_backend.GetOSName()
def GetDeviceId(self):
"""Returns a string identifying the device.
Examples: 0123456789abcdef"""
return self._platform_backend.GetDeviceId()
def GetOSVersionName(self):
"""Returns a logically sortable, string-like description of the Platform OS
version.
Examples: VISTA, WIN7, LION, MOUNTAINLION"""
return self._platform_backend.GetOSVersionName()
def GetOSVersionDetailString(self):
"""Returns more detailed information about the OS version than
GetOSVersionName, if available. Otherwise returns the empty string.
Examples: '10.12.4' on macOS."""
return self._platform_backend.GetOSVersionDetailString()
def GetSystemTotalPhysicalMemory(self):
"""Returns an integer with the total physical memory in bytes."""
return self._platform_backend.GetSystemTotalPhysicalMemory()
def CanFlushIndividualFilesFromSystemCache(self):
"""Returns true if the disk cache can be flushed for individual files."""
return self._platform_backend.CanFlushIndividualFilesFromSystemCache()
def SupportFlushEntireSystemCache(self):
"""Returns true if entire system cache can be flushed.
Also checks that platform has required privilegues to flush system caches.
"""
return self._platform_backend.SupportFlushEntireSystemCache()
def _WaitForPageCacheToBeDropped(self):
# There seems to be no reliable way to wait for all pages to be dropped from
# the OS page cache (also known as 'file cache'). There is no guaranteed
# moment in time when everything is out of page cache. A number of pages
# will likely be reused before other pages are evicted. While individual
# files can be watched in limited ways, we choose not to be clever.
time.sleep(2)
def FlushEntireSystemCache(self):
"""Flushes the OS's file cache completely.
This function may require root or administrator access. Clients should
call SupportFlushEntireSystemCache to check first.
"""
self._platform_backend.FlushEntireSystemCache()
self._WaitForPageCacheToBeDropped()
def FlushSystemCacheForDirectories(self, directories):
"""Flushes the OS's file cache for the specified directory.
This function does not require root or administrator access."""
for path in directories:
self._platform_backend.FlushSystemCacheForDirectory(path)
self._WaitForPageCacheToBeDropped()
def FlushDnsCache(self):
"""Flushes the OS's DNS cache completely.
This function may require root or administrator access."""
return self._platform_backend.FlushDnsCache()
def LaunchApplication(self,
application,
parameters=None,
elevate_privilege=False):
""""Launches the given |application| with a list of |parameters| on the OS.
Set |elevate_privilege| to launch the application with root or admin rights.
Returns:
A popen style process handle for host platforms.
"""
return self._platform_backend.LaunchApplication(
application,
parameters,
elevate_privilege=elevate_privilege)
def StartActivity(self, intent, blocking=False):
"""Starts an activity for the given intent on the device."""
return self._platform_backend.StartActivity(intent, blocking)
def CanLaunchApplication(self, application):
"""Returns whether the platform can launch the given application."""
return self._platform_backend.CanLaunchApplication(application)
def InstallApplication(self, application, **kwargs):
"""Installs the given application."""
return self._platform_backend.InstallApplication(application, **kwargs)
def IsCooperativeShutdownSupported(self):
"""Indicates whether CooperativelyShutdown, below, is supported.
It is not necessary to implement it on all platforms."""
return self._platform_backend.IsCooperativeShutdownSupported()
def CooperativelyShutdown(self, proc, app_name):
"""Cooperatively shut down the given process from subprocess.Popen.
Currently this is only implemented on Windows. See
crbug.com/424024 for background on why it was added.
Args:
proc: a process object returned from subprocess.Popen.
app_name: on Windows, is the prefix of the application's window
class name that should be searched for. This helps ensure
that only the application's windows are closed.
Returns True if it is believed the attempt succeeded.
"""
return self._platform_backend.CooperativelyShutdown(proc, app_name)
def CanTakeScreenshot(self):
return self._platform_backend.CanTakeScreenshot()
# TODO(nednguyen): Implement this on Mac, Linux & Win. (crbug.com/369490)
def TakeScreenshot(self, file_path):
""" Takes a screenshot of the platform and save to |file_path|.
Note that this method may not be supported on all platform, so check with
CanTakeScreenshot before calling this.
Args:
file_path: Where to save the screenshot to. If the platform is remote,
|file_path| is the path on the host platform.
Returns True if it is believed the attempt succeeded.
"""
return self._platform_backend.TakeScreenshot(file_path)
def CanRecordVideo(self):
return self._platform_backend.CanRecordVideo()
def StartVideoRecording(self):
"""Starts recording a video on the device.
Note that this method may not be supported on all platforms, so the caller
must check with CanRecordVideo before calling this. Once the caller starts
recording a video using this call, the caller must stop recording the video
by calling StopVideoRecording() before attempting to start recording another
video.
"""
self._platform_backend.StartVideoRecording()
def StopVideoRecording(self, video_path):
"""Stops recording a video on the device and saves to |video_path|.
This method must be called only if recording a video had started using a
call to StartVideoRecording(), and it was not already stopped using a call
to StopVideoRecording().
Args:
video_path: Where to save the video to. If the platform is remote,
|video_path| is the path on the host platform.
"""
self._platform_backend.StopVideoRecording(video_path)
def SetFullPerformanceModeEnabled(self, enabled):
""" Set full performance mode on the platform.
Note: this can be no-op on certain platforms.
"""
return self._platform_backend.SetFullPerformanceModeEnabled(enabled)
def StartLocalServer(self, server):
"""Starts a LocalServer and associates it with this platform.
|server.Close()| should be called manually to close the started server.
"""
self._local_server_controller.StartServer(server)
@property
def http_server(self):
# TODO(crbug.com/799490): Ownership of the local server should be moved
# to the network_controller.
server = self._local_server_controller.GetRunningServer(
memory_cache_http_server.MemoryCacheDynamicHTTPServer, None)
if server:
return server
return self._local_server_controller.GetRunningServer(
memory_cache_http_server.MemoryCacheHTTPServer, None)
def SetHTTPServerDirectories(self, paths, handler_class=None):
"""Returns True if the HTTP server was started, False otherwise."""
# pylint: disable=redefined-variable-type
if isinstance(paths, basestring):
paths = set([paths])
paths = set(os.path.realpath(p) for p in paths)
# If any path is in a subdirectory of another, remove the subdirectory.
duplicates = set()
for parent_path in paths:
for sub_path in paths:
if parent_path == sub_path:
continue
if os.path.commonprefix((parent_path, sub_path)) == parent_path:
duplicates.add(sub_path)
paths -= duplicates
if self.http_server:
old_handler_class = getattr(self.http_server,
"dynamic_request_handler_class", None)
if not old_handler_class and not handler_class and \
self.http_server.paths == paths:
return False
if old_handler_class and handler_class \
and old_handler_class.__name__ == handler_class.__name__ \
and self.http_server.paths == paths:
return False
self.http_server.Close()
if not paths:
return False
if handler_class:
server = memory_cache_http_server.MemoryCacheDynamicHTTPServer(
paths, handler_class)
real_logging.info('MemoryCacheDynamicHTTPServer created')
else:
server = memory_cache_http_server.MemoryCacheHTTPServer(paths)
real_logging.info('MemoryCacheHTTPServer created')
self.StartLocalServer(server)
# For now, Fuchsia needs to do port forwarding due to --proxy-server
# flag not being supported in its browser.
# TODO(https://crbug.com/1014670): Remove once debug flags supported in
# Fuchsia browsers.
if self._platform_backend.GetOSName() == 'fuchsia':
self._platform_backend.forwarder_factory.Create(server.port, server.port)
return True
def StopAllLocalServers(self):
self._local_server_controller.Close()
if self._forwarder:
self._forwarder.Close()
@property
def local_servers(self):
"""Returns the currently running local servers."""
return self._local_server_controller.local_servers
def WaitForBatteryTemperature(self, temp):
"""Waits for the battery on the device under test to cool down to temp.
Args:
temp: temperature target in degrees C.
"""
return self._platform_backend.WaitForBatteryTemperature(temp)
def WaitForCpuTemperature(self, temp):
"""Waits for the CPU temperature to be less than temp.
Args:
temp: A float containing the maximum temperature to allow
in degrees c.
"""
return self._platform_backend.WaitForCpuTemperature(temp)
def GetTypExpectationsTags(self):
return self._platform_backend.GetTypExpectationsTags()
| bsd-3-clause | 561,020,929,033,317,700 | 35.311436 | 80 | 0.718306 | false | 4.298387 | false | false | false |
Dirrot/python-dogechain-api | DogechainApi/DogechainApi.py | 1 | 3991 | '''
Created on 21.01.2014
@author: Dirk Rother
@contact: [email protected]
@license: GPL
@version: 0.1
'''
from urllib2 import Request, urlopen, URLError, HTTPError
class API(object):
'''
This class is a wrapper class for the dogechain.info api.
'''
API_PATH = "http://www.dogechain.info/chain/Dogecoin/"
API_QUERY = API_PATH + "q/"
def addressbalance(self, address):
'''
Amount ever received minus amount ever sent by a given address.
Usage: API_QUERY + addressbalance/ADDRESS
'''
url = self.API_QUERY + 'addressbalance/' + address
return self._getdata(url)
def addresstohash(self, address):
'''
Shows the public key hash encoded in an address.
Usage: API_QUERY + addresstohash/ADDRESS
'''
url = self.API_QUERY + 'addresstohash/' + address
return self._getdata(url)
def checkaddress(self, address):
'''
Checks an address for validity.
Usage: API_QUERY + checkaddress/ADDRESS
'''
url = self.API_QUERY + 'checkaddress/' + address
return self._getdata(url)
def decode_address(self, address):
'''
Shows the version prefix and hash encoded in an address.
Usage: API_QUERY + decode_address/ADDRESS
'''
url = self.API_QUERY + 'decode_address/' + address
return self._getdata(url)
def getblockcount(self):
'''
Shows the current block number.
Usage: API_QUERY + getblockcount
'''
url = self.API_QUERY + 'getblockcount'
return self._getdata(url)
def getdifficulty(self):
'''
Shows the last solved block's difficulty.
Usage: API_QUERY + getdifficulty
'''
url = self.API_QUERY + 'getdifficulty'
return self._getdata(url)
def getreceivedbyaddress(self, address):
'''
Shows the amount ever received from a given address.
(not balance, sends are not subtracted)
Usage: API_QUERY + getreceivedbyaddress/ADDRESS
'''
url = self.API_QUERY + 'getreceivedbyaddress/' + address
return self._getdata(url)
def getsentbyaddress(self, address):
'''
Shows the amount ever sent from a given address.
Usage: API_QUERY + getsentbyaddress/ADDRESS
'''
url = self.API_QUERY + 'getsentbyaddress/' + address
return self._getdata(url)
def hashtoaddress(self, hash):
'''
Shows the address with the given version prefix an hash.
Converts a 160-bit hash and address version to an address.
Usage: API_QUERY + hashtoaddress/HASH
'''
url = self.API_QUERY + 'hashtoaddress/' + hash
return self._getdata(url)
def nethash(self):
'''
Shows statistics about difficulty and network power.
Usage: API_QUERY + nethash
'''
url = self.API_QUERY + 'nethash'
return self._getdata(url)
def totalbc(self):
'''
Shows the amount of currency ever mined.
Usage: API_QUERY + totalbc
'''
url = self.API_QUERY + 'totalbc'
return self._getdata(url)
def transactions(self):
'''
Shows the amount transactions of the last blocks.
Usage: API_QUERY + transactions
'''
url = self.API_QUERY + 'transactions'
return self._getdata(url)
def _getdata(self, url):
'''
Wrapper method
'''
request = Request(url)
try:
response = urlopen(request)
except HTTPError as e:
print 'The Server couldn\'t fulfill the request.'
print 'Error code: ', e.code
except URLError as e:
print 'We failed to reach a server.'
print 'Reason: ', e.code
else:
# Everything is fine.
return response.read()
| gpl-2.0 | 10,162,344,229,742,784 | 28.783582 | 71 | 0.572288 | false | 4.254797 | false | false | false |
RealTimeWeb/Blockpy-Server | controllers/services.py | 1 | 2386 | import logging
from pprint import pprint
from flask_wtf import Form
from wtforms import IntegerField, BooleanField
from flask import Blueprint, send_from_directory
from flask import Flask, redirect, url_for, session, request, jsonify, g,\
make_response, Response, render_template
from werkzeug.utils import secure_filename
from sqlalchemy import Date, cast, func, desc, or_
from main import app
from controllers.helpers import crossdomain
from interaction_logger import StructuredEvent
services = Blueprint('services', __name__, url_prefix='/services')
from controllers.service_libraries import weather as weather_service
@services.route('/weather/', methods=['GET', "POST"])
@services.route('/weather', methods=['GET', 'POST'])
def weather():
function = request.args.get("function", "get_temperature")
city = request.args.get("city", "Blacksburg, VA")
weather_function = getattr(weather_service, function)
return jsonify(data=weather_function(city))
@services.route('/sheets', methods=['GET'])
def sheets(sheet_url):
sheet_id = ''
if sheet_url.startswith('http'):
sheet_url.split('/')
elif sheet_url.startswith('docs'):
sheet_url.split('/')
elif sheet_url.startswith('docs'):
sheet_url.split('/')
# sample:
# https://docs.google.com/spreadsheets/d/1eLbX_5EFvZYc7JOGYF8ATdu5uQeu6OvILNnr4vH3vFI/pubhtml
# =>
# https://spreadsheets.google.com/feeds/list/___/od6/public/basic?alt=json
# https://spreadsheets.google.com/feeds/list/1eLbX_5EFvZYc7JOGYF8ATdu5uQeu6OvILNnr4vH3vFI/od6/public/basic?alt=json
@services.route('/log/', methods=['GET', 'POST', 'OPTIONS'])
@services.route('/log', methods=['GET', 'POST', 'OPTIONS'])
#@crossdomain(origin='*')
def log_event():
user_id = request.form.get('user_id', "")
if user_id == "":
user_id = str(request.remote_addr)
question_id = request.form.get('question_id', "")
event = request.form.get('event', "")
action = request.form.get('action', "")
body = request.form.get('body', "")
external_interactions_logger = logging.getLogger('ExternalInteractions')
external_interactions_logger.info(
StructuredEvent(user_id, question_id, event, action, body)
)
response = make_response('success')
response.headers['Access-Control-Allow-Origin'] = "*"
return response
| mit | -2,807,518,795,514,543,600 | 37.483871 | 119 | 0.687343 | false | 3.488304 | false | false | false |
spring01/libPSI | lib/python/grendel/util/units/unit.py | 1 | 21791 | from __future__ import absolute_import
from collections import defaultdict
import math
from numbers import Number, Real
from grendel.util.aliasing import function_alias
from grendel.util.strings import classname
import sys
# Version 3 compatibility
if sys.version_info[0] == 3:
basestring = str
__all__ = [
'Unit',
'isunit', 'is_unit',
'convert', 'convert_units',
'compatible_units', 'iscompatible',
# Unit genres:
'DistanceUnit',
'EnergyUnit',
'AngularUnit',
'ElectricChargeUnit',
'MassUnit',
'TimeUnit'
]
#############
# Utilities #
#############
def isunit(unit):
    """ Return True if `unit` is a unit class or a CompositeUnit instance. """
    return isinstance(unit, (Unit, CompositeUnit))
is_unit = function_alias('is_unit', isunit)
def plural(unit): # pragma: no cover
    if not isunit(unit):
        raise TypeError("plural() expects a unit; got '{0}'".format(unit))
    return unit.__plural__
def convert_units(val, from_unit, to_unit):
    """ Convert the number `val` from `from_unit` to the compatible unit `to_unit`. """
if not isunit(from_unit):
raise UnknownUnitError(from_unit)
if not isunit(to_unit):
raise UnknownUnitError(to_unit)
if from_unit == to_unit:
return val
return val * from_unit.to(to_unit)
convert = function_alias('convert', convert_units)
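
# A minimal usage sketch (illustrative only -- Angstrom and Bohr stand in for
# whatever DistanceUnit subclasses are actually defined later in this module):
#     convert_units(1.0, Angstrom, Bohr)  # length of one Angstrom, in Bohr
#     convert(1.0, Angstrom, Bohr)        # the same call through the alias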
def compatible_units(unit1, unit2):
    """ Return True if `unit1` can be converted to `unit2` (i.e. they share a genre). """
try:
unit1.to(unit2)
return True
except IncompatibleUnitsError:
return False
iscompatible = function_alias('iscompatible', compatible_units)
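
# For instance (hypothetical units, assuming they are defined below):
#     compatible_units(Angstrom, Bohr)    # True:  both are DistanceUnits
#     compatible_units(Angstrom, Degree)  # False: distance vs. angle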
########################
# Metaclasses and such #
########################
class Prefix(object):
""" The prefix for a unit, e.g. centi-, kilo-, mega-, etc.
"""
##############
# Attributes #
##############
in_words = None
abbrev = None
multiplier = None
##################
# Initialization #
##################
def __init__(self, in_words, abbrev, multiplier):
self.in_words = in_words
self.abbrev = abbrev
self.multiplier = multiplier
class Unit(type):
""" Metaclass for a general unit of something.
"""
########################
# Metaclass Attributes #
########################
known_units = []
prefixes = [
Prefix("Yotta", "Y", 1.0e24),
Prefix("Zetta", "Z", 1.0e21),
Prefix("Exa", "E", 1.0e18),
Prefix("Peta", "P", 1.0e15),
Prefix("Tera", "T", 1.0e12),
Prefix("Giga", "G", 1.0e9),
Prefix("Mega", "M", 1.0e6),
Prefix("Kilo", "k", 1.0e3),
Prefix("Hecto", "h", 100.0),
Prefix("Deca", "da", 10.0),
Prefix("Deci", "d", 1.0e-1),
Prefix("Centi", "c", 1.0e-2),
Prefix("Milli", "m", 1.0e-3),
Prefix("Micro", "u", 1.0e-6),
Prefix("Nano", "n", 1.0e-9),
Prefix("Pico", "p", 1.0e-12),
Prefix("Femto", "f", 1.0e-15),
Prefix("Atto", "a", 1.0e-18),
Prefix("Zepto", "z", 1.0e-21),
Prefix("Yocto", "y", 1.0e-24)
]
####################
# Class Attributes #
####################
__plural__ = None
__aliases__ = None
__abbrev__ = None
__prefixed__ = True
############################
# Metaclass Initialization #
############################
def __init__(cls, name, bases, dct):
Unit.known_units.append(name)
        if not all(issubclass(base, UnitGenre) for base in bases) or len(bases) != 1:
raise TypeError("Units must inherit from a single class with the UnitGenre superclass.")
super(Unit, cls).__init__(name, bases, dct)
globals()['__all__'].append(str(cls))
# Automatically create a plural alias for the unit if one is not given
if cls.__plural__ is None:
cls.__plural__ = str(cls) + "s"
        if cls.__plural__ != name:
globals()[cls.__plural__] = cls
globals()['__all__'].append(cls.__plural__)
Unit.known_units.append(cls.__plural__)
# Automatically create prefixed versions of the unit
if cls.__prefixed__:
for prefix in Unit.prefixes:
d = {'prefix': prefix, 'base_unit': cls}
name1 = prefix.in_words + name
pre = PrefixedUnit.__new__(PrefixedUnit, name1, (cls,), d)
globals()[name1] = pre
globals()['__all__'].append(name1)
Unit.known_units.append(pre)
name2 = prefix.in_words + cls.__plural__
globals()[name2] = pre
                globals()['__all__'].append(name2)
# If the name is not CamelCase or UPPERCASE, append uncapitalized versions (e.g. Kilogram as well
# as KiloGram, but not KiloaMU, only KiloAMU)
if not any(letter.isupper() for letter in name[1:]):
name3 = prefix.in_words + name[0].lower() + name[1:]
globals()[name3] = pre
globals()['__all__'].append(name3)
name4 = prefix.in_words + cls.__plural__[0].lower() + cls.__plural__[1:]
globals()[name4] = pre
globals()['__all__'].append(name4)
####################
# Class Properties #
####################
@property
def genre(cls):
return cls.__mro__[1]
@property
def name(cls):
return cls.__name__
#########################
# Special Class Methods #
#########################
def __contains__(self, item):
if isinstance(item, ValueWithUnits):
if item.units == self:
return True
else:
return False
else:
raise TypeError()
#----------------------#
# Comparison Operators #
#----------------------#
def __eq__(cls, other):
try:
return other.to(cls) == 1.0
except IncompatibleUnitsError:
return False
except AttributeError:
# Other doesn't even have a 'to()' method...
return NotImplemented
def __ne__(self, other):
eq_val = self.__eq__(other)
if eq_val is NotImplemented:
return NotImplemented
else:
return not eq_val
#----------------------#
# Arithmetic Operators #
#----------------------#
def __mul__(cls, other):
if isinstance(other, Number):
return ValueWithUnits(other, cls)
elif isinstance(other, Unit):
return CompositeUnit({cls: 1, other: 1})
else:
return NotImplemented
def __rmul__(cls, other):
if isinstance(other, Number):
return ValueWithUnits(other, cls)
else:
return NotImplemented
def __div__(cls, other):
if isinstance(other, Unit):
            return CompositeUnit({cls: 1, other: -1})
else:
return NotImplemented
__truediv__ = __div__
def __rdiv__(cls, other):
if isinstance(other, Number):
return ValueWithUnits(other, CompositeUnit({cls: -1}))
else: # pragma: no cover
return NotImplemented
__rtruediv__ = __rdiv__
def __pow__(cls, power):
if isinstance(power, Real):
return CompositeUnit({cls: power})
else: # pragma: no cover
return NotImplemented
#------------------------#
# Output Representations #
#------------------------#
def __repr__(cls):
return classname(super(Unit, cls).__repr__())
__str__ = __repr__
#################
# Class Methods #
#################
def genre_check(cls, other):
if not issubclass(other, cls.genre):
raise IncompatibleUnitsError(cls, other)
def prefix_factor(cls, other):
other_fact = 1.0
if isinstance(other, PrefixedUnit):
other_fact = other.prefix.multiplier
my_fact = 1.0
if isinstance(cls, PrefixedUnit):
my_fact = cls.prefix.multiplier
return my_fact / other_fact
def to(cls, other):
cls.genre_check(other)
if other is cls:
return 1.0
elif issubclass(other, cls) or issubclass(cls, other):
return cls.prefix_factor(other)
else:
return (1.0 / cls.genre.reference_unit.to(cls)) * cls.genre.reference_unit.to(other)
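    # Conversions between two non-reference units are routed through the genre's
    # reference unit: e.g. Meter.to(Bohr) evaluates as
    # (1.0 / Angstrom.to(Meter)) * Angstrom.to(Bohr).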
class PrefixedUnit(Unit):
""" Metaclass for a unit with a prefix, such as a Kilogram, Centimeter, etc.
"""
####################
# Class Attributes #
####################
base_unit = None
prefix = None
############################
# Metaclass Initialization #
############################
def __init__(cls, name, bases, dct):
cls.known_units.append(name)
        if 'to' not in dct:
dct['to'] = PrefixedUnit.to
if not all(isinstance(base, Unit) for base in bases) or not len(bases) == 1: # pragma: no cover
raise TypeError("PrefixedUnits must inherit from a single class which is a Unit.")
super(Unit, cls).__init__(name, bases, dct)
@property
def genre(cls):
return cls.base_unit.genre
class UnitGenre(object):
""" Superclass for classes of things that can be measured by units.
For instance, DistanceUnit, AngularUnit, EnergyUnit, etc.
"""
default = None
reference_unit = None
class GenreDefaultDict(defaultdict):
def __missing__(self, key):
return key.genre
#--------------------------------------------------------------------------------#
####################
# Helper functions #
####################
# Can be called as either def_unit_alias(alias, unit) or def_unit_alias(unit, alias) (as long as alias is a str and
# is_unit(unit) is True)
def def_unit_alias(arg1, arg2, plural=True, prefixed=True): # pragma: no cover
alias = None
unit = None
if isinstance(arg1, basestring) and is_unit(arg2):
alias = arg1
unit = arg2
elif isinstance(arg2, basestring) and is_unit(arg1):
alias = arg2
unit = arg1
else:
raise TypeError()
globals()[alias] = unit
globals()['__all__'].append(alias)
my_plural = None
if plural is True:
# Automatically add plural with 's' unless the user specifies a specific plural or if the user specifies 'False'
globals()[alias + 's'] = unit
globals()['__all__'].append(alias + 's')
my_plural = alias + 's'
elif plural is not False and not str(plural) == alias:
my_plural = str(plural)
globals()[my_plural] = unit
globals()['__all__'].append(my_plural)
# Automatically create prefixed versions of the unit alias
if prefixed:
for prefix in Unit.prefixes:
d = {'prefix': prefix, 'base_unit': unit}
name = prefix.in_words + alias
pre = PrefixedUnit.__new__(PrefixedUnit, name, (unit,), d)
PrefixedUnit.__init__(pre, name, (unit,), d)
globals()[name] = pre
globals()['__all__'].append(name)
Unit.known_units.append(pre)
if not plural is False:
name = prefix.in_words + my_plural
globals()[name] = pre
globals()['__all__'].append(name)
if not any(letter.isupper() for letter in alias[1:]):
# If the name is not CamelCase or UPPERCASE, append uncapitalized versions
# (e.g. Kilogram as well as KiloGram, but not KiloaMU, only KiloAMU)
name = prefix.in_words + alias[0].lower() + alias[1:]
globals()[name] = pre
globals()['__all__'].append(name)
if not plural is False:
name = prefix.in_words + my_plural[0].lower() + my_plural[1:]
globals()[name] = pre
globals()['__all__'].append(name)
def def_unit_aliases(unit, *args, **kwargs): # pragma: no cover
for al in args:
alias = str(al)
plural = kwargs.pop(al + "_plural", True)
prefixed = kwargs.pop(al + "_prefixed", True)
def_unit_alias(unit, alias, plural, prefixed)
def def_unit(genre, unit, plural=True, prefixed=True):
d = {} #{'to': Unit.to}
if plural is False: # pragma: no cover
# Define a plural that is the same as the unit to prevent plural from being defined
d['__plural__'] = unit
elif plural is not True:
# When plural is True, use the default plural. Otherwise, define it
d['__plural__'] = str(plural)
new_cls = globals()[unit] = Unit.__new__(Unit, unit, (genre,), d)
new_cls.__prefixed__ = prefixed
Unit.__init__(globals()[unit], unit, (genre,), d)
globals()['__all__'].append(unit)
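# Illustrative call (a sketch; 'Furlong' is a hypothetical unit, not defined in
# this module):
#     def_unit(DistanceUnit, 'Furlong')
# This defines Furlong and Furlongs plus prefixed variants (KiloFurlong, ...),
# though the genre's reference unit would still need a 'to' conversion for it.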
def def_units(genre, *args, **kwargs): # pragma: no cover
for unit in args:
prefixed = kwargs.pop(unit + "_prefixed", True)
plural = kwargs.pop(unit + "_plural", True)
def_unit(genre, unit, plural, prefixed)
if (unit + "_alias") in kwargs:
if (unit + "_alias_plural") in kwargs:
def_unit_alias(kwargs[unit + "_alias"], eval(unit, globals()), kwargs[unit + "_alias_plural"])
            elif kwargs[unit + "_alias"] + "_alias" in kwargs:
def_unit_alias(kwargs[unit + "_alias"], eval(unit, globals()), kwargs[kwargs[unit + "_alias"] + '_alias'])
else:
def_unit_alias(kwargs[unit + "_alias"], eval(unit, globals()))
elif (unit + "_aliases") in kwargs:
for alias in kwargs[unit + "_aliases"]:
aplural = kwargs.pop(alias + "_plural", True)
aprefixed = kwargs.pop(alias + "_prefixed", prefixed)
def_unit_alias(alias, eval(unit, globals()), aplural, aprefixed)
#--------------------------------------------------------------------------------#
##################
# Distance Units #
##################
class DistanceUnit(UnitGenre):
""" General superclass for all distance units
"""
class Angstrom(DistanceUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Bohr):
return pf / BohrRadius.value
elif issubclass(other, Meter):
return 1e-10 * pf
elif issubclass(other, Angstrom):
return pf
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
DistanceUnit.reference_unit = Angstrom
class Bohr(DistanceUnit):
__metaclass__ = Unit
def_unit_alias('AtomicUnitOfDistance', Bohr, plural='AtomicUnitsOfDistance')
class Meter(DistanceUnit):
__metaclass__ = Unit
DistanceUnit.default = Angstrom
#DistanceUnit.default = Bohr
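# Example conversions implied by the definitions above:
#     Angstrom.to(Meter)  # -> 1e-10
#     Meter.to(Angstrom)  # -> 1e10 (routed through the reference unit)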
#################
# Angular Units #
#################
class AngularUnit(UnitGenre):
""" General superclass for all angular units
"""
class Degree(AngularUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Radian):
return pf * math.pi / 180.0
elif issubclass(other, Degree):
return pf
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
AngularUnit.reference_unit = Degree
class Radian(AngularUnit):
__metaclass__ = Unit
# For now, using default units of Radians causes some unit tests to fail
#AngularUnit.default = Radian
AngularUnit.default = Degree
################
# Energy Units #
################
class EnergyUnit(UnitGenre):
""" General superclass for all energy units
"""
class Joule(EnergyUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Joule):
return pf
elif issubclass(other, Hartree):
return pf / 4.35974434e-18
elif issubclass(other, Wavenumbers):
return pf / (PlanckConstant.value * SpeedOfLight.in_units(Centimeters/Second).value)
elif issubclass(other, KiloCaloriePerMol):
return pf * AvogadrosNumber / 1000.0 / 4.184
elif issubclass(other, KiloJoulePerMol):
return pf * AvogadrosNumber / 1000.0
elif issubclass(other, Hertz):
return pf / PlanckConstant.value
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
EnergyUnit.reference_unit = Joule
class Wavenumber(EnergyUnit):
__metaclass__ = Unit
EnergyUnit.default = Wavenumber
# TODO Molar energy unit?
def_units(EnergyUnit,
#'ElectronVolt',
'Hertz',
'Hartree',
'KiloCaloriePerMol',
'KiloJoulePerMol',
#------------------#
Hartree_alias = 'AtomicUnitOfEnergy',
Hartree_alias_plural = 'AtomicUnitsOfEnergy',
#------------------#
KiloCaloriePerMol_prefixed = False, # Don't create prefixes, since e.g. MicroKCalPerMol doesn't make sense
KiloCaloriePerMol_aliases = [
'KiloCaloriePerMole',
'KCalPerMol',
'KcalPerMol',
],
KiloCaloriePerMole_plural = 'KiloCaloriesPerMol',
KcalPerMol_plural = 'KcalsPerMol',
KCalPerMol_plural = 'KCalsPerMol',
#------------------#
KiloJoulePerMol_plural = 'KiloJoulesPerMol',
KiloJoulesPerMol_prefixed = False,
KiloJoulesPerMol_aliases = [
'KJPerMol',
],
KJPerMol_plural = False,
#------------------#
)
##############
# Time Units #
##############
class TimeUnit(UnitGenre):
""" General superclass for all time units
"""
class Second(TimeUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Second):
return pf
elif issubclass(other, AtomicUnitOfTime):
return pf / 2.418884326502e-17
elif issubclass(other, Minute):
return pf / 60.0
elif issubclass(other, Hour):
return pf / 3600.0
elif issubclass(other, Day):
return pf / 86400.0
elif issubclass(other, Week):
return pf / 604800.0
elif issubclass(other, Year):
return pf / 31556925.445
elif issubclass(other, Decade):
return pf / (31556925.445 * 10)
elif issubclass(other, Century):
return pf / (31556925.445 * 100)
elif issubclass(other, Millennium):
return pf / (31556925.445 * 1000)
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
TimeUnit.default = Second
TimeUnit.reference_unit = Second
# Just to demonstrate how the process works...
def_units(TimeUnit,
'AtomicUnitOfTime',
'Minute',
'Hour',
'Day',
'Week',
'Year',
'Decade',
'Century',
'Millennium',
AtomicUnitOfTime_plural = "AtomicUnitsOfTime",
Century_plural = "Centuries",
Millennium_plural = 'Millennia')
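# e.g. Minute.to(Second) -> 60.0, computed through the reference unit:
#     (1.0 / Second.to(Minute)) * Second.to(Second) == 1.0 / (1.0 / 60.0)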
#########################
# Electric Charge Units #
#########################
class ElectricChargeUnit(UnitGenre):
""" General superclass for all units of electric charge
"""
class Coulomb(ElectricChargeUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Coulomb):
return pf
elif issubclass(other, AtomicUnitOfElectricCharge):
return pf / ElementaryCharge.in_units(Coulomb).value
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
ElectricChargeUnit.default = Coulomb
ElectricChargeUnit.reference_unit = Coulomb
def_units(ElectricChargeUnit,
'AtomicUnitOfElectricCharge',
AtomicUnitOfElectricCharge_plural = 'AtomicUnitsOfElectricCharge',
AtomicUnitOfElectricCharge_alias = 'AtomicUnitOfCharge',
AtomicUnitOfElectricCharge_alias_plural = 'AtomicUnitsOfCharge',
)
##############
# Mass Units #
##############
class MassUnit(UnitGenre):
""" General superclass for all units of mass
"""
class Gram(MassUnit):
__metaclass__ = Unit
@classmethod
def to(cls, other):
cls.genre_check(other)
pf = cls.prefix_factor(other)
if issubclass(other, Gram):
return pf
if issubclass(other, AtomicMassUnit):
# NIST
return pf / 1.660538921e-24
# IUPAC
#return pf / 1.6605402e-24
elif issubclass(other, AtomicUnitOfMass):
return pf / ElectronMass.in_units(Gram).value
else: # pragma: no cover
raise NotImplementedError("Conversion from units " + classname(cls) + " to units " + classname(other) + " is not implemented.")
MassUnit.reference_unit = Gram
class AtomicMassUnit(MassUnit):
__metaclass__ = Unit
def_unit_alias('AMU', AtomicMassUnit)
MassUnit.default = AtomicMassUnit
class AtomicUnitOfMass(MassUnit):
__metaclass__ = Unit
__plural__ = 'AtomicUnitsOfMass'
#####################
# Dependent Imports #
#####################
from grendel.util.units.composite import CompositeUnit
from grendel.util.units.errors import IncompatibleUnitsError, UnknownUnitError
from grendel.util.units.value_with_units import ValueWithUnits
from grendel.util.units.physical_constants import ElectronMass, ElementaryCharge, PlanckConstant, SpeedOfLight, AvogadrosNumber, BohrRadius
| gpl-2.0 | -1,983,723,185,309,552,000 | 30.765306 | 139 | 0.556835 | false | 3.843888 | false | false | false |
davsebamse/random_testing | selenium_connector.py | 1 | 3011 | # -*- coding: utf-8 -*-
"""
Created on Wed Aug 14 19:31:52 2013
@author: davse
"""
#TODO: Make this class more factory like, eg. get it to accept
#a driver created from the enviroment, instead of creating one itself
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
import misc
class TreeNode:
parent = None
children = []
element = None
id = None
text = None
tag_name = None
location = None
size = None
    def __init__(self, element, parent):
        self.element = element
        self.parent = parent
        self.children = []  # per-instance list; the class-level [] would be shared
def store_element_in_node(self):
self.id = self.element.id
self.text = self.element.text
self.tag_name = self.element.tag_name
self.location = self.element.location
self.size = self.element.size
def __str__(self):
return self.element.tag_name # + ' ' + self.element.text
class SeleniumConnector:
def __init__(self, drivername='CHROME', localserver=False, host='127.0.0.1', port=4444, debug=1):
if debug == 1:
misc.enable_debug()
desired_capabilities = None
if (drivername.upper() == 'CHROME'):
desired_capabilities = DesiredCapabilities.CHROME
elif (drivername.upper() == 'FIREFOX'):
desired_capabilities = DesiredCapabilities.FIREFOX
host = 'http://' + host +':'+ str(port)+'/wd/hub'
misc.debug_writeline('Connecting to {0} with desiredcapabilities {1}'.format(host, desired_capabilities))
self.driver = webdriver.Remote(
command_executor=host,
desired_capabilities=desired_capabilities)
def goto_page(self, page):
self.driver.get(page)
def quit(self):
self.driver.quit()
class SeleniumWrapper:
def __init__(self, connector):
self.connector = connector
def get_root_element_in_page(self):
return self.connector.driver.find_element_by_xpath('*')
def build_dom_tree(self, parent):
sub_elements = list(parent.element.find_elements_by_xpath('*'))
if len(sub_elements) == 0:
return
for element in sub_elements:
tmp = TreeNode(element, parent)
tmp.children = []
parent.children.append(tmp)
self.build_dom_tree(tmp)
def get_dom_tree(self):
root = self.get_root_element_in_page()
root_tree_node = TreeNode(root, None)
self.build_dom_tree(root_tree_node)
return root_tree_node
def print_tree(treenode, level=0):
print ' ' * (level * 3), str(treenode)
if len(treenode.children) == 0:
return
for c in treenode.children:
print_tree(c, level + 1)
def test_if_element_is_clickable(element):
return element.is_enabled() and element.is_displayed()
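# Sketch of intended usage (assumes a reachable Selenium server at the default
# host/port used by SeleniumConnector):
#     connector = SeleniumConnector(drivername='CHROME')
#     connector.goto_page('http://example.com')
#     tree = SeleniumWrapper(connector).get_dom_tree()
#     print_tree(tree)
#     connector.quit()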
def domtree_to_list(treenode, acc=[]):
acc.append(treenode)
if len(treenode.children) == 0:
        return
for c in treenode.children:
domtree_to_list(c, acc) | mit | 3,968,769,667,942,308,400 | 28.242718 | 113 | 0.623049 | false | 3.632087 | false | false | false |
EggInTheShell/TodoCounting | blur_image.py | 1 | 2626 | import numpy as np
from PIL import Image, ImageFilter
import matplotlib.pyplot as plt
import pandas as pd
from os.path import join, relpath
import glob, os
from scipy.ndimage.filters import gaussian_filter
import pickle
from settings import *
from data_utils import *
import time
startTime = time.time()
data_folder = DATA_DIR + 'patches_bool/'
data_path_list = glob.glob(data_folder+'*traindata_reduced.pkl')
# Configure the blur
# todo: take the per-dot maximum of the Gaussians instead of summing them -> peaks are not lost (ref: openpose)
sigma = 15
sample = np.zeros([99,99], dtype=np.float32)
sample[44,44] = 1
sample = gaussian_filter(sample, sigma=sigma)
# plt.imshow(sample)
# plt.gray()
# plt.show()
peak = np.max(sample)
# print(peak)
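# 'peak' is the maximum of a single blurred impulse; it could be used to
# renormalise the blurred labels (see the commented-out division below).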
for path in data_path_list:
id = int(os.path.basename(path)[:-len('traindata_reduced.pkl')])
print('processing: ', id)
with open(path, mode='rb') as f:
dict = pickle.load(f)
slice = 1000
images = dict['image'][:slice]
labels = dict['label'][:slice]
labels_blurred = np.zeros([slice,labels.shape[1], labels.shape[2], 5], dtype=np.float32)
# print('labels shape', labels.shape)
for i in range(labels.shape[0]):
print(i)
label = labels[i].astype(np.float32)
# print(np.max(label))
# print(label.shape)
        blurred = np.zeros_like(label, dtype=np.float32)
for ch in range(label.shape[2]):
blurred[:,:,ch] = gaussian_filter(label[:,:,ch], sigma=sigma)
# print(np.max(blurred))
labels_blurred[i] = blurred
# labels_blurred = labels_blurred/peak/2
print('label peak ', np.max(labels_blurred))
labels_blurred = np.minimum(1, labels_blurred)
    # Visualization
# for i in range(slice):
# plt.subplot(2,3,1)
# plt.imshow(images[i])
# plt.subplot(2,3,2)
# plt.imshow(labels_blurred[i,:,:,0])
# plt.gray()
# plt.subplot(2,3,3)
# plt.imshow(labels_blurred[i,:,:,1])
# plt.gray()
# plt.subplot(2,3,4)
# plt.imshow(labels_blurred[i,:,:,2])
# plt.gray()
# plt.subplot(2,3,5)
# plt.imshow(labels_blurred[i,:,:,3])
# plt.gray()
# plt.subplot(2,3,6)
# plt.imshow(labels_blurred[i,:,:,4])
# plt.gray()
# plt.show()
    # Save
dict = {'image': images, 'label': labels_blurred}
savepath = DATA_DIR + str(id) + '_train_blurred.pkl'
with open(savepath, mode='wb') as f:
pickle.dump(dict, f)
print('saved: ', savepath, time.time()-startTime) | mit | 7,488,384,135,458,226,000 | 30.182927 | 92 | 0.605243 | false | 2.849498 | false | false | false |
krausedj/TaxCruncher | BoaCCParser.py | 1 | 2009 |
import collections
import cfg
parse_files = cfg.parse_files
out_file = open(cfg.boa_cc_outfile, 'w')
data_csv = 'Filename,Transaction Date,Post Date,Business,Location,Reference Number,Account Number, Amount\n'
out_file.write(data_csv)
for file in sorted(parse_files):
with open(file, encoding='cp1252') as f:
lines = f.readlines()
for parse_range in parse_files[file]['ParseRanges']:
colm_info = parse_files[file]['Columns']
for parsed_line in lines[parse_range[0]-1:parse_range[1]]:
if parsed_line not in ('','\n'):
data_TransDate = parsed_line[colm_info['TransDate'][0]-1:colm_info['TransDate'][1]-1].strip()
data_PostDate = parsed_line[colm_info['PostDate'][0]-1:colm_info['PostDate'][1]-1].strip()
data_Business = parsed_line[colm_info['Business'][0]-1:colm_info['Business'][1]-1].strip()
data_Location = parsed_line[colm_info['Location'][0]-1:colm_info['Location'][1]-1].strip()
data_RefNum = parsed_line[colm_info['RefNum'][0]-1:colm_info['RefNum'][1]-1].strip()
data_ActNum = parsed_line[colm_info['ActNum'][0]-1:colm_info['ActNum'][1]-1].strip()
data_Amount = parsed_line[colm_info['Amount'][0]-1:colm_info['Amount'][1]-1].strip()
print(parsed_line)
print('Transation Date: {0}'.format(data_TransDate))
print('Post Date: {0}'.format(data_PostDate))
print('Business: {0}'.format(data_Business))
print('Location: {0}'.format(data_Location))
print('Reference Number: {0}'.format(data_RefNum))
print('Account Number: {0}'.format(data_ActNum))
print('Amount: {0}'.format(data_Amount))
data_csv = '{0},{1},{2},{3},{4},{5},{6},{7}\n'.format(file,data_TransDate,data_PostDate,data_Business,data_Location,data_RefNum,data_ActNum,data_Amount)
out_file.write(data_csv)
out_file.close()
| mit | -6,896,914,806,560,751,000 | 53.297297 | 168 | 0.594823 | false | 3.271987 | false | false | false |
nikitanovosibirsk/district42 | district42/_props.py | 1 | 1280 | from typing import Any, Mapping, TypeVar
from niltype import Nil, Nilable
__all__ = ("Props", "PropsType",)
PropsType = TypeVar("PropsType", bound="Props")
class Props:
def __init__(self, registry: Nilable[Mapping[str, Any]] = Nil) -> None:
self._registry = registry if (registry is not Nil) else {}
def get(self, name: str, default: Nilable[Any] = Nil) -> Nilable[Any]:
return self._registry.get(name, default)
def set(self: PropsType, name: str, value: Any) -> PropsType:
registry = {**self._registry, name: value}
return self.__class__(registry)
def update(self: PropsType, **keys: Any) -> PropsType:
registry = {**self._registry, **keys}
return self.__class__(registry)
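    # Illustrative usage (a sketch; set/update return new Props instances):
    #     p = Props().set('type', 'str')
    #     p.get('type')         # -> 'str'
    #     p.update(min_len=1)   # -> new Props; p itself is unchanged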
def __repr__(self) -> str:
return f"<{self.__class__.__name__} {self._registry}>"
def __eq__(self, other: Any) -> bool:
if not isinstance(other, self.__class__):
return False
for key, val in self._registry.items():
other_val = other.get(key)
if val != other_val:
return False
for key, other_val in other._registry.items():
val = self.get(key)
if other_val != val:
return False
return True
| mit | 8,434,861,864,708,869,000 | 29.47619 | 75 | 0.56875 | false | 3.657143 | false | false | false |
Kriegspiel/ks-python-api | kriegspiel_api_server/api/views/game.py | 1 | 1249 | # -*- coding: utf-8 -*-
from django.db import transaction
from api.views.base import AuthenticatedApiView, ApiView
from api.response import ApiResponse
from api.serializers.game import GameSerializer
from api import exceptions
from kriegspiel.models import Game, Move
class GamesView(AuthenticatedApiView):
def get(self, request):
"""
List all games.
"""
return ApiResponse()
def post(self, request):
"""
Create a new game.
"""
input_serializer = GameSerializer().load_data(request.POST)
game = Game.objects.create(
created_by=request.user,
name=input_serializer['name'],
white=input_serializer.get('white'),
black=input_serializer.get('black'),
)
output_serializer, errors = GameSerializer().dump(game)
return ApiResponse(data=output_serializer)
class TurnView(AuthenticatedApiView):
def post(self, request, game_id):
game = Game.objects.filter(id=game_id).first()
if game is None or request.user.id not in [game.white_id, game.black_id]:
raise exceptions.NotFound()
with transaction.atomic():
pass # todo: validate move, save it to db | mit | -8,627,306,668,786,321,000 | 28.761905 | 81 | 0.639712 | false | 4.135762 | false | false | false |
tongxindao/shiyanlou | shiyanlou_cs803/my_blog/my_blog/settings.py | 1 | 3126 | """
Django settings for my_blog project.
Generated by 'django-admin startproject' using Django 1.11.4.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '9z)b3%mhxoilgs&ga@950naj*@v!r)+!1e0%58hs^j(q^=^i61'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'article',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'my_blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'my_blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
| apache-2.0 | 7,739,922,912,703,766,000 | 24.834711 | 91 | 0.684901 | false | 3.488839 | false | false | false |
msimet/Stile | stile/treecorr_utils.py | 1 | 5553 | """
treecorr_utils.py: Contains elements of Stile needed to interface with Mike Jarvis's TreeCorr
program.
"""
import numpy
from . import file_io
import treecorr
from treecorr.corr2 import corr2_valid_params
def Parser():
import argparse
    p = argparse.ArgumentParser()
p.add_argument('--file_type',
help="File type (ASCII or FITS)",
dest='file_type')
p.add_argument('--delimiter',
help="ASCII file column delimiter",
dest='delimiter')
p.add_argument('--comment_marker',
help="ASCII file comment-line marker",
dest='comment_marker')
p.add_argument('--first_row',
help="First row of the file(s) to be considered",
dest='first_row')
p.add_argument('--last_row',
help="Last row of the file(s) to be considered",
dest='last_row')
p.add_argument('--x_units',
help="X-column units (radians, hours, degrees, arcmin, arcsec) -- only allowed "+
"by certain DataHandlers",
dest='x_units')
p.add_argument('--y_units',
help="Y-column units (radians, hours, degrees, arcmin, arcsec) -- only allowed "+
"by certain DataHandlers",
dest='y_units')
p.add_argument('--ra_units',
help="RA-column units (radians, hours, degrees, arcmin, arcsec) -- only "+
"allowed by certain DataHandlers",
dest='ra_units')
p.add_argument('--dec_units',
help="dec-column units (radians, hours, degrees, arcmin, arcsec) -- only "+
"allowed by certain DataHandlers",
dest='dec_units')
p.add_argument('--flip_g1',
help="Flip the sign of g1 [default: False]",
dest='flip_g1', default=False)
p.add_argument('--flip_g2',
help="Flip the sign of g2 [default: False]",
dest='flip_g2', default=False)
p.add_argument('--min_sep',
help="Minimum separation for the TreeCorr correlation functions",
dest='min_sep')
p.add_argument('--max_sep',
help="Maximum separation for the TreeCorr correlation functions",
dest='max_sep')
p.add_argument('--nbins',
help="Number of bins for the TreeCorr correlation functions",
dest='nbins')
p.add_argument('--bin_size',
help="Bin width for the TreeCorr correlation functions",
dest='bin_size')
p.add_argument('--sep_units',
help="Units for the max_sep/min_sep/bin_size arguments for the TreeCorr "+
"correlation functions",
dest='sep_units')
p.add_argument('--bin_slop',
help="A parameter relating to accuracy of the TreeCorr bins--changing is not "+
"recommended",
dest='bin_slop')
p.add_argument('-v', '--verbose',
help="Level of verbosity",
dest='verbose')
p.add_argument('--num_threads',
help='Number of threads (TreeCorr) or multiprocessing.Pool processors '+
'(Stile) to use; default is to automatically determine',
dest='num_threads')
p.add_argument('--split_method',
help="One of 'mean', 'median', or 'middle', directing TreeCorr how to split the "
"tree into child nodes. [default: 'mean']",
dest='split_method')
return p
def ReadTreeCorrResultsFile(file_name):
"""
Read in the given ``file_name``. Cast it into a formatted numpy array with the appropriate
fields and return it.
:param file_name: The location of an output file from TreeCorr.
:returns: A numpy array corresponding to the data in ``file_name``.
"""
from . import stile_utils
output = file_io.ReadASCIITable(file_name, comments='#')
if not len(output):
raise RuntimeError('File %s (supposedly an output from TreeCorr) is empty.'%file_name)
# Now, the first line of the TreeCorr output file is of the form:
# "# col1 . col2 . col3 [...]"
# so we can get the proper field names by reading the first line of the file and processing it.
with open(file_name) as f:
fields = f.readline().split()
fields = fields[1:]
fields = [field for field in fields if field != '.']
return stile_utils.FormatArray(output, fields=fields)
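# For example, a header line "# R_nom . meanR . xi" yields the field names
# ['R_nom', 'meanR', 'xi'] for the structured array.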
def PickTreeCorrKeys(input_dict):
"""
Take an ``input_dict``, harvest the kwargs you'll need for TreeCorr, and return a dict
containing these values. This is useful if you have a parameters dict that contains some things
TreeCorr might want, but some other keys that shouldn't be used by it.
:param input_dict: A dict containing some (key, value) pairs that apply to TreeCorr.
:returns: A dict containing the (key, value) pairs from input_dict that apply to
TreeCorr.
"""
if not input_dict:
return {}
if 'treecorr_kwargs' in input_dict:
treecorr_dict = input_dict['treecorr_kwargs']
else:
treecorr_dict = {}
for key in corr2_valid_params:
if key in input_dict:
treecorr_dict[key] = input_dict[key]
return treecorr_dict
| bsd-3-clause | 957,391,640,524,652,000 | 42.724409 | 100 | 0.563299 | false | 4.159551 | false | false | false |
bluegenes/MakeMyTranscriptome | scripts/expression.py | 1 | 11582 | import argparse
import os
from os.path import join, dirname, basename
import sys
from tasks_v2 import Supervisor, Task
import functions_general as fg
import functions_annotater as fan
import functions_expression as fex
import assembler as assemb
salmon_naming = 'salmon'
express_naming = 'express'
intersect_naming = 'intersect'
def gen_salmon_supervisor(opc, fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,gene_trans_map,sample_info,model,out_dir,cpu_cap, deps):
salmon_tasks = []
salmon_dir = fg.make_dir_task(os.path.join(out_dir,'salmon'))
out_dir = salmon_dir.targets[0]
build_salmon = fex.build_salmon_task(opc, assembly_path, assembly_name, out_dir,fg.round_div(cpu_cap, 2),[salmon_dir])
deps = deps + [build_salmon] #, salmon_gene_map]
salmon_trans_gene_map = ''
if len(gene_trans_map) > 0:
salmon_gene_map = fex.salmon_gene_map_task(opc,out_dir,assembly_name,gene_trans_map,[salmon_dir])
salmon_trans_gene_map = salmon_gene_map.targets[0]
deps = deps + [salmon_gene_map]
for i in range(len(fastq1)):
#filename = '_'.join([paired_names[i],salmon_naming,assembly_name])
filename = paired_names[i] #,salmon_naming,assembly_name])
salmon = fex.salmon_task(opc, build_salmon.targets[0],fastq1[i],fastq2[i],filename, salmon_trans_gene_map,out_dir,fg.round_div(cpu_cap,2),deps)
salmon_tasks.append(salmon)
for i in range(len(unpaired)):
#filename = '_'.join([unpaired_names[i],salmon_naming,assembly_name])
filename = unpaired_names[i] #,salmon_naming,assembly_name])
salmon = fex.salmon_unpaired_task(opc, build_salmon.targets[0],unpaired[i],filename,salmon_trans_gene_map,out_dir,fg.round_div(cpu_cap,2),deps)
salmon_tasks.append(salmon)
transcriptName = assembly_name #'_'.join([assembly_name,salmon_naming])
geneName = assembly_name + '_gene' #'_'.join([assembly_name,salmon_naming,'gene'])
counts_to_table_salmon=fex.counts_to_table_task(opc, assembly_name,gene_trans_map,out_dir,[t.targets[0] for t in salmon_tasks],transcriptName,'--salmon',salmon_tasks)
deseq2_salmon = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_salmon.targets[0],sample_info,transcriptName,model,[counts_to_table_salmon])
deseq2_salmon_gene = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_salmon.targets[1],sample_info,geneName,model,[counts_to_table_salmon])
    supervisor_tasks = [salmon_dir, build_salmon, counts_to_table_salmon, deseq2_salmon, deseq2_salmon_gene] + salmon_tasks
    if len(gene_trans_map) > 0:
        # salmon_gene_map is only defined when a gene-trans map was supplied
        supervisor_tasks.append(salmon_gene_map)
    return Supervisor(tasks=supervisor_tasks)
def gen_express_supervisor(opc,fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu_cap,deps):
express_tasks,bowtie_e_tasks = [],[]
express_dir = fg.make_dir_task(os.path.join(out_dir,'express'))
out_dir = express_dir.targets[0]
for i in range(len(fastq1)):
filename = paired_names[i] #'_'.join([paired_names[i],express_naming,assembly_name])
#filename = '_'.join([paired_names[i],express_naming,assembly_name])
bowtie_e = fex.bowtie2_task(opc, bowtie2_index,out_dir,fastq1[i],fastq2[i],filename,0,fg.round_div(cpu_cap,2),deps)
express = fex.express_task(opc, bowtie2_index,assembly_path,out_dir,paired_names[i],bowtie_e.targets[0],[bowtie_e])
bowtie_e_tasks.append(bowtie_e)
express_tasks.append(express)
for i in range(len(unpaired)):
filename = unpaired_names[i] #'_'.join([unpaired_names[i],express_naming,assembly_name])
bowtie_e = fex.bowtie2_unpaired_task(opc, bowtie2_index,out_dir,unpaired[i],filename,0,fg.round_div(cpu_cap,2),deps)
bowtie_e_tasks.append(bowtie_e)
express = fex.express_task(opc, bowtie2_index,assembly_path,out_dir,unpaired_names[i],bowtie_e.targets[0],[bowtie_e])
express_tasks.append(express)
transcriptName = assembly_name #'_'.join([assembly_name,express_naming])
geneName = assembly_name + '_gene' #'_'.join([assembly_name,express_naming,'gene'])
counts_to_table_express = fex.counts_to_table_task(opc, assembly_name,gene_trans_map,out_dir,[t.targets[0] for t in express_tasks],transcriptName,'--eXpress',express_tasks)
deseq2_express = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_express.targets[0],sample_info,transcriptName,model,[counts_to_table_express])
deseq2_express_gene = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_express.targets[1],sample_info,geneName,model,[counts_to_table_express])
e_tasks = [express_dir,counts_to_table_express,deseq2_express,deseq2_express_gene]+bowtie_e_tasks+express_tasks
return Supervisor(tasks = e_tasks)
def gen_rapclust_supervisor(opc,fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu_cap,deps):
rc_tasks,bowtie_rc_tasks = [],[]
rc_dir = fg.make_dir_task(os.path.join(out_dir,'rapclust_bt2'))
out_dir = rc_dir.targets[0]
for i in range(len(fastq1)):
filename = paired_names[i] #'_'.join([paired_names[i],express_naming,assembly_name])
#filename = '_'.join([paired_names[i],express_naming,assembly_name])
bowtie_rc = fex.bowtie2_task(opc, bowtie2_index,out_dir,fastq1[i],fastq2[i],filename,2,fg.round_div(cpu_cap,2),deps)
# express = fex.express_task(opc, bowtie2_index,assembly_path,out_dir,paired_names[i],bowtie_e.targets[0],[bowtie_e])
bowtie_rc_tasks.append(bowtie_rc)
# express_tasks.append(express)
for i in range(len(unpaired)):
filename = unpaired_names[i] #'_'.join([unpaired_names[i],express_naming,assembly_name])
bowtie_rcU = fex.bowtie2_unpaired_task(opc, bowtie2_index,out_dir,unpaired[i],filename,2,fg.round_div(cpu_cap,2),deps)
bowtie_rc_tasks.append(bowtie_rcU)
# express = fex.express_task(opc, bowtie2_index,assembly_path,out_dir,unpaired_names[i],bowtie_e.targets[0],[bowtie_e])
# express_tasks.append(express)
# transcriptName = assembly_name #'_'.join([assembly_name,express_naming])
# geneName = assembly_name + '_gene' #'_'.join([assembly_name,express_naming,'gene'])
# counts_to_table_express = fex.counts_to_table_task(opc, assembly_name,gene_trans_map,out_dir,[t.targets[0] for t in express_tasks],transcriptName,'--eXpress',express_tasks)
# deseq2_express = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_express.targets[0],sample_info,transcriptName,model,[counts_to_table_express])
# deseq2_express_gene = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_express.targets[1],sample_info,geneName,model,[counts_to_table_express])
rc_tasks = [rc_dir]+bowtie_rc_tasks+ rc_tasks
return Supervisor(tasks = rc_tasks)
def gen_intersect_supervisor(opc,fq1,fq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu_cap, deps):
intersect_tasks,bowtie_i_tasks,sam_sort_tasks = [],[],[]
intersect_dir = fg.make_dir_task(os.path.join(out_dir,'intersectBed'))
out_dir = intersect_dir.targets[0]
deps.append(intersect_dir)
fasta_to_bed = fan.assembly_to_bed_task(opc, assembly_path, out_dir,[intersect_dir])
for i in range(len(fq1)):
filename = paired_names[i] #'_'.join([paired_names[i],intersect_naming,assembly_name])
#filename = '_'.join([paired_names[i],intersect_naming,assembly_name])
bowtie_i = fex.bowtie2_task(opc, bowtie2_index,out_dir,fq1[i],fq2[i],filename,1,fg.round_div(cpu_cap,2),deps)
sorted_name = filename + '_sorted'
sam_sort = fex.sam_sort_task(opc, out_dir,bowtie_i.targets[0],sorted_name,[bowtie_i])
intersect_bed = fex.intersect_bed_task(opc, out_dir,sam_sort.targets[0],fasta_to_bed.targets[0],paired_names[i],[sam_sort,fasta_to_bed])
bowtie_i_tasks.append(bowtie_i)
sam_sort_tasks.append(sam_sort)
intersect_tasks.append(intersect_bed)
for i in range(len(unpaired)):
filename = unpaired_names[i] #'_'.join([unpaired_names[i],intersect_naming,assembly_name])
bowtie_i = fex.bowtie2_unpaired_task(opc, bowtie2_index,out_dir,unpaired[i],filename,1,fg.round_div(cpu_cap,2),deps)
bowtie_i_tasks.append(bowtie_i)
sorted_name = filename + '_sorted'
sam_sort = fex.sam_sort_task(opc, out_dir,bowtie_i.targets[0],sorted_name,[bowtie_i])
sam_sort_tasks.append(sam_sort)
intersect_bed = fex.intersect_bed_task(opc, out_dir,sam_sort.targets[0],fasta_to_bed.targets[0],unpaired_names[i],[sam_sort,fasta_to_bed])
intersect_tasks.append(intersect_bed)
transcriptName = assembly_name #'_'.join([assembly_name,express_naming])
geneName = assembly_name + '_gene' #'_'.join([assembly_name,express_naming,'gene'])
counts_to_table_intersect=fex.counts_to_table_task(opc, assembly_name,gene_trans_map,out_dir,[t.targets[0] for t in intersect_tasks],transcriptName,'',intersect_tasks)
deseq2_intersect = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_intersect.targets[0],sample_info,transcriptName,model,[counts_to_table_intersect])
deseq2_intersect_gene = fex.deseq2_task(opc, assembly_name,out_dir,counts_to_table_intersect.targets[1],sample_info,geneName,model,[counts_to_table_intersect])
i_tasks = [intersect_dir,fasta_to_bed,counts_to_table_intersect,deseq2_intersect, deseq2_intersect_gene]+bowtie_i_tasks+sam_sort_tasks+intersect_tasks
return Supervisor(tasks=i_tasks)
def gen_expression_supervisor(opc, dbs, fastq1,fastq2,paired_names,unpaired,unpaired_names,cpu,sample_info,model,gene_trans_map,dependency_set,assembly_name, assembly_path, out_dir,run_salmon=True,run_express=False,run_intersectbed=False,run_rapclust=False):
all_tasks = []
deps = []
trim_reads = False
if trim_reads:
trimmomatic_flag = True
rmdup = False
truncate_opt = False
trim_tasks,fastq1,fastq2,unpaired=assemb.gen_trimming_supervisor(opc,out_dir,fastq1,fastq2,unpaired,False,trimmomatic_flag,rmdup,10**15,0,truncate_opt,[],cpu)
all_tasks.append(trim_tasks)
deps.append(trim_tasks)
if run_salmon:
salmon_tasks = gen_salmon_supervisor(opc, fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,gene_trans_map,sample_info,model,out_dir,cpu, deps)
all_tasks.append(salmon_tasks)
if run_express or run_intersectbed or run_rapclust:
build_bowtie = fex.build_bowtie_task(opc, assembly_path,assembly_name, out_dir,[])
bowtie2_index = join(dirname(build_bowtie.targets[0]),basename(build_bowtie.targets[0]).split('.')[0])
all_tasks.append(build_bowtie)
if run_express:
express_tasks = gen_express_supervisor(opc,fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu, [build_bowtie])
all_tasks.append(express_tasks)
if run_rapclust:
rc_tsks = gen_rapclust_supervisor(opc,fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu, [build_bowtie])
all_tasks.append(rc_tsks)
if run_intersectbed:
intersect_tasks = gen_intersect_supervisor(opc,fastq1,fastq2,paired_names,unpaired,unpaired_names,assembly_path,assembly_name,bowtie2_index,gene_trans_map,sample_info,model,out_dir,cpu,[build_bowtie])
all_tasks.append(intersect_tasks)
return Supervisor(tasks=all_tasks,dependencies=dependency_set)
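# The returned Supervisor bundles the chosen quantification tasks
# (salmon/eXpress/intersectBed/RapClust) together with the DESeq2 tasks; the
# calling pipeline is responsible for executing it.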
if(__name__=='__main__'):
pass
| bsd-3-clause | -7,513,924,934,046,294,000 | 72.303797 | 258 | 0.717406 | false | 2.783466 | false | false | false |
exclude/monki | monki/boards/admin.py | 1 | 3418 | from django.contrib import admin
from django.core.files import File
from django.conf import settings
from imagekit.admin import AdminThumbnail
from monki.boards.models import (
Ban,
Banner,
Board,
Category,
Image,
Post,
Video,
)
from monki.boards.forms import ImageForm, VideoForm
@admin.register(Category)
class CategoryAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'order',
)
list_editable = (
'order',
)
@admin.register(Board)
class BoardAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'category',
'order',
'max_replies',
'max_length',
'show_id',
'country_flags',
'enable_captcha',
'forced_anonymous',
'locked',
'nsfw',
'created_at',
)
list_editable = (
'category',
'order',
'max_replies',
'max_length',
'show_id',
'country_flags',
'enable_captcha',
'forced_anonymous',
'locked',
'nsfw',
)
class ImageInline(admin.StackedInline):
model = Image
form = ImageForm
can_delete = False
class VideoInline(admin.StackedInline):
model = Video
form = VideoForm
can_delete = False
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'board',
'subject',
'name',
'tripcode',
'cid',
'ip_address',
'created_at',
'updated_at',
'bumped_at',
)
list_filter = (
'board',
)
search_fields = (
'subject',
'name',
'tripcode',
'ip_address',
'cid',
)
ordering = (
'-created_at',
)
inlines = (
ImageInline,
VideoInline,
)
@admin.register(Image)
class ImageAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'original_filename',
'admin_thumbnail',
'size',
'width',
'heigth',
'checksum',
)
admin_thumbnail = AdminThumbnail(image_field='thumbnail')
actions = (
'turn_potato',
)
def turn_potato(self, request, queryset):
count = 0
placeholder = str(settings.BASE_DIR / 'static' / 'img' / 'anders_bateva.png')
with open(placeholder, 'rb') as file:
for image in queryset:
image.file = File(file, name=image.original_filename)
image.save()
count += 1
self.message_user(request, '{} image(s) was potato\'d'.format(count))
turn_potato.short_description = 'Turn into a potato'
@admin.register(Video)
class VideoAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'admin_thumbnail',
)
admin_thumbnail = AdminThumbnail(image_field='thumbnail')
@admin.register(Banner)
class BannerAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'admin_thumbnail',
)
admin_thumbnail = AdminThumbnail(image_field='image')
@admin.register(Ban)
class BanAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'reason',
'created_at',
'expires_at',
'banned_by',
)
search_fields = (
'ip_address',
)
def save_model(self, request, obj, form, change):
obj.banned_by = request.user
obj.save()
| agpl-3.0 | -175,683,558,941,638,980 | 17.78022 | 85 | 0.534816 | false | 3.789357 | false | false | false |
amuramatsu/dwf | setup.py | 1 | 1430 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
import sys
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dwf',
version='0.2.0.dev0',
description="Digilent's DWF library wrapper",
long_description=long_description,
url='https://github.com/amuramatsu/dwf/',
author='MURAMATSU Atsushi',
author_email='[email protected]',
license='MIT',
install_requires=[
'enum34'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6', # Not tested
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3', # Not tested
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
platforms="Linux,Mac,Windows",
packages=['dwf'],
use_2to3=False
)
| mit | -7,619,697,030,664,391,000 | 28.183673 | 64 | 0.596503 | false | 3.844086 | false | true | false |
ArielCabib/python-tkinter-calculator | Calculator/Widgets/MainMenu.py | 1 | 3396 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# MainMenu.py
#
# Copyright 2010 Ariel Haviv <[email protected]>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
***Main menu container (a Frame)***
Calculator by Ariel Haviv ([email protected])
Instructors: Anatoly Peymer, Zehava Lavi
"""
from Tkinter import *
#auto-generated methods will use this list:
m = [['File',
['Load history (Ctrl+L)', 'Load_History'],
['Save history (Ctrl+S)', 'Save_History'],
['Quit (Alt+F4)', 'Quit']],
['Edit',
['Undo (Ctrl+Z)', 'Undo'],
['Redo (Ctrl+Y)', 'Redo']],
['View',
['Toggle previous action bar (Ctrl+P)', 'Toggle_Prev_Lbl'],
['Show history', 'Show_History'],
['Toggle Prefix & Postfix', 'Toggle_Fixes']],
['Base',
['Binary (Ctrl+B)', 'Binary'],
['Octal (Ctrl+O)', 'Octal'],
['Decimal (Ctrl+D)', 'Decimal'],
['Hexa (Ctrl+X)', 'Hexa'],
['Manual (Ctrl+A)', 'Manual']],
['Help',
['Contents (F1)', 'Contents'],
['About...', 'About']]]
class MainMenu(Frame):
def __init__(self, root, in_hndl, **args):
Frame.__init__(self, root, **args)
self.root = root
self.in_hndl = in_hndl
mb = self.menuBtns = []
mn = self.menus = []
#drawing menus
for i in range(len(m)):
mb.append(Menubutton(self, text=m[i][0]))
mb[i].grid(row=0, column=i)
mn.append(Menu(mb[i], tearoff=False))
mb[i]['menu'] = mn[i]
            for j in m[i][1:]:
                #bind each menu entry via a closure; the auto-generated
                #methods below are defined too late to be referenced here
                mn[i].add_command(label=j[0],
                                  command=lambda hndl=self.in_hndl,
                                                 menu=m[i][0], item=j[1]:
                                      hndl.mnu_clicked([menu, item]))
        #auto-generating methods (legacy; no longer referenced by the menu commands)
for i in range(len(m)):
for j in m[i][1:]:
#generating auto class methods for menu commands
method = ("%s_%s" % (m[i][0], j[1]))
exec("""def %s(self):
self.in_hndl.mnu_clicked(["%s", "%s"])""" % (method, m[i][0], j[1]))
| bsd-3-clause | -1,414,582,796,137,303,800 | 35.913043 | 80 | 0.636926 | false | 3.345813 | false | false | false |
metalshark/lesscss-python | lesscss/media.py | 1 | 2033 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Copyright 2010 Beech Horn
This file is part of lesscss-python.
lesscss-python is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
lesscss-python is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with lesscss-python. If not, see <http://www.gnu.org/licenses/>.
'''
import re
from lesscss.nested import parse_nested
from lesscss.rules import Rules
MEDIA = re.compile('''
(?P<names>
@media
\s*
(?P<media>
[a-z]+
\s*
(
,
\s*
[a-z]+
\s*
)*?
)
)
\s*
{
''', re.DOTALL | re.IGNORECASE | re.VERBOSE)
def parse_media(less, parent=None, **kwargs):
match = MEDIA.match(less)
if not match:
raise ValueError()
media = [media.strip() for media in match.group('media').split(',')]
matched_length = len(match.group())
remaining_less = less[matched_length:]
contents = parse_nested(remaining_less)
code = match.group() + contents + '}'
return Media(code=code, media=media, contents=contents, parent=parent)
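# Illustrative parse (a sketch based on the grammar above):
#     parse_media('@media screen, print { a { color: red; } }')
#     # -> Media instance with media == ['screen', 'print']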
class Media(Rules):
__slots__ = ('__media',)
def __init__(self, parent, code, media, contents=None):
Rules.__init__(self, parent=parent, code=code, contents=contents)
self.__media = media
def __get_media(self):
return self.__media
media = property(fget=__get_media) | gpl-3.0 | 2,662,149,872,208,000,000 | 20.870968 | 74 | 0.578455 | false | 4.165984 | false | false | false |
BCVisin/PhotoViewer | get_photos.py | 1 | 2807 |
import threading
import Queue
from PIL import ImageTk
from PIL import Image
class get_photos(object):
def __init__(self, max_w, max_h):
self.max_w, self.max_h = max_w, max_h
self.image_index = -1
self.images = ['photos/%s.JPG' % x for x in range(1, 11)]
self.image_queue = Queue.Queue()
self.image_dict = {}
def thread_load_images(self):
while True:
try:
image_location, image = self.image_queue.get_nowait()
self.image_dict[image_location] = image
except Queue.Empty:
break
def get_next_index(self):
if self.image_index >= len(self.images) - 1:
self.image_index = 0
else:
self.image_index += 1
return self.image_index
def get_previous_index(self):
if self.image_index <= 0:
self.image_index = len(self.images) - 1
else:
self.image_index -= 1
return self.image_index
def get_photo(self, image_path):
#check the queue for other images that we may have returned
self.thread_load_images()
#try to return the image if it's been pre-loaded:
try:
return self.image_dict[image_path]
except KeyError:
#load the image
self.image_dict[image_path] = load_image(self.image_queue, image_path, self.max_w, self.max_h).run(True)
return self.image_dict[image_path]
def get_next(self):
this_photo_index = self.get_next_index()
self.preload(start_index=this_photo_index)
return self.get_photo(self.images[this_photo_index])
def get_previous(self):
return self.get_photo(self.images[self.get_previous_index()])
def preload(self, start_index, forward=True):
preload_num = 4
if forward:
index_range = range(start_index + 1, min(start_index + preload_num + 1, len(self.images)))
else:
index_range = range(max(0, start_index - preload_num), start_index)
for i in index_range:
try:
self.image_dict[self.images[i]]
except KeyError:
load_image(self.image_queue, self.images[i], self.max_w, self.max_h).start()
class load_image(threading.Thread):
def __init__(self, return_queue, image_path, max_x, max_y):
self.return_queue = return_queue
self.image_path = image_path
self.max_x = max_x
self.max_y = max_y
threading.Thread.__init__(self)
def run(self, direct=False):
image = Image.open(self.image_path)
new_size = self.get_new_size(self.max_x, self.max_y, image)
resized_image = image.resize(new_size, Image.ANTIALIAS)
final_image = ImageTk.PhotoImage(resized_image)
if direct:
return final_image
else:
self.return_queue.put((self.image_path, final_image))
def get_new_size(self, max_width, max_height, image):
x, y = image.size
if x > max_width or x > y:
y = int(max(y * max_width / x, 1))
x = int(max_width)
if y > max_height or x < y:
x = int(max(x * max_height / y, 1))
y = int(max_height)
new_size = x, y
return new_size | mit | -3,579,786,799,524,637,000 | 23 | 107 | 0.670823 | false | 2.746575 | false | false | false |
dhatzenbichler/Slider | tl.py | 1 | 2225 | #!/usr/bin/python
from datetime import datetime
from datetime import timedelta
import subprocess
import RPi.GPIO as GPIO
import time
from wrappers import GPhoto
from wrappers import Identify
from wrappers import NetworkInfo
from ui import TimelapseUi
from motor import MotorObject
def main():
print "Timelapse"
camera = GPhoto(subprocess)
idy = Identify(subprocess)
netinfo = NetworkInfo(subprocess)
ui = TimelapseUi()
motor = MotorObject()
motor.backwards(0.005,50)
    shot = 0
    intervall = 5  # seconds between shots; assumed default (the original left this undefined)
network_status = netinfo.network_status()
ui.main(motor, network_status)
print "Test vor capture"
try:
## last_started = datetime.now()
## print "Shot: %d Shutter: %s ISO: %d" % (shot)
## ui.backlight_on()
## print "Jetyt set shutter speed"
## camera.set_shutter_speed(secs=config[0])
## print "Jetyt nach set shutter speed"
## print config[1]
## camera.set_iso(iso=str(config[1]))
## print "Jetyt nach set iso"
if ui.getBkt() == True:
camera.set_bracketing()
print "nach Set Bracketing"
ui.backlight_off()
while True:
try:
if ui.getBkt() == True:
camera.capture_image_and_download(shot)
shot = shot + 1
camera.capture_image_and_download(shot)
shot = shot + 1
camera.capture_image_and_download(shot)
else:
camera.capture_image_and_download(shot)
time.sleep(intervall)
motor.forward(5/1000,ui.getSteps())
                time.sleep(ui.getSteps()/33) # time the motor needs to travel
except Exception, e:
print "Error on capture." + str(e)
print "Retrying..."
# Occasionally, capture can fail but retries will be successful.
continue
shot = shot + 1
except Exception,e:
ui.show_error(str(e))
if __name__ == "__main__":
main() | gpl-3.0 | 992,594,585,256,588,700 | 25.5 | 84 | 0.529888 | false | 3.994614 | false | false | false |
apallin/testworks-appium | testworksappium/elements.py | 1 | 1483 | #!/usr/bin/env python
import logging
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import WebDriverException
log = logging.getLogger(__name__)
class Elements(object):
def __init__(self, appium_driver, **kwargs):
"""
Element object for wrapping webdriver element calls.
Must pass a locator/locator_value in kwargs to find elements.
:param: :appium_driver: webdriver object
"""
self.appium_driver = appium_driver
self.element_objects = []
if not kwargs:
raise ValueError("Please specify a locator")
if len(kwargs) > 1:
raise ValueError("Please specify only one locator")
locator_key, locator_value = next(iter(kwargs.items()))
self.locator_value = locator_value
self.locator_key = locator_key
self.locator = (locator_key, locator_value)
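    # Illustrative usage (a sketch; assumes an initialised Appium driver and a
    # locator strategy that is valid for webdriver's `by` argument):
    #     Elements(driver, xpath='//android.widget.Button').find_elements()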
def find_elements(self):
"""
Function for finding element objects for appium interaction.
        :return: list of webdriver element objects
"""
log.debug("Finding {}".format(self.locator))
try:
self.element_objects = self.appium_driver.find_elements(
by=self.locator_key, value=self.locator_value)
except NoSuchElementException as e:
log.error(e)
pass
        except WebDriverException as e:
            log.error(e)
            pass
return self.element_objects
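
    # Usage sketch: the locator is given as a single keyword argument whose
    # name is the webdriver "by" strategy (names below are illustrative):
    #
    #   buttons = Elements(appium_driver, xpath='//XCUIElementTypeButton')
    #   found = buttons.find_elements()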
| mit | -8,590,678,342,811,658,000 | 31.955556 | 69 | 0.626433 | false | 4.493939 | false | false | false |
renskiy/django-bitmask-field | django_bitmask_field.py | 1 | 4281 | import codecs
import functools
from django import forms
from django.core import checks, exceptions, validators
from django.db import models
from django.utils.encoding import force_bytes
from django.utils.six import integer_types, buffer_types, text_type
from django.utils.six.moves import reduce
from django.utils.translation import ugettext_lazy as _
long = integer_types[-1]
def int2bytes(i):
hex_value = '{0:x}'.format(i)
# make length of hex_value a multiple of two
hex_value = '0' * (len(hex_value) % 2) + hex_value
return codecs.decode(hex_value, 'hex_codec')
def bytes2int(b):
return long(codecs.encode(b, 'hex_codec'), 16)
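
# Round-trip sketch for the two helpers above (values are illustrative):
#
#   >>> int2bytes(long(1025))
#   b'\x04\x01'
#   >>> bytes2int(b'\x04\x01')
#   1025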
class BitmaskFormField(forms.TypedMultipleChoiceField):
def prepare_value(self, value):
if isinstance(value, list):
return value
if not value:
return value
return [
long(bit) * (2 ** place)
for place, bit in enumerate('{0:b}'.format(value)[::-1])
if bit == '1'
]
def has_changed(self, initial, data):
return initial != self._coerce(data)
def _coerce(self, value):
values = super(BitmaskFormField, self)._coerce(value)
if values is None:
return values
return reduce(long.__or__, map(long, values), long(0))
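
# Sketch of the coercion round trip (illustrative choices): prepare_value()
# splits a stored integer into its power-of-two components for the
# multiple-choice widget, and _coerce() ORs the selection back together.
#
#   >>> field = BitmaskFormField(choices=[(1, 'a'), (2, 'b'), (4, 'c')],
#   ...                          coerce=long)
#   >>> field.prepare_value(5)
#   [1, 4]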
class BitmaskField(models.BinaryField):
description = _('Bitmask')
default_validators = [validators.MinValueValidator(0)]
def __init__(self, *args, **kwargs):
editable = kwargs.get('editable', True)
super(BitmaskField, self).__init__(*args, **kwargs)
self.editable = editable
self.validators = list(self.__validators)
@property
def __validators(self):
for validator in self.validators:
if isinstance(validator, validators.MaxLengthValidator):
                # largest value that fits in limit_value bytes (2**bits - 1)
                max_value = 2 ** (validator.limit_value * 8) - 1
yield validators.MaxValueValidator(max_value)
else:
yield validator
def _check_choices(self):
errors = super(BitmaskField, self)._check_choices()
if not errors and self.choices and not all(
isinstance(choice, integer_types) and choice >= 0
for choice, description in self.flatchoices
):
return [
checks.Error(
"all 'choices' must be of integer type.",
obj=self,
)
]
return errors
def deconstruct(self):
return models.Field.deconstruct(self)
@property
def all_values(self):
return reduce(
long.__or__,
map(long, list(zip(*self.flatchoices))[0]),
long(0),
)
def validate(self, value, model_instance):
try:
super(BitmaskField, self).validate(value, model_instance)
except exceptions.ValidationError as error:
if error.code != 'invalid_choice':
raise
if (
self.choices
and value not in self.empty_values
and value & self.all_values != value
):
raise exceptions.ValidationError(
self.error_messages['invalid_choice'],
code='invalid_choice',
params={'value': value},
)
def value_to_string(self, obj):
return models.Field.value_to_string(self, obj)
def to_python(self, value):
if isinstance(value, buffer_types):
return bytes2int(force_bytes(value))
elif isinstance(value, text_type):
return long(value)
return value
def get_prep_value(self, value):
value = super(BitmaskField, self).get_prep_value(value)
if value is None:
return value
return int2bytes(value)
def from_db_value(self, value, expression, connection, context):
return self.to_python(value)
def formfield(self, **kwargs):
defaults = {
'form_class': functools.partial(forms.IntegerField, min_value=0),
'choices_form_class': BitmaskFormField,
}
if self.choices:
defaults['coerce'] = long
defaults.update(kwargs)
return super(BitmaskField, self).formfield(**defaults)
| mit | 6,983,439,037,241,272,000 | 29.798561 | 77 | 0.589348 | false | 4.213583 | false | false | false |
ddurdle/XBMC-ustvnow | resources/lib/ustvnow.py | 1 | 7979 | '''
ustvnow XBMC Plugin
Copyright (C) 2011 t0mm0
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import Addon
import cookielib
import os
import re
import urllib, urllib2
class Ustvnow:
__BASE_URL = 'http://lv2.ustvnow.com'
def __init__(self, user, password):
self.user = user
self.password = password
def get_channels(self, quality=1, stream_type='rtmp'):
self._login()
html = self._get_html('iphone_ajax', {'tab': 'iphone_playingnow',
'token': self.token})
channels = []
for channel in re.finditer('id="(content.+?)".+?class="panel".+?title="(.+?)".+?src="' +
'(.+?)".+?class="nowplaying_item">(.+?)' +
'<\/td>.+?class="nowplaying_itemdesc".+?' +
'<\/a>(.+?)<\/td>.+?href="(.+?)"',
html, re.DOTALL):
id, name, icon, title, plot, url = channel.groups()
title = title.replace("&", "&")
            if name.find('fieldset') != -1:
                # The page gives no usable channel name here, so map the
                # channel icon file name to a display name instead of the
                # original long if/elif chain.
                icon_to_name = {
                    'APL.png': 'Animal Planet',
                    'BRAVO.png': 'Bravo',
                    'TOON.png': 'Cartoon Network',
                    'ESPN.png': 'ESPN',
                    'CNN.png': 'CNN',
                    'CNBC.png': 'CNBC',
                    'USA.png': 'USA',
                    'SYFY.png': 'Syfy',
                    'HISTORY.png': 'History',
                    'DSC.png': 'Discovery Channel',
                    'COMEDY.png': 'Comedy Central',
                    'TNT.png': 'TNT',
                    'WLYH.png': 'CW',
                    'WHTM.png': 'ABC',
                    'WPMT.png': 'FOX',
                    'FX.png': 'FX',
                    'WPSU.png': 'PBS',
                    'FOOD.png': 'Food Network',
                    'TBS.png': 'TBS',
                    'NIK.png': 'Nickelodeon',
                    'WHP.png': 'CBS',
                    'WGAL.png': 'NBC',
                    'AETV.png': 'AETV',
                    'LIFE.png': 'Lifetime',
                    'SPIKETV.png': 'SPIKE TV',
                    'FNC.png': 'Fox News Channel',
                    'NGC.png': 'National Geographic Channel',
                    'WHVLLD.png': 'My9',
                    'AMC.png': 'AMC',
                }
                name = 'Unknown'
                for suffix, channel_name in icon_to_name.items():
                    if icon.endswith(suffix):
                        name = channel_name
                        break
if not url.startswith('http'):
now = {'title': title, 'plot': plot.strip()}
url = '%s%s%d' % (stream_type, url[4:-1], quality + 1)
aChannel = {'name': name, 'url': url,
'icon': icon, 'now': now}
if aChannel in channels:
print 'Duplicate channel found: %s' % (name)
else:
channels.append(aChannel)
channels.sort()
return channels
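
    # Usage sketch (credentials and output handling are illustrative):
    #
    #   u = Ustvnow('user@example.com', 'secret')
    #   for c in u.get_channels(quality=1, stream_type='rtmp'):
    #       print c['name'], c['url']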
def get_recordings(self, quality=1, stream_type='rtmp'):
self._login()
html = self._get_html('iphone_ajax', {'tab': 'iphone_viewdvrlist'})
schedule_index = html.find('Scheduled')
if schedule_index > 0:
html = html[0:schedule_index]
recordings = []
for r in re.finditer('class="panel".+?title="(.+?)".+?src="(.+?)".+?' +
'class="nowplaying_item">(.+?)<\/td>.+?(?:<\/a>' +
'(.+?)<\/td>.+?)?vertical-align:bottom.+?">.+?(Recorded.+?)' +
'<\/div>.+?"(rtsp.+?)".+?"(iphone_ajax.+?)"',
html, re.DOTALL):
chan, icon, title, plot, rec_date, url, del_url = r.groups()
rec_date = rec_date.replace('\n', ' ').replace('\r', '').replace('\t', '')
url = '%s%s%s' % (stream_type, url[4:-7],
['350', '650', '950'][quality])
if plot:
plot = plot.strip()
else:
plot = ''
recordings.append({'channel': chan,
'stream_url': url,
'title': title,
'plot': plot,
'rec_date': rec_date.strip(),
'icon': icon,
'del_url': del_url
})
return recordings
def delete_recording(self, del_url):
html = self._get_html(del_url)
print html
def _build_url(self, path, queries={}):
if queries:
query = Addon.build_query(queries)
return '%s/%s?%s' % (self.__BASE_URL, path, query)
else:
return '%s/%s' % (self.__BASE_URL, path)
def _fetch(self, url, form_data=False):
if form_data:
Addon.log('posting: %s %s' % (url, str(form_data)))
req = urllib2.Request(url, form_data)
else:
Addon.log('getting: ' + url)
req = url
try:
            response = urllib2.urlopen(req)  # use the prepared request so POST data is actually sent
return response
except urllib2.URLError, e:
Addon.log(str(e), True)
return False
def _get_html(self, path, queries={}):
html = False
url = self._build_url(path, queries)
response = self._fetch(url)
if response:
html = response.read()
else:
html = False
return html
def _login(self):
Addon.log('logging in')
self.token = None
self.cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
urllib2.install_opener(opener)
url = self._build_url('iphone_login', {'username': self.user,
'password': self.password})
response = self._fetch(url)
#response = opener.open(url)
        # fall-back token; overwritten below if the login cookie is present
        self.token = '1fjcfojwzitbz6ufzetw'
        for cookie in self.cj:
            print '%s: %s' % (cookie.name, cookie.value)
            if cookie.name == 'token':
                self.token = cookie.value
| gpl-2.0 | 6,640,263,805,704,231,000 | 39.095477 | 97 | 0.455571 | false | 4.255467 | false | false | false |
FLOSSmole/codeplex | 1getCodeplexPages.py | 1 | 3667 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it
# and/or modify it under the terms of GPL v3
#
# Copyright (C) 2004-2017 Megan Squire <[email protected]>
#
# We're working on this at http://flossmole.org - Come help us build
# an open and accessible repository for data and analyses for open
# source projects.
#
# If you use this code or data for preparing an academic paper please
# provide a citation to
#
# Howison, J., Conklin, M., & Crowston, K. (2006). FLOSSmole:
# A collaborative repository for FLOSS research data and analyses.
# Int. Journal of Information Technology & Web Engineering, 1(3), 17–26.
#
# and
#
# FLOSSmole(2004-2017) FLOSSmole: a project to provide academic access to data
# and analyses of open source projects. Available at http://flossmole.org
#
################################################################
# usage:
# 1getCodeplexPages.py <datasource_id> <db password>
# purpose:
# grab all the pages for projects stored on Codeplex before it was shut down
################################################################
import sys
import pymysql
try:
import urllib.request as urllib2
except ImportError:
import urllib2
# grab commandline args
datasourceID = str(sys.argv[1])
pw = str(sys.argv[2])
lastUpdated = None
# Open remote database connection
dbconn = pymysql.connect(host="",
user="",
passwd=pw,
db="",
use_unicode=True,
charset="utf8mb4")
cursor = dbconn.cursor()
# read in list of projects
# for each project, grab the following pages:
# --- home page
# --- history page
selectProjectsQuery = 'SELECT proj_name, proj_url FROM cp_projects \
WHERE datasource_id = %s \
ORDER BY 1'
insertHTMLQuery = 'INSERT INTO cp_projects_indexes (proj_name, \
datasource_id, \
home_html, \
history_html, \
last_updated) \
VALUES (%s, %s, %s, %s, %s)'
cursor.execute(selectProjectsQuery, (datasourceID,))
projectList = cursor.fetchall()
# insert project pages
for project in projectList:
projectName = project[0]
projectUrl = project[1]
print("grabbing", projectName)
# set up headers
hdr = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'}
try:
# grab the main page
req = urllib2.Request(projectUrl, headers=hdr)
mainhtml = urllib2.urlopen(req).read()
# grab the history page
historyUrl = projectUrl + 'wikipage/history'
req2 = urllib2.Request(historyUrl, headers=hdr)
historyhtml = urllib2.urlopen(req2).read()
cursor.execute(insertHTMLQuery, (projectName,
datasourceID,
mainhtml,
historyhtml,
lastUpdated))
dbconn.commit()
except pymysql.Error as error:
print(error)
dbconn.rollback()
    except Exception as e:
        print(e)
dbconn.close()
| gpl-3.0 | -2,855,964,648,277,341,000 | 32.623853 | 132 | 0.554707 | false | 3.96645 | false | false | false |
wpjesus/codematch | ietf/doc/templatetags/wg_menu.py | 2 | 2540 | # Copyright (C) 2009-2010 Nokia Corporation and/or its subsidiary(-ies).
# All rights reserved. Contact: Pasi Eronen <[email protected]>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# * Neither the name of the Nokia Corporation and/or its
# subsidiary(-ies) nor the names of its contributors may be used
# to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from django import template
from django.template.loader import render_to_string
from django.db import models
from ietf.group.models import Group
register = template.Library()
area_short_names = {
'ops':'Ops & Mgmt',
'rai':'RAI'
}
@register.simple_tag
def wg_menu():
parents = Group.objects.filter(models.Q(type="area") | models.Q(type="irtf", acronym="irtf"),
state="active").order_by('type_id', 'acronym')
for p in parents:
p.short_name = area_short_names.get(p.acronym) or p.name
if p.short_name.endswith(" Area"):
p.short_name = p.short_name[:-len(" Area")]
if p.type_id == "area":
p.menu_url = "/wg/#" + p.acronym
elif p.acronym == "irtf":
p.menu_url = "/rg/"
return render_to_string('base/menu_wg.html', { 'parents': parents })
| bsd-3-clause | 5,304,016,432,687,058,000 | 40.639344 | 97 | 0.710236 | false | 3.860182 | false | false | false |
rascul/botwot | plugins/cookie.py | 1 | 3091 | """ Cookie Plugin (botwot plugins.cookie) """
# Copyright 2014 Ray Schulz <https://rascul.io>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
import requests
from bs4 import BeautifulSoup
from pyaib.plugins import keyword, plugin_class
from pyaib.db import db_driver
@plugin_class
@plugin_class.requires('db')
class Cookie(object):
def __init__(self, context, config):
self.db = context.db.get('cookies')
self.cookies = list(self.db.getAll())
print "%s cookies are in the jar." % len(self.cookies)
@keyword("cookie")
@keyword.nosub("round", "refresh")
def keyword_cookie(self, context, msg, trigger, args, kargs):
""" [<user>] - Hand out a cookie, to <user> if specified """
# Choose a cookie
cookie = random.choice(self.cookies).value
        # Acquire target
target_user = " ".join(args)
# Dispense cookie
context.PRIVMSG(
msg.channel or msg.sender,
"\x01ACTION hands %s a %s from the cookie jar.\x01" % (
target_user or msg.sender,
cookie
)
)
@keyword("cookie")
@keyword.sub("round")
def keyword_cookie_round(self, context, msg, trigger, args, kargs):
""" - Pass around a box of cookies """
# Choose a cookie
cookie = random.choice(self.cookies).value
# Pass the box around
context.PRIVMSG(
msg.channel or msg.sender,
"\x01ACTION passes around a box of %s.\x01" % cookie
)
def scancookies(self):
""" Download and scan the cookie list into the database """
counter = 0
# Grab the listing from Wikipedia
page = requests.get("http://en.wikipedia.org/wiki/List_of_cookies")
soup = BeautifulSoup(page.text)
# grab each table row, drop the header
cookie_cells = [tr.td for tr in soup.table.find_all("tr")][1:]
# grab the cookie name from each row, some have links and some don't
new_cookies = [getattr(c.contents[0], "text", None) or getattr(c, "text", None) for c in cookie_cells]
# Fill the database
for c in new_cookies:
item = self.db.get(c)
item.value = "%s" % c
item.commit()
counter += 1
self.cookies = list(self.db.getAll())
print "%s cookies scanned." % counter
return counter
@keyword("cookie")
@keyword.sub("refresh")
def keyword_cookie_refresh(self, context, msg, trigger, args, kargs):
""" Download and scan the cookie list into the database """
# Only if user is an admin
if msg.sender == context.config.IRC.admin:
print "Scanning cookies..."
# First clear the database
for item in self.cookies:
self.db.delete(item.key)
msg.reply("%s cookies scanned." % self.scancookies())
| apache-2.0 | -7,046,567,008,575,976,000 | 26.353982 | 104 | 0.681009 | false | 3.338013 | false | false | false |
sourceperl/pyHMI | pyHMI/Dialog.py | 1 | 3709 | # -*- coding: utf-8 -*-
from tkinter import *
from .Colors import *
class ConfirmDialog(Toplevel):
def __init__(self, parent, title, text, valid_command):
super(ConfirmDialog, self).__init__(parent)
self.transient(parent)
self.grab_set()
self.title(title)
self.valid_command = valid_command
Label(self, text=text).grid(row=0, column=0, columnspan=2, padx=20, pady=20)
Button(self, text='Validation', command=self.ok).grid(row=1, column=0, padx=10, pady=10)
Button(self, text='Annulation', command=self.cancel).grid(row=1, column=1, padx=10, pady=10)
self.bind('<Escape>', lambda evt: self.destroy())
self.after(45000, self.destroy)
def ok(self):
self.valid_command()
self.destroy()
def cancel(self):
self.destroy()
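
# Minimal usage sketch (parent widget and callback are illustrative):
#
#   root = Tk()
#   ConfirmDialog(root, 'Confirm', 'Proceed?',
#                 valid_command=lambda: print('confirmed'))
#   root.mainloop()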
class ValveOpenCloseDialog(Toplevel):
def __init__(self, parent, title, text, open_command, close_command):
super(ValveOpenCloseDialog, self).__init__(parent)
self.transient(parent)
self.grab_set()
self.title(title)
self.open_command = open_command
self.close_command = close_command
Label(self, text=text).grid(row=0, column=0, columnspan=3, padx=20, pady=20)
Button(self, text='Ouverture', command=self.open).grid(row=1, column=0, padx=10, pady=5)
Button(self, text='Fermeture', command=self.close).grid(row=1, column=1, padx=10, pady=5)
Button(self, text='Annulation', command=self.destroy, default=ACTIVE).grid(row=1, column=2, padx=10, pady=5)
self.bind('<Escape>', lambda evt: self.destroy())
self.after(45000, self.destroy)
def open(self):
self.open_command()
self.destroy()
def close(self):
self.close_command()
self.destroy()
class ValveESDDialog(Toplevel):
def __init__(self, parent, title, text, stop_command, pst_command):
super(ValveESDDialog, self).__init__(parent)
self.transient(parent)
self.grab_set()
self.title(title)
self.stop_command = stop_command
self.pst_command = pst_command
Label(self, text=text).grid(row=0, column=0, columnspan=3, padx=20, pady=20)
Button(self, text='Fermeture', command=self.stop, background=RED).grid(row=1, column=0, padx=10, pady=5)
Button(self, text='Test partiel', command=self.pst).grid(row=1, column=1, padx=10, pady=5)
Button(self, text='Annulation', command=self.destroy, default=ACTIVE).grid(row=1, column=2, padx=10, pady=5)
self.bind('<Escape>', lambda evt: self.destroy())
self.after(45000, self.destroy)
def stop(self):
self.stop_command()
self.destroy()
def pst(self):
self.pst_command()
self.destroy()
class SetIntValueDialog(Toplevel):
def __init__(self, parent, title, text, valid_command):
super(SetIntValueDialog, self).__init__(parent)
self.transient(parent)
self.grab_set()
self.title(title)
self.valid_command = valid_command
self.value = IntVar()
Label(self, text=text).grid(row=0, column=0, columnspan=2, padx=20, pady=20)
Entry(self, textvariable=self.value).grid(row=1, column=0, columnspan=2, padx=10, pady=10)
Button(self, text='Validation', command=self.ok).grid(row=2, column=0, padx=10, pady=10)
Button(self, text='Annulation', command=self.cancel).grid(row=2, column=1, padx=10, pady=10)
self.bind('<Escape>', lambda evt: self.destroy())
self.after(45000, self.destroy)
def ok(self):
self.valid_command(self.value.get())
self.destroy()
def cancel(self):
self.destroy()
| mit | -1,398,264,354,190,854,100 | 37.635417 | 116 | 0.626314 | false | 3.311607 | false | false | false |
facebookresearch/Detectron | detectron/datasets/json_dataset.py | 1 | 19557 | # Copyright (c) 2017-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
"""Representation of the standard COCO json dataset format.
When working with a new dataset, we strongly suggest to convert the dataset into
the COCO json format and use the existing code; it is not recommended to write
code to support new dataset formats.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import copy
import logging
import numpy as np
import os
import scipy.sparse
# Must happen before importing COCO API (which imports matplotlib)
import detectron.utils.env as envu
envu.set_up_matplotlib()
# COCO API
from pycocotools import mask as COCOmask
from pycocotools.coco import COCO
from detectron.core.config import cfg
from detectron.utils.timer import Timer
import detectron.datasets.dataset_catalog as dataset_catalog
import detectron.utils.boxes as box_utils
from detectron.utils.io import load_object
import detectron.utils.segms as segm_utils
logger = logging.getLogger(__name__)
class JsonDataset(object):
"""A class representing a COCO json dataset."""
def __init__(self, name):
assert dataset_catalog.contains(name), \
'Unknown dataset name: {}'.format(name)
assert os.path.exists(dataset_catalog.get_im_dir(name)), \
'Im dir \'{}\' not found'.format(dataset_catalog.get_im_dir(name))
assert os.path.exists(dataset_catalog.get_ann_fn(name)), \
'Ann fn \'{}\' not found'.format(dataset_catalog.get_ann_fn(name))
logger.debug('Creating: {}'.format(name))
self.name = name
self.image_directory = dataset_catalog.get_im_dir(name)
self.image_prefix = dataset_catalog.get_im_prefix(name)
self.COCO = COCO(dataset_catalog.get_ann_fn(name))
self.debug_timer = Timer()
# Set up dataset classes
category_ids = self.COCO.getCatIds()
categories = [c['name'] for c in self.COCO.loadCats(category_ids)]
self.category_to_id_map = dict(zip(categories, category_ids))
self.classes = ['__background__'] + categories
self.num_classes = len(self.classes)
self.json_category_id_to_contiguous_id = {
v: i + 1
for i, v in enumerate(self.COCO.getCatIds())
}
self.contiguous_category_id_to_json_id = {
v: k
for k, v in self.json_category_id_to_contiguous_id.items()
}
self._init_keypoints()
def get_roidb(
self,
gt=False,
proposal_file=None,
min_proposal_size=2,
proposal_limit=-1,
crowd_filter_thresh=0
):
"""Return an roidb corresponding to the json dataset. Optionally:
- include ground truth boxes in the roidb
- add proposals specified in a proposals file
- filter proposals based on a minimum side length
- filter proposals that intersect with crowd regions
"""
assert gt is True or crowd_filter_thresh == 0, \
'Crowd filter threshold must be 0 if ground-truth annotations ' \
'are not included.'
image_ids = self.COCO.getImgIds()
image_ids.sort()
roidb = copy.deepcopy(self.COCO.loadImgs(image_ids))
for entry in roidb:
self._prep_roidb_entry(entry)
if gt:
# Include ground-truth object annotations
self.debug_timer.tic()
for entry in roidb:
self._add_gt_annotations(entry)
logger.debug(
'_add_gt_annotations took {:.3f}s'.
format(self.debug_timer.toc(average=False))
)
if proposal_file is not None:
# Include proposals from a file
self.debug_timer.tic()
self._add_proposals_from_file(
roidb, proposal_file, min_proposal_size, proposal_limit,
crowd_filter_thresh
)
logger.debug(
'_add_proposals_from_file took {:.3f}s'.
format(self.debug_timer.toc(average=False))
)
_add_class_assignments(roidb)
return roidb
def _prep_roidb_entry(self, entry):
"""Adds empty metadata fields to an roidb entry."""
# Reference back to the parent dataset
entry['dataset'] = self
# Make file_name an abs path
im_path = os.path.join(
self.image_directory, self.image_prefix + entry['file_name']
)
assert os.path.exists(im_path), 'Image \'{}\' not found'.format(im_path)
entry['image'] = im_path
entry['flipped'] = False
entry['has_visible_keypoints'] = False
# Empty placeholders
entry['boxes'] = np.empty((0, 4), dtype=np.float32)
entry['segms'] = []
entry['gt_classes'] = np.empty((0), dtype=np.int32)
entry['seg_areas'] = np.empty((0), dtype=np.float32)
entry['gt_overlaps'] = scipy.sparse.csr_matrix(
np.empty((0, self.num_classes), dtype=np.float32)
)
entry['is_crowd'] = np.empty((0), dtype=np.bool)
# 'box_to_gt_ind_map': Shape is (#rois). Maps from each roi to the index
# in the list of rois that satisfy np.where(entry['gt_classes'] > 0)
entry['box_to_gt_ind_map'] = np.empty((0), dtype=np.int32)
if self.keypoints is not None:
entry['gt_keypoints'] = np.empty(
(0, 3, self.num_keypoints), dtype=np.int32
)
# Remove unwanted fields that come from the json file (if they exist)
for k in ['date_captured', 'url', 'license', 'file_name']:
if k in entry:
del entry[k]
def _add_gt_annotations(self, entry):
"""Add ground truth annotation metadata to an roidb entry."""
ann_ids = self.COCO.getAnnIds(imgIds=entry['id'], iscrowd=None)
objs = self.COCO.loadAnns(ann_ids)
# Sanitize bboxes -- some are invalid
valid_objs = []
valid_segms = []
width = entry['width']
height = entry['height']
for obj in objs:
# crowd regions are RLE encoded
if segm_utils.is_poly(obj['segmentation']):
# Valid polygons have >= 3 points, so require >= 6 coordinates
obj['segmentation'] = [
p for p in obj['segmentation'] if len(p) >= 6
]
if obj['area'] < cfg.TRAIN.GT_MIN_AREA:
continue
if 'ignore' in obj and obj['ignore'] == 1:
continue
# Convert form (x1, y1, w, h) to (x1, y1, x2, y2)
x1, y1, x2, y2 = box_utils.xywh_to_xyxy(obj['bbox'])
x1, y1, x2, y2 = box_utils.clip_xyxy_to_image(
x1, y1, x2, y2, height, width
)
# Require non-zero seg area and more than 1x1 box size
if obj['area'] > 0 and x2 > x1 and y2 > y1:
obj['clean_bbox'] = [x1, y1, x2, y2]
valid_objs.append(obj)
valid_segms.append(obj['segmentation'])
num_valid_objs = len(valid_objs)
boxes = np.zeros((num_valid_objs, 4), dtype=entry['boxes'].dtype)
gt_classes = np.zeros((num_valid_objs), dtype=entry['gt_classes'].dtype)
gt_overlaps = np.zeros(
(num_valid_objs, self.num_classes),
dtype=entry['gt_overlaps'].dtype
)
seg_areas = np.zeros((num_valid_objs), dtype=entry['seg_areas'].dtype)
is_crowd = np.zeros((num_valid_objs), dtype=entry['is_crowd'].dtype)
box_to_gt_ind_map = np.zeros(
(num_valid_objs), dtype=entry['box_to_gt_ind_map'].dtype
)
if self.keypoints is not None:
gt_keypoints = np.zeros(
(num_valid_objs, 3, self.num_keypoints),
dtype=entry['gt_keypoints'].dtype
)
im_has_visible_keypoints = False
for ix, obj in enumerate(valid_objs):
cls = self.json_category_id_to_contiguous_id[obj['category_id']]
boxes[ix, :] = obj['clean_bbox']
gt_classes[ix] = cls
seg_areas[ix] = obj['area']
is_crowd[ix] = obj['iscrowd']
box_to_gt_ind_map[ix] = ix
if self.keypoints is not None:
gt_keypoints[ix, :, :] = self._get_gt_keypoints(obj)
if np.sum(gt_keypoints[ix, 2, :]) > 0:
im_has_visible_keypoints = True
if obj['iscrowd']:
# Set overlap to -1 for all classes for crowd objects
# so they will be excluded during training
gt_overlaps[ix, :] = -1.0
else:
gt_overlaps[ix, cls] = 1.0
entry['boxes'] = np.append(entry['boxes'], boxes, axis=0)
entry['segms'].extend(valid_segms)
# To match the original implementation:
# entry['boxes'] = np.append(
# entry['boxes'], boxes.astype(np.int).astype(np.float), axis=0)
entry['gt_classes'] = np.append(entry['gt_classes'], gt_classes)
entry['seg_areas'] = np.append(entry['seg_areas'], seg_areas)
entry['gt_overlaps'] = np.append(
entry['gt_overlaps'].toarray(), gt_overlaps, axis=0
)
entry['gt_overlaps'] = scipy.sparse.csr_matrix(entry['gt_overlaps'])
entry['is_crowd'] = np.append(entry['is_crowd'], is_crowd)
entry['box_to_gt_ind_map'] = np.append(
entry['box_to_gt_ind_map'], box_to_gt_ind_map
)
if self.keypoints is not None:
entry['gt_keypoints'] = np.append(
entry['gt_keypoints'], gt_keypoints, axis=0
)
entry['has_visible_keypoints'] = im_has_visible_keypoints
def _add_proposals_from_file(
self, roidb, proposal_file, min_proposal_size, top_k, crowd_thresh
):
"""Add proposals from a proposals file to an roidb."""
logger.info('Loading proposals from: {}'.format(proposal_file))
proposals = load_object(proposal_file)
id_field = 'indexes' if 'indexes' in proposals else 'ids' # compat fix
_remove_proposals_not_in_roidb(proposals, roidb, id_field)
_sort_proposals(proposals, id_field)
box_list = []
for i, entry in enumerate(roidb):
if i % 2500 == 0:
logger.info(' {:d}/{:d}'.format(i + 1, len(roidb)))
boxes = proposals['boxes'][i]
# Sanity check that these boxes are for the correct image id
assert entry['id'] == proposals[id_field][i]
# Remove duplicate boxes and very small boxes and then take top k
boxes = box_utils.clip_boxes_to_image(
boxes, entry['height'], entry['width']
)
keep = box_utils.unique_boxes(boxes)
boxes = boxes[keep, :]
keep = box_utils.filter_small_boxes(boxes, min_proposal_size)
boxes = boxes[keep, :]
if top_k > 0:
boxes = boxes[:top_k, :]
box_list.append(boxes)
_merge_proposal_boxes_into_roidb(roidb, box_list)
if crowd_thresh > 0:
_filter_crowd_proposals(roidb, crowd_thresh)
def _init_keypoints(self):
"""Initialize COCO keypoint information."""
self.keypoints = None
self.keypoint_flip_map = None
self.keypoints_to_id_map = None
self.num_keypoints = 0
# Thus far only the 'person' category has keypoints
if 'person' in self.category_to_id_map:
cat_info = self.COCO.loadCats([self.category_to_id_map['person']])
else:
return
# Check if the annotations contain keypoint data or not
if 'keypoints' in cat_info[0]:
keypoints = cat_info[0]['keypoints']
self.keypoints_to_id_map = dict(
zip(keypoints, range(len(keypoints))))
self.keypoints = keypoints
self.num_keypoints = len(keypoints)
self.keypoint_flip_map = {
'left_eye': 'right_eye',
'left_ear': 'right_ear',
'left_shoulder': 'right_shoulder',
'left_elbow': 'right_elbow',
'left_wrist': 'right_wrist',
'left_hip': 'right_hip',
'left_knee': 'right_knee',
'left_ankle': 'right_ankle'}
def _get_gt_keypoints(self, obj):
"""Return ground truth keypoints."""
if 'keypoints' not in obj:
return None
kp = np.array(obj['keypoints'])
x = kp[0::3] # 0-indexed x coordinates
y = kp[1::3] # 0-indexed y coordinates
# 0: not labeled; 1: labeled, not inside mask;
# 2: labeled and inside mask
v = kp[2::3]
        # floor division ('division' from __future__ makes '/' return a float)
        num_keypoints = len(obj['keypoints']) // 3
assert num_keypoints == self.num_keypoints
gt_kps = np.ones((3, self.num_keypoints), dtype=np.int32)
for i in range(self.num_keypoints):
gt_kps[0, i] = x[i]
gt_kps[1, i] = y[i]
gt_kps[2, i] = v[i]
return gt_kps
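
# Typical use (sketch; the dataset name must be registered in
# dataset_catalog):
#
#   ds = JsonDataset('coco_2014_train')
#   roidb = ds.get_roidb(gt=True, crowd_filter_thresh=0.7)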
def add_proposals(roidb, rois, scales, crowd_thresh):
"""Add proposal boxes (rois) to an roidb that has ground-truth annotations
but no proposals. If the proposals are not at the original image scale,
specify the scale factor that separate them in scales.
"""
box_list = []
for i in range(len(roidb)):
inv_im_scale = 1. / scales[i]
idx = np.where(rois[:, 0] == i)[0]
box_list.append(rois[idx, 1:] * inv_im_scale)
_merge_proposal_boxes_into_roidb(roidb, box_list)
if crowd_thresh > 0:
_filter_crowd_proposals(roidb, crowd_thresh)
_add_class_assignments(roidb)
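
# Sketch of the 'rois' layout expected by add_proposals: an (N, 5) array
# whose first column is the image index into the roidb and whose remaining
# columns are box coordinates at the scaled image size (values illustrative):
#
#   rois = np.array([[0, 10, 10, 50, 50],
#                    [1, 20, 30, 80, 90]], dtype=np.float32)
#   add_proposals(roidb, rois, scales=[1.0, 1.0], crowd_thresh=0)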
def _merge_proposal_boxes_into_roidb(roidb, box_list):
"""Add proposal boxes to each roidb entry."""
assert len(box_list) == len(roidb)
for i, entry in enumerate(roidb):
boxes = box_list[i]
num_boxes = boxes.shape[0]
gt_overlaps = np.zeros(
(num_boxes, entry['gt_overlaps'].shape[1]),
dtype=entry['gt_overlaps'].dtype
)
box_to_gt_ind_map = -np.ones(
(num_boxes), dtype=entry['box_to_gt_ind_map'].dtype
)
# Note: unlike in other places, here we intentionally include all gt
# rois, even ones marked as crowd. Boxes that overlap with crowds will
# be filtered out later (see: _filter_crowd_proposals).
gt_inds = np.where(entry['gt_classes'] > 0)[0]
if len(gt_inds) > 0:
gt_boxes = entry['boxes'][gt_inds, :]
gt_classes = entry['gt_classes'][gt_inds]
proposal_to_gt_overlaps = box_utils.bbox_overlaps(
boxes.astype(dtype=np.float32, copy=False),
gt_boxes.astype(dtype=np.float32, copy=False)
)
# Gt box that overlaps each input box the most
# (ties are broken arbitrarily by class order)
argmaxes = proposal_to_gt_overlaps.argmax(axis=1)
# Amount of that overlap
maxes = proposal_to_gt_overlaps.max(axis=1)
# Those boxes with non-zero overlap with gt boxes
I = np.where(maxes > 0)[0]
# Record max overlaps with the class of the appropriate gt box
gt_overlaps[I, gt_classes[argmaxes[I]]] = maxes[I]
box_to_gt_ind_map[I] = gt_inds[argmaxes[I]]
entry['boxes'] = np.append(
entry['boxes'],
boxes.astype(entry['boxes'].dtype, copy=False),
axis=0
)
entry['gt_classes'] = np.append(
entry['gt_classes'],
np.zeros((num_boxes), dtype=entry['gt_classes'].dtype)
)
entry['seg_areas'] = np.append(
entry['seg_areas'],
np.zeros((num_boxes), dtype=entry['seg_areas'].dtype)
)
entry['gt_overlaps'] = np.append(
entry['gt_overlaps'].toarray(), gt_overlaps, axis=0
)
entry['gt_overlaps'] = scipy.sparse.csr_matrix(entry['gt_overlaps'])
entry['is_crowd'] = np.append(
entry['is_crowd'],
np.zeros((num_boxes), dtype=entry['is_crowd'].dtype)
)
entry['box_to_gt_ind_map'] = np.append(
entry['box_to_gt_ind_map'],
box_to_gt_ind_map.astype(
entry['box_to_gt_ind_map'].dtype, copy=False
)
)
def _filter_crowd_proposals(roidb, crowd_thresh):
"""Finds proposals that are inside crowd regions and marks them as
overlap = -1 with each ground-truth rois, which means they will be excluded
from training.
"""
for entry in roidb:
gt_overlaps = entry['gt_overlaps'].toarray()
crowd_inds = np.where(entry['is_crowd'] == 1)[0]
non_gt_inds = np.where(entry['gt_classes'] == 0)[0]
if len(crowd_inds) == 0 or len(non_gt_inds) == 0:
continue
crowd_boxes = box_utils.xyxy_to_xywh(entry['boxes'][crowd_inds, :])
non_gt_boxes = box_utils.xyxy_to_xywh(entry['boxes'][non_gt_inds, :])
iscrowd_flags = [int(True)] * len(crowd_inds)
ious = COCOmask.iou(non_gt_boxes, crowd_boxes, iscrowd_flags)
bad_inds = np.where(ious.max(axis=1) > crowd_thresh)[0]
gt_overlaps[non_gt_inds[bad_inds], :] = -1
entry['gt_overlaps'] = scipy.sparse.csr_matrix(gt_overlaps)
def _add_class_assignments(roidb):
"""Compute object category assignment for each box associated with each
roidb entry.
"""
for entry in roidb:
gt_overlaps = entry['gt_overlaps'].toarray()
# max overlap with gt over classes (columns)
max_overlaps = gt_overlaps.max(axis=1)
# gt class that had the max overlap
max_classes = gt_overlaps.argmax(axis=1)
entry['max_classes'] = max_classes
entry['max_overlaps'] = max_overlaps
# sanity checks
# if max overlap is 0, the class must be background (class 0)
zero_inds = np.where(max_overlaps == 0)[0]
assert all(max_classes[zero_inds] == 0)
# if max overlap > 0, the class must be a fg class (not class 0)
nonzero_inds = np.where(max_overlaps > 0)[0]
assert all(max_classes[nonzero_inds] != 0)
def _sort_proposals(proposals, id_field):
"""Sort proposals by the specified id field."""
order = np.argsort(proposals[id_field])
fields_to_sort = ['boxes', id_field, 'scores']
for k in fields_to_sort:
proposals[k] = [proposals[k][i] for i in order]
def _remove_proposals_not_in_roidb(proposals, roidb, id_field):
# fix proposals so they don't contain entries for images not in the roidb
roidb_ids = set({entry["id"] for entry in roidb})
keep = [i for i, id in enumerate(proposals[id_field]) if id in roidb_ids]
for f in ['boxes', id_field, 'scores']:
proposals[f] = [proposals[f][i] for i in keep]
| apache-2.0 | -9,000,661,081,801,376,000 | 41.058065 | 80 | 0.575804 | false | 3.504211 | false | false | false |
sravanti/UVisa | visas/migrations/0001_initial.py | 1 | 1769 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Answer',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('answer_text', models.TextField(null=True, blank=True)),
('transcription', models.TextField(null=True, blank=True)),
('audio', models.FileField(null=True, upload_to=b'photos/%Y/%m/%d', blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Form',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('user', models.OneToOneField(to=settings.AUTH_USER_MODEL)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('question_eng', models.CharField(max_length=255, null=True, blank=True)),
('question_esp', models.CharField(max_length=255, null=True, blank=True)),
('link', models.URLField(max_length=255, null=True, blank=True)),
],
options={
},
bases=(models.Model,),
),
]
| mit | 7,047,489,270,904,382,000 | 35.102041 | 114 | 0.538157 | false | 4.400498 | false | false | false |
GjjvdBurg/ABED | abed/prune.py | 1 | 2281 | # -*- coding: utf-8 -*-
"""Functionality for removing results that don't match the current config
"""
import os
import shutil
from pathlib import Path
from .conf import settings
from .utils import hash_from_filename, mkdir
def prune_results(task_dict, dry_run=False):
"""Remove result files that are not in the task_dict
This can occur when the experiment configuration changes over time and old
result files are still lying around. This command moves them to the
PRUNE_DIR defined in the settings file.
"""
if not os.path.exists(settings.RESULT_DIR):
# no results, no pruning
return
# map from hash to Path of the result file
tasks_have = {}
dset_dirs = os.listdir(settings.RESULT_DIR)
for dset in dset_dirs:
dset_path = os.path.join(settings.RESULT_DIR, dset)
method_dirs = os.listdir(dset_path)
for method in method_dirs:
method_path = os.path.join(dset_path, method)
task_files = os.listdir(method_path)
for filename in task_files:
pth = os.path.join(method_path, filename)
h = hash_from_filename(pth)
tasks_have[h] = Path(pth)
# list hashes that we don't have in the task dict
unknown_hashes = []
for h in tasks_have:
        if h not in task_dict:
unknown_hashes.append(h)
# no unknown hashes, no problem
if not unknown_hashes:
return
# create the pruned dir if needed
if not dry_run:
mkdir(settings.PRUNE_DIR)
# move the stragglers
for h in unknown_hashes:
path = tasks_have[h]
filename = path.parts[-1]
method = path.parts[-2]
dset = path.parts[-3]
dest_dir = os.path.join(settings.PRUNE_DIR, dset, method)
if not dry_run:
mkdir(dest_dir)
dest_path = os.path.join(dest_dir, filename)
it = 1
while os.path.exists(dest_path):
            # build the new name from the file name, not the full path, so
            # duplicate suffixes don't stack and the directory isn't re-joined
            stem, ext = os.path.splitext(filename)
            dest_path = os.path.join(dest_dir, "%s_dup_%i%s" % (stem, it, ext))
it += 1
if dry_run:
print("Moving %s to %s" % (path, dest_path))
else:
shutil.move(path, dest_path)
| gpl-2.0 | -5,288,211,294,130,294,000 | 27.5125 | 79 | 0.597545 | false | 3.64377 | false | false | false |
abhikumar22/MYBLOG | blg/Lib/site-packages/social_core/backends/oauth.py | 1 | 17299 | import six
from requests_oauthlib import OAuth1
from oauthlib.oauth1 import SIGNATURE_TYPE_AUTH_HEADER
from six.moves.urllib_parse import urlencode, unquote
from ..utils import url_add_parameters, parse_qs, handle_http_errors, \
constant_time_compare
from ..exceptions import AuthFailed, AuthCanceled, AuthUnknownError, \
AuthMissingParameter, AuthStateMissing, \
AuthStateForbidden, AuthTokenError
from .base import BaseAuth
class OAuthAuth(BaseAuth):
"""OAuth authentication backend base class.
Also settings will be inspected to get more values names that should be
stored on extra_data field. Setting name is created from current backend
name (all uppercase) plus _EXTRA_DATA.
access_token is always stored.
URLs settings:
AUTHORIZATION_URL Authorization service url
ACCESS_TOKEN_URL Access token URL
"""
AUTHORIZATION_URL = ''
ACCESS_TOKEN_URL = ''
ACCESS_TOKEN_METHOD = 'GET'
REVOKE_TOKEN_URL = None
REVOKE_TOKEN_METHOD = 'POST'
ID_KEY = 'id'
SCOPE_PARAMETER_NAME = 'scope'
DEFAULT_SCOPE = None
SCOPE_SEPARATOR = ' '
REDIRECT_STATE = False
STATE_PARAMETER = False
def extra_data(self, user, uid, response, details=None, *args, **kwargs):
"""Return access_token and extra defined names to store in
extra_data field"""
data = super(OAuthAuth, self).extra_data(user, uid, response, details,
*args, **kwargs)
data['access_token'] = response.get('access_token', '') or \
kwargs.get('access_token')
return data
def state_token(self):
"""Generate csrf token to include as state parameter."""
return self.strategy.random_string(32)
def get_or_create_state(self):
if self.STATE_PARAMETER or self.REDIRECT_STATE:
# Store state in session for further request validation. The state
# value is passed as state parameter (as specified in OAuth2 spec),
# but also added to redirect, that way we can still verify the
# request if the provider doesn't implement the state parameter.
# Reuse token if any.
name = self.name + '_state'
state = self.strategy.session_get(name)
if state is None:
state = self.state_token()
self.strategy.session_set(name, state)
else:
state = None
return state
def get_session_state(self):
return self.strategy.session_get(self.name + '_state')
def get_request_state(self):
request_state = self.data.get('state') or \
self.data.get('redirect_state')
if request_state and isinstance(request_state, list):
request_state = request_state[0]
return request_state
def validate_state(self):
"""Validate state value. Raises exception on error, returns state
value if valid."""
if not self.STATE_PARAMETER and not self.REDIRECT_STATE:
return None
state = self.get_session_state()
request_state = self.get_request_state()
if not request_state:
raise AuthMissingParameter(self, 'state')
elif not state:
raise AuthStateMissing(self, 'state')
elif not constant_time_compare(request_state, state):
raise AuthStateForbidden(self)
else:
return state
def get_redirect_uri(self, state=None):
"""Build redirect with redirect_state parameter."""
uri = self.redirect_uri
if self.REDIRECT_STATE and state:
uri = url_add_parameters(uri, {'redirect_state': state})
return uri
def get_scope(self):
"""Return list with needed access scope"""
scope = self.setting('SCOPE', [])
if not self.setting('IGNORE_DEFAULT_SCOPE', False):
scope = scope + (self.DEFAULT_SCOPE or [])
return scope
def get_scope_argument(self):
param = {}
scope = self.get_scope()
if scope:
param[self.SCOPE_PARAMETER_NAME] = self.SCOPE_SEPARATOR.join(scope)
return param
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service. Implement in subclass"""
return {}
def authorization_url(self):
return self.AUTHORIZATION_URL
def access_token_url(self):
return self.ACCESS_TOKEN_URL
def revoke_token_url(self, token, uid):
return self.REVOKE_TOKEN_URL
def revoke_token_params(self, token, uid):
return {}
def revoke_token_headers(self, token, uid):
return {}
def process_revoke_token_response(self, response):
return response.status_code == 200
def revoke_token(self, token, uid):
if self.REVOKE_TOKEN_URL:
url = self.revoke_token_url(token, uid)
params = self.revoke_token_params(token, uid)
headers = self.revoke_token_headers(token, uid)
data = urlencode(params) if self.REVOKE_TOKEN_METHOD != 'GET' \
else None
response = self.request(url, params=params, headers=headers,
data=data, method=self.REVOKE_TOKEN_METHOD)
return self.process_revoke_token_response(response)
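
# Sketch of a minimal concrete backend built on these hooks; the name and
# URLs below are illustrative, not a real provider:
#
#   class ExampleAuth(OAuthAuth):
#       name = 'example'
#       AUTHORIZATION_URL = 'https://example.com/oauth/authorize'
#       ACCESS_TOKEN_URL = 'https://example.com/oauth/token'
#
#       def user_data(self, access_token, *args, **kwargs):
#           return self.get_json('https://example.com/api/me',
#                                params={'access_token': access_token})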
class BaseOAuth1(OAuthAuth):
"""Consumer based mechanism OAuth authentication, fill the needed
parameters to communicate properly with authentication service.
URLs settings:
REQUEST_TOKEN_URL Request token URL
"""
REQUEST_TOKEN_URL = ''
REQUEST_TOKEN_METHOD = 'GET'
OAUTH_TOKEN_PARAMETER_NAME = 'oauth_token'
REDIRECT_URI_PARAMETER_NAME = 'redirect_uri'
UNATHORIZED_TOKEN_SUFIX = 'unauthorized_token_name'
def auth_url(self):
"""Return redirect url"""
token = self.set_unauthorized_token()
return self.oauth_authorization_request(token)
def process_error(self, data):
if 'oauth_problem' in data:
if data['oauth_problem'] == 'user_refused':
raise AuthCanceled(self, 'User refused the access')
raise AuthUnknownError(self, 'Error was ' + data['oauth_problem'])
@handle_http_errors
def auth_complete(self, *args, **kwargs):
"""Return user, might be logged in"""
# Multiple unauthorized tokens are supported (see #521)
self.process_error(self.data)
self.validate_state()
token = self.get_unauthorized_token()
access_token = self.access_token(token)
return self.do_auth(access_token, *args, **kwargs)
@handle_http_errors
def do_auth(self, access_token, *args, **kwargs):
"""Finish the auth process once the access_token was retrieved"""
if not isinstance(access_token, dict):
access_token = parse_qs(access_token)
data = self.user_data(access_token)
if data is not None and 'access_token' not in data:
data['access_token'] = access_token
kwargs.update({'response': data, 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
def get_unauthorized_token(self):
name = self.name + self.UNATHORIZED_TOKEN_SUFIX
unauthed_tokens = self.strategy.session_get(name, [])
if not unauthed_tokens:
raise AuthTokenError(self, 'Missing unauthorized token')
data_token = self.data.get(self.OAUTH_TOKEN_PARAMETER_NAME)
if data_token is None:
raise AuthTokenError(self, 'Missing unauthorized token')
token = None
for utoken in unauthed_tokens:
orig_utoken = utoken
if not isinstance(utoken, dict):
utoken = parse_qs(utoken)
if utoken.get(self.OAUTH_TOKEN_PARAMETER_NAME) == data_token:
self.strategy.session_set(name, list(set(unauthed_tokens) -
set([orig_utoken])))
token = utoken
break
else:
raise AuthTokenError(self, 'Incorrect tokens')
return token
def set_unauthorized_token(self):
token = self.unauthorized_token()
name = self.name + self.UNATHORIZED_TOKEN_SUFIX
tokens = self.strategy.session_get(name, []) + [token]
self.strategy.session_set(name, tokens)
return token
def request_token_extra_arguments(self):
"""Return extra arguments needed on request-token process"""
return self.setting('REQUEST_TOKEN_EXTRA_ARGUMENTS', {})
def unauthorized_token(self):
"""Return request for unauthorized token (first stage)"""
params = self.request_token_extra_arguments()
params.update(self.get_scope_argument())
key, secret = self.get_key_and_secret()
# decoding='utf-8' produces errors with python-requests on Python3
# since the final URL will be of type bytes
decoding = None if six.PY3 else 'utf-8'
state = self.get_or_create_state()
response = self.request(
self.REQUEST_TOKEN_URL,
params=params,
auth=OAuth1(key, secret, callback_uri=self.get_redirect_uri(state),
decoding=decoding),
method=self.REQUEST_TOKEN_METHOD
)
content = response.content
if response.encoding or response.apparent_encoding:
content = content.decode(response.encoding or
response.apparent_encoding)
else:
content = response.content.decode()
return content
def oauth_authorization_request(self, token):
"""Generate OAuth request to authorize token."""
if not isinstance(token, dict):
token = parse_qs(token)
params = self.auth_extra_arguments() or {}
params.update(self.get_scope_argument())
params[self.OAUTH_TOKEN_PARAMETER_NAME] = token.get(
self.OAUTH_TOKEN_PARAMETER_NAME
)
state = self.get_or_create_state()
params[self.REDIRECT_URI_PARAMETER_NAME] = self.get_redirect_uri(state)
return '{0}?{1}'.format(self.authorization_url(), urlencode(params))
def oauth_auth(self, token=None, oauth_verifier=None,
signature_type=SIGNATURE_TYPE_AUTH_HEADER):
key, secret = self.get_key_and_secret()
oauth_verifier = oauth_verifier or self.data.get('oauth_verifier')
if token:
resource_owner_key = token.get('oauth_token')
resource_owner_secret = token.get('oauth_token_secret')
if not resource_owner_key:
raise AuthTokenError(self, 'Missing oauth_token')
if not resource_owner_secret:
raise AuthTokenError(self, 'Missing oauth_token_secret')
else:
resource_owner_key = None
resource_owner_secret = None
# decoding='utf-8' produces errors with python-requests on Python3
# since the final URL will be of type bytes
decoding = None if six.PY3 else 'utf-8'
state = self.get_or_create_state()
return OAuth1(key, secret,
resource_owner_key=resource_owner_key,
resource_owner_secret=resource_owner_secret,
callback_uri=self.get_redirect_uri(state),
verifier=oauth_verifier,
signature_type=signature_type,
decoding=decoding)
def oauth_request(self, token, url, params=None, method='GET'):
"""Generate OAuth request, setups callback url"""
return self.request(url, method=method, params=params,
auth=self.oauth_auth(token))
def access_token(self, token):
"""Return request for access token value"""
return self.get_querystring(self.access_token_url(),
auth=self.oauth_auth(token),
method=self.ACCESS_TOKEN_METHOD)
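
# OAuth1 consumers additionally declare the request-token endpoint; a
# minimal sketch (illustrative endpoints, not a real provider):
#
#   class ExampleOAuth1(BaseOAuth1):
#       name = 'example-oauth1'
#       AUTHORIZATION_URL = 'https://example.com/oauth/authenticate'
#       REQUEST_TOKEN_URL = 'https://example.com/oauth/request_token'
#       ACCESS_TOKEN_URL = 'https://example.com/oauth/access_token'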
class BaseOAuth2(OAuthAuth):
"""Base class for OAuth2 providers.
OAuth2 draft details at:
http://tools.ietf.org/html/draft-ietf-oauth-v2-10
"""
REFRESH_TOKEN_URL = None
REFRESH_TOKEN_METHOD = 'POST'
RESPONSE_TYPE = 'code'
REDIRECT_STATE = True
STATE_PARAMETER = True
def auth_params(self, state=None):
client_id, client_secret = self.get_key_and_secret()
params = {
'client_id': client_id,
'redirect_uri': self.get_redirect_uri(state)
}
if self.STATE_PARAMETER and state:
params['state'] = state
if self.RESPONSE_TYPE:
params['response_type'] = self.RESPONSE_TYPE
return params
def auth_url(self):
"""Return redirect url"""
state = self.get_or_create_state()
params = self.auth_params(state)
params.update(self.get_scope_argument())
params.update(self.auth_extra_arguments())
params = urlencode(params)
if not self.REDIRECT_STATE:
# redirect_uri matching is strictly enforced, so match the
# providers value exactly.
params = unquote(params)
return '{0}?{1}'.format(self.authorization_url(), params)
def auth_complete_params(self, state=None):
client_id, client_secret = self.get_key_and_secret()
return {
'grant_type': 'authorization_code', # request auth code
'code': self.data.get('code', ''), # server response code
'client_id': client_id,
'client_secret': client_secret,
'redirect_uri': self.get_redirect_uri(state)
}
def auth_complete_credentials(self):
return None
def auth_headers(self):
return {'Content-Type': 'application/x-www-form-urlencoded',
'Accept': 'application/json'}
def extra_data(self, user, uid, response, details=None, *args, **kwargs):
"""Return access_token, token_type, and extra defined names to store in
extra_data field"""
data = super(BaseOAuth2, self).extra_data(user, uid, response,
details=details,
*args, **kwargs)
data['token_type'] = response.get('token_type') or \
kwargs.get('token_type')
return data
def request_access_token(self, *args, **kwargs):
return self.get_json(*args, **kwargs)
def process_error(self, data):
if data.get('error'):
if data['error'] == 'denied' or data['error'] == 'access_denied':
raise AuthCanceled(self, data.get('error_description', ''))
raise AuthFailed(self, data.get('error_description') or
data['error'])
elif 'denied' in data:
raise AuthCanceled(self, data['denied'])
@handle_http_errors
def auth_complete(self, *args, **kwargs):
"""Completes login process, must return user instance"""
self.process_error(self.data)
state = self.validate_state()
response = self.request_access_token(
self.access_token_url(),
data=self.auth_complete_params(state),
headers=self.auth_headers(),
auth=self.auth_complete_credentials(),
method=self.ACCESS_TOKEN_METHOD
)
self.process_error(response)
return self.do_auth(response['access_token'], response=response,
*args, **kwargs)
@handle_http_errors
def do_auth(self, access_token, *args, **kwargs):
"""Finish the auth process once the access_token was retrieved"""
data = self.user_data(access_token, *args, **kwargs)
response = kwargs.get('response') or {}
response.update(data or {})
if 'access_token' not in response:
response['access_token'] = access_token
kwargs.update({'response': response, 'backend': self})
return self.strategy.authenticate(*args, **kwargs)
def refresh_token_params(self, token, *args, **kwargs):
client_id, client_secret = self.get_key_and_secret()
return {
'refresh_token': token,
'grant_type': 'refresh_token',
'client_id': client_id,
'client_secret': client_secret
}
def process_refresh_token_response(self, response, *args, **kwargs):
return response.json()
def refresh_token(self, token, *args, **kwargs):
params = self.refresh_token_params(token, *args, **kwargs)
url = self.refresh_token_url()
method = self.REFRESH_TOKEN_METHOD
key = 'params' if method == 'GET' else 'data'
request_args = {'headers': self.auth_headers(),
'method': method,
key: params}
request = self.request(url, **request_args)
return self.process_refresh_token_response(request, *args, **kwargs)
def refresh_token_url(self):
return self.REFRESH_TOKEN_URL or self.access_token_url()
| gpl-3.0 | -4,339,673,126,509,218,300 | 38.676606 | 79 | 0.595757 | false | 4.172455 | false | false | false |
CoffeeForThinkers/MagentoModels | mm/routines/product.py | 1 | 3304 | import logging
import mm.routines
_LOGGER = logging.getLogger(__name__)
class ProductRoutines(mm.routines.RoutinesBase):
noun = 'product'
def update_enum_product_attribute(self, sku, att_name, att_value):
record = \
self.get_one_record(
'update_enum_product_attribute',
sku, att_name, att_value)
record['affected'] = int(record['affected'])
return record
def upsert_product_int_attribute(self, sku, att_name, att_value, store_id=0):
record = \
self.get_one_record(
'upsert_product_int_attribute',
sku, att_name, att_value, store_id)
return record
def upsert_product_varchar_attribute(self, sku, att_name, att_value, store_id=0):
record = \
self.get_one_record(
'upsert_product_varchar_attribute',
sku, att_name, att_value, store_id)
return record
def get_configurable_associated_products(self, store_id=None, is_active=None, is_visible=None):
message = "Not a valid input value for '{0}'. Use: {1}"
assert type(store_id) is int or store_id is None, \
message.format('store_id', 'None or int')
assert is_active is True or is_active is False or is_active is None, \
message.format('is_active', 'None, True or False')
assert is_visible is True or is_visible is False or is_visible is None, \
message.format('is_visible', 'None, True or False')
rows = \
self.call(
'get_configurable_associated_products',
store_id,
is_active,
is_visible)
return rows
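
    # Usage sketch (construction arguments depend on RoutinesBase and are
    # illustrative here):
    #
    #   routines = ProductRoutines(...)
    #   rows = routines.get_configurable_associated_products(
    #       store_id=1, is_active=True, is_visible=None)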
def get_configurable_associated_products_stock(self, store_id=None):
assert type(store_id) is int or store_id is None, \
"Not a valid input value for 'store_id'. Use: 'None or int'"
rows = \
self.call(
'get_configurable_associated_products_stock',
store_id)
return rows
def get_product_listing_with_attributes(self, product_type=None, store_id=None):
assert type(product_type) is str or product_type is None, \
"Not a valid input value for 'product_type'. Use: 'None or string'"
assert type(store_id) is int or store_id is None, \
"Not a valid input value for 'store_id'. Use: 'None or int'"
rows = \
self.call(
'get_product_listing_with_attributes',
product_type,
store_id)
return rows
def upsert_product_price(self, sku, currency_code, price, special_price, store_id=0):
record = \
self.get_one_record(
'upsert_product_price',
sku, store_id, currency_code, price, special_price)
record['affected'] = int(record['affected'])
return record
def catalog_association(self, product_id, linked_product_id, link_type, is_cleanup = False):
record = \
self.get_one_record(
'catalog_association',
product_id, linked_product_id, link_type, 1 if is_cleanup else 0)
record['affected'] = int(record['affected'])
return record
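# Usage sketch (hedged -- construction details live in
# mm.routines.RoutinesBase and are not shown in this file; SKU and
# attribute values below are illustrative):
#
#   routines = ProductRoutines(...)  # arguments depend on RoutinesBase
#   routines.upsert_product_price('SKU-1', 'USD', 19.99, None, store_id=0)
#   record = routines.update_enum_product_attribute('SKU-1', 'color', 'blue')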
| gpl-3.0 | 2,908,460,994,270,722,600 | 31.712871 | 99 | 0.573245 | false | 3.882491 | false | false | false |
graphql-python/graphql-core | tests/utilities/test_get_introspection_query.py | 1 | 1816 | import re
from graphql.utilities import get_introspection_query
def describe_get_introspection_query():
def skips_all_description_fields():
has_descriptions = re.compile(r"\bdescription\b").search
assert has_descriptions(get_introspection_query())
assert has_descriptions(get_introspection_query(descriptions=True))
assert not has_descriptions(get_introspection_query(descriptions=False))
def includes_is_repeatable_field_on_directives():
has_repeatability = re.compile(r"\bisRepeatable\b").search
assert not has_repeatability(get_introspection_query())
assert has_repeatability(get_introspection_query(directive_is_repeatable=True))
assert not has_repeatability(
get_introspection_query(directive_is_repeatable=False)
)
def includes_description_field_on_schema():
all_descriptions = re.compile(r"\bdescription\b").findall
assert len(all_descriptions(get_introspection_query())) == 5
assert (
len(all_descriptions(get_introspection_query(schema_description=False)))
== 5
)
assert (
len(all_descriptions(get_introspection_query(schema_description=True))) == 6
)
assert not all_descriptions(
get_introspection_query(descriptions=False, schema_description=True)
)
def includes_specified_by_url_field():
all_specified_by_urls = re.compile(r"\bspecifiedByUrl\b").findall
assert not all_specified_by_urls(get_introspection_query())
assert not all_specified_by_urls(
get_introspection_query(specified_by_url=False)
)
assert (
len(all_specified_by_urls(get_introspection_query(specified_by_url=True)))
== 1
)
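# Minimal illustration of the API under test: the helper returns the
# introspection query source as a plain string; the options only toggle
# which fields appear in that string.
#
#   query = get_introspection_query(descriptions=False)
#   assert isinstance(query, str)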
| mit | 182,015,892,003,239,520 | 30.859649 | 88 | 0.660793 | false | 4.165138 | false | false | false |
glormph/msstitch | src/app/drivers/prottable.py | 1 | 5076 | from app.drivers.base import PepProttableDriver
from app.drivers.options import prottable_options
from app.readers import tsv as tsvreader
from app.dataformats import prottable as prottabledata
from app.dataformats import peptable as peptabledata
from app.dataformats import mzidtsv as mzidtsvdata
from app.actions import proteins
from app.actions.psmtable import isosummarize
class ProttableDriver(PepProttableDriver):
mediannormalize = False # FIXME remove when done
def set_options(self):
super().set_options()
options = self.define_options(['decoyfn', 'scorecolpattern', 'minlogscore',
'quantcolpattern', 'minint', 'denomcols', 'denompatterns',
'precursor', 'psmfile'], prottable_options)
self.options.update(options)
def get_td_proteins_bestpep(self, theader, dheader):
self.header = [self.headeraccfield] + prottabledata.PICKED_HEADER
tscorecol = tsvreader.get_cols_in_file(self.scorecolpattern, theader, True)
dscorecol = tsvreader.get_cols_in_file(self.scorecolpattern, dheader, True)
tpeps = tsvreader.generate_tsv_psms(self.fn, theader)
dpeps = tsvreader.generate_tsv_psms(self.decoyfn, dheader)
targets = proteins.generate_bestpep_proteins(tpeps, tscorecol,
self.minlogscore, self.headeraccfield, self.featcol)
decoys = proteins.generate_bestpep_proteins(dpeps, dscorecol,
self.minlogscore, self.headeraccfield, self.featcol)
return targets, decoys
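    # get_quant decorates the protein/gene features with quantification:
    # MS1 precursor area from the top-3 peptides (if precursor quant is
    # enabled) and/or summarized isobaric ratios from the PSM table (if a
    # quant column pattern is given).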
def get_quant(self, theader, features):
if self.precursor:
tpeps = tsvreader.generate_tsv_psms(self.fn, theader)
self.header.append(prottabledata.HEADER_AREA)
features = proteins.add_ms1_quant_from_top3_mzidtsv(features,
tpeps, self.headeraccfield, self.featcol)
if self.quantcolpattern:
psmheader = tsvreader.get_tsv_header(self.psmfile)
if self.denomcols is not None:
denomcols = [self.number_to_headerfield(col, psmheader)
for col in self.denomcols]
elif self.denompatterns is not None:
denomcolnrs = [tsvreader.get_columns_by_pattern(psmheader, pattern)
for pattern in self.denompatterns]
denomcols = set([col for cols in denomcolnrs for col in cols])
else:
raise RuntimeError('Must define either denominator column numbers '
                    'or regex patterns to find them')
quantcols = tsvreader.get_columns_by_pattern(psmheader, self.quantcolpattern)
nopsms = [isosummarize.get_no_psms_field(qf) for qf in quantcols]
self.header = self.header + quantcols + nopsms
features = isosummarize.get_isobaric_ratios(self.psmfile, psmheader,
quantcols, denomcols, self.minint,
features, self.headeraccfield,
self.featcol, self.mediannormalize)
return features
class ProteinsDriver(ProttableDriver):
command = 'proteins'
commandhelp = 'Create a protein table from peptides'
outsuffix = '_proteins.tsv'
headeraccfield = prottabledata.HEADER_PROTEIN
featcol = peptabledata.HEADER_MASTERPROTEINS
def set_features(self):
theader = tsvreader.get_tsv_header(self.fn)
dheader = tsvreader.get_tsv_header(self.decoyfn)
targets, decoys = self.get_td_proteins_bestpep(theader, dheader)
features = proteins.generate_protein_fdr(targets, decoys, self.headeraccfield)
self.features = self.get_quant(theader, features)
class GenesDriver(ProttableDriver):
command = 'genes'
commandhelp = 'Create a gene table from peptides'
outsuffix = '_genes.tsv'
headeraccfield = prottabledata.HEADER_GENENAME
featcol = mzidtsvdata.HEADER_SYMBOL
def set_options(self):
super().set_options()
options = self.define_options(['fastadelim', 'genefield', 't_fasta',
'd_fasta'], prottable_options)
self.options.update(options)
def set_features(self):
theader = tsvreader.get_tsv_header(self.fn)
dheader = tsvreader.get_tsv_header(self.decoyfn)
targets, decoys = self.get_td_proteins_bestpep(theader, dheader)
fastadelim, genefield = self.get_fastadelim_genefield(self.fastadelim,
self.genefield)
features = proteins.generate_pick_fdr(
targets, decoys, self.t_fasta, self.d_fasta, 'fasta', self.headeraccfield,
fastadelim, genefield)
self.features = self.get_quant(theader, features)
class ENSGDriver(GenesDriver):
command = 'ensg'
commandhelp = 'Create an ENSG table from peptides'
outsuffix = '_ensg.tsv'
headeraccfield = prottabledata.HEADER_GENEID
featcol = mzidtsvdata.HEADER_GENE
# TODO create a result driver? For what?
| mit | -8,372,553,706,438,568,000 | 43.920354 | 89 | 0.650315 | false | 3.529903 | false | false | false |
jgagneastro/FireHose_OLD | 3-XIDL/23-XIDL/idlspec2d/bin/copy_spPlan.py | 2 | 5070 | #!/usr/bin/env python
"""
Utility script to copy the spPlan* files from one production to another
while updating the RUN2D entries appropriately.
Stephen Bailey, LBL
Fall 2012
"""
import sys
import os
import os.path
import random
from glob import glob
import pyfits
#- copy spPlan file while updating the RUN2D entry
def copyplan(inplan, outplan, run2d):
finput = open(inplan)
foutput = open(outplan, 'w')
for line in finput:
if line.startswith('RUN2D'):
xx = line.split(None, 2) #- RUN2D VER [# Comment]
xx[1] = run2d #- replace RUN2D
line = " ".join(xx) + '\n' #- put back together with newline
foutput.write(line)
finput.close()
foutput.close()
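#- Example (file names illustrative): copyplan('in/spPlan2d-1234-55555.par',
#- 'out/spPlan2d-1234-55555.par', 'v5_7_2') copies the file verbatim except
#- that the RUN2D line's value becomes v5_7_2.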
#-------------------------------------------------------------------------
import optparse
parser = optparse.OptionParser(usage = "%prog [options]",
description="""Copy spPlan files from one redux version to another while replacing RUN2D.
""")
parser.add_option("-i", "--input", type="string", help="input directory [default $BOSS_SPECTRO_REDUX/$RUN2D/]")
parser.add_option("-o", "--output", type="string", help="output directory")
parser.add_option("--run2d", type="string", help="output RUN2D version")
parser.add_option("--minmjd", type="int", help="min MJD to include", default=0)
parser.add_option("--maxmjd", type="int", help="max MJD to include", default=100000)
parser.add_option("-n", "--numplates", type="int", help="number of plates to copy [default all good ones]")
parser.add_option("-R", "--randseed", type="int", default=0, help="random seed [default 0]")
### parser.add_option("--run1d", type="string", help="output RUN1D version")
parser.add_option("-b", "--bad", help="also copy bad quality plans, not just good ones", action="store_true")
parser.add_option("-p", "--platelist", help="override platelist location [default input/platelist.fits]")
opts, args = parser.parse_args()
#- Set random seed so that results are reproducible
random.seed(opts.randseed)
#- Default input directory $BOSS_SPECTRO_REDUX/$RUN2D/
if opts.input is None:
opts.input = os.environ['BOSS_SPECTRO_REDUX'] + "/" + os.environ['RUN2D']
#- required options
if opts.output is None:
print >> sys.stderr, 'ERROR: you must specify -o/--output directory'
print >> sys.stderr, 'To see all options, run copy_spPlan.py -h'
sys.exit(1)
#- choose run2d based upon output name if needed
if opts.run2d is None:
opts.run2d = os.path.basename(opts.output)
if opts.run2d in (None, '', '.'):
opts.run2d = os.path.basename(os.path.dirname(opts.output))
if opts.run2d in (None, '', '.'):
print "ERROR: Unable to derive RUN2D from path", opts.output
print "ERROR: use --run2d instead"
sys.exit(2)
print "Using RUN2D="+opts.run2d
#- Create output directory if needed
if not os.path.isdir(opts.output):
os.makedirs(opts.output)
#- Find platelist file
if opts.platelist is None:
opts.platelist = opts.input + '/platelist.fits'
if not os.path.exists(opts.platelist):
print >> sys.stderr, "ERROR: if no platelist.fits in input dir, you must specify a platelist"
sys.exit(1)
#- Create set of plates with at least one good plugging within the
#- desired MJD range
p = pyfits.getdata(opts.platelist, 1)
goodplates = set()
for plate, mjd, quality in zip(p['PLATE'], p['MJD'], p['PLATEQUALITY']):
if (quality.strip() == 'good' or opts.bad):
if opts.minmjd <= mjd <= opts.maxmjd:
goodplates.add( plate )
#- Randomly subsample
if opts.numplates is not None:
goodplates = set( random.sample(goodplates, opts.numplates) )
#- Make matching list of good pluggings for those plates
goodplugs = set()
for plate, mjd, quality in zip(p['PLATE'], p['MJD'], p['PLATEQUALITY']):
if plate in goodplates and (quality.strip() == 'good' or opts.bad):
goodplugs.add( (plate, mjd) )
#- Loop over plates, copying the plan files
ncopied = 0
for plate in sorted(goodplates):
platedir = os.path.join(opts.input, str(plate))
print '\rPlate', plate,
sys.stdout.flush()
#- Copy all the plan2d files since they may be needed for the coadd
#- even if they aren't in platelist
    plan2dfiles = glob(platedir + '/spPlan2d*.par')
    #- Define outdir before the copy loops: it is also needed for the
    #- spPlancomb copies below, even when this plate has no spPlan2d files.
    outdir = opts.output + "/" + str(plate)
    if not os.path.isdir(outdir):
        os.makedirs(outdir)
    for planfile in plan2dfiles:
outplan = outdir + '/' + os.path.basename(planfile)
copyplan(planfile, outplan, opts.run2d)
#- Copy only the good plate-mjd plancomb files
plancombfiles = glob(platedir + '/spPlancomb*.par')
for planfile in plancombfiles:
mjd = int(os.path.basename(planfile).split('-')[2][0:5])
if (plate, mjd) in goodplugs:
outplan = outdir + '/' + os.path.basename(planfile)
copyplan(planfile, outplan, opts.run2d)
#- final blank line print to get CR since we were being fancy with '\r...'
print
| gpl-2.0 | -461,338,971,563,287,600 | 37.120301 | 112 | 0.642998 | false | 3.25 | false | false | false |
openlmi/openlmi-doc | doc/python/lmi/test/base.py | 1 | 5665 | # Copyright (C) 2012-2014 Red Hat, Inc. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
# Authors: Jan Safranek <[email protected]>
# Authors: Michal Minar <[email protected]>
# Authors: Roman Rakus <[email protected]>
#
"""
Base classes for *OpenLMI Provider* test cases.
"""
import os
import socket
from lmi.test import CIMError
from lmi.test import unittest
from lmi.test import util
from lmi.test import wbem
def render_iname(iname, indent=2):
"""
Render object path in human readable way. Result will occupy multiple
lines. The first line is a class name, which is not indented at all. Other
lines will be indented with *indent* spaces.
:param iname: Object path to render.
:type iname: :py:class:`lmiwbem.CIMInstanceName`
    :param integer indent: Number of spaces prefixing all lines but the first.
:returns: *iname* nicely rendered.
:rtype: string
"""
if not isinstance(iname, wbem.CIMInstanceName):
return repr(iname)
lines = [ "%s" % iname.classname
, " "*indent + "namespace: %s" % iname.namespace
, " "*indent + "keys:"]
align = max([len(k) for k in iname.keybindings.iterkeys()])
for key, value in iname.keybindings.iteritems():
if isinstance(value, wbem.CIMInstanceName):
value = render_iname(value, indent + 4)
lines.append(" "*indent + (" %%-%ds : %%s" % align) % (key, value))
return "\n".join(lines)
class BaseLmiTestCase(unittest.TestCase):
"""
Base class for all LMI test cases.
"""
#: Value used in ``SystemName`` key properties in various *CIM* instances.
#: It's also used to fill ``CIM_ComputerySystem.Name`` property.
SYSTEM_NAME = socket.gethostname()
@classmethod
def setUpClass(cls):
#: Cached value of SystemCreationClassName set with
#: ``LMI_CS_CLASSNAME`` environment variable.
cls.system_cs_name = os.environ.get(
"LMI_CS_CLASSNAME", "PG_ComputerSystem")
#: *URL* of *CIMOM* we connect to. Overriden with ``LMI_CIMOM_URL``
#: environment variable.
cls.url = os.environ.get("LMI_CIMOM_URL", "https://localhost:5989")
#: User name for authentication with *CIMOM*. Overriden with
#: ``LMI_CIMOM_USERNAME`` variable.
cls.username = os.environ.get("LMI_CIMOM_USERNAME", "root")
#: User's password for authentication with *CIMOM*. Overriden with
#: ``LMI_CIMOM_PASSWORD`` environment variable.
cls.password = os.environ.get("LMI_CIMOM_PASSWORD", "")
#: Name of *CIMOM* we connect to. There are two possible values:
#: ``"tog-pegasus"`` and ``"sblim-sfcb"``. Overriden with
#: ``LMI_CIMOM_BROKER`` environment variable.
cls.cimom = os.environ.get("LMI_CIMOM_BROKER", "tog-pegasus")
#: Boolean value saying whether to run dangerous tests. These are marked
#: with :py:func:`mark_dangerous` decorator. This is set with
#: ``LMI_RUN_DANGEROUS`` environment variable.
cls.run_dangerous = util.get_environvar('LMI_RUN_DANGEROUS', '0', bool)
#: Boolean value saying whether to run tedious tests. These are marked
#: with :py:func:`mark_tedious` decorator. This is set with
#: ``LMI_RUN_TEDIOUS`` environment variable.
cls.run_tedious = util.get_environvar('LMI_RUN_TEDIOUS', '1', bool)
def assertRaisesCIM(self, cim_err_code, func, *args, **kwds):
"""
This test passes if given function called with supplied arguments
raises `CIMError` with given cim error code.
"""
with self.assertRaises(CIMError) as cm:
func(*args, **kwds)
self.assertEqual(cim_err_code, cm.exception.args[0])
def assertCIMNameEqual(self, fst, snd, msg=None):
"""
Compare two objects of :py:class:`lmiwbem.CIMInstanceName`. Their host
properties are not checked.
"""
if msg is None:
msg = ( "%s\n\nis not equal to: %s"
% (render_iname(fst), render_iname(snd)))
self.assertTrue(util.check_inames_equal(fst, snd), msg)
def assertCIMNameIn(self, name, candidates):
"""
Checks that given :py:class:`lmiwbem.CIMInstanceName` is present in
set of candidates. It compares all properties but ``host``.
"""
for candidate in candidates:
if util.check_inames_equal(name, candidate):
return
self.assertTrue(False, 'name "%s" is not in candidates' % str(name))
def assertNocaseDictEqual(self, fst, snd, msg=None):
"""
Compare two no-case dictionaries ignoring the case of their keys.
"""
fst_dict = {}
for (key, value) in fst.iteritems():
fst_dict[key.lower()] = value
snd_dict = {}
for (key, value) in snd.iteritems():
snd_dict[key.lower()] = value
self.assertEqual(fst_dict, snd_dict, msg)
| gpl-2.0 | -3,318,208,735,782,902,000 | 41.276119 | 80 | 0.644484 | false | 3.596825 | true | false | false |
nnugumanov/yandex-tank | yandextank/plugins/Phantom/plugin.py | 1 | 11454 | """ Contains Phantom Plugin, Console widgets, result reader classes """
# FIXME: 3 there is no graceful way to interrupt the process of phout import
# TODO: phout import
import logging
import multiprocessing as mp
import os
import subprocess
import time
from ...common.util import execute, expand_to_seconds
from ...common.interfaces import AbstractPlugin, AbstractCriterion, GeneratorPlugin
from .reader import PhantomReader, PhantomStatsReader
from .utils import PhantomConfig
from .widget import PhantomInfoWidget, PhantomProgressBarWidget
from ..Aggregator import Plugin as AggregatorPlugin
from ..Autostop import Plugin as AutostopPlugin
from ..Console import Plugin as ConsolePlugin
logger = logging.getLogger(__name__)
class Plugin(AbstractPlugin, GeneratorPlugin):
""" Plugin for running phantom tool """
OPTION_CONFIG = "config"
SECTION = PhantomConfig.SECTION
def __init__(self, core, config_section):
AbstractPlugin.__init__(self, core, config_section)
self.config = None
self.process = None
self.predefined_phout = None
self.phout_import_mode = False
self.did_phout_import_try = False
self.phantom_path = None
self.eta_file = None
self.processed_ammo_count = 0
self.phantom_start_time = time.time()
self.buffered_seconds = "2"
self.taskset_affinity = None
self.cpu_count = mp.cpu_count()
self.phantom = None
self.cached_info = None
self.phantom_stderr = None
self.exclude_markers = []
self.enum_ammo = False
@staticmethod
def get_key():
return __file__
def get_available_options(self):
opts = [
"phantom_path", "buffered_seconds", "exclude_markers", "affinity"
]
opts += [PhantomConfig.OPTION_PHOUT, self.OPTION_CONFIG]
opts += PhantomConfig.get_available_options()
return opts
def configure(self):
# plugin part
self.config = self.get_option(self.OPTION_CONFIG, '')
self.phantom_path = self.get_option("phantom_path", 'phantom')
self.enum_ammo = self.get_option("enum_ammo", False)
self.buffered_seconds = int(
self.get_option("buffered_seconds", self.buffered_seconds))
self.exclude_markers = set(
filter((lambda marker: marker != ''),
self.get_option('exclude_markers', []).split(' ')))
self.taskset_affinity = self.get_option('affinity', '')
try:
autostop = self.core.get_plugin_of_type(AutostopPlugin)
autostop.add_criterion_class(UsedInstancesCriterion)
except KeyError:
logger.debug(
"No autostop plugin found, not adding instances criterion")
self.predefined_phout = self.get_option(PhantomConfig.OPTION_PHOUT, '')
if not self.get_option(
self.OPTION_CONFIG, '') and self.predefined_phout:
self.phout_import_mode = True
if not self.config and not self.phout_import_mode:
self.phantom = PhantomConfig(self.core)
self.phantom.read_config()
def prepare_test(self):
aggregator = self.core.job.aggregator_plugin
if not self.config and not self.phout_import_mode:
# generate config
self.config = self.phantom.compose_config()
args = [self.phantom_path, 'check', self.config]
try:
result = execute(args, catch_out=True)
except OSError:
raise RuntimeError("Phantom I/O engine is not installed!")
retcode = result[0]
if retcode:
raise RuntimeError(
"Config check failed. Subprocess returned code %s" %
retcode)
if result[2]:
raise RuntimeError(
"Subprocess returned message: %s" % result[2])
reader = PhantomReader(self.phantom.phout_file)
logger.debug(
"Linking sample reader to aggregator."
" Reading samples from %s", self.phantom.phout_file)
logger.debug(
"Linking stats reader to aggregator."
" Reading stats from %s", self.phantom.stat_log)
else:
reader = PhantomReader(self.predefined_phout)
logger.debug(
"Linking sample reader to aggregator."
" Reading samples from %s", self.predefined_phout)
if aggregator:
aggregator.reader = reader
info = self.phantom.get_info()
aggregator.stats_reader = PhantomStatsReader(
self.phantom.stat_log, info)
aggregator.add_result_listener(self)
try:
console = self.core.get_plugin_of_type(ConsolePlugin)
except Exception as ex:
logger.debug("Console not found: %s", ex)
console = None
self.core.job.phantom_info = self.phantom.get_info()
if console and aggregator:
widget = PhantomProgressBarWidget(self)
console.add_info_widget(widget)
aggregator.add_result_listener(widget)
widget = PhantomInfoWidget(self)
console.add_info_widget(widget)
aggregator = self.core.get_plugin_of_type(AggregatorPlugin)
aggregator.add_result_listener(widget)
def start_test(self):
if not self.phout_import_mode:
args = [self.phantom_path, 'run', self.config]
logger.debug(
"Starting %s with arguments: %s", self.phantom_path, args)
if self.taskset_affinity != '':
args = [
self.core.taskset_path, '-c', self.taskset_affinity
] + args
logger.debug(
"Enabling taskset for phantom with affinity: %s,"
" cores count: %d", self.taskset_affinity, self.cpu_count)
self.phantom_start_time = time.time()
phantom_stderr_file = self.core.mkstemp(
".log", "phantom_stdout_stderr_")
self.core.add_artifact_file(phantom_stderr_file)
self.phantom_stderr = open(phantom_stderr_file, 'w')
self.process = subprocess.Popen(
args,
stderr=self.phantom_stderr,
stdout=self.phantom_stderr,
close_fds=True)
else:
if not os.path.exists(self.predefined_phout):
raise RuntimeError(
"Phout file not exists for import: %s" %
self.predefined_phout)
logger.warn(
"Will import phout file instead of running phantom: %s",
self.predefined_phout)
def is_test_finished(self):
if not self.phout_import_mode:
retcode = self.process.poll()
if retcode is not None:
logger.info("Phantom done its work with exit code: %s", retcode)
return abs(retcode)
else:
info = self.get_info()
if info:
eta = int(info.duration) - (
int(time.time()) - int(self.phantom_start_time))
self.publish('eta', eta)
return -1
else:
if not self.processed_ammo_count or self.did_phout_import_try != self.processed_ammo_count:
self.did_phout_import_try = self.processed_ammo_count
return -1
else:
return 0
def end_test(self, retcode):
if self.process and self.process.poll() is None:
logger.warn(
"Terminating phantom process with PID %s", self.process.pid)
self.process.terminate()
if self.process:
self.process.communicate()
else:
logger.debug("Seems phantom finished OK")
if self.phantom_stderr:
self.phantom_stderr.close()
return retcode
def post_process(self, retcode):
if not retcode:
info = self.get_info()
if info and info.ammo_count != self.processed_ammo_count:
logger.warning(
"Planned ammo count %s differs from processed %s",
info.ammo_count, self.processed_ammo_count)
return retcode
def on_aggregated_data(self, data, stat):
self.processed_ammo_count += data["overall"]["interval_real"]["len"]
logger.debug("Processed ammo count: %s/", self.processed_ammo_count)
def get_info(self):
""" returns info object """
if not self.cached_info:
if not self.phantom:
return None
self.cached_info = self.phantom.get_info()
return self.cached_info
class UsedInstancesCriterion(AbstractCriterion):
"""
Autostop criterion, based on active instances count
"""
RC_INST = 24
@staticmethod
def get_type_string():
return 'instances'
def __init__(self, autostop, param_str):
AbstractCriterion.__init__(self)
self.seconds_count = 0
self.autostop = autostop
self.threads_limit = 1
level_str = param_str.split(',')[0].strip()
if level_str[-1:] == '%':
self.level = float(level_str[:-1]) / 100
self.is_relative = True
else:
self.level = int(level_str)
self.is_relative = False
self.seconds_limit = expand_to_seconds(param_str.split(',')[1])
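        # Example parameter strings: "90%,30s" stops the test once more than
        # 90% of the configured instances stay busy for 30 seconds; "500,1m"
        # uses an absolute instance count instead of a percentage.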
try:
phantom = autostop.core.get_plugin_of_type(Plugin)
info = phantom.get_info()
if info:
self.threads_limit = info.instances
if not self.threads_limit:
raise ValueError(
"Cannot create 'instances' criterion"
" with zero instances limit")
except KeyError:
logger.warning("No phantom module, 'instances' autostop disabled")
def notify(self, data, stat):
threads = stat["metrics"]["instances"]
if self.is_relative:
threads = float(threads) / self.threads_limit
if threads > self.level:
if not self.seconds_count:
self.cause_second = (data, stat)
logger.debug(self.explain())
self.seconds_count += 1
self.autostop.add_counting(self)
if self.seconds_count >= self.seconds_limit:
return True
else:
self.seconds_count = 0
return False
def get_rc(self):
return self.RC_INST
def get_level_str(self):
"""
String value for instances level
"""
if self.is_relative:
level_str = str(100 * self.level) + "%"
else:
level_str = self.level
return level_str
def explain(self):
items = (
self.get_level_str(), self.seconds_count,
self.cause_second[0].get('ts'))
return (
"Testing threads (instances) utilization"
" higher than %s for %ss, since %s" % items)
def widget_explain(self):
items = (self.get_level_str(), self.seconds_count, self.seconds_limit)
return "Instances >%s for %s/%ss" % items, float(
self.seconds_count) / self.seconds_limit
| lgpl-2.1 | -7,101,965,361,478,808,000 | 34.682243 | 103 | 0.569408 | false | 4.090714 | true | false | false |
pgroudas/pants | src/python/pants/subsystem/subsystem.py | 1 | 3698 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.option.options import Options
class SubsystemError(Exception):
"""An error in a subsystem."""
class Subsystem(object):
"""A separable piece of functionality that may be reused across multiple tasks or other code.
Subsystems encapsulate the configuration and initialization of things like JVMs,
Python interpreters, SCMs and so on.
Subsystem instances are tied to option scopes. For example, a singleton subsystem that all tasks
share is tied to the global scope, while a private instance used by just one task is tied to
that task's scope.
A Subsystem instance initializes itself from options in a subscope (the 'qualified scope') of
the scope it's tied to. For example, a global SubsystemFoo instance gets its options from
scope 'foo', while a SubsystemFoo instance for use just in task bar.baz gets its options from
scope 'bar.baz.foo'.
TODO(benjy): Model dependencies between subsystems? Registration of subsystems?
"""
@classmethod
def scope_qualifier(cls):
"""Qualifies the options scope of this Subsystem type.
E.g., for SubsystemFoo this should return 'foo'.
"""
raise NotImplementedError()
@classmethod
def register_options(cls, register):
"""Register options for this subsystem.
Subclasses may override and call register(*args, **kwargs) with argparse arguments.
"""
@classmethod
def register_options_on_scope(cls, options, scope):
"""Trigger registration of this subsystem's options under a given scope."""
cls.register_options(options.registration_function_for_scope(cls.qualify_scope(scope)))
@classmethod
def qualify_scope(cls, scope):
return '{0}.{1}'.format(scope, cls.scope_qualifier()) if scope else cls.scope_qualifier()
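  # Example (using the hypothetical SubsystemFoo from the class docstring,
  # whose scope_qualifier() returns 'foo'):
  #   SubsystemFoo.qualify_scope('bar.baz') -> 'bar.baz.foo'
  #   SubsystemFoo.qualify_scope('')        -> 'foo'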
# The full Options object for this pants run. Will be set after options are parsed.
# TODO: A less clunky way to make option values available?
_options = None
# A cache of (cls, scope) -> the instance of cls tied to that scope.
_scoped_instances = {}
@classmethod
def global_instance(cls):
return cls._instance_for_scope(Options.GLOBAL_SCOPE)
@classmethod
def reset(cls):
"""Forget all option values and cached subsystem instances.
Used for test isolation.
"""
cls._options = None
cls._scoped_instances = {}
@classmethod
def instance_for_task(cls, task):
return cls._instance_for_scope(task.options_scope)
@classmethod
def _instance_for_scope(cls, scope):
if cls._options is None:
raise SubsystemError('Subsystem not initialized yet.')
key = (cls, scope)
if key not in cls._scoped_instances:
qscope = cls.qualify_scope(scope)
cls._scoped_instances[key] = cls(qscope, cls._options.for_scope(qscope))
return cls._scoped_instances[key]
def __init__(self, scope, scoped_options):
"""Note: A subsystem has no access to options in scopes other than its own.
TODO: We'd like that to be true of Tasks some day. Subsystems will help with that.
Task code should call instance_for_scope() or global_instance() to get a subsystem instance.
Tests can call this constructor directly though.
"""
self._scope = scope
self._scoped_options = scoped_options
@property
def options_scope(self):
return self._scope
def get_options(self):
"""Returns the option values for this subsystem's scope."""
return self._scoped_options
| apache-2.0 | 3,583,883,669,439,646,700 | 33.560748 | 98 | 0.715792 | false | 4.099778 | false | false | false |
TorleifHensvold/ITGK3 | oving9/Torleif/07_Soke_i_tekst.py | 1 | 1351 | import string
# f = open(filename)
# variabel = f.read()
# f.close()
# liste = variabel.split('\n')
# print(liste)
# my_dictionary = {}
# while liste:
# if liste[0] in my_dictionary:
# midlertidig = my_dictionary[liste[0]]
# print(midlertidig)
# midlertidig += 1
# print(midlertidig)
# my_dictionary[liste[0]] = midlertidig
# print(my_dictionary)
# liste.pop(0)
# else:
# my_dictionary[liste[0]] = 1
# liste.pop(0)
# print(my_dictionary)
# return my_dictionary
def read_from_file(filename):
f = open(filename,'r')
variabel=f.read()
f.close()
return variabel
def remove_symbols(text):
validchars = string.ascii_letters + ' '
clean_string = ''.join(a for a in text if a in validchars)
lower_clean_string = clean_string.lower()
return lower_clean_string
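# Example: remove_symbols('Hello, World!') returns 'hello world'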
def count_words(filename):
variabel = read_from_file(filename)
# print(variabel)
variabel = remove_symbols(variabel)
liste=variabel.split()
# print(liste)
my_dictionary = {}
while liste:
if liste[0] in my_dictionary:
midlertidig=1+my_dictionary[liste[0]]
my_dictionary[liste[0]]=midlertidig
liste.pop(0)
else:
my_dictionary[liste[0]]=1
liste.pop(0)
return my_dictionary
bible_dict = count_words('BIBLE.txt')
for word, value in bible_dict.items():
print(word,value)
| mit | -6,117,888,994,099,742,000 | 18.784615 | 59 | 0.647668 | false | 2.465328 | false | false | false |
masegaloeh/web-parser | tiketkai/parser.py | 1 | 1052 | from bs4 import BeautifulSoup
import urllib.request
def getLastDate(soupObj):
objList = None
lastdate = None
comboClass = soupObj.findAll(class_="itScheduleCombox")
for combo in comboClass:
if combo['name'] == 'tanggal':
objList = combo
try:
for obj in objList:
lastdate = obj.string
return lastdate
except TypeError as e:
print("Webpage structure not found. Quitting...")
return None
def fetchPage(url):
# add a header to define a custon User-Ageny
headers = { 'User-Agent' : 'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)' }
try:
req = urllib.request.Request(url, None, headers)
data = urllib.request.urlopen(req)
return data
except:
return None
#our url
url = "https://tiket.kereta-api.co.id/"
page = fetchPage(url)
mydate = None
if page is not None:
soup = BeautifulSoup(page.read())
mydate = getLastDate(soup)
if mydate is None:
print("something error. Sorry")
else:
    print(mydate)
| mit | 7,484,101,775,677,648,000 | 22.4 | 81 | 0.629278 | false | 3.566102 | false | false | false |
csxeba/brainforge | brainforge/util/persistance.py | 1 | 2344 | class Capsule:
def __init__(self, name=None, cost=None, optimizer=None, architecture=None, layers=None):
self.vname = name
self.vcost = cost
self.voptimizer = optimizer
self.varchitecture = architecture
self.vlayers = layers
def dump(self, path):
import pickle
import gzip
with gzip.open(path, "wb") as handle:
pickle.dump({k: v for k, v in self.__dict__.items() if k[0] == "v"},
handle)
@classmethod
def encapsulate(cls, network, dumppath=None):
capsule = cls(**{
"name": network.name,
"metrics": network.cost,
"optimizer": network.optimizer,
"architecture": network.layers.architecture[:],
"layers": [layer.capsule() for layer in network.layers.layers]})
if dumppath is not None:
capsule.dump(dumppath)
return capsule
@classmethod
def read(cls, path):
import pickle
import gzip
from os.path import exists
if not exists(path):
raise RuntimeError("No such capsule:", path)
new = cls()
with gzip.open(path) as handle:
new.__dict__.update(pickle.load(handle))
return new
def __getitem__(self, item):
if item not in self.__dict__:
raise AttributeError("No such item in capsule:", item)
return self.__dict__[item]
def load(capsule):
from ..learner import Backpropagation
from ..optimizers import optimizers
from ..util.shame import translate_architecture as trsl
if not isinstance(capsule, Capsule):
capsule = Capsule.read(capsule)
c = capsule
net = Backpropagation(input_shape=c["vlayers"][0][0], name=c["vname"])
for layer_name, layer_capsule in zip(c["varchitecture"], c["vlayers"]):
if layer_name[:5] == "Input":
continue
layer_cls = trsl(layer_name)
layer = layer_cls.from_capsule(layer_capsule)
net.add(layer)
opti = c["voptimizer"]
if isinstance(opti, str):
opti = optimizers[opti]()
net.finalize(cost=c["vcost"], optimizer=opti)
for layer, lcaps in zip(net.layers, c["vlayers"]):
if layer.weights is not None:
layer.set_weights(lcaps[-1], fold=False)
return net
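# Usage sketch (hedged -- `net` stands for a finalized brainforge
# Backpropagation network; the file name is illustrative):
#
#   Capsule.encapsulate(net, dumppath="model.cps")
#   restored = load("model.cps")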
| gpl-3.0 | 5,053,646,490,615,273,000 | 28.670886 | 93 | 0.584898 | false | 3.932886 | false | false | false |
pclubuiet/website | home/migrations/0005_auto_20180815_0959.py | 1 | 1621 | # Generated by Django 2.1 on 2018-08-15 09:59
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('home', '0004_resourceurl_category'),
]
operations = [
migrations.CreateModel(
name='Topic',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=128, null=True)),
],
),
migrations.RemoveField(
model_name='resourceurl',
name='resource',
),
migrations.AddField(
model_name='resource',
name='category',
field=models.CharField(choices=[('video', 'Videos'), ('blog', 'Blogs / Articles'), ('ebook', 'E-Books'), ('other', 'Others')], max_length=128, null=True),
),
migrations.AddField(
model_name='resource',
name='description',
field=models.CharField(blank=True, max_length=10240, null=True),
),
migrations.AddField(
model_name='resource',
name='url',
field=models.URLField(db_index=True, max_length=128, null=True),
),
migrations.DeleteModel(
name='ResourceURL',
),
migrations.AddField(
model_name='resource',
name='resource',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='home.Topic'),
preserve_default=False,
),
]
| gpl-3.0 | 135,660,602,289,841,000 | 32.081633 | 166 | 0.552745 | false | 4.265789 | false | false | false |
JavierGarciaD/AlgoTrader | algotrader/common/utilities.py | 1 | 3183 |
"""
Some utilities used across the program
"""
import datetime
import os
import pathlib
from functools import wraps
from itertools import tee, islice, chain
def rename_files(dir_path):
"""
Rename all files for a given extension within a folder
"""
dir_path = pathlib.Path(dir_path)
counter = 0
    # Materialize the glob once so the progress total below stays correct
    # (a generator would be exhausted after the first len() call).
    all_files = list(dir_path.glob('**/*.gz'))
for old_path in dir_path.glob('**/*.gz'):
# get the components of the path
parts = pathlib.Path(old_path).parts
parent = pathlib.Path(old_path).parent
# Construct new file path
wk = parts[-1]
yr = parts[-2]
sym = parts[-3]
new_name = sym + '_' + yr + '_' + wk
new_path = parent / new_name
# Rename
os.rename(old_path, new_path)
counter += 1
        print('Doing {} out of {}'.format(counter, len(all_files)))
def zero_bytes_files(dir_path, action=None):
"""
Perform action on small size files within a directory
Args:
dir_path:
action: 'print', 'delete', None
Returns: list of file paths
"""
zeros = []
dir_path = pathlib.Path(dir_path)
for each_file in dir_path.glob('**/*.gz'):
print('Checking file: {}'.format(each_file))
if os.stat(each_file).st_size == 100000: # size in bytes
zeros.append(each_file)
if action is None:
print('Done !!!')
elif action == 'print':
print(zeros)
elif action == 'delete':
for to_delete in zeros:
os.remove(to_delete)
print('File deleted: {}'.format(to_delete))
return zeros
def iter_islast(iterable):
"""
Generates pairs where the first element is an item from the iterable
source and the second element is a boolean flag indicating if it is the
last item in the sequence.
https://code.activestate.com/recipes/392015-finding-the-last-item-in-a-loop/
Returns: (item, islast)
"""
it = iter(iterable)
prev = it.__next__()
for item in it:
yield prev, False
prev = item
yield prev, True
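# Example: list(iter_islast('ab')) == [('a', False), ('b', True)]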
def previous_and_next(some_iterable):
"""
Generates tuple with three consecutive elements of an iterable
source where the first element is the previous element of the iteration,
the second element is the current element and the last is the next.
https://stackoverflow.com/a/1012089/3512107
Returns: (previous, item, next)
"""
prevs, items, nexts = tee(some_iterable, 3)
prevs = chain([None], prevs)
nexts = chain(islice(nexts, 1, None), [None])
return zip(prevs, items, nexts)
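# Example:
#   list(previous_and_next('abc'))
#   -> [(None, 'a', 'b'), ('a', 'b', 'c'), ('b', 'c', None)]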
def fn_timer(function):
"""
Define a decorator that measures the elapsed time in running the function.
http://www.marinamele.com/7-tips-to-time-python-scripts-and-control-memory-and-cpu-usage
Returns: print the elapsed time
"""
@wraps(function)
def function_timer(*args, **kwargs):
t0 = datetime.datetime.now()
result = function(*args, **kwargs)
t1 = datetime.datetime.now()
print("Total time running {}: {}".format(function.__name__, t1 - t0))
return result
return function_timer
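# Example:
#
#   @fn_timer
#   def slow_job():
#       ...
#
#   slow_job()  # prints e.g. "Total time running slow_job: 0:00:00.000012"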
| mit | 1,530,684,084,787,055,400 | 25.090164 | 92 | 0.61263 | false | 3.78478 | false | false | false |
ruipgpinheiro/subuser | logic/subuserCommands/subuser-dry-run.py | 1 | 5131 | #!/usr/bin/env python
# This file should be compatible with both Python 2 and 3.
# If it is not, please file a bug report.
try:
import pathConfig
except ImportError:
pass
#external imports
import sys
import os
#internal imports
import subuserlib.classes.user
import subuserlib.profile
##############################################################
helpString = """
Display the command which would be issued to launch Docker if you were to run this subuser.
For example:
$ subuser dry-run iceweasel
Will display the command used to launch the subuser iceweasel.
Please note, this is only a rough approximation for debugging purposes and there is no guarantee that the command displayed here would actually work.
"""
#################################################################################################
def dryRunTestSetup():
import sys,os,getpass
os.getuid = lambda: 1000
getpass.getuser = lambda: "travis"
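  # These monkey patches pin the uid and username so the doctest
  # transcripts below are reproducible on any machine.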
@subuserlib.profile.do_cprofile
def dryRun(args):
"""
Print the command that would have been run if this wasn't a dry run.
>>> dry_run = __import__("subuser-dry-run")
>>> dry_run.dryRunTestSetup()
>>> subuser = __import__("subuser-subuser")
>>> remove_old_images = __import__("subuser-remove-old-images")
If we dry run the basic foo test subuser, we will see the generated pre-run Dockerfile and also the docker command that will launch our subuser.
>>> dry_run.dryRun(["foo"])
The image will be prepared using the Dockerfile:
FROM 2
RUN useradd --uid=1000 travis ;export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo uid exists ; elif [ $exitstatus -eq 9 ]; then echo username exists. ; else exit $exitstatus ; fi
RUN test -d /home/travis || mkdir /home/travis && chown travis /home/travis
<BLANKLINE>
The command to launch the image is:
docker 'run' '--rm' '-i' '-e' 'HOME=/home/travis' '--workdir=/home/travis' '--net=none' '--user=1000' '--hostname' '<random-hostname>' '--entrypoint' '/usr/bin/foo' '3'
Running subusers installed through temporary repositories works as well. Here, we add a subuser named bar, run it, and then remove it again.
>>> subuser.subuser(["add","bar","--accept","bar@file:///home/travis/remote-test-repo"])
Adding subuser bar bar@file:///home/travis/remote-test-repo
Adding new temporary repository file:///home/travis/remote-test-repo
Verifying subuser configuration.
Verifying registry consistency...
Unregistering any non-existant installed images.
bar would like to have the following permissions:
Description: bar
Maintainer: fred
Executable: /usr/bin/bar
A - Accept and apply changes
E - Apply changes and edit result
A
Checking if images need to be updated or installed...
Checking if subuser bar is up to date.
Installing bar ...
Building...
Building...
Building...
Successfully built 4
Building...
Building...
Building...
Successfully built 5
Installed new image <5> for subuser bar
Running garbage collector on temporary repositories...
The actual dry-run call.
>>> dry_run.dryRun(["bar"])
The image will be prepared using the Dockerfile:
FROM 5
RUN useradd --uid=1000 travis ;export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo uid exists ; elif [ $exitstatus -eq 9 ]; then echo username exists. ; else exit $exitstatus ; fi
RUN test -d /home/travis || mkdir /home/travis && chown travis /home/travis
<BLANKLINE>
The command to launch the image is:
docker 'run' '--rm' '-i' '-e' 'HOME=/home/travis' '--workdir=/home/travis' '--net=none' '--user=1000' '--hostname' '<random-hostname>' '--entrypoint' '/usr/bin/bar' '6'
Cleanup.
>>> subuser.subuser(["remove","bar"])
Removing subuser bar
If you wish to remove the subusers image, issue the command $ subuser remove-old-images
Verifying subuser configuration.
Verifying registry consistency...
Unregistering any non-existant installed images.
Running garbage collector on temporary repositories...
>>> remove_old_images.removeOldImages([])
Removing unneeded image 5 : bar@file:///home/travis/remote-test-repo
Verifying subuser configuration.
Verifying registry consistency...
Unregistering any non-existant installed images.
Running garbage collector on temporary repositories...
Removing uneeded temporary repository: file:///home/travis/remote-test-repo
"""
if len(args) == 0 or {"help","-h","--help"} & set(args):
print(helpString)
sys.exit()
subuserName = args[0]
argsToPassToImage = args[1:]
user = subuserlib.classes.user.User()
if subuserName in user.getRegistry().getSubusers():
subuser = user.getRegistry().getSubusers()[subuserName]
print("The image will be prepared using the Dockerfile:")
print(subuser.getRunReadyImage().generateImagePreparationDockerfile())
print("The command to launch the image is:")
print(subuser.getRuntime(os.environ).getPrettyCommand(argsToPassToImage))
else:
sys.exit(subuserName + " not found.\n"+helpString+"\n The following subusers are available for use:"+str(user.getRegistry().getSubusers().keys()))
if __name__ == "__main__":
dryRun(sys.argv[1:])
| lgpl-3.0 | -8,418,473,655,893,674,000 | 38.775194 | 189 | 0.693237 | false | 3.758974 | true | false | false |
jkbrzt/httpie | tests/test_ssl.py | 1 | 5076 | import pytest
import pytest_httpbin.certs
import requests.exceptions
import ssl
import urllib3
from httpie.ssl import AVAILABLE_SSL_VERSION_ARG_MAPPING, DEFAULT_SSL_CIPHERS
from httpie.status import ExitStatus
from utils import HTTP_OK, TESTS_ROOT, http
try:
# Handle OpenSSL errors, if installed.
# See <https://github.com/jakubroztocil/httpie/issues/729>
# noinspection PyUnresolvedReferences
import OpenSSL.SSL
ssl_errors = (
requests.exceptions.SSLError,
OpenSSL.SSL.Error,
)
except ImportError:
ssl_errors = (
requests.exceptions.SSLError,
)
CERTS_ROOT = TESTS_ROOT / 'client_certs'
CLIENT_CERT = str(CERTS_ROOT / 'client.crt')
CLIENT_KEY = str(CERTS_ROOT / 'client.key')
CLIENT_PEM = str(CERTS_ROOT / 'client.pem')
# We test against a local httpbin instance which uses a self-signed cert.
# Requests without --verify=<CA_BUNDLE> will fail with a verification error.
# See: https://github.com/kevin1024/pytest-httpbin#https-support
CA_BUNDLE = pytest_httpbin.certs.where()
@pytest.mark.parametrize('ssl_version',
AVAILABLE_SSL_VERSION_ARG_MAPPING.keys())
def test_ssl_version(httpbin_secure, ssl_version):
try:
r = http(
'--ssl', ssl_version,
httpbin_secure + '/get'
)
assert HTTP_OK in r
except ssl_errors as e:
if ssl_version == 'ssl3':
# pytest-httpbin doesn't support ssl3
pass
elif e.__context__ is not None: # Check if root cause was an unsupported TLS version
root = e.__context__
while root.__context__ is not None:
root = root.__context__
if isinstance(root, ssl.SSLError) and root.reason == "TLSV1_ALERT_PROTOCOL_VERSION":
pytest.skip("Unsupported TLS version: {}".format(ssl_version))
else:
raise
class TestClientCert:
def test_cert_and_key(self, httpbin_secure):
r = http(httpbin_secure + '/get',
'--cert', CLIENT_CERT,
'--cert-key', CLIENT_KEY)
assert HTTP_OK in r
def test_cert_pem(self, httpbin_secure):
r = http(httpbin_secure + '/get',
'--cert', CLIENT_PEM)
assert HTTP_OK in r
def test_cert_file_not_found(self, httpbin_secure):
r = http(httpbin_secure + '/get',
'--cert', '/__not_found__',
tolerate_error_exit_status=True)
assert r.exit_status == ExitStatus.ERROR
assert 'No such file or directory' in r.stderr
def test_cert_file_invalid(self, httpbin_secure):
with pytest.raises(ssl_errors):
http(httpbin_secure + '/get',
'--cert', __file__)
def test_cert_ok_but_missing_key(self, httpbin_secure):
with pytest.raises(ssl_errors):
http(httpbin_secure + '/get',
'--cert', CLIENT_CERT)
class TestServerCert:
def test_verify_no_OK(self, httpbin_secure):
# Avoid warnings when explicitly testing insecure requests
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
r = http(httpbin_secure.url + '/get', '--verify=no')
assert HTTP_OK in r
@pytest.mark.parametrize('verify_value', ['false', 'fALse'])
def test_verify_false_OK(self, httpbin_secure, verify_value):
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
r = http(httpbin_secure.url + '/get', '--verify', verify_value)
assert HTTP_OK in r
def test_verify_custom_ca_bundle_path(
self, httpbin_secure_untrusted
):
r = http(httpbin_secure_untrusted + '/get', '--verify', CA_BUNDLE)
assert HTTP_OK in r
def test_self_signed_server_cert_by_default_raises_ssl_error(
self,
httpbin_secure_untrusted
):
with pytest.raises(ssl_errors):
http(httpbin_secure_untrusted.url + '/get')
def test_verify_custom_ca_bundle_invalid_path(self, httpbin_secure):
# since 2.14.0 requests raises IOError
with pytest.raises(ssl_errors + (IOError,)):
http(httpbin_secure.url + '/get', '--verify', '/__not_found__')
def test_verify_custom_ca_bundle_invalid_bundle(self, httpbin_secure):
with pytest.raises(ssl_errors):
http(httpbin_secure.url + '/get', '--verify', __file__)
def test_ciphers(httpbin_secure):
r = http(
httpbin_secure.url + '/get',
'--ciphers',
DEFAULT_SSL_CIPHERS,
)
assert HTTP_OK in r
def test_ciphers_none_can_be_selected(httpbin_secure):
r = http(
httpbin_secure.url + '/get',
'--ciphers',
'__FOO__',
tolerate_error_exit_status=True,
)
assert r.exit_status == ExitStatus.ERROR
# Linux/macOS:
# http: error: SSLError: ('No cipher can be selected.',)
# OpenBSD:
# <https://marc.info/?l=openbsd-ports&m=159251948515635&w=2>
# http: error: Error: [('SSL routines', '(UNKNOWN)SSL_internal', 'no cipher match')]
assert 'cipher' in r.stderr
| bsd-3-clause | -7,776,369,014,767,373,000 | 32.615894 | 96 | 0.615051 | false | 3.582216 | true | false | false |
dmccloskey/SBaaS_MFA | SBaaS_MFA/stage02_isotopomer_analysis_query.py | 1 | 4105 | #SBaaS
from .stage02_isotopomer_analysis_postgresql_models import *
from SBaaS_base.sbaas_base import sbaas_base
from SBaaS_base.sbaas_base_query_update import sbaas_base_query_update
from SBaaS_base.sbaas_base_query_drop import sbaas_base_query_drop
from SBaaS_base.sbaas_base_query_initialize import sbaas_base_query_initialize
from SBaaS_base.sbaas_base_query_insert import sbaas_base_query_insert
from SBaaS_base.sbaas_base_query_select import sbaas_base_query_select
from SBaaS_base.sbaas_base_query_delete import sbaas_base_query_delete
from SBaaS_base.sbaas_template_query import sbaas_template_query
class stage02_isotopomer_analysis_query(sbaas_template_query):
def initialize_supportedTables(self):
'''Set the supported tables dict for ...
'''
tables_supported = {'data_stage02_isotopomer_analysis':data_stage02_isotopomer_analysis,
};
self.set_supportedTables(tables_supported);
## Query from data_stage02_isotopomer_analysis
# query simulation_id
def get_simulationID_analysisID_dataStage02IsotopomerAnalysis(self,analysis_id_I):
        '''Query simulations that are used for the analysis'''
try:
data = self.session.query(data_stage02_isotopomer_analysis.simulation_id).filter(
data_stage02_isotopomer_analysis.analysis_id.like(analysis_id_I),
data_stage02_isotopomer_analysis.used_.is_(True)).group_by(
data_stage02_isotopomer_analysis.simulation_id).order_by(
data_stage02_isotopomer_analysis.simulation_id.asc()).all();
simulation_ids_O = [];
if data:
for d in data:
simulation_ids_O.append(d.simulation_id);
return simulation_ids_O;
except SQLAlchemyError as e:
print(e);
def add_data_stage02_isotopomer_analysis(self, data_I):
'''add rows of data_stage02_isotopomer_analysis'''
if data_I:
for d in data_I:
try:
data_add = data_stage02_isotopomer_analysis(d
#d['analysis_id'],d['simulation_id'],
#d['used_'],
#d['comment_']
);
self.session.add(data_add);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def update_data_stage02_isotopomer_analysis(self,data_I):
#TODO:
'''update rows of data_stage02_isotopomer_analysis'''
if data_I:
for d in data_I:
try:
data_update = self.session.query(data_stage02_isotopomer_analysis).filter(
data_stage02_isotopomer_analysis.id.like(d['id'])
).update(
{
'analysis_id':d['analysis_id'],
'simulation_id':d['simulation_id'],
'used_':d['used_'],
'comment_':d['comment_']},
synchronize_session=False);
except SQLAlchemyError as e:
print(e);
self.session.commit();
def initialize_datastage02_isotopomer_analysis(self):
try:
data_stage02_isotopomer_analysis.__table__.create(self.engine,True);
except SQLAlchemyError as e:
print(e);
def drop_datastage02_isotopomer_analysis(self):
try:
data_stage02_isotopomer_analysis.__table__.drop(self.engine,True);
except SQLAlchemyError as e:
print(e);
def reset_datastage02_isotopomer_analysis(self,analysis_id_I = None):
try:
if analysis_id_I:
reset = self.session.query(data_stage02_isotopomer_analysis).filter(data_stage02_isotopomer_analysis.analysis_id.like(analysis_id_I)).delete(synchronize_session=False);
self.session.commit();
except SQLAlchemyError as e:
print(e);
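# Usage sketch (hedged -- session/engine setup comes from
# SBaaS_base.sbaas_template_query and is not shown here; the analysis id
# below is illustrative):
#
#   query = stage02_isotopomer_analysis_query(...)  # arguments per sbaas_base
#   query.initialize_supportedTables()
#   ids = query.get_simulationID_analysisID_dataStage02IsotopomerAnalysis('my_analysis')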
| mit | -3,496,319,608,237,178,000 | 46.732558 | 184 | 0.583435 | false | 3.684919 | false | false | false |