code
stringlengths 2
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int32 2
1.05M
|
---|---|---|---|---|---|
# -*- coding: utf-8
'''
adapted from old code: morphology.py (July 2012)
we used x,y rather than i, j then.
USE:
reload(mor)
from armor.morphology import morphology as mor
from armor import pattern
a=pattern.a
mor.erode(a)
'''
"""
* morphology.py
* module for mathematical morphologies
* reference: e.g. http://www.idi.ntnu.no/emner/tdt4265/lectures/lecture3b.pdf - Introduction to Mathematical Morphology, by Ole Christian Eidheim, Department of Computer and Information Science
functions:
shift(M,x,y)
erode(M)
dilate(M)
beucherGradient(M)
opening(M) - morphological opening
closing(M) - " " closing
"""
import numpy as np
import itertools
from armor import pattern
dbz=pattern.DBZ
def shift(M, x, y, fill_value=-999):
    """Shift an image right by x and down by y, padding the exposed edge.

    Cells that np.roll wraps around from the opposite edge are
    overwritten with fill_value (taken from the mask fill value when a
    DBZ pattern object is passed instead of a plain array).
    """
    if isinstance(M, dbz):
        M, fill_value = M.matrix.filled(), M.matrix.fill_value
    height, width = M.shape
    shifted = np.roll(np.roll(M, y, axis=0), x, axis=1)
    # blank out the columns that wrapped horizontally
    if x > 0:
        shifted[:, 0:x] = fill_value
    elif x < 0:
        shifted[:, x:] = fill_value
    # blank out the rows that wrapped vertically
    if y > 0:
        shifted[0:y, :] = fill_value
    elif y < 0:
        shifted[y:, :] = fill_value
    return shifted
def squareNeighbourhood(size=3):
    """Offsets (di, dj) of a size-by-size square centred at (0, 0).

    Used as the structuring element for erode, dilate, etc.
    Minimum useful size is 2 (which still includes the centre).
    """
    span = range(-size // 2 + 1, size // 2 + 1)
    return [(i, j) for i in span for j in span]
def disc(radius=5):
    """Offsets of a filled disc of the given radius centred at (0, 0)."""
    r_squared = radius ** 2
    candidates = squareNeighbourhood(size=int(radius) * 2 + 1)
    return [(i, j) for (i, j) in candidates if i ** 2 + j ** 2 <= r_squared]
def erode(M, neighbourhood=squareNeighbourhood(), fill_value=-999):
    """Grayscale erosion of a 2-D array over an arbitrary neighbourhood.

    Each output pixel is the minimum over the copies of M shifted by
    every (i, j) offset in `neighbourhood` (e.g. [(0,-1),(0,0),(0,1)]
    to erode along one direction only).  DBZ pattern objects are
    unwrapped to their filled matrix first.
    """
    if isinstance(M, dbz):
        M, fill_value = M.matrix.filled(), M.matrix.fill_value
    height, width = M.shape
    # one shifted copy of the image per offset; the pointwise minimum
    # across the stack is the eroded image
    stacked = np.array([shift(M, i, j) for (i, j) in neighbourhood])
    return stacked.min(axis=0)
def dilate(M, neighbourhood=squareNeighbourhood()):
    """Grayscale dilation of a 2-D array over an arbitrary neighbourhood.

    Dual of erode(): each output pixel is the maximum over the copies
    of M shifted by every (i, j) offset in `neighbourhood`.  DBZ
    pattern objects are unwrapped to their filled matrix first.
    """
    if isinstance(M, dbz):
        M, fill_value = M.matrix.filled(), M.matrix.fill_value
    height, width = M.shape
    # pointwise maximum across the stack of shifted copies
    stacked = np.array([shift(M, i, j) for (i, j) in neighbourhood])
    return stacked.max(axis=0)
def beucherGradient(M, neighbourhood=squareNeighbourhood(5)):
    """Morphological (Beucher) gradient: dilation minus erosion.

    Dilation and erosion combined this way extract edge information
    from images (see Eidheim's morphology lecture notes).
    """
    dilated = dilate(M, neighbourhood=neighbourhood)
    eroded = erode(M, neighbourhood=neighbourhood)
    return dilated - eroded
def openeth(M, neighbourhood=squareNeighbourhood(5)):
    """Morphological opening: an erosion followed by a dilation.

    Removes small unwanted structures in the image (e.g. noise).
    """
    eroded = erode(M, neighbourhood=neighbourhood)
    return dilate(eroded, neighbourhood=neighbourhood)
def closeth(M, neighbourhood=squareNeighbourhood(5)):
    """Morphological closing: a dilation followed by an erosion.

    Dual of openeth(); see help(openeth) for background.
    """
    dilated = dilate(M, neighbourhood=neighbourhood)
    return erode(dilated, neighbourhood=neighbourhood)
def hitAndMiss(M):
    # Placeholder for the hit-and-miss transform; not implemented yet.
    pass
def intersect(M1, M2):
    """Pointwise intersection of two functions on the same domain.

    For two functions sampled over a common grid (e.g. all of Taiwan)
    the intersection is the pointwise minimum.  (Unreachable `pass`
    after the return removed.)
    """
    return np.minimum(M1, M2)
def union(M1, M2):
    """Pointwise union of two functions on the same domain.

    Dual of intersect(): the union is the pointwise maximum.
    (Unreachable `pass` after the return removed.)
    """
    return np.maximum(M1, M2)
def geodesicDilate(marker, mask, neighbourhood=squareNeighbourhood(3)):
    """Geodesic dilation: dilate the marker, then clip it under the mask."""
    dilated_marker = dilate(marker, neighbourhood=neighbourhood)
    return np.minimum(dilated_marker, mask)
def geodesicErode(marker, mask, neighbourhood=squareNeighbourhood(3)):
    """Geodesic erosion: erode the marker, then lift it above the mask.

    The dual of geodesicDilate; can be used in getMinima.
    NOT TESTED ALONE.
    """
    eroded_marker = erode(marker, neighbourhood=neighbourhood)
    return np.maximum(eroded_marker, mask)
def grayscaleReconstruct(marker, mask, neighbourhood=squareNeighbourhood(3)):
    """Grayscale reconstruction of `marker` under `mask`.

    Iterates geodesic dilation until a fixed point is reached, i.e. the
    union over all n >= 1 of geodesic dilations of the marker inside
    the mask (J. C. Nunes et al., 2005, p.179).
    """
    current = marker
    while True:
        dilated = geodesicDilate(current, mask, neighbourhood=neighbourhood)
        if np.array_equal(dilated, current):
            # stabilised: this is the reconstruction
            return dilated
        current = dilated
def getMaxima(M, neighbourhood=squareNeighbourhood(3)):
    """Regional maxima of the image M via grayscale reconstruction.

    Computes I - Irec_I(I - 1) (J. C. Nunes et al., 2005, p.179).
    """
    marker = M - 1
    reconstruction = grayscaleReconstruct(marker=marker, mask=M,
                                          neighbourhood=neighbourhood)
    return M - reconstruction
def getMinima(M):
    """Regional minima of M, computed as the dual of getMaxima on -M."""
    # lazy solution: minima of M are maxima of -M
    return 1 - getMaxima(-M)
def findRidges(M):
    """Experimental ridge finder using directional regional maxima.

    Returns the maxima of M computed with 3-pixel line neighbourhoods
    along the x, y and both diagonal directions, in that order.
    """
    along_x = [(-1, 0), (0, 0), (1, 0)]
    along_y = [(0, -1), (0, 0), (0, 1)]
    along_diag = [(-1, -1), (0, 0), (1, 1)]
    along_antidiag = [(-1, 1), (0, 0), (1, -1)]
    return (getMaxima(M, neighbourhood=along_x),
            getMaxima(M, neighbourhood=along_y),
            getMaxima(M, neighbourhood=along_diag),
            getMaxima(M, neighbourhood=along_antidiag))
| yaukwankiu/armor | geometry/morphology.py | Python | cc0-1.0 | 7,056 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4 nu
from __future__ import (unicode_literals, absolute_import,
division, print_function)
import logging
import sys
import os
import time
import datetime
import platform
from subprocess import call, check_output, STDOUT
from collections import OrderedDict
import easygui
import chardet
SYSTEM = platform.system()
if SYSTEM == "Windows":
    # silence py2exe log
    sys.stderr = sys.stdout
# external binaries, resolved per platform
FFMPEG = "ffmpeg.exe" if SYSTEM == 'Windows' else "./ffmpeg"
COCOAP = "./CocoaDialog.app/Contents/MacOS/CocoaDialog"
TOASTERP = "toast.exe"
# audio codec arguments handed to ffmpeg; "copy" keeps the source track
# AUDIO_CODEC = []  # defaults to AAC
AUDIO_CODEC = ["-acodec", "copy"]
# AUDIO_CODEC = ["-acodec", "libmp3lame"]
# French labels used by the yes/no dialogs
YES = "Oui"
NO = "Non"
YESNO = OrderedDict([(True, YES), (False, NO)])
# selectable station logos: file name -> display label; None = no overlay
LOGOS = OrderedDict([
    ('logo_ortm.png', 'ORTM'),
    ('logo_tm2.png', 'TM2'),
    (None, 'AUCUN'),
])
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('ORTM-VC')
# True when running from a frozen (py2exe) bundle
is_frozen = lambda: hasattr(sys, 'frozen')
# wrap a path in double quotes; applied on Windows only by safe_str
safe_path = lambda x: '"{}"'.format(x)
safe_str = lambda x: safe_path(x) if SYSTEM == "Windows" else x
def get_root():
    """Absolute directory of the running script or frozen executable."""
    def decode(p):
        # Windows hands us byte paths; decode with the filesystem encoding
        return unicode(p, sys.getfilesystemencoding()) \
            if SYSTEM == "Windows" else p
    base = sys.executable if is_frozen() else __file__
    return os.path.dirname(decode(os.path.abspath(base)))
ROOT_DIR = get_root()  # directory holding this script / frozen exe
def seconds_from_ffmepg(timestr):
    """Convert an ffmpeg "HH:MM:SS[.micro]" string to whole seconds."""
    hour, minute, rest = timestr.split(':', 2)
    # the seconds part may carry a fractional tail after a dot
    second, _, micro = rest.partition('.')
    return seconds_from_interval(end_hour=int(hour),
                                 end_minute=int(minute),
                                 end_second=int(second),
                                 end_microsecond=int(micro) if micro else 0)
def seconds_from_interval(start_hour=0, start_minute=0,
                          start_second=0, start_microsecond=0,
                          end_hour=0, end_minute=0,
                          end_second=0, end_microsecond=0):
    """Whole seconds between two (h, m, s, microsecond) clock times."""
    # anchor both times on an arbitrary common date so they subtract
    start = datetime.datetime(1970, 1, 1, start_hour, start_minute,
                              start_second, start_microsecond)
    end = datetime.datetime(1970, 1, 1, end_hour, end_minute,
                            end_second, end_microsecond)
    return int((end - start).total_seconds())
def nb_seconds_as_ffmepg(seconds):
    """Format a number of seconds as an ffmpeg "HH:MM:SS.0" timestamp."""
    as_struct = time.gmtime(seconds)
    return time.strftime("%H:%M:%S.0", as_struct)
def duration_from_path(fpath):
    """Return the "Duration:" string ffmpeg reports for a file, or None.

    Runs a near-no-op 0.1s encode into a trash file purely so ffmpeg
    prints the stream information, then scans that output.
    """
    trash_path = os.path.join(ROOT_DIR if SYSTEM == "Windows" else "/tmp",
                              "trash.mp4")
    ffmpeg_out = syscall([FFMPEG, "-y", "-ss", "00:00:00.0",
                          "-i", safe_str(fpath), "-to", "00:00:00.1",
                          "-strict", "-2", trash_path],
                         shell=True, with_output=True)
    duration = None
    for line in ffmpeg_out.split("\n"):
        if "Duration:" in line:
            duration = line.split(',', 1)[0].split(None, 1)[-1]
    return duration
def syscall(args, shell=False, with_print=False, with_output=False):
    """Execute an external command.

    `args` may be a whitespace-separated string or a list; with
    shell=True the list is re-joined into one string (for shell
    specifics).  With with_output=True the combined stdout/stderr is
    captured, decoded using the charset chardet detects when possible,
    and returned; otherwise the process return code is returned.
    """
    if isinstance(args, basestring):
        args = args.split()
    if with_print:
        logger.debug(u"-----------\n" + u" ".join(args) + u"\n-----------")
    if shell:
        args = ' '.join(args)
    if not with_output:
        return call(args, shell=shell)
    output = check_output(args, stderr=STDOUT)
    encoding = chardet.detect(output)
    if encoding is not None and encoding['encoding'] is not None:
        return output.decode(encoding['encoding'])
    return output
def display_error(message, title="Erreur", exit=True):
    """Show an error dialog; terminate with status 1 unless exit=False."""
    easygui.msgbox(title=title, msg=message)
    if not exit:
        return
    sys.exit(1)
def confirm_bubble(message):
    """Pop a desktop notification saying a conversion finished."""
    title = "Conversion terminee"
    if SYSTEM == 'Windows':
        # toast.exe gets an ascii-only message (accents stripped)
        syscall([TOASTERP, "-t", title,
                 "-m", message.encode('ascii', 'ignore')])
        return
    syscall([COCOAP, "bubble", "--icon", "document", "--title",
             title, "--text", message], with_output=True)
def yesnodialog(title, msg, choices, default=None):
    """Show a button box built from an OrderedDict; return the chosen key.

    Cancelling or closing the dialog exits the program with status 0.
    """
    picked = easygui.indexbox(
        title=title,
        msg=msg,
        choices=choices.values(),
        default_choice=default,
        cancel_choice=None)
    if picked is None:
        sys.exit(0)
    # map the button index back onto the OrderedDict key
    return choices.keys()[picked]
def ffmpeg_encode(input, output, logo=False,
                  start_after=None,
                  stop_after=None):
    """Encode `input` to an mp4 at `output` through ffmpeg.

    Optionally overlays `logo` (image placed 20px from the top-right
    corner) and/or cuts the [start_after, stop_after] segment, both
    given in seconds.  Note the argument placement: -ss goes before
    -i and -to before the output path.
    """
    if not os.path.exists(input):
        logger.error("Input file does not exist.")
        return
    if os.path.exists(output):
        logger.debug("Output file `{}` exists. removing.".format(output))
        os.unlink(output)
    args = [FFMPEG, "-y"]
    # start at defined position
    if start_after:
        args += ["-ss", nb_seconds_as_ffmepg(start_after)]
    # input movie file
    args += ["-i", safe_path(input)]
    # logo overlay
    if logo:
        args += ['-vf',
                 safe_str("movie={} [watermark];[in][watermark] "
                          "overlay=main_w-overlay_w-20:20 [out]").format(logo)]
    # stop at defined position (relative to the -ss start point)
    if stop_after:
        args += ["-to", nb_seconds_as_ffmepg(stop_after - (start_after or 0))]
    # ouput mp4 file
    args += ["-strict", "-2", safe_path(output)]
    syscall(args, with_print=True, shell=True)
def ffmpeg_audio_encode(input, output, start_after=None, stop_after=None):
    """Extract the audio track of `input` into `output` through ffmpeg.

    Mirrors ffmpeg_encode's optional start_after/stop_after cut (in
    seconds); the codec arguments come from the module-level
    AUDIO_CODEC list.
    """
    if not os.path.exists(input):
        logger.error("Input file does not exist.")
        return
    if os.path.exists(output):
        logger.debug("Output file `{}` exists. removing.".format(output))
        os.unlink(output)
    args = [FFMPEG, "-y"]
    # start at defined position
    if start_after:
        args += ["-ss", nb_seconds_as_ffmepg(start_after)]
    # input movie file
    args += ["-i", input]
    # exlude video and select audio codec
    # args += ["-vn"]
    args += ["-map", "0:a"]
    # audio codec
    args += AUDIO_CODEC
    # stop at defined position (relative to the -ss start point)
    if stop_after:
        args += ["-to", nb_seconds_as_ffmepg(stop_after - (start_after or 0))]
    # ouput mp4 file
    args += ["-strict", "-2", output]
    syscall(args, with_print=False)
def convert_file(fpath):
    """Interactive conversion pipeline for one video file.

    Asks the operator (easygui dialogs, French labels) which logo to
    overlay, whether to encode the full video, which clips to cut and
    whether to also export audio-only versions; shows a summary for
    confirmation, drives ffmpeg for each output into a "WEB-<title>"
    folder next to the source, and finally opens that folder.
    """
    logger.info("Started converter for {}".format(fpath))
    if not os.path.exists(fpath):
        display_error("Le fichier `{}` n'existe pas."
                      .format(fpath))
    # options
    logo = None
    has_clips = False
    encode_full = False
    encode_audio = False
    clips = []
    # gather basic infor about video (help ensure it is a video)
    folder = os.path.dirname(fpath)
    fname = os.path.basename(fpath)
    title = fname.rsplit('.', 1)[0]
    dest_folder = os.path.join(folder, "WEB-{}".format(title))
    duration = duration_from_path(fpath)
    duration_seconds = seconds_from_ffmepg(duration)
    # ask about logo
    logo = yesnodialog(
        title=title,
        msg="Quel logo ajouter sur la/les vidéos ?",
        choices=LOGOS)
    # ask about full encoding
    encode_full = yesnodialog(
        title=title,
        msg="Convertir la vidéo complète ?",
        choices=YESNO,
        default=YES)
    # ask about clips
    has_clips = yesnodialog(
        title="Découper la vidéo ?",
        msg="La vidéo doit-elle être découpée en clips ?",
        choices=YESNO,
        default=YES)
    if has_clips:
        done_prepping_clips = False
        clip_counter = 1
        # keep prompting for "start end name" lines until cancelled
        while not done_prepping_clips:
            clip_data = easygui.enterbox(
                title="Clip nº{n}".format(t=title, n=clip_counter),
                default="00:00:00 00:02:00 Sujet",
                msg="Début, fin et nom au format:\nHH:MM:SS "
                    "HH:MM:SS NOM DU CLIP\nDurée vidéo complète: {}"
                    .format(duration))
            # user entered empty string (cancel a 'next' event)
            if not clip_data:
                break
            start_ts, end_ts, name = clip_data.split(None, 2)
            try:
                nbs_start = seconds_from_ffmepg(start_ts)
                nbs_end = seconds_from_ffmepg(end_ts)
            except:
                display_error("Format incorrect.\nMerci de reprendre",
                              exit=False)
                continue
            else:
                # sanity checks: clip must be forward and inside the video
                if nbs_end < nbs_start:
                    display_error("La fin du clip ne peut pas être antérieure "
                                  "au début du clip.\n"
                                  "Merci de reprendre.", exit=False)
                    continue
                if nbs_start > duration_seconds or nbs_end > duration_seconds:
                    display_error("Le clip dépasse la durée de la vidéo.\n"
                                  "Merci de reprendre.", exit=False)
                    continue
                clips.append((nbs_start, nbs_end, name))
                clip_counter += 1
    # audio-only versions
    encode_audio = yesnodialog(
        title="Encoder les pistes audio ?",
        msg="Les vidéos doivent-elles être exportées "
            "en version audio aussi ?",
        choices=YESNO,
        default=NO)
    # summary
    yn = lambda x: YES if x else NO
    summary = ("Encoding summary:\n"
               "Adding logo: {l}{l2}\n"
               "Encoding full video: {f} ({d})\n"
               "Encoding clips: {c}\n"
               .format(l=yn(logo),
                       l2=" ({})".format(logo) if logo else "",
                       f=yn(encode_full),
                       d=duration,
                       c=len(clips) if len(clips) else NO))
    for ss, es, name in clips:
        summary += "\t{n}: {s} -> {e} ({t}s)\n".format(
            n=name,
            s=nb_seconds_as_ffmepg(ss),
            e=nb_seconds_as_ffmepg(es),
            t=(es - ss))
    summary += "Encoding audio-only also: {a}".format(a=yn(encode_audio))
    logger.info(summary)
    # Everything's ready. let's confirm
    confirm = yesnodialog(
        title=title,
        msg=summary,
        choices=YESNO,
        default=YES)
    if not confirm:
        sys.exit()
    # create an output folder for our mp4s
    try:
        logger.info("Creating destination folder")
        os.mkdir(dest_folder)
    except OSError:
        # folder probably already exists; keep going
        logger.debug("\tFailed.")
    # Encoding main title
    if encode_full:
        logger.info("Encoding main video")
        fname_full = "COMPLET-{}.mp4".format(title)
        fpath_full = os.path.join(dest_folder, fname_full)
        ffmpeg_encode(input=fpath, logo=logo, output=fpath_full)
        confirm_bubble("La video {} a ete convertie et est prete pour envoi"
                       .format(title))
        if encode_audio:
            # the audio export reads the freshly encoded full video
            logger.info("Encoding main video's audio")
            fname_afull = "COMPLET-{}.aac".format(title)
            fpath_afull = os.path.join(dest_folder, fname_afull)
            ffmpeg_audio_encode(input=fpath_full, output=fpath_afull)
            confirm_bubble("La version audio de la video {} "
                           "a ete convertie et est prete pour envoi"
                           .format(title))
    # convert clips
    for clip_id, clip_data in enumerate(clips):
        clip_id += 1  # human-friendly 1-based numbering
        logger.info("Encoding clip #{}".format(clip_id))
        start_sec, end_sec, name = clip_data
        fname_clip = "CLIP{}-{}.mp4".format(clip_id, name)
        fpath_clip = os.path.join(dest_folder, fname_clip)
        ffmpeg_encode(input=fpath, logo=logo, output=fpath_clip,
                      start_after=start_sec, stop_after=end_sec)
        confirm_bubble("Le clip#{}: {} a ete converti et est pret pour envoi"
                       .format(clip_id, name))
        logger.info("Conversion of clip #{} ({}) has completed."
                    .format(clip_id, name))
        if encode_audio:
            logger.info("Encoding clip #{}' audio track".format(clip_id))
            fname_aclip = "CLIP{}-{}.aac".format(clip_id, name)
            fpath_aclip = os.path.join(dest_folder, fname_aclip)
            ffmpeg_audio_encode(input=fpath_clip, output=fpath_aclip)
            confirm_bubble("La version audio du clip#{}: {} "
                           "a ete convertie et est prete pour envoi"
                           .format(clip_id, name))
    logger.info("Complete\n"
                "##################################\n"
                "All done. Opening output folder...\n"
                "##################################")
    if SYSTEM == "Windows":
        syscall(["explorer.exe", '/e,"{}"'.format(dest_folder)],
                shell=True)
    else:
        syscall(["open", dest_folder])
if __name__ == '__main__':
    # usage: convert-video.py <path-to-video>
    if len(sys.argv) < 2:
        logger.error("Missing video path parameter")
        sys.exit(1)
    fpath = sys.argv[1]
    if SYSTEM != 'Windows':
        # drop backslash escapes from shell-quoted / drag-and-dropped paths
        fpath = fpath.replace('\\', '')
    convert_file(fpath)
| yeleman/ORTM-VC | convert-video.py | Python | cc0-1.0 | 13,034 |
from streamajoker import plugin
from streamajoker.scrapers import scraper
from streamajoker.ga import tracked
from streamajoker.caching import cached_route
from streamajoker.utils import ensure_fanart
from streamajoker.library import library_context
BASE_URL = plugin.get_setting("base_btdigg")
HEADERS = {
    "Referer": BASE_URL,
}
# values accepted by BTDigg's "order" query-string parameter
SORT_RELEVANCE = 0
SORT_POPULARITY = 1
SORT_ADDTIME = 2
SORT_SIZE = 3
SORT_FILES = 4
@scraper("BTDigg - DHT Search Engine", "%s/logo.png" % BASE_URL)
@plugin.route("/btdigg")
@ensure_fanart
@tracked
def btdigg_index():
    """Entry point: go straight to the search dialog."""
    plugin.redirect(plugin.url_for("btdigg_search"))
@plugin.route("/btdigg/search/<query>/<sort>/<page>")
@library_context
@ensure_fanart
@tracked
def btdigg_page(query, sort, page):
    """Yield one playable item per BTDigg result plus a next-page link."""
    from bs4 import BeautifulSoup
    from streamajoker.utils import url_get
    html_data = url_get("%s/search" % BASE_URL, headers=HEADERS, params={
        "order": sort,
        "q": query,
        "p": page,
    })
    soup = BeautifulSoup(html_data, "html5lib")
    name_nodes = soup.findAll("td", "torrent_name")
    # attribute tables alternate with other rows: keep every second one
    attr_nodes = soup.findAll("table", "torrent_name_tbl")[1::2]
    for name_node, attr_node in zip(name_nodes, attr_nodes):
        attr_values = attr_node.findAll("span", "attr_val")
        label = "%s (%s, DLs:%s)" % (name_node.find("a").text,
                                     attr_values[0].text,
                                     attr_values[2].text)
        yield {
            "label": label,
            "path": plugin.url_for("play", uri=attr_node.find("a")["href"]),
            "is_playable": True,
        }
    yield {
        "label": ">> Next page",
        "path": plugin.url_for("btdigg_page", query=query, sort=sort,
                               page=int(page) + 1),
        "is_playable": False,
    }
@plugin.route("/btdigg/search")
@tracked
def btdigg_search():
    """Prompt for a query (unless one was passed) and open the results."""
    query = plugin.request.args_dict.pop("query", None)
    if not query:
        query = plugin.keyboard("", "streamajoker - BTDigg - Search")
    if query:
        plugin.redirect(plugin.url_for("btdigg_page", query=query,
                                       sort=SORT_POPULARITY, page=0,
                                       **plugin.request.args_dict))
| Mafarricos/Mafarricos-modded-xbmc-addons | plugin.video.streamajoker/resources/site-packages/streamajoker/scrapers/btdigg.py | Python | gpl-2.0 | 2,022 |
# TODO: Docstrings
import processes.base
class MACChangerProcess(processes.base.BaseProcess):
    """Process wrapper around the `macchanger` command-line tool.

    Every method is an unported stub: the original Java implementation
    is kept verbatim in a string literal for reference, and the method
    raises NotImplementedError until it is converted to Python.
    """
    def set_specific_mac(self, interface, new_mac):
        '''Assign a specific MAC address to a network interface.

        :param interface: network interface name (e.g. "eth0")
        :param new_mac: MAC address string to assign
        :return: nothing; raises NotImplementedError (not yet ported)
        '''
        # TODO: Convert to Python
        """
        try
        {
            if (CAircrackUtilities.ValidMACAddress(strNewMAC))
            {
                String astrCommand[] = new String[] {"macchanger", "-m", strNewMAC, strInterface};
                super.RunProcess(astrCommand, true, true, true);
            }
        }
        catch (Exception excError)
        {
            CUtilities.WriteLog(excError);
        }
        """
        raise NotImplementedError
    def set_random_mac(self, interface):
        '''Assign a fully random MAC address to a network interface.

        :param interface: network interface name (e.g. "eth0")
        :return: nothing; raises NotImplementedError (not yet ported)
        '''
        # TODO: Convert to Python
        """
        try
        {
            String astrCommand[] = new String[] {"macchanger", "-A", strInterface};
            super.RunProcess(astrCommand, true, true, true);
        }
        catch (Exception excError)
        {
            CUtilities.WriteLog(excError);
        }
        """
        raise NotImplementedError
    def get_manufacturers(self, search):
        '''List known hardware manufacturers matching a search string.

        :param search: substring passed to `macchanger --list=`
        :return: nothing; raises NotImplementedError (not yet ported)
        '''
        # TODO: Convert to Python
        """
        try
        {
            String astrCommand[] = new String[] {"macchanger", "--list=" + strSearch};
            super.RunProcess(astrCommand, true, true, false);
        }
        catch (Exception excError)
        {
            CUtilities.WriteLog(excError);
        }
        """
        raise NotImplementedError
    def get_interface_mac_and_manufacturer(self, interface):
        '''Read an interface's current MAC address and its manufacturer.

        :param interface: network interface name (e.g. "eth0")
        :return: nothing; raises NotImplementedError (not yet ported;
            the Java version returned [mac_address, manufacturer])
        '''
        # TODO: Convert to Python
        """
        String astrResult[] = null;
        try
        {
            String astrCommand[] = new String[] {"macchanger", "-s", strInterface};
            super.RunProcess(astrCommand, true, true, true);
            BufferedReader brOutput = new BufferedReader(GetOutput());
            String strCurrentInformation = brOutput.readLine().replaceAll("Current MAC: ", "").trim();
            Matcher mhrMatcher = CAircrackUtilities.GetMACAddressFormat().matcher(strCurrentInformation);
            if (mhrMatcher.find())
            {
                MatchResult rstResult = mhrMatcher.toMatchResult();
                int intMACStartIndex = rstResult.start();
                int intMACEndIndex = rstResult.end();
                String strMACAddress = strCurrentInformation.substring(intMACStartIndex, intMACEndIndex);
                String strManufacturer = strCurrentInformation.substring(strCurrentInformation.indexOf("(") + 1, strCurrentInformation.lastIndexOf(")"));
                astrResult = new String[] {strMACAddress, strManufacturer};
            }
        }
        catch (Exception excError)
        {
            CUtilities.WriteLog(excError);
        }
        return astrResult;
        """
        raise NotImplementedError
| pbromwelljr/crack_py | processes/macchanger.py | Python | gpl-2.0 | 3,226 |
import time
from mutagen.flac import FLAC
from mutagen.easyid3 import EasyID3
from mutagen.id3 import ID3NoHeaderError
from mutagen.mp3 import MP3
from schema import *
def scan_tag(player, pause = None):
    """Read audio tags for every song not yet scanned with scan type 'tag'.

    Selects songs having no 'tag' row in Scan, reads their ID3/FLAC
    metadata with mutagen, stores each found value as a Tag row,
    updates the song length, and records a Scan row marking
    success/failure.  Stops early once the player is no longer alive;
    sleeps `pause` seconds between songs when given, to throttle load.
    """
    session = Session()
    # anti-join: songs without a previous 'tag' scan
    prescanned = session.query(Scan.song_id).filter(Scan.scan == 'tag').subquery()
    tracks = session.query(Song).outerjoin((prescanned, Song.id == prescanned.c.song_id)).filter(prescanned.c.song_id == None).all()
    session.close()
    for song in tracks:
        if not player.alive:
            break
        path = song.path
        info = {}
        # fresh session (and transaction) per song
        session = Session()
        session.begin()
        try:
            if song.mimetype == "audio/mpeg":
                tagobj = EasyID3(path)
                fileobj = MP3(path)
                info['bitrate'] = int(fileobj.info.bitrate)
            elif song.mimetype == "audio/flac":
                tagobj = FLAC(path)
                fileobj = tagobj
            else:
                # NOTE(review): this `continue` leaves the begun session
                # neither committed nor closed and writes no Scan row, so
                # unknown mimetypes are retried on every run — confirm.
                print 'unknown mimetype', song.mimetype
                continue
            # every tag is optional: missing keys are simply skipped
            try:
                info['title'] = tagobj['title'][0]
            except:
                pass
            try:
                info['year'] = tagobj['date'][0]
            except:
                pass
            try:
                info['track'] = tagobj['tracknumber'][0]
            except:
                pass
            try:
                length = int(fileobj.info.length)
            except:
                length = None
            try:
                info['album'] = tagobj['album'][0]
            except:
                pass
            try:
                info['artist'] = tagobj['artist'][0]
            except:
                pass
            # update
            if length:
                song.length = length
                session.add(song)
            if info:
                for key in info.iterkeys():
                    tag = Tag(song.id, key, info[key])
                    session.add(tag)
            success = True
        except:
            # any tagging failure is recorded as an unsuccessful scan
            success = False
        scan = Scan(song.id, 'tag', success)
        session.add(scan)
        session.commit()
        session.close()
        if pause:
            time.sleep(pause)
| tobes/jukebox | scan.py | Python | gpl-2.0 | 2,236 |
from django import forms
from django.db import models
from django.contrib.auth.models import User
from datetime import *
from django.utils import timezone
"""
Modelo de area
"""
class Area(models.Model):
    """Area of work that groups professionals (see Profesional.area)."""
    # Atributos
    is_active = models.BooleanField(default=True)  # soft-delete flag
    nombre = models.CharField(max_length=30)
    def __unicode__(self):
        return self.nombre
    class Meta:
        verbose_name = 'area'
        verbose_name_plural = 'areas'
"""
Modelo de profesional
:Usuario: Usuario del sistema, hereda del modelo User
:RNP: Registro nacional de proveedores
"""
class Profesional(User):
    """Professional user of the system; extends the auth User model.

    RNP = Registro Nacional de Proveedores (national providers registry).
    """
    # Relaciones
    area = models.ForeignKey(Area)
    # Atributos
    rnp = models.CharField(max_length=15)            # providers-registry number
    dni = models.CharField(max_length=15)            # national identity document
    num_matricula = models.CharField(max_length=15)  # professional licence number
    tel_personal = models.CharField(max_length=20)   # personal phone number
    cuit = models.CharField(max_length=20)           # tax identification number
    def __unicode__(self):
        return self.first_name + " " + self.last_name
    class Meta:
        verbose_name = 'profesional'
        verbose_name_plural = 'profesionales'
"""
Modelo de paciente
"""
class Paciente(models.Model):
    """Patient record, including insurance and diagnosis details."""
    # Atributos
    is_active = models.BooleanField(default=True)      # soft-delete flag
    nombres = models.CharField(max_length=40)
    apellidos = models.CharField(max_length=40)
    dni = models.CharField(max_length=15)              # national identity document
    obra_social = models.CharField(max_length=20)      # health insurer
    numero_afiliado = models.CharField(max_length=30)  # insurance member number
    fecha_nacimiento = models.DateField(blank=False)
    diagnostico = models.CharField(max_length=300)
    foto = models.ImageField(upload_to='profile_images', blank=True)
    def __unicode__(self):
        return self.nombres + " " + self.apellidos
    def edad(fecha_nacimiento):
        # Age in whole years as of today.
        # NOTE(review): declared without `self`; on an instance call the
        # instance itself is bound to `fecha_nacimiento`, so as written
        # this only works as Paciente.edad(<date>) — confirm intended use.
        hoy = date.today()
        return hoy.year - fecha_nacimiento.year - ((hoy.month, hoy.day) < (fecha_nacimiento.month, fecha_nacimiento.day))
    class Meta:
        verbose_name = 'paciente'
        verbose_name_plural = 'pacientes'
"""
Modelo de informe
"""
class Informe(models.Model):
    """Report written by a professional about a patient."""
    # Relaciones
    paciente = models.ForeignKey(Paciente)
    profesional = models.ForeignKey(Profesional)
    # Atributos
    is_active = models.BooleanField(default=True)  # soft-delete flag
    fecha = models.DateField(blank=False, default=timezone.now)
    contenido = models.CharField(max_length=400)   # report body text
    def __unicode__(self):
        return "Informe de "+self.paciente.nombres+" del "+self.fecha.strftime("%d/%m/%Y")
    class Meta:
        verbose_name = 'informe'
        verbose_name_plural = 'informes'
"""
Modelo de presupuesto
"""
class Presupuesto(models.Model):
    """Treatment budget quoted by a professional for a patient."""
    # Relaciones
    paciente = models.ForeignKey(Paciente)
    profesional = models.ForeignKey(Profesional)
    # Atributos
    is_active = models.BooleanField(default=True)             # soft-delete flag
    tratamiento_prestacion = models.CharField(max_length=50)  # treatment/service
    domicilio_prestacion = models.CharField(max_length=40)    # service address
    # cost of one hour of service
    costo_hora = models.IntegerField()
    costo_mensual = models.IntegerField()
    # date the budget was generated
    fecha_creacion = models.DateField(default=datetime.now)
    def __unicode__(self):
        return self.paciente.nombres+" "+self.paciente.apellidos+" "+self.fecha_creacion.strftime("%d/%m/%Y")
    class Meta:
        verbose_name = 'presupuesto'
        verbose_name_plural = 'presupuestos'
"""
Modelo de un Horario, necesario para poder crear un presupueso
# Dia #Hora de entrada #Hora de salida
"""
class Horario(models.Model):
    """Schedule entry (day, start/end time) attached to a Presupuesto."""
    # Relaciones
    presupuesto = models.ForeignKey(Presupuesto)
    # Atributos
    dia = models.CharField(max_length=7)              # day of the week
    hora_entrada = models.CharField(max_length = 20)  # start time
    hora_salida = models.CharField(max_length = 20)   # end time
    cantidad_horas = models.IntegerField(blank=True)  # number of hours
    def __unicode__(self):
        return self.dia+" de "+self.hora_entrada+" a "+self.hora_salida
# -*- coding: utf-8 -*-
'''The app module, containing the app factory function.'''
from flask import Flask, render_template
from blogaggregator.settings import ProdConfig
from blogaggregator.assets import assets
from blogaggregator.extensions import (
bcrypt,
cache,
db,
login_manager,
migrate,
debug_toolbar,
gravatar,
)
from blogaggregator import public, user
def create_app(config_object=ProdConfig):
    '''Application factory, as explained here:
    http://flask.pocoo.org/docs/patterns/appfactories/

    :param config_object: The configuration object to use.
    '''
    application = Flask(__name__)
    application.config.from_object(config_object)
    register_extensions(application)
    register_blueprints(application)
    register_errorhandlers(application)
    return application
def register_extensions(app):
    """Bind every Flask extension to the application instance."""
    for extension in (assets, bcrypt, cache, db, login_manager,
                      debug_toolbar):
        extension.init_app(app)
    # migrations need the db handle as well
    migrate.init_app(app, db)
    gravatar.init_app(app)
    gravatar.size = 500
    return None
def register_blueprints(app):
    """Attach the public and user blueprints to the application."""
    for blueprint in (public.views.blueprint, user.views.blueprint):
        app.register_blueprint(blueprint)
    return None
def register_errorhandlers(app):
    """Serve <status>.html templates for the 401/404/500 error pages."""
    def render_error(error):
        # HTTPExceptions carry a `code` attribute; anything else is a 500
        status = getattr(error, 'code', 500)
        return render_template("{0}.html".format(status)), status
    for status in (401, 404, 500):
        app.errorhandler(status)(render_error)
    return None
| andre-geldenhuis/bloggregator | blogaggregator/blogaggregator/app.py | Python | gpl-2.0 | 1,580 |
'''
Created on Nov 1, 2011
@author: hampt
'''
import sys
from commons import bpRoot, mfRoot, parseGOAnnotations, GOPrediction
from f1Measure import maxf1Measure
'''
Created on Sep 25, 2011
@author: hampt
'''
from multiprocessing.pool import Pool
from operator import itemgetter
def calcThreshMeasure(goPrediction):
    """Average precision/recall over all targets at 100 score thresholds.

    For every target, threshMeasure() yields per-threshold
    (recall, precision) pairs; here precision is averaged over the
    targets that have a surviving prediction at that threshold, while
    recall is summed and divided by the total number of targets.
    Prints and returns the 100 (avg_precision, avg_recall) tuples.
    """
    #print "calcing top N measure"
    returnVals = []
    inputs = [(target, goPrediction) for target in goPrediction.targetToTermToScore.keys()]
    p=None
    # multiprocessing variant kept for reference, currently disabled:
    # if len(inputs) > 1000:
    #     pass
    ##     p=Pool(processes=10)
    ##     results = p.map(topNmeasure, inputs,chunksize=50)
    # else:
    results = map(threshMeasure, inputs)
    for i in range(100):
        allPrec = []
        allRecs = []
        for result in results:
            # skip targets without annotations and (-1,-1) placeholders
            if result != None and len(result) > 0 and result[i] != (-1,-1):
                if len(result) >= i+1:
                    allRecs.append(result[i][0])
                    allPrec.append(result[i][1])
        # if i==0:
        #     print "First prec: " + str(allPrec)
        if len(allPrec) == 0:
            returnVals.append((0, 0))
        else:
            returnVals.append((sum(allPrec) / len(allPrec), sum(allRecs) / goPrediction.numberOfTargets))
    print "\n".join([str(tuplei) for tuplei in returnVals])
    return returnVals
def threshMeasure((target, goPrediction)):
    """(recall, precision) for one target at thresholds 0.99 down to 0.00.

    At each threshold the predicted GO terms with score >= threshold
    are expanded with all their ancestors, minus the BP/MF root terms;
    (-1, -1) marks thresholds where no prediction survives.  Targets
    without annotations return an empty list.
    """
    result = []
    go = goPrediction.goAnnotations.go
    termToScore = goPrediction.targetToTermToScore[target]
    annotTerms = goPrediction.goAnnotations.annotations[target]
    if len(annotTerms) > 0:
        # walk the thresholds from high (0.99) to low (0.00)
        threshs = range(100)
        threshs.reverse()
        for i in threshs:
            currThresh = i / 100.0
            predictedTermsSub = set([term for term, score in termToScore.iteritems() if score >= currThresh])
            predictedTermsSubParents = go.getAllParents(predictedTermsSub)
            allPredictedTermsSub = predictedTermsSub | predictedTermsSubParents
            # the ontology roots are trivially true and are excluded
            if bpRoot in allPredictedTermsSub:
                allPredictedTermsSub.remove(bpRoot)
            if mfRoot in allPredictedTermsSub:
                allPredictedTermsSub.remove(mfRoot)
            if len(allPredictedTermsSub) > 0:
                result.append((calcPrecRecall(target, allPredictedTermsSub, annotTerms )))
            else:
                result.append((-1,-1))
    else:
        print "empty target: " + target
    return result
def calcPrecRecall(target, predictedTerms, actualTerms):
    """Return (recall, precision) of predictedTerms against actualTerms."""
    true_pos = len(actualTerms & predictedTerms)
    false_pos = len(predictedTerms - actualTerms)
    false_neg = len(actualTerms - predictedTerms)
    recall = true_pos / float(true_pos + false_neg)
    precision = true_pos / float(true_pos + false_pos)
    return recall, precision
if __name__ == "__main__":
predictionFilePath = sys.argv[1] #"/mnt/project/interres/cafa/datasets/predictions/TEAM_31_BPO_PRED.txt.new.txt"
annotationFilePath = sys.argv[2]
goTreeFilePath = sys.argv[3] #"/mnt/project/interres/cafa/goGraph.txt"
closureFilePath = sys.argv[4] #"/mnt/project/interres/cafa/fullTransitiveClosureGO.txt"
ontology= sys.argv[5] #"BPO"
numTargets = sys.argv[6] #Number of targets
goAnnotations = parseGOAnnotations(goTreeFilePath, closureFilePath, ontology, annotationFilePath=annotationFilePath)
goPrediction = GOPrediction(predictionFilePath, goAnnotations, int(numTargets))
# print scores2ranks([("bla",0.5),("go2",1.0),("go4",1.0),("go5",1.0),("go6",0.5),("go7",0.5),("go0",0.5),("go1",0.5),("go3",0.5),("bla",0.5)])
print maxf1Measure(calcThreshMeasure(goPrediction))
| Rostlab/MetaStudent | EVAL/threshMeasure.py | Python | gpl-2.0 | 3,372 |
# -*- coding: utf-8 -*-
from django import forms
import re
class QuestionarioRespondidoForm(forms.Form):
    """Dynamic questionnaire form: one field per question (pergunta).

    Built either from a list of questions (blank form) or from existing
    answers (respostas), which pre-fill each field's initial value.
    Question `tipo` codes: '0' free text, '1' yes/no radio, '2' single
    choice, '3' multiple choice, '4' read-only section title.
    """
    pass
    def __init__(self, *args, **kwargs):
        # `perguntas` (questions) / `respostas` (answers) are popped so
        # the base Form constructor never sees them
        perguntas = kwargs.pop('perguntas')
        respostas = kwargs.pop('respostas')
        super(QuestionarioRespondidoForm, self).__init__(*args, **kwargs)
        if not respostas:
            for pergunta in perguntas:
                self.fields['pergunta_'+str(pergunta.id)] = self.correctField(pergunta.pergunta,pergunta.opcoes,pergunta.tipo)
        else:
            for resposta in respostas:
                if resposta.pergunta.tipo == "3":
                    # multiple-choice answers are stored comma-separated;
                    # normalise ", " to "," then split into a list
                    _resposta = re.sub('[,][ ]+', ',',resposta.resposta)
                    _resposta = _resposta.split(',')
                else:
                    _resposta = resposta.resposta
                self.fields['pergunta_'+str(resposta.pergunta.id)] = self.correctField(resposta.pergunta.pergunta,resposta.pergunta.opcoes,resposta.pergunta.tipo,_resposta)
    def clean(self):
        return self.cleaned_data
    def extra_answers(self):
        # yield (question_id, value) for every answered dynamic field
        for name, value in self.cleaned_data.items():
            if name.startswith('pergunta_'):
                yield (name[9:], value)
    def cleanOptions(self,optionlist):
        # "a, b,c" -> (('a','a'), ('b','b'), ('c','c')) choice tuples
        tpl = ()
        optionlist = re.sub('[,][ ]+',',', optionlist)
        optionlist = optionlist.split(',')
        for index,item in enumerate(optionlist):
            tpl = tpl + ((item,item),)
        return tpl
    def correctField(self,label, opcoes,tipo = 0, initial = None):
        # map a question `tipo` code onto the matching Django form field;
        # choice types fall back to free text when no options are given
        if tipo =='0':
            return forms.CharField(label=label,required=False, widget=forms.Textarea, initial = initial)
        elif tipo =='1':
            return forms.ChoiceField(label = label,required=False, choices = ((u'sim',u'sim'),(u'não',u'não')), widget= forms.RadioSelect, initial = initial)
        elif tipo=='2':
            if opcoes:
                return forms.ChoiceField(label = label,required=False, choices = self.cleanOptions(opcoes), widget= forms.RadioSelect, initial = initial)
            else:
                return forms.CharField(label=label,required=False, widget=forms.Textarea, initial = initial)
        elif tipo=='3':
            if opcoes:
                return forms.MultipleChoiceField(label=label,required=False, widget=forms.CheckboxSelectMultiple, choices=self.cleanOptions(opcoes), initial = initial)
            else:
                return forms.CharField(label=label,required=False, widget=forms.Textarea, initial = initial)
        elif tipo=='4':
            return forms.CharField(label='<h3>'+label+'</h3>',required=False, widget=forms.TextInput(attrs={'readonly':'readonly'}))
        else:
            return forms.CharField(label=label,required=False, widget=forms.Textarea)
| agendaTCC/AgendaTCC | tccweb/apps/questionarios/forms.py | Python | gpl-2.0 | 2,396 |
# -*- coding: utf-8 -*-
{
'!langcode!': 'es',
'!langname!': 'Español',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"actualice" es una expresión opcional como "campo1=\'nuevo_valor\'". No se puede actualizar o eliminar resultados de un JOIN',
'%s %%{row} deleted': '%s %%{fila} %%{eliminada}',
'%s %%{row} updated': '%s %%{fila} %%{actualizada}',
'%s selected': '%s %%{seleccionado}',
'%Y-%m-%d': '%d/%m/%Y',
'%Y-%m-%d %H:%M:%S': '%d/%m/%Y %H:%M:%S',
'(something like "it-it")': '(algo como "eso-eso")',
'@markmin\x01An error occured, please [[reload %s]] the page': 'Ha ocurrido un error, por favor [[recargar %s]] la página',
'@markmin\x01Number of entries: **%s**': 'Número de entradas: **%s**',
'A new version of web2py is available': 'Hay una nueva versión de web2py disponible',
'A new version of web2py is available: %s': 'Hay una nueva versión de web2py disponible: %s',
'About': 'Acerca de',
'about': 'acerca de',
'About application': 'Acerca de la aplicación',
'Access Control': 'Control de Acceso',
'additional code for your application': 'código adicional para su aplicación',
'admin disabled because no admin password': 'admin deshabilitado por falta de contraseña',
'admin disabled because not supported on google app engine': 'admin deshabilitado, no es soportado en GAE',
'admin disabled because unable to access password file': 'admin deshabilitado, imposible acceder al archivo con la contraseña',
'Admin is disabled because insecure channel': 'Admin deshabilitado, el canal no es seguro',
'Admin is disabled because unsecure channel': 'Admin deshabilitado, el canal no es seguro',
'Administrative interface': 'Interfaz administrativa',
'Administrative Interface': 'Interfaz Administrativa',
'Administrator Password:': 'Contraseña del Administrador:',
'Ajax Recipes': 'Recetas AJAX',
'An error occured, please %s the page': 'Ha ocurrido un error, por favor %s la página',
'and rename it (required):': 'y renómbrela (requerido):',
'and rename it:': ' y renómbrelo:',
'Aplicar cambios': 'Aplicar cambios',
'appadmin': 'appadmin',
'appadmin is disabled because insecure channel': 'admin deshabilitado, el canal no es seguro',
'application "%s" uninstalled': 'aplicación "%s" desinstalada',
'application compiled': 'aplicación compilada',
'application is compiled and cannot be designed': 'la aplicación está compilada y no puede ser modificada',
'Apply changes': 'Aplicar cambios',
'Are you sure you want to delete file "%s"?': '¿Está seguro que desea eliminar el archivo "%s"?',
'Are you sure you want to delete this object?': '¿Está seguro que desea borrar este objeto?',
'Are you sure you want to uninstall application "%s"': '¿Está seguro que desea desinstalar la aplicación "%s"',
'Are you sure you want to uninstall application "%s"?': '¿Está seguro que desea desinstalar la aplicación "%s"?',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': 'ATENCION: Inicio de sesión requiere una conexión segura (HTTPS) o localhost.',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': 'ATENCION: NO EJECUTE VARIAS PRUEBAS SIMULTANEAMENTE, NO SON THREAD SAFE.',
'ATTENTION: you cannot edit the running application!': 'ATENCION: no puede modificar la aplicación que está ejecutandose!',
'Authentication': 'Autenticación',
'Available Databases and Tables': 'Bases de datos y tablas disponibles',
'Buy this book': 'Compra este libro',
'Cache': 'Caché',
'cache': 'caché',
'Cache Keys': 'Llaves de la Caché',
'cache, errors and sessions cleaned': 'caché, errores y sesiones eliminados',
'Cambie la contraseña': 'Cambie la contraseña',
'Cannot be empty': 'No puede estar vacío',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': 'No se puede compilar: hay errores en su aplicación. Depure, corrija errores y vuelva a intentarlo.',
'cannot create file': 'no es posible crear archivo',
'cannot upload file "%(filename)s"': 'no es posible subir archivo "%(filename)s"',
'Change Password': 'Cambie la Contraseña',
'Change password': 'Cambie la contraseña',
'change password': 'cambie la contraseña',
'check all': 'marcar todos',
'Check to delete': 'Marque para eliminar',
'clean': 'limpiar',
'Clear CACHE?': '¿Limpiar CACHÉ?',
'Clear DISK': 'Limpiar DISCO',
'Clear RAM': 'Limpiar RAM',
'Click on the link %(link)s to reset your password': 'Pulse en el enlace %(link)s para reiniciar su contraseña',
'click to check for upgrades': 'haga clic para buscar actualizaciones',
'Client IP': 'IP del Cliente',
'Community': 'Comunidad',
'compile': 'compilar',
'compiled application removed': 'aplicación compilada eliminada',
'Components and Plugins': 'Componentes y Plugins',
'Controller': 'Controlador',
'Controllers': 'Controladores',
'controllers': 'controladores',
'Copyright': 'Copyright',
'Correo electrónico inválido': 'Correo electrónico inválido',
'create file with filename:': 'cree archivo con nombre:',
'Create new application': 'Cree una nueva aplicación',
'create new application:': 'nombre de la nueva aplicación:',
'Created By': 'Creado Por',
'Created On': 'Creado En',
'crontab': 'crontab',
'Current request': 'Solicitud en curso',
'Current response': 'Respuesta en curso',
'Current session': 'Sesión en curso',
'currently saved or': 'actualmente guardado o',
'customize me!': '¡Adáptame!',
'data uploaded': 'datos subidos',
'Database': 'Base de datos',
'Database %s select': 'selección en base de datos %s',
'database administration': 'administración base de datos',
'Database Administration (appadmin)': 'Administración de la base de datos (appadmin)',
'Date and Time': 'Fecha y Hora',
'db': 'bdd',
'DB Model': 'Modelo BDD',
'defines tables': 'define tablas',
'Delete': 'Eliminar',
'delete': 'eliminar',
'delete all checked': 'eliminar marcados',
'Delete:': 'Eliminar:',
'Demo': 'Demostración',
'Deploy on Google App Engine': 'Despliegue en Google App Engine',
'Deployment Recipes': 'Recetas de despliegue',
'Description': 'Descripción',
'design': 'diseño',
'DESIGN': 'DISEÑO',
'Design for': 'Diseño por',
'DISK': 'DISCO',
'Disk Cache Keys': 'Llaves de Caché en Disco',
'Disk Cleared': 'Disco limpiado',
'Documentation': 'Documentación',
"Don't know what to do?": '¿No sabe que hacer?',
'done!': '¡hecho!',
'Download': 'Descargas',
'E-mail': 'Correo electrónico',
'edit': 'editar',
'EDIT': 'EDITAR',
'Edit': 'Editar',
'Edit application': 'Editar aplicación',
'edit controller': 'editar controlador',
'Edit current record': 'Edite el registro actual',
'Edit Profile': 'Editar Perfil',
'edit profile': 'editar perfil',
'Edit This App': 'Edite esta App',
'Editing file': 'Editando archivo',
'Editing file "%s"': 'Editando archivo "%s"',
'Email and SMS': 'Correo electrónico y SMS',
'Email sent': 'Correo electrónico enviado',
'enter a number between %(min)g and %(max)g': 'introduzca un número entre %(min)g y %(max)g',
'enter an integer between %(min)g and %(max)g': 'introduzca un entero entre %(min)g y %(max)g',
'Error logs for "%(app)s"': 'Bitácora de errores en "%(app)s"',
'errors': 'errores',
'Errors': 'Errores',
'Errors in form, please check it out.': 'Hay errores en el formulario, por favor comprúebelo.',
'export as csv file': 'exportar como archivo CSV',
'exposes': 'expone',
'extends': 'extiende',
'failed to reload module': 'la recarga del módulo ha fallado',
'FAQ': 'FAQ',
'file "%(filename)s" created': 'archivo "%(filename)s" creado',
'file "%(filename)s" deleted': 'archivo "%(filename)s" eliminado',
'file "%(filename)s" uploaded': 'archivo "%(filename)s" subido',
'file "%(filename)s" was not deleted': 'archivo "%(filename)s" no fué eliminado',
'file "%s" of %s restored': 'archivo "%s" de %s restaurado',
'file changed on disk': 'archivo modificado en el disco',
'file does not exist': 'archivo no existe',
'file saved on %(time)s': 'archivo guardado %(time)s',
'file saved on %s': 'archivo guardado %s',
'First name': 'Nombre',
'Forgot username?': '¿Olvidó el nombre de usuario?',
'Forms and Validators': 'Formularios y validadores',
'Free Applications': 'Aplicaciones Libres',
'Functions with no doctests will result in [passed] tests.': 'Funciones sin doctests equivalen a pruebas [aceptadas].',
'Group %(group_id)s created': 'Grupo %(group_id)s creado',
'Group ID': 'ID de Grupo',
'Group uniquely assigned to user %(id)s': 'Grupo asignado únicamente al usuario %(id)s',
'Groups': 'Grupos',
'Hello World': 'Hola Mundo',
'help': 'ayuda',
'Home': 'Inicio',
'How did you get here?': '¿Cómo llegaste aquí?',
'htmledit': 'htmledit',
'Impersonate': 'Suplantar',
'import': 'importar',
'Import/Export': 'Importar/Exportar',
'includes': 'incluye',
'Index': 'Índice',
'Inicio de sesión': 'Inicio de sesión',
'insert new': 'inserte nuevo',
'insert new %s': 'inserte nuevo %s',
'Installed applications': 'Aplicaciones instaladas',
'Insufficient privileges': 'Privilegios insuficientes',
'internal error': 'error interno',
'Internal State': 'Estado Interno',
'Introduction': 'Introducción',
'Invalid action': 'Acción inválida',
'Invalid email': 'Correo electrónico inválido',
'Invalid login': 'Inicio de sesión inválido',
'invalid password': 'contraseña inválida',
'Invalid Query': 'Consulta inválida',
'invalid request': 'solicitud inválida',
'Invalid reset password': 'Reinicio de contraseña inválido',
'invalid ticket': 'tiquete inválido',
'Is Active': 'Está Activo',
'Key': 'Llave',
'language file "%(filename)s" created/updated': 'archivo de lenguaje "%(filename)s" creado/actualizado',
'Language files (static strings) updated': 'Archivos de lenguaje (cadenas estáticas) actualizados',
'languages': 'lenguajes',
'Languages': 'Lenguajes',
'languages updated': 'lenguajes actualizados',
'Last name': 'Apellido',
'Last saved on:': 'Guardado en:',
'Layout': 'Diseño de página',
'Layout Plugins': 'Plugins de diseño',
'Layouts': 'Diseños de páginas',
'License for': 'Licencia para',
'Live Chat': 'Chat en vivo',
'loading...': 'cargando...',
'Logged in': 'Sesión iniciada',
'Logged out': 'Sesión finalizada',
'Login': 'Inicio de sesión',
'login': 'inicio de sesión',
'Login disabled by administrator': 'Inicio de sesión deshabilitado por el administrador',
'Login to the Administrative Interface': 'Inicio de sesión para la Interfaz Administrativa',
'logout': 'fin de sesión',
'Logout': 'Fin de sesión',
'Los campos de contraseña no coinciden': 'Los campos de contraseña no coinciden',
'Lost Password': 'Contraseña perdida',
'Lost password?': '¿Olvidó la contraseña?',
'lost password?': '¿olvidó la contraseña?',
'Main Menu': 'Menú principal',
'Manage Cache': 'Gestionar la Caché',
'Menu Model': 'Modelo "menu"',
'merge': 'combinar',
'Models': 'Modelos',
'models': 'modelos',
'Modified By': 'Modificado Por',
'Modified On': 'Modificado En',
'Modules': 'Módulos',
'modules': 'módulos',
'must be YYYY-MM-DD HH:MM:SS!': '¡debe ser DD/MM/YYYY HH:MM:SS!',
'must be YYYY-MM-DD!': '¡debe ser DD/MM/YYYY!',
'My Sites': 'Mis Sitios',
'Name': 'Nombre',
'new application "%s" created': 'nueva aplicación "%s" creada',
'New password': 'Contraseña nueva',
'New Record': 'Registro nuevo',
'new record inserted': 'nuevo registro insertado',
'next 100 rows': '100 filas siguientes',
'NO': 'NO',
'No databases in this application': 'No hay bases de datos en esta aplicación',
'Not authorized': 'No autorizado',
'Object or table name': 'Nombre del objeto o tabla',
'Old password': 'Contraseña vieja',
'Online examples': 'Ejemplos en línea',
'or import from csv file': 'o importar desde archivo CSV',
'or provide application url:': 'o provea URL de la aplicación:',
'Origin': 'Origen',
'Original/Translation': 'Original/Traducción',
'Other Plugins': 'Otros Plugins',
'Other Recipes': 'Otras Recetas',
'Overview': 'Resumen',
'pack all': 'empaquetar todo',
'pack compiled': 'empaquete compiladas',
'Password': 'Contraseña',
'Password changed': 'Contraseña cambiada',
"Password fields don't match": 'Los campos de contraseña no coinciden',
'Password reset': 'Reinicio de contraseña',
'Peeking at file': 'Visualizando archivo',
'Phone': 'Teléfono',
'please input your password again': 'por favor introduzca su contraseña otra vez',
'Plugins': 'Plugins',
'Powered by': 'Este sitio usa',
'Preface': 'Prefacio',
'previous 100 rows': '100 filas anteriores',
'Profile': 'Perfil',
'Profile updated': 'Perfil actualizado',
'Python': 'Python',
'Query:': 'Consulta:',
'Quick Examples': 'Ejemplos Rápidos',
'RAM': 'RAM',
'RAM Cache Keys': 'Llaves de la Caché en RAM',
'Ram Cleared': 'Ram Limpiada',
'Recipes': 'Recetas',
'Record': 'Registro',
'record does not exist': 'el registro no existe',
'Record ID': 'ID de Registro',
'Record id': 'Id de registro',
'register': 'regístrese',
'Register': 'Regístrese',
'Registration identifier': 'Identificador de Registro',
'Registration key': 'Llave de registro',
'Registration successful': 'Registro con éxito',
'Regístrese': 'Regístrese',
'reload': 'recargar',
'Remember me (for 30 days)': 'Recuérdame (durante 30 días)',
'remove compiled': 'eliminar compiladas',
'Request reset password': 'Solicitar reinicio de contraseña',
'Reset Password key': 'Restaurar Llave de la Contraseña',
'Resolve Conflict file': 'archivo Resolución de Conflicto',
'restore': 'restaurar',
'Retrieve username': 'Recuperar nombre de usuario',
'revert': 'revertir',
'Role': 'Rol',
'Rows in Table': 'Filas en la tabla',
'Rows selected': 'Filas seleccionadas',
'save': 'guardar',
'Saved file hash:': 'Hash del archivo guardado:',
'Semantic': 'Semántica',
'Services': 'Servicios',
'session expired': 'sesión expirada',
'shell': 'terminal',
'site': 'sitio',
'Size of cache:': 'Tamaño de la Caché:',
'some files could not be removed': 'algunos archivos no pudieron ser removidos',
'state': 'estado',
'static': 'estáticos',
'Static files': 'Archivos estáticos',
'Statistics': 'Estadísticas',
'Stylesheet': 'Hoja de estilo',
'Submit': 'Enviar',
'submit': 'enviar',
'Support': 'Soporte',
'Sure you want to delete this object?': '¿Está seguro que desea eliminar este objeto?',
'Table': 'Tabla',
'Table name': 'Nombre de la tabla',
'test': 'probar',
'Testing application': 'Probando aplicación',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': 'La "consulta" es una condición como "db.tabla1.campo1==\'valor\'". Algo como "db.tabla1.campo1==db.tabla2.campo2" resulta en un JOIN SQL.',
'the application logic, each URL path is mapped in one exposed function in the controller': 'la lógica de la aplicación, cada ruta URL se mapea en una función expuesta en el controlador',
'The Core': 'El Núcleo',
'the data representation, define database tables and sets': 'la representación de datos, define tablas y conjuntos de base de datos',
'The output of the file is a dictionary that was rendered by the view %s': 'La salida de dicha función es un diccionario que es desplegado por la vista %s',
'the presentations layer, views are also known as templates': 'la capa de presentación, las vistas también son llamadas plantillas',
'The Views': 'Las Vistas',
'There are no controllers': 'No hay controladores',
'There are no models': 'No hay modelos',
'There are no modules': 'No hay módulos',
'There are no static files': 'No hay archivos estáticos',
'There are no translators, only default language is supported': 'No hay traductores, sólo el lenguaje por defecto es soportado',
'There are no views': 'No hay vistas',
'these files are served without processing, your images go here': 'estos archivos son servidos sin procesar, sus imágenes van aquí',
'This App': 'Esta Aplicación',
'This email already has an account': 'Este correo electrónico ya tiene una cuenta',
'This is a copy of the scaffolding application': 'Esta es una copia de la aplicación de andamiaje',
'This is the %(filename)s template': 'Esta es la plantilla %(filename)s',
'Ticket': 'Tiquete',
'Time in Cache (h:m:s)': 'Tiempo en Caché (h:m:s)',
'Timestamp': 'Marca de tiempo',
'to previous version.': 'a la versión previa.',
'translation strings for the application': 'cadenas de carácteres de traducción para la aplicación',
'try': 'intente',
'try something like': 'intente algo como',
'Twitter': 'Twitter',
'Unable to check for upgrades': 'No es posible verificar la existencia de actualizaciones',
'unable to create application "%s"': 'no es posible crear la aplicación "%s"',
'unable to delete file "%(filename)s"': 'no es posible eliminar el archivo "%(filename)s"',
'Unable to download': 'No es posible la descarga',
'Unable to download app': 'No es posible descargar la aplicación',
'unable to parse csv file': 'no es posible analizar el archivo CSV',
'unable to uninstall "%s"': 'no es posible desinstalar "%s"',
'uncheck all': 'desmarcar todos',
'uninstall': 'desinstalar',
'update': 'actualizar',
'update all languages': 'actualizar todos los lenguajes',
'Update:': 'Actualice:',
'upload application:': 'subir aplicación:',
'Upload existing application': 'Suba esta aplicación',
'upload file:': 'suba archivo:',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': 'Use (...)&(...) para AND, (...)|(...) para OR, y ~(...) para NOT, para crear consultas más complejas.',
'User %(id)s is impersonating %(other_id)s': 'El usuario %(id)s está suplantando %(other_id)s',
'User %(id)s Logged-in': 'El usuario %(id)s inició la sesión',
'User %(id)s Logged-out': 'El usuario %(id)s finalizó la sesión',
'User %(id)s Password changed': 'Contraseña del usuario %(id)s cambiada',
'User %(id)s Password reset': 'Contraseña del usuario %(id)s reiniciada',
'User %(id)s Profile updated': 'Actualizado el perfil del usuario %(id)s',
'User %(id)s Registered': 'Usuario %(id)s Registrado',
'User %(id)s Username retrieved': 'Se ha recuperado el nombre de usuario del usuario %(id)s',
'User Id': 'Id de Usuario',
'User ID': 'ID de Usuario',
'Username': 'Nombre de usuario',
'Username retrieve': 'Recuperar nombre de usuario',
'value already in database or empty': 'el valor ya existe en la base de datos o está vacío',
'value not in database': 'el valor no está en la base de datos',
'Verify Password': 'Verificar Contraseña',
'versioning': 'versiones',
'Videos': 'Vídeos',
'View': 'Vista',
'view': 'vista',
'Views': 'Vistas',
'views': 'vistas',
'web2py is up to date': 'web2py está actualizado',
'web2py Recent Tweets': 'Tweets Recientes de web2py',
'Welcome': 'Bienvenido',
'Welcome %s': 'Bienvenido %s',
'Welcome to web2py': 'Bienvenido a web2py',
'Welcome to web2py!': '¡Bienvenido a web2py!',
'Which called the function %s located in the file %s': 'La cual llamó la función %s localizada en el archivo %s',
'Working...': 'Trabajando...',
'YES': 'SÍ',
'You are successfully running web2py': 'Usted está ejecutando web2py exitosamente',
'You can modify this application and adapt it to your needs': 'Usted puede modificar esta aplicación y adaptarla a sus necesidades',
'You visited the url %s': 'Usted visitó la url %s',
'Your username is: %(username)s': 'Su nombre de usuario es: %(username)s',
}
| davidblaisonneau/fabm | server/languages/es.py | Python | gpl-2.0 | 19,174 |
# -*- coding: utf-8 -*-
from openerp import models, fields, api
class Solicitantes(models.Model):
    """Applicant (solicitante) model for the 'propuestas' module.

    Inherits the person/contact fields from 'contactos.personas' and adds
    the reverse one-to-many links below.
    """
    _name = 'propuestas.solicitantes'
    _inherit = 'contactos.personas'
    # Each related model carries a `solicitantes_id` foreign key back here.
    familiares_ids = fields.One2many('propuestas.familiares', 'solicitantes_id',string="Familiares")
    conyuges_ids = fields.One2many('propuestas.conyuges', 'solicitantes_id',string="Conyuge")
    unidades_productivas_ids = fields.One2many('propuestas.unidades_productivas', 'solicitantes_id', string="Unidades Productivas")
    propuestas_ids = fields.One2many('propuestas.propuestas', 'solicitantes_id', string="Propuesta")
| sani-coop/tinjaca | addons/propuestas/models/solicitantes.py | Python | gpl-2.0 | 604 |
"""
This creates and generated tokens for users stored in the database
"""
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from rest_framework.authtoken.models import Token
# This code is triggered whenever a new user has been created and saved to
# the database
@receiver(post_save, sender=settings.AUTH_USER_MODEL)
def create_auth_token(sender, instance=None, created=False, **kwargs):
    """post_save handler: create a DRF auth Token for each newly created user.

    `created` is True only on the initial save, so existing users keep
    their token on subsequent updates.
    """
    if created:
        Token.objects.create(user=instance)
| andela-ooshodi/django-bucketlist-application | djangobucketlist/apiv1/models.py | Python | gpl-2.0 | 521 |
# -*- coding: utf-8 -*-
from django import forms
from web.models import Contact, Venue, Image, Description
class AdvancedSearchForm(forms.Form):
    """Venue search form: free-text query plus optional audience size.

    Labels are in Swedish ("Sökord" = search term, "Publikmängd" =
    audience size).
    """
    q = forms.CharField(label="Sökord", max_length=128, required=False)
    audience = forms.IntegerField(min_value=1, required=False, label="Publikmängd")
class VenueForm(forms.Form):
    """Form for creating/editing a venue; all labels are in Swedish."""
    name = forms.CharField(label='Namn')  # name
    address = forms.CharField(label='Adress')  # address
    # Free-text description: stage, dressing rooms, lighting, instruments etc.
    description = forms.CharField(widget=forms.Textarea, label='Beskrivning', help_text='Scen, loger, ljusutrustning, instrument etc.')
    audience_min = forms.IntegerField(label='Minsta publik')  # minimum audience
    audience_max = forms.IntegerField(label='Max publik')  # maximum audience
class DescriptionForm(forms.ModelForm):
    """ModelForm exposing only the free-text field of a Description."""
    class Meta:
        model = Description
        fields = ['text']
class ContactForm(forms.ModelForm):
    """ModelForm for a venue contact: name, e-mail and phone number."""
    class Meta:
        model = Contact
        fields = ['name', 'email', 'phone']
class ImageForm(forms.ModelForm):
    """ModelForm for uploading an image together with its caption."""
    class Meta:
        model = Image
        fields = ['image', 'caption']
# class ImageForm(forms.Form):
# image = forms.ImageField(required=False)
# caption = forms.CharField(max_length=128, required=False)
class DeleteVenueForm(forms.Form):
    """Empty form — presumably a CSRF-protected delete confirmation
    submit; it declares no fields of its own (verify against the view)."""
    pass
| eriklavander/klaviaturregistret | web/forms.py | Python | gpl-2.0 | 1,157 |
#!/usr/bin/python
import subprocess
import re
import argparse
def stripper(fasta):
    """Strip a 3' CCA-derived suffix (CCA/CCAC/CCACC/CCACCA) from every
    sequence in a FASTA file.

    Returns a dict mapping record name -> (possibly trimmed) sequence.
    Stripping the *longest* matching suffix reproduces the original
    cascade of independent endswith checks, where the last (longest)
    match overwrote the earlier ones.

    NOTE(review): `fastq_fasta_parser` is never imported in this module;
    the original code relies on it being in scope (presumably
    `from pynoncode import fastq_fasta_parser`) — confirm upstream.
    """
    result = {}
    with open(fasta) as f:
        for name, seq in fastq_fasta_parser.read_fasta(f):
            # Longest suffix first so e.g. "CCACCA" is removed whole
            # instead of only its trailing "CCA".
            for suffix in ("CCACCA", "CCACC", "CCAC", "CCA"):
                if seq.endswith(suffix):
                    seq = seq[:-len(suffix)]
                    break
            result[name] = seq
    return result
def strip_ends(paired):
    """Clip CCA-style 3' ends from the unmapped multi-mapping reads and
    write them back out as FASTA, one record per name in sorted order.

    paired -- True for paired-end data (two mate files), anything else
              for single-end data (one file).
    """
    if paired == True:
        out_fwd = open("clipped_1.fa", "w")
        out_rev = open("clipped_2.fa", "w")
        clipped_fwd = stripper("unclipped_multi_unmapped_1.fa")
        clipped_rev = stripper("unclipped_multi_unmapped_2.fa")
        for name in sorted(clipped_fwd):
            out_fwd.write("{}\n{}\n".format(name, clipped_fwd[name]))
        for name in sorted(clipped_rev):
            out_rev.write("{}\n{}\n".format(name, clipped_rev[name]))
    else:
        clipped = stripper("unclipped_multi_unmapped.fa")
        out = open("clipped_fasta.fa", "w")
        for name in sorted(clipped):
            out.write("{}\n{}\n".format(name, clipped[name]))
def paired_bowtie(index, clipped=False):
    """Two-pass paired-end alignment with bowtie (v1).

    Pass 1 (-m 1) keeps pairs that map uniquely; pairs it cannot place are
    written to an --un FASTA and re-aligned in pass 2, which reports up to
    10 hits per read (-k 10, suppressing reads with >500 alignments).
    Each pass's SAM goes to stdout-redirect files and its report to stderr
    files in the current working directory.

    index   -- bowtie index prefix, interpolated into the command lines.
    clipped -- False: align original_fasta_*.fa (unclipped reads);
               True:  align clipped_*.fa (CCA-trimmed reads).
    """
    if clipped==False:
        sam1_o = open("unclipped_unique.sam", "wb")
        report1_o = open("unclipped_unique_report.txt", "wb")
        sam2_o = open("unclipped_multimap.sam", "wb")
        report2_o = open("unclipped_multi_report.txt", "wb")
        uniq = "bowtie --best -f -m 1 -v 2 --sam --un unclipped_unique_unmapped.fa {0} -1 original_fasta_1.fa -2 original_fasta_2.fa".format(index)
        multi= "bowtie --best -k 10 -f -m 500 -v 2 --sam --un unclipped_multi_unmapped.fa {0} -1 unclipped_unique_unmapped_1.fa -2 unclipped_unique_unmapped_2.fa".format(index)
        p = subprocess.Popen(uniq.split(), stdout = sam1_o, stderr=report1_o)
        p.communicate()
        p = subprocess.Popen(multi.split(), stdout = sam2_o, stderr=report2_o)
        p.communicate()
    else:
        sam1_o = open("clipped_unique.sam", "wb")
        report1_o = open("clipped_unique_report.txt", "wb")
        sam2_o = open("clipped_multimap.sam", "wb")
        report2_o = open("clipped_multimap_report.txt", "wb")
        uniq = "bowtie --best -f -m 1 -v 2 --sam --un clipped_unmapped.fa {0} -1 clipped_1.fa -2 clipped_2.fa".format(index)
        multi= "bowtie --best -k 10 -f -m 500 -v 2 --sam {0} -1 clipped_unmapped_1.fa -2 clipped_unmapped_2.fa".format(index)
        p = subprocess.Popen(uniq.split(), stdout = sam1_o, stderr=report1_o)
        p.communicate()
        p = subprocess.Popen(multi.split(), stdout = sam2_o, stderr=report2_o)
        p.communicate()
def single_bowtie(index, clipped=False):
    """Two-pass single-end alignment with bowtie (v1).

    Same scheme as paired_bowtie: pass 1 keeps uniquely mapping reads
    (-m 1) and writes the rest to an --un FASTA, pass 2 re-aligns them
    reporting up to 10 hits (-k 10, dropping reads with >500 alignments).

    index   -- bowtie index prefix, interpolated into the command lines.
    clipped -- False: align original_fasta.fa; True: align clipped_fasta.fa.
    """
    if clipped==False:
        sam1_o = open("unclipped_unique.sam", "wb")
        report1_o = open("unclipped_unique_report.txt", "wb")
        sam2_o = open("unclipped_multimap.sam", "wb")
        report2_o = open("unclipped_multi_report.txt", "wb")
        uniq = "bowtie --best -f -m 1 -v 2 --sam --un unclipped_unique_unmapped.fa {0} original_fasta.fa".format(index)
        multi= "bowtie --best -k 10 -f -m 500 -v 2 --sam --un unclipped_multi_unmapped.fa {0} unclipped_unique_unmapped.fa".format(index)
        p = subprocess.Popen(uniq.split(), stdout = sam1_o, stderr=report1_o)
        p.communicate()
        p = subprocess.Popen(multi.split(), stdout = sam2_o, stderr=report2_o)
        p.communicate()
    else:
        sam1_o = open("clipped_unique.sam", "wb")
        report1_o = open("clipped_unique_report.txt", "wb")
        sam2_o = open("clipped_multimap.sam", "wb")
        report2_o = open("clipped_multimap_report.txt", "wb")
        uniq = "bowtie --best -f -m 1 -v 2 --sam --un clipped_unique_unmapped.fa {0} clipped_fasta.fa".format(index)
        multi= "bowtie --best -k 10 -f -m 500 -v 2 --sam {0} clipped_unique_unmapped.fa".format(index)
        p = subprocess.Popen(uniq.split(), stdout = sam1_o, stderr=report1_o)
        p.communicate()
        p = subprocess.Popen(multi.split(), stdout = sam2_o, stderr=report2_o)
        p.communicate()
def grep_unique(samfile):
    """Split a paired-end SAM file into uniquely and multi-mapped records.

    Creates two files next to the input, named by substituting ".sam" with
    ".unique.sam" / ".multi.sam".  Header lines ("@...") are copied into
    both outputs.  An alignment line goes to the multi file when its 13th
    column starts with an XS:i: tag (bowtie2 secondary-alignment score),
    otherwise to the unique file; in either case only records whose FLAG
    is one of the mapped paired-end mate values below are kept, and lines
    with fewer than 13 columns are dropped entirely (original behaviour).
    """
    # FLAG values for mapped mates of a pair, both strands, proper or not.
    pair_flags = frozenset((147, 83, 99, 163, 81, 97, 145, 161))
    out = re.sub(".sam", ".unique.sam", samfile)
    out2 = re.sub(".sam", ".multi.sam", samfile)
    # `with` closes (and flushes) all three handles; the original leaked them.
    with open(samfile) as f, open(out, "w") as output, open(out2, "w") as output2:
        for line in f:
            line = line.rstrip()
            word = line.split("\t")
            if line.startswith("@"):
                output.write("{}\n".format(line))
                output2.write("{}\n".format(line))
                continue
            if len(word) > 12:
                dest = output2 if word[12].startswith("XS:i:") else output
                if int(word[1]) in pair_flags:
                    dest.write("{}\n".format(line))
def grep_single_unique(samfile):
    """Split a single-end SAM file into uniquely and multi-mapped records.

    Same scheme as grep_unique, but single-end alignments only carry
    FLAG 0 (forward strand) or 16 (reverse strand).  Header lines go to
    both outputs; alignment lines with an XS:i: tag in column 13 go to
    ".multi.sam", the rest to ".unique.sam"; lines with fewer than 13
    columns are dropped entirely (original behaviour).
    """
    # FLAG values for mapped single-end reads: forward (0) / reverse (16).
    single_flags = frozenset((0, 16))
    out = re.sub(".sam", ".unique.sam", samfile)
    out2 = re.sub(".sam", ".multi.sam", samfile)
    # `with` closes (and flushes) all three handles; the original leaked them.
    with open(samfile) as f, open(out, "w") as output, open(out2, "w") as output2:
        for line in f:
            line = line.rstrip()
            word = line.split("\t")
            if line.startswith("@"):
                output.write("{}\n".format(line))
                output2.write("{}\n".format(line))
                continue
            if len(word) > 12:
                dest = output2 if word[12].startswith("XS:i:") else output
                if int(word[1]) in single_flags:
                    dest.write("{}\n".format(line))
def paired_bowtie2(index, clipped=False):
    """Paired-end alignment with bowtie2, then split unique vs multi-mapped.

    Runs one bowtie2 pass (-k 10, --no-mixed --no-discordant) into tmp.sam,
    then uses grep_unique() to partition it; the partitions are renamed to
    bowtie2.<uc|c>.unique.sam / bowtie2.<uc|c>.multi.sam.  Unaligned pairs
    are written via --un-conc for the next round.

    index   -- bowtie2 index prefix, interpolated into the command line.
    clipped -- False: align fasta_*.fa (round 1); True: align clipped_*.fa
               (round 2, after CCA clipping).
    """
    if clipped==False:
        report1_o = open("unclipped_unique_report.txt", "wb")
        uniq = "bowtie2 -k 10 -N 1 -f -p 12 --no-mixed --no-discordant --un-conc unmapped_round1.fa -x {0} -1 fasta_1.fa -2 fasta_2.fa -S tmp.sam".format(index)
        p = subprocess.Popen(uniq.split(), stderr=report1_o)
        p.communicate()
        grep_unique("tmp.sam")
        subprocess.call(["mv", "tmp.unique.sam", "bowtie2.uc.unique.sam"])
        subprocess.call(["mv", "tmp.multi.sam", "bowtie2.uc.multi.sam"])
    else:
        report1_o = open("clipped_unique_report.txt", "wb")
        uniq = "bowtie2 -k 10 -N 1 -f -p 12 --no-mixed --no-discordant --un-conc unmapped_round2.fa -x {0} -1 clipped_1.fa -2 clipped_2.fa -S tmp.sam".format(index)
        p = subprocess.Popen(uniq.split(), stderr=report1_o)
        p.communicate()
        grep_unique("tmp.sam")
        subprocess.call(["mv", "tmp.unique.sam", "bowtie2.c.unique.sam"])
        subprocess.call(["mv", "tmp.multi.sam", "bowtie2.c.multi.sam"])
def single_bowtie2(index, clipped=False):
    """Single-end alignment with bowtie2, then split unique vs multi-mapped.

    Runs one bowtie2 pass (-k 10) into tmp.sam, then partitions it with
    grep_single_unique(); the partitions are renamed to
    bowtie2.<uc|c>.unique.sam / bowtie2.<uc|c>.multi.sam.  Unaligned reads
    are written via --un for the next round.

    index   -- bowtie2 index prefix, interpolated into the command line.
    clipped -- False: align fasta.fa (round 1); True: align
               clipped_fasta.fa (round 2, after CCA clipping).
    """
    if clipped==False:
        report1_o = open("unclipped_unique_report.txt", "wb")
        uniq = "bowtie2 -k 10 -N 1 -f -p 12 --un unmapped_round1.fa -x {0} -U fasta.fa -S tmp.sam".format(index)
        p = subprocess.Popen(uniq.split(), stderr=report1_o)
        p.communicate()
        grep_single_unique("tmp.sam")
        subprocess.call(["mv", "tmp.unique.sam", "bowtie2.uc.unique.sam"])
        subprocess.call(["mv", "tmp.multi.sam", "bowtie2.uc.multi.sam"])
    else:
        report1_o = open("clipped_unique_report.txt", "wb")
        uniq = "bowtie2 -k 10 -N 1 -f -p 12 --un unmapped_round2.fa -x {0} -U clipped_fasta.fa -S tmp.sam".format(index)
        p = subprocess.Popen(uniq.split(), stderr=report1_o)
        p.communicate()
        grep_single_unique("tmp.sam")
        subprocess.call(["mv", "tmp.unique.sam", "bowtie2.c.unique.sam"])
        subprocess.call(["mv", "tmp.multi.sam", "bowtie2.c.multi.sam"])
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='Runs bowtie\n')
    parser.add_argument('-p','--paired', help='Options True/False, is sample paired end?', required=False)
    parser.add_argument('-i','--index', help='Bowtie index', required=True)
    parser.add_argument('-c','--clipped', help='Options True/False, has sample been clipped?', required=False)
    args = vars(parser.parse_args())
    index = args["index"]
    # BUG FIX: argparse delivers option values as strings (or None), so the
    # original `args["paired"] == True` / `args["clipped"] == True` tests
    # were always False and every invocation silently ran the single-end,
    # unclipped path.  Compare against the documented "True" string.
    paired = args["paired"] == "True"
    clipped = args["clipped"] == "True"
    if paired:
        paired_bowtie(index, clipped)
    else:
        single_bowtie(index, clipped)
| pdl30/pynoncode | pynoncode/run_bowtie.py | Python | gpl-2.0 | 8,057 |
#!/usr/bin/python
#
# Copyright (C) 2006, 2007, 2008, 2010, 2012, 2013 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Script for unittesting the objects module"""
import copy
import unittest
from ganeti import constants
from ganeti import objects
from ganeti import errors
import testutils
class SimpleObject(objects.ConfigObject):
__slots__ = ["a", "b"]
class TestDictState(unittest.TestCase):
"""Simple dict tansformation tests"""
def testSimpleObjectToDict(self):
o1 = SimpleObject(a="1")
self.assertEquals(o1.ToDict(), {"a": "1"})
self.assertEquals(o1.__getstate__(), {"a": "1"})
self.assertEquals(o1.__getstate__(), o1.ToDict())
o1.a = 2
o1.b = 5
self.assertEquals(o1.ToDict(), {"a": 2, "b": 5})
o2 = SimpleObject.FromDict(o1.ToDict())
self.assertEquals(o1.ToDict(), {"a": 2, "b": 5})
class TestClusterObject(unittest.TestCase):
  """Tests done on a L{objects.Cluster}"""
  # Builds a fake cluster carrying hypervisor defaults (hvparams), per-OS
  # hypervisor overrides (os_hvp) and node parameters (ndparams) so the
  # various Fill*/Get* helpers can be exercised against known data.
  def setUp(self):
    hvparams = {
      constants.HT_FAKE: {
        "foo": "bar",
        "bar": "foo",
        "foobar": "barfoo",
        },
      }
    os_hvp = {
      "lenny-image": {
        constants.HT_FAKE: {
          "foo": "baz",
          "foobar": "foobar",
          "blah": "blibb",
          "blubb": "blah",
          },
        constants.HT_XEN_PVM: {
          "root_path": "/dev/sda5",
          "foo": "foobar",
          },
        },
      "ubuntu-hardy": {
        },
      }
    ndparams = {
        constants.ND_OOB_PROGRAM: "/bin/cluster-oob",
        constants.ND_SPINDLE_COUNT: 1,
        constants.ND_EXCLUSIVE_STORAGE: False,
        }
    self.fake_cl = objects.Cluster(hvparams=hvparams, os_hvp=os_hvp,
                                   ndparams=ndparams)
    self.fake_cl.UpgradeConfig()
  # GetHVDefaults: cluster-wide defaults for a hypervisor, or the per-OS
  # override when os_name is given; unknown hypervisor yields {}.
  def testGetHVDefaults(self):
    cl = self.fake_cl
    self.failUnlessEqual(cl.GetHVDefaults(constants.HT_FAKE),
                         cl.hvparams[constants.HT_FAKE])
    self.failUnlessEqual(cl.GetHVDefaults(None), {})
    self.failUnlessEqual(cl.GetHVDefaults(constants.HT_XEN_PVM,
                                          os_name="lenny-image"),
                         cl.os_hvp["lenny-image"][constants.HT_XEN_PVM])
  # FillHV must merge, in increasing priority: built-in defaults, cluster
  # hvparams, per-OS os_hvp, then the instance's own hvparams.
  def testFillHvFullMerge(self):
    inst_hvparams = {
      "blah": "blubb",
      }
    fake_dict = constants.HVC_DEFAULTS[constants.HT_FAKE].copy()
    fake_dict.update({
      "foo": "baz",
      "bar": "foo",
      "foobar": "foobar",
      "blah": "blubb",
      "blubb": "blah",
      })
    fake_inst = objects.Instance(name="foobar",
                                 os="lenny-image",
                                 hypervisor=constants.HT_FAKE,
                                 hvparams=inst_hvparams)
    self.assertEqual(fake_dict, self.fake_cl.FillHV(fake_inst))
  # With no OS override and no instance params, cluster hvparams win.
  def testFillHvGlobalParams(self):
    fake_inst = objects.Instance(name="foobar",
                                 os="ubuntu-hardy",
                                 hypervisor=constants.HT_FAKE,
                                 hvparams={})
    self.assertEqual(self.fake_cl.hvparams[constants.HT_FAKE],
                     self.fake_cl.FillHV(fake_inst))
  # With no cluster/OS data for this hypervisor, instance params pass through.
  def testFillHvInstParams(self):
    inst_hvparams = {
      "blah": "blubb",
      }
    fake_inst = objects.Instance(name="foobar",
                                 os="ubuntu-hardy",
                                 hypervisor=constants.HT_XEN_PVM,
                                 hvparams=inst_hvparams)
    self.assertEqual(inst_hvparams, self.fake_cl.FillHV(fake_inst))
  def testFillHvEmptyParams(self):
    fake_inst = objects.Instance(name="foobar",
                                 os="ubuntu-hardy",
                                 hypervisor=constants.HT_XEN_PVM,
                                 hvparams={})
    self.assertEqual({}, self.fake_cl.FillHV(fake_inst))
  # Only the per-OS override exists for this hypervisor: it is returned as-is.
  def testFillHvPartialParams(self):
    os = "lenny-image"
    fake_inst = objects.Instance(name="foobar",
                                 os=os,
                                 hypervisor=constants.HT_XEN_PVM,
                                 hvparams={})
    self.assertEqual(self.fake_cl.os_hvp[os][constants.HT_XEN_PVM],
                     self.fake_cl.FillHV(fake_inst))
  # FillND merges cluster -> node group -> node, most specific wins.
  def testFillNdParamsCluster(self):
    fake_node = objects.Node(name="test",
                             ndparams={},
                             group="testgroup")
    fake_group = objects.NodeGroup(name="testgroup",
                                   ndparams={})
    self.assertEqual(self.fake_cl.ndparams,
                     self.fake_cl.FillND(fake_node, fake_group))
  def testFillNdParamsNodeGroup(self):
    fake_node = objects.Node(name="test",
                             ndparams={},
                             group="testgroup")
    group_ndparams = {
      constants.ND_OOB_PROGRAM: "/bin/group-oob",
      constants.ND_SPINDLE_COUNT: 10,
      constants.ND_EXCLUSIVE_STORAGE: True,
      }
    fake_group = objects.NodeGroup(name="testgroup",
                                   ndparams=group_ndparams)
    self.assertEqual(group_ndparams,
                     self.fake_cl.FillND(fake_node, fake_group))
  def testFillNdParamsNode(self):
    node_ndparams = {
      constants.ND_OOB_PROGRAM: "/bin/node-oob",
      constants.ND_SPINDLE_COUNT: 2,
      constants.ND_EXCLUSIVE_STORAGE: True,
      }
    fake_node = objects.Node(name="test",
                             ndparams=node_ndparams,
                             group="testgroup")
    fake_group = objects.NodeGroup(name="testgroup",
                                   ndparams={})
    self.assertEqual(node_ndparams,
                     self.fake_cl.FillND(fake_node, fake_group))
  # Node-level values override group-level ones even when both are set.
  def testFillNdParamsAll(self):
    node_ndparams = {
      constants.ND_OOB_PROGRAM: "/bin/node-oob",
      constants.ND_SPINDLE_COUNT: 5,
      constants.ND_EXCLUSIVE_STORAGE: True,
      }
    fake_node = objects.Node(name="test",
                             ndparams=node_ndparams,
                             group="testgroup")
    group_ndparams = {
      constants.ND_OOB_PROGRAM: "/bin/group-oob",
      constants.ND_SPINDLE_COUNT: 4,
      }
    fake_group = objects.NodeGroup(name="testgroup",
                                   ndparams=group_ndparams)
    self.assertEqual(node_ndparams,
                     self.fake_cl.FillND(fake_node, fake_group))
  # primary_hypervisor is the first entry of enabled_hypervisors.
  def testPrimaryHypervisor(self):
    assert self.fake_cl.enabled_hypervisors is None
    self.fake_cl.enabled_hypervisors = [constants.HT_XEN_HVM]
    self.assertEqual(self.fake_cl.primary_hypervisor, constants.HT_XEN_HVM)
    self.fake_cl.enabled_hypervisors = [constants.HT_XEN_PVM, constants.HT_KVM]
    self.assertEqual(self.fake_cl.primary_hypervisor, constants.HT_XEN_PVM)
    self.fake_cl.enabled_hypervisors = sorted(constants.HYPER_TYPES)
    self.assertEqual(self.fake_cl.primary_hypervisor, constants.HT_CHROOT)
  def testUpgradeConfig(self):
    # FIXME: This test is incomplete
    cluster = objects.Cluster()
    cluster.UpgradeConfig()
    # An unknown ipolicy key must be rejected during config upgrade.
    cluster = objects.Cluster(ipolicy={"unknown_key": None})
    self.assertRaises(errors.ConfigurationError, cluster.UpgradeConfig)
class TestClusterObjectTcpUdpPortPool(unittest.TestCase):
  # The cluster's TCP/UDP port pool is a set in memory but serialized
  # as a list in the dict representation; these tests cover both directions.
  def testNewCluster(self):
    self.assertTrue(objects.Cluster().tcpudp_port_pool is None)
  def testSerializingEmpty(self):
    self.assertEqual(objects.Cluster().ToDict(), {
      "tcpudp_port_pool": [],
      })
  def testSerializing(self):
    cluster = objects.Cluster.FromDict({})
    self.assertEqual(cluster.tcpudp_port_pool, set())
    cluster.tcpudp_port_pool.add(3546)
    cluster.tcpudp_port_pool.add(62511)
    data = cluster.ToDict()
    self.assertEqual(data.keys(), ["tcpudp_port_pool"])
    # Compare sorted: the serialized list order of a set is not defined.
    self.assertEqual(sorted(data["tcpudp_port_pool"]), sorted([3546, 62511]))
  def testDeserializingEmpty(self):
    cluster = objects.Cluster.FromDict({})
    self.assertEqual(cluster.tcpudp_port_pool, set())
  def testDeserialize(self):
    cluster = objects.Cluster.FromDict({
      "tcpudp_port_pool": [26214, 10039, 267],
      })
    self.assertEqual(cluster.tcpudp_port_pool, set([26214, 10039, 267]))
class TestOS(unittest.TestCase):
  """Tests for the OS name/variant helper functions."""
  # Sample names: no variant, a simple variant, and a variant that itself
  # starts with the separator character.
  ALL_DATA = [
    "debootstrap",
    "debootstrap+default",
    "debootstrap++default",
    ]
  def testSplitNameVariant(self):
    # Whatever the input, the split must always yield exactly two parts.
    for entry in self.ALL_DATA:
      parts = objects.OS.SplitNameVariant(entry)
      self.assertEqual(len(parts), 2)
  def testVariant(self):
    # No separator means an empty variant.
    self.assertEqual(objects.OS.GetVariant("debootstrap"), "")
    self.assertEqual(objects.OS.GetVariant("debootstrap+default"), "default")
class TestInstance(unittest.TestCase):
  """Tests for L{objects.Instance} node and disk helpers."""
  def _GenericCheck(self, inst):
    """Invariants shared by all instances: ordered node containers,
    primary node first and not listed among the secondaries.

    """
    # Bug fix: check each container in turn; the original tested
    # inst.all_nodes twice and never inst.secondary_nodes.
    for i in [inst.all_nodes, inst.secondary_nodes]:
      self.assertTrue(isinstance(i, (list, tuple)),
                      msg="Data type doesn't guarantee order")
    self.assertTrue(inst.primary_node not in inst.secondary_nodes)
    self.assertEqual(inst.all_nodes[0], inst.primary_node,
                     msg="Primary node not first node in list")
  def testNodesNoDisks(self):
    # A diskless instance lives only on its primary node.
    inst = objects.Instance(name="fakeinst.example.com",
      primary_node="pnode.example.com",
      disks=[
        ])
    self._GenericCheck(inst)
    self.assertEqual(len(inst.secondary_nodes), 0)
    self.assertEqual(set(inst.all_nodes), set([inst.primary_node]))
    self.assertEqual(inst.MapLVsByNode(), {
      inst.primary_node: [],
      })
  def testNodesPlainDisks(self):
    # Plain LVM disks add no secondary nodes but do map LVs on the primary.
    inst = objects.Instance(name="fakeinstplain.example.com",
      primary_node="node3.example.com",
      disks=[
        objects.Disk(dev_type=constants.LD_LV, size=128,
                     logical_id=("myxenvg", "disk25494")),
        objects.Disk(dev_type=constants.LD_LV, size=512,
                     logical_id=("myxenvg", "disk29071")),
        ])
    self._GenericCheck(inst)
    self.assertEqual(len(inst.secondary_nodes), 0)
    self.assertEqual(set(inst.all_nodes), set([inst.primary_node]))
    self.assertEqual(inst.MapLVsByNode(), {
      inst.primary_node: ["myxenvg/disk25494", "myxenvg/disk29071"],
      })
  def testNodesDrbdDisks(self):
    # A DRBD disk introduces a secondary node; both sides map the same LVs.
    inst = objects.Instance(name="fakeinstdrbd.example.com",
      primary_node="node10.example.com",
      disks=[
        objects.Disk(dev_type=constants.LD_DRBD8, size=786432,
          logical_id=("node10.example.com", "node15.example.com",
                      12300, 0, 0, "secret"),
          children=[
            objects.Disk(dev_type=constants.LD_LV, size=786432,
                         logical_id=("myxenvg", "disk0")),
            objects.Disk(dev_type=constants.LD_LV, size=128,
                         logical_id=("myxenvg", "meta0"))
          ],
          iv_name="disk/0")
        ])
    self._GenericCheck(inst)
    self.assertEqual(set(inst.secondary_nodes), set(["node15.example.com"]))
    self.assertEqual(set(inst.all_nodes),
                     set([inst.primary_node, "node15.example.com"]))
    self.assertEqual(inst.MapLVsByNode(), {
      inst.primary_node: ["myxenvg/disk0", "myxenvg/meta0"],
      "node15.example.com": ["myxenvg/disk0", "myxenvg/meta0"],
      })
    # FindDisk accepts a valid index only; bad types/out-of-range raise.
    self.assertEqual(inst.FindDisk(0), inst.disks[0])
    self.assertRaises(errors.OpPrereqError, inst.FindDisk, "hello")
    self.assertRaises(errors.OpPrereqError, inst.FindDisk, 100)
    self.assertRaises(errors.OpPrereqError, inst.FindDisk, 1)
class TestNode(unittest.TestCase):
  # Serialization tests for L{objects.Node}, including the nested
  # NodeHvState/NodeDiskState objects and ndparams filtering on upgrade.
  def testEmpty(self):
    self.assertEqual(objects.Node().ToDict(), {})
    self.assertTrue(isinstance(objects.Node.FromDict({}), objects.Node))
  def testHvState(self):
    # hv_state values are objects and must survive a ToDict/FromDict cycle.
    node = objects.Node(name="node18157.example.com", hv_state={
      constants.HT_XEN_HVM: objects.NodeHvState(cpu_total=64),
      constants.HT_KVM: objects.NodeHvState(cpu_node=1),
      })
    node2 = objects.Node.FromDict(node.ToDict())
    # Make sure nothing can reference it anymore
    del node
    self.assertEqual(node2.name, "node18157.example.com")
    self.assertEqual(frozenset(node2.hv_state), frozenset([
      constants.HT_XEN_HVM,
      constants.HT_KVM,
      ]))
    self.assertEqual(node2.hv_state[constants.HT_KVM].cpu_node, 1)
    self.assertEqual(node2.hv_state[constants.HT_XEN_HVM].cpu_total, 64)
  def testDiskState(self):
    # disk_state is a two-level mapping (storage type -> name -> state).
    node = objects.Node(name="node32087.example.com", disk_state={
      constants.LD_LV: {
        "lv32352": objects.NodeDiskState(total=128),
        "lv2082": objects.NodeDiskState(total=512),
        },
      })
    node2 = objects.Node.FromDict(node.ToDict())
    # Make sure nothing can reference it anymore
    del node
    self.assertEqual(node2.name, "node32087.example.com")
    self.assertEqual(frozenset(node2.disk_state), frozenset([
      constants.LD_LV,
      ]))
    self.assertEqual(frozenset(node2.disk_state[constants.LD_LV]), frozenset([
      "lv32352",
      "lv2082",
      ]))
    self.assertEqual(node2.disk_state[constants.LD_LV]["lv2082"].total, 512)
    self.assertEqual(node2.disk_state[constants.LD_LV]["lv32352"].total, 128)
  def testFilterEsNdp(self):
    # UpgradeConfig must drop ND_EXCLUSIVE_STORAGE from node-level ndparams
    # while leaving other keys (e.g. ND_SPINDLE_COUNT) untouched.
    node1 = objects.Node(name="node11673.example.com", ndparams={
      constants.ND_EXCLUSIVE_STORAGE: True,
      })
    node2 = objects.Node(name="node11674.example.com", ndparams={
      constants.ND_SPINDLE_COUNT: 3,
      constants.ND_EXCLUSIVE_STORAGE: False,
      })
    self.assertTrue(constants.ND_EXCLUSIVE_STORAGE in node1.ndparams)
    node1.UpgradeConfig()
    self.assertFalse(constants.ND_EXCLUSIVE_STORAGE in node1.ndparams)
    self.assertTrue(constants.ND_EXCLUSIVE_STORAGE in node2.ndparams)
    self.assertTrue(constants.ND_SPINDLE_COUNT in node2.ndparams)
    node2.UpgradeConfig()
    self.assertFalse(constants.ND_EXCLUSIVE_STORAGE in node2.ndparams)
    self.assertTrue(constants.ND_SPINDLE_COUNT in node2.ndparams)
class TestInstancePolicy(unittest.TestCase):
  # Syntax checking and default-filling of instance policies (ipolicy
  # dictionaries holding ISPECS_MINMAX / ISPECS_STD specs).
  def setUp(self):
    # Policies are big, and we want to see the difference in case of an error
    self.maxDiff = None
  # Assert that 'policy' carries every ipolicy key, at least one minmax
  # entry, and that every (sub-)spec has the full parameter set.
  def _AssertIPolicyIsFull(self, policy):
    self.assertEqual(frozenset(policy.keys()), constants.IPOLICY_ALL_KEYS)
    self.assertTrue(len(policy[constants.ISPECS_MINMAX]) > 0)
    for minmax in policy[constants.ISPECS_MINMAX]:
      self.assertEqual(frozenset(minmax.keys()), constants.ISPECS_MINMAX_KEYS)
      for key in constants.ISPECS_MINMAX_KEYS:
        self.assertEqual(frozenset(minmax[key].keys()),
                         constants.ISPECS_PARAMETERS)
    self.assertEqual(frozenset(policy[constants.ISPECS_STD].keys()),
                     constants.ISPECS_PARAMETERS)
  def testDefaultIPolicy(self):
    # The shipped defaults must pass their own syntax check and be complete.
    objects.InstancePolicy.CheckParameterSyntax(constants.IPOLICY_DEFAULTS,
                                                True)
    self._AssertIPolicyIsFull(constants.IPOLICY_DEFAULTS)
  # Assert CheckISpecSyntax rejects 'ipolicy'; with do_check_std=None the
  # rejection must happen for both values of the check_std flag.
  def _AssertPolicyIsBad(self, ipolicy, do_check_std=None):
    if do_check_std is None:
      check_std_vals = [False, True]
    else:
      check_std_vals = [do_check_std]
    for check_std in check_std_vals:
      self.assertRaises(errors.ConfigurationError,
                        objects.InstancePolicy.CheckISpecSyntax,
                        ipolicy, check_std)
  def testCheckISpecSyntax(self):
    default_stdspec = constants.IPOLICY_DEFAULTS[constants.ISPECS_STD]
    # Policies missing minmax entries, min/max keys, or the std spec.
    incomplete_ipolicies = [
      {
       constants.ISPECS_MINMAX: [],
       constants.ISPECS_STD: default_stdspec,
       },
      {
       constants.ISPECS_MINMAX: [{}],
       constants.ISPECS_STD: default_stdspec,
       },
      {
       constants.ISPECS_MINMAX: [{
         constants.ISPECS_MIN: NotImplemented,
         }],
       constants.ISPECS_STD: default_stdspec,
       },
      {
       constants.ISPECS_MINMAX: [{
         constants.ISPECS_MAX: NotImplemented,
         }],
       constants.ISPECS_STD: default_stdspec,
       },
      {
       constants.ISPECS_MINMAX: [{
         constants.ISPECS_MIN: NotImplemented,
         constants.ISPECS_MAX: NotImplemented,
         }],
       },
      ]
    for ipol in incomplete_ipolicies:
      self.assertRaises(errors.ConfigurationError,
                        objects.InstancePolicy.CheckISpecSyntax,
                        ipol, True)
      oldminmax = ipol[constants.ISPECS_MINMAX]
      if oldminmax:
        # Prepending valid specs shouldn't change the error
        ipol[constants.ISPECS_MINMAX] = ([constants.ISPECS_MINMAX_DEFAULTS] +
                                         oldminmax)
        self.assertRaises(errors.ConfigurationError,
                          objects.InstancePolicy.CheckISpecSyntax,
                          ipol, True)
    good_ipolicy = {
      constants.ISPECS_MINMAX: [
        {
          constants.ISPECS_MIN: {
            constants.ISPEC_MEM_SIZE: 64,
            constants.ISPEC_CPU_COUNT: 1,
            constants.ISPEC_DISK_COUNT: 2,
            constants.ISPEC_DISK_SIZE: 64,
            constants.ISPEC_NIC_COUNT: 1,
            constants.ISPEC_SPINDLE_USE: 1,
            },
          constants.ISPECS_MAX: {
            constants.ISPEC_MEM_SIZE: 16384,
            constants.ISPEC_CPU_COUNT: 5,
            constants.ISPEC_DISK_COUNT: 12,
            constants.ISPEC_DISK_SIZE: 1024,
            constants.ISPEC_NIC_COUNT: 9,
            constants.ISPEC_SPINDLE_USE: 18,
            },
          },
        {
          constants.ISPECS_MIN: {
            constants.ISPEC_MEM_SIZE: 32768,
            constants.ISPEC_CPU_COUNT: 8,
            constants.ISPEC_DISK_COUNT: 1,
            constants.ISPEC_DISK_SIZE: 1024,
            constants.ISPEC_NIC_COUNT: 1,
            constants.ISPEC_SPINDLE_USE: 1,
            },
          constants.ISPECS_MAX: {
            constants.ISPEC_MEM_SIZE: 65536,
            constants.ISPEC_CPU_COUNT: 10,
            constants.ISPEC_DISK_COUNT: 5,
            constants.ISPEC_DISK_SIZE: 1024 * 1024,
            constants.ISPEC_NIC_COUNT: 3,
            constants.ISPEC_SPINDLE_USE: 12,
            },
          },
        ],
      }
    good_ipolicy[constants.ISPECS_STD] = copy.deepcopy(
      good_ipolicy[constants.ISPECS_MINMAX][0][constants.ISPECS_MAX])
    # Check that it's really good before making it bad
    objects.InstancePolicy.CheckISpecSyntax(good_ipolicy, True)
    bad_ipolicy = copy.deepcopy(good_ipolicy)
    # Break bad_ipolicy one parameter at a time (delete it, then set min
    # above max); each key is restored before the next iteration, so the
    # policy is intact again at the end (checked by the assert below).
    for minmax in bad_ipolicy[constants.ISPECS_MINMAX]:
      for (key, spec) in minmax.items():
        for param in spec:
          oldv = spec[param]
          del spec[param]
          self._AssertPolicyIsBad(bad_ipolicy)
          if key == constants.ISPECS_MIN:
            spec[param] = minmax[constants.ISPECS_MAX][param] + 1
          self._AssertPolicyIsBad(bad_ipolicy)
          spec[param] = oldv
    assert bad_ipolicy == good_ipolicy
    stdspec = bad_ipolicy[constants.ISPECS_STD]
    for param in stdspec:
      oldv = stdspec[param]
      del stdspec[param]
      self._AssertPolicyIsBad(bad_ipolicy, True)
      # Note that std spec is the same as a max spec
      stdspec[param] = oldv + 1
      self._AssertPolicyIsBad(bad_ipolicy, True)
      stdspec[param] = oldv
    assert bad_ipolicy == good_ipolicy
    # Any existing min or max spec is acceptable as a std spec.
    for minmax in good_ipolicy[constants.ISPECS_MINMAX]:
      for spec in minmax.values():
        good_ipolicy[constants.ISPECS_STD] = spec
        objects.InstancePolicy.CheckISpecSyntax(good_ipolicy, True)
  def testCheckISpecParamSyntax(self):
    par = "my_parameter"
    for check_std in [True, False]:
      # Min and max only
      good_values = [(11, 11), (11, 40), (0, 0)]
      for (mn, mx) in good_values:
        minmax = dict((k, {}) for k in constants.ISPECS_MINMAX_KEYS)
        minmax[constants.ISPECS_MIN][par] = mn
        minmax[constants.ISPECS_MAX][par] = mx
        objects.InstancePolicy._CheckISpecParamSyntax(minmax, {}, par,
                                                      check_std)
      # min > max must always be rejected
      minmax = dict((k, {}) for k in constants.ISPECS_MINMAX_KEYS)
      minmax[constants.ISPECS_MIN][par] = 11
      minmax[constants.ISPECS_MAX][par] = 5
      self.assertRaises(errors.ConfigurationError,
                        objects.InstancePolicy._CheckISpecParamSyntax,
                        minmax, {}, par, check_std)
    # Min, std, max
    good_values = [
      (11, 11, 11),
      (11, 11, 40),
      (11, 40, 40),
      ]
    for (mn, st, mx) in good_values:
      minmax = {
        constants.ISPECS_MIN: {par: mn},
        constants.ISPECS_MAX: {par: mx},
      }
      stdspec = {par: st}
      objects.InstancePolicy._CheckISpecParamSyntax(minmax, stdspec, par, True)
    # (min, std, max, raises?): std outside [min, max] raises only when
    # std checking is requested; min > max always raises.
    bad_values = [
      (11, 11, 5, True),
      (40, 11, 11, True),
      (11, 80, 40, False),
      (11, 5, 40, False,),
      (11, 5, 5, True),
      (40, 40, 11, True),
      ]
    for (mn, st, mx, excp) in bad_values:
      minmax = {
        constants.ISPECS_MIN: {par: mn},
        constants.ISPECS_MAX: {par: mx},
      }
      stdspec = {par: st}
      if excp:
        self.assertRaises(errors.ConfigurationError,
                          objects.InstancePolicy._CheckISpecParamSyntax,
                          minmax, stdspec, par, True)
      else:
        ret = objects.InstancePolicy._CheckISpecParamSyntax(minmax, stdspec,
                                                            par, True)
        self.assertFalse(ret)
  def testCheckDiskTemplates(self):
    invalid = "this_is_not_a_good_template"
    for dt in constants.DISK_TEMPLATES:
      objects.InstancePolicy.CheckDiskTemplates([dt])
    objects.InstancePolicy.CheckDiskTemplates(list(constants.DISK_TEMPLATES))
    # Unknown templates, empty lists and None must all be rejected.
    bad_examples = [
      [invalid],
      [constants.DT_DRBD8, invalid],
      list(constants.DISK_TEMPLATES) + [invalid],
      [],
      None,
      ]
    for dtl in bad_examples:
      self.assertRaises(errors.ConfigurationError,
                        objects.InstancePolicy.CheckDiskTemplates,
                        dtl)
  def testCheckParameterSyntax(self):
    invalid = "this_key_shouldnt_be_here"
    for check_std in [True, False]:
      objects.InstancePolicy.CheckParameterSyntax({}, check_std)
      policy = {invalid: None}
      self.assertRaises(errors.ConfigurationError,
                        objects.InstancePolicy.CheckParameterSyntax,
                        policy, check_std)
      # Non-numeric values for numeric ipolicy parameters must be rejected.
      for par in constants.IPOLICY_PARAMETERS:
        for val in ("blah", None, {}, [42]):
          policy = {par: val}
          self.assertRaises(errors.ConfigurationError,
                            objects.InstancePolicy.CheckParameterSyntax,
                            policy, check_std)
  def testFillIPolicyEmpty(self):
    # Filling an empty diff with the defaults yields exactly the defaults.
    policy = objects.FillIPolicy(constants.IPOLICY_DEFAULTS, {})
    objects.InstancePolicy.CheckParameterSyntax(policy, True)
    self.assertEqual(policy, constants.IPOLICY_DEFAULTS)
  # Every merged spec value must come from diff_spec when present there,
  # otherwise from default_spec.
  def _AssertISpecsMerged(self, default_spec, diff_spec, merged_spec):
    for (param, value) in merged_spec.items():
      if param in diff_spec:
        self.assertEqual(value, diff_spec[param])
      else:
        self.assertEqual(value, default_spec[param])
  # Same idea one level up: diff values win; ISPECS_STD is merged per-param.
  def _AssertIPolicyMerged(self, default_pol, diff_pol, merged_pol):
    for (key, value) in merged_pol.items():
      if key in diff_pol:
        if key == constants.ISPECS_STD:
          self._AssertISpecsMerged(default_pol[key], diff_pol[key], value)
        else:
          self.assertEqual(value, diff_pol[key])
      else:
        self.assertEqual(value, default_pol[key])
  def testFillIPolicy(self):
    # One partial policy per overridable key.
    partial_policies = [
      {constants.IPOLICY_VCPU_RATIO: 3.14},
      {constants.IPOLICY_SPINDLE_RATIO: 2.72},
      {constants.IPOLICY_DTS: [constants.DT_FILE]},
      {constants.ISPECS_STD: {constants.ISPEC_DISK_COUNT: 3}},
      {constants.ISPECS_MINMAX: [constants.ISPECS_MINMAX_DEFAULTS,
                                 constants.ISPECS_MINMAX_DEFAULTS]}
      ]
    for diff_pol in partial_policies:
      policy = objects.FillIPolicy(constants.IPOLICY_DEFAULTS, diff_pol)
      objects.InstancePolicy.CheckParameterSyntax(policy, True)
      self._AssertIPolicyIsFull(policy)
      self._AssertIPolicyMerged(constants.IPOLICY_DEFAULTS, diff_pol, policy)
  def testFillIPolicyKeepsUnknown(self):
    # FillIPolicy must not silently drop keys it does not know about.
    INVALID_KEY = "invalid_ipolicy_key"
    diff_pol = {
      INVALID_KEY: None,
      }
    policy = objects.FillIPolicy(constants.IPOLICY_DEFAULTS, diff_pol)
    self.assertTrue(INVALID_KEY in policy)
# Standard ganeti test entry point: runs every TestCase in this module.
if __name__ == "__main__":
  testutils.GanetiTestProgram()
| narurien/ganeti-ceph | test/py/ganeti.objects_unittest.py | Python | gpl-2.0 | 24,989 |
import base64
import binascii
import json
from Crypto.Cipher import AES
from lib.pyaes import Encrypter, AESModeOfOperationCBC
# NOTE(review): hard-coded sample inputs for the encryption comparison
# below; the origin/meaning of these particular values is not shown here.
partner_id = 918
domain_id = 515601
_mw_adb = False
video_token = 'add4fb87365dc508'
# AES key as a hex string (decoded with a2b_hex before use below)
e = '617adae21a8aedc4e13938619b62f4ecdd3b947cd64620569df257d333e4f11d'
def pad(s):
    """Pad *s* up to a multiple of 16 bytes, PKCS#7-style.

    The pad length (1..16) is encoded in each padding character, so a
    string whose length is already a multiple of 16 gains a full block.
    """
    padding = 16 - len(s) % 16
    return s + chr(padding) * padding
def unpad(s):
    """Strip the padding applied by pad(); the last byte encodes its length."""
    return s[:-ord(s[-1])]
class EncryptedData:
    """Bare attribute container serializable to compact, key-sorted JSON."""
    def __init__(self):
        pass
    def toJSON(self):
        """Serialize every instance attribute as a compact JSON object."""
        return json.dumps(self, sort_keys=True, separators=(',', ':'),
                          default=lambda obj: obj.__dict__)
# Build the payload object; fields a..f mirror what toJSON() will emit.
# NOTE(review): the meaning of the single-letter field names is not
# documented anywhere in this script.
t = EncryptedData()
t.a = partner_id
t.b = domain_id
t.c = _mw_adb
t.d = "8156109e46b295466542f3587f35f0fe"
t.e = video_token
t.f = "Mozilla/5.0"
# IV as a hex string (16 bytes once decoded)
n = 'c46b534f9def34b0f2040a503d978eed'
r = t.toJSON()
key = e
iv = n
line = ""
# Encrypt the same JSON payload twice -- once with the pure-python pyaes
# CBC implementation, once with PyCrypto -- and print both base64 results
# so they can be compared.
encr = AESModeOfOperationCBC(binascii.a2b_hex(key), binascii.a2b_hex(iv))
encrypter = Encrypter(encr)
ciphertext = bytes()
ciphertext += encrypter.feed(r)
# feed() with no argument finalizes the stream (flushes the last block)
ciphertext += encrypter.feed()
encryptor = AES.new(binascii.a2b_hex(key), AES.MODE_CBC, binascii.a2b_hex(iv))
encrypted2 = encryptor.encrypt(pad(r))
print ("Cipher1 (CBC): ")
print (base64.standard_b64encode(ciphertext))
print ("Cipher2 (CBC): ")
print (base64.standard_b64encode(encrypted2))
| seppius-xbmc-repo/ru | plugin.video.baskino.com/test.py | Python | gpl-2.0 | 1,321 |
# homogeneous equilibrium of a gas.
# NOTE: legacy Cantera 1.x Python 2 script (star import, print statements);
# the modern interface is "import cantera as ct".
from Cantera import *
# create an object representing the gas phase
gas = importPhase("h2o2.cti")
# set the initial state
gas.set(T = 2000.0, P = 0.1*OneAtm, X = "H2:1.0, O2:0.6")
# equilibrate the gas holding T and P fixed
gas.equilibrate("TP")
# print a summary of the results
print gas
# Individual properties can also be retrieved...
x = gas.moleFractions()
y = gas.massFractions()
mu = gas.chemPotentials()
names = gas.speciesNames()
# one row per species: name, mole fraction, mass fraction, chem. potential
for n in range(gas.nSpecies()):
    print "%20s %10.4g %10.4g %10.4g " % (names[n], x[n], y[n], mu[n])
| HyperloopTeam/FullOpenMDAO | cantera-2.0.2/samples/python/equilibrium/simple_test/simple.py | Python | gpl-2.0 | 589 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2016 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Pytest configuration."""
import pytest
import httpretty
@pytest.yield_fixture
def httppretty_mock():
    """Enable httpretty HTTP interception for the test, disabling it afterwards."""
    httpretty.enable()
    yield
    httpretty.disable()
| inspirehep/invenio-grobid | tests/conftest.py | Python | gpl-2.0 | 1,137 |
#
# Copyright (C) 2006-2019 Nexedi SA
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os, socket
from binascii import a2b_hex, b2a_hex
from datetime import timedelta, datetime
from hashlib import sha1
from Queue import deque
from struct import pack, unpack, Struct
from time import gmtime
# https://stackoverflow.com/a/6163157
def nextafter():
    """Self-replacing stub: rebinds the module-level name 'nextafter' to
    libm's nextafter(3) via ctypes.

    After the call below, nextafter(x, y) returns the next representable
    double after x in the direction of y.  A sanity check against time()
    verifies that the result is the immediate float neighbour.
    """
    global nextafter
    from ctypes import CDLL, util as ctypes_util, c_double
    from time import time
    _libm = CDLL(ctypes_util.find_library('m'))
    nextafter = _libm.nextafter
    nextafter.restype = c_double
    nextafter.argtypes = c_double, c_double
    x = time()
    y = nextafter(x, float('inf'))
    # y must be strictly greater and adjacent: no double lies between them
    assert x < y and (x+y)/2 in (x,y), (x, y)
nextafter()
TID_LOW_OVERFLOW = 2**32
TID_LOW_MAX = TID_LOW_OVERFLOW - 1
SECOND_FROM_UINT32 = 60. / TID_LOW_OVERFLOW
MICRO_FROM_UINT32 = 1e6 / TID_LOW_OVERFLOW
TID_CHUNK_RULES = (
(-1900, 0),
(-1, 12),
(-1, 31),
(0, 24),
(0, 60),
)
def tidFromTime(tm):
    """Convert a Unix timestamp into a packed 64-bit TID."""
    parts = gmtime(tm)
    # seconds plus the sub-second remainder, rescaled to 60:2**32
    fraction = parts.tm_sec + (tm - int(tm))
    return packTID(
        (parts.tm_year, parts.tm_mon, parts.tm_mday, parts.tm_hour,
         parts.tm_min),
        int(fraction / SECOND_FROM_UINT32))
def packTID(higher, lower):
    """
    higher: a 5-tuple containing year, month, day, hour and minute
    lower: seconds scaled to 60:2**32 into a 64 bits TID
    """
    assert len(higher) == len(TID_CHUNK_RULES), higher
    packed = 0
    # Mixed-radix encoding: fold each calendar field into 'packed' using
    # the per-field offset and radix from TID_CHUNK_RULES.
    for chunk, (offset, radix) in zip(higher, TID_CHUNK_RULES):
        assert isinstance(chunk, (int, long)), chunk
        chunk += offset
        assert 0 <= chunk, (chunk, offset, radix)
        assert radix == 0 or chunk < radix, (chunk, offset, radix)
        packed = packed * radix + chunk
    # If the machine is configured in such way that gmtime() returns leap
    # seconds (e.g. TZ=right/UTC), then the best we can do is to use
    # TID_LOW_MAX, because TID format was not designed to support them.
    # For more information about leap seconds on Unix, see:
    # https://en.wikipedia.org/wiki/Unix_time
    # http://www.madore.org/~david/computers/unix-leap-seconds.html
    return pack('!LL', packed, min(lower, TID_LOW_MAX))
def unpackTID(ptid):
    """
    Unpack given 64 bits TID in to a 2-tuple containing:
    - a 5-tuple containing year, month, day, hour and minute
    - seconds scaled to 60:2**32
    """
    packed_higher, lower = unpack('!LL', ptid)
    # Undo the mixed-radix encoding of packTID, least-significant field first.
    parts = []
    for offset, radix in reversed(TID_CHUNK_RULES):
        if radix:
            packed_higher, chunk = divmod(packed_higher, radix)
        else:
            chunk = packed_higher
            packed_higher = 0
        parts.append(chunk - offset)
    parts.reverse()
    return (tuple(parts), lower)
def datetimeFromTID(tid):
    """Convert a packed 64-bit TID into a datetime."""
    higher, lower = unpackTID(tid)
    # Scale the 60:2**32 fraction back into whole seconds and microseconds.
    seconds, lower = divmod(lower * 60, TID_LOW_OVERFLOW)
    return datetime(*(higher + (seconds, int(lower * MICRO_FROM_UINT32))))
def addTID(ptid, offset):
    """
    Offset given packed TID.
    """
    higher, lower = unpackTID(ptid)
    high_offset, lower = divmod(lower + offset, TID_LOW_OVERFLOW)
    if high_offset:
        # Carry overflowed minutes into the date part via datetime arithmetic.
        d = datetime(*higher) + timedelta(0, 60 * high_offset)
        higher = (d.year, d.month, d.day, d.hour, d.minute)
    return packTID(higher, lower)
# p64: pack a Python integer into 8 big-endian bytes ('!Q').
# u64: unpack 8 big-endian bytes back into a Python integer.
p64, u64 = (lambda unpack: (
    unpack.__self__.pack,
    lambda s: unpack(s)[0]
))(Struct('!Q').unpack)
def add64(packed, offset):
    """Add a python number to a 64-bits packed value"""
    return p64(offset + u64(packed))
def dump(s):
    """Dump a binary string in hex; non-bytes values fall back to repr().

    None is passed through unchanged.
    """
    if s is None:
        return None
    if isinstance(s, bytes):
        return b2a_hex(s)
    return repr(s)
def bin(s):
    """Inverse of dump method (hex string back to bytes; None passes through)."""
    if s is None:
        return None
    return a2b_hex(s)
def makeChecksum(s):
    """Return a 20-byte checksum against a string."""
    # SHA-1 digests are always 20 bytes long.
    digest = sha1(s)
    return digest.digest()
def parseNodeAddress(address, port_opt=None):
    """Parse 'host:port', '[ipv6]:port' or bare 'host' into (host, port).

    port_opt is used whenever the address carries no explicit port.
    """
    if address[:1] == '[':
        # Bracketed IPv6 literal, optionally followed by ':port'.
        host, port = address[1:].split(']')
        port = port[1:] if port[:1] == ':' else port_opt
    elif address.count(':') == 1:
        host, port = address.split(':')
    else:
        host = address
        port = port_opt
    # Resolve (maybe) and cast to canonical form
    # XXX: Always pick the first result. This might not be what is desired, and
    # if so this function should either take a hint on the desired address type
    # or return either raw host & port or getaddrinfo return value.
    return socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)[0][4][:2]
def parseMasterList(masters):
    """Parse a whitespace-separated string of master addresses.

    Returns a list of (host, port) tuples.  An explicit list comprehension
    (rather than map()) guarantees a reusable list on Python 3 too, where
    map() yields a one-shot iterator.
    """
    return [parseNodeAddress(addr) for addr in masters.split()]
class cached_property(object):
    """
    A property that is only computed once per instance and then replaces itself
    with an ordinary attribute. Deleting the attribute resets the property.
    """
    def __init__(self, func):
        self.__doc__ = func.__doc__
        self.func = func
    def __get__(self, obj, cls):
        # Class-level access returns the descriptor itself.
        if obj is None:
            return self
        # Compute once, then shadow the descriptor with a plain attribute
        # so later reads bypass __get__ entirely.
        result = self.func(obj)
        obj.__dict__[self.func.__name__] = result
        return result
# This module is always imported before multiprocessing is used, and the
# main process does not want to change name when task are run in threads.
# PID of the process that imported this module; used to skip retitling it.
spt_pid = os.getpid()
def setproctitle(title):
    """Set the OS process title, but only in child processes.

    Does nothing in the main process (same PID as at import time) and
    silently does nothing when the optional setproctitle package is
    missing.
    """
    global spt_pid
    pid = os.getpid()
    if spt_pid == pid:
        return
    spt_pid = pid
    # Try using https://pypi.org/project/setproctitle/
    try:
        # On Linux, this is done by clobbering argv, and the main process
        # usually has a longer command line than the title of subprocesses.
        os.environ['SPT_NOENV'] = '1'
        from setproctitle import setproctitle
    except ImportError:
        return
    finally:
        del os.environ['SPT_NOENV']
    setproctitle(title)
| Nexedi/neoppod | neo/lib/util.py | Python | gpl-2.0 | 6,503 |
# -*- coding: utf-8 -*-
'''
Exodus Add-on
Copyright (C) 2016 Exodus
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import urllib,json,time
from resources.lib.modules import cache
from resources.lib.modules import control
from resources.lib.modules import client
def rdAuthorize():
    """Run the RealDebrid OAuth2 device-code flow and store the credentials.

    Shows the verification URL and user code in a progress dialog, polls
    the credentials endpoint until the user has authorized the device,
    then exchanges the device code for access/refresh tokens and saves
    everything to the addon settings.  Always ends by (re)opening the
    settings screen.
    """
    try:
        CLIENT_ID = 'X245A4XAIBGVM'
        USER_AGENT = 'Kodi Exodus/3.0'
        # Already configured: offer to reset the stored credentials, then
        # jump straight to the except-branch (settings screen) either way.
        if not '' in credentials()['realdebrid'].values():
            if control.yesnoDialog(control.lang(30411).encode('utf-8'), control.lang(30413).encode('utf-8'), '', 'RealDebrid', control.lang(30415).encode('utf-8'), control.lang(30414).encode('utf-8')):
                control.setSetting(id='realdebrid.id', value='')
                control.setSetting(id='realdebrid.secret', value='')
                control.setSetting(id='realdebrid.token', value='')
                control.setSetting(id='realdebrid.refresh', value='')
                control.setSetting(id='realdebrid.auth', value='')
            raise Exception()
        headers = {'User-Agent': USER_AGENT}
        # Step 1: obtain a device code plus the URL/code the user must enter.
        url = 'https://api.real-debrid.com/oauth/v2/device/code?client_id=%s&new_credentials=yes' % (CLIENT_ID)
        result = client.request(url, headers=headers)
        result = json.loads(result)
        verification_url = control.lang(30416).encode('utf-8') + '[COLOR skyblue]%s[/COLOR]' % (result['verification_url'])
        user_code = control.lang(30417).encode('utf-8') + '[COLOR skyblue]%s[/COLOR]' % (result['user_code'])
        device_code = result['device_code']
        interval = result['interval']
        progressDialog = control.progressDialog
        progressDialog.create('RealDebrid', verification_url, user_code)
        # Step 2: poll every 'interval' seconds (up to an hour) until the
        # user authorizes the device or cancels the dialog.
        for i in range(0, 3600):
            try:
                if progressDialog.iscanceled(): break
                time.sleep(1)
                if not float(i) % interval == 0: raise Exception()
                url = 'https://api.real-debrid.com/oauth/v2/device/credentials?client_id=%s&code=%s' % (CLIENT_ID, device_code)
                result = client.request(url, headers=headers, error=True)
                result = json.loads(result)
                if 'client_secret' in result: break
            except:
                pass
        try: progressDialog.close()
        except: pass
        # Step 3: exchange the device code for access and refresh tokens.
        id, secret = result['client_id'], result['client_secret']
        url = 'https://api.real-debrid.com/oauth/v2/token'
        post = urllib.urlencode({'client_id': id, 'client_secret': secret, 'code': device_code, 'grant_type': 'http://oauth.net/grant_type/device/1.0'})
        result = client.request(url, post=post, headers=headers)
        result = json.loads(result)
        token, refresh = result['access_token'], result['refresh_token']
        control.setSetting(id='realdebrid.id', value=id)
        control.setSetting(id='realdebrid.secret', value=secret)
        control.setSetting(id='realdebrid.token', value=token)
        control.setSetting(id='realdebrid.refresh', value=refresh)
        control.setSetting(id='realdebrid.auth', value='*************')
        # Deliberate: fall through to the settings screen on success too.
        raise Exception()
    except:
        control.openSettings('3.13')
def rdDict():
    """Return the lowercased host list supported by RealDebrid, or [] on
    missing credentials or any failure."""
    try:
        if '' in credentials()['realdebrid'].values(): raise Exception()
        url = 'https://api.real-debrid.com/rest/1.0/hosts/domains'
        response = cache.get(client.request, 24, url)
        return [host.lower() for host in json.loads(response)]
    except:
        return []
def pzDict():
    """Return the lowercased host list supported by Premiumize, or [] on
    missing credentials or any failure."""
    try:
        if '' in credentials()['premiumize'].values(): raise Exception()
        account = credentials()['premiumize']
        user, password = account['user'], account['pass']
        url = 'http://api.premiumize.me/pm-api/v1.php?method=hosterlist&params[login]=%s&params[pass]=%s' % (user, password)
        response = cache.get(client.request, 24, url)
        return [host.lower() for host in json.loads(response)['result']['hosterlist']]
    except:
        return []
def adDict():
    """Return the lowercased host list supported by AllDebrid, or [] on
    missing credentials or any failure."""
    try:
        if '' in credentials()['alldebrid'].values(): raise Exception()
        url = 'http://alldebrid.com/api.php?action=get_host'
        response = cache.get(client.request, 24, url)
        # The endpoint returns a bare comma-separated body; wrap it in
        # brackets so it parses as a JSON array.
        return [host.lower() for host in json.loads('[%s]' % response)]
    except:
        return []
def rpDict():
    """Return the lowercased host list supported by rpnet, or [] on
    missing credentials or any failure."""
    try:
        if '' in credentials()['rpnet'].values(): raise Exception()
        url = 'http://premium.rpnet.biz/hoster2.json'
        response = cache.get(client.request, 24, url)
        return [host.lower() for host in json.loads(response)['supported']]
    except:
        return []
def debridDict():
    """Aggregate the supported-host list of every debrid provider."""
    providers = {}
    providers['realdebrid'] = rdDict()
    providers['premiumize'] = pzDict()
    providers['alldebrid'] = adDict()
    providers['rpnet'] = rpDict()
    return providers
def credentials():
    """Read every provider's stored credentials from the addon settings.

    Returns a dict keyed by provider name; empty-string values mean the
    user has not configured that provider.
    """
    realdebrid = {
        'id': control.setting('realdebrid.id'),
        'secret': control.setting('realdebrid.secret'),
        'token': control.setting('realdebrid.token'),
        'refresh': control.setting('realdebrid.refresh')
    }
    # note: premiumize stores its password under the 'pin' setting,
    # rpnet under 'api' — the keys returned here normalize both to 'pass'
    premiumize = {
        'user': control.setting('premiumize.user'),
        'pass': control.setting('premiumize.pin')
    }
    alldebrid = {
        'user': control.setting('alldebrid.user'),
        'pass': control.setting('alldebrid.pass')
    }
    rpnet = {
        'user': control.setting('rpnet.user'),
        'pass': control.setting('rpnet.api')
    }
    return {
        'realdebrid': realdebrid,
        'premiumize': premiumize,
        'alldebrid': alldebrid,
        'rpnet': rpnet
    }
def status():
    """Return True when at least one provider has complete credentials."""
    try:
        for creds in credentials().values():
            # a provider counts as configured only if none of its
            # settings are the empty string
            if '' not in creds.values():
                return True
        return False
    except:
        return False
def resolver(url, debrid):
    """Resolve a hoster link into a direct download url via a debrid service.

    @param url: original hoster link
    @param debrid: provider name ('realdebrid', 'premiumize', 'alldebrid',
                   'rpnet') or True to try every configured provider in turn
    @return direct download url string on success, None otherwise

    Each provider is attempted in its own best-effort try block; a provider
    that is not selected, not configured, or fails simply falls through to
    the next one.
    """
    u = url
    # filefactory stream links must be rewritten to plain file links first
    u = u.replace('filefactory.com/stream/', 'filefactory.com/file/')
    try:
        if not debrid == 'realdebrid' and not debrid == True: raise Exception()
        if '' in credentials()['realdebrid'].values(): raise Exception()
        id, secret, token, refresh = credentials()['realdebrid']['id'], credentials()['realdebrid']['secret'], credentials()['realdebrid']['token'], credentials()['realdebrid']['refresh']
        USER_AGENT = 'Kodi Exodus/3.0'
        post = urllib.urlencode({'link': u})
        headers = {'Authorization': 'Bearer %s' % token, 'User-Agent': USER_AGENT}
        url = 'https://api.real-debrid.com/rest/1.0/unrestrict/link'
        result = client.request(url, post=post, headers=headers, error=True)
        result = json.loads(result)
        if 'error' in result and result['error'] == 'bad_token':
            # access token expired: refresh it through the oauth device flow,
            # then retry the unrestrict call with the new bearer token
            result = client.request('https://api.real-debrid.com/oauth/v2/token', post=urllib.urlencode({'client_id': id, 'client_secret': secret, 'code': refresh, 'grant_type': 'http://oauth.net/grant_type/device/1.0'}), headers={'User-Agent': USER_AGENT}, error=True)
            result = json.loads(result)
            if 'error' in result: return
            headers['Authorization'] = 'Bearer %s' % result['access_token']
            result = client.request(url, post=post, headers=headers)
            result = json.loads(result)
        url = result['download']
        return url
    except:
        pass
    try:
        if not debrid == 'premiumize' and not debrid == True: raise Exception()
        if '' in credentials()['premiumize'].values(): raise Exception()
        user, password = credentials()['premiumize']['user'], credentials()['premiumize']['pass']
        # FIX: the query separators had been mojibake'd ('\xb6ms', a pilcrow
        # produced from '&para'); restored the intended '&params[...]' form.
        url = 'http://api.premiumize.me/pm-api/v1.php?method=directdownloadlink&params[login]=%s&params[pass]=%s&params[link]=%s' % (user, password, urllib.quote_plus(u))
        result = client.request(url, close=False)
        url = json.loads(result)['result']['location']
        return url
    except:
        pass
    try:
        if not debrid == 'alldebrid' and not debrid == True: raise Exception()
        if '' in credentials()['alldebrid'].values(): raise Exception()
        user, password = credentials()['alldebrid']['user'], credentials()['alldebrid']['pass']
        # alldebrid has no token API here: log in, keep the session cookie,
        # and pass it along with the generated link for playback
        login_data = urllib.urlencode({'action': 'login', 'login_login': user, 'login_password': password})
        login_link = 'http://alldebrid.com/register/?%s' % login_data
        cookie = client.request(login_link, output='cookie', close=False)
        url = 'http://www.alldebrid.com/service.php?link=%s' % urllib.quote_plus(u)
        result = client.request(url, cookie=cookie, close=False)
        url = client.parseDOM(result, 'a', ret='href', attrs = {'class': 'link_dl'})[0]
        url = client.replaceHTMLCodes(url)
        url = '%s|Cookie=%s' % (url, urllib.quote_plus(cookie))
        return url
    except:
        pass
    try:
        if not debrid == 'rpnet' and not debrid == True: raise Exception()
        if '' in credentials()['rpnet'].values(): raise Exception()
        user, password = credentials()['rpnet']['user'], credentials()['rpnet']['pass']
        login_data = urllib.urlencode({'username': user, 'password': password, 'action': 'generate', 'links': u})
        login_link = 'http://premium.rpnet.biz/client_api.php?%s' % login_data
        result = client.request(login_link, close=False)
        result = json.loads(result)
        url = result['links'][0]['generated']
        return url
    except:
        return
| AMOboxTV/AMOBox.LegoBuild | plugin.video.exodus/resources/lib/modules/debrid.py | Python | gpl-2.0 | 9,950 |
#@file sql_tokenizer.py - Parsing commands
from pyparsing import *
import re
class sql_tokenizer:
def AK_parse_grant(self, string):
'''
@author Boris Kisic, edited by Zvonimir Kapes
@brief sql parsing of GRANT command
@param string sql command as string
@return if command is successfully parsed returns list of tokens, else returns error message as string
'''
ident = Word( alphas, alphanums + "_$" ).setName("identifier")
grantToken = Keyword("grant", caseless=True)
onToken = Keyword("on", caseless=True)
toToken = Keyword("to", caseless=True)
withToken = Keyword("with", caseless=True)
privilege = delimitedList(ident, ",", combine=True)
privilegeList = Group(delimitedList(privilege))
table = delimitedList(ident, ",", combine=True)
tableList = Group(delimitedList(table))
userName = delimitedList(ident, ",", combine=True)
userNameList = Group(delimitedList(userName))
groupToken = Keyword("group", caseless=True).setResultsName("group")
grantStmt = Forward()
grantStmt << (grantToken + privilegeList.setResultsName("privileges") +
onToken + tableList.setResultsName("tables") +
toToken + Optional(groupToken) + userNameList.setResultsName("users") +
Optional(withToken + restOfLine.setResultsName("grantOption")))
try:
tokens = grantStmt.parseString( string )
except ParseException, err:
return " "*err.loc + "^\n" + err.msg
print
return tokens
    def AK_parse_drop(self, string):
        '''
        @autor mparavac
        @brief sql parsing of DROP command
        @param string sql command as string
        @return if command is successfully parsed returns list of tokens, else returns error message as string
        '''
        ident = Word( alphas, alphanums + "_$" ).setName("identifier")
        dropToken=Keyword("drop", caseless=True)
        # every object kind that may follow DROP
        objectToken=( Keyword("table", caseless=True) | Keyword("user", caseless=True) | Keyword("column", caseless=True) |\
                      Keyword("index", caseless=True) | Keyword("sequence", caseless=True) |\
                      Keyword("function", caseless=True) | Keyword("procedure", caseless=True) |\
                      Keyword("schema", caseless=True) | Keyword("trigger", caseless=True) | Keyword("role", caseless=True)
                    )
        onToken=Keyword("on", caseless=True)
        # optional modifiers: TEMPORARY, IF EXISTS, CASCADE/RESTRICT
        optionalToken1= Keyword("temporary", caseless=True)
        optionalToken2= Keyword("if", caseless=True) + Keyword("exists", caseless=True)
        optionalToken3= Keyword("cascade", caseless=True) | Keyword("restrict", caseless=True)
        objectName = delimitedList( ident, ",", combine=True )
        objectNameList = Group( delimitedList( objectName ) )
        # NOTE(review): objectName2 / optionalName / sequenceName are built with
        # Upcase, but the *List variables below reuse plain objectName, so the
        # upper-cased variants appear unused — looks unintentional; confirm
        # before changing.
        objectName2 = Upcase( delimitedList( ident, ",", combine=True ) )
        objectNameList2 = Group( delimitedList( objectName ) )
        optionalName = Upcase( delimitedList( ident, ",", combine=True ) )
        optionalNameList = Group( delimitedList( objectName ) )
        sequenceName = Upcase( delimitedList( ident, ",", combine=True ) )
        sequenceNameList = Group( delimitedList( sequenceName ) )
        dropStmt=Forward()
        # DROP [TEMPORARY] <object> [IF EXISTS] <name> [CASCADE|RESTRICT] [ON <name2>]
        dropStmt << ( dropToken + Optional(optionalToken1.setResultsName("opcija1")) + objectToken.setResultsName("objekt") +\
                      Optional(optionalToken2.setResultsName("opcija2")) + objectNameList.setResultsName("ime_objekta") +\
                      Optional(optionalToken3.setResultsName("opcija3")) + Optional(onToken.setResultsName("onToken") +\
                      Optional(objectNameList2.setResultsName("ime_objekta2")))
                    )
        try:
            tokens = dropStmt.parseString( string )
        except ParseException, err:
            return " "*err.loc + "^\n" + err.msg
        print
        return tokens
def AK_alter_table(self, string):
'''
@autor Krunoslav Bilic
@brief sql parsing of ALTER TABLE command (ref. http://www.w3schools.com/sql/sql_alter.asp )
@param string sql command as string
@return if command is successfully parsed returns list of tokens, else returns error message as string
'''
LPAR, RPAR, COMMA = map(Suppress,"(),")
#keywords
alterToken = Keyword("alter",caseless=True).setResultsName("statement")
addToken = Keyword("add",caseless=True).setResultsName("operation")
dropToken = Keyword("drop",caseless=True).setResultsName("operation")
tableToken = Keyword("table",caseless=True)
columnToken = Keyword("column",caseless=True)
#definiranje gramatike
slozeni = LPAR + ZeroOrMore(CharsNotIn(")")) + RPAR
jednostavni = Word(alphas,alphanums+"_\"':-")
#dohvacanje naziva tablice i stupca
table_name = jednostavni.copy().setResultsName("table_name")
column_name = jednostavni.copy().setResultsName("column_name")
#definiranje tipa podataka iz add naredbe
field_def = OneOrMore(jednostavni | slozeni)
field_list_def = delimitedList(field_def).setResultsName("data_type")
#definiranje osnove upita te operacija dodavanja i uklanjanja stupca tablice
alter_core = (alterToken + tableToken + table_name)
adding = (addToken + column_name + field_list_def)
dropping = (dropToken + columnToken + column_name)
#definiranje same naredbe
alter_stmt = Forward()
alter_def = (dropping) | (adding)
alter_stmt << (alter_core + alter_def)
try:
tokens = alter_stmt.parseString( string )
except ParseException, err:
return " "*err.loc + "^\n" + err.msg
print
return tokens
def AK_parse_createIndex(self, string):
'''
@autor Domagoj Tulicic
@brief sql parsing of CREATE INDEX command
@param string sql command as string
@return if command is successfully parsed returns list of tokens, else returns error message as string
'''
ident = Word( alphas, alphanums + "_$").setName("identifier")
nazivIndexa = Word( alphas, alphanums + "_$").setName("identifier")
nazivTablice = Word( alphas, alphanums + "_$").setName("identifier")
createToken = Keyword("create", caseless=True)
indexToken = Keyword("index", caseless = True)
onToken = Keyword("on", caseless = True)
usingToken = Keyword("using", caseless=True)
column = delimitedList(ident, ",", combine=True)
columnList = Group(delimitedList(column))
lzagrada = Suppress("(")
dzagrada = Suppress(")")
createIndexStmt = Forward()
createIndexStmt << (createToken + indexToken + nazivIndexa.setResultsName("IndexIme") + onToken +
nazivTablice.setResultsName("tablica") + lzagrada + columnList.setResultsName("stupci") + dzagrada +
usingToken + restOfLine.setResultsName("IndexVrsta"))
try:
tokens = createIndexStmt.parseString( string )
except ParseException, err:
return " "*err.loc + "^\n" + err.msg
print
return tokens
    def AK_create_sequence(self, string):
        '''
        @autor Domagoj Tulicic modified by Danko Sacer
        @brief sql parsing of CREATE SEQUENCE command
        @param string sql command as string
        @return if command is successfully parsed returns list of tokens, else returns error message as string
        '''
        LPAR, RPAR, COMMA = map(Suppress,"(),")
        (CREATE, SEQUENCE, AS, START, WITH, INCREMENT, BY, MINVALUE, MAXVALUE, CACHE ) = map(CaselessKeyword, """CREATE, SEQUENCE, AS, START, WITH, INCREMENT, BY, MINVALUE, MAXVALUE, CACHE""".replace(",","").split())
        keyword = MatchFirst((CREATE, SEQUENCE, AS, START, WITH, INCREMENT, BY, MINVALUE, MAXVALUE, CACHE))
        cycleToken = Keyword("cycle", caseless=True).setResultsName("cycle")
        # identifiers must not collide with the keywords above
        identifier = ~keyword + Word(alphas, alphanums+"_")
        identifier2 = ~keyword + Word(nums)
        sequence_name = identifier.copy().setResultsName("sekvenca")
        as_value = identifier.copy().setResultsName("as_value")
        min_value = identifier2.copy().setResultsName("min_value")
        max_value = identifier2.copy().setResultsName("max_value")
        start_with = identifier2.copy().setResultsName("start_with")
        increment_by = identifier2.copy().setResultsName("increment_by")
        cache_value = identifier2.copy().setResultsName("cache")
        sequence_stmt = Forward()
        # every clause is optional; sentinel defaults ("no start", "no minvalue",
        # "no maxvalue", "bigint", "1", "15", "no cycle") mark clauses the user
        # omitted so they can be filled in by the post-processing below
        sequence_stmt << (CREATE + SEQUENCE + sequence_name +\
             (Optional((AS),default=AS) + Optional((as_value),default="bigint")) +\
             (Optional((START), default=START) + Optional((WITH),default=WITH) +\
              Optional((start_with),default="no start")) +\
             (Optional((INCREMENT),default=INCREMENT) + Optional((BY),default=BY) +\
              Optional((increment_by),default="1")) +\
             (Optional((MINVALUE),default=MINVALUE) +\
              Optional((min_value),default="no minvalue")) +\
             (Optional((MAXVALUE),default=MAXVALUE) +\
              Optional((max_value),default="no maxvalue")) +\
             (Optional((CACHE),default=CACHE) +\
              Optional((cache_value),default="15")) +\
             Optional((cycleToken),default="no cycle"))
        try:
            tokens = sequence_stmt.parseString(string)
        except ParseException, err:
            return " "*err.loc + "^\n" + err.msg
        print
        # set min, max and start default values based on the sequence type;
        # explicitly supplied values are unwrapped from their one-element
        # ParseResults (the [0] indexing below)
        if(tokens.as_value[0]=="smallint"):
            if(tokens.min_value=="no minvalue"):
                tokens.min_value = "-32768"
                if(tokens.start_with=="no start"):
                    tokens.start_with = tokens.min_value
                else:
                    tokens.start_with = tokens.start_with[0]
            else:
                tokens.min_value = tokens.min_value[0]
                if(tokens.start_with=="no start"):
                    tokens.start_with = tokens.min_value[0]
                else:
                    tokens.start_with = tokens.start_with[0]
            if(tokens.max_value=="no maxvalue"):
                tokens.max_value = "32767"
            else:
                tokens.max_value = tokens.max_value[0]
        elif(tokens.as_value[0]=="int"):
            if(tokens.min_value=="no minvalue"):
                tokens.min_value = "-2147483648"
                if(tokens.start_with=="no start"):
                    tokens.start_with = tokens.min_value
                else:
                    tokens.start_with = tokens.start_with[0]
            else:
                tokens.min_value = tokens.min_value[0]
                if(tokens.start_with=="no start"):
                    tokens.start_with = tokens.min_value[0]
                else:
                    tokens.start_with = tokens.start_with[0]
            if(tokens.max_value=="no maxvalue"):
                tokens.max_value = "2147483647"
            else:
                tokens.max_value = tokens.max_value[0]
        # as_value may be the default plain string "bigint" or a parsed token,
        # hence the double check here
        elif(tokens.as_value[0]=="bigint" or tokens.as_value=="bigint"):
            if(tokens.min_value=="no minvalue"):
                tokens.min_value = "-9223372036854775808"
                if(tokens.start_with=="no start"):
                    tokens.start_with = tokens.min_value
                else:
                    tokens.start_with = tokens.start_with[0]
            else:
                tokens.min_value = tokens.min_value[0]
                if(tokens.start_with=="no start"):
                    tokens.start_with = tokens.min_value[0]
                else:
                    tokens.start_with = tokens.start_with[0]
            if(tokens.max_value=="no maxvalue"):
                tokens.max_value = "9223372036854775807"
            else:
                tokens.max_value = tokens.max_value[0]
        elif(tokens.as_value[0]=="tinyint" or tokens.as_value[0]=="numeric" or tokens.as_value[0]=="decimal"):
            if(tokens.min_value=="no minvalue"):
                tokens.min_value = "0"
                if(tokens.start_with=="no start"):
                    tokens.start_with = tokens.min_value
                else:
                    tokens.start_with = tokens.start_with[0]
            else:
                tokens.min_value = tokens.min_value[0]
                if(tokens.start_with=="no start"):
                    tokens.start_with = tokens.min_value[0]
                else:
                    tokens.start_with = tokens.start_with[0]
            if(tokens.max_value=="no maxvalue"):
                tokens.max_value = "255"
            else:
                tokens.max_value = tokens.max_value[0]
        # unwrap the remaining values when they were explicitly supplied
        if(tokens.cache!="15"):
            tokens.cache = tokens.cache[0]
        if(tokens.increment_by!="1"):
            tokens.increment_by = tokens.increment_by[0]
        if(tokens.as_value!="bigint"):
            tokens.as_value = tokens.as_value[0]
        return tokens
    def AK_parse_where(self, string):
        '''
        @author Kresimir Ivkovic, updated by Davor Tomala
        @brief parser for select, update, delete and any function containing the "WHERE" clause (automatically detects statement type)
        @param string: sql query as string
        @return tokenized query
        '''
        tokens = None
        #literals
        deleteLit=CaselessKeyword("DELETE")
        selectLit=CaselessKeyword("SELECT")
        updateLit=CaselessKeyword("UPDATE")
        fromLit=CaselessKeyword("FROM")
        usingLit=CaselessKeyword("USING")
        whereLit=CaselessKeyword("WHERE")
        setLit=CaselessKeyword("SET")
        lBracket=Suppress(Literal("("))
        rBracket=Suppress(Literal(")"))
        valuesLiteral=Suppress(CaselessLiteral("VALUES"))
        comma=Suppress(Literal(","))
        #identifier
        identifier=Word(alphas, alphanums + "_$")
        tableName=identifier
        userName=Word(alphas, alphanums)
        #values (quoted strings, reals with optional exponent, ints)
        E = CaselessLiteral("e")
        arithSign=Word("+-",exact=1)
        realNum=Combine(Optional(arithSign)+(Word(nums)+"."+Optional( Word(nums))|("."+Word(nums)))+Optional(E+Optional(arithSign)+Word(nums)))
        intNum=Combine(Optional(arithSign)+Word(nums)+Optional(E+Optional("+")+Word(nums)))
        value=quotedString | realNum | intNum
        valuesList=Group(delimitedList(value))
        values=lBracket+valuesList+rBracket
        #columns (bare, table-qualified, aliased, or aggregate-function calls)
        columnName= tableName+'.'+identifier | identifier
        colAs = columnName + CaselessKeyword("AS") + identifier
        aggrfn = Group((CaselessKeyword("AVG") | CaselessKeyword("SUM") | CaselessKeyword("COUNT") | CaselessKeyword("MIN") | CaselessKeyword("MAX")) + lBracket + ('*' | columnName) + rBracket)
        columnNameSelect= Group(colAs) | aggrfn | (tableName+'.'+identifier) | identifier ### select stmt can contain aggr. functions in their att list, so a special columnName is needed for select
        columnNameList=Group(delimitedList(columnName))
        columnNameListSelect = Group(delimitedList(columnNameSelect) | '*') ### select can contain an asterisk for "all attributes" instead of att names
        columns=lBracket+columnNameList+rBracket
        #data types
        dataSize=lBracket+intNum+rBracket
        floatType=CaselessKeyword("float")
        integerType=CaselessKeyword("integer")
        varcharType=CaselessKeyword("varchar")+dataSize
        #predicate(limited)
        binrelop=oneOf(["<", "<=", ">=", ">", "=", "!="])
        sqlIN=CaselessKeyword("IN")
        sqlBetween=CaselessKeyword("BETWEEN")
        sqlLike=CaselessKeyword("LIKE")
        sqlOR=CaselessKeyword("OR")
        sqlAND=CaselessKeyword("AND")
        sqlUnion=CaselessKeyword("UNION")+Optional(CaselessKeyword("ALL"))
        sqlIntersect=CaselessKeyword("INTERSECT")+Optional(CaselessKeyword("ALL"))
        sqlExcept=CaselessKeyword("EXCEPT")+Optional(CaselessKeyword("ALL"))
        predicate=Forward()
        predicate<<columnName+binrelop+value+ZeroOrMore((sqlOR | sqlAND | sqlIN | sqlBetween | sqlLike)+predicate)
        # expression / where / select are mutually recursive (subqueries via IN),
        # hence the Forward declarations
        selectInner=Forward()
        expression=Forward()
        where=Forward()
        expression << Group(
            (columnName.setResultsName("expLval") + binrelop.setResultsName("whereOp") + columnName.setResultsName("expRval")) |
            (aggrfn.setResultsName("expLval") + binrelop.setResultsName("whereOp") + columnName.setResultsName("expRval")) |
            (columnName.setResultsName("expLval") + binrelop.setResultsName("whereOp") + value.setResultsName("expRval")) |
            (aggrfn.setResultsName("expLval") + binrelop.setResultsName("whereOp") + value.setResultsName("expRval")) |
            (columnName.setResultsName("expLval") + sqlIN.setResultsName("whereOp") + values.setResultsName("expRval")) |
            (columnName.setResultsName("expLval") + sqlIN.setResultsName("whereOp") + Group(selectInner.setResultsName("expRval")))
            ) + ZeroOrMore((sqlOR | sqlAND) + expression)
        where << whereLit.setResultsName("where")+expression.setResultsName("expression")
        select=Forward()
        join=Forward()
        # optional SELECT clauses
        distinct = CaselessKeyword("DISTINCT ON")+lBracket+columnNameList+rBracket
        limit = CaselessKeyword("LIMIT")+intNum.setResultsName("limitVal")
        offset = CaselessKeyword("OFFSET")+intNum.setResultsName("offsetVal")
        groupBy = CaselessKeyword("GROUP BY")+columnNameList.setResultsName("groupByCol")+Optional(CaselessKeyword("HAVING")+expression)
        orderBy = CaselessKeyword("ORDER BY")+columnNameList+Optional(CaselessKeyword("ASC")|CaselessKeyword("DESC"))
        joinStatement = Optional(
            CaselessKeyword("INNER")|
            CaselessKeyword("CROSS")|
            (
                CaselessKeyword("LEFT")|
                CaselessKeyword("RIGHT")|
                CaselessKeyword("FULL")
            )+Optional(CaselessKeyword("OUTER"))
        )+CaselessKeyword("JOIN")+columnName.setResultsName("joinCol")+CaselessKeyword("ON")+expression
        join << joinStatement + ZeroOrMore(joinStatement)
        selectStatement=selectLit.setResultsName("commandName")+\
            Optional(Group(distinct.setResultsName("distinct")))+\
            columnNameListSelect.setResultsName("attributes")+\
            fromLit.setResultsName("from")+\
            tableName.setResultsName("tableName")+\
            Optional(Group(join.setResultsName("join")))+\
            Optional(where.setResultsName("condition"))+\
            Optional(Group(groupBy.setResultsName("group")))+\
            Optional(Group(orderBy.setResultsName("order")))+\
            Optional(Group(limit.setResultsName("limit")))+\
            Optional(Group(offset.setResultsName("offset")))
        # set operations may chain further full selects / parenthesised subselects
        select << selectStatement + ZeroOrMore((sqlUnion|sqlIntersect|sqlExcept) + select) + stringEnd
        selectInner << lBracket+selectStatement+rBracket + ZeroOrMore((sqlUnion|sqlIntersect|sqlExcept) + selectInner)
        deleteFrom=Forward()
        deleteFrom<<deleteLit.setResultsName("commandName")+\
            fromLit.setResultsName("from")+\
            tableName.setResultsName("tableName")+\
            Optional(usingLit.setResultsName("using")+tableName.setResultsName("usingTable"))+\
            Optional(where.setResultsName("condition"))+stringEnd
        update = Forward()
        updateStatement = updateLit.setResultsName("commandName")+\
            tableName.setResultsName("tableName")+\
            setLit.setResultsName("set")+\
            columnNameList.setResultsName("columnNames")+\
            binrelop.setResultsName("operator")+\
            valuesList.setResultsName("columnValues")
        updatePair = Literal(",") + columnNameList.setResultsName("columnNames") + binrelop.setResultsName("operator") + valuesList.setResultsName("columnValues")
        update << updateStatement + ZeroOrMore(updatePair) + Optional(where.setResultsName("condition")) + stringEnd
        # the statement type is detected by trying each alternative in order
        sqlGrammar= select | deleteFrom | update
        try:
            tokens = sqlGrammar.parseString(string)
        except ParseException, err:
            return "\n\t"+"Syntax error at char "+str(err.loc)+" "+err.msg+":\n"+\
                '\t"'+string+'"'+\
                '\n\t-'+''.join(['-' for i in range(0,err.loc)])+'^'+'\n'
        return tokens
def AK_parse_create_user(self, string):
'''
@author Franjo Kovacic
@brief sql parsing of CREATE USER command
@param string sql command as string
@return if command is successfully parsed returns list od tokens, else returns error message as string
'''
createUserToken = Keyword("CREATE USER", caseless=True)
setPasswordToken = Keyword("WITH PASSWORD", caseless=True)
createDBToken = Keyword("CREATEDB", caseless=True)
createUsToken = Keyword("CREATEUSER", caseless=True)
inGroupToken = Keyword("IN GROUP", caseless=True)
validUntilToken = Keyword("VALID UNTIL", caseless=True)
tokens = Word(alphanums+"_:.-")
userName = tokens.copy().setResultsName("username")
password = tokens.copy().setResultsName("password")
groupName = tokens.setResultsName("groupName")
validUntil = tokens.setResultsName("validUntil")
constraints = ZeroOrMore(setPasswordToken + password.setResultsName("password") | createDBToken | createUsToken.setResultsName("createUser") | inGroupToken + groupName.setResultsName("groupName") | validUntilToken + validUntil.setResultsName("validUntil"))
createUserStmt = createUserToken.setResultsName("commandName")+\
userName.setResultsName("username")+\
Optional(constraints)+\
stringEnd
try:
tokens = createUserStmt.parseString(string)
except ParseException, err:
return " "*err.loc + "^\n" + err.msg
print
return tokens
def AK_parse_create_table(self, string):
'''
@author Franjo Kovacic
@brief sql parsing of CREATE TABLE command
@param string sql command as string
@return if command is successfully parsed returns list of tokens, else returns error message as string
'''
createTableToken = Keyword("CREATE TABLE", caseless=True)
notNullToken = Keyword("NOT NULL", caseless=True)
uniqueToken = Keyword("UNIQUE", caseless=True)
primaryKeyToken = Keyword("PRIMARY KEY", caseless=True)
defaultToken = Keyword("DEFAULT", caseless=True)
foreignKeyToken = Keyword("FOREIGN KEY", caseless=True)
referencesToken = Keyword("REFERENCES", caseless=True)
autoIncrementToken = Keyword("AUTO_INCREMENT", caseless=True)
checkToken = Keyword("CHECK", caseless=True)
lBracket=Suppress(Literal("("))
rBracket=Suppress(Literal(")"))
comma=Suppress(Literal(","))
identifier=Word(alphas, alphanums + "_$")
tableName = identifier
columnName = identifier
columnNameList=Group(delimitedList(columnName))
columns=lBracket+columnNameList+rBracket
#values
E = CaselessLiteral("e")
arithSign=Word("+-",exact=1)
realNum=Combine(Optional(arithSign)+(Word(nums)+"."+Optional( Word(nums))|("."+Word(nums)))+Optional(E+Optional(arithSign)+Word(nums)))
intNum=Combine(Optional(arithSign)+Word(nums)+Optional(E+Optional("+")+Word(nums)))
value=quotedString | realNum | intNum
valuesList=Group(delimitedList(value))
values=lBracket+valuesList+rBracket
#types
dataSize=lBracket+intNum+rBracket
floatType=CaselessKeyword("float")
integerType=CaselessKeyword("int")
varcharType=CaselessKeyword("varchar")+dataSize
textType=CaselessKeyword("text")
#predicate(limited)
binrelop=oneOf(["<", "<=", ">=", ">", "=", "!="])
sqlOR=CaselessKeyword("OR")
sqlAND=CaselessKeyword("AND")
predicate=Forward()
predicate<<columnName+binrelop+value+ZeroOrMore((sqlOR | sqlAND)+predicate)
#attribute constraint
defaultConstraint = defaultToken + value
foreignKeyConstraint = foreignKeyToken + referencesToken + tableName + columns
checkConstraint = Group(CaselessKeyword("CHECK") + predicate)
constraint = notNullToken | uniqueToken | primaryKeyToken | checkConstraint | autoIncrementToken
constraints = ZeroOrMore(constraint)
#constraint at the end
pkUnique=uniqueToken | primaryKeyToken
pkUniqueEConstraint=pkUnique + columns
foreignKeyEConstraint=foreignKeyToken+columnName+referencesToken+tableName+lBracket+columnName+rBracket
checkEConstraint="ERROR"
endConstraint=Group(CaselessKeyword("CONSTRAINT")+identifier+(pkUniqueEConstraint | checkEConstraint | foreignKeyEConstraint))
endConstraints=Group(delimitedList(endConstraint))
#attributes (create table)
attributeType=floatType | integerType | varcharType | textType
attributeDefinition=Group(identifier+attributeType+constraints)
attributeDefinitionList=Group(delimitedList(attributeDefinition))
attributes=attributeDefinitionList
createTableStmt=createTableToken.setResultsName("commandName")+\
tableName.setResultsName("tableName")+\
lBracket+\
attributes.setResultsName("attributes")+\
Optional(comma+endConstraints).setResultsName("endConstraints")+\
rBracket+stringEnd
try:
tokens = createTableStmt.parseString(string)
except ParseException, err:
return " "*err.loc + "^\n" + err.msg
print
return tokens
def AK_parse_insert_into(self, string):
'''
@author Franjo Kovacic
@brief sql parsing of INSERT INTO command
@param string sql command as string
@return if command is successfully parsed returns list of tokens, else returns error message as string
'''
insertIntoToken = Keyword("INSERT INTO", caseless=True)
valuesToken = Keyword("VALUES", caseless=True)
lBracket=Suppress(Literal("("))
rBracket=Suppress(Literal(")"))
comma=Suppress(Literal(","))
identifier=Word(alphas, alphanums + "_$")
tableName=identifier
columnName=identifier
columnNameList=Group(delimitedList(columnName))
columns=lBracket+columnNameList+rBracket
#values
E = CaselessLiteral("e")
arithSign=Word("+-",exact=1)
realNum=Combine(Optional(arithSign)+(Word(nums)+"."+Optional( Word(nums))|("."+Word(nums)))+Optional(E+Optional(arithSign)+Word(nums)))
intNum=Combine(Optional(arithSign)+Word(nums)+Optional(E+Optional("+")+Word(nums)))
value=quotedString | realNum | intNum
valuesList=Group(delimitedList(value))
values=lBracket+valuesList+rBracket
insertStmt=insertIntoToken.setResultsName("commandName")+\
tableName.setResultsName("tableName")+\
Optional(columns).setResultsName("columns")+\
valuesToken+\
values.setResultsName("columnValues")+stringEnd
try:
tokens = insertStmt.parseString(string)
except ParseException, err:
return " "*err.loc + "^\n" + err.msg
return tokens
def AK_parse_trigger(self, string):
'''
@author Davorin Vukelic
@brief sql parsing of TRIGGER command
@param string sql command as string
@return if command is successfully parsed returns list of tokens, else returns error message as string
'''
createToken = Keyword("create trigger", caseless=True)
whenToken = ( Keyword("after", caseless=True) | Keyword("before", caseless=True) )
actionToken = ( Keyword("delete", caseless=True) | Keyword("insert", caseless=True) | Keyword("update", caseless=True) )
onToken = Keyword("on", caseless=True)
forToken = Keyword("for", caseless=True)
eachToken = Keyword("each", caseless=True)
whatToken = (Keyword("row", caseless=True) | Keyword("statement", caseless=True) )
executeToken = Keyword("each", caseless=True)
procedureToken = Keyword("execute procedure", caseless=True)
orToken = Keyword("or", caseless=True)
eventToken = ( Keyword("insert", caseless=True) | Keyword("update", caseless=True) | Keyword("delete", caseless=True))
tokens =Word( alphas, alphanums + "_$")
trigName = tokens.copy().setResultsName("name")
event = tokens.copy().setResultsName("event")
table = tokens.copy().setResultsName("tableName")
function = tokens.copy().setResultsName("functionName")
ident=Word(nums,alphas)
param = delimitedList(ident, ",", combine=True)
paramList = Group(delimitedList(param))
lBracket = Suppress("(")
rBracket = Suppress(")")
#params
E = CaselessLiteral("e")
arithSign=Word("+-",exact=1)
realNum=Combine(Optional(arithSign)+(Word(nums)+"."+Optional( Word(nums))|("."+Word(nums)))+Optional(E+Optional(arithSign)+Word(nums)))
intNum=Combine(Optional(arithSign)+Word(nums)+Optional(E+Optional("+")+Word(nums)))
value=quotedString | realNum | intNum
valuesList=Group(delimitedList(value))
triggerStmt = Forward()
triggerStmt << (createToken+trigName+whenToken.setResultsName("whenOption")+\
eventToken.setResultsName("EventOption1")+Optional(orToken+\
eventToken.setResultsName("EventOption2")+Optional(orToken+\
eventToken.setResultsName("EventOption3")))+onToken+table+\
Optional(forToken+Optional(eachToken)+whatToken.setResultsName("whatOption"))+\
procedureToken+function+lBracket+Optional(valuesList.setResultsName("params"))+rBracket)
try:
tokens = triggerStmt.parseString( string )
except ParseException, err:
return " "*err.loc + "^\n" + err.msg
print
return tokens
def AK_parse_trans(self, string):
'''
@author Filip Sostarec
@brief sql parsing of BEGIN TRANS command
@param string sql command as string
@return if command is successfully parsed returns list of tokens, else returns error message as string
'''
beginToken = Keyword("begin", caseless=True)
transToken = ( Keyword("work", caseless=True) | Keyword("transaction", caseless=True))
isolationToken = Keyword("isolation level", caseless=True)
transmodeToken = (Keyword("serializable",caseless=True) | Keyword("repeatable read",caseless=True) |\
Keyword("read commited",caseless=True) | Keyword("read uncommited",caseless=True))
commitToken = (Keyword("commit",caseless=True) | Keyword("rollback",caseless=True) )
tokens =Word( alphas, alphanums + "_$")
tijelo = tokens.copy().setResultsName("tijelo")
transStmt = Forward()
transStmt << (beginToken+Optional(transToken)+Optional(isolationToken+transmodeToken)+tijelo+commitToken)
try:
tokens = transStmt.parseString( string )
except ParseException, err:
return " "*err.loc + "^\n" + err.msg
print
return tokens
    def AKTokenizeCreateFunction(self, sql, types):
        '''
        @brief sql parsing of CREATE FUNCTION command
        @param sql the CREATE FUNCTION statement as a string
        @param types regex alternation of accepted type names (matched case-insensitively)
        @return pyparsing result list; a ParseException propagates on bad input
        '''
        typesPattern = Regex(types, re.IGNORECASE)
        createFunction = CaselessKeyword("CREATE") + CaselessKeyword("FUNCTION")
        functionName = Word(alphanums + "_") + Literal("(")
        argumentMode = Optional(Regex("IN|OUT", re.IGNORECASE))
        argumentNameAndOrType = Optional(Regex("[A-Za-z0-9]+")) + typesPattern
        defaultArgument = Optional(And(Regex("DEFAULT|=", re.IGNORECASE) + Word(alphanums)))
        argument = argumentMode+argumentNameAndOrType+defaultArgument
        functionArguments = Optional(delimitedList(argument, ",")) + Literal(")")
        returnTableAttributes = Literal("(") + delimitedList(Word(alphanums) + typesPattern, ",") + Literal(")")
        returnVariant1 = And(CaselessKeyword("RETURNS") + typesPattern)
        returnVariant2 = And(CaselessKeyword("RETURNS") + CaselessKeyword("TABLE") + returnTableAttributes)
        # NOTE(review): Or(returnVariant1 + returnVariant2) wraps the
        # *concatenation* of the two variants, not an alternation;
        # returnVariant1 | returnVariant2 was probably intended — confirm.
        returnType = Or(returnVariant1 + returnVariant2)
        # NOTE(review): language is built but never used in wholeSyntax below,
        # and argumentMode appears a second time after functionName — verify.
        language = Optional(CaselessKeyword("LANGUAGE") + Regex("SQL|PLSQL|INTERNAL|C", re.IGNORECASE))
        functionCode = CaselessKeyword("AS") + Regex(".*")
        wholeSyntax = createFunction + functionName + argumentMode + functionArguments + returnType + functionCode
        parsedCommand = wholeSyntax.parseString(sql)
        return parsedCommand
#Author: Tomislav Dukaric
#Parameter "sql" is the definition query to create a function
#to create a view object and store it into the database
#The method returns a list of keywords used inside the query
def AK_parse_CreateView(self, sql):
create = CaselessKeyword("CREATE")
mode = Optional(Regex("TEMP|TEMPORARY", re.IGNORECASE))
view = CaselessKeyword("VIEW")
viewName = Word(alphanums + "_")
as_key = CaselessKeyword("AS")
query = Regex(".*")
wholeSyntax = create + mode.setResultsName("mode") + view + viewName.setResultsName("name") + as_key + query.setResultsName("query")
try:
parsedCommand = wholeSyntax.parseString(sql)
except ParseException, err:
return ""*err.loc + "^\n" + err.msg
print
return parsedCommand
def AK_parse_alter_user(self, string):
    '''
    @author Marinko Radic
    @brief sql parsing of ALTER USER command
    @param string sql command as string
    @return if command is successfully parsed returns list of tokens, else returns error message as string
    '''
    # multi-word keywords of the ALTER USER grammar
    alterUserToken = Keyword("ALTER USER", caseless=True)
    passwordToken = Keyword("WITH PASSWORD", caseless=True)
    createDB = Keyword("CREATEDB", caseless=True)
    createUshortToken = Keyword("CREATEUSER", caseless=True)
    validToken = Keyword("VALID UNTIL", caseless=True)
    renameToToken = Keyword("RENAME TO", caseless=True)
    # identifier/value characters: letters, digits and _ : . -
    # (the extra punctuation covers dates and passwords)
    tokens = Word(alphanums+"_:.-")
    user = tokens.copy().setResultsName("username")
    password = tokens.copy().setResultsName("password")
    valid = tokens.copy().setResultsName("validUntil")
    newName = tokens.copy().setResultsName("newname")
    # one optional trailing clause; alternatives are tried left to right
    constraints = passwordToken + password.setResultsName("password") | createUshortToken + createDB| validToken + valid.setResultsName("validUntil") | renameToToken + newName.setResultsName("newname")
    alterUserCmd = alterUserToken.setResultsName("commandName") + user.setResultsName("username") + Optional(constraints)
    try:
        tokens=alterUserCmd.parseString(string)
    except ParseException, err:
        # caret marker under the failing position, followed by the message
        return " "*err.loc + "^\n" + err.msg
    print
    return tokens
def AK_parse_alter_view(self, string):
    '''
    @author Marinko Radic
    @brief sql parsing of ALTER VIEW command
    @param string sql command as string
    @return if command is successfully parsed returns list of tokens, else returns error message as string
    '''
    alterViewToken = Keyword("ALTER VIEW", caseless=True)
    # NOTE(review): "SET SHEMA" looks like a typo for "SET SCHEMA" -- kept
    # as-is because callers may already depend on it; confirm before fixing.
    setShemaToken = Keyword("SET SHEMA", caseless=True)
    renameToToken = Keyword("RENAME TO",caseless=True)
    ownerToToken = Keyword("OWNER TO", caseless=True)
    setDefaultToken = Keyword("SET DEFAULT", caseless=True)
    dropDefaultToken = Keyword("DROP DEFAULT", caseless=True)
    alterColumnToken = Keyword("ALTER COLUMN",caseless= True)
    # identifier/value characters: letters, digits and _ : . -
    tokens = Word(alphanums+"_:.-")
    name = tokens.copy().setResultsName("name")
    columnName = tokens.copy().setResultsName("columnName")
    expression = tokens.copy().setResultsName("expression")
    newName = tokens.copy().setResultsName("newName")
    # one alternative per ALTER VIEW variant (ALTER COLUMN SET DEFAULT,
    # ALTER COLUMN DROP DEFAULT, OWNER TO, SET SHEMA, RENAME TO);
    # alternatives are tried left to right, so order matters
    alterViewCmd = alterViewToken + name.setResultsName("name") + alterColumnToken + columnName.setResultsName("columnName")+ setDefaultToken + expression.setResultsName("expression")| alterViewToken + name.setResultsName("name") + alterColumnToken + columnName.setResultsName("columnName")+ dropDefaultToken | alterViewToken + name.setResultsName("name") + ownerToToken+ newName.setResultsName("newName")| alterViewToken + name.setResultsName("name") + setShemaToken+ newName.setResultsName("newName")|alterViewToken + name.setResultsName("name") + renameToToken+ newName.setResultsName("newName")
    try:
        tokens=alterViewCmd.parseString(string)
    except ParseException, err:
        # caret marker under the failing position, followed by the message
        return " "*err.loc + "^\n" + err.msg
    print
    return tokens
def AK_parse_alter_index(self, string):
'''
@author Marinko Radic
@brief sql parsing of ALTER INDEX command
@param string sql command as string
@return if command is successfully parsed returns list of tokens, else returns error message as string
'''
alterIndexToken = Keyword("ALTER INDEX", caseless=True)
renameToToken = Keyword("RENAME TO",caseless=True)
tokens = Word(alphanums+"_:.-")
name = tokens.copy().setResultsName("name")
newName = tokens.copy().setResultsName("newName")
alterIndexCmd = alterIndexToken + name.setResultsName("name") + renameToToken + newName.setResultsName("newName")
try:
tokens=alterIndexCmd.parseString(string)
except ParseException, err:
return " "*err.loc + "^\n" + err.msg
print
return tokens
def AK_parse_alter_sequence(self, string):
    '''
    @author Marinko Radic
    @brief sql parsing of ALTER SEQUENCE command
    @param string sql command as string
    @return if command is successfully parsed returns list of tokens, else returns error message as string
    '''
    alterSequenceToken = Keyword("ALTER SEQUENCE", caseless=True)
    incrementByToken = Keyword("INCREMENT BY", caseless=True)
    minValueToken = Keyword("MINVALUE",caseless=True)
    NoMinValueToken = Keyword("NO MINVALUE",caseless=True)
    maxValueToken = Keyword("MAXVALUE",caseless=True)
    noMaxValueToken = Keyword("NO MAXVALUE",caseless=True)
    restartWithToken = Keyword("RESTART WITH",caseless=True)
    cacheToken = Keyword("CACHE",caseless=True)
    noCacheToken = Keyword("NO CACHE",caseless=True)
    cycleToken = Keyword("CYCLE",caseless=True)
    noCycleToken = Keyword("NO CYCLE",caseless=True)
    # NOTE(review): noCacheToken and setShemaToken are defined but never
    # used in `constraints` below -- presumably an incomplete grammar;
    # "SET SHEMA" also looks like a typo for "SET SCHEMA".
    setShemaToken = Keyword("SET SHEMA", caseless=True)
    # sequence names; tokens2 matches the numeric clause arguments
    tokens = Word(alphanums+"_")
    tokens2 = Word(nums)
    sequenceName = tokens.copy().setResultsName("sequenceName")
    increment = tokens2.copy().setResultsName("increment")
    minValue = tokens2.copy().setResultsName("minvalue")
    maxValue = tokens2.copy().setResultsName("maxvalue")
    cache = tokens.copy().setResultsName("cache")
    newName = tokens.copy().setResultsName("newName")
    restartValue = tokens2.copy().setResultsName("restartValue")
    # any number of clauses, in any order
    constraints = ZeroOrMore( restartWithToken + restartValue.setResultsName("restartValue") |
    incrementByToken + increment.setResultsName("increment")|
    minValueToken + minValue.setResultsName("minvalue")| NoMinValueToken| maxValueToken + maxValue.setResultsName("maxvalue") | noMaxValueToken |
    cacheToken + cache.setResultsName("cache") |cycleToken | noCycleToken )
    alterSequenceCmd = alterSequenceToken.setResultsName("commandName") + sequenceName.setResultsName("sequenceName") + Optional(constraints)
    try:
        tokens=alterSequenceCmd.parseString(string)
    except ParseException, err:
        # caret marker under the failing position, followed by the message
        return " "*err.loc + "^\n" + err.msg
    print
    return tokens
#--------------------------------------------testne funkcije--------------------------------------------#
def AK_parse_grant_test(self):
    '''
    @author Boris Kisic
    @brief testing of sql parsing command GRANT
    @return No return value
    '''
    print "\n---------------------------------GRANT test---------------------------------\n"
    # sample statements covering mixed case, multiple privileges/tables/users
    # and the optional WITH ADMIN OPTION clause
    commands = ["GRANT SELECT, INSERT, UPDATE, DELETE ON album, song TO Elvis, Jimmy WITH ADMIN OPTION",
                "grant update on table1, table2 to Ivica, pero22foi1",
                "Grant insert on drivers to Hamilton, Raikkonen, Alonso"]
    for command in commands:
        token = test.AK_parse_grant(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token
            print "tokens.privileges = ", token.privileges
            print "tokens.tables = ", token.tables
            print "tokens.users =", token.users
            print "tokens.grantOption =", token.grantOption
def AK_parse_drop_test(self):
    '''
    @brief testing of sql parsing command DROP
    @return No return value
    '''
    print "\n---------------------------------DROP test---------------------------------\n"
    # sample statements covering the DROP variants the parser supports
    commands=["DROP temporary table if exists tabela1231 cascade",
              "drop user matija, pero, jura",
              "Drop column kolona1, kolona_2",
              "drop index bla on tablica",
              "drop trigger bla on tablica2",
              "drop role test",
              "drop function if exists funcija1",
              "drop procedure if exists procedurea_df489f"]
    for command in commands:
        token = test.AK_parse_drop(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token
            print "tokens.opcija1 = ", token.opcija1
            print "tokens.objekt = ", token.objekt
            print "tokens.ime_objekta = ", token.ime_objekta
            print "tokens.opcija2 = ", token.opcija2
            print "tokens.opcija3 = ", token.opcija3
            print "tokens.onToken = ", token.onToken
            print "tokens.ime_objekta2 = ", token.ime_objekta2
def AK_alter_table_test(self):
    '''
    @author Krunoslav Bilic
    @brief testing of sql parsing command ALTER TABLE
    @return statement, operation, table_name, column_name, data_type[list]
    '''
    print "\n---------------------------------ALTER TABLE test---------------------------------\n"
    # ADD/DROP variants, with and without a data type
    commands = ["alter table imena drop column srednje_ime",\
                "alter table icecream add flavor varchar(20)",\
                "alter table icecream add flavor",\
                "alter table icecream drop flavor varchar(20)"]
    for command in commands:
        print "\n"+command
        token = test.AK_alter_table(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token
            print "statement = ", token.statement
            print "operation = ", token.operation
            print "table_name = ", token.table_name
            print "column_name = ", token.column_name
            print "data_type = ", token.data_type
def AK_trans_test(self):
    '''
    @author Filip Sostarec
    @brief testing of transaction
    '''
    print "\n---------------------------------TRANSACTION test---------------------------------\n"
    # commit/rollback variants, with optional WORK/TRANSACTION keyword
    # and isolation level clause ("tijelo" is the transaction body)
    commands = ["begin tijelo commit",\
                "begin tijelo rollback",\
                "begin work tijelo commit",\
                "begin transaction isolation level serializable tijelo rollback"]
    for command in commands:
        print "\n"+command
        token = test.AK_parse_trans(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token
            print "tijelo = ", token.tijelo
def AK_parse_createIndex_test(self):
    '''
    @author Domagoj Tulicic
    @brief testing of sql parsing command CREATE INDEX
    @return No return value
    '''
    print "\n---------------------------------CREATE INDEX test---------------------------------\n"
    # one or more indexed columns, Btree and Hash index kinds
    commands = ["CREATE INDEX Pindex ON tablica ( stupac1, stupac2 ) USING Btree",\
                "create index Pindex on tablica ( stupac1 ) USING Btree",\
                "create index Pindex on tablica ( stupac1, stupac2 ) USING Hash"]
    for command in commands:
        print "\n"+command
        token = test.AK_parse_createIndex(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error: " + token
        else:
            print "token = ", token
            print "IndexIme = ", token.IndexIme
            print "tablica = ", token.tablica
            print "stupci = ", token.stupci
            print "IndexVrsta = ", token.IndexVrsta
def AK_create_sequence_test(self):
    '''
    @author Domagoj Tulicic
    @brief testing of sql parsing command CREATE SEQUENCE
    @return No return value
    '''
    print "\n---------------------------------CREATE SEQUENCE test---------------------------------\n"
    # full clause list, with and without CYCLE
    commands = ["create sequence sequenca start with 1 increment by 2 minvalue 9 maxvalue 9999999 cache 10 CYCLE",\
                "create sequence sequenca start with 1 increment by 2 minvalue 9 maxvalue 9999999 cache 10"]
    for command in commands:
        print "\n"+command
        token = test.AK_create_sequence(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token
            print "SekvencaIme = ", token.sekvenca
            print "min value = ", token.min_value
            print "max value = ", token.max_value
            print "increment by = ", token.increment_by
            print "cache = ", token.cache
            print "cycle = ", token.cycle
            print "'AS' definition: ", token.as_value
            print "'Start with' value: ", token.start_with
    '''
    @author Danko Sacer
    @brief additional testing of sql parsing command CREATE SEQUENCE
    @return No return value
    '''
    # additional cases: explicit AS type, and the bare minimum statement
    print "\n********CREATE SEQUENCE test by Danko Sacer***********"
    sql_1 = "create sequence brojac_1 as smallint start with 10 increment by 1 minvalue 5"
    print "\n"+sql_1
    token = test.AK_create_sequence(sql_1)
    if isinstance(token, str):
        print "Error: "
        print sql_1
        print token
    else:
        print "Tokens: ", token
        print "\nSequence name: ", token.sekvenca
        print "'AS' definition: ", token.as_value
        print "'Start with' value: ", token.start_with
        print "'Increment by' value: ", token.increment_by
        print "'MinValue' value: ", token.min_value
        print "'MaxValue' value: ", token.max_value
        print "'Cache' value: ", token.cache
        print "'Cycle' value: ", token.cycle
    sql_2 = "create sequence brojac_2"
    print "\n"+sql_2
    token = test.AK_create_sequence(sql_2)
    if isinstance(token, str):
        print "Error:"
        print sql_2
        print token
    else:
        print "Tokens: ", token
        print "\nSequence name: ", token.sekvenca
        print "'AS' definition: ", token.as_value
        print "'Start with' value: ", token.start_with
        print "'Increment by' value: ", token.increment_by
        print "'MinValue' value: ", token.min_value
        print "'MaxValue' value: ", token.max_value
        print "'Cache' value: ", token.cache
        print "'Cycle' value: ", token.cycle
def AK_parse_where_test(self):
    '''
    @author Kresimir Ivkovic
    @brief tests parsing of select and delete statements
    @return No return value
    '''
    # queries 0-1: SELECT (joins, subqueries, group/order/limit, union);
    # queries 2-3: UPDATE; queries 4-5: DELETE (USING clause, subqueries)
    query = ["select * from t1 inner join t2 on t1.at1 = t2.at1 left outer join t3 on t2.at2 = t3.at2 where t3.at3 > 0 and t2.at1 in (select count(*) from t3) order by t1.at1 desc limit 100 offset 10",
             "select at1, avg(at2) from t1 cross join t2 on t1.at1 > t2.at1 group by at1 having avg(at2) > 100 order by at1 asc union select count(*) from t3 where t3 in (select distinct on(a) a, b, c from t4)",
             "update t1 set at1 = 1 where at2 = 2 and at3 = 3",
             "update t2 set at1 = 1, at2 = 2, at3 = 3 where at4 = 4",
             "delete from t1 using t2 where t1.at1 = t2.at2",
             "delete from t1 where t1.at1 in (select * from t2 where at2 > 0 and at3 < 0 or at4 = 0) or t1.at2 in (select * from at3)"
             ]
    print "\n---------------------------------SELECT test---------------------------------\n"
    print query[0]
    print test.AK_parse_where(query[0])
    print '\n'
    print query[1]
    print test.AK_parse_where(query[1])
    print "\n---------------------------------UPDATE test---------------------------------\n"
    print query[2]
    print test.AK_parse_where(query[2])
    print '\n'
    print query[3]
    print test.AK_parse_where(query[3])
    print "\n---------------------------------DELETE test---------------------------------\n"
    print query[4]
    print test.AK_parse_where(query[4])
    print '\n'
    print query[5]
    print test.AK_parse_where(query[5])
def AK_parse_create_user_test(self):
    '''
    @author Franjo Kovacic
    @brief tests parsing of CREATE USER statement
    @return No return value
    '''
    print "\n---------------------------------CREATE USER test---------------------------------\n"
    # WITH PASSWORD, IN GROUP, CREATEDB and VALID UNTIL clause combinations
    commands = ["CREATE USER test1 WITH PASSWORD tt", "CREATE USER user WITH PASSWORD 1pass1", "CREATE USER ub1 IN GROUP grupa", "CREATE USER mmw WITH PASSWORD pas1mac IN GROUP anim CREATEDB VALID UNTIL 2013-22-02"]
    for command in commands:
        token = test.AK_parse_create_user(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token.dump()
def AK_parse_create_table_test(self):
    '''
    @author Franjo Kovacic
    @brief tests parsing of CREATE TABLE statement
    @return No return value
    '''
    print "\n---------------------------------CREATE TABLE test---------------------------------\n"
    # column definitions with NOT NULL, PRIMARY KEY and AUTO_INCREMENT
    commands = ["CREATE TABLE tablica (var1 INT NOT NULL, var2 INT PRIMARY KEY)", "CREATE TABLE tabla1 (v2 INT, v4 TEXT, v11 INT AUTO_INCREMENT)"]
    for command in commands:
        token = test.AK_parse_create_table(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token.dump()
def AK_parse_insert_into_test(self):
    '''
    @author Franjo Kovacic
    @brief tests parsing of INSERT INTO statement
    @return No return value
    '''
    print "\n---------------------------------INSERT INTO test---------------------------------\n"
    # with and without an explicit column list
    commands = ["INSERT INTO tablica(vr1, v2, ttt3) VALUES ('a1', 'ss2', 'a2')", "INSERT INTO tablica1 VALUES (11, 'kk2', 'j2')"]
    for command in commands:
        token = test.AK_parse_insert_into(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token.dump()
def Ak_create_trigger_test(self):
    '''
    @author Davorin Vukelic
    @brief testing of sql parsing command Ak_create_trigger_test
    @return No return value
    '''
    print "\n---------------------------------TRIGGER test---------------------------------\n"
    # BEFORE/AFTER timing, multiple OR-ed events, FOR [EACH] ROW/STATEMENT,
    # and procedure calls with and without arguments
    commands=["CREATE TRIGGER prihvat_veze AFTER INSERT ON veza FOR ROW EXECUTE PROCEDURE veza_prihvacena()",\
              "CREATE TRIGGER prihvat_veze BEFORE DELETE OR INSERT ON veza EXECUTE PROCEDURE veza_prihvacena()",\
              "CREATE TRIGGER prihvat_veze BEFORE DELETE OR INSERT ON veza FOR EACH STATEMENT EXECUTE PROCEDURE veza_prihvacena(2,3,'data')",\
              "CREATE TRIGGER prihvat_veze AFTER DELETE OR INSERT OR UPDATE ON veza FOR EACH STATEMENT EXECUTE PROCEDURE veza_prihvacena(2,10.5,'data')"]
    for command in commands:
        token = test.AK_parse_trigger(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error: " + token
        else:
            print "tokens = ",token
            print "tokens.name = ",token.name
            print "tokens.whenOption = ",token.whenOption
            print "tokens.EventOption1 = ",token.EventOption1
            print "tokens.EventOption2 = ",token.EventOption2
            print "tokens.EventOption3 = ",token.EventOption3
            print "tokens.tableName = ",token.tableName
            print "tokens.whatOption = ",token.whatOption
            print "tokens.functionName = ",token.functionName
            # NOTE: label says "paramList" but the result attribute is "params"
            print "tokens.paramList = ",token.params
def AKTokenizeCreateFunctionTest(self):
    '''
    @brief testing of sql parsing command CREATE FUNCTION
    @return No return value
    '''
    # second argument is the regex of type names the tokenizer accepts
    string1 = "CREATE FUNCTION myFunction(IN number INT DEFAULT 12) RETURNS TABLE(varijabla1 INT, varijabla2 FLOAT) AS SELECT * FROM tablica"
    print test.AKTokenizeCreateFunction(string1, "INT|FLOAT|DECIMAL")
def AK_parse_CreateView_test(self):
string1 = "CREATE VIEW TestView AS SELECT * FROM someTable"
tmp = test.AK_parse_CreateView(string1)
print "tokens = ", tmp
print "tokens.name = ", tmp.name
print "tokens.mode = ", tmp.mode
print "tokens.query = ", tmp.query
string2 = "CREATE TEMP VIEW TestView AS SELECT * FROM someTable"
tmp = test.AK_parse_CreateView(string2)
print "tokens = ", tmp
print "tokens.name = ", tmp.name
print "tokens.mode = ", tmp.mode
print "tokens.query = ", tmp.query
def AK_alter_sequence_test(self):
    '''
    @author Marinko Radic
    @brief testing of sql parsing command ALTER SEQUENCE
    @no return value
    '''
    print "\n----------------------------ALTER SEQUENCE test-------------------------------\n"
    # clause combinations, plus the RESTART WITH variant
    commands = ["ALTER SEQUENCE sequenca INCREMENT BY 2 MINVALUE 9 MAXVALUE 9999999 cache 10 CYCLE",\
                "ALTER sequence sequenca increment by 2 minvalue 9 maxvalue 9999999 cache 10",\
                "ALTER SEQUENCE serial RESTART WITH 105"]
    for command in commands:
        token = test.AK_parse_alter_sequence(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token.dump()
def AK_alter_view_test(self):
    '''
    @author Marinko Radic
    @brief testing of sql parsing command ALTER VIEW
    @no return value
    '''
    print "\n----------------------------ALTER VIEW test-------------------------------\n"
    # RENAME TO and ALTER COLUMN ... SET DEFAULT variants
    commands = ["ALTER VIEW foo RENAME TO bar", "ALTER VIEW nek ALTER COLUMN mag SET DEFAULT neke"]
    for command in commands:
        token = test.AK_parse_alter_view(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token.dump()
def AK_alter_index_test(self):
    '''
    @author Marinko Radic
    @brief testing of sql parsing command ALTER INDEX
    @no return value
    '''
    print "\n----------------------------ALTER INDEX test-------------------------------\n"
    # single supported variant: RENAME TO
    commands = ["ALTER INDEX distributors RENAME TO suppliers"]
    for command in commands:
        token = test.AK_parse_alter_index(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token.dump()
def AK_alter_user_test(self):
    '''
    @author Marinko Radic
    @brief testing of sql parsing command ALTER USER
    @no return value
    '''
    print "\n----------------------------ALTER USER test-------------------------------\n"
    # mixed-case keywords, and each optional clause variant
    commands = ["ALTer USEr davide WITH PASSWORD hu8jmn3", "ALTER USER manuel VALID UNTIL 2013-22-02",
                "ALTER USER miriam CREATEUSER CREATEDB", "ALTER USER marinac RENAME TO marinac666", "ALTER USER dd VALID UNTIL 1","ALTER USER marinac RENAME TO marinac666 "]
    for command in commands:
        token = test.AK_parse_alter_user(command)
        # the parser returns a plain string (error marker) on failure
        if isinstance(token, str):
            print "Error:"
            print command
            print token
        else:
            print "tokens = ", token.dump()
# Module-level tokenizer instance used by all the *_test methods above.
test = sql_tokenizer()

# The block below is a manual smoke-test driver kept for reference;
# uncomment individual calls to exercise the corresponding parser.
'''
#testing grant statement
test.AK_parse_grant_test()
#testing drop statement
test.AK_parse_drop_test()
#testing alter statement
test.AK_alter_table_test()
#testing create sequence statement
test.AK_create_sequence_test()
#testing create index statement
test.AK_parse_createIndex_test()
#testing select and delete statements
test.AK_parse_where_test()
#testing create user statement
test.AK_parse_create_user_test()
#testing create table statement
test.AK_parse_create_table_test()
#testing insert into statement
test.AK_parse_insert_into_test()
#testing create trigger statement
test.Ak_create_trigger_test()
#testing create sequence statement
test.AK_create_sequence_test()
test.AKTokenizeCreateFunctionTest()
test.AK_parse_CreateView_test()
#testing alter user statment
test.AK_alter_user_test()
#testing alter sequnce statment
test.AK_alter_sequence_test()
#testing alter index statment
test.AK_alter_index_test()
#testing alter view statment
test.AK_alter_view_test()
#testing alter user statement
test.AK_alter_user_test()
#testing alter sequence statement
test.AK_alter_sequence_test()
#testing alter view test
test.AK_alter_view_test()
'''
| mschatten/akdb | akdb/src/srv/sql_tokenizer.py | Python | gpl-2.0 | 56,862 |
import threading
class ClientStack(threading.local):
    """Thread-local stack of StatsdClients.

    Applications and tests can either set the global statsd client using
    perfmetrics.set_statsd_client() or set a statsd client for each thread
    using statsd_client_stack.push()/.pop()/.clear().

    This is like pyramid.threadlocal but it handles the default differently.
    """

    # Fallback returned by get() when the per-thread stack is empty.
    default = None

    def __init__(self):
        self.stack = []

    def get(self):
        """Return the innermost client, or the class default if none pushed."""
        if self.stack:
            return self.stack[-1]
        return self.default

    def push(self, obj):
        """Make *obj* the current client for this thread."""
        self.stack.append(obj)

    def pop(self):
        """Remove and return the current client; None when the stack is empty."""
        if not self.stack:
            return None
        return self.stack.pop()

    def clear(self):
        """Drop every pushed client, reverting get() to the default."""
        self.stack[:] = []


client_stack = ClientStack()
| Labgoo/appengine-logstash-rabbitmq-log-handler | code/clientstack.py | Python | gpl-2.0 | 789 |
# Public API of this package: the build/clean helper commands re-exported
# for distutils-based setup scripts.
__all__ = ["build_extra",
           "build_i18n",
           "build_icons",
           "build_help",
           "clean_i18n"]
| ceibal-tatu/sugar-update-control | dist_extra/__init__.py | Python | gpl-2.0 | 127 |
from datetime import datetime
from inflection import underscore
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.ext.declarative.api import declared_attr, declarative_base
from sqlalchemy.orm import relationship, backref
from sqlalchemy.orm.session import sessionmaker
from sqlalchemy.sql.schema import Column, UniqueConstraint, ForeignKey
from sqlalchemy.sql.sqltypes import String, Integer, DateTime
class Base(object):
    """Declarative mixin providing derived table names and shared table args."""

    @declared_attr
    def __tablename__(cls):
        # table name is the snake_case form of the class name
        return underscore(cls.__name__)

    # let SQLite assign strictly increasing autoincrement rowids
    __table_args__ = ({'sqlite_autoincrement': True},)
# Session factory and the declarative base built on the mixin above
# (the name Base is rebound from the mixin class to the declarative base).
Session = sessionmaker()
Base = declarative_base(cls=Base)
class IdMixin(object):
    """Adds a surrogate integer primary key column to a model."""
    id = Column(Integer, primary_key=True)
class FileReference(IdMixin, Base):
    """A backed-up file version: original path, backup location and checksum."""

    @declared_attr
    def __table_args__(cls):
        # a given (path, checksum) pair is stored only once
        return (UniqueConstraint('path', 'checksum'),) + Base.__table_args__

    # original path of the file; indexed for lookups
    path = Column(String, index=True, nullable=False)
    # location of the stored copy; unique per reference
    backup_path = Column(String, unique=True, nullable=False)
    checksum = Column(String, nullable=False)
class Snapshot(IdMixin, Base):
    """A point-in-time backup run linking to the file references it contains."""

    # BUG FIX: was default=datetime.utcnow(), which evaluated ONCE at class
    # definition time, stamping every snapshot with the module-import time.
    # Passing the callable makes SQLAlchemy evaluate it per INSERT.
    time = Column(DateTime, nullable=False, default=datetime.utcnow)

    # Convenience proxy straight to FileReference objects through the
    # FileReferenceInSnapshot association table.
    file_references = association_proxy(
        'file_references_in_snapshot',
        'file_reference'
    )
class FileReferenceInSnapshot(Base):
    """Association object joining FileReference and Snapshot (composite PK)."""

    file_reference_id = Column(
        Integer,
        ForeignKey('%s.id' % FileReference.__tablename__),
        primary_key=True
    )
    file_reference = relationship(
        FileReference,
        backref=backref("file_references_in_snapshot", cascade="all")
    )

    snapshot_id = Column(
        Integer,
        ForeignKey('%s.id' % Snapshot.__tablename__),
        primary_key=True
    )
    snapshot = relationship(
        Snapshot,
        backref=backref('file_references_in_snapshot', cascade="all")
    )

    def __init__(self, file_reference=None, snapshot=None):
        # both ends optional so the association proxy can create instances
        self.file_reference = file_reference
        self.snapshot = snapshot
| behos/backup-to-s3 | packages/backup_to_s3/database/models.py | Python | gpl-2.0 | 1,972 |
# PyTransit: fast and easy exoplanet transit modelling in Python.
# Copyright (C) 2010-2019 Hannu Parviainen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from numpy import ceil, sqrt, where, inf
from matplotlib.pyplot import subplots
from pytransit.contamination import TabulatedFilter, Instrument, SMContamination
from pytransit.contamination.filter import sdss_g, sdss_r, sdss_i, sdss_z
from pytransit.lpf.cntlpf import PhysContLPF
from pytransit.param import NormalPrior as NP
from .mocklc import MockLC
class MockLPF(PhysContLPF):
    """Log posterior function for a simulated multi-passband light curve.

    Wraps a MockLC in a PhysContLPF so the contamination analysis can be
    exercised against data with a known ground truth.
    """

    def __init__(self, name: str, lc: MockLC):
        super().__init__(name, passbands=lc.pb_names, times=lc.npb * [lc.time],
                         fluxes=list(lc.flux.T), pbids=list(range(lc.npb)))
        self._lc = lc

        # Scenario flags: is the host star known, and is it deliberately
        # misidentified (host Teff replaced by the contaminant Teff)?
        self.know_host = lc.setup.know_host
        self.misidentify_host = lc.setup.misidentify_host
        self.hteff = lc.hteff if not self.misidentify_host else lc.cteff
        self.cteff = lc.cteff

        # True parameters copied from the mock light curve
        self.t0_bjd = 0.0
        self.period = lc.p
        self.sma = lc.a
        self.inc = lc.i
        self.k_apparent = lc.k_apparent
        self.b = lc.b

        # Tight prior on the (known) period
        self.set_prior(1, NP(lc.p, 1e-7))
        if lc.setup.know_orbit:
            self.set_prior(2, NP(5.0, 0.05))
            self.set_prior(3, NP(lc.b, 0.01))
        if lc.setup.know_host:
            if lc.setup.misidentify_host:
                self.set_prior(6, NP(self._lc.cteff, 10))
            else:
                self.set_prior(6, NP(self._lc.hteff, 10))

    def _init_instrument(self):
        """Set up the instrument and contamination model."""
        qe = TabulatedFilter('MockQE',
                             [300, 350, 500, 550, 700, 800, 1000, 1050],
                             [0.10, 0.20, 0.90, 0.96, 0.90, 0.75, 0.11, 0.05])
        self.instrument = Instrument('MockInstrument', [sdss_g, sdss_r, sdss_i, sdss_z], (qe, qe, qe, qe))
        self.cm = SMContamination(self.instrument, "i'")
        # reject samples where the apparent Teff exceeds the true host Teff
        self.lnpriors.append(lambda pv: where(pv[:, 4] < pv[:, 5], 0, -inf))

    def plot_light_curves(self, ncols: int = 2, figsize: tuple = (13, 5)):
        """Plot each light curve with the current best-fit model overlaid."""
        # BUG FIX: was int(ceil(self.nlc) / ncols), which floor-divides after
        # the (no-op) ceil and so dropped a row whenever nlc % ncols != 0.
        nrows = int(ceil(self.nlc / ncols))
        fig, axs = subplots(nrows, ncols, figsize=figsize, sharex='all', sharey='all', constrained_layout=True)
        fmodel = self.flux_model(self.de.population)[self.de.minimum_index]
        for i, ax in enumerate(axs.flat):
            ax.plot(self.times[i], self.fluxes[i], '.', alpha=0.25)
            ax.plot(self.times[i], fmodel[self.lcslices[i]], 'k')

    def posterior_samples(self, burn: int = 0, thin: int = 1, include_ldc: bool = False):
        """Return posterior samples with derived radius-ratio/contamination columns."""
        df = super().posterior_samples(burn, thin, include_ldc)
        df['k_app'] = sqrt(df.k2_app)
        df['k_true'] = sqrt(df.k2_true)
        # contamination = fraction of flux not coming from the host
        df['cnt'] = 1. - df.k2_app / df.k2_true
        return df
| hpparvi/PyTransit | notebooks/contamination/src/blendlpf.py | Python | gpl-2.0 | 3,425 |
from tkinter import *
import icon
class TabMaker:
    """Builds and installs a tab (a Canvas with optional background image
    and icons) onto the application's tab area."""

    def __init__(self, main):
        # Handles installing the tab onto the tab area
        self.main = main
        self.frame = main.frame
        self.cache = main.cache
        # Current tab description: (title, (bg color, bg image path), icons)
        self.tab_title = self.cache["CurrentTab"][0]
        self.tab_bg_color = self.cache["CurrentTab"][1][0]
        self.tab_img_pathname = self.cache["CurrentTab"][1][1]
        self.icon_list = self.cache["CurrentTab"][2]
        # Slot in the cache dict used to keep the PhotoImage alive
        self.cache["tab_img"] = None

    def install(self):
        # Install the tab on the application frame
        width = self.main.width
        height = self.main.tab_height
        self.tab_loaded = Canvas(self.frame, width = width,
                                 height = height, bg = self.tab_bg_color)
        self.tab_loaded.pack()
        # Background image (silently skipped if loading fails)
        if self.tab_img_pathname != None:
            try:
                self.cache["tab_img"] = PhotoImage(file = self.tab_img_pathname)
            except:
                pass
            else:
                x, y = self.main.width / 2, self.main.tab_height / 2
                self.tab_loaded.create_image(x, y,
                                             image = self.cache["tab_img"])
        else:
            pass
        # Store the tab ID so destroy() can be applied to it later
        self.cache["CurrentTabID"] = self.tab_loaded
        # Event bindings: right-click menu and drag handling
        self.tab_loaded.bind("<Button-3>", self.main.tab_menu_eventhandler)
        self.cache["CurrentTabID"].bind("<Motion>",
                                        self.main.drag_eventhandler)
        # Call the icon maker
        self.icon_maker()

    def icon_maker(self):
        # Install the icons on the tab
        for x in self.icon_list:
            icon.Icon(self.main, x)
| EnceladOnline/interfaX | tab.py | Python | gpl-2.0 | 1,558 |
#!/usr/bin/env python
#
# Author : Vikas Chouhan
# email : [email protected]
#
# This is an automated script for mining down the list of all scrips available on
# https://in.finance.yahoo.com
# NOTE : This script uses selenium python module to function properly.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.proxy import *
from selenium.common.exceptions import *
import os
import sys
import argparse
###################################################
def assertm(cond, msg):
    """Print *msg* and exit the process with status -1 when *cond* is false."""
    if not cond:
        print msg
        sys.exit(-1)
    # endif
# enddef
def main_thread(fout):
    """Scrape all scrip listings from Yahoo Finance India, page by page.

    Writes one CSV row (id,name,type,exchange) per scrip to *fout* and
    returns a dict keyed by scrip id with the same fields.
    """
    yahoo_base_url = "https://in.finance.yahoo.com/lookup/stocks?s=%3f&t=S&m=IN&r="
    # NOTE(review): the two xpath variables below are never used (the CSS
    # selectors are used instead); presumably leftovers.
    next_xpath_first_page = '//*[@id="pagination"]/a[3]'
    next_xpath_other_page = '//*[@id="pagination"]/a[3]'
    next_csspath_first_page = '#pagination > a:nth-child(1)'
    next_csspath_other_page = '#pagination > a:nth-child(3)'

    driver = webdriver.PhantomJS()
    scrip_dict = {}
    page_num = 1

    # Load starting page
    driver.get(yahoo_base_url)

    # NOTE(review): "if True:" is a no-op leftover; the bare "except:
    # continue" branches retry indefinitely on unexpected errors.
    while True:
        if True:
            # Check if "Next Button" exists
            if page_num == 1:
                # If next button doesn't exists, just break out of the loop !!
                try:
                    next_button = driver.find_element_by_css_selector(next_csspath_first_page)
                except NoSuchElementException:
                    break
                except:
                    continue
                # endtry
            else:
                # If next button dnoesn't exist, just break out of the loop !!
                try:
                    next_button = driver.find_element_by_css_selector(next_csspath_other_page)
                except NoSuchElementException:
                    break
                except:
                    continue
                # endtry
            # endif

            # Get list of all rows (the results table alternates row classes)
            odd_list = driver.find_elements_by_class_name("yui-dt-odd")
            even_list = driver.find_elements_by_class_name("yui-dt-even")

            # Collect all information for odd rows
            for item_this in odd_list:
                column_list = item_this.find_elements_by_tag_name("td")
                scrip_id = column_list[0].text.encode('ascii', 'ignore')
                name = column_list[1].text.encode('ascii', 'ignore')
                scrip_type = column_list[3].text.encode('ascii', 'ignore')
                exchange = column_list[4].text.encode('ascii', 'ignore')
                scrip_dict[scrip_id] = {
                    "id" : scrip_id,
                    "name" : name,
                    "type" : scrip_type,
                    "exch" : exchange,
                }
                # Write to target file
                fout.write("{},{},{},{}\n".format(scrip_id, name, scrip_type, exchange))
            # endfor

            # Collect all information for even rows (same logic as above)
            for item_this in even_list:
                column_list = item_this.find_elements_by_tag_name("td")
                scrip_id = column_list[0].text.encode('ascii', 'ignore')
                name = column_list[1].text.encode('ascii', 'ignore')
                scrip_type = column_list[3].text.encode('ascii', 'ignore')
                exchange = column_list[4].text.encode('ascii', 'ignore')
                scrip_dict[scrip_id] = {
                    "id" : scrip_id,
                    "name" : name,
                    "type" : scrip_type,
                    "exch" : exchange,
                }
                # Write to target file
                fout.write("{},{},{},{}\n".format(scrip_id, name, scrip_type, exchange))
            # endfor

            # Print progress: rows-on-page and page number
            sys.stdout.write("{}-{}..".format(len(even_list) + len(odd_list), page_num))
            sys.stdout.flush()

            # Click "Next Button"
            next_button.click()
            # Increment page number count
            page_num = page_num + 1
    # endwhile

    # Close driver
    driver.close()

    return scrip_dict
# enddef
# Main
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--outfile", help="output file for downloaded data", type=str, default=None)
args = parser.parse_args()
if not args.__dict__["outfile"]:
print "--outfile is required !!"
sys.exit(-1)
# endif
outfile = args.__dict__["outfile"]
fout = open(outfile, "w")
# Start collecting all values
sdict = main_thread(fout)
# endfor
fout.close()
# endif
| vikaschouhan/stock_analysis | mine_scrips_from_yahoo.py | Python | gpl-2.0 | 4,934 |
from . import num_reader
import sys
import math
class Parser(num_reader.NumReader):
    """Reads HPGL 2D plotter commands (PU/PD/AA) and emits toolpath moves
    through the NumReader/writer interface."""

    def __init__(self, writer):
        num_reader.NumReader.__init__(self, writer)
        self.i = 0
        self.j = 0
        # current pen position in HPGL integer units
        self.x = 0
        self.y = 0
        # z heights used for pen-down (cutting) and pen-up (rapid) moves
        self.down_z = 0
        self.up_z = 20
        # pen state: True while the pen is up
        self.up = True
        # scale from HPGL integer units to millimetres
        # NOTE(review): 0.01 implies 1 unit = 1/100 mm -- confirm against the
        # files this is used with (classic HPGL plotter units are 0.025 mm).
        self.units_to_mm = 0.01

    def ParsePuOrPd(self, up):
        """Parse the x,y arguments of a PU (pen up) or PD (pen down) command
        and emit the corresponding rapid/feed line move."""
        self.line_index = self.line_index + 1
        x = self.get_number()
        if len(x) > 0:
            y = self.get_number()
            if len(y) > 0:
                if up: color = "rapid"
                else: color = "feed"
                self.add_word(color)
                self.begin_path(color)
                if up: z = self.up_z
                else: z = self.down_z
                # on a pen state change, first emit the current position at
                # the new z so the path starts from the right height
                if self.up != up:
                    self.add_line(self.x * self.units_to_mm, self.y * self.units_to_mm, z)
                self.add_line(int(x) * self.units_to_mm, int(y) * self.units_to_mm, z)
                self.end_path()
                self.up = up
                self.x = int(x)
                self.y = int(y)

    def ParseAA(self):
        """Parse an AA (arc absolute) command: centre cx,cy and sweep angle
        in degrees; emit the corresponding feed arc."""
        self.line_index = self.line_index + 1
        cx = self.get_number()
        if len(cx) > 0:
            cy = self.get_number()
            if len(cy) > 0:
                a = self.get_number()
                if len(a) > 0:
                    self.add_word("feed")
                    self.begin_path("feed")
                    z = self.down_z
                    # drop the pen first if it was up
                    if self.up:
                        self.add_line(self.x * self.units_to_mm, self.y * self.units_to_mm, z)
                    # vector from centre to current point gives start angle/radius
                    sdx = self.x - int(cx)
                    sdy = self.y - int(cy)
                    start_angle = math.atan2(sdy, sdx)
                    end_angle = start_angle + int(a) * math.pi/180
                    radius = math.sqrt(sdx*sdx + sdy*sdy)
                    ex = int(cx) + radius * math.cos(end_angle)
                    ey = int(cy) + radius * math.sin(end_angle)
                    # positive sweep -> counter-clockwise (d=1), else clockwise
                    if int(a) > 0: d = 1
                    else: d = -1
                    self.add_arc(ex * self.units_to_mm, ey * self.units_to_mm, i = int(-sdx) * self.units_to_mm, j = int(-sdy) * self.units_to_mm, d = d)
                    self.end_path()
                    self.up = False
                    self.x = int(ex)
                    self.y = int(ey)

    def ParseFromFirstLetter(self, c):
        """Dispatch on the first letter of a command: P -> PU/PD, A -> AA."""
        if c == 'P':
            self.line_index = self.line_index + 1
            if self.line_index < self.line_length:
                c1 = self.line[self.line_index]
                self.parse_word += c1
                if c1 == 'U': # PU
                    self.ParsePuOrPd(True)
                elif c1 == 'D': # PD
                    self.ParsePuOrPd(False)
        elif c == 'A':
            self.line_index = self.line_index + 1
            if self.line_index < self.line_length:
                c1 = self.line[self.line_index]
                self.parse_word += c1
                if c1 == 'A': # AA, arc absolute
                    self.ParseAA()
| vilemnovak/blendercam | scripts/addons/cam/nc/hpgl2d_read.py | Python | gpl-2.0 | 3,087 |
"""Contains forms related to INIS submissions."""
import copy
import os
# from datetime import date
from datetime import datetime
from flask import current_app, request
from inis.modules.deposit.field_widgets import SelectInput
from inis.modules.deposit.fields.inis_fields import CreatorForm, date_factory, location_factory
from inis.utils import get_kb_items
from invenio.base.i18n import _
from invenio.modules.deposit import fields
from invenio.modules.deposit.autocomplete_utils import kb_autocomplete
from invenio.modules.deposit.field_widgets import ExtendedListWidget, ItemWidget, \
TagInput, TagListWidget, plupload_widget
from invenio.modules.deposit.filter_utils import strip_string
from invenio.modules.deposit.form import WebDepositForm
from invenio.modules.deposit.processor_utils import replace_field_data
from invenio.modules.knowledge.api import get_kb_mapping
# from invenio.modules.deposit.validation_utils import required_if
# from invenio.utils.forms import AutocompleteField
from wtforms import validators, widgets
from wtforms.validators import ValidationError
# from wtforms.widgets import TextInput
# from .autocomplete import descriptor_autocomplete
# Language codes from the 'languages' knowledge base, sorted by display name.
lang_codes_list = get_kb_items('languages')
lang_codes_list.sort(key=lambda tup: tup[1])
# --- Shared field definitions -------------------------------------------
# These module-level fields are assigned into INISForm below (and
# selectively overridden or disabled in its subclasses).

# TRN: the mandatory INIS record number identifier.
trn = fields.StringField(
    label="TRN",
    default='',
    validators=[validators.DataRequired(), ],
    filters=[strip_string, ],
    widget_classes='form-control',
    icon='fa fa-barcode fa-fw',
    export_key='trn',
)
title = fields.TitleField(
    validators=[validators.DataRequired()],
    description='Required.',
    filters=[strip_string, ],
    icon='fa fa-book fa-fw',
)
original_title = fields.StringField(
    label=_("Original title"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
# Publication language(s), up to 8 entries; English/French/German are
# promoted to the top of the knowledge-base language list.
language = fields.DynamicFieldList(
    fields.SelectField(
        validators=[validators.DataRequired()],
        filters=[strip_string, ],
        default='',
        choices=[('', ''), ('EN', 'English'),
                 ('FR', 'French'), ('DE', 'German'),
                 ('', '------'), ] + lang_codes_list,
        widget=SelectInput(class_="col-xs-3"),
    ),
    add_label='Add another language',
    label=_('Publication Language'),
    icon='fa fa-flag fa-fw',
    validators=[validators.DataRequired()],
    widget_classes='',
    min_entries=1,
    max_entries=8,
)
description = fields.TextAreaField(
    label=_("Physical description"),
    validators=[validators.DataRequired()],
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
    icon='fa fa-pencil fa-fw',
)
def place_factory(mandatory=False):
    """Build the 'Place of Publication' form field.

    NOTE(review): the ``mandatory`` flag is currently ignored -- the field
    is built identically either way; confirm whether a DataRequired
    validator was intended for mandatory=True.
    """
    place = fields.FormField(
        location_factory(),
        widget=ExtendedListWidget(
            item_widget=ItemWidget(),
            html_tag='div'
        ),
        label=_("Place of Publication"),
        icon='fa fa-globe fa-fw',
        widget_classes='',
    )
    return place
# Default, non-mandatory 'Place of Publication' field instance.
place = place_factory()
publisher = fields.StringField(
    label=_("Publisher"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
def publication_date_factory(mandatory=False):
    """Build the 'Publication date' form field.

    ``mandatory`` is forwarded to the underlying date sub-form factory.
    """
    publication_date = fields.FormField(
        date_factory(mandatory),
        label=_('Publication date'),
        icon='fa fa-calendar fa-fw',
        widget_classes='',
    )
    return publication_date
# Publication, author, conference, identifier and free-text note fields
# shared by the record forms below.
publication_date = publication_date_factory(False)
edition = fields.StringField(
    label=_("Edition"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
creators = fields.DynamicFieldList(
    fields.FormField(
        CreatorForm,
        widget=ExtendedListWidget(
            item_widget=ItemWidget(),
            html_tag='div'
        ),
    ),
    label='Authors',
    add_label='Add another author',
    icon='fa fa-user fa-fw',
    widget_classes='',
    min_entries=1,
    export_key='authors',
)
conference_title = fields.StringField(
    label=_("Conference title"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
original_conference_title = fields.StringField(
    label=_("Original conference title"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
conference_place = fields.FormField(
    location_factory(),
    widget=ExtendedListWidget(
        item_widget=ItemWidget(),
        html_tag='div'
    ),
    label=_("Conference place"),
    icon='fa fa-globe fa-fw',
    widget_classes='',
)
conference_date = fields.FormField(
    date_factory(False),
    label=_('Conference date'),
    icon='fa fa-calendar fa-fw',
    widget_classes='',
)
secondary_number = fields.StringField(
    label=_("Secondary numbers"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
isbn = fields.StringField(
    label=_("ISBN/ISSN"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
contract_number = fields.StringField(
    label=_("Contract/Project number"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
general_notes = fields.TextAreaField(
    label=_("General notes"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
    icon='fa fa-pencil fa-fw',
)
availability = fields.StringField(
    label=_("Availability"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
title_augmentation = fields.StringField(
    label=_("Title Augmentation"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
funding_organization_code = fields.StringField(
    label=_("Funding Organization code"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
corporate_entry_code = fields.StringField(
    label=_("Corporate Entry code"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
)
abstract = fields.TextAreaField(
    label=_("Abstract"),
    default='',
    filters=[strip_string, ],
    widget_classes='form-control',
    icon='fa fa-pencil fa-fw',
)
#
# Descriptors
#
def descriptor_kb_value(key_name):
    """Build a getter that resolves *key_name* from the 'descriptors' KB.

    The returned callable takes a form field; when the field has data, the
    matching knowledge-base entry is mapped and the requested sub-field
    returned, otherwise ''.
    """
    def _lookup(field):
        if not field.data:
            return ''
        mapping = get_kb_mapping('descriptors', str(field.data))
        if not mapping:
            return ''
        return descriptors_kb_mapper(mapping)['fields'][key_name]
    return _lookup
def descriptors_kb_mapper(val):
    """Map a raw 'descriptors' KB entry to an autocomplete value/fields dict."""
    descriptor = val['value']
    return {
        'value': '{0}'.format(descriptor),
        'fields': {'id': descriptor, 'descriptor': descriptor},
    }
class DescriptorForm(WebDepositForm):
    """Sub-form holding a single descriptor tag (hidden id + visible text)."""
    id = fields.StringField(
        widget=widgets.HiddenInput(),
        processors=[
            replace_field_data('descriptor', descriptor_kb_value('descriptor')),
        ],
    )
    # Autocompleted against the 'descriptors' knowledge base.
    descriptor = fields.StringField(
        placeholder="Start typing a descriptor...",
        autocomplete_fn=kb_autocomplete(
            'descriptors',
            mapper=descriptors_kb_mapper,
            match_type='b',
        ),
        widget=TagInput(),
        widget_classes='form-control',
    )
# Tag-list of knowledge-base descriptors attached to the record.
descriptors = fields.DynamicFieldList(
    fields.FormField(
        DescriptorForm,
        widget=ExtendedListWidget(html_tag='div', item_widget=ItemWidget()),
        export_key='descriptors',
    ),
    widget=TagListWidget(template="{{descriptor}}",
                         html_tag='ul',
                         class_='list-unstyled',
                         ),
    widget_classes=' dynamic-field-list',
    icon='fa fa-tags fa-fw',
    description="Add here the descriptors",
    #validators=[grants_validator],
)
# Free-text descriptors proposed by the submitter (max 8 entries).
proposed_descriptors = fields.DynamicFieldList(
    fields.StringField(
        placeholder="Propose a descriptor",
        filters=[strip_string, ],
    ),
    add_label='Add another proposed descriptor',
    icon='fa fa-flag fa-fw',
    # widget_classes='',
    min_entries=1,
    max_entries=8,
)
#
# Subjects
#
def subject_kb_value(key_name):
    """Build a getter that resolves *key_name* from the 'subjects' KB.

    The returned callable takes a form field; when the field has data, the
    matching knowledge-base entry is mapped and the requested sub-field
    returned, otherwise ''.
    """
    def _lookup(field):
        if not field.data:
            return ''
        mapping = get_kb_mapping('subjects', str(field.data))
        if not mapping:
            return ''
        return subjects_kb_mapper(mapping)['fields'][key_name]
    return _lookup
def subjects_kb_mapper(val):
    """Map a raw 'subjects' KB entry to an autocomplete value/fields dict.

    The first three characters of the KB value are the subject code ('id').
    """
    subject = val['value']
    return {
        'value': '{0}'.format(subject),
        'fields': {'id': subject[:3], 'subject': subject},
    }
class SubjectForm(WebDepositForm):
    """Sub-form holding a single INIS subject tag (hidden id + visible text)."""
    id = fields.StringField(
        widget=widgets.HiddenInput(),
        # processors=[
        #     replace_field_data('subject', subject_kb_value('id')),
        # ],
    )
    # Autocompleted against the 'subjects' knowledge base.
    subject = fields.StringField(
        placeholder="Start typing a subject...",
        autocomplete_fn=kb_autocomplete(
            'subjects',
            mapper=subjects_kb_mapper
        ),
        widget=TagInput(),
        widget_classes='form-control',
    )
# Tag-list of INIS subject categories.
# NOTE(review): export_key='subjects' is set both on the inner FormField and
# on this list -- confirm the duplication is intentional.
subjects = fields.DynamicFieldList(
    fields.FormField(
        SubjectForm,
        widget=ExtendedListWidget(html_tag='div', item_widget=ItemWidget()),
        export_key='subjects',
    ),
    widget=TagListWidget(template="{{subject}}",
                         html_tag='ul',
                         class_='list-unstyled',
                         ),
    widget_classes=' dynamic-field-list',
    icon='fa fa-tags fa-fw',
    description="Add here the subjects",
    export_key='subjects',
    #validators=[grants_validator],
)
# Layout of the record forms: (group title, field names, render options).
# Subclasses filter this list to drop groups they disable (e.g. Conference).
groups = [
    ('Basic information', [
        'trn', 'title', 'original_title', 'subjects', 'language',
        'description', 'descriptors', 'proposed_descriptors',
    ], {'indication': 'required', }),
    ('Publication information', [
        'place', 'publisher', 'publication_date', 'edition',
    ], {
        'indication': 'required',
    }),
    ('Authors', [
        'creators',
    ], {
        'classes': '',
        'indication': 'recommended',
    }),
    ('Conference', [
        'conference_title', 'original_conference_title',
        'conference_place', 'conference_date',
    ], {
        'classes': '',
        'indication': 'optional',
    }),
    ('Identifying numbers', [
        'secondary_number', 'isbn', 'contract_number',
    ], {
        'classes': '',
        'indication': 'optional',
    }),
    ('Extra information', [
        'general_notes', 'availability', 'title_augmentation',
        'funding_organization_code', 'corporate_entry_code',
    ], {
        'classes': '',
        'indication': 'optional',
    }),
    ('Abstract', [
        'abstract',
    ], {
        'classes': '',
        'indication': 'recommended',
    }),
]
def mandatory(field):
    """Return a deep copy of an unbound *field* whose validators require data.

    The original field object is left untouched, so the shared module-level
    definition stays optional while the copy becomes mandatory.
    """
    required = copy.deepcopy(field)
    required.kwargs['validators'] = [validators.DataRequired()]
    return required
######################
# Input #
######################
class INISForm(WebDepositForm):
    """INIS record input form fields."""
    # Fields
    # Basic information
    # Single-letter record-type code; each subclass sets its own default.
    record_type = fields.HiddenField(
        label='',
        default="",
    )
    # The remaining fields reuse the shared module-level definitions above.
    trn = trn
    title = title
    original_title = original_title
    subjects = subjects
    language = language
    description = description
    descriptors = descriptors
    proposed_descriptors = proposed_descriptors
    # Publication information
    place = place
    publisher = publisher
    publication_date = publication_date
    edition = edition
    creators = creators
    conference_title = conference_title
    original_conference_title = original_conference_title
    conference_place = conference_place
    conference_date = conference_date
    secondary_number = secondary_number
    isbn = isbn
    contract_number = contract_number
    general_notes = general_notes
    availability = availability
    title_augmentation = title_augmentation
    funding_organization_code = funding_organization_code
    corporate_entry_code = corporate_entry_code
    abstract = abstract
    # Form configuration
    _title = ''
    _drafting = True # enable and disable drafting
    groups = groups
class BookForm(INISForm):
    """Book/Monograph record form (type 'B'); publisher becomes mandatory."""
    record_type = fields.HiddenField(label='', default="B", )
    _title = _('Book or Monograph')
    publisher = mandatory(publisher)
class AudiovisualForm(INISForm):
    """Audiovisual Material record form (type 'F')."""
    record_type = fields.HiddenField(label='', default="F", )
    _title = _('Audiovisual Material')
class MiscellaneousForm(INISForm):
    """Miscellaneous record form (type 'I')."""
    record_type = fields.HiddenField(label='', default="I", )
    _title = _('Miscellaneous')
class PatentForm(INISForm):
    """Patent record form (type 'P'); conference and edition fields disabled."""
    record_type = fields.HiddenField(label='', default="P", )
    _title = _('Patent')
    # Setting inherited fields to None removes them from the rendered form.
    conference_title = None
    conference_place = None
    conference_date = None
    original_conference_title = None
    edition = None
    groups = [e for e in groups if e[0] != 'Conference']
class ReportForm(INISForm):
    """Report record form (type 'R'); corporate entry code is mandatory."""
    record_type = fields.HiddenField(label='', default="R", )
    _title = _('Report')
    edition = None
    corporate_entry_code = mandatory(corporate_entry_code)
class ComputerForm(INISForm):
    """Computer media record form (type 'T'); conference fields disabled."""
    record_type = fields.HiddenField(label='', default="T", )
    _title = _('ComputerMedia')
    conference_title = None
    conference_place = None
    conference_date = None
    original_conference_title = None
    groups = [e for e in groups if e[0] != 'Conference']
######################
# Upload #
######################
class UploadForm(WebDepositForm):
    """INIS record upload form fields.

    Collects an upload name, the action to perform (direct INIS input or
    CAI indexing), optional notes and the uploaded files themselves.
    """
    title = fields.TitleField(
        label=_('Upload name'),
        widget_classes="form-control",
        # NOTE(review): this placeholder is evaluated once at import time,
        # so it shows the process start time rather than "now" -- confirm
        # that is acceptable.
        placeholder='e.g. Upload of ' + datetime.now().strftime('%Y-%m-%d %H:%M'),
        icon='fa fa-book fa-fw',
        validators=[validators.DataRequired()],
        export_key='title.title',
    )
    action = fields.RadioField(
        label=_('Action'),
        icon='fa fa-book fa-fw',
        validators=[validators.DataRequired()],
        export_key='action',
        widget_classes='list-unstyled',
        default='INPUT',
        choices=[('INPUT', 'Final input for INIS'),
                 ('CAI', 'To be indexed in CAI')]
    )
    note = fields.TextAreaField(
        label=_("Notes"),
        description='Optional.',
        default='',
        validators=[validators.optional()],
        filters=[strip_string, ],
        widget_classes='form-control',
        icon='fa fa-pencil fa-fw',
        export_key='notes',
        placeholder="e.g. Journal of Radiation Research. JP. 23 records"
    )
    plupload_file = fields.FileUploadField(
        label="",
        widget=plupload_widget,
        export_key=False
    )
    def validate_plupload_file(form, field):
        """Ensure at least one file is attached and all extensions are accepted."""
        if not getattr(request, 'is_api_request', False):
            # Tested in API by a separate workflow task.
            if len(form.files) == 0:
                # Fixed typo in the user-facing message ("minumim one file").
                raise ValidationError("You must provide a minimum of one file")
            for f in form.files:
                if os.path.splitext(f.name)[1] not in current_app.config['DEPOSIT_ACCEPTED_EXTENSIONS']:
                    raise ValidationError("All files must have one of the following extensions :" +
                                          ', '.join(current_app.config['DEPOSIT_ACCEPTED_EXTENSIONS']))
                # if '-' in f.name:
                #     raise ValidationError("The character '-' is forbidden in the file name")
    _title = _("Upload")
    groups = [
        ('Basic information',
         ['title', 'note', 'action']),
    ]
| SDSG-Invenio/inis | inis/modules/deposit/forms.py | Python | gpl-2.0 | 15,561 |
#!/usr/bin/env python
# thecloud.py - Micro-sized Twitter client.
# Copyright (C) 2012 Amanda Folson <[email protected]>
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# Fixes: the mid-module `from random import choice` is hoisted to the top
# (PEP 8 E402) and the unused `import sys` is removed.
from random import choice

import tweepy

# OAuth credentials for the account that posts the status.
# NOTE(review): three of these are empty, so the script cannot authenticate
# until they are filled in -- consider reading them from the environment
# instead of hard-coding secrets in source.
CONSUMER_KEY = 'H0bdGOfmNffauOUwMqjViw'
CONSUMER_SECRET = ''
ACCESS_KEY = ''
ACCESS_SECRET = ''

auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)

# Candidate status updates; one is picked at random on each run.
statuses = ['Welcome to Linodecom.', 'This is Linodecom.', 'Welcome.', 'You can do anything at Linodecom.', 'Anything at all.', 'The only limit is yourself.', 'Yes.', 'This is Linodecom and welcome to you who have come to Linodecom.', 'Anything is possible at Linodecom.', 'You can do anything at Linodecom.', 'The infinite is possible at Linodecom.', 'The unattainable is unknown at Linodecom.']

api.update_status(choice(statuses))
| afolson/thecloud | thecloud.py | Python | gpl-2.0 | 1,545 |
from django.conf.urls import url
from . import views
# URL routes for the winecat app.
urlpatterns = [
    # ex: /winecat/
    url(r'^$', views.index, name='index'),
    # ex: /winecat/5/
    url(r'^(?P<wine_id>[0-9]+)/$', views.detail, name='detail'),
    # ex: /winecat/5/vineyard/
    url(r'^(?P<wine_id>[0-9]+)/vineyard/$', views.vineyard, name='vineyard'),
    # ex: /winecat/5/varietal/
    url(r'^(?P<wine_id>[0-9]+)/varietal/$', views.varietal, name='varietal'),
]
# ~*~ coding: utf-8 ~*~
from __future__ import unicode_literals
import os
from collections import namedtuple, defaultdict
from ansible.errors import AnsibleError
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.vars import VariableManager
from ansible.parsing.dataloader import DataLoader
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.playbook.play import Play
import ansible.constants as C
from ansible.utils.vars import load_extra_vars
from ansible.utils.vars import load_options_vars

from .inventory import JMSInventory
from .callback import AdHocResultCallback, PlaybookResultCallBack, \
    CommandResultCallback
from common.utils import get_logger
__all__ = ["AdHocRunner", "PlayBookRunner"]
C.HOST_KEY_CHECKING = False
logger = get_logger(__name__)
# Jumpserver not use playbook
class PlayBookRunner(object):
    """
    Interface for executing an Ansible playbook; simplifies the use of
    Playbook objects.
    """
    # Subset of the ansible CLI options forwarded to PlaybookExecutor.
    Options = namedtuple('Options', [
        'listtags', 'listtasks', 'listhosts', 'syntax', 'connection',
        'module_path', 'forks', 'remote_user', 'private_key_file', 'timeout',
        'ssh_common_args', 'ssh_extra_args', 'sftp_extra_args',
        'scp_extra_args', 'become', 'become_method', 'become_user',
        'verbosity', 'check', 'extra_vars'])
    def __init__(self,
                 hosts=None,
                 playbook_path=None,
                 forks=C.DEFAULT_FORKS,
                 listtags=False,
                 listtasks=False,
                 listhosts=False,
                 syntax=False,
                 module_path=None,
                 remote_user='root',
                 timeout=C.DEFAULT_TIMEOUT,
                 ssh_common_args=None,
                 ssh_extra_args=None,
                 sftp_extra_args=None,
                 scp_extra_args=None,
                 become=True,
                 become_method=None,
                 become_user="root",
                 verbosity=None,
                 extra_vars=None,
                 connection_type="ssh",
                 passwords=None,
                 private_key_file=None,
                 check=False):
        # Never write .retry files next to the playbook.
        C.RETRY_FILES_ENABLED = False
        self.callbackmodule = PlaybookResultCallBack()
        # Raises AnsibleError when the playbook file does not exist.
        if playbook_path is None or not os.path.exists(playbook_path):
            raise AnsibleError(
                "Not Found the playbook file: %s." % playbook_path)
        self.playbook_path = playbook_path
        self.loader = DataLoader()
        self.variable_manager = VariableManager()
        self.passwords = passwords or {}
        self.inventory = JMSInventory(hosts)
        self.options = self.Options(
            listtags=listtags,
            listtasks=listtasks,
            listhosts=listhosts,
            syntax=syntax,
            timeout=timeout,
            connection=connection_type,
            module_path=module_path,
            forks=forks,
            remote_user=remote_user,
            private_key_file=private_key_file,
            ssh_common_args=ssh_common_args or "",
            ssh_extra_args=ssh_extra_args or "",
            sftp_extra_args=sftp_extra_args,
            scp_extra_args=scp_extra_args,
            become=become,
            become_method=become_method,
            become_user=become_user,
            verbosity=verbosity,
            extra_vars=extra_vars or [],
            check=check
        )
        self.variable_manager.extra_vars = load_extra_vars(loader=self.loader,
                                                           options=self.options)
        self.variable_manager.options_vars = load_options_vars(self.options)
        self.variable_manager.set_inventory(self.inventory)
        # Initialise the playbook executor.
        self.runner = PlaybookExecutor(
            playbooks=[self.playbook_path],
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            options=self.options,
            passwords=self.passwords)
        # Route executor output through our result-collecting callback.
        if self.runner._tqm:
            self.runner._tqm._stdout_callback = self.callbackmodule
    def run(self):
        """Run the playbook and return the collected callback output.

        Raises AnsibleError when the inventory contains no hosts.
        """
        if not self.inventory.list_hosts('all'):
            raise AnsibleError('Inventory is empty')
        self.runner.run()
        self.runner._tqm.cleanup()
        return self.callbackmodule.output
class AdHocRunner(object):
    """
    Ad-hoc interface: run (module, args) task tuples against an inventory.
    """
    Options = namedtuple("Options", [
        'connection', 'module_path', 'private_key_file', "remote_user",
        'timeout', 'forks', 'become', 'become_method', 'become_user',
        'check', 'extra_vars',
    ]
    )
    results_callback_class = AdHocResultCallback
    def __init__(self,
                 hosts=C.DEFAULT_HOST_LIST,
                 forks=C.DEFAULT_FORKS,  # 5
                 timeout=C.DEFAULT_TIMEOUT,  # SSH timeout = 10s
                 remote_user=C.DEFAULT_REMOTE_USER,  # root
                 module_path=None,  # dirs of custom modules
                 connection_type="smart",
                 become=None,
                 become_method=None,
                 become_user=None,
                 check=False,
                 passwords=None,
                 extra_vars=None,
                 private_key_file=None,
                 gather_facts='no'):
        self.pattern = ''
        self.variable_manager = VariableManager()
        self.loader = DataLoader()
        self.gather_facts = gather_facts
        self.results_callback = AdHocRunner.results_callback_class()
        self.options = self.Options(
            connection=connection_type,
            timeout=timeout,
            module_path=module_path,
            forks=forks,
            become=become,
            become_method=become_method,
            become_user=become_user,
            check=check,
            remote_user=remote_user,
            extra_vars=extra_vars or [],
            private_key_file=private_key_file,
        )
        self.variable_manager.extra_vars = load_extra_vars(self.loader,
                                                           options=self.options)
        self.variable_manager.options_vars = load_options_vars(self.options)
        self.passwords = passwords or {}
        self.inventory = JMSInventory(hosts)
        self.variable_manager.set_inventory(self.inventory)
        self.tasks = []
        self.play_source = None
        self.play = None
        self.runner = None
    @staticmethod
    def check_module_args(module_name, module_args=''):
        """Return False (and print an error) when a module requires args but got none."""
        if module_name in C.MODULE_REQUIRE_ARGS and not module_args:
            err = "No argument passed to '%s' module." % module_name
            print(err)
            return False
        return True
    def run(self, task_tuple, pattern='all', task_name='Ansible Ad-hoc'):
        """
        :param task_tuple: (('shell', 'ls'), ('ping', ''))
        :param pattern:
        :param task_name:
        :return:
        """
        # NOTE(review): the play below targets ``pattern``, but the host check
        # further down uses ``self.pattern`` (initialised to '' and never
        # updated from this argument) -- confirm whether it should check
        # ``pattern`` instead.
        for module, args in task_tuple:
            if not self.check_module_args(module, args):
                return
            self.tasks.append(
                dict(action=dict(
                    module=module,
                    args=args,
                ))
            )
        self.play_source = dict(
            name=task_name,
            hosts=pattern,
            gather_facts=self.gather_facts,
            tasks=self.tasks
        )
        self.play = Play().load(
            self.play_source,
            variable_manager=self.variable_manager,
            loader=self.loader,
        )
        self.runner = TaskQueueManager(
            inventory=self.inventory,
            variable_manager=self.variable_manager,
            loader=self.loader,
            options=self.options,
            passwords=self.passwords,
            stdout_callback=self.results_callback,
        )
        if not self.inventory.list_hosts("all"):
            raise AnsibleError("Inventory is empty.")
        if not self.inventory.list_hosts(self.pattern):
            raise AnsibleError(
                "pattern: %s dose not match any hosts." % self.pattern)
        try:
            self.runner.run(self.play)
        except Exception as e:
            logger.warning(e)
        else:
            logger.debug(self.results_callback.result_q)
            return self.results_callback.result_q
        finally:
            # Always release the TQM and any temporary files DataLoader made.
            if self.runner:
                self.runner.cleanup()
            if self.loader:
                self.loader.cleanup_all_tmp_files()
    def clean_result(self):
        """
        :return: {
            "success": ['hostname',],
            "failed": [('hostname', 'msg'), {}],
        }
        """
        result = {'success': [], 'failed': []}
        for host in self.results_callback.result_q['contacted']:
            result['success'].append(host)
        # 'dark' hosts are unreachable/failed; join their messages per host.
        for host, msgs in self.results_callback.result_q['dark'].items():
            msg = '\n'.join(['{} {}: {}'.format(
                msg.get('module_stdout', ''),
                msg.get('invocation', {}).get('module_name'),
                msg.get('msg', '')) for msg in msgs])
            result['failed'].append((host, msg))
        return result
def test_run():
    """Smoke test: run a 'shell: ls' ad-hoc task against a sample host."""
    assets = [
        {
            "hostname": "192.168.244.129",
            "ip": "192.168.244.129",
            "port": 22,
            "username": "root",
            "password": "redhat",
        },
    ]
    task_tuple = (('shell', 'ls'),)
    hoc = AdHocRunner(hosts=assets)
    hoc.results_callback = CommandResultCallback()
    ret = hoc.run(task_tuple)
    print(ret)
#play = PlayBookRunner(assets, playbook_path='/tmp/some.yml')
"""
# /tmp/some.yml
---
- name: Test the plabybook API.
hosts: all
remote_user: root
gather_facts: yes
tasks:
- name: exec uptime
shell: uptime
"""
#play.run()
# Run the smoke test when executed as a script.
if __name__ == "__main__":
    test_run()
| choldrim/jumpserver | apps/ops/ansible/runner.py | Python | gpl-2.0 | 9,951 |
import logging
import os
import sys
import ConfigParser
import string
import urllib2
class Config:
    """Application configuration loaded from ``config.cfg``.

    Reads server/storage settings and the logging level, configures the
    application logger, and loads the mine map from ``minemap.txt``. Both
    files are looked up next to the running program.
    """
    def __init__(self):
        config = ConfigParser.ConfigParser()
        with open(os.path.join(self._get_dir_path(), 'config.cfg'), 'r') as cfg_file:
            config.readfp(cfg_file)
        self.service_host = config.get('local', 'server_host')
        self.service_port = config.get('local', 'server_port')
        self.block_storage = config.get('local', 'block_storage')
        block_size = config.get('local', 'block_size')
        # Default to 16 when block_size is left empty in the config file.
        self.block_size = 16 if block_size == '' else int(block_size)
        self.minimap = {}
        self.loggingLevel = logging.ERROR
        #TEST
        self.server_folder = config.get('test', 'server_folder')
        self.client_folder = config.get('test', 'client_folder')
        try:
            level = config.get('log', 'level')
            if hasattr(logging, level):
                self.loggingLevel = getattr(logging, level)
        except ConfigParser.Error:
            # The [log] section/option is optional; keep the ERROR default.
            # (Was a bare `except:` that silently swallowed every error.)
            pass
        self._init_logger()
        self._init_minimap()
    def _init_minimap(self):
        """Load 'key = value' pairs from minemap.txt into self.minimap."""
        with open(os.path.join(self._get_dir_path(), 'minemap.txt'), 'r') as f:
            for line in f:
                temp = line.split(' = ')
                if len(temp) == 2:
                    self.minimap[str(temp[0].strip())] = temp[1].strip()
    def _init_logger(self):
        """Configure the app logger with console and file handlers."""
        logger = logging.getLogger('incremental-update-client')
        logger.setLevel(self.loggingLevel)
        ch = logging.StreamHandler()
        cf = logging.FileHandler(os.path.join(self._get_dir_path(), 'app.log'))
        ch.setLevel(self.loggingLevel)
        cf.setLevel(self.loggingLevel)
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(funcName)s : %(message)s')
        ch.setFormatter(formatter)
        cf.setFormatter(formatter)
        logger.addHandler(ch)
        logger.addHandler(cf)
    def _get_dir_path(self):
        """Return the directory containing the running program."""
        return os.path.dirname(os.path.abspath(sys.argv[0]))
    def _str2list(self, value):
        """Split a comma-separated string into a list; return [] on bad input.

        (The parameter was previously named ``str``, shadowing the builtin,
        and the bare `except:` hid every failure.)
        """
        try:
            return value.split(',')
        except (AttributeError, TypeError):
            # value was not a string (e.g. None).
            return []
# Module-level singletons: the shared configuration and application logger.
config = Config()
log = logging.getLogger('incremental-update-client')
# BurnMan - a lower mantle toolkit
# Copyright (C) 2012, 2013, Heister, T., Unterborn, C., Rose, I. and Cottaar, S.
# Released under GPL v2 or later.
import numpy as np
import warnings
from burnman import Material
from burnman import Mineral
def check_pairs(fractions, minerals):
    """Sanity-check a (molar fractions, minerals) pairing.

    Raises Exception when the lists are empty, of different lengths, when
    the fractions do not sum to one, or when an entry is not a Mineral.
    """
    if len(fractions) < 1:
        raise Exception('ERROR: we need at least one mineral')
    if len(fractions) != len(minerals):
        raise Exception('ERROR: different array lengths')
    if abs(sum(fractions) - 1.0) > 1e-10:
        raise Exception('ERROR: list of molar fractions does not add up to one')
    for phase in minerals:
        if not isinstance(phase, Mineral):
            raise Exception('ERROR: object of type %s is not of type material' % (type(phase)))
# static composite of minerals/composites
class Composite(Material):
    """
    Base class for a static composite material with fixed molar fractions. The
    elements can be minerals or materials, meaning composite can be nested
    arbitrarily.
    This class is available as ``burnman.Composite``.
    """
    def __init__(self, fractions, phases=None):
        """
        Create a composite using a list of phases and their fractions (adding to 1.0).
        Parameters
        ----------
        fractions: list of floats
            molar fraction for each phase.
        phases: list of :class:`burnman.Material`
            list of phases.
        """
        if phases is None:
            # compatibility hack: a single list of (phase, fraction) pairs
            # was passed as the first argument; split it into two lists.
            tmp = fractions
            fractions = [pt[1] for pt in tmp]
            phases = [pt[0] for pt in tmp]
        assert(len(phases)==len(fractions))
        assert(len(phases)>0)
        for f in fractions:
            # we would like to check for >= 0, but this creates nasty behavior due to
            # floating point rules: 1.0-0.8-0.1-0.1 is not 0.0 but -1e-14.
            assert (f >= -1e-12)
        fractions = [max(0.0, fr) for fr in fractions] # turn -1e-14 into 0.0
        total = sum(fractions)
        if abs(total - 1.0) > 1e-12:
            warnings.warn("Warning: list of molar fractions does not add up to one but %g. Normalizing." % total)
            fractions = [fr / total for fr in fractions]
        # NOTE(review): this Python 2 era code (see the print statements below)
        # relies on zip() returning a list that can be iterated repeatedly; in
        # Python 3 zip() is a one-shot iterator and this would need list(zip(...)).
        self.children = zip(fractions, phases)
    def debug_print(self, indent=""):
        # Recursively print the composite tree with fractions (Python 2 print).
        print "%sComposite:" % indent
        indent += "  "
        for (fraction, phase) in self.children:
            print "%s%g of" % (indent, fraction)
            phase.debug_print(indent + "  ")
    def set_method(self, method):
        """
        set the same equation of state method for all the phases in the composite
        """
        for (fraction, phase) in self.children:
            phase.set_method(method)
    def unroll(self):
        # Flatten nested composites into parallel (fractions, minerals) lists,
        # scaling child fractions by this node's fraction.
        fractions = []
        minerals = []
        for (fraction, phase) in self.children:
            p_fr,p_min = phase.unroll()
            check_pairs(p_fr, p_min)
            fractions.extend([i*fraction for i in p_fr])
            minerals.extend(p_min)
        return (fractions, minerals)
    def to_string(self):
        """
        return the name of the composite
        """
        return "'" + self.__class__.__name__ + "'"
    def set_state(self, pressure, temperature):
        """
        Update the material to the given pressure [Pa] and temperature [K].
        """
        self.pressure = pressure
        self.temperature = temperature
        for (fraction, phase) in self.children:
            phase.set_state(pressure, temperature)
    def density(self):
        """
        Compute the density of the composite based on the molar volumes and masses
        """
        # Volume-weighted average: sum(rho_i * V_i) is the total mass and
        # sum(V_i) the total volume, with V_i = molar volume * molar fraction.
        densities = np.array([ph.density() for (_,ph) in self.children])
        volumes = np.array([ph.molar_volume()*fraction for (fraction, ph) in self.children])
        return np.sum(densities*volumes)/np.sum(volumes)
| QuLogic/burnman | burnman/composite.py | Python | gpl-2.0 | 3,925 |
#!/usr/bin/env python
"""Program to compare 2 images
Process these images to show the differences more clearly
and show with 3 tabs: Master image, Slave image, Diff image.
Author: Henk Speksnijder october 2013.
20130927 Version 0.73 using PYTHON-PILLOW
"""
import sys
#import os
import io
from PIL import Image, ImageChops
try:
import gi
gi.require_version('Gtk', '3.0') # tell we want GTK3
except ImportError:
print('You need to install python-gobject.')
sys.exit(1)
try:
#from gi.repository import Gtk, GdkPixbuf, GObject # pylint: disable=E0611
from gi.repository import Gtk, GdkPixbuf # pylint: disable=E0611
except ImportError:
print("You need to install python-Gobject or GTK3\n"
"or set your PYTHONPATH correctly.\n"
"try: export PYTHONPATH="
"/usr/lib/python3.2/site-packages/")
sys.exit(1)
# Now we have both gtk and Gtk.glade imported and run GTK v3
class tabimage(Gtk.ScrolledWindow):
    """Class on Gtk.notebook to manage 1 tabsheet with an image."""
    def __init__(self, parent): # pylint: disable=E1002
        # Last rendered image size, in pixels.
        self.imagewidth = 0
        self.imageheight = 0
        # The notebook that owns this tab; its allocation drives resizing.
        self.ntbk = parent
        self.pixbuf = None
        self.image = None
        super().__init__()
    def imagefile(self, fn, iw, ih):
        """Load image file fn and scale to size iw * ih."""
        self.pixbuf = GdkPixbuf.Pixbuf.new_from_file(fn)
        self.image = self.imagestart(self.pixbuf, iw, ih)
        # Re-fit the image whenever the widget is redrawn.
        self.image.connect('draw', self.imageresize, self.pixbuf)
        self.add(self.image)
        self.imagewidth = self.pixbuf.get_width()
        self.imageheight = self.pixbuf.get_height()
        return
    def imagepixb(self, apixbuf, iw, ih):
        """apixbuf, scale to size iw * ih."""
        self.pixbuf = apixbuf
        self.image = self.imagestart(apixbuf, iw, ih)
        self.image.connect('draw', self.imageresize, self.pixbuf)
        self.add(self.image)
        self.imagewidth = apixbuf.get_width()
        self.imageheight = apixbuf.get_height()
        return
    def imagestart(self, apixbuf, targetw, targeth):
        """Make sure we start with images fit in window."""
        iw = apixbuf.get_width()
        ih = apixbuf.get_height()
        if iw > targetw or ih > targeth:
            apixbuf = self.imagezoom(apixbuf, iw, ih)
        ret = Gtk.Image.new_from_pixbuf(apixbuf)
        return ret
    def imageresize(self, imgwidget, event, apixbuf): # pylint: disable=W0613
        """Resize image to FIT on imgwidget (an Gtk.Image)."""
        allocation = self.ntbk.get_allocation()
        aw = allocation.width # - 40
        ah = allocation.height # - 50
        if aw != apixbuf.get_width() or ah != apixbuf.get_height():
            zpixbuf = self.imagezoom(apixbuf, aw, ah)
            imgwidget.set_from_pixbuf(zpixbuf)
            self.imagewidth = aw # + 40
            self.imageheight = ah # + 50
        return
    def imagezoom(self, apixbuf, targetwidth, targetheight):
        """Return resized image from apixbuf to targetw * targeth."""
        preimg_width = apixbuf.get_width()
        preimg_height = apixbuf.get_height()
        # Scale by the larger ratio so the whole image fits (aspect preserved).
        wratio = float(preimg_width)/targetwidth
        hratio = float(preimg_height)/targetheight
        if wratio < hratio:
            zoomratio = hratio
        else:
            zoomratio = wratio
        wfinal = int(preimg_width / zoomratio)
        hfinal = int(preimg_height / zoomratio)
        zpixbuf = apixbuf.scale_simple(
            wfinal,
            hfinal,
            GdkPixbuf.InterpType.BILINEAR)
        return zpixbuf
class CompareImages(Gtk.Window):
    """Main window: a notebook showing master, slave and difference images."""
    def __init__(self, fnm, fns):
        """Build the window and its three tabs from image files fnm and fns."""
        self.imgwidth = 600
        self.imgheight = 400
        self.window = Gtk.Window()
        self.window.set_title('Compare Images')
        self.window.set_size_request(self.imgwidth+40, self.imgheight+50)
        self.window.connect("destroy", Gtk.main_quit)
        #===== notebook with images =====
        self.ntbk = Gtk.Notebook()
        self.tabw1 = tabimage(self.ntbk)
        self.tabw1.imagefile(fnm, 600, 400)
        self.tabw2 = tabimage(self.ntbk)
        self.tabw2.imagefile(fns, 600, 400)
        # Fix: use the constructor parameters, not the module-level globals
        # fna/fnb, so the class works for any pair of file names.
        self.pixbdif = self.imgdif(fnm, fns)
        self.tabw3 = tabimage(self.ntbk)
        self.tabw3.imagepixb(self.pixbdif, 600, 400)
        self.tabt1 = Gtk.Label("Master image")
        self.tabt2 = Gtk.Label("Slave Image")
        self.tabt3 = Gtk.Label("Differences")
        self.ntbk.append_page(self.tabw1, self.tabt1)
        self.ntbk.append_page(self.tabw2, self.tabt2)
        self.ntbk.append_page(self.tabw3, self.tabt3)
        #self.ntbk.connect("switch-page", self.ntbkswitch)
        self.window.add(self.ntbk)
        #===== Bring it On ! =====
        self.window.show_all()
        return
    def main(self): # pylint: disable=C0111
        """Enter the GTK main loop."""
        Gtk.main()
    def imgdif(self, fnm, fns):
        """Take 2 images and generate a image diff, Return GdkPixbuf."""
        buff1 = io.BytesIO()
        imagem = Image.open(fnm)
        images = Image.open(fns)
        imaged = ImageChops.difference(imagem, images)
        # That's all we need to compare the images.
        # Seems mold from PIL image to GTK image is weird, see:
        # http://stackoverflow.com/questions/12413645/displaying-an-image-with-pygobject-and-python-3-from-in-memory-data
        # Anyway, we need a lot of boilerplate to get there:
        imaged.save(buff1, "ppm")
        contents = buff1.getvalue()
        buff1.close()
        loader = GdkPixbuf.PixbufLoader.new_with_type('pnm')
        loader.write(contents)
        pixbuf = loader.get_pixbuf()
        loader.close()
        return pixbuf
if __name__ == "__main__":
    # BUG FIX: sys.version is a string ('3.8.10'...), so sys.version[0]
    # is the character '3' or '2' and never equals the int 2; use
    # sys.version_info for a reliable major-version check.
    if sys.version_info[0] == 2:
        print("Error: Program designed for Python Version 3.")
        sys.exit(1)
    if len(sys.argv) < 3:
        print('You need to provide 2 filenames')
        exit(1)
    fna = sys.argv[1]
    fnb = sys.argv[2]
    app = CompareImages(fna, fnb)
    Gtk.main()
    sys.exit(0)
# 0.1 initial incomplete design by Henk Speksnijder 11 march 2013
# Not much progress as PIL is not available for python3
# 0.5 improvements october 2013 using python-pillow
# Meanwhile a PIL fork has been developed: Pillow, for image processing.
# GdkPixbuf.Scale(dest,
# 0,0 # dest_x, dest_y,
# dest_width, dest_height,
# 0,0 #offset_x, offset_y,
# scale_x, scale_y,
# gtk.gdk.INTERP_BILINEAR) # interp_type
# The scale() method creates a transformation of the pixbuf's image by
# scaling by scale_x and scale_y and translating by offset_x and offset_y it,
# then rendering the rectangle (dest_x, dest_y, dest_width, dest_height) of
# the resulting image onto the destination image specified by dest replacing
# the previous contents.
# Gtk3 Nieuw: GdkPixbuf.InterpType.BILINEAR
# Gtk2 oud was: gtk.gdk.INTERP_BILINEAR
# 0.6 GdkPixbuf.scale_simple found, is more easy.
# Searched long and hard in the internet but cannot find how to
# make the image scale automatic. Finally 'borrow' from mirage.
# self.window.connect("size-allocate", self.window_resized)
# mirage line 3148 wanted_zoomratio = self.calc_ratio(self.currimg)
# useful, copied 3148 and line 3120 to 3132
# VERY very interesting:
# gc.collect() # Clean up (free memory)
# 0.7 20131009 To get the image resizable try
# with scrolled window and with img1.connevt(....
# 0.73 20131011 change several functions to a new class tabimage
# Wow: a lot simpler and works much better.
# 0.74 20131012 code cleanup (pylint)
# TO DO:
# add information pane to compare image information
# like filesize, date/time, w * h, extension, exif info.
# make keys m/s/d and 1/2/3 switch tabs
# make help-panel
# make translations
#=============== pylintrc: ====================
# [MASTER]
# profile=no
# ignore=CVS
# persistent=no
# [MESSAGES CONTROL]
# disable=I0011
# [REPORTS]
# output-format=text
# reports=no
# [FORMAT]
# indent-string=' '
# max-line-length=100
# [DESIGN]
# max-args=7
# max-attributes=15
# [BASIC]
# class-rgx=[a-zA-Z_][a-zA-Z0-9]+$
# argument-rgx=[a-z_][a-z0-9_]{1,30}$
# variable-rgx=[a-z_][a-z0-9_]{1,30}$
# C0111 = Missing Docstring
# some procedures are so obvious a docstring looks silly.
# E0611 = No name 'Gtk' in module 'gi.repository (line 14)
# Due to pylint limitations it cannot scan gi-repository.
# E1002 = Use of super on old style class
# pylint thinks I've made an old-style class while it IS a new-style class!
# W0613 = Unused argument 'event'
# Yeah right, the argument is built into the Gtk callback mechanism
# and in this function i cannot invent a way to make use of it.
| henkidefix/CompareDir | compareimage.py | Python | gpl-2.0 | 8,354 |
#!/usr/bin/python
#
# Copyright (C) 2015 Cisco Systems Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script can be run with the help option to print command line help:
#
# ./getIntfDataRaw.py -h
#
# If you do not enter command line options, it will interactively prompt
# for input. Password will be hidden in interactive input.
# Sample run without entering password on CLI:
#
# Note this script uses the requests library which can be brought in
# through the python package manager "pip".
import requests
import json
import sys
import logging
import getpass
from optparse import OptionParser

# Gather CLI and interactive input options.
# NOTE(review): this script is Python 2 only -- it uses raw_input and the
# print statement at the bottom.
# NOTE(review): sys and logging are imported but never used here.
optp = OptionParser()
optp.add_option("-i", "--IP", dest="IP",
                help="IP address to connect to")
optp.add_option("-u", "--USER", dest="USER",
                help="Username")
optp.add_option("-p", "--PASS", dest="PASS",
                help="Password")
opts, args = optp.parse_args()

# Fall back to interactive prompts for any option not supplied on the CLI.
# The NX-API endpoint lives at http://<device>/ins.
if opts.IP is None:
    url='http://' + raw_input("IP Address: ") + '/ins'
else:
    url='http://' + opts.IP + '/ins'
if opts.USER is None:
    user = raw_input("Username: ")
else:
    user = opts.USER
if opts.PASS is None:
    # getpass hides the password while typing.
    passer = getpass.getpass("Password: ")
else:
    passer = opts.PASS

# Set up the JSON-RPC request body.
# NOTE(review): the original comment said "show version", but the command
# actually sent is "show int mgmt0".
myheaders={'content-type':'application/json-rpc'}
payload=[
    {
        "jsonrpc": "2.0",
        "method": "cli",
        "params": {
            "cmd": "show int mgmt0",
            "version": 1
        },
        "id": 1
    }
]

# Send payload to the network element and pretty-print the JSON response.
response = requests.post(url,data=json.dumps(payload), headers=myheaders,auth=(user,passer)).json()
print json.dumps(response, indent=4, sort_keys=True)
| tecdct2941/scripts | guestshell/getIntfDataRaw.py | Python | gpl-2.0 | 2,192 |
import csv

# Build the set of all people named in the friendship CSV.
# Each well-formed row is a pair: (person, friend).
people = set()
with open('harry_potter.csv') as csv_file:  # renamed from `file`: don't shadow the builtin
    reader = csv.reader(csv_file)
    for row in reader:
        if len(row) == 2:  # idiomatic: no parentheses around the condition
            people.add(row[0])
            people.add(row[1])
        else:
            # Rows without exactly two fields are reported, not silently dropped.
            print("Malformed line:", row)
print(people) | emhill/117-S15 | src/morea/10.project2/in_class/csv_reader.py | Python | gpl-2.0 | 406 |
# Copyright (C) 2014-2014 Project
# License: http://www.gnu.org/licenses/gpl.html GPL version 2 or higher
import argparse
import logging
# Module-wide logger; the level is configured in run() from the -v count.
_logger = logging.getLogger('pwebs.main')
#logging.basicConfig(level=0)

# Nagios plugin exit codes keyed by status name.
exitcodes = {'OK': 0,
             'Warning': 1,
             'Critical': 2,
             'Unknown': 3}
def _get_threshold_range(data):
    """Parse a Nagios-style threshold into a (lower, upper) tuple.

    Accepted forms:
      "10"    -> (0, 10)
      "5:20"  -> (5, 20)
      "~:20"  -> ('~', 20)   where '~' means "no lower bound"

    :raises argparse.ArgumentTypeError: on invalid characters or an
        inverted range.
    """
    import re
    _logger.debug('Got the folowing data for _get_threshold_range: %s' % data)
    if ':' in data:
        value_strings = data.split(':')
        match0 = re.match('([~]|[0-9]+)$', value_strings[0])
        match1 = re.match('[0-9]+$', value_strings[1])
        if not (match0 and match1):
            raise argparse.ArgumentTypeError('Supplied range contains invalid character(s)')
        value1 = int(value_strings[1])
        if value_strings[0] == '~':
            value0 = value_strings[0]
        else:
            value0 = int(value_strings[0])
            # BUG FIX: the bound-order check must only run for numeric
            # lower bounds. Comparing '~' (str) against an int was always
            # True on Python 2 (rejecting every "~:N" range) and raises
            # TypeError on Python 3.
            if value0 > value1:
                raise argparse.ArgumentTypeError('Invalid range; lower bound must be smaller than upper bound')
        values = (value0, value1)
    else:
        match = re.match('[0-9]+$', data)
        if not match:
            raise argparse.ArgumentTypeError('Supplied Argument contains invalid character(s)')
        values = (0, int(data))
    return values
def threshold(data):
    """argparse type function: parse an in/out threshold specification.

    "iRANGE" applies to the inbound value, "oRANGE" to the outbound one;
    a bare RANGE applies to both. Returns (range_in, range_out), where
    either element may be None when only the other direction was given.
    """
    _logger.debug('threshold testing data: %s' % data)
    import re
    matches_in = re.findall('i(?:n)?([^,]+)', data)
    matches_out = re.findall('o(?:ut)?([^,]+)', data)
    if not matches_in and not matches_out:
        # Plain value/range: the same bounds apply in both directions.
        shared = _get_threshold_range(data)
        return (shared, shared)
    if not matches_in:
        return (None, _get_threshold_range(matches_out[0]))
    if not matches_out:
        return (_get_threshold_range(matches_in[0]), None)
    return (_get_threshold_range(matches_in[0]),
            _get_threshold_range(matches_out[0]))
def run():
    """Entry point: parse CLI arguments, run the selected check and exit.

    The exit code follows Nagios conventions (0 OK, 1 Warning,
    2 Critical, 3 Unknown); any unexpected exception is reported as
    Unknown with its message as plugin output.
    """
    # Options shared by every Nagios-style sub-command.
    nagios_base_parser = argparse.ArgumentParser(add_help=False)
    nagios_base_parser.add_argument('-H', '--hostname', required=True,
                                    help='The Host to check')
    # -v may be repeated; the count drives the log level below.
    nagios_base_parser.add_argument('-v', action='count', default=0)
    nagios_parser = argparse.ArgumentParser(add_help=False,
                                            parents=[nagios_base_parser])
    # -c / -w are parsed by the threshold() type function into range tuples.
    nagios_parser.add_argument('-c', '--critical', required=True,
                               type=threshold,
                               help='The lower bound for CRITICAL Error, or non-CRITICAL range')
    nagios_parser.add_argument('-w', '--warning', required=True,
                               type=threshold,
                               help='The lower bound for WARNING Error, or non-WARNING range')
    parser = argparse.ArgumentParser(
        description='FIXME some fancy description here')
    subparsers = parser.add_subparsers()
    # 'rittal' sub-command: check a Rittal cooling cabinet.
    parser_rittal = subparsers.add_parser('rittal',
                                          help='Check a cabinet of type rittal',
                                          parents=[nagios_parser])
    parser_rittal.add_argument('category', choices=['Water', 'Air',
                                                    'CoolingCapacity'])
    parser_rittal.add_argument('--select', choices=['in', 'out', 'both'],
                               default='both')
    parser_rittal.add_argument('-u', '--user', required=True,
                               help='The user to login with')
    parser_rittal.add_argument('-p', '--password', required=True,
                               help='The password to login with')
    parser_rittal.set_defaults(func=_parse_rittal)
    args = parser.parse_args()
    # More -v flags -> lower numeric level -> more verbose logging.
    logging.basicConfig(level=50 - (args.v * 10))
    try:
        exitcode, text = args.func(args)
    except Exception as e:
        # Any unexpected failure is surfaced as UNKNOWN instead of a crash.
        exitcode = exitcodes.get('Unknown', 3)
        text = str(e)
        _logger.exception('Caught an Exception! What happened?')
    print(text)
    exit(exitcode)
def _parse_rittal(args):
    """Run the Rittal cabinet check selected by args.category.

    :param args: parsed argparse namespace (hostname, thresholds,
        category, select, user, password)
    :returns: (exitcode, text) tuple in Nagios plugin format
    """
    from .rittal import Rittal
    hostname = args.hostname
    critical = args.critical
    warning = args.warning
    category = args.category
    category_extra = args.select
    user = args.user
    password = args.password
    cabinet = Rittal(hostname, user, password)
    # NOTE(review): all readings are fetched up front even though only the
    # selected category is reported -- presumably cheap; verify against Rittal.
    water_in = cabinet.water_in
    water_out = cabinet.water_out
    air_in = cabinet.air_in
    air_out = cabinet.air_out
    #setpoint = cabinet.setpoint # don't know, seems not important
    cooling_capacity = cabinet.cooling_capacity
    waterflow = cabinet.waterflow
    control_valve = cabinet.control_valve
    text = ''
    exitcode = 3
    if category == 'Water':
        labels = _create_labels('Water', category_extra)
        exitcode, text = _check_temperatures(hostname, labels, water_in,
                                             water_out, warning, critical,
                                             {'Waterflow': waterflow,
                                              'Control Valve': control_valve})
    elif category == 'Air':
        labels = _create_labels('Air', category_extra)
        exitcode, text = _check_temperatures(hostname, labels, air_in, air_out,
                                             warning, critical)
    elif category == 'CoolingCapacity':
        # BUG FIX: labels must be a list; passing the bare string made
        # downstream labels[0] yield the single character 'C'.
        text = _get_text_ok(hostname, ['Cooling Capacity'], cooling_capacity,
                            None, None, None)
        exitcode = exitcodes.get('OK', 3)
    return exitcode, text
def _create_labels(category, category_extra):
import re
labels = []
if re.match('(in|both)', category_extra):
labels.append(category + ' in')
if re.match('(out|both)', category_extra):
labels.append(category + ' out')
return labels
def _check_temperatures(hostname, labels, value_in, value_out, warning,
                        critical, extra=None):
    """Classify measured values against thresholds.

    :returns: (nagios_exitcode, plugin_output_text)
    """
    # Critical wins over warning; both thresholds are (in, out) pairs.
    if not _values_ok(value_in, value_out, *critical):
        status, formatter = 'Critical', _get_text_critical
    elif not _values_ok(value_in, value_out, *warning):
        status, formatter = 'Warning', _get_text_warning
    else:
        status, formatter = 'OK', _get_text_ok
    exitcode = exitcodes.get(status, 3)
    text = formatter(hostname, labels, value_in, value_out, warning, critical,
                     extra)
    return exitcode, text
def _values_ok(value_in, value_out, threshold_in, threshold_out):
    """True when both measured values lie inside their (optional) ranges.

    A falsy threshold means "no check for that direction".
    value_in/value_out are (value, uom) pairs; only the value is used.
    """
    _logger.debug('Checking values. In: %s, out: %s' % (threshold_in,
                                                        threshold_out))
    ok_in = _is_in_range(int(value_in[0]), *threshold_in) if threshold_in else True
    ok_out = _is_in_range(int(value_out[0]), *threshold_out) if threshold_out else True
    return ok_in and ok_out
def _is_in_range(value, lower_bound, upper_bound):
is_in_range = True
if lower_bound != '~':
is_in_range = value >= lower_bound
is_in_range = is_in_range and value <= upper_bound
return is_in_range
def _get_text_warning(hostname, labels, temperature_in, temperature_out,
                      warning, critical, extra=None):
    """Return the plugin output line prefixed with 'Warning'."""
    return 'Warning' + _get_text(hostname, labels, temperature_in,
                                 temperature_out, warning, critical, extra)
def _get_text_critical(hostname, labels, temperature_in, temperature_out,
                       warning, critical, extra=None):
    """Return the plugin output line prefixed with 'Critical'."""
    return 'Critical' + _get_text(hostname, labels, temperature_in,
                                  temperature_out, warning, critical, extra)
def _get_text_ok(hostname, labels, temperature_in, temperature_out,
                 warning, critical, extra=None):
    """Return the plugin output line prefixed with 'OK'."""
    return 'OK' + _get_text(hostname, labels, temperature_in,
                            temperature_out, warning, critical, extra)
def _get_text(hostname, labels, temperature_in, temperature_out,
              warning, critical, extra=None):
    """Assemble the human-readable part plus the '|'-separated perfdata.

    temperature_in/temperature_out are (value, uom) pairs; hostname is
    accepted for signature compatibility but not used here.
    """
    warning = warning or [[], []]
    critical = critical or [[], []]
    human = ' - %s %s%s' % (labels[0], temperature_in[0], temperature_in[1])
    if temperature_out:
        human += ' %s: %s%s' % (labels[1], temperature_out[0],
                                temperature_out[1])
    perfdata = _get_pnp_text(labels, temperature_in, temperature_out, warning,
                             critical, extra)
    return human + '| ' + perfdata
def _get_pnp_text(labels, temperature_in, temperature_out, warning, critical,
                  extra=None):
    """Build the perfdata section: one 'label'=value;warn;crit entry per reading."""
    chunks = [_format_label(labels[0], temperature_in[0], temperature_in[1],
                            warning[0], critical[0])]
    if temperature_out:
        chunks.append(_format_label(labels[1], temperature_out[0],
                                    temperature_out[1],
                                    warning[1], critical[1]))
    if extra:
        # Extra readings (e.g. waterflow) carry no unit or thresholds.
        for name, reading in extra.items():
            chunks.append(_format_label(name, ''.join(reading), ''))
    return ' '.join(chunks)
def _format_label(label, value, uom, warning=None, critical=None):
# uom = Unit of Measurement; Maßeinheit, etwa Celsius, etc.
text = "'%s'=%s%s;" % (label, value, uom)
if warning:
text += '%s:%s' % warning
text += ';'
if critical:
text += '%s:%s' % critical
text += ';;'
return text
| lakrahn-de/pwebs | pwebs/main.py | Python | gpl-2.0 | 9,421 |
# -*- coding: utf-8 -*-
#
#
# TheVirtualBrain-Framework Package. This package holds all Data Management, and
# Web-UI helpful to run brain-simulations. To use it, you also need do download
# TheVirtualBrain-Scientific Package (for simulators). See content of the
# documentation-folder for more details. See also http://www.thevirtualbrain.org
#
# (c) 2012-2013, Baycrest Centre for Geriatric Care ("Baycrest")
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the Free
# Software Foundation. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details. You should have received a copy of the GNU General
# Public License along with this program; if not, you can download it here
# http://www.gnu.org/licenses/old-licenses/gpl-2.0
#
#
# CITATION:
# When using The Virtual Brain for scientific publications, please cite it as follows:
#
# Paula Sanz Leon, Stuart A. Knock, M. Marmaduke Woodman, Lia Domide,
# Jochen Mersmann, Anthony R. McIntosh, Viktor Jirsa (2013)
# The Virtual Brain: a simulator of primate brain network dynamics.
# Frontiers in Neuroinformatics (7:10. doi: 10.3389/fninf.2013.00010)
#
#
"""
Persistence of data in HDF5 format.
.. moduleauthor:: Lia Domide <[email protected]>
.. moduleauthor:: Bogdan Neacsa <[email protected]>
.. moduleauthor:: Calin Pavel <[email protected]>
"""
import os
import copy
import threading
import h5py as hdf5
import numpy as numpy
import tvb.core.utils as utils
from datetime import datetime
from tvb.basic.logger.builder import get_logger
from tvb.basic.config.settings import TVBSettings as cfg
from tvb.core.entities.file.exceptions import FileStructureException, MissingDataSetException
from tvb.core.entities.file.exceptions import IncompatibleFileManagerException, MissingDataFileException
from tvb.core.entities.transient.structure_entities import GenericMetaData
# Create logger for this module
LOG = get_logger(__name__)
LOCK_OPEN_FILE = threading.Lock()
## The chunk block size recommended by h5py should be between 10k - 300k, larger for
## big files. Since performance will mostly be important for the simulator we'll just use the top range for now.
CHUNK_BLOCK_SIZE = 300000
class HDF5StorageManager(object):
"""
This class is responsible for saving / loading data in HDF5 file / format.
"""
__file_title_ = "TVB data file"
__storage_full_name = None
__hfd5_file = None
TVB_ATTRIBUTE_PREFIX = "TVB_"
ROOT_NODE_PATH = "/"
BOOL_VALUE_PREFIX = "bool:"
DATETIME_VALUE_PREFIX = "datetime:"
DATE_TIME_FORMAT = '%Y-%m-%d %H:%M:%S.%f'
LOCKS = {}
def __init__(self, storage_folder, file_name, buffer_size=600000):
"""
Creates a new storage manager instance.
:param buffer_size: the size in Bytes of the amount of data that will be buffered before writing to file.
"""
if storage_folder is None:
raise FileStructureException("Please provide the folder where to store data")
if file_name is None:
raise FileStructureException("Please provide the file name where to store data")
self.__storage_full_name = os.path.join(storage_folder, file_name)
self.__buffer_size = buffer_size
self.__buffer_array = None
self.data_buffers = {}
def is_valid_hdf5_file(self):
"""
This method checks if specified file exists and if it has correct HDF5 format
:returns: True is file exists and has HDF5 format. False otherwise.
"""
try:
return os.path.exists(self.__storage_full_name) and hdf5.h5f.is_hdf5(self.__storage_full_name)
except RuntimeError:
return False
def store_data(self, dataset_name, data_list, where=ROOT_NODE_PATH):
"""
This method stores provided data list into a data set in the H5 file.
:param dataset_name: Name of the data set where to store data
:param data_list: Data to be stored
:param where: represents the path where to store our dataset (e.g. /data/info)
"""
if dataset_name is None:
dataset_name = ''
if where is None:
where = self.ROOT_NODE_PATH
data_to_store = self._check_data(data_list)
try:
LOG.debug("Saving data into data set: %s" % dataset_name)
# Open file in append mode ('a') to allow adding multiple data sets in the same file
chunk_shape = self.__compute_chunk_shape(data_to_store.shape)
hdf5File = self._open_h5_file(chunk_shape=chunk_shape)
hdf5File[where + dataset_name] = data_to_store
finally:
# Now close file
self.close_file()
def append_data(self, dataset_name, data_list, grow_dimension=-1, close_file=True, where=ROOT_NODE_PATH):
"""
This method appends data to an existing data set. If the data set does not exists, create it first.
:param dataset_name: Name of the data set where to store data
:param data_list: Data to be stored / appended
:param grow_dimension: The dimension to be used to grow stored array. By default will grow on the LAST dimension
:param close_file: Specify if the file should be closed automatically after write operation. If not,
you have to close file by calling method close_file()
:param where: represents the path where to store our dataset (e.g. /data/info)
"""
if dataset_name is None:
dataset_name = ''
if where is None:
where = self.ROOT_NODE_PATH
data_to_store = self._check_data(data_list)
data_buffer = self.data_buffers.get(where + dataset_name, None)
if data_buffer is None:
chunk_shape = self.__compute_chunk_shape(data_to_store.shape, grow_dimension)
hdf5File = self._open_h5_file(chunk_shape=chunk_shape)
try:
dataset = hdf5File[where + dataset_name]
self.data_buffers[where + dataset_name] = HDF5StorageManager.H5pyStorageBuffer(dataset,
buffer_size=self.__buffer_size,
buffered_data=data_to_store,
grow_dimension=grow_dimension)
except KeyError:
data_shape_list = list(data_to_store.shape)
data_shape_list[grow_dimension] = None
data_shape = tuple(data_shape_list)
dataset = hdf5File.create_dataset(where + dataset_name, data=data_to_store, shape=data_to_store.shape,
dtype=data_to_store.dtype, maxshape=data_shape)
self.data_buffers[where + dataset_name] = HDF5StorageManager.H5pyStorageBuffer(dataset,
buffer_size=self.__buffer_size,
buffered_data=None,
grow_dimension=grow_dimension)
else:
if not data_buffer.buffer_data(data_to_store):
data_buffer.flush_buffered_data()
if close_file:
self.close_file()
def remove_data(self, dataset_name, where=ROOT_NODE_PATH):
"""
Deleting a data set from H5 file.
:param dataset_name:name of the data set to be deleted
:param where: represents the path where dataset is stored (e.g. /data/info)
"""
LOG.debug("Removing data set: %s" % dataset_name)
if dataset_name is None:
dataset_name = ''
if where is None:
where = self.ROOT_NODE_PATH
try:
# Open file in append mode ('a') to allow data remove
hdf5File = self._open_h5_file()
del hdf5File[where + dataset_name]
except KeyError:
LOG.warn("Trying to delete data set: %s but current file does not contain it." % dataset_name)
raise FileStructureException("Could not locate dataset: %s" % dataset_name)
finally:
self.close_file()
def get_data(self, dataset_name, data_slice=None, where=ROOT_NODE_PATH, ignore_errors=False):
"""
This method reads data from the given data set based on the slice specification
:param dataset_name: Name of the data set from where to read data
:param data_slice: Specify how to retrieve data from array {e.g (slice(1,10,1),slice(1,6,2)) }
:param where: represents the path where dataset is stored (e.g. /data/info)
:returns: a numpy.ndarray containing filtered data
"""
LOG.debug("Reading data from data set: %s" % dataset_name)
if dataset_name is None:
dataset_name = ''
if where is None:
where = self.ROOT_NODE_PATH
try:
# Open file to read data
hdf5File = self._open_h5_file('r')
data_array = hdf5File[where + dataset_name]
# Now read data
if data_slice is None:
return data_array[()]
else:
return data_array[data_slice]
except KeyError:
if not ignore_errors:
LOG.error("Trying to read data from a missing data set: %s" % dataset_name)
raise MissingDataSetException("Could not locate dataset: %s" % dataset_name)
else:
return numpy.ndarray(0)
finally:
self.close_file()
def get_data_shape(self, dataset_name, where=ROOT_NODE_PATH, ignore_errors=False):
"""
This method reads data-size from the given data set
:param dataset_name: Name of the data set from where to read data
:param where: represents the path where dataset is stored (e.g. /data/info)
:returns: a tuple containing data size
"""
LOG.debug("Reading data from data set: %s" % dataset_name)
if dataset_name is None:
dataset_name = ''
if where is None:
where = self.ROOT_NODE_PATH
try:
# Open file to read data
hdf5File = self._open_h5_file('r')
data_array = hdf5File[where + dataset_name]
return data_array.shape
except KeyError:
if not ignore_errors:
LOG.debug("Trying to read data from a missing data set: %s" % dataset_name)
raise MissingDataSetException("Could not locate dataset: %s" % dataset_name)
else:
return 0
finally:
self.close_file()
def set_metadata(self, meta_dictionary, dataset_name='', tvb_specific_metadata=True, where=ROOT_NODE_PATH):
"""
Set meta-data information for root node or for a given data set.
:param meta_dictionary: dictionary containing meta info to be stored on node
:param dataset_name: name of the dataset where to assign metadata. If None, metadata is assigned to ROOT node.
:param tvb_specific_metadata: specify if the provided metadata is TVB specific (All keys will have a TVB prefix)
:param where: represents the path where dataset is stored (e.g. /data/info)
"""
LOG.debug("Setting metadata on node: %s" % dataset_name)
if dataset_name is None:
dataset_name = ''
if where is None:
where = self.ROOT_NODE_PATH
# Open file to read data
hdf5File = self._open_h5_file()
try:
node = hdf5File[where + dataset_name]
except KeyError:
LOG.debug("Trying to set metadata on a missing data set: %s" % dataset_name)
node = hdf5File.create_dataset(where + dataset_name, (1,))
try:
# Now set meta-data
for meta_key in meta_dictionary:
key_to_store = meta_key
if tvb_specific_metadata:
key_to_store = self.TVB_ATTRIBUTE_PREFIX + meta_key
processed_value = self._serialize_value(meta_dictionary[meta_key])
node.attrs[key_to_store] = processed_value
finally:
self.close_file()
def _serialize_value(self, value):
"""
This method takes a value which will be stored as metadata and
apply some transformation if necessary
:param value: value which is planned to be stored
:returns: value to be stored
"""
if value is None:
return ''
# Force unicode strings to simple strings.
if isinstance(value, unicode):
return str(value)
# Transform boolean to string and prefix it
elif isinstance(value, bool):
return self.BOOL_VALUE_PREFIX + utils.bool2string(value)
# Transform date to string and append prefix
elif isinstance(value, datetime):
return self.DATETIME_VALUE_PREFIX + utils.date2string(value, date_format=self.DATE_TIME_FORMAT)
else:
return value
def remove_metadata(self, meta_key, dataset_name='', tvb_specific_metadata=True, where=ROOT_NODE_PATH):
"""
Remove meta-data information for root node or for a given data set.
:param meta_key: name of the metadata attribute to be removed
:param dataset_name: name of the dataset from where to delete metadata.
If None, metadata will be removed from ROOT node.
:param tvb_specific_metadata: specify if the provided metadata is specific to TVB (keys will have a TVB prefix).
:param where: represents the path where dataset is stored (e.g. /data/info)
"""
LOG.debug("Deleting metadata: %s for dataset: %s" % (meta_key, dataset_name))
if dataset_name is None:
dataset_name = ''
if where is None:
where = self.ROOT_NODE_PATH
try:
# Open file to read data
hdf5File = self._open_h5_file()
node = hdf5File[where + dataset_name]
# Now delete metadata
key_to_remove = meta_key
if tvb_specific_metadata:
key_to_remove = self.TVB_ATTRIBUTE_PREFIX + meta_key
del node.attrs[key_to_remove]
except KeyError:
LOG.error("Trying to delete metadata on a missing data set: %s" % dataset_name)
raise FileStructureException("Could not locate dataset: %s" % dataset_name)
except AttributeError:
LOG.error("Trying to delete missing metadata %s" % meta_key)
raise FileStructureException("There is no metadata named %s on this node" % meta_key)
finally:
self.close_file()
def get_metadata(self, dataset_name='', where=ROOT_NODE_PATH, ignore_errors=False):
"""
Retrieve ALL meta-data information for root node or for a given data set.
:param dataset_name: name of the dataset for which to read metadata. If None, read metadata from ROOT node.
:param where: represents the path where dataset is stored (e.g. /data/info)
:returns: a dictionary containing all metadata associated with the node
"""
LOG.debug("Retrieving metadata for dataset: %s" % dataset_name)
if dataset_name is None:
dataset_name = ''
if where is None:
where = self.ROOT_NODE_PATH
meta_key = ""
try:
# Open file to read data
hdf5File = self._open_h5_file('r')
node = hdf5File[where + dataset_name]
# Now retrieve metadata values
all_meta_data = {}
for meta_key in node.attrs:
new_key = meta_key
if meta_key.startswith(self.TVB_ATTRIBUTE_PREFIX):
new_key = meta_key[len(self.TVB_ATTRIBUTE_PREFIX):]
value = node.attrs[meta_key]
all_meta_data[new_key] = self._deserialize_value(value)
return all_meta_data
except KeyError:
if not ignore_errors:
msg = "Trying to read data from a missing data set: %s" % (where + dataset_name)
LOG.warning(msg)
raise MissingDataSetException(msg)
else:
return numpy.ndarray(0)
except AttributeError:
msg = "Trying to get value for missing metadata %s" % meta_key
LOG.error(msg)
raise FileStructureException(msg)
except Exception, excep:
msg = "Failed to read metadata from H5 file! %s" % self.__storage_full_name
LOG.exception(excep)
LOG.error(msg)
raise FileStructureException(msg)
finally:
self.close_file()
def get_file_data_version(self):
"""
Checks the data version for the current file.
"""
if not os.path.exists(self.__storage_full_name):
raise MissingDataFileException("File storage data not found at path %s" % (self.__storage_full_name,))
if self.is_valid_hdf5_file():
metadata = self.get_metadata()
if cfg.DATA_VERSION_ATTRIBUTE in metadata:
return metadata[cfg.DATA_VERSION_ATTRIBUTE]
else:
raise IncompatibleFileManagerException("Could not find TVB specific data version attribute %s in file: "
"%s." % (cfg.DATA_VERSION_ATTRIBUTE, self.__storage_full_name))
raise IncompatibleFileManagerException("File %s is not a hdf5 format file. Are you using the correct "
"manager for this file?" % (self.__storage_full_name,))
def get_gid_attribute(self):
"""
Used for obtaining the gid of the DataType of
which data are stored in the current file.
"""
if self.is_valid_hdf5_file():
metadata = self.get_metadata()
if GenericMetaData.KEY_GID in metadata:
return metadata[GenericMetaData.KEY_GID]
else:
raise IncompatibleFileManagerException("Could not find the Gid attribute in the "
"input file %s." % self.__storage_full_name)
raise IncompatibleFileManagerException("File %s is not a hdf5 format file. Are you using the correct "
"manager for this file?" % (self.__storage_full_name,))
def _deserialize_value(self, value):
"""
This method takes value loaded from H5 file and transform it to TVB data.
"""
if value is not None:
if isinstance(value, numpy.string_):
if len(value) == 0:
value = None
else:
value = str(value)
if isinstance(value, str):
if value.startswith(self.BOOL_VALUE_PREFIX):
# Remove bool prefix and transform to bool
return utils.string2bool(value[len(self.BOOL_VALUE_PREFIX):])
if value.startswith(self.DATETIME_VALUE_PREFIX):
# Remove datetime prefix and transform to datetime
return utils.string2date(value[len(self.DATETIME_VALUE_PREFIX):], date_format=self.DATE_TIME_FORMAT)
return value
def __aquire_lock(self):
"""
Aquire a unique lock for each different file path on the system.
"""
lock = self.LOCKS.get(self.__storage_full_name, None)
if lock is None:
lock = threading.Lock()
self.LOCKS[self.__storage_full_name] = lock
lock.acquire()
def __release_lock(self):
"""
Aquire a unique lock for each different file path on the system.
"""
lock = self.LOCKS.get(self.__storage_full_name, None)
if lock is None:
raise Exception("Some lock was deleted without being released beforehand.")
lock.release()
def close_file(self):
"""
The synchronization of open/close doesn't seem to be needed anymore for h5py in
contrast to PyTables for concurrent reads. However since it shouldn't add that
much overhead in most situation we'll leave it like this for now since in case
of concurrent writes(metadata) this provides extra safety.
"""
self.__aquire_lock()
self.__close_file()
self.__release_lock()
def _open_h5_file(self, mode='a', chunk_shape=None):
"""
The synchronization of open/close doesn't seem to be needed anymore for h5py in
contrast to PyTables for concurrent reads. However since it shouldn't add that
much overhead in most situation we'll leave it like this for now since in case
of concurrent writes(metadata) this provides extra safety.
"""
self.__aquire_lock()
file_obj = self.__open_h5_file(mode, chunk_shape)
self.__release_lock()
return file_obj
def __compute_chunk_shape(self, data_shape, grow_dim=None):
data_shape = list(data_shape)
if not data_shape:
return 1
nr_elems_per_block = CHUNK_BLOCK_SIZE / 8.0
if grow_dim is None:
# We don't know what dimension is growing or we are not in
# append mode and just want to write the whole data.
max_leng_dim = data_shape.index(max(data_shape))
for dim in data_shape:
nr_elems_per_block = nr_elems_per_block / dim
nr_elems_per_block = nr_elems_per_block * data_shape[max_leng_dim]
if nr_elems_per_block < 1:
nr_elems_per_block = 1
data_shape[max_leng_dim] = int(nr_elems_per_block)
return tuple(data_shape)
else:
for idx, dim in enumerate(data_shape):
if idx != grow_dim:
nr_elems_per_block = nr_elems_per_block / dim
if nr_elems_per_block < 1:
nr_elems_per_block = 1
data_shape[grow_dim] = int(nr_elems_per_block)
return tuple(data_shape)
def __close_file(self):
"""
Close file used to store data.
"""
hdf5_file = self.__hfd5_file
# Try to close file only if it was opened before
if hdf5_file is not None and hdf5_file.fid.valid:
LOG.debug("Closing file: %s" % self.__storage_full_name)
try:
for h5py_buffer in self.data_buffers.values():
h5py_buffer.flush_buffered_data()
self.data_buffers = {}
hdf5_file.close()
except Exception, excep:
### Do nothing is this situation.
### The file is correctly closed, but the list of open files on HDF5 is not updated in a synch manner.
### del _open_files[filename] might throw KeyError
LOG.exception(excep)
if not hdf5_file.fid.valid:
self.__hfd5_file = None
# -------------- Private methods --------------
    def __open_h5_file(self, mode='a', chunk_shape=None):
        """
        Open file for reading, writing or append.

        :param mode: Mode to open file (possible values are w / r / a).
            Default value is 'a', to allow adding multiple data to the same file.
        :param chunk_shape: Shape for chunks at write.
        :returns: returns the file which stores data in HDF5 format opened for read / write according to mode param
        """
        if self.__storage_full_name is not None:
            # Check if file is still open from previous writes.
            if self.__hfd5_file is None or not self.__hfd5_file.fid.valid:
                # Remember whether the file already existed so the data-version
                # attribute is written only on first creation.
                file_exists = os.path.exists(self.__storage_full_name)
                LOG.debug("Opening file: %s in mode: %s" % (self.__storage_full_name, mode))
                # NOTE(review): 'chunks' is forwarded to the File constructor --
                # confirm the h5py version in use accepts it at file level.
                self.__hfd5_file = hdf5.File(self.__storage_full_name, mode, libver='latest', chunks=chunk_shape)
                # If this is the first time we access file, write data version
                if not file_exists:
                    os.chmod(self.__storage_full_name, cfg.ACCESS_MODE_TVB_FILES)
                    self.__hfd5_file['/'].attrs[self.TVB_ATTRIBUTE_PREFIX +
                                                cfg.DATA_VERSION_ATTRIBUTE] = cfg.DATA_VERSION
            return self.__hfd5_file
        else:
            raise FileStructureException("Invalid storage file. Please provide a valid path.")
def _check_data(self, data_list):
"""
Check if the data to be stores is in a good format. If not adapt it.
"""
if data_list is None:
raise FileStructureException("Could not store null data")
if not (isinstance(data_list, list) or isinstance(data_list, numpy.ndarray)):
raise FileStructureException("Invalid data type. Could not store data of type:" + str(type(data_list)))
data_to_store = data_list
if isinstance(data_to_store, list):
data_to_store = numpy.array(data_list)
return data_to_store
class H5pyStorageBuffer():
    """
    Helper class in order to buffer data for append operations, to limit the number of actual
    HDD I/O operations.
    """
    def __init__(self, h5py_dataset, buffer_size=300, buffered_data=None, grow_dimension=-1):
        # Data accumulated so far (numpy array), or None when the buffer is empty.
        self.buffered_data = buffered_data
        # Maximum number of buffered bytes accepted before a flush is requested.
        self.buffer_size = buffer_size
        if h5py_dataset is None:
            # Fixed: implicit string concatenation previously dropped the space
            # between "the" and "buffering" in this message.
            raise MissingDataSetException("A H5pyStorageBuffer instance must have a h5py dataset for which the "
                                          "buffering is done. Please supply one to the 'h5py_dataset' parameter.")
        self.h5py_dataset = h5py_dataset
        # Index of the dataset dimension that grows on append (default: last).
        self.grow_dimension = grow_dimension
    def buffer_data(self, data_list):
        """
        Add data_list to an internal buffer in order to improve performance for append_data type of operations.

        :returns: True if buffer is still fine, \
                  False if a flush is necessary since the buffer is full
        """
        if self.buffered_data is None:
            self.buffered_data = data_list
        else:
            self.buffered_data = self.__custom_numpy_append(self.buffered_data, data_list)
        # Ask the caller to flush once the buffered bytes exceed the limit.
        return self.buffered_data.nbytes <= self.buffer_size
    def __custom_numpy_append(self, array1, array2):
        """Concatenate array2 onto array1 along grow_dimension, keeping array1's dtype."""
        array_1_shape = numpy.array(array1.shape)
        array_2_shape = numpy.array(array2.shape)
        # ndarray.copy() suffices; deepcopy of a small int array was overkill.
        result_shape = array_1_shape.copy()
        result_shape[self.grow_dimension] += array_2_shape[self.grow_dimension]
        result_array = numpy.empty(shape=tuple(result_shape), dtype=array1.dtype)
        full_slice = slice(None, None, None)
        full_index = [full_slice for _ in array_1_shape]
        # Copy the old data into the leading part of the grow dimension ...
        full_index[self.grow_dimension] = slice(0, array_1_shape[self.grow_dimension], None)
        result_array[tuple(full_index)] = array1
        # ... and the new data into the trailing part.
        full_index[self.grow_dimension] = slice(array_1_shape[self.grow_dimension],
                                                result_shape[self.grow_dimension], None)
        result_array[tuple(full_index)] = array2
        return result_array
    def flush_buffered_data(self):
        """
        Append the data buffered so far to the wrapped h5py dataset, growing it
        along self.grow_dimension, then reset the buffer.
        """
        if self.buffered_data is not None:
            current_shape = self.h5py_dataset.shape
            new_shape = list(current_shape)
            new_shape[self.grow_dimension] += self.buffered_data.shape[self.grow_dimension]
            ## Create the required slice to which the new data will be added.
            ## For example if the 3nd dimension of a 4D datashape (74, 1, 100, 1)
            ## we want to get the slice (:, :, 100:200, :) in order to add 100 new entries
            full_slice = slice(None, None, None)
            slice_to_add = slice(current_shape[self.grow_dimension], new_shape[self.grow_dimension], None)
            appendTo_address = [full_slice for _ in new_shape]
            appendTo_address[self.grow_dimension] = slice_to_add
            ## Do the data reshape and copy the new data
            self.h5py_dataset.resize(tuple(new_shape))
            self.h5py_dataset[tuple(appendTo_address)] = self.buffered_data
            self.buffered_data = None
| stuart-knock/tvb-framework | tvb/core/entities/file/hdf5_storage_manager.py | Python | gpl-2.0 | 29,494 |
#--->
#-from enigma import eTimer, eTPM, eEnv
#---<
#+++>
from enigma import eTimer, eEnv
#+++<
from Screens.Screen import Screen
from Components.ActionMap import ActionMap, NumberActionMap
from Components.Pixmap import Pixmap,MultiPixmap
from Components.Label import Label
from Components.Sources.StaticText import StaticText
from Components.Sources.List import List
from Components.MenuList import MenuList
from Components.config import config, getConfigListEntry, ConfigYesNo, NoSave, ConfigSubsection, ConfigText, ConfigSelection, ConfigPassword
from Components.ConfigList import ConfigListScreen
from Components.Network import iNetwork
from Components.Console import Console
from Plugins.Plugin import PluginDescriptor
from os import system, path as os_path, listdir
from Tools.Directories import resolveFilename, SCOPE_PLUGINS, SCOPE_SKIN_IMAGE
from Tools.LoadPixmap import LoadPixmap
from Tools.HardwareInfo import HardwareInfo
from Wlan import iWlan, wpaSupplicant, iStatus, getWlanConfigName
import hashlib
from time import time
from os import urandom, system
from re import escape as re_escape
plugin_path = eEnv.resolve("${libdir}/enigma2/python/Plugins/SystemPlugins/WirelessLan")
# Encryption methods offered in the configuration UI.
# Renamed from 'list'/'weplist': 'list' shadowed the builtin of the same name.
encryption_modes = ["Unencrypted", "WEP", "WPA", "WPA/WPA2", "WPA2"]
# Accepted WEP key entry formats.
wep_key_types = ["ASCII", "HEX"]
# Volatile (NoSave) wireless settings used by the connection wizard screens.
config.plugins.wlan = ConfigSubsection()
config.plugins.wlan.essid = NoSave(ConfigText(default = "", fixed_size = False))
config.plugins.wlan.hiddenessid = NoSave(ConfigYesNo(default = False))
config.plugins.wlan.encryption = NoSave(ConfigSelection(encryption_modes, default = "WPA2"))
config.plugins.wlan.wepkeytype = NoSave(ConfigSelection(wep_key_types, default = "ASCII"))
config.plugins.wlan.psk = NoSave(ConfigPassword(default = "", fixed_size = False))
class WlanStatus(Screen):
    """
    Screen showing the live state of one wireless interface (access point,
    SSID, link quality, signal strength, bitrate, encryption), refreshed
    every 8 seconds via iStatus.
    """
    skin = """
        <screen name="WlanStatus" position="center,center" size="560,400" title="Wireless Network State" >
            <ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
            <widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
            <widget source="LabelBSSID" render="Label" position="10,60" size="250,25" valign="left" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="LabelESSID" render="Label" position="10,100" size="250,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="LabelQuality" render="Label" position="10,140" size="250,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="LabelSignal" render="Label" position="10,180" size="250,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="LabelBitrate" render="Label" position="10,220" size="250,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="LabelEnc" render="Label" position="10,260" size="250,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="BSSID" render="Label" position="320,60" size="180,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="ESSID" render="Label" position="320,100" size="180,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="quality" render="Label" position="320,140" size="180,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="signal" render="Label" position="320,180" size="180,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="bitrate" render="Label" position="320,220" size="180,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <widget source="enc" render="Label" position="320,260" size="180,25" valign="center" font="Regular;20" transparent="1" foregroundColor="#FFFFFF" />
            <ePixmap pixmap="skin_default/div-h.png" position="0,350" zPosition="1" size="560,2" />
            <widget source="IFtext" render="Label" position="10,355" size="120,21" zPosition="10" font="Regular;20" halign="left" backgroundColor="#25062748" transparent="1" />
            <widget source="IF" render="Label" position="120,355" size="400,21" zPosition="10" font="Regular;20" halign="left" backgroundColor="#25062748" transparent="1" />
            <widget source="Statustext" render="Label" position="10,375" size="115,21" zPosition="10" font="Regular;20" halign="left" backgroundColor="#25062748" transparent="1"/>
            <widget name="statuspic" pixmaps="skin_default/buttons/button_green.png,skin_default/buttons/button_green_off.png" position="130,380" zPosition="10" size="15,16" transparent="1" alphatest="on"/>
        </screen>"""
    def __init__(self, session, iface):
        Screen.__init__(self, session)
        self.session = session
        self.iface = iface
        self["LabelBSSID"] = StaticText(_('Accesspoint:'))
        self["LabelESSID"] = StaticText(_('SSID:'))
        self["LabelQuality"] = StaticText(_('Link Quality:'))
        self["LabelSignal"] = StaticText(_('Signal Strength:'))
        self["LabelBitrate"] = StaticText(_('Bitrate:'))
        self["LabelEnc"] = StaticText(_('Encryption:'))
        self["BSSID"] = StaticText()
        self["ESSID"] = StaticText()
        self["quality"] = StaticText()
        self["signal"] = StaticText()
        self["bitrate"] = StaticText()
        self["enc"] = StaticText()
        self["IFtext"] = StaticText()
        self["IF"] = StaticText()
        self["Statustext"] = StaticText()
        self["statuspic"] = MultiPixmap()
        self["statuspic"].hide()
        self["key_red"] = StaticText(_("Close"))
        self.resetList()
        self.updateStatusbar()
        self["actions"] = NumberActionMap(["WizardActions", "InputActions", "EPGSelectActions", "ShortcutActions"],
        {
            "ok": self.exit,
            "back": self.exit,
            "red": self.exit,
        }, -1)
        self.timer = eTimer()
        # Refresh every 8 seconds while shown. Use the eTimer.callback list,
        # consistent with WlanScan.rescanTimer below; the old
        # timeout.get().append() accessor is the deprecated API.
        self.timer.callback.append(self.resetList)
        self.onShown.append(lambda: self.timer.start(8000))
        self.onLayoutFinish.append(self.layoutFinished)
        self.onClose.append(self.cleanup)
    def cleanup(self):
        """Stop the background wlan console when the screen closes."""
        iStatus.stopWlanConsole()
    def layoutFinished(self):
        self.setTitle(_("Wireless network state"))
    def resetList(self):
        """Request fresh status data; getInfoCB is invoked asynchronously."""
        iStatus.getDataForInterface(self.iface,self.getInfoCB)
    def getInfoCB(self,data,status):
        """Status callback: fill the labels from the per-interface status dict."""
        if data is not None:
            if data is True:
                if status is not None:
                    if status[self.iface]["essid"] == "off":
                        essid = _("No Connection")
                    else:
                        essid = status[self.iface]["essid"]
                    if status[self.iface]["accesspoint"] == "Not-Associated":
                        accesspoint = _("Not-Associated")
                        essid = _("No Connection")
                    else:
                        accesspoint = status[self.iface]["accesspoint"]
                    # has_key guards: widgets may be gone while the screen closes.
                    if self.has_key("BSSID"):
                        self["BSSID"].setText(accesspoint)
                    if self.has_key("ESSID"):
                        self["ESSID"].setText(essid)
                    quality = status[self.iface]["quality"]
                    if self.has_key("quality"):
                        self["quality"].setText(quality)
                    if status[self.iface]["bitrate"] == '0':
                        bitrate = _("Unsupported")
                    else:
                        bitrate = str(status[self.iface]["bitrate"]) + " Mb/s"
                    if self.has_key("bitrate"):
                        self["bitrate"].setText(bitrate)
                    signal = status[self.iface]["signal"]
                    if self.has_key("signal"):
                        self["signal"].setText(signal)
                    if status[self.iface]["encryption"] == "off":
                        if accesspoint == "Not-Associated":
                            encryption = _("Disabled")
                        else:
                            encryption = _("Unsupported")
                    else:
                        encryption = _("Enabled")
                    if self.has_key("enc"):
                        self["enc"].setText(encryption)
                    self.updateStatusLink(status)
    def exit(self):
        self.timer.stop()
        self.close(True)
    def updateStatusbar(self):
        """Show placeholder texts until the first status callback arrives."""
        wait_txt = _("Please wait...")
        self["BSSID"].setText(wait_txt)
        self["ESSID"].setText(wait_txt)
        self["quality"].setText(wait_txt)
        self["signal"].setText(wait_txt)
        self["bitrate"].setText(wait_txt)
        self["enc"].setText(wait_txt)
        self["IFtext"].setText(_("Network:"))
        self["IF"].setText(iNetwork.getFriendlyAdapterName(self.iface))
        self["Statustext"].setText(_("Link:"))
    def updateStatusLink(self,status):
        """Switch the link LED pixmap: green when associated, off otherwise."""
        if status is not None:
            if status[self.iface]["essid"] == "off" or status[self.iface]["accesspoint"] == "Not-Associated" or status[self.iface]["accesspoint"] == False:
                self["statuspic"].setPixmapNum(1)
            else:
                self["statuspic"].setPixmapNum(0)
            self["statuspic"].show()
class WlanScan(Screen):
    """
    Screen that scans for wireless networks on one interface, refreshes the
    result list every 5 seconds, and closes with the selected ESSID (or None
    on cancel).
    """
    skin = """
        <screen name="WlanScan" position="center,center" size="560,400" title="Choose a Wireless Network" >
            <ePixmap pixmap="skin_default/buttons/red.png" position="0,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="skin_default/buttons/green.png" position="140,0" size="140,40" alphatest="on" />
            <ePixmap pixmap="skin_default/buttons/yellow.png" position="280,0" size="140,40" alphatest="on" />
            <widget source="key_red" render="Label" position="0,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />
            <widget source="key_green" render="Label" position="140,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />
            <widget source="key_yellow" render="Label" position="280,0" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />
            <widget source="list" render="Listbox" position="5,40" size="550,300" scrollbarMode="showOnDemand">
                <convert type="TemplatedMultiContent">
                    {"template": [
                            MultiContentEntryText(pos = (0, 0), size = (550, 30), font=0, flags = RT_HALIGN_LEFT, text = 0), # index 0 is the essid
                            MultiContentEntryText(pos = (0, 30), size = (175, 20), font=1, flags = RT_HALIGN_LEFT, text = 5), # index 5 is the interface
                            MultiContentEntryText(pos = (175, 30), size = (175, 20), font=1, flags = RT_HALIGN_LEFT, text = 4), # index 0 is the encryption
                            MultiContentEntryText(pos = (350, 0), size = (200, 20), font=1, flags = RT_HALIGN_LEFT, text = 2), # index 0 is the signal
                            MultiContentEntryText(pos = (350, 30), size = (200, 20), font=1, flags = RT_HALIGN_LEFT, text = 3), # index 0 is the maxrate
                            MultiContentEntryPixmapAlphaTest(pos = (0, 52), size = (550, 2), png = 6), # index 6 is the div pixmap
                        ],
                    "fonts": [gFont("Regular", 28),gFont("Regular", 18)],
                    "itemHeight": 54
                    }
                </convert>
            </widget>
            <ePixmap pixmap="skin_default/div-h.png" position="0,340" zPosition="1" size="560,2" />
            <widget source="info" render="Label" position="0,350" size="560,50" font="Regular;24" halign="center" valign="center" backgroundColor="#25062748" transparent="1" />
        </screen>"""
    def __init__(self, session, iface):
        Screen.__init__(self, session)
        self.session = session
        self.iface = iface
        self.skin_path = plugin_path
        # Remembered so callers can restore the interface state afterwards.
        self.oldInterfaceState = iNetwork.getAdapterAttribute(self.iface, "up")
        self.APList = None
        self.newAPList = None
        self.WlanList = None
        self.cleanList = None
        # Maps essid -> {'data': entry tuple} across rescans, so networks that
        # momentarily disappear from a scan keep their last known data.
        self.oldlist = {}
        self.listLength = None
        # Periodic rescan; restarted by getAccessPoints after each scan.
        self.rescanTimer = eTimer()
        self.rescanTimer.callback.append(self.rescanTimerFired)
        self["info"] = StaticText()
        self.list = []
        self["list"] = List(self.list)
        self["key_red"] = StaticText(_("Close"))
        self["key_green"] = StaticText(_("Connect"))
        self["key_yellow"] = StaticText()
        self["actions"] = NumberActionMap(["WizardActions", "InputActions", "EPGSelectActions"],
        {
            "ok": self.select,
            "back": self.cancel,
        }, -1)
        self["shortcuts"] = ActionMap(["ShortcutActions"],
        {
            "red": self.cancel,
            "green": self.select,
        })
        iWlan.setInterface(self.iface)
        self.w = iWlan.getInterface()
        self.onLayoutFinish.append(self.layoutFinished)
        self.getAccessPoints(refresh = False)
    def layoutFinished(self):
        self.setTitle(_("Choose a wireless network"))
    def select(self):
        # Close with the selected ESSID; always stop scanning first.
        cur = self["list"].getCurrent()
        if cur is not None:
            iWlan.stopGetNetworkList()
            self.rescanTimer.stop()
            del self.rescanTimer
            if cur[0] is not None:
                self.close(cur[0])
            else:
                self.close(None)
        else:
            iWlan.stopGetNetworkList()
            self.rescanTimer.stop()
            del self.rescanTimer
            self.close(None)
    def cancel(self):
        # Abort scanning and close without a selection.
        iWlan.stopGetNetworkList()
        self.rescanTimer.stop()
        del self.rescanTimer
        self.close(None)
    def rescanTimerFired(self):
        self.rescanTimer.stop()
        self.updateAPList()
    def buildEntryComponent(self, essid, bssid, encrypted, iface, maxrate, signal):
        """Build one list row tuple matching the TemplatedMultiContent skin above."""
        divpng = LoadPixmap(cached=True, path=resolveFilename(SCOPE_SKIN_IMAGE, "skin_default/div-h.png"))
        encryption = encrypted and _("Yes") or _("No")
        return((essid, bssid, _("Signal: ") + str(signal), _("Max. Bitrate: ") + str(maxrate), _("Encrypted: ") + encryption, _("Interface: ") + str(iface), divpng))
    def updateAPList(self):
        """Rescan and rebuild the list, keeping the current selection if possible."""
        newList = []
        newList = self.getAccessPoints(refresh = True)
        self.newAPList = []
        tmpList = []
        newListIndex = None
        currentListEntry = None
        currentListIndex = None
        for ap in self.oldlist.keys():
            data = self.oldlist[ap]['data']
            if data is not None:
                tmpList.append(data)
        if len(tmpList):
            for entry in tmpList:
                self.newAPList.append(self.buildEntryComponent( entry[0], entry[1], entry[2], entry[3], entry[4], entry[5] ))
            # Re-select the entry with the same ESSID as before the rescan.
            currentListEntry = self["list"].getCurrent()
            if currentListEntry is not None:
                idx = 0
                for entry in self.newAPList:
                    if entry[0] == currentListEntry[0]:
                        newListIndex = idx
                    idx +=1
            self['list'].setList(self.newAPList)
            if newListIndex is not None:
                self["list"].setIndex(newListIndex)
            self["list"].updateList(self.newAPList)
            self.listLength = len(self.newAPList)
            self.buildWlanList()
            self.setInfo()
    def getAccessPoints(self, refresh = False):
        """Fetch the scan results, merge them into self.oldlist, restart the timer."""
        self.APList = []
        self.cleanList = []
        aps = iWlan.getNetworkList()
        if aps is not None:
            print "[WirelessLan.py] got Accespoints!"
            tmpList = []
            compList = []
            for ap in aps:
                a = aps[ap]
                if a['active']:
                    tmpList.append( (a['essid'], a['bssid']) )
                    compList.append( (a['essid'], a['bssid'], a['encrypted'], a['iface'], a['maxrate'], a['signal']) )
            # Drop hidden networks (empty ESSID) by matching on BSSID.
            for entry in tmpList:
                if entry[0] == "":
                    for compentry in compList:
                        if compentry[1] == entry[1]:
                            compList.remove(compentry)
            for entry in compList:
                self.cleanList.append( ( entry[0], entry[1], entry[2], entry[3], entry[4], entry[5] ) )
                if not self.oldlist.has_key(entry[0]):
                    self.oldlist[entry[0]] = { 'data': entry }
                else:
                    self.oldlist[entry[0]]['data'] = entry
        for entry in self.cleanList:
            self.APList.append(self.buildEntryComponent( entry[0], entry[1], entry[2], entry[3], entry[4], entry[5] ))
        if refresh is False:
            self['list'].setList(self.APList)
        self.listLength = len(self.APList)
        self.setInfo()
        self.rescanTimer.start(5000)
        return self.cleanList
    def setInfo(self):
        """Update the footer text with the number of networks found."""
        length = self.getLength()
        if length == 0:
            self["info"].setText(_("No wireless networks found! Searching..."))
        elif length == 1:
            self["info"].setText(_("1 wireless network found!"))
        else:
            self["info"].setText(str(length)+_(" wireless networks found!"))
    def buildWlanList(self):
        # (essid, essid) pairs as expected by ConfigSelection-style consumers.
        self.WlanList = []
        for entry in self['list'].list:
            self.WlanList.append( (entry[0], entry[0]) )
    def getLength(self):
        return self.listLength
    def getWlanList(self):
        if self.WlanList is None:
            self.buildWlanList()
        return self.WlanList
def bin2long(s):
    """Interpret the byte string *s* as a big-endian unsigned integer."""
    # Fold each byte into the accumulator: acc*256 + byte. The 'L' suffix on
    # the shift amount was unnecessary (ints auto-promote) and Python-2-only.
    return reduce( lambda x,y:(x<<8)+y, map(ord, s))
def long2bin(l):
    """Convert integer *l* into its 128-character (1024-bit) big-endian byte string."""
    # str.join builds the result in one pass instead of quadratic '+='.
    return "".join(chr((l >> (1024 - (byte + 1) * 8)) & 0xff)
                   for byte in range(128))
def rsa_pub1024(src, mod):
    """Raw RSA-1024 public-key operation: src ** 65537 mod *mod* (byte strings)."""
    base = bin2long(src)
    modulus = bin2long(mod)
    return long2bin(pow(base, 65537, modulus))
def decrypt_block(src, mod):
    """
    Decrypt one certificate block and verify its embedded SHA-1 digest.

    Returns the decrypted block on success, None on unexpected length or
    digest mismatch.
    """
    if len(src) not in (128, 202):
        return None
    plain = rsa_pub1024(src[:128], mod)
    # 'digest_ctx' avoids shadowing the builtin 'hash'.
    digest_ctx = hashlib.sha1(plain[1:107])
    if len(src) == 202:
        digest_ctx.update(src[131:192])
    if digest_ctx.digest() == plain[107:127]:
        return plain
    return None
def validate_certificate(cert, key):
    """Validate *cert* against *key*; return the extracted key material or None."""
    decrypted = decrypt_block(cert[8:], key)
    if decrypted is None:
        return None
    # Key material from the decrypted payload plus the trailing cert bytes.
    return decrypted[36:107] + cert[139:196]
def get_random():
    """Return 8 pseudo-random bytes mixed from os.urandom and the clock, or None on failure."""
    try:
        xor = lambda a,b: ''.join(chr(ord(c)^ord(d)) for c,d in zip(a,b*100))
        random = urandom(8)
        x = str(time())[-8:]
        result = xor(random, x)
        return result
    # Narrowed from a bare 'except:' (which also swallowed SystemExit /
    # KeyboardInterrupt); callers treat None as "no randomness available".
    except Exception:
        return None
def WlanStatusScreenMain(session, iface):
    """Plugin entry point: open the wireless status screen for *iface*."""
    session.open(WlanStatus, iface)
def callFunction(iface):
    """
    Return the status-screen entry point when *iface* is a wireless
    interface, otherwise None (flattened from the original nested returns).
    """
    iWlan.setInterface(iface)
    wireless_ifaces = iWlan.getWirelessInterfaces()
    if wireless_ifaces and (iface in wireless_ifaces or iNetwork.isWirelessInterface(iface)):
        return WlanStatusScreenMain
    return None
def configStrings(iface):
#--->
#- try:
#- device = open("/proc/stb/info/model", "r").readline().strip()
#- except:
#- device = ""
#- if device != "dm7025":
#- rootkey = ['\x9f', '|', '\xe4', 'G', '\xc9', '\xb4', '\xf4', '#', '&', '\xce', '\xb3', '\xfe', '\xda', '\xc9', 'U', '`', '\xd8', '\x8c', 's', 'o', '\x90', '\x9b', '\\', 'b', '\xc0', '\x89', '\xd1', '\x8c', '\x9e', 'J', 'T', '\xc5', 'X', '\xa1', '\xb8', '\x13', '5', 'E', '\x02', '\xc9', '\xb2', '\xe6', 't', '\x89', '\xde', '\xcd', '\x9d', '\x11', '\xdd', '\xc7', '\xf4', '\xe4', '\xe4', '\xbc', '\xdb', '\x9c', '\xea', '}', '\xad', '\xda', 't', 'r', '\x9b', '\xdc', '\xbc', '\x18', '3', '\xe7', '\xaf', '|', '\xae', '\x0c', '\xe3', '\xb5', '\x84', '\x8d', '\r', '\x8d', '\x9d', '2', '\xd0', '\xce', '\xd5', 'q', '\t', '\x84', 'c', '\xa8', ')', '\x99', '\xdc', '<', '"', 'x', '\xe8', '\x87', '\x8f', '\x02', ';', 'S', 'm', '\xd5', '\xf0', '\xa3', '_', '\xb7', 'T', '\t', '\xde', '\xa7', '\xf1', '\xc9', '\xae', '\x8a', '\xd7', '\xd2', '\xcf', '\xb2', '.', '\x13', '\xfb', '\xac', 'j', '\xdf', '\xb1', '\x1d', ':', '?']
#- etpm = eTPM()
#- l2cert = etpm.getCert(eTPM.TPMD_DT_LEVEL2_CERT)
#- if l2cert is None:
#- return
#- l2key = validate_certificate(l2cert, rootkey)
#- if l2key is None:
#- return
#- l3cert = etpm.getCert(eTPM.TPMD_DT_LEVEL3_CERT)
#- if l3cert is None:
#- return
#- l3key = validate_certificate(l3cert, l2key)
#- if l3key is None:
#- return
#- rnd = get_random()
#- if rnd is None:
#- return
#- val = etpm.challenge(rnd)
#- result = decrypt_block(val, l3key)
#- if device == "dm7025" or result[80:88] == rnd:
#---<
if True:
driver = iNetwork.detectWlanModule(iface)
else:
driver = 'dreambox'
print 'Using "%s" as wpa-supplicant driver' % (driver)
ret = ""
if driver == 'madwifi' and config.plugins.wlan.hiddenessid.value:
ret += "\tpre-up iwconfig " + iface + " essid \"" + re_escape(config.plugins.wlan.essid.value) + "\" || true\n"
ret += "\tpre-up wpa_supplicant -i" + iface + " -c" + getWlanConfigName(iface) + " -B -dd -D" + driver + " || true\n"
ret += "\tpre-down wpa_cli -i" + iface + " terminate || true\n"
return ret
def Plugins(**kwargs):
    """Register the Wireless LAN plugin with the enigma2 network setup menu."""
    # Fixed user-visible typo: "Configuartion" -> "Configuration".
    return PluginDescriptor(name=_("Wireless LAN"), description=_("Connect to a Wireless Network"), where = PluginDescriptor.WHERE_NETWORKSETUP, needsRestart = False, fnc={"ifaceSupported": callFunction, "configStrings": configStrings, "WlanPluginEntry": lambda x: "Wireless Network Configuration..."})
| popazerty/enigma2cuberevo | lib/python/Plugins/SystemPlugins/WirelessLan/plugin.py | Python | gpl-2.0 | 19,320 |
# Nothing alliance specific in here.
# qebab, 24/6/08.
import re
import math
from munin import loadable
class rprod(loadable.loadable):
    """Find out how much you can spend with n factories
    in m ticks."""
    def __init__(self, cursor):
        super().__init__(cursor, 1)
        # <ship> <ticks> <factories>
        self.paramre = re.compile(r"^\s*(\S+)\s+(\d+)\s+(\d+)")
        self.usage = self.__class__.__name__ + " <ship> <ticks> <factories>."
        self.helptext = [
            "Calculate how many <ship>" " you can build in <ticks> " "with <factories>."
        ]
        # Step used for numerical differentiation and convergence tolerance.
        self.dx = self.tolerance = 0.00001
    def derive(self, f):
        """Numerical derivation of the function f."""
        return lambda x: (f(x + self.dx) - f(x)) / self.dx
    def close(self, a, b):
        """Is the result acceptable (within tolerance)?"""
        return abs(a - b) < self.tolerance
    def newton_transform(self, f):
        """Do a newton transform of the function f."""
        return lambda x: x - (f(x) / self.derive(f)(x))
    def fixed_point(self, f, guess):
        """Fixed point search."""
        while not self.close(guess, f(guess)):
            guess = f(guess)
        return guess
    def newton(self, f, guess):
        """Generic equation solver using newtons method."""
        return self.fixed_point(self.newton_transform(f), guess)
    def rpu(self, y, math):
        """Return f(x) = 2*sqrt(x)*ln(x) - y. The math module is passed in as
        a parameter (kept for backward compatibility with existing callers)."""
        return lambda x: 2 * math.sqrt(x) * math.log(x, math.e) - y
    def revprod(self, ticks, facs):
        """Reversed production formula: resources spendable in *ticks* ticks
        with *facs* factories."""
        # Uses the module-level 'import math'; the previous local re-import
        # was redundant.
        output = (4000 * facs) ** 0.98
        return self.newton(self.rpu(ticks * output - 10000 * facs, math), 10)
    def execute(self, user, access, irc_msg):
        """IRC command handler: parse '<ship> <ticks> <factories>' and reply."""
        match = irc_msg.match_command(self.commandre)
        if not match:
            return 0
        match = self.paramre.search(match.group(1))
        if not match:
            irc_msg.reply(
                "Usage: %s, how much you can spend with n factories in m ticks."
                % self.usage
            )
            return 0
        if access < self.level:
            irc_msg.reply("You do not have the access necessary to use this command.")
            return 0
        shipname = match.group(1)
        ticks = int(match.group(2))
        factories = int(match.group(3))
        query = "SELECT * FROM ship WHERE name ILIKE %s ORDER BY id"
        self.cursor.execute(query, ("%" + shipname + "%",))
        ship = self.cursor.fetchone()
        if not ship:
            irc_msg.reply("%s is not a ship." % shipname)
            return 0
        res = int(self.revprod(ticks, factories))
        ships = int(res / ship["total_cost"])
        # Feudalism discounts the cost but taxes 20% of the output (/1.2).
        feud_ships = int(
            res
            / (
                (
                    ship["total_cost"]
                    * (1 - float(self.config.get("Planetarion", "feudalism")))
                )
                / 1.2
            )
        )
        # Fixed message formatting: the old literal embedded a line
        # continuation (leaking indentation whitespace into the reply) and
        # misplaced the "(%s)" cost after "in" for the feudalism clause.
        irc_msg.reply(
            "You can build %s %s (%s) in %d ticks, or "
            "%s %s (%s) in %d ticks with feudalism."
            % (
                self.format_value(ships * 100),
                ship["name"],
                self.format_value(ships * ship["total_cost"]),
                ticks,
                self.format_value(feud_ships * 100),
                ship["name"],
                self.format_value(feud_ships * ship["total_cost"]),
                ticks,
            )
        )
        return 1
| munin/munin | deprecated/rprod.py | Python | gpl-2.0 | 3,450 |
from unittest import TestCase
from duckduckgo.duckduckgo import query, Redirect, Result, Results, Abstract, Image, Answer
__author__ = 'robbie'
class TestDDG(TestCase):
def setUp(self):
pass
def test_ddg(self):
result = query('meat')
string = ""
related = result.related
print('')
for r in related:
string += '\n----------------\n'
string += r.url + "\n"
string += r.text + "\n"
string += '\n----------------\n'
self.assertNotEquals("", string)
| robbielynch/RoblySearch | tests/test_duckduckgo.py | Python | gpl-2.0 | 567 |
from builder.btools import RegisterCustomTest
from builder.btools import AddConfigKey
from builder.bconfig import getAutoconfPrefix
from builder.bconfig import getArchInfo, ARCH_X86_64
from builder.bconfig import Version
import os.path
def CheckOptix(ctx, write_config_h=False, add_to_compiler_env=False,
               min_version=None, max_version=None):
    """
    SCons configure check for the NVIDIA OptiX ray tracing library.

    Requires CUDA to have been detected first (CheckCUDA). On success an
    'optix' package is declared on the environment; the HAVE_OPTIX key is
    written to the config header (write_config_h) or to CPPDEFINES
    (add_to_compiler_env).

    :param ctx: SCons configure context
    :param min_version: inclusive lower version bound (string or Version)
    :param max_version: inclusive upper version bound (string or Version)
    :returns: 1 when a usable OptiX installation was found, 0 otherwise
    """
    ctx.Message('Checking for Optix Library... ')
    confprefix = getAutoconfPrefix(ctx.env)
    key = confprefix+'HAVE_OPTIX'
    platform = ctx.env['PLATFORM']
    archInfo = getArchInfo(ctx.env)
    # If CUDA is not available Optix cannot be used
    if not ctx.env.GetPackage('cuda'):
        ctx.Message('No CUDA detected, call CheckCUDA first')
        if write_config_h:
            AddConfigKey(ctx, key, 0)
        ctx.Result(0)
        return 0
    isWin32 = platform == 'win32'
    isX64 = archInfo.getArchID() == ARCH_X86_64
    if min_version is not None:
        min_version = Version(min_version)
    if max_version is not None:
        max_version = Version(max_version)
    # RequirePackage('cuda') will add all libraries needed for linking with
    # CUDA and return dictionary of all modified variables with original
    # values.
    savedVars = ctx.env.RequirePackage('cuda')
    # detect common locations: the default (None) plus any OptiX SDK
    # installed in the user's home directory.
    commonLocations = [None] # default location
    paths = ctx.env.Glob(os.path.join(os.path.expanduser('~'),
                                      'NVIDIA-OptiX-SDK*'))
    for p in paths:
        if os.path.exists(p.Dir('include').File('optix.h').abspath):
            commonLocations.append(p)
    ret = 0
    for location in commonLocations:
        # Undo the env changes of the previous candidate before trying the next.
        ctx.env.RestoreVars(savedVars)
        ctx.env.RequirePackage('cuda')
        ctx.env.Append(LIBS = ['optix'])
        vars = {'LIBS' : ['optix']}
        if location is not None:
            includeDir = str(location.Dir('include'))
            ctx.env.Append(CPPPATH = [includeDir])
            vars['CPPPATH']= [includeDir]
            if isX64:
                lib64Dir = str(location.Dir('lib64'))
                ctx.env.Append(LIBPATH = [lib64Dir])
                vars['LIBPATH'] = [lib64Dir]
            else:
                libDir = str(location.Dir('lib'))
                ctx.env.Append(LIBPATH = [libDir])
                vars['LIBPATH'] = [libDir]
        ret, outputStr = ctx.TryRun("""
        #include <stdio.h>
        #include <optix.h>
        int main(int argc, char** argv)
        {
            printf("%i\\n", OPTIX_VERSION);
            return 0;
        }
        """, extension='.c')
        if ret:
            v = int(outputStr)
            # OPTIX_VERSION encodes major*1000 + minor*10 + micro. Floor
            # division keeps the components ints under Python 3, where '/'
            # would produce a float minor version.
            vmajor = v // 1000
            vminor = (v % 1000) // 10
            vmicro = v % 10
            libVersion = Version(vmajor, vminor, vmicro)
            if not libVersion.compatible(min_version, max_version):
                ctx.Message('version %s is not within required [%s, %s] version range ' % \
                            (libVersion, min_version, max_version))
                ret = 0
                continue
            ctx.Message('version %s ' % libVersion)
            # check for optixu/optixpp C++ wrapper library
            ret = ctx.TryLink("""
            #include <optixu/optixu_matrix.h>
            int main(int argc, char** argv)
            {
                optix::Matrix3x3 matrix;
                return 0;
            }
            """, extension='.cpp')
            if not ret:
                ctx.Message('could not link with optixu library')
                ret = 0
                continue
            # Success: register the package with the variables that made it work.
            ctx.env.RestoreVars(savedVars)
            vars['OPTIX_VERSION'] = libVersion
            libPackage = ctx.env.DeclarePackage(
                'optix',
                vars=vars,
                dependencies=['cuda'],
                trigger_libs=['optix', 'Optix'])
            break
    if not (write_config_h and AddConfigKey(ctx, key, ret)):
        # no config file is specified or it is disabled, use compiler options
        if ret and add_to_compiler_env:
            ctx.env.Append(CPPDEFINES=[key])
    ctx.Result(ret)
    return ret
RegisterCustomTest('CheckOptix', CheckOptix)
| paeschli/scons-builder | modules/optix_check.py | Python | gpl-2.0 | 4,137 |
import tkinter.ttk as ttk
class SearchClient(ttk.Frame):
    """
    Frame that looks up a client by email and displays the client's details
    together with the client's requests.
    """
    def __init__(self, master, model, ctrl):
        ttk.Frame.__init__(self, master)
        # MVC collaborators: 'model' holds the databases, 'ctrl' the handlers.
        self.model = model
        self.ctrl = ctrl
        self.create_widgets()
    def create_widgets(self):
        """Build the email entry, the Get button and the (empty) result area."""
        ttk.Label(self, text="Email:").grid(row=0, sticky="E")
        self.e1 = ttk.Entry(self)
        self.e1.grid(row=0, column=1)
        b1 = ttk.Button(self, text="Get",
                        command=lambda: self.ctrl.search_client_get(self))
        b1.grid(row=1, columnspan=2)
        self.result = self.SearchResult(self, self.model, self.ctrl)
        self.result.grid(row=2, columnspan=2)
    class SearchResult(ttk.Frame):
        """Inner frame rendering the looked-up client and its requests."""
        def __init__(self, master, model, ctrl):
            ttk.Frame.__init__(self, master)
            self.master = master
            self.model = model
            self.ctrl = ctrl
        def create_widgets(self):
            """Re-render the result area for the email typed into master.e1."""
            # Clear any previous search result before rendering the new one.
            self.ctrl.clear_frame(self)
            client = self.model.client_db.getByEmail(self.master.e1.get())
            if client is False:
                ttk.Label(self, text="No such client").grid(row=0, columnspan=2)
            else:
                ttk.Label(self, text="ClientID:").grid(row=0, sticky="E")
                ttk.Label(self, text=str(client.getID())).grid(row=0, column=1, sticky="W")
                ttk.Label(self, text="Full name:").grid(row=1, sticky="E")
                ttk.Label(self, text=client.getName()).grid(row=1, column=1, sticky="W")
                ttk.Label(self, text="Email:").grid(row=2, sticky="E")
                ttk.Label(self, text=client.getEmail()).grid(row=2, column=1, sticky="W")
                ttk.Label(self, text="Address:").grid(row=3, sticky="E")
                ttk.Label(self, text=client.getAddress()).grid(row=3, column=1, sticky="W")
                ttk.Label(self, text="Postal code:").grid(row=4, sticky="E")
                ttk.Label(self, text=client.getPostalCode()).grid(row=4, column=1, sticky="W")
                ttk.Label(self, text="City:").grid(row=5, sticky="E")
                ttk.Label(self, text=client.getCity()).grid(row=5, column=1, sticky="W")
                ttk.Label(self, text="Day of birth (MM/DD/YYYY):").grid(row=6, sticky="E")
                ttk.Label(self, text=client.getBirthDate()).grid(row=6, column=1, sticky="W")
                ttk.Label(self, text="").grid(row=7, columnspan=2, sticky="WE")
                # View the clients requests
                requests = self.model.request_db.getByClientID(client.getID())
                if requests is False:
                    ttk.Label(self, text="No requests found").grid(row=8, columnspan=2)
                else:
                    ttk.Label(self, text="RequestID(Status):").grid(row=8, sticky="E")
                    ttk.Label(self, text="Event type").grid(row=8, column=1, sticky="W")
                    row = 9
                    for r in requests:
                        ttk.Label(self, text=str(r.getID()) + "(" + str(r.getStatus()) + "):").grid(row=row, sticky="E")
                        ttk.Label(self, text=r.getEventType()).grid(row=row, column=1, sticky="W")
                        row += 1
| rssalessio/MMSE15Project-RussoJohansson | mmse15project/views/subviews/SearchClient.py | Python | gpl-2.0 | 3,195 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio_ext.assets import Bundle
# Asset bundle for the personal-collections front-end, built with RequireJS.
js = Bundle(
    "js/personal_collection/init.js",
    output="personal-collections.js",
    weight=92,  # NOTE(review): assumed to control load ordering relative to other bundles -- confirm
    filters="requirejs",
    bower={
        # Personal collections
        "async": "~1.2.1",
        "depot": "~0.1.6",
        "lodash": "~3.9.3",
        "sortable.js": "~1.2.0",
    }
)
| ddaze/cds | cds/modules/personal_collection/bundles.py | Python | gpl-2.0 | 1,113 |
# This file contains the WSGI configuration required to serve up your
# web application.
# It works by setting the variable 'application' to a WSGI handler of some
# description.
#
# The below has been auto-generated for your Bottle project

import bottle
import os
import sys

# Directory containing this WSGI file; project paths are resolved from it.
here = os.path.dirname(__file__)

# add your project directory to the sys.path
project_home = os.path.join(here, "src/")
project_server = os.path.join(here, "src/server/")
if project_home not in sys.path:
    # Prepend both so project modules shadow any same-named installed ones.
    sys.path = [project_home] + [project_server] + sys.path

# make sure the default templates directory is known to Bottle
templates_dir = os.path.join(project_home, 'server/views/')
if templates_dir not in bottle.TEMPLATE_PATH:
    bottle.TEMPLATE_PATH.insert(0, templates_dir)

# 'application' is the WSGI callable the server discovers by name.
from server.control import application

if __name__ == "__main__":
    # Convenience: run a local development server when executed directly.
    bottle.run(host='localhost', port=8080)
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
## rank_display.py
#############################################################################
# Copyright (C) Labomedia November 2012
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franproplin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#############################################################################
'''
Ce script est lancé avec la scène Rank
Crée et applique le texte du classement à chaque frame
'''
from bge import logic as gl
# with
# gl.classement = {'toto': 3, 'labomedia': 2, 'gddgsg': 1}
# we must get
# 1. gddgsg + "\n\n"
# 2 . labomedia + "\n\n"
# 3 . toto + "\n\n"
def main():
    """Entry point run by the BGE logic brick each frame: rebuild then apply the ranking text."""
    create_text()
    apply_text()
def create_text():
    """Build the ranking display text into gl.text.

    Reads gl.level and gl.classement (mapping: player name -> rank, 0 while
    the player is still racing and unranked) and writes the multi-line
    result into gl.text, one "rank . name" entry per line.
    """
    # For every level, including level 1
    if gl.level == 1:
        level_corrected = 2  # 2 players even at level 1
        gl.classement = gl.classement_level1
    else :
        level_corrected = gl.level
    # Build a list with 1 element = 1 player
    text_list = [0] * level_corrected  # example: 3 gives [0, 0, 0]
    # If value=0 the player is still racing and unranked:
    # the name stays empty, only the rank number is displayed
    a = 0
    for key, value in gl.classement.items():
        if value == 0:
            # \n\n produces the line breaks
            text_list[a] = str(a+1) + ". \n\n"
            a += 1
    # If value!=0 the player is ranked
    b = 0
    for key, value in gl.classement.items():
        if value > 0:
            if key != "machine":
                # strip the time() digits appended to player names;
                # "machine" carries no time() suffix
                key = key[:-4]
                if len(key) > 11:
                    key = key[:-10]
            # \n\n produces the line breaks
            text_list[value - 1] = str(value) + " . " + str(key) + "\n\n"
            b += 1
    # Concatenate every entry into the final display string
    gl.text = ""
    for c in range(level_corrected):
        gl.text = gl.text + str(text_list[c])
def apply_text():
    '''Write gl.text into the "Text" property of the Rank_display object.

    gl.rank_obj can be unavailable (scene still loading) or lack the
    property; in that case print the diagnostic instead of crashing the
    game loop.
    '''
    a = "L'objet rank_obj n'est pas accesssible pour application du texte"
    try:
        gl.rank_obj["Text"] = gl.text
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; `except Exception` keeps the best-effort
        # behaviour without hiding interpreter-level exits.
        print(a)
| sergeLabo/mpff | game/scripts/rank_display.py | Python | gpl-2.0 | 2,839 |
from django.test import TestCase
from ..models import series_alias
class test_series_alias(TestCase):
    """Unit tests for the `series_alias` model."""

    def setUp(self):
        # An unsaved model instance is enough: these tests never hit the DB.
        self.subject = series_alias(name='Name')

    def test__series_alias__instance(self):
        self.assertIsInstance(self.subject, series_alias)

    def test__series_alias__str(self):
        # __str__ is expected to mirror the `name` field.
        self.assertEqual(str(self.subject), self.subject.name)
| marios-zindilis/musicbrainz-django-models | musicbrainz_django_models/tests/test_series_alias.py | Python | gpl-2.0 | 380 |
# -*- coding: utf-8 -*-
"""
@brief GUI panel for export functionality
This program is free software under the GNU General Public License
(>=v2). Read the file COPYING that comes with GRASS for details.
@author: Anna Petrasova ([email protected])
"""
import os
import wx
import wx.lib.filebrowsebutton as filebrowse
from gui_core.gselect import Select
from grass.pydispatch.signal import Signal
from tangible_utils import get_show_layer_icon
class OutputPanel(wx.Panel):
    """wx panel collecting output/export options for Tangible Landscape.

    Every widget writes straight into settings['output'] (scan raster
    name, color raster export, Blender coupling folder, PLY export) and
    settingsChanged is emitted after each modification so the scanning
    backend can re-read the configuration.
    """

    def __init__(self, parent, giface, settings):
        wx.Panel.__init__(self, parent)
        self.giface = giface
        self.settings = settings
        # Emitted from OnChange() after settings['output'] has been updated.
        self.settingsChanged = Signal('OutputPanel.settingsChanged')
        # First run: seed settings['output'] with a default for every key
        # the widgets below read.
        if 'output' not in self.settings:
            self.settings['output'] = {}
            self.settings['output']['scan'] = 'scan'
            self.settings['output']['PLY'] = False
            self.settings['output']['PLY_file'] = ''
            self.settings['output']['color'] = False
            self.settings['output']['color_name'] = ''
            self.settings['output']['blender'] = False
            self.settings['output']['blender_path'] = ''
        # Start the PLY file browser in the folder of the last export, if any.
        if self.settings['output']['PLY_file']:
            initDir = os.path.dirname(self.settings['output']['PLY_file'])
        else:
            initDir = ""
        # scan: name of the raster the scan is written to
        self.scan_name = wx.TextCtrl(self)
        self.scan_name.SetValue(self.settings['output']['scan'])
        self.scan_name.Bind(wx.EVT_TEXT, self.OnChange)
        bmp = get_show_layer_icon()
        self.addScan = wx.BitmapButton(self, bitmap=bmp, size=(bmp.GetWidth() + 12, bmp.GetHeight() + 8))
        self.addScan.Bind(wx.EVT_BUTTON, lambda evt: self._addLayer('scan'))
        # color: optional export of r/g/b rasters
        self.ifColor = wx.CheckBox(self, label=_("Save color rasters (with postfixes _r, _g, _b):"))
        self.ifColor.SetValue(self.settings['output']['color'])
        self.ifColor.Bind(wx.EVT_CHECKBOX, self.OnChange)
        self.exportColor = Select(self, size=(-1, -1), type='raster')
        self.exportColor.SetValue(self.settings['output']['color_name'])
        self.exportColor.Bind(wx.EVT_TEXT, self.OnChange)
        bmp = get_show_layer_icon()
        self.addColor = wx.BitmapButton(self, bitmap=bmp, size=(bmp.GetWidth() + 12, bmp.GetHeight() + 8))
        self.addColor.Bind(wx.EVT_BUTTON, lambda evt: self._addLayer('color'))
        # Blender: folder watched by the Blender coupling
        self.ifBlender = wx.CheckBox(self, label='')
        self.ifBlender.SetValue(self.settings['output']['blender'])
        self.ifBlender.Bind(wx.EVT_CHECKBOX, self.OnChange)
        initDirBlender = ''
        if self.settings['output']['blender_path']:
            initDirBlender = self.settings['output']['blender_path']
        self.blenderPath = filebrowse.DirBrowseButton(self, labelText="Export folder for Blender coupling:",
                                                      startDirectory=initDirBlender, newDirectory=True,
                                                      changeCallback=self.OnChange)
        # PLY: optional point-cloud export
        self.ifPLY = wx.CheckBox(self, label="")
        self.ifPLY.SetValue(self.settings['output']['PLY'])
        self.ifPLY.Bind(wx.EVT_CHECKBOX, self.OnChange)
        self.exportPLY = filebrowse.FileBrowseButton(self, labelText="Export PLY:", fileMode=wx.FD_SAVE,
                                                     startDirectory=initDir, initialValue=self.settings['output']['PLY_file'],
                                                     changeCallback=self.OnChange)
        # must be called after all widgets are created
        # (SetValue triggers changeCallback -> OnChange, which reads them all)
        self.blenderPath.SetValue(initDirBlender)
        mainSizer = wx.BoxSizer(wx.VERTICAL)
        # Row: scan raster name + "show layer" button
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(wx.StaticText(self, label="Name of scanned raster:"), flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL, border=5)
        sizer.Add(self.scan_name, proportion=1, flag=wx.EXPAND | wx.ALL, border=5)
        sizer.Add(self.addScan, proportion=0, flag=wx.EXPAND | wx.RIGHT | wx.TOP | wx.BOTTOM, border=5)
        mainSizer.Add(sizer, flag=wx.EXPAND | wx.ALL, border=5)
        # Row: color raster export
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(self.ifColor, flag=wx.ALIGN_CENTER_VERTICAL, border=5)
        sizer.Add(self.exportColor, proportion=1, flag=wx.ALIGN_CENTER_VERTICAL, border=5)
        sizer.Add(self.addColor, proportion=0, flag=wx.EXPAND | wx.ALL, border=5)
        mainSizer.Add(sizer, flag=wx.EXPAND | wx.ALL, border=5)
        # Row: Blender coupling folder
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(self.ifBlender, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL | wx.LEFT, border=3)
        sizer.Add(self.blenderPath, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL, proportion=1, border=0)
        mainSizer.Add(sizer, flag=wx.EXPAND | wx.ALL, border=5)
        # Row: PLY export
        sizer = wx.BoxSizer(wx.HORIZONTAL)
        sizer.Add(self.ifPLY, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL | wx.LEFT, border=3)
        sizer.Add(self.exportPLY, flag=wx.ALL | wx.ALIGN_CENTER_VERTICAL, proportion=1, border=0)
        mainSizer.Add(sizer, flag=wx.EXPAND | wx.ALL, border=5)
        self.SetSizer(mainSizer)
        mainSizer.Fit(self)

    def OnChange(self, event):
        """Mirror every widget value into settings['output'] and notify listeners."""
        self.settings['output']['scan'] = self.scan_name.GetValue()
        self.settings['output']['color'] = self.ifColor.IsChecked()
        self.settings['output']['color_name'] = self.exportColor.GetValue()
        self.settings['output']['blender'] = self.ifBlender.IsChecked()
        self.settings['output']['blender_path'] = self.blenderPath.GetValue()
        self.settings['output']['PLY'] = self.ifPLY.IsChecked()
        self.settings['output']['PLY_file'] = self.exportPLY.GetValue()
        self.settingsChanged.emit()

    def _addLayer(self, ltype):
        """Add the scan ('scan') or color composite ('color') raster to the GRASS layer tree."""
        ll = self.giface.GetLayerList()
        if ltype == 'scan':
            raster = self.scan_name.GetValue()
            if not raster:
                return
            cmd = ['d.rast', 'map=' + raster]
            ll.AddLayer('raster', name=raster, checked=True, cmd=cmd)
        elif ltype == 'color':
            name = self.exportColor.GetValue()
            if not name:
                return
            # d.rgb over the three exported channel rasters (_r, _g, _b).
            cmd = ['d.rgb', 'red={m}'.format(m=name + '_r'), 'green={m}'.format(m=name + '_g'), 'blue={m}'.format(m=name + '_b'), '-n']
            ll.AddLayer('rgb', name=name, checked=True, cmd=cmd)
| ncsu-osgeorel/grass-tangible-landscape | export.py | Python | gpl-2.0 | 6,303 |
import pygame

# Basic pygame demo: a 640x480 window showing centered green "Hello, World"
# text until the window is closed or Escape is pressed.
pygame.init()
screen = pygame.display.set_mode((640, 480))
clock = pygame.time.Clock()
done = False
font = pygame.font.SysFont("comicsansms", 72)
text = font.render("Hello, World", True, (0, 128, 0))

while not done:
    for event in pygame.event.get():
        # Either closing the window or hitting Escape ends the loop.
        closed = event.type == pygame.QUIT
        escaped = (event.type == pygame.KEYDOWN
                   and event.key == pygame.K_ESCAPE)
        if closed or escaped:
            done = True

    screen.fill((255, 255, 255))
    # Center the rendered text on the 640x480 surface.
    pos_x = 320 - text.get_width() // 2
    pos_y = 240 - text.get_height() // 2
    screen.blit(text, (pos_x, pos_y))
    pygame.display.flip()
    clock.tick(60)  # cap at 60 FPS
| scottjames/LightArcade | raspi/quick.py | Python | gpl-2.0 | 611 |
# Copyright (C) 2013-2018 Samuel Damashek, Peter Foley, James Forcier, Srijay Kasturi, Reed Koser, Christopher Reffett, and Tris Wilson
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import re
from lxml.html import fromstring
from requests import get
from ..helpers.command import Command
from ..helpers.urlutils import get_short
@Command(['cve', 'cveid'], ['config'])
def cmd(send, msg, args):
    """Gets info on a CVE id from MITRE's CVE database

    Syntax: {command} <cveid>
    """
    elements = msg.split('-')
    # Accept either "CVE-YYYY-NNNN..." (3 fields) or "YYYY-NNNN..." (2 fields).
    if len(elements) > 3 or len(elements) < 2:
        send("Invalid CVE format")
        return
    # If there are three fields, ignore the first (we don't actually need to send CVE-
    if len(elements) == 3:
        if elements[0].upper() != 'CVE':
            send("Invalid CVE format")
            return
        elements.pop(0)
    # The first digit field should be exactly four digits long, the second is 4+
    # (fullmatch replaces the anchored re.search for clarity; same semantics).
    if not re.fullmatch(r"\d{4}", elements[0]) or not re.fullmatch(r"\d{4,}", elements[1]):
        send("Invalid CVE format")
        return
    search = f"{elements[0]}-{elements[1]}"
    # f-string for consistency with the rest of this function (was %-formatting);
    # the resulting URL text is unchanged.
    url = f'http://cve.mitre.org/cgi-bin/cvename.cgi?name={search}'
    html = fromstring(get(url).text)
    # The CVE name/summary sits on the third line of the page's <title>.
    title = html.find(".//title").text.splitlines()[2]
    if title.startswith('ERROR'):
        output = 'Invalid CVE Number'
    else:
        key = args['config']['api']['bitlykey']
        output = f"{title} -- {get_short(url, key)}"
    send(output)
| tjcsl/cslbot | cslbot/commands/cve.py | Python | gpl-2.0 | 2,149 |
# -*- coding: utf-8 -*-
#
# Copyright © 2008-2015 Red Hat, Inc. All rights reserved.
# Copyright © 2008-2015 Luke Macken <[email protected]>
# Copyright © 2008 Kushal Das <[email protected]>
#
# This copyrighted material is made available to anyone wishing to use, modify,
# copy, or redistribute it subject to the terms and conditions of the GNU
# General Public License v.2. This program is distributed in the hope that it
# will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details. You should have
# received a copy of the GNU General Public License along with this program; if
# not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth
# Floor, Boston, MA 02110-1301, USA. Any Red Hat trademarks that are
# incorporated in the source code or documentation are not subject to the GNU
# General Public License and may only be used or replicated with the express
# permission of Red Hat, Inc.
#
# Author(s): Luke Macken <[email protected]>
# Kushal Das <[email protected]>
"""
A cross-platform graphical interface for the LiveUSBCreator
"""
import os
import sys
import logging
import urlparse
from time import sleep
from datetime import datetime
from PyQt4 import QtCore, QtGui
from liveusb import LiveUSBCreator, LiveUSBError, LiveUSBInterface, _
from liveusb.releases import releases, get_fedora_releases
if sys.platform == 'win32':
from liveusb.urlgrabber.grabber import URLGrabber, URLGrabError
from liveusb.urlgrabber.progress import BaseMeter
else:
from urlgrabber.grabber import URLGrabber, URLGrabError
from urlgrabber.progress import BaseMeter
try:
import dbus.mainloop.qt
dbus.mainloop.qt.DBusQtMainLoop(set_as_default=True)
except:
pass
MAX_FAT16 = 2047
MAX_FAT32 = 3999
MAX_EXT = 2097152
class LiveUSBApp(QtGui.QApplication):
    """ Main application class """
    def __init__(self, opts, args):
        if sys.platform[:5] == 'linux':
            # Force the native X11 graphics system on Linux.
            # NOTE(review): assumed to avoid raster-engine rendering issues -- confirm.
            QtGui.QApplication.setGraphicsSystem('native')
        QtGui.QApplication.__init__(self, args)
        self.mywindow = LiveUSBWindow(opts, args)
        self.mywindow.show()
        try:
            self.exec_()
        finally:
            # Always stop the window's worker threads, even if the event
            # loop exits with an exception.
            self.mywindow.terminate()
class ReleaseDownloader(QtCore.QThread):
def __init__(self, release, progress, proxies):
QtCore.QThread.__init__(self)
self.release = release
self.progress = progress
self.proxies = proxies
for rel in releases:
if rel['name'] == str(release):
self.url = rel['url']
break
else:
raise LiveUSBError(_("Unknown release: %s" % release))
def run(self):
self.emit(QtCore.SIGNAL("status(PyQt_PyObject)"),
_("Downloading %s..." % os.path.basename(self.url)))
grabber = URLGrabber(progress_obj=self.progress, proxies=self.proxies)
home = os.getenv('HOME', 'USERPROFILE')
filename = os.path.basename(urlparse.urlparse(self.url).path)
for folder in ('Downloads', 'My Documents'):
if os.path.isdir(os.path.join(home, folder)):
filename = os.path.join(home, folder, filename)
break
try:
iso = grabber.urlgrab(self.url, reget='simple')
except URLGrabError, e:
self.emit(QtCore.SIGNAL("dlcomplete(PyQt_PyObject)"), e.strerror)
else:
self.emit(QtCore.SIGNAL("dlcomplete(PyQt_PyObject)"), iso)
class DownloadProgress(QtCore.QObject, BaseMeter):
    """ A QObject urlgrabber BaseMeter class.

    This class is called automatically by urlgrabber with our download details.
    This class then sends signals to our main dialog window to update the
    progress bar.
    """
    def start(self, filename=None, url=None, basename=None, size=None,
              now=None, text=None):
        # size = total download size in bytes; it becomes the bar's maximum.
        self.emit(QtCore.SIGNAL("maxprogress(int)"), size)

    def update(self, amount_read, now=None):
        """ Update our download progressbar.

        :read: the number of bytes read so far
        """
        self.emit(QtCore.SIGNAL("progress(int)"), amount_read)

    def end(self, amount_read):
        # Final callback: push the bar to the total amount read.
        self.update(amount_read)
class ProgressThread(QtCore.QThread):
    """ A thread that monitors the progress of Live USB creation.

    This thread periodically checks the amount of free space left on the
    given drive and sends a signal to our main dialog window to update the
    progress bar.
    """
    # Class-level defaults; set_data() must be called before start().
    totalsize = 0          # total amount to be written, in KiB
    orig_free = 0          # free bytes on the drive when monitoring began
    drive = None           # device path being watched
    get_free_bytes = None  # callable returning current free bytes on the drive
    alive = True           # cleared by stop() to end the polling loop

    def set_data(self, size, drive, freebytes):
        # size arrives in bytes; the progress bar works in KiB.
        self.totalsize = size / 1024
        self.drive = drive
        self.get_free_bytes = freebytes
        self.orig_free = self.get_free_bytes()
        self.emit(QtCore.SIGNAL("maxprogress(int)"), self.totalsize)

    def run(self):
        # Poll every 3 seconds; progress = KiB consumed since monitoring began.
        while self.alive:
            free = self.get_free_bytes()
            value = (self.orig_free - free) / 1024
            self.emit(QtCore.SIGNAL("progress(int)"), value)
            if value >= self.totalsize:
                break
            sleep(3)

    def stop(self):
        self.alive = False

    def terminate(self):
        # Push the bar to 100% before killing the thread.
        self.emit(QtCore.SIGNAL("progress(int)"), self.totalsize)
        QtCore.QThread.terminate(self)
class LiveUSBThread(QtCore.QThread):
    """ Worker thread that performs the actual LiveUSB creation.

    Runs either a destructive dd of the ISO, or the non-destructive
    verify/extract/overlay/bootloader sequence, reporting status and
    progress back to the GUI through Qt signals.
    """

    def __init__(self, live, progress, parent=None):
        QtCore.QThread.__init__(self, parent)
        self.progress = progress   # ProgressThread watching the drive's free space
        self.parent = parent       # LiveUSBWindow (queried for widget state)
        self.live = live           # LiveUSBCreator doing the real work

    def status(self, text):
        # Forward a status line to the GUI text box.
        self.emit(QtCore.SIGNAL("status(PyQt_PyObject)"), text)

    def run(self):
        # Capture the creator's log output for the duration of this run.
        handler = LiveUSBLogHandler(self.status)
        self.live.log.addHandler(handler)
        now = datetime.now()
        try:
            #if self.parent.opts.format:
            #    self.live.unmount_device()
            #    self.live.format_device()

            # Initialize zip-drive-compatible geometry
            #if self.parent.opts.zip:
            #    self.live.dest = self.live.drive['mount']
            #    self.live.drive['unmount'] = True
            #    self.live.unmount_device()
            #    self.live.initialize_zip_geometry()
            #    self.live.drive = self.parent.get_selected_drive()
            #    self.live.dest = self.live.drive['mount']
            #    self.live.drive['unmount'] = True
            #    self.live.unmount_device()
            #    self.live.format_device()

            # If we're going to dd the image
            if self.parent.destructiveButton.isChecked():
                # Indeterminate bar: dd provides no progress feedback.
                self.parent.progressBar.setRange(0, 0)
                self.live.dd_image()
                self.live.log.removeHandler(handler)
                duration = str(datetime.now() - now).split('.')[0]
                self.status(_("Complete! (%s)") % duration)
                self.parent.progressBar.setRange(0, 1)
                return

            self.live.verify_filesystem()
            # Either a UUID or a label is required for the bootloader config.
            if not self.live.drive['uuid'] and not self.live.label:
                self.status(_("Error: Cannot set the label or obtain "
                              "the UUID of your device. Unable to continue."))
                self.live.log.removeHandler(handler)
                return

            self.live.check_free_space()

            if not self.parent.opts.noverify:
                # Verify the MD5 checksum inside of the ISO image
                if not self.live.verify_iso_md5():
                    self.live.log.removeHandler(handler)
                    return

                # If we know about this ISO, and it's SHA1 -- verify it
                release = self.live.get_release_from_iso()
                if release and ('sha1' in release or 'sha256' in release):
                    if not self.live.verify_iso_sha1(progress=self):
                        self.live.log.removeHandler(handler)
                        return

            # Setup the progress bar
            self.progress.set_data(size=self.live.totalsize,
                                   drive=self.live.drive['device'],
                                   freebytes=self.live.get_free_bytes)
            self.progress.start()

            self.live.extract_iso()
            self.live.create_persistent_overlay()
            self.live.update_configs()
            self.live.install_bootloader()
            self.live.bootable_partition()

            if self.parent.opts.device_checksum:
                self.live.calculate_device_checksum(progress=self)
            if self.parent.opts.liveos_checksum:
                self.live.calculate_liveos_checksum()

            self.progress.stop()

            # Flush all filesystem buffers and unmount
            self.live.flush_buffers()
            self.live.unmount_device()
            duration = str(datetime.now() - now).split('.')[0]
            self.status(_("Complete! (%s)" % duration))
        except Exception, e:
            # Best-effort reporting: surface the failure in the GUI and log.
            self.status(e.args[0])
            self.status(_("LiveUSB creation failed!"))
            self.live.log.exception(e)

        self.live.log.removeHandler(handler)
        self.progress.terminate()

    def set_max_progress(self, maximum):
        # Called by LiveUSBCreator verification helpers (progress=self).
        self.emit(QtCore.SIGNAL("maxprogress(int)"), maximum)

    def update_progress(self, value):
        self.emit(QtCore.SIGNAL("progress(int)"), value)

    def __del__(self):
        # Block until the thread finishes before the object is destroyed.
        self.wait()
class LiveUSBLogHandler(logging.Handler):
    """logging.Handler that forwards selected records to a GUI callback.

    Only informational, warning and error messages are forwarded; DEBUG
    chatter stays out of the GUI text box.
    """

    def __init__(self, cb):
        logging.Handler.__init__(self)
        self.cb = cb  # callable taking the message string

    def emit(self, record):
        # Bug fix: the standard levelname for warnings is 'WARNING', not
        # 'WARN', so warning records never matched the original check and
        # never reached the GUI. 'WARN' is kept in case a custom level was
        # registered under that name.
        if record.levelname in ('INFO', 'ERROR', 'WARN', 'WARNING'):
            self.cb(record.msg)
class LiveUSBWindow(QtGui.QMainWindow, LiveUSBInterface):
""" Our main dialog class """
def __init__(self, opts, args):
self.in_process = False
QtGui.QMainWindow.__init__(self)
LiveUSBInterface.__init__(self)
self.setWindowFlags(QtCore.Qt.WindowCloseButtonHint)
self.opts = opts
self.args = args
self.setupUi(self)
self.live = LiveUSBCreator(opts=opts)
self.populate_releases()
self.populate_devices()
self.downloader = None
self.progress_thread = ProgressThread()
self.download_progress = DownloadProgress()
self.live_thread = LiveUSBThread(live=self.live,
progress=self.progress_thread,
parent=self)
self.connect_slots()
self.confirmed = False
self.mbr_reset_confirmed = False
if self.opts.destructive:
self.destructiveButton.setChecked(True)
# Intercept all liveusb INFO log messages, and display them in the gui
self.handler = LiveUSBLogHandler(lambda x: self.textEdit.append(x))
self.live.log.addHandler(self.handler)
if not self.opts.verbose:
self.live.log.removeHandler(self.live.handler)
# If an ISO was specified on the command line, use it.
if args:
for arg in self.args:
if arg.lower().endswith('.iso') and os.path.exists(arg):
self.selectfile(arg)
# Determine if we have admin rights
if not self.live.is_admin():
self.live.log.error(_('Warning: This tool needs to be run as an '
'Administrator. To do this, right click on the icon and open '
'the Properties. Under the Compatibility tab, check the "Run '
'this program as an administrator" box.'))
def populate_devices(self, *args, **kw):
if self.in_process:
return
self.driveBox.clear()
#self.textEdit.clear()
def add_devices():
if not len(self.live.drives):
self.textEdit.setPlainText(_("Unable to find any USB drives"))
self.startButton.setEnabled(False)
return
for device, info in self.live.drives.items():
if info['label']:
self.driveBox.addItem("%s (%s)" % (device, info['label']))
else:
self.driveBox.addItem(device)
self.startButton.setEnabled(True)
try:
self.live.detect_removable_drives(callback=add_devices)
except LiveUSBError, e:
self.textEdit.setPlainText(e.args[0])
self.startButton.setEnabled(False)
def populate_releases(self):
for release in [release['name'] for release in releases]:
self.downloadCombo.addItem(release)
def refresh_releases(self):
self.live.log.info(_('Refreshing releases...'))
global releases
try:
releases = get_fedora_releases()
self.downloadCombo.clear()
for release in [release['name'] for release in releases]:
self.downloadCombo.addItem(release)
self.live.log.info(_('Releases updated!'))
except Exception, e:
self.live.log.error(_('Unable to fetch releases: %r') % e)
def connect_slots(self):
self.connect(self, QtCore.SIGNAL('triggered()'), self.terminate)
self.connect(self.isoBttn, QtCore.SIGNAL("clicked()"), self.selectfile)
self.connect(self.startButton, QtCore.SIGNAL("clicked()"), self.begin)
self.connect(self.overlaySlider, QtCore.SIGNAL("valueChanged(int)"),
self.overlay_value)
self.connect(self.live_thread, QtCore.SIGNAL("status(PyQt_PyObject)"),
self.status)
self.connect(self.live_thread, QtCore.SIGNAL("finished()"),
lambda: self.enable_widgets(True))
self.connect(self.live_thread, QtCore.SIGNAL("terminated()"),
lambda: self.enable_widgets(True))
self.connect(self.live_thread, QtCore.SIGNAL("progress(int)"),
self.progress)
self.connect(self.live_thread, QtCore.SIGNAL("maxprogress(int)"),
self.maxprogress)
self.connect(self.progress_thread, QtCore.SIGNAL("progress(int)"),
self.progress)
self.connect(self.progress_thread, QtCore.SIGNAL("maxprogress(int)"),
self.maxprogress)
self.connect(self.download_progress, QtCore.SIGNAL("maxprogress(int)"),
self.maxprogress)
self.connect(self.download_progress, QtCore.SIGNAL("progress(int)"),
self.progress)
self.connect(self.destructiveButton, QtCore.SIGNAL("toggled(bool)"),
self.method_destructive_toggled)
self.connect(self.nonDestructiveButton, QtCore.SIGNAL("toggled(bool)"),
self.method_nondestructive_toggled)
if hasattr(self, 'refreshDevicesButton'):
self.connect(self.refreshDevicesButton, QtCore.SIGNAL("clicked()"),
self.populate_devices)
if hasattr(self, 'refreshReleasesButton'):
self.connect(self.refreshReleasesButton, QtCore.SIGNAL("clicked()"),
self.refresh_releases)
# If we have access to HAL & DBus, intercept some useful signals
if hasattr(self.live, 'udisks'):
self.live.udisks.connect_to_signal('DeviceAdded',
self.populate_devices)
self.live.udisks.connect_to_signal('DeviceRemoved',
self.populate_devices)
@QtCore.pyqtSignature("QString")
def on_driveBox_currentIndexChanged(self, drive):
""" Change the maximum overlay size when each drive is selected.
This sets the maximum megabyte size of the persistent storage slider
to the number of free megabytes on the currently selected
"Target Device". If the device is not mounted, or if it has more than
2gigs of free space, set the maximum to 2047mb, which is apparently
the largest file we can/should store on a vfat partition.
"""
drive = unicode(drive)
if not drive:
return
self._refresh_overlay_slider(drive.split()[0])
def _refresh_overlay_slider(self, drive=None):
"""
Reset the persistent storage slider based on the amount of free space
on the device and the ISO size.
"""
if not drive:
drive = self.get_selected_drive()
if not drive:
return
device = self.live.drives[drive]
freespace = device['free']
device_size = device['size'] / 1024**2
current_overlay = self.overlaySlider.value()
if device['fsversion'] == 'FAT32':
self.live.log.debug(_('Partition is FAT32; Restricting overlay '
'size to 4G'))
max_space = MAX_FAT32
elif device['fsversion'] == 'FAT16':
self.live.log.debug(_('Partition is FAT16; Restricting overlay '
'size to 2G'))
max_space = MAX_FAT16
else:
max_space = MAX_EXT
if freespace:
if freespace > device_size:
freespace = device_size
if freespace > max_space:
freespace = max_space
if not device['mount']:
self.live.log.warning(_('Device is not yet mounted, so we cannot '
'determine the amount of free space.'))
if not freespace:
freespace = device_size
else:
if not freespace:
self.live.log.warning(_('No free space on %s') % drive)
freespace = 0
# Subtract the size of the ISO from our maximum overlay size
if self.live.isosize:
iso_size = self.live.isosize / 1024**2
if freespace + iso_size > device['free']:
freespace -= iso_size
freespace -= 1 # Don't fill the device 100%
if freespace < 0:
freespace = 0
if freespace < current_overlay:
self.overlaySlider.setValue(freespace)
self.live.overlay = self.overlaySlider.value()
self.overlaySlider.setMaximum(freespace)
def progress(self, value):
self.progressBar.setValue(value)
def maxprogress(self, value):
self.progressBar.setMaximum(value)
def status(self, text):
if not isinstance(text, basestring):
text = str(text)
self.textEdit.append(text)
def enable_widgets(self, enabled=True):
self.startButton.setEnabled(enabled)
self.driveBox.setEnabled(enabled)
self.overlaySlider.setEnabled(enabled)
self.isoBttn.setEnabled(enabled)
self.downloadCombo.setEnabled(enabled)
self.destructiveButton.setEnabled(enabled)
self.nonDestructiveButton.setEnabled(enabled)
if hasattr(self, 'refreshDevicesButton'):
self.refreshDevicesButton.setEnabled(enabled)
if hasattr(self, 'refreshReleasesButton'):
self.refreshReleasesButton.setEnabled(enabled)
self.in_process = not enabled
def overlay_value(self, value):
self.overlayTitle.setTitle(_("Persistent Storage") + " (%d MB)" % value)
def get_selected_drive(self):
text = self.live._to_unicode(self.driveBox.currentText()).split()
if text:
return text[0]
    def begin(self):
        """ Begin the liveusb creation process.
            This method is called when the "Create LiveUSB" button is clicked.

            Destructive steps are deliberately two-phase: resetting a blank
            MBR, overwriting an existing LiveOS and a destructive install each
            show a warning and return, and only proceed when the user clicks
            the button a second time.  The pending confirmations are tracked
            in self.mbr_reset_confirmed and self.confirmed.
        """
        self.enable_widgets(False)
        self.live.overlay = self.overlaySlider.value()
        self.live.drive = self.get_selected_drive()
        # Unmount the device and check the MBR
        if self.nonDestructiveButton.isChecked():
            if self.live.blank_mbr():
                if not self.mbr_reset_confirmed:
                    # First click: warn only, arm the confirmation flag
                    self.status(_("The Master Boot Record on your device is blank. "
                                  "Pressing 'Create LiveUSB' again will reset the "
                                  "MBR on this device."))
                    self.mbr_reset_confirmed = True
                    self.enable_widgets(True)
                    return
                # Second click: unmount if needed, then write a fresh MBR
                if self.live.drive['mount']:
                    self.live.dest = self.live.drive['mount']
                    self.live.unmount_device()
                self.live.reset_mbr()
            elif not self.live.mbr_matches_syslinux_bin():
                if self.opts.reset_mbr:
                    self.live.reset_mbr()
                else:
                    self.live.log.warn(_("Warning: The Master Boot Record on your device "
                                         "does not match your system's syslinux MBR. If you "
                                         "have trouble booting this stick, try running the "
                                         "liveusb-creator with the --reset-mbr option."))
            try:
                self.live.mount_device()
                self._refresh_overlay_slider() # To reflect the drives free space
            except LiveUSBError, e:
                self.status(e.args[0])
                self.enable_widgets(True)
                return
            except OSError, e:
                self.status(_('Unable to mount device'))
                self.enable_widgets(True)
                return
            if self.live.existing_liveos():
                if not self.confirmed:
                    # First click with an existing LiveOS: warn and wait
                    self.status(_("Your device already contains a LiveOS.\nIf you "
                                  "continue, this will be overwritten."))
                    if self.live.existing_overlay() and self.overlaySlider.value():
                        self.status(_("Warning: Creating a new persistent overlay "
                                      "will delete your existing one."))
                    self.status(_("Press 'Create Live USB' again if you wish to "
                                  "continue."))
                    self.confirmed = True
                    #self.live.unmount_device()
                    self.enable_widgets(True)
                    return
                else:
                    # The user has confirmed that they wish to overwrite their
                    # existing Live OS.  Here we delete it first, in order to
                    # accurately calculate progress.
                    self.confirmed = False
                    try:
                        self.live.delete_liveos()
                    except LiveUSBError, e:
                        self.status(e.args[0])
                        #self.live.unmount_device()
                        self.enable_widgets(True)
                        return
        else:
            # Require confirmation for destructive installs
            if not self.confirmed:
                self.status(_("WARNING: You are about to perform a destructive install. This will destroy all data and partitions on your USB drive. Press 'Create Live USB' again to continue."))
                self.confirmed = True
                self.enable_widgets(True)
                return
        # Remove the log handler, because our live thread will register its own
        self.live.log.removeHandler(self.handler)
        # If the user has selected an ISO, use it.  If not, download one.
        if self.live.iso:
            self.live_thread.start()
        else:
            self.downloader = ReleaseDownloader(
                    self.downloadCombo.currentText(),
                    progress=self.download_progress,
                    proxies=self.live.get_proxies())
            self.connect(self.downloader,
                         QtCore.SIGNAL("dlcomplete(PyQt_PyObject)"),
                         self.download_complete)
            self.connect(self.downloader,
                         QtCore.SIGNAL("status(PyQt_PyObject)"),
                         self.status)
            self.downloader.start()
def download_complete(self, iso):
""" Called by our ReleaseDownloader thread upon completion.
Upon success, the thread passes in the filename of the downloaded
release. If the 'iso' argument is not an existing file, then
it is assumed that the download failed and 'iso' should contain
the error message.
"""
if os.path.exists(iso):
self.status(_("Download complete!"))
self.live.iso = iso
self.live_thread.start()
else:
self.status(_("Download failed: " + iso))
self.status(_("You can try again to resume your download"))
self.enable_widgets(True)
    def selectfile(self, isofile=None):
        """Use the given ISO, or prompt the user to pick one with a file dialog.

        On success the ISO is handed to the backend, the overlay slider is
        refreshed for the new image size, and the download group is disabled
        (a local ISO makes downloading unnecessary).
        """
        if not isofile:
            isofile = QtGui.QFileDialog.getOpenFileName(self,
                         _("Select Live ISO"), ".", "ISO (*.iso)" )
        if isofile:
            try:
                self.live.set_iso(isofile)
            except Exception, e:
                # Most likely a filename-encoding problem on Windows
                self.live.log.error(e.args[0])
                self.status(_("Unable to encode the filename of your livecd. "
                              "You may have better luck if you move your ISO "
                              "to the root of your drive (ie: C:\)"))
            self.live.log.info('%s ' % os.path.basename(self.live.iso) +
                               _("selected"))
            self._refresh_overlay_slider()
            self.downloadGroup.setEnabled(False)
    def terminate(self):
        """ Terminate any processes that we have spawned """
        # Pure delegation: the backend object tracks its own child processes.
        self.live.terminate()
    def method_destructive_toggled(self, enabled):
        """Radio toggle: destructive install selected -> persistent overlay unavailable."""
        if enabled:
            self.overlayTitle.setEnabled(False)
    def method_nondestructive_toggled(self, enabled):
        """Radio toggle: non-destructive install selected -> persistent overlay available."""
        if enabled:
            self.overlayTitle.setEnabled(True)
| gastrodia/liveusb-creator | liveusb/gui.py | Python | gpl-2.0 | 25,975 |
"""
Parser Exceptions shared by all Parsers
"""
from marshmallow import ValidationError
class ParserException(Exception):
    """Base class for every parser-related error in this package."""
class NoParserException(Exception):
    """Signal that no parser could be selected for the given input."""
class FormatException(ParserException):
    """
    Use for badly formatted files
    (the data was read, but could not be interpreted in the expected format).
    """
    pass
class UndefinedRecordType(FormatException):
    """
    Use when no record type is stated or inferred.
    A specialisation of FormatException.
    """
class NonRecordEntry(FormatException):
    """
    Use when a CSV or Excel file contains extraneous records
    (entries that are not part of any expected record).
    """
class IllegalCharacter(ValidationError):
    """
    Raise when a disallowed character is present.
    Subclasses marshmallow's ValidationError so schema validation reports it.
    """
class NoSequence(ValidationError):
    """
    Raise when an empty sequence or pattern is provided.
    Subclasses marshmallow's ValidationError so schema validation reports it.
    """
class NullCoordinates(ParserException):
    """Features must have a length of 1 or more
    (a zero-length coordinate span is invalid)."""
    pass
| DeskGen/dgparse | dgparse/exc.py | Python | gpl-2.0 | 832 |
from threading import Thread
import time
class trackMain(object):
    """Track which users are connected via socket heartbeats.

    A per-user watchdog thread periodically clears the user's flag and
    emits a 'heartbeat' request; the client answers with a 'heartbeat'
    event (registered in __init__) which sets the flag back to True.
    If no answer arrives within the timeout, the user is dropped.
    """

    def __init__(self, app):
        # user name -> True once a heartbeat reply was seen this round
        self.loggedIn = {}
        self.socket = app
        # seconds between heartbeat rounds
        self.checkTime = 3
        # seconds to wait for a reply before declaring the user gone
        self.timeoutThreshhold = 2

        @app.on('heartbeat')
        def heartbeat(name):
            # Closure over self: mark the replying user as alive
            self.loggedIn[name] = True

    def addUser(self, user):
        """Register a user and start their watchdog thread."""
        # Mark alive *before* the thread runs to avoid a startup race.
        self.loggedIn[user] = True
        watchdog = Thread(target=self.heartbeat_thread, args=(user,))
        # Daemon thread: do not keep the process alive just for watchdogs.
        watchdog.daemon = True
        watchdog.start()

    def heartbeat_thread(self, user):
        """Watchdog loop for one user; exits once the user is gone."""
        while True:
            # time between checks
            time.sleep(self.checkTime)
            if user not in self.loggedIn:
                # Removed elsewhere (e.g. explicit logout) -- stop watching.
                break
            self.loggedIn[user] = False
            self.socket.emit('heartbeat')
            # time before timeout after sending the signal
            time.sleep(self.timeoutThreshhold)
            if not self.loggedIn.get(user, False):
                self.userLeft(user)
                break

    def userLeft(self, user):
        """Drop a user and broadcast the updated user list."""
        # pop() instead of del: tolerate concurrent removal by another path.
        self.loggedIn.pop(user, None)
        self.socket.emit('left', {'user': user, 'users': self.getUsers()})

    def getUsers(self):
        """Return the currently tracked user names as a list.

        A real list (not a dict_keys view) so the value is JSON-serializable
        when emitted over the socket.
        """
        return list(self.loggedIn)
| sona1111/flaskchat | app/trackUsers.py | Python | gpl-2.0 | 1,192 |
from django.urls import path
from .views import MailchimpSubscribeView
# URL namespace, used with reverse(), e.g. reverse('newsletter:subscribe')
app_name = 'newsletter'
urlpatterns = [
    path('subscribe/', MailchimpSubscribeView.as_view(), name='subscribe')
]
| OpenTechFund/WebApp | opentech/public/mailchimp/urls.py | Python | gpl-2.0 | 193 |
from controller.registrar import Registrar
import json
class TestRegistrar:
    """Smoke tests for the Registrar controller (run as a script, not pytest)."""

    def __init__(self, conf, auth_info):
        self.registrar = Registrar(conf, auth_info)

    def test_client(self):
        """Check the registrar exposes a usable client handle."""
        assert self.registrar.client != None
        # NOTE(review): this assertion duplicates the previous line; it was
        # probably meant to check a different attribute (e.g. client.handle).
        # Confirm the intent before changing it.
        assert self.registrar.client != None
        print self.registrar.client.handle.flavors.list()

    def test_owner(self, conf):
        """Check the registrar's owner record matches the client config."""
        owner = self.registrar.owner
        assert owner["name"] == conf["owner"]
        print "name:\t%s" % owner["name"]
        # email and mobile are optional config keys
        if "email" in conf:
            assert owner["email"] == conf["email"]
            print "email:\t%s" % owner["email"]
        if "mobile" in conf:
            assert owner["mobile"] == conf["mobile"]
            print "mobile:\t%s" % owner["mobile"]
if __name__ == "__main__":
    # Load the first client entry and its matching credentials, then run
    # both smoke tests against it.
    conf_file = open("../config/clients.json")
    conf_json = json.load(conf_file)
    client = conf_json[0]
    auth_info = json.load(open("../config/auth.json"))[client["name"]]
    test = TestRegistrar(client, auth_info)
    test.test_client()
    test.test_owner(client)
| balajiiyer-ufl-projects/OpenStackVaccine | test/registrar_test.py | Python | gpl-2.0 | 1,074 |
import gdb
import gdb_lookup
# Hook the Rust pretty-printers into the running GDB session.
gdb_lookup.register_printers(gdb)
| eranif/codelite | Runtime/gdb_printers/rustlib/etc/gdb_load_rust_pretty_printers.py | Python | gpl-2.0 | 63 |
# -----------------------------------------------------------------------------
# Sharamaan Web client - Web client of Sharamaan GIS suite
# Copyright (C) 2012-2013 Yellowen Development Team <checkout AUTHORS>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
# -----------------------------------------------------------------------------
from django.shortcuts import render_to_response as rr
from django.contrib.auth import authenticate, login, logout
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.utils.translation import ugettext as _
from auth.forms import LoginForm
def login_view(request):
    """Render and process the login form.

    POST: validate credentials and redirect to the 'next' URL on success;
    re-render the form with a message for inactive accounts or bad
    credentials.  GET: redirect already-authenticated users, otherwise show
    the form.
    """
    if request.method == "POST":
        form = LoginForm(request.POST)
        next_ = request.POST.get("next", "/")
        if form.is_valid():
            username = form.cleaned_data['username']
            password = form.cleaned_data['password']
            user = authenticate(username=username, password=password)
            if user is not None:
                if user.is_active:
                    login(request, user)
                    return HttpResponseRedirect(next_)
                else:
                    return rr("registration/login.html",
                              {"msg": _("Your account is not active.")},
                              context_instance=RequestContext(request))
            else:
                return rr("registration/login.html",
                          {"msg": _("username or password is incorrect.")},
                          context_instance=RequestContext(request))
        # NOTE(review): when the form is invalid, execution falls off the end
        # of this branch and the view returns None, which Django rejects.
        # Confirm whether the final render below was meant to be shared by
        # both branches (i.e. dedented to function level).
    else:
        next_ = request.GET.get("next", "/")
        if request.user.is_authenticated():
            return HttpResponseRedirect(next_)
        return rr("registration/login.html", {"next": next_},
                  context_instance=RequestContext(request))
def logout_view(request):
    """Log the current user out (safe for anonymous users) and go to the root page."""
    logout(request)
    return HttpResponseRedirect("/")
| Yellowen/Sharamaan | auth/views.py | Python | gpl-2.0 | 2,633 |
"""
MetPX Copyright (C) 2004-2006 Environment Canada
MetPX comes with ABSOLUTELY NO WARRANTY; For details type see the file
named COPYING in the root of the source directory tree.
"""
"""
#############################################################################################
# Name: AlarmFTP.py
#
# Authors:
# Michel Grenier (from a web inspiration)
#
#############################################################################################
"""
import signal,socket
class FtpTimeoutException(Exception):
    """Exception raised when an FTP operation exceeds its allotted time."""
class AlarmFTP:
    """SIGALRM-based watchdog for blocking FTP calls.

    Call alarm(seconds) before a blocking operation and cancel() right
    after it; if the operation takes too long, FtpTimeoutException is
    raised carrying the message given at construction time.

    NOTE(review): signal.alarm() only works in the main thread and only one
    alarm can be pending per process -- confirm callers respect this.
    """
    def __init__(self, message ):
        # state is True while an alarm is armed
        self.state = False
        self.message = message
    def sigalarm(self, n, f):
        # SIGALRM handler (signum, frame): surface the timeout as an exception
        raise FtpTimeoutException(self.message)
    def alarm(self, time):
        # Arm a one-shot alarm firing after `time` seconds
        self.state = True
        signal.signal(signal.SIGALRM, self.sigalarm)
        signal.alarm(time)
    def cancel(self):
        # Disarm any pending alarm (alarm(0) clears it)
        signal.alarm(0)
        self.state = False
| khosrow/metpx | sundew/lib/AlarmFTP.py | Python | gpl-2.0 | 1,016 |
"""
A SCons tool that tries to run python-config to collect the build environment
variables.
Sets env['HAVE_PYTHON_CONFIG'] to True on success, False otherwise.
$Id: ImportPythonConfig.py 806 2013-12-26 21:50:22Z weegreenblobbie $
"""
"""
Nsound is a C++ library and Python module for audio synthesis featuring
dynamic digital filters. Nsound lets you easily shape waveforms and write
to disk or plot them. Nsound aims to be as powerful as Csound but easy to
use.
Copyright (c) 2004 to Present Nick Hilton
weegreenblobbie2_at_gmail_com
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Library General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
import distutils
import distutils.sysconfig
import os
import os.path
import sys
import warnings
# Local tools
import _nsound_utils as nu
#------------------------------------------------------------------------------
# Globals & constants
on_windows = sys.platform == 'win32'
on_mac = sys.platform == 'darwin'
def generate(env):
    """
    On posix systems, try to run the pythonA.B-config tool to import the build
    flags necessary to build and link Python extensions.

    Sets env['HAVE_PYTHON_CONFIG'] and, on success, env['PYTHON_CONFIG']
    (a dict with CPPPATH, LIBPATH, LIBS, VERSION).  Returns early, leaving
    HAVE_PYTHON_CONFIG False, when the Python library or Python.h cannot
    be located.
    """
    env['HAVE_PYTHON_CONFIG'] = False
    # Print all the config vars
    # for x in distutils.sysconfig.get_config_vars():
    #
    #     k = str(x)
    #     v = str(distutils.sysconfig.get_config_var(x))
    #
    #     nu.dbg_print(env, __file__, "%-16s : %s" % (k, v))
    #--------------------------------------------------------------------------
    # Ask Python for specifics
    include_dir = distutils.sysconfig.get_config_var('INCLUDEPY')
    version = distutils.sysconfig.get_python_version()
    libname = distutils.sysconfig.get_config_var('LDLIBRARY')
    libdir = distutils.sysconfig.get_config_var('LIBDIR')
    libversion = distutils.sysconfig.get_config_var('VERSION')
    #--------------------------------------------------------------------------
    # Do some extra work on Windows boxes
    if libname is None and on_windows:
        libname = "python%s.lib" % libversion
    elif on_mac:
        libname = "python%s.dylib" % libversion
    if libname is None:
        warnings.warn(
            "\n\nCould not find the system Python library\n\n")
    nu.dbg_print(env, __file__, "python_exe: %s" % sys.executable)
    nu.dbg_print(env, __file__, "python_include_dir: %s" % include_dir)
    nu.dbg_print(env, __file__, "version: %s" % version)
    nu.dbg_print(env, __file__, "python_libname: %s" % libname)
    nu.dbg_print(env, __file__, "python_lib_dir: %s" % libdir)
    #--------------------------------------------------------------------------
    # Search for the directory containing python.so
    # Collect search paths: every directory-valued sysconfig variable is a
    # candidate.  NOTE(review): get_config_vars() values may be ints or other
    # non-path objects; os.path.isdir() happens to tolerate them (int is
    # treated as a file descriptor) -- confirm this is intentional.
    lib_keys = [x for x in distutils.sysconfig.get_config_vars()]
    lib_search_paths = set()
    for key in lib_keys:
        path = distutils.sysconfig.get_config_var(key)
        if os.path.isdir(path):
            path = os.path.realpath(path)
            lib_search_paths.add(path)
    # If on windows, add PYTHON\libs
    if on_windows:
        for path in list(lib_search_paths):
            if path.endswith("Lib"):
                path = path.replace("Lib", "libs")
                if os.path.isdir(path):
                    lib_search_paths.add(path)
        prefix = os.path.realpath(os.path.dirname(sys.executable))
        path = os.path.join(prefix, "libs")
        if os.path.isdir(path):
            lib_search_paths.add(path)
    # Search for the library in each path; when several paths contain the
    # library, the last one visited wins (set iteration order).
    lib_dir = None
    for path in lib_search_paths:
        name = libname
        if on_mac:
            name = "lib" + libname
        lib = os.path.join(path, name)
        if os.path.isfile(lib):
            lib_dir = os.path.dirname(lib)
    if lib_dir is None:
        path_str = ""
        for p in lib_search_paths:
            path_str += "\t%s\n" % p
        warnings.warn(
            "\n\nCould not find the Python library %s while searching these "
            "paths:\n\n %s\n\nDo you have the python-dev package installed?\n" % (
                repr(libname), path_str))
        return
    nu.dbg_print(env, __file__, "python_lib_dir: %s" % lib_dir)
    # Check that Python.h can be found
    python_h = os.path.join(include_dir, "Python.h")
    if not os.path.isfile(python_h):
        message = "Can't find Python.h: %s\n" % python_h
        message += "\nDo you have the python-dev package installed?\n"
        warnings.warn(message)
        return
    #--------------------------------------------------------------------------
    # Construction vars
    # LIBS, stip off the library prefix and suffix
    lib = str(libname)
    prefix = env.subst('$SHLIBPREFIX')
    suffix = env.subst('$SHLIBSUFFIX')
    if lib.startswith(prefix):
        lib = lib[len(prefix):]
    if lib.endswith(suffix):
        lib = lib[:-len(suffix)]
    LIBS = [lib]
    # CPPPATH
    CPPPATH = [include_dir]
    # LIBPATH
    LIBPATH = [lib_dir]
    if on_windows:
        # Warn when the runtime DLL will likely not be found at run time
        python_dll = lib.replace('.lib', '.dll')
        found = False
        paths_to_search = [lib_dir, distutils.sysconfig.BASE_EXEC_PREFIX]
        for dir_ in paths_to_search:
            full_path_dll = os.path.join(dir_, python_dll)
            if os.path.isfile(full_path_dll):
                found = True
                if dir_ not in os.environ['PATH']:
                    warnings.warn(f"{dir_} is not in the system PATH variable, {python_dll} might not be found!")
        if not found:
            warnings.warn(f"Could not find {python_dll} after looking in {paths_to_search}")
        # Check that PYTHONPATH includes VIRTUAL_ENV\Lib\site-packages.
        # NOTE(review): the two warning strings below contain typos
        # ("thsi") -- left untouched here since they are runtime output.
        found = True
        if os.environ.get("VIRTUAL_ENV"):
            if "PYTHONPATH" not in os.environ:
                warnings.warn('VIRTUAL_ENV is set but not PYTHONPATH, this may result in not finding matplotlib from the C runtime.')
            else:
                site_packages = os.path.join(os.environ["VIRTUAL_ENV"], "Lib", "site-packages")
                if site_packages not in os.environ['PYTHONPATH']:
                    warnings.warn(r'Could not find VIRTUAL_ENV\Lib\site-packages in PYTHONPATH, thsi may result in not finding matplotlib from the C runtime.')
    python_config = {
        'CPPPATH' : CPPPATH,
        'LIBPATH' : LIBPATH,
        'LIBS' : LIBS,
        'VERSION' : version,
    }
    env['PYTHON_CONFIG'] = python_config
    s = ""
    for k, v in python_config.items():
        s += "\t%-10s: %s\n" %(k, repr(v))
    nu.dbg_print(env, __file__, "\nPYTHON_CONFIG = \n%s" % s)
    env['HAVE_PYTHON_CONFIG'] = True
def exists(env):
    """SCons tool hook: report that this tool is always available."""
    return True
| weegreenblobbie/nsound | site_scons/site_tools/ImportPythonConfig.py | Python | gpl-2.0 | 7,292 |
# Copyright 2011 Jaap Karssenberg <[email protected]>
# This module contains a number of meta test to check coding style
# and packaging
import tests
import os
import sys
import copy
import re
import subprocess
import inspect
def zim_modules():
	'''Yield (dotted name, module object) for every module in the zim source'''
	for file in tests.zim_pyfiles():
		# Turn a path like zim/gui/widgets.py into zim.gui.widgets
		name = file[:-3].replace('/', '.')
		if os.name == 'nt':
			name = name.replace('\\', '.')
		if name.endswith('.__init__'):
			name = name[:-9]
		# __import__ returns the top-level package; walk down to the leaf
		mod = __import__(name)
		for part in name.split('.')[1:]:
			mod = getattr(mod, part)
		yield name, mod
@tests.skipUnless(os.path.isdir('./.git'), 'Not a git source folder')
class TestGit(tests.TestCase):
	'''Fail when the working tree contains files unknown to git.'''

	def runTest(self):
		# "git clean -dn" is a dry run: it lists untracked files and
		# directories without deleting anything.
		unknown = subprocess.check_output(['git', 'clean', '-dn'])
		if unknown:
			unknown = unknown.decode(sys.getfilesystemencoding())
			raise AssertionError('File unknown to git - need to be added or ignored:\n' + unknown)
		# (removed a dead "else: pass" branch from the original)
class TestCompileAll(tests.TestCase):

	def runTest(self):
		'''Test if all modules compile'''
		# Importing is sufficient: a SyntaxError in any module raises here
		for name, module in zim_modules():
			#~ print('>>', name)
			self.assertIsNotNone(module)
@tests.slowTest
class TestDist(tests.TestCase):
	'''Sanity checks for the packaging / distribution metadata'''

	def runTest(self):
		# Check build_dist script
		from setup import fix_dist
		fix_dist()

		# Check desktop file; best-effort -- the validator may not be installed
		try:
			subprocess.check_call(['desktop-file-validate', 'xdg/zim.desktop'])
		except OSError:
			print("Could not run desktop-file-validate")
#~ @tests.slowTest
#~ class TestNotebookUpgrade(tests.TestCase):
#~
#~ def runTest(self):
#~ '''Test if included notebooks are up to date'''
#~ from zim.newfs import LocalFolder
#~ from zim.notebook import init_notebook
#~ notebook = init_notebook(LocalFolder('data/manual'))
#~ self.assertTrue(not notebook.needs_upgrade)
class TestCoding(tests.TestCase):
	'''This test case enforces some coding style items'''

	def __init__(self, *a):
		self._code_files = []
		tests.TestCase.__init__(self, *a)

	def list_code(self):
		'''Return all python files as a list of (path, text) tuples'''
		if not self._code_files:
			self._read_code()
			assert len(self._code_files) > 10
		return copy.deepcopy(self._code_files)

	def _read_code(self):
		# Walk the zim and tests trees and slurp every .py file
		self._code_files = []
		for root in ('zim', 'tests'):
			for dir, dirs, files in os.walk(root):
				for basename in files:
					if basename.endswith('.py'):
						file = dir.replace('\\', '/') + '/' + basename
						if file == 'tests/package.py': # skip ourselves
							continue
						#~ print('READING', file)
						fh = open(file, encoding='UTF-8')
						self._code_files.append((file, fh.read()))
						fh.close()

	def testWrongDependencies(self):
		'''Check clean dependencies'''
		allow_gtk = ('zim/gui/', 'zim/inc/', 'zim/plugins/', 'tests/')
		#import_re = re.compile('^from gi.repository import (Gtk|Gdk|Gio|GObject)', re.M)
		import_re = re.compile('^from gi.repository import (Gtk|Gdk|Gio)', re.M)
			# only match global imports - allow import in limited scope
		for file, code in self.list_code():
			if os.name == 'nt':
				file = file.replace('\\', '/')
			if any(map(file.startswith, allow_gtk)):
				continue # skip
			match = import_re.search(code)
			klass = match.group(0) if match else None
			self.assertFalse(match, '%s imports %s, this is not allowed' % (file, klass))

	def testWrongMethod(self):
		'''Check for a couple of constructs to be avoided'''
		for file, code in self.list_code():
			self.assertFalse('Gtk.Entry(' in code, '%s uses Gtk.Entry - use zim.gui.widgets.InputEntry instead' % file)
			#~ self.assertFalse('connect_object(' in code, '%s uses connect_object() - use connect() instead to prevent reference leaking' % file)
			self.assertFalse('Gtk.HPaned(' in code, '%s uses Gtk.HPaned - use zim.gui.widgets.HPaned instead' % file)
			self.assertFalse('Gtk.VPaned(' in code, '%s uses Gtk.VPaned - use zim.gui.widgets.VPaned instead' % file)

			if not file.endswith('pageview.py'):
				self.assertFalse('string.letters' in code, '%s uses string.letters - this can case locale dependent issues' % file)
				self.assertFalse('string.lowercase' in code, '%s uses string.lowercase - this can case locale dependent issues' % file)
				self.assertFalse('string.uppercase' in code, '%s uses string.uppercase - this can case locale dependent issues' % file)

			if not file.endswith('widgets.py'):
				self.assertFalse('Gtk.ScrolledWindow(' in code, '%s uses Gtk.ScrolledWindow - use zim.gui.widgets.ScrolledWindow instead' % file)

			if not file.endswith('clipboard.py'):
				self.assertFalse('Gtk.Clipboard(' in code, '%s uses Gtk.Clipboard - use zim.gui.clipboard.Clipboard instead' % file)

			if not file.endswith('config.py'):
				self.assertFalse('os.environ\[' in code, '%s uses os.environ - use zim.config.get_environ() instead' % file)

	def testIndenting(self):
		# FIXME need real parser to be more robust for comments, multi-line strings etc.
		# for now we just check lines after a line ending with ":"
		# assume python itself warns us for changes in the middle of a block
		white = re.compile(r'^(\s*)')
		for file, code in self.list_code():
			if file.startswith('zim/inc/') or file.endswith('generictreemodel.py'):
				continue
			lineno = 0
			start_block = False
			for line in code.splitlines():
				lineno += 1
				text = line.strip()
				def_line = text.startswith('def ') or text.startswith('class ')
				if start_block or def_line:
					m = white.match(line)
					# BUG FIX: the original used str(m.groups(1)), which is
					# the repr of a tuple like "('\t\t',)" rather than the
					# captured whitespace itself; the space check only worked
					# by accident because the repr never contains a literal
					# space unless the indent does.
					indent = m.group(1)
					self.assertFalse(' ' in indent, 'Indenting should use tabs - file: %s line %s' % (file, lineno))
				start_block = def_line and line.rstrip().endswith(':')

	def testLoggerDefined(self):
		# Common to forget this import, and only notice it when an exception
		# happens much later
		for file, code in self.list_code():
			if 'logger.' in code:
				assert 'logger = logging.getLogger(' in code, 'Forgot to define "logger" in %s' % file
@tests.expectedFailure
class TestDocumentation(tests.TestCase):
	'''Check epydoc-style docstrings across all zim modules.

	Marked expectedFailure: the docs are known to be incomplete, so this
	test is informational only.
	'''

	def runTest(self):
		for modname, mod in zim_modules():
			self.assertDocumentationOK(mod, modname)
			for name, obj in self.walk_code(mod, modname):
				if not '.inc.' in name:
					self.assertDocumentationOK(obj, name)
					if hasattr(obj, '__signals__'):
						self.assertSignalSpecOK(obj, mod.__file__)

	def walk_code(self, obj, objname):
		# Yield classes, methods, and functions top down
		for name, member in inspect.getmembers(obj):
			if name == '__class__':
				continue
			name = objname + '.' + name
			if inspect.isclass(member):
				if member.__module__ != objname:
					continue # skip imported class
				yield name, member
				for child in self.walk_code(member, name): # recurs
					yield child
			elif inspect.isfunction(member) \
			or inspect.ismethod(member):
				yield name, member

	def assertDocumentationOK(self, obj, name):
		'''Parse the docstring fields of obj and verify them against its signature'''
		#~ print('CHECK docs for', name)
		doc = inspect.getdoc(obj)
		if not doc:
			return # For now do not bitch about missing docs..

		# Check fields
		fields = self.parseFields(doc, name)
		if not fields:
			return

		# Check call signature for functions
		if inspect.isfunction(obj) \
		or inspect.ismethod(obj):
			# For now we do not complain about missing docs, just mismatches
			documented = set(
				list(fields.get('param', {}).keys()) +
				list(fields.get('keyword', {}).keys())
			)
			if documented:
				(args, varargs, keywords, defaults) = inspect.getargspec(obj)
				defined = set(args)
				if args and args[0] in ('self', 'klass'):
					defined.discard(args[0])
				if varargs:
					defined.add(varargs)
				if keywords:
					defined.add(keywords)

				if set(defined) != {'arg', 'kwarg'}:
					# ignore mismatched due to generic decorators
					self.assertEqual(documented, defined,
						msg='Mismatch in documented parameters for %s\n'
						'Declared: %s\nDocumented: %s' %
						(name, tuple(defined), tuple(documented))
					)

			# TODO can we also check whether doc should define @returns ??

		# Check signature for @signal
		if 'signal' in fields:
			for spec in fields['signal']:
				# e.g. "C{signal-name (L{Page}, L{Path})}: Emitted when opening"
				if not re.match('^C{[\w-]+ \(.*?\)\}:', spec):
					self.fail('Signal description in %s does not follow templates\n'
						'Is: %s\nShould be like "C{signal-name (arg1, arg2)}: description"'
						% (name, spec)
					)

	# keys are known fields, if values is True, a param is
	# required for the first ":"
	known_fields = {
		'param': True,
		'type': True,
		'keyword': True,
		'returns': False,
		'rtype': False,
		'raises': True,
		'cvar': True,
		'ivar': True,
		'todo': False,
		'note': False,
		'newfield': True,
	}
	# fields that may occur multiple times and are collected in a list
	collect_fields = ('signal',)

	def parseFields(self, doc, name):
		# Parse files same as epydoc - and check them on the fly
		fields = {}
		for line in doc.splitlines():
			m = re.match('@(\w+)\s*(.*?):', line)
			if m:
				line = line[m.end():].strip()
				field, arg = m.group(1), m.group(2)
				if field in self.known_fields:
					if self.known_fields[field]:
						if not arg:
							self.fail('Doc for %s is missing argument for @%s' % (name, field))
						else:
							if not field in fields:
								fields[field] = {}
							fields[field][arg] = line

						# special case - learn new fields
						if field == 'newfield':
							self.known_fields[arg] = False
					elif field in self.collect_fields:
						if not field in fields:
							fields[field] = []
						fields[field].append(line)
					else:
						fields[field] = line
				else:
					self.fail('Doc for %s has unknown field @%s' % (name, field))
			elif re.match('@(\w+)', line):
				self.fail('Syntax error in docs for %s\nMissing \':\' in "%s"' % (name, line))
			else:
				pass

		return fields

	def assertSignalSpecOK(self, obj, file):
		# Each signal spec must be a 3-tuple whose third item is the
		# tuple of argument types
		for name, spec in list(obj.__signals__.items()):
			self.assertTrue(
				isinstance(spec, tuple) and len(spec) == 3 and isinstance(spec[2], tuple),
				msg='Signal spec is malformed for %s::%s in %s' % (obj.__name__, name, file)
			)
| jaap-karssenberg/zim-desktop-wiki | tests/package.py | Python | gpl-2.0 | 9,944 |
#!/usr/bin/python
"""
execute.py
Execute a command in the default shell.
Copyright (C) 2013 William Kettler <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import subprocess
import signal
class Execute(Exception):
    """Wraps unexpected errors raised while launching or running the subprocess."""
class Timeout(Exception):
    """Raised when a command exceeds its allotted timeout."""
class Retcode(Exception):
    """Reserved for signalling unexpected return codes (not raised in this module)."""
def alarm_handler(signum, frame):
    """SIGALRM handler: translate the alarm into a Timeout exception."""
    raise Timeout
def execute(cmd, timeout=None):
    """
    Execute a command in the default shell. If a timeout is defined the command
    will be killed if the timeout is exceeded.

    Inputs:
        cmd     (str): Command to execute
        timeout (int): Command timeout in seconds
    Outputs:
        retcode (int): Return code
        output       : Combined STDOUT/STDERR, stripped, or None if empty
                       (the original docstring claimed a list; the code has
                       always returned the stripped communicate() buffer)
    """
    # Pre-bind so the Timeout handler can tell whether Popen ever ran;
    # previously an alarm firing before assignment raised NameError.
    phandle = None

    # Define the timeout signal
    if timeout:
        signal.signal(signal.SIGALRM, alarm_handler)
        signal.alarm(timeout)

    try:
        # Execute the command and wait for the subprocess to terminate
        # STDERR is redirected to STDOUT
        phandle = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                   stderr=subprocess.STDOUT)

        # Read the stdout/sterr buffers and retcode
        stdout, _stderr = phandle.communicate()
        retcode = phandle.returncode
    except Timeout:
        # Kill the running process, if it ever started
        if phandle is not None:
            phandle.kill()
        raise Timeout("command timeout of %ds exceeded" % timeout)
    except Exception as e:
        raise Execute(e)
    finally:
        # Always disarm the alarm; previously it was only cleared on the
        # success path, leaving a live SIGALRM armed after errors.
        if timeout:
            signal.alarm(0)

    # Split empty output into None for convenience
    if stdout:
        output = stdout.strip()
    else:
        output = None

    return retcode, output
| wkettler/pyzfs | execute.py | Python | gpl-2.0 | 2,315 |
# -*- coding: utf-8 -*-
################################################################################
# Copyright (C) 2012 Travis Shirk <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
################################################################################
import os
from fabric.api import run, put
from pavement import SRC_DIST_TGZ, DOC_DIST, MD5_DIST, SRC_DIST_ZIP
# Remote directory (relative to the deploy user's home) that holds eyeD3 releases
RELEASE_D = "~/www/eyeD3/releases"
def deploy_sdist():
    '''Deploy .tgz, .zip, and .md5'''
    # Upload both archive formats plus the tarball's checksum file
    put("./dist/%s" % SRC_DIST_TGZ, RELEASE_D)
    put("./dist/%s" % SRC_DIST_ZIP, RELEASE_D)
    put("./dist/%s.md5" % os.path.splitext(SRC_DIST_TGZ)[0], RELEASE_D)
def deploy_docs():
    '''Deploy docs tarball and install.'''
    put("./dist/%s" % DOC_DIST, RELEASE_D)
    # Unpack into the live site, dropping the tarball's top-level directory
    run("tar xzf %s -C ./www/eyeD3 --strip-components=1" %
        os.path.join(RELEASE_D, DOC_DIST))
def deploy():
    '''Full release deployment: source archives first, then the documentation.'''
    deploy_sdist()
    deploy_docs()
| daltonsena/eyed3 | fabfile.py | Python | gpl-2.0 | 1,595 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2015-2016 James Clark <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
bhextractor_makePCs.py
Construct catalogues and principal component analysis for NR BBH waveforms; save
the PCA results to file using the file dump method on the PCA result object
This is a pretty simple script but it might be preferable to set up the
configuration in a config parser & ini file.
"""
import sys
from bhex_utils import bhex_wavedata as bwave
from bhex_utils import bhex_pca as bpca
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Some useful info:
#
#valid_series = ["Eq-series", "HRq-series", "HR-series", "Lq-series",
# "RO3-series", "Sq-series", "S-series-v2", "TP2-series"
# "TP-series"]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# USER INPUT
# Options common to train & test data:
# Sampling / scaling configuration shared by train & test data
SI_deltaT = 1./512
SI_datalen= 4.0
total_mass = 100.
distance=1. # Mpc
#train_series_names = ['HRq-series'] # (see above for valid choices)
train_series_names = ["Eq-series", "HRq-series", "HR-series", "Lq-series",
        "RO3-series", "Sq-series", "S-series-v2", "TP2-series",
        "TP-series"]
#train_series_names = ['HRq-series'] # (see above for valid choices)
#
# Modify for imposing parameter bounds on the catalogue:
#
#train_bounds=None
# Restrict to non-spinning systems: both spin magnitudes pinned to zero
train_bounds=dict()
train_bounds['a1'] = [0, 0]
train_bounds['a2'] = [0, 0]
#train_bounds['q'] = [-np.inf, 3]
catalogue_name = 'NonSpinning'
# PCA result attributes to serialise (NR-frame and SI-frame decompositions)
save_pcs = ['NRhplusTimeSeriesPCA', 'NRhcrossTimeSeriesPCA',
        'NRAmpTimeSeriesPCA', 'NRPhaseTimeSeriesPCA', 'SIhplusTimeSeriesPCA',
        'SIhcrossTimeSeriesPCA', 'SIAmpTimeSeriesPCA', 'SIPhaseTimeSeriesPCA']
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Do the work:
print '~~~~~~~~~~~~~~~~~~~~~'
print 'Selecting Simulations'
print ''
train_simulations = \
        bwave.simulation_details(series_names=train_series_names,
                param_bounds=train_bounds, Mmin30Hz=total_mass)
print '~~~~~~~~~~~~~~~~~~~~~'
print 'Building NR catalogue'
print ''
train_catalogue = bwave.waveform_catalogue(train_simulations, ref_mass=total_mass,
        SI_deltaT=SI_deltaT, SI_datalen=SI_datalen, distance=distance)
# Dump catalogue to pickle
train_catalogue.file_dump(catalogue_name=catalogue_name)
#
# Do the PCA
#
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
print 'Performing PCA (training only)'
print ''
pca = bpca.waveform_pca(train_catalogue)
pca.file_dump(pca_attrs=save_pcs, pcs_filename=catalogue_name)
print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
print 'DONE.'
| astroclark/bhextractor | bin/bhex_makePCs.py | Python | gpl-2.0 | 3,267 |
# A part of pdfrw (https://github.com/pmaupin/pdfrw)
# Copyright (C) 2015 Patrick Maupin, Austin, Texas
# MIT license -- See LICENSE.txt for details
''' This module contains a function to find all the XObjects
in a document, and another function that will wrap them
in page objects.
'''
from .objects import PdfDict, PdfArray, PdfName
def find_objects(source, valid_types=(PdfName.XObject, None),
                 valid_subtypes=(PdfName.Form, PdfName.Image),
                 no_follow=(PdfName.Parent,),
                 isinstance=isinstance, id=id, sorted=sorted,
                 reversed=reversed, PdfDict=PdfDict):
    '''
    Find all the objects of a particular kind in a document
    or array. Defaults to looking for Form and Image XObjects.
    This could be done recursively, but some PDFs
    are quite deeply nested, so we do it without
    recursion.
    Note that we don't know exactly where things appear on pages,
    but we aim for a sort order that is (a) mostly in document order,
    and (b) reproducible. For arrays, objects are processed in
    array order, and for dicts, they are processed in key order.

    Yields PdfDict objects whose Type is in valid_types and whose
    Subtype is in valid_subtypes.
    '''
    # The builtins bound as keyword defaults above are a deliberate
    # micro-optimization: they become fast local lookups in the loop.
    container = (PdfDict, PdfArray)
    # Allow passing a list of pages, or a dict
    if isinstance(source, PdfDict):
        source = [source]
    else:
        source = list(source)
    # Explicit-stack traversal; 'visited' holds object ids so shared or
    # cyclic structures are only walked once.
    visited = set()
    source.reverse()
    while source:
        obj = source.pop()
        if not isinstance(obj, container):
            continue
        myid = id(obj)
        if myid in visited:
            continue
        visited.add(myid)
        if isinstance(obj, PdfDict):
            if obj.Type in valid_types and obj.Subtype in valid_subtypes:
                yield obj
            # Queue the dict's values in key order, skipping no_follow keys
            # (e.g. /Parent, which would walk back up the tree).
            # NOTE(review): iteritems() here is the PdfDict API provided by
            # pdfrw — confirm availability on the targeted pdfrw version.
            obj = [y for (x, y) in sorted(obj.iteritems())
                   if x not in no_follow]
        else:
            # TODO: This forces resolution of any indirect objects in
            # the array. It may not be necessary. Don't know if
            # reversed() does any voodoo underneath the hood.
            # It's cheap enough for now, but might be removeable.
            obj and obj[0]
            source.extend(reversed(obj))
def wrap_object(obj, width, margin):
    ''' Wrap a single Form or Image XObject inside a brand-new page dict.

        width and margin (a 4-item sequence: left, bottom, right, top)
        are only used for images, to scale them onto the page while
        preserving their aspect ratio.

        Raises TypeError for any other XObject subtype.
    '''
    contents = PdfDict(indirect=True)
    kind = obj.Subtype
    if kind == PdfName.Form:
        # A form XObject already carries a content stream; reuse it
        # verbatim along with its resources and bounding box.
        contents._stream = obj.stream
        contents.Length = obj.Length
        contents.Filter = obj.Filter
        contents.DecodeParms = obj.DecodeParms
        resources = obj.Resources
        mbox = obj.BBox
    elif kind == PdfName.Image:
        # Scale the image to the usable width between the side margins.
        left = margin[0]
        bottom = margin[1]
        usable_w = width - margin[0] - margin[2]
        img_w, img_h = float(obj.Width), float(obj.Height)
        scaled_h = 1.0 * usable_w / img_w * img_h
        height = scaled_h + margin[1] + margin[3]
        # Render the numbers compactly (strip trailing zeros / dots).
        operands = tuple(('%.9f' % value).rstrip('0').rstrip('.')
                         for value in (usable_w, scaled_h, left, bottom))
        contents.stream = 'q %s 0 0 %s %s %s cm /MyImage Do Q' % operands
        resources = PdfDict(XObject=PdfDict(MyImage=obj))
        mbox = PdfArray((0, 0, width, height))
    else:
        raise TypeError("Expected Form or Image XObject")
    return PdfDict(
        indirect=True,
        Type=PdfName.Page,
        MediaBox=mbox,
        Resources=resources,
        Contents=contents,
    )
def trivial_xobjs(maxignore=300):
    ''' Build a predicate that flags "trivial" XObjects: tiny streams
        made only of numbers plus a q/Q/cm/Do skeleton, i.e. containers
        that merely reference other XObjects.
    '''
    boilerplate = set('q Q cm Do'.split())
    image_subtype = PdfName.Image

    def check(obj):
        # Images always count as real content.
        if obj.Subtype == image_subtype:
            return False
        stream = obj.stream
        if len(stream) < maxignore:
            # Drop names and operator boilerplate, then see whether only
            # (possibly signed / fractional) numbers remain.
            tokens = (tok for tok in stream.split()
                      if not tok.startswith('/') and tok not in boilerplate)
            cleaned = (tok.replace('.', '').replace('-', '') for tok in tokens)
            if all(tok.isdigit() for tok in cleaned):
                return True
    return check
def page_per_xobj(xobj_iter, width=8.5 * 72, margin=0.0 * 72,
                  image_only=False, ignore=trivial_xobjs(),
                  wrap_object=wrap_object):
    ''' page_per_xobj wraps every XObj found
        in its own page object.
        width and margin are used to set image sizes.

        margin may be a single number (same margin on all four sides) or
        a sequence of up to four numbers (left, bottom, right, top).
        Set image_only=True to skip Form XObjects.
    '''
    try:
        iter(margin)
    except TypeError:
        # A single number means the same margin on every side.
        margin = [margin]
    while len(margin) < 4:
        margin *= 2
    if isinstance(xobj_iter, (list, dict)):
        xobj_iter = find_objects(xobj_iter)
    for obj in xobj_iter:
        if not ignore(obj):
            # Bug fix: this previously compared against PdfName.IMage
            # ('/IMage'), which never matches the '/Image' subtype, so
            # image_only=True silently yielded nothing.
            if not image_only or obj.Subtype == PdfName.Image:
                yield wrap_object(obj, width, margin)
| Wintermute0110/plugin.program.advanced.MAME.launcher | pdfrw/pdfrw/findobjs.py | Python | gpl-2.0 | 4,640 |
/usr/local/Cellar/python/2.7.10/Frameworks/Python.framework/Versions/2.7/lib/python2.7/genericpath.py | HyperloopTeam/FullOpenMDAO | lib/python2.7/genericpath.py | Python | gpl-2.0 | 101 |
#!/usr/bin/env python
'''
Project Euler Problem 024 - Lexicographic permutations
----------------
Description:
What is the millionth lexicographic permutation of
the digits 0, 1, 2, 3, 4, 5, 6, 7, 8 and 9?
Solution:
2783915460
----------------
Haoliang Wang 02/03/2015
'''
from math import floor, factorial as f
# Target rank (1-based) and number of digits to permute.
# NOTE: Python 2 semantics are relied on below: range() returns a list
# (so t.pop works) and N / f(...) is integer floor division.
N = 1000000
n = 10
s = []          # digits of the answer, built left to right
t = range(n)    # remaining unused digits, ascending
for i in range(1, n + 1):
    # Fixing the i-th digit leaves (n-i)! permutations of the rest, so
    # d is the index of the block (and hence the digit) containing rank N.
    d = int(floor(N / f(n - i)))
    s.append(t.pop(d))
    N -= f(n - i) * d
    if N == 0:
        # Rank lands exactly on the last permutation of the current
        # block: the remaining digits follow in descending order.
        s += list(reversed(t))
        break
print s
| haoliangx/Project-Euler | src/P-024.py | Python | gpl-2.0 | 549 |
# Version triple for the player package. Presumably (major, minor, extra)
# with None meaning no extra/stage component — TODO confirm against the
# code that consumes autoradio.player.VERSION.
VERSION = (0, "1.0", None)
| pat1/autoradio | autoradio/player/__init__.py | Python | gpl-2.0 | 27 |
import re
import string
from types import StringType
import nsiqcppstyle_util
import os
import urllib
try:
    # hashlib superseded the md5 module in Python 2.5; fall back to the
    # old module on older interpreters.
    import hashlib
    md5_constructor = hashlib.md5
except ImportError:
    import md5
    md5_constructor = md5.new
# Update server hosting the version metadata and replacement files.
url = "http://nsiqcppstyle.appspot.com"
def Update(currentVersion):
    """Check the update server for a newer release and install changed files.

    Returns True when any file was replaced, False when already up to date.
    A failed download bare-returns (None, which is falsy like False).
    Raises Exception when the version info cannot be fetched or parsed.
    """
    import httplib
    import urllib2
    systemKey = nsiqcppstyle_util.GetSystemKey()
    # Get the latest version info
    try:
        print 'Update: checking for update'
        print url + "/update/" + systemKey
        request = urllib2.urlopen(url + "/update/" + systemKey)
        response = request.read()
    except urllib2.HTTPError as e:
        raise Exception(
            'Unable to get latest version info - HTTPError = ' + str(e))
    except urllib2.URLError as e:
        raise Exception(
            'Unable to get latest version info - URLError = ' + str(e))
    except httplib.HTTPException as e:
        raise Exception('Unable to get latest version info - HTTPException')
    except Exception as e:
        raise Exception(
            'Unable to get latest version info - Exception = ' + str(e))
    updateInfo = None
    import updateagent.minjson
    try:
        # Response is JSON; presumably {'version': ..., 'files': [...]}
        # given the keys accessed below.
        updateInfo = updateagent.minjson.safeRead(response)
    except Exception as e:
        print e
        raise Exception('Unable to get latest version info. Try again later.')
    if Version(updateInfo['version']) > Version(currentVersion):
        print 'A new version is available.'
        # Loop through the new files and call the download function
        for agentFile in updateInfo['files']:
            #win32str = agentFile["name"].replace("/", "\\")
            eachFileName = agentFile["name"]
            # Binary artifacts are never auto-updated.
            if (eachFileName.endswith(".dll") or
                eachFileName.endswith(".zip") or
                eachFileName.endswith(".exe")):
                continue
            filestr = os.path.join(nsiqcppstyle_util.GetRuntimePath(),
                                   agentFile["name"])
            # Skip files whose on-disk md5 already matches the server's.
            if os.path.exists(filestr):
                checksum = md5_constructor()
                f = file(filestr, 'rb').read()
                checksum.update(f)
                if agentFile["md5"] == checksum.hexdigest():
                    continue
            agentFile['tempFile'] = DownloadFile(url, agentFile, systemKey)
            if agentFile['tempFile'] is None:
                print "Update Failed while downloading : " + agentFile['name']
                # NOTE(review): bare return yields None (falsy), not False.
                return
            agentFile['new'] = True
        import shutil
        runtimePath = nsiqcppstyle_util.GetRuntimePath()
        # Second pass: move every freshly downloaded temp file into place.
        for agentFile in updateInfo['files']:
            eachFileName = agentFile["name"]
            if (eachFileName.endswith(".dll") or
                eachFileName.endswith(".zip") or
                eachFileName.endswith(".exe")):
                continue
            if agentFile.get('new', None) is not None:
                print 'Updating ' + agentFile['name']
                newModule = os.path.join(runtimePath, agentFile['name'])
                try:
                    if os.path.exists(newModule):
                        os.remove(newModule)
                    basedirname = os.path.dirname(newModule)
                    if not os.path.exists(basedirname):
                        os.makedirs(basedirname)
                    shutil.move(agentFile['tempFile'], newModule)
                except OSError as e:
                    # Best-effort install: failure to replace one file is
                    # silently ignored (existing behaviour).
                    pass
        return True
    return False
def DownloadFile(url, agentFile, systemKey, recursed=False):
print 'Downloading ' + agentFile['name']
downloadedFile = urllib.urlretrieve(url + '/update/' + systemKey +
"/" + agentFile['name'])
checksum = md5_constructor()
f = file(downloadedFile[0], 'rb')
part = f.read()
checksum.update(part)
f.close()
# Do we have a match?
if checksum.hexdigest() == agentFile['md5']:
return downloadedFile[0]
else:
# Try once more
if recursed == False:
DownloadFile(url, agentFile, systemKey, True)
else:
print agentFile['name'] + ' did not match its checksum - it is corrupted. This may be caused by network issues so please try again in a moment.'
return None
class Version:
    """Loose, dotted version string compared component-wise (Python 2).

    Components are split on dots / digit runs / letter runs; numeric
    components compare as ints, others as strings (via __cmp__).
    """
    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)
    def __init__(self, vstring=None):
        if vstring:
            self.parse(vstring)
    def parse(self, vstring):
        # I've given up on thinking I can reconstruct the version string
        # from the parsed tuple -- so I just store the string here for
        # use by __str__
        self.vstring = vstring
        # Python 2: filter() returns a list here, which the indexed
        # assignment below relies on.
        components = filter(lambda x: x and x != '.',
                            self.component_re.split(vstring))
        for i in range(len(components)):
            try:
                components[i] = int(components[i])
            except ValueError:
                # Non-numeric component (e.g. 'beta'): keep as string.
                pass
        self.version = components
    def __str__(self):
        return self.vstring
    def __repr__(self):
        return "LooseVersion ('%s')" % str(self)
    def __cmp__(self, other):
        # Accept plain strings as the comparison target.
        if isinstance(other, StringType):
            other = Version(other)
        return cmp(self.version, other.version)
| kunaltyagi/nsiqcppstyle | updateagent/agent.py | Python | gpl-2.0 | 5,305 |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
LrsPlugin
A QGIS plugin
Linear reference system builder and editor
-------------------
begin : 2013-10-02
copyright : (C) 2013 by Radim Blažek
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
# Import the PyQt and QGIS libraries
from qgis.core import QgsVectorLayer, QgsProviderRegistry
# changes done to vector layer attributes are not stored correctly in project file
# http://hub.qgis.org/issues/8997 -> recreate temporary provider first to construct uri
# To add methods on layers, we must use manager, not extended QgsVectorLayer,
# because layers may be stored in project and created by QGIS.
# Inherited layers are only used to create layer with type and attributes
from .lrserrorfields import LRS_ERROR_FIELDS
class LrsErrorLayer(QgsVectorLayer):
    """Memory vector layer pre-configured with the LRS error fields."""

    def __init__(self, uri, baseName):
        # Build a throwaway memory provider first so that the final URI
        # embeds the attribute definitions; attributes added to the layer
        # afterwards are not stored correctly in the project file
        # (http://hub.qgis.org/issues/8997).
        memory_provider = QgsProviderRegistry.instance().createProvider('memory', uri)
        memory_provider.addAttributes(LRS_ERROR_FIELDS.toList())
        full_uri = memory_provider.dataSourceUri()
        super(LrsErrorLayer, self).__init__(full_uri, baseName, 'memory')
| blazek/lrs | lrs/lrs/error/lrserrorlayer.py | Python | gpl-2.0 | 2,001 |
#!/usr/bin/evn python
import OledTestCaseWrapper
import unittest
import time
class OledMessageDisplayTestCase(OledTestCaseWrapper.OledInitializationWrapper):
    """Message display: short text shows statically, long text scrolls.

    Opcode 0x06 presumably sets the (NUL-terminated) display message —
    TODO confirm against the OLED firmware docs. The sleeps leave time
    for a visual check on the hardware.
    """
    def test_display_short_message_should_not_scroll(self):
        success, msg = self._oled.Cmd('%c%s' % (0x06, 'no scrolling\0'))
        self.assertTrue(success, msg)
        time.sleep(1)
    def test_display_long_message_should_scroll(self):
        success, msg = self._oled.Cmd('%c%s' % (0x06, 'Just a scrolling message\0'))
        self.assertTrue(success, msg)
        time.sleep(2)
class OledMessageDisplayBoundsTestCase(OledTestCaseWrapper.OledInitializationWrapper):
    """Boundary lengths for the display-message command (0, 32, 64, 128
    characters). Several cases are skipped: they need a visual check or
    expose known firmware issues (see the skip reasons).
    """
    def test_display_empty_selected_message(self):
        success, msg = self._oled.Cmd('%c%s' % (0x06, 'pre empty\0'))
        self.assertTrue(success, msg)
        time.sleep(1)
        # An empty (just the terminator) message must still be accepted.
        success, msg = self._oled.Cmd('%c%s' % (0x06, '\0'))
        self.assertTrue(success, msg)
        time.sleep(1)
    @unittest.skip("Needs fixing, visual check")
    def test_display_selected_message_32_chars(self):
        success, msg = self._oled.Cmd('%c%s' % (0x06, 'A short message to scroll 123456\0'))
        self.assertTrue(success, msg)
        time.sleep(15)
    @unittest.skip("Needs fixing, visual check")
    def test_display_selected_message_64_chars(self):
        success, msg = self._oled.Cmd('%c%s' % (0x06, 'A short message to scroll 123456 A short message to scroll 1234\0'))
        self.assertTrue(success, msg)
        time.sleep(5)
    @unittest.skip("Fix 128 char freeze, visual check")
    def test_display_selected_message_128_chars(self):
        # NOTE(review): this case sends opcode 0x08, unlike the others —
        # confirm whether that is intentional.
        success, msg = self._oled.Cmd('%c%s' % (0x08, 'A short message to scroll 123456 A short message to scroll 12345 A short message to scroll 123456 A short message \0'))
        self.assertTrue(success, msg)
        time.sleep(15)
class OledScrollWindowTestCase(OledTestCaseWrapper.OledInitializationWrapper):
    """Scroll-window behaviour: opcode 0x06 loads a message, 0x07
    presumably commits/scrolls it into the window — TODO confirm
    opcode meanings against the OLED firmware docs.
    """
    def test_write_selected_message(self):
        success, msg = self._oled.Cmd('%c%s' % (0x06, 'P(Si=qi)\0'))
        self.assertTrue(success, msg)
        time.sleep(0)
        success, msg = self._oled.Cmd('%c' % (0x07))
        self.assertTrue(success, msg)
    def test_scroll_message_window(self):
        # Push two short lines, then a long line that should wrap.
        success, msg = self._oled.Cmd('%c%s' % (0x06, '1\0'))
        self.assertTrue(success, msg)
        success, msg = self._oled.Cmd('%c' % (0x07))
        self.assertTrue(success, msg)
        time.sleep(0)
        success, msg = self._oled.Cmd('%c%s' % (0x06, '2\0'))
        self.assertTrue(success, msg)
        success, msg = self._oled.Cmd('%c' % (0x07))
        self.assertTrue(success, msg)
        success, msg = self._oled.Cmd('%c%s' % (0x06, 'Een heel lang zinnetje om te wrappen\0'))
        self.assertTrue(success, msg)
        success, msg = self._oled.Cmd('%c' % (0x07))
        self.assertTrue(success, msg)
def suite():
    """Assemble the OLED test suite from the enabled case classes."""
    loader = unittest.TestLoader()
    display_suite = loader.loadTestsFromTestCase(OledMessageDisplayTestCase)
    bounds_suite = loader.loadTestsFromTestCase(OledMessageDisplayBoundsTestCase)
    scroll_suite = loader.loadTestsFromTestCase(OledScrollWindowTestCase)
    # Only the scroll-window cases are currently enabled; the display and
    # bounds suites are built but deliberately left out of the run list.
    enabled_suites = [scroll_suite]
    return unittest.TestSuite(enabled_suites)


if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2).run(suite())
def cheese_and_crackers(cheese_count, boxes_of_crackers):
    # Print a short party-supplies report for the given counts (Python 2).
    print "You have %d cheeses!" % cheese_count
    print "You have %d boxes of crackers!" % boxes_of_crackers
    print "Man that's enough for the party!"
    print "Get a blanket.\n"
# Demonstrate the three ways of passing arguments:
# literals, variables, and expressions.
print "We can just give the function numbers directly:"
cheese_and_crackers(20,30)
print "OR, we can use variables from our script"
amount_of_cheese = 10
amount_of_crackers = 50
cheese_and_crackers(amount_of_cheese, amount_of_crackers)
print "We can even do math inside:"
cheese_and_crackers(10+20,5+6)
print "And we can combine the two, variables and math"
cheese_and_crackers(amount_of_cheese+10,amount_of_crackers+1000)
| anantk17/lpthw | ex19.py | Python | gpl-2.0 | 668 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from parameterized import parameterized
from twisted.internet import defer
from twisted.trial import unittest
from buildbot import config
from buildbot.process.results import FAILURE
from buildbot.process.results import SUCCESS
from buildbot.process.results import WARNINGS
from buildbot.steps import python
from buildbot.test.reactor import TestReactorMixin
from buildbot.test.steps import ExpectShell
from buildbot.test.steps import TestBuildStepMixin
# Canned tool outputs fed to the steps under test via ExpectShell.stdout().
# A clean sphinx-build run with no warnings.
log_output_success = '''\
Making output directory...
Running Sphinx v1.0.7
loading pickled environment... not yet created
No builder selected, using default: html
building [html]: targets for 24 source files that are out of date
updating environment: 24 added, 0 changed, 0 removed
reading sources... [ 4%] index
reading sources... [ 8%] manual/cfg-builders
...
copying static files... done
dumping search index... done
dumping object inventory... done
build succeeded.
'''
# A sphinx-build run where nothing was out of date.
log_output_nochange = '''\
Running Sphinx v1.0.7
loading pickled environment... done
No builder selected, using default: html
building [html]: targets for 0 source files that are out of date
updating environment: 0 added, 0 changed, 0 removed
looking for now-outdated files... none found
no targets are out of date.
'''
# A sphinx-build run emitting exactly two warnings.
log_output_warnings = '''\
Running Sphinx v1.0.7
loading pickled environment... done
building [html]: targets for 1 source files that are out of date
updating environment: 0 added, 1 changed, 0 removed
reading sources... [100%] file
file.rst:18: (WARNING/2) Literal block expected; none found.
looking for now-outdated files... none found
pickling environment... done
checking consistency... done
preparing documents... done
writing output... [ 50%] index
writing output... [100%] file
index.rst:: WARNING: toctree contains reference to document 'preamble' that \
doesn't have a title: no link will be generated
writing additional files... search
copying static files... done
dumping search index... done
dumping object inventory... done
build succeeded, 2 warnings.'''
# A sphinx-build -W run: the first warning is promoted to an error.
log_output_warnings_strict = '''\
Running Sphinx v1.0.7
loading pickled environment... done
building [html]: targets for 1 source files that are out of date
updating environment: 0 added, 1 changed, 0 removed
reading sources... [100%] file
Warning, treated as error:
file.rst:18:Literal block expected; none found.
'''
# The warning lines the Sphinx step is expected to collect in its
# 'warnings' log file.
warnings = '''\
file.rst:18: (WARNING/2) Literal block expected; none found.
index.rst:: WARNING: toctree contains reference to document 'preamble' that \
doesn't have a title: no link will be generated\
'''
# this is from a run of epydoc against the buildbot source..
epydoc_output = '''\
[...............
+---------------------------------------------------------------------
| In /home/dustin/code/buildbot/t/buildbot/master/buildbot/
| ec2.py:
| Import failed (but source code parsing was successful).
| Error: ImportError: No module named boto (line 19)
|
[....
Warning: Unable to extract the base list for
twisted.web.resource.EncodingResourceWrapper: Bad dotted name
[......
+---------------------------------------------------------------------
| In /home/dustin/code/buildbot/t/buildbot/master/buildbot/worker/
| ec2.py:
| Import failed (but source code parsing was successful).
| Error: ImportError: No module named boto (line 28)
|
[...........
+---------------------------------------------------------------------
| In /home/dustin/code/buildbot/t/buildbot/master/buildbot/status/
| status_push.py:
| Import failed (but source code parsing was successful).
| Error: ImportError: No module named status_json (line 40)
|
[....................<paragraph>Special descriptor for class __provides__</paragraph>
'''
class BuildEPYDoc(TestBuildStepMixin, TestReactorMixin, unittest.TestCase):
    """Unit tests for the BuildEPYDoc step's warning/error counting."""
    def setUp(self):
        self.setup_test_reactor()
        return self.setup_test_build_step()
    def tearDown(self):
        return self.tear_down_test_build_step()
    def test_sample(self):
        # epydoc_output (defined above) contains one 'Warning:' and three
        # 'Error:' entries, so the step must fail with warn=1 err=3.
        self.setup_step(python.BuildEPYDoc())
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['make', 'epydocs'])
            .stdout(epydoc_output)
            .exit(1),
        )
        self.expect_outcome(result=FAILURE,
                            state_string='epydoc warn=1 err=3 (failure)')
        return self.run_step()
class PyLint(TestBuildStepMixin, TestReactorMixin, unittest.TestCase):
    """Unit tests for the PyLint step: return-code flags, output parsing
    across pylint output formats/versions, and optional storage of
    parsed warnings as test results.

    Fix: the parameterized case labels were inverted — ('no_results', True)
    actually ran with store_results=True. The labels now match the value
    they carry, so test names describe what the case does.
    """
    def setUp(self):
        self.setup_test_reactor()
        return self.setup_test_build_step()
    def tearDown(self):
        return self.tear_down_test_build_step()
    @parameterized.expand([
        ('with_results', True),
        ('no_results', False)
    ])
    def test_success(self, name, store_results):
        self.setup_step(python.PyLint(command=['pylint'], store_results=store_results))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('Your code has been rated at 10/10')
            .exit(python.PyLint.RC_OK))
        self.expect_outcome(result=SUCCESS, state_string='pylint')
        if store_results:
            self.expect_test_result_sets([('Pylint warnings', 'code_issue', 'message')])
            self.expect_test_results([])
        return self.run_step()
    @parameterized.expand([
        ('with_results', True),
        ('no_results', False)
    ])
    def test_error(self, name, store_results):
        self.setup_step(python.PyLint(command=['pylint'], store_results=store_results))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('W: 11: Bad indentation. Found 6 spaces, expected 4\n'
                    'E: 12: Undefined variable \'foo\'\n')
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_ERROR)))
        self.expect_outcome(result=FAILURE,
                            state_string='pylint error=1 warning=1 (failure)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-error', 1)
        if store_results:
            self.expect_test_result_sets([('Pylint warnings', 'code_issue', 'message')])
            # note that no results are submitted for tests where we don't know the location
        return self.run_step()
    def test_header_output(self):
        # pylint-like text in the log *header* must not be parsed.
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .log('stdio', header='W: 11: Bad indentation. Found 6 spaces, expected 4\n')
            .exit(0))
        self.expect_outcome(result=SUCCESS, state_string='pylint')
        return self.run_step()
    def test_failure(self):
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('W: 11: Bad indentation. Found 6 spaces, expected 4\n'
                    'F: 13: something really strange happened\n')
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_FATAL)))
        self.expect_outcome(result=FAILURE,
                            state_string='pylint fatal=1 warning=1 (failure)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-fatal', 1)
        return self.run_step()
    def test_failure_zero_returncode(self):
        # Make sure that errors result in a failed step when pylint's
        # return code is 0, e.g. when run through a wrapper script.
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('W: 11: Bad indentation. Found 6 spaces, expected 4\n'
                    'E: 12: Undefined variable \'foo\'\n')
            .exit(0))
        self.expect_outcome(result=FAILURE,
                            state_string='pylint error=1 warning=1 (failure)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-error', 1)
        return self.run_step()
    def test_regex_text(self):
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('W: 11: Bad indentation. Found 6 spaces, expected 4\n'
                    'C: 1:foo123: Missing docstring\n')
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)))
        self.expect_outcome(result=WARNINGS,
                            state_string='pylint convention=1 warning=1 (warnings)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-convention', 1)
        self.expect_property('pylint-total', 2)
        return self.run_step()
    def test_regex_text_0_24(self):
        # pylint >= 0.24.0 prints out column offsets when using text format
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('W: 11,0: Bad indentation. Found 6 spaces, expected 4\n'
                    'C: 3,10:foo123: Missing docstring\n')
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)))
        self.expect_outcome(result=WARNINGS,
                            state_string='pylint convention=1 warning=1 (warnings)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-convention', 1)
        self.expect_property('pylint-total', 2)
        return self.run_step()
    def test_regex_text_1_3_1(self):
        # at least pylint 1.3.1 prints out space padded column offsets when
        # using text format
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('W: 11, 0: Bad indentation. Found 6 spaces, expected 4\n'
                    'C: 3,10:foo123: Missing docstring\n')
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)))
        self.expect_outcome(result=WARNINGS,
                            state_string='pylint convention=1 warning=1 (warnings)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-convention', 1)
        self.expect_property('pylint-total', 2)
        return self.run_step()
    @parameterized.expand([
        ('with_results', True),
        ('no_results', False)
    ])
    def test_regex_text_2_0_0(self, name, store_results):
        # pylint 2.0.0 changed default format to include file path
        self.setup_step(python.PyLint(command=['pylint'], store_results=store_results))
        stdout = (
            'test.py:9:4: W0311: Bad indentation. Found 6 spaces, expected 4 (bad-indentation)\n' +
            'test.py:1:0: C0114: Missing module docstring (missing-module-docstring)\n'
        )
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout(stdout)
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)))
        self.expect_outcome(result=WARNINGS,
                            state_string='pylint convention=1 warning=1 (warnings)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-convention', 1)
        self.expect_property('pylint-total', 2)
        if store_results:
            self.expect_test_result_sets([('Pylint warnings', 'code_issue', 'message')])
            self.expect_test_results([
                (1000, 'test.py:9:4: W0311: Bad indentation. Found 6 spaces, expected 4 ' +
                 '(bad-indentation)',
                 None, 'test.py', 9, None),
                (1000, 'test.py:1:0: C0114: Missing module docstring (missing-module-docstring)',
                 None, 'test.py', 1, None),
            ])
        return self.run_step()
    def test_regex_text_2_0_0_invalid_line(self):
        # A non-numeric line number must not be counted as a warning.
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        stdout = (
            'test.py:abc:0: C0114: Missing module docstring (missing-module-docstring)\n'
        )
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout(stdout)
            .exit(python.PyLint.RC_CONVENTION))
        self.expect_outcome(result=SUCCESS, state_string='pylint')
        self.expect_property('pylint-warning', 0)
        self.expect_property('pylint-convention', 0)
        self.expect_property('pylint-total', 0)
        return self.run_step()
    def test_regex_text_ids(self):
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('W0311: 11: Bad indentation.\n'
                    'C0111: 1:funcName: Missing docstring\n')
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)))
        self.expect_outcome(result=WARNINGS,
                            state_string='pylint convention=1 warning=1 (warnings)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-convention', 1)
        self.expect_property('pylint-total', 2)
        return self.run_step()
    def test_regex_text_ids_0_24(self):
        # pylint >= 0.24.0 prints out column offsets when using text format
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('W0311: 11,0: Bad indentation.\n'
                    'C0111: 3,10:foo123: Missing docstring\n')
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)))
        self.expect_outcome(result=WARNINGS,
                            state_string='pylint convention=1 warning=1 (warnings)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-convention', 1)
        self.expect_property('pylint-total', 2)
        return self.run_step()
    @parameterized.expand([
        ('with_results', True),
        ('no_results', False)
    ])
    def test_regex_parseable_ids(self, name, store_results):
        self.setup_step(python.PyLint(command=['pylint'], store_results=store_results))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('test.py:9: [W0311] Bad indentation.\n'
                    'test.py:3: [C0111, foo123] Missing docstring\n')
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)))
        self.expect_outcome(result=WARNINGS,
                            state_string='pylint convention=1 warning=1 (warnings)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-convention', 1)
        self.expect_property('pylint-total', 2)
        if store_results:
            self.expect_test_result_sets([('Pylint warnings', 'code_issue', 'message')])
            self.expect_test_results([
                (1000, 'test.py:9: [W0311] Bad indentation.', None, 'test.py', 9, None),
                (1000, 'test.py:3: [C0111, foo123] Missing docstring', None, 'test.py', 3, None),
            ])
        return self.run_step()
    def test_regex_parseable(self):
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('test.py:9: [W] Bad indentation.\n'
                    'test.py:3: [C, foo123] Missing docstring\n')
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)))
        self.expect_outcome(result=WARNINGS,
                            state_string='pylint convention=1 warning=1 (warnings)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-convention', 1)
        self.expect_property('pylint-total', 2)
        return self.run_step()
    def test_regex_parseable_1_3_1(self):
        """ In pylint 1.3.1, output parseable is deprecated, but looks like
        that, this is also the new recommended format string:
        --msg-template={path}:{line}: [{msg_id}({symbol}), {obj}] {msg}
        """
        self.setup_step(python.PyLint(command=['pylint'], store_results=False))
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['pylint'])
            .stdout('test.py:9: [W0311(bad-indentation), ] '
                    'Bad indentation. Found 6 '
                    'spaces, expected 4\n'
                    'test.py:3: [C0111(missing-docstring), myFunc] Missing '
                    'function docstring\n')
            .exit((python.PyLint.RC_WARNING | python.PyLint.RC_CONVENTION)))
        self.expect_outcome(result=WARNINGS,
                            state_string='pylint convention=1 warning=1 (warnings)')
        self.expect_property('pylint-warning', 1)
        self.expect_property('pylint-convention', 1)
        self.expect_property('pylint-total', 2)
        return self.run_step()
class PyFlakes(TestBuildStepMixin, TestReactorMixin, unittest.TestCase):
    """Unit tests for the PyFlakes step: one case per message category
    (unused import, undefined name, redefinition, import *, misc) plus
    header-filtering.

    Consistency fix: test_content_in_header used the literal 0 instead of
    the SUCCESS constant (SUCCESS == 0) used by every other case.
    """
    def setUp(self):
        self.setup_test_reactor()
        return self.setup_test_build_step()
    def tearDown(self):
        return self.tear_down_test_build_step()
    def test_success(self):
        self.setup_step(python.PyFlakes())
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['make', 'pyflakes'])
            .exit(0))
        self.expect_outcome(result=SUCCESS, state_string='pyflakes')
        return self.run_step()
    def test_content_in_header(self):
        self.setup_step(python.PyFlakes())
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['make', 'pyflakes'])
            # don't match pyflakes-like output in the header
            .log('stdio', header="foo.py:1: 'bar' imported but unused\n")
            .exit(0))
        self.expect_outcome(result=SUCCESS, state_string='pyflakes')
        return self.run_step()
    def test_unused(self):
        self.setup_step(python.PyFlakes())
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['make', 'pyflakes'])
            .stdout("foo.py:1: 'bar' imported but unused\n")
            .exit(1))
        self.expect_outcome(result=WARNINGS,
                            state_string='pyflakes unused=1 (warnings)')
        self.expect_property('pyflakes-unused', 1)
        self.expect_property('pyflakes-total', 1)
        return self.run_step()
    def test_undefined(self):
        # Undefined names are the only category treated as a failure.
        self.setup_step(python.PyFlakes())
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['make', 'pyflakes'])
            .stdout("foo.py:1: undefined name 'bar'\n")
            .exit(1))
        self.expect_outcome(result=FAILURE,
                            state_string='pyflakes undefined=1 (failure)')
        self.expect_property('pyflakes-undefined', 1)
        self.expect_property('pyflakes-total', 1)
        return self.run_step()
    def test_redefs(self):
        self.setup_step(python.PyFlakes())
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['make', 'pyflakes'])
            .stdout("foo.py:2: redefinition of unused 'foo' from line 1\n")
            .exit(1))
        self.expect_outcome(result=WARNINGS,
                            state_string='pyflakes redefs=1 (warnings)')
        self.expect_property('pyflakes-redefs', 1)
        self.expect_property('pyflakes-total', 1)
        return self.run_step()
    def test_importstar(self):
        self.setup_step(python.PyFlakes())
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['make', 'pyflakes'])
            .stdout("foo.py:1: 'from module import *' used; unable to detect undefined names\n")
            .exit(1))
        self.expect_outcome(result=WARNINGS,
                            state_string='pyflakes import*=1 (warnings)')
        self.expect_property('pyflakes-import*', 1)
        self.expect_property('pyflakes-total', 1)
        return self.run_step()
    def test_misc(self):
        self.setup_step(python.PyFlakes())
        self.expect_commands(
            ExpectShell(workdir='wkdir', command=['make', 'pyflakes'])
            .stdout("foo.py:2: redefinition of function 'bar' from line 1\n")
            .exit(1))
        self.expect_outcome(result=WARNINGS,
                            state_string='pyflakes misc=1 (warnings)')
        self.expect_property('pyflakes-misc', 1)
        self.expect_property('pyflakes-total', 1)
        return self.run_step()
class TestSphinx(TestBuildStepMixin, TestReactorMixin, unittest.TestCase):
    """Unit tests for the Sphinx step: config validation plus parsing of
    the canned sphinx-build outputs defined at the top of this module.
    """
    def setUp(self):
        self.setup_test_reactor()
        return self.setup_test_build_step()
    def tearDown(self):
        return self.tear_down_test_build_step()
    def test_builddir_required(self):
        # sphinx_builddir is a mandatory argument.
        with self.assertRaises(config.ConfigErrors):
            python.Sphinx()
    def test_bad_mode(self):
        with self.assertRaises(config.ConfigErrors):
            python.Sphinx(sphinx_builddir="_build", mode="don't care")
    def test_success(self):
        self.setup_step(python.Sphinx(sphinx_builddir="_build"))
        self.expect_commands(
            ExpectShell(workdir='wkdir',
                        command=['sphinx-build', '.', '_build'])
            .stdout(log_output_success)
            .exit(0)
        )
        self.expect_outcome(result=SUCCESS, state_string="sphinx 0 warnings")
        return self.run_step()
    def test_failure(self):
        # Non-zero exit with unrecognized output must fail the step.
        self.setup_step(python.Sphinx(sphinx_builddir="_build"))
        self.expect_commands(
            ExpectShell(workdir='wkdir',
                        command=['sphinx-build', '.', '_build'])
            .stdout('oh noes!')
            .exit(1)
        )
        self.expect_outcome(result=FAILURE,
                            state_string="sphinx 0 warnings (failure)")
        return self.run_step()
    def test_strict_warnings(self):
        # strict_warnings adds -W, promoting warnings to build failures.
        self.setup_step(python.Sphinx(sphinx_builddir="_build", strict_warnings=True))
        self.expect_commands(
            ExpectShell(workdir='wkdir',
                        command=['sphinx-build', '-W', '.', '_build'])
            .stdout(log_output_warnings_strict)
            .exit(1)
        )
        self.expect_outcome(result=FAILURE,
                            state_string="sphinx 1 warnings (failure)")
        return self.run_step()
    def test_nochange(self):
        self.setup_step(python.Sphinx(sphinx_builddir="_build"))
        self.expect_commands(
            ExpectShell(workdir='wkdir',
                        command=['sphinx-build', '.', '_build'])
            .stdout(log_output_nochange)
            .exit(0)
        )
        self.expect_outcome(result=SUCCESS,
                            state_string="sphinx 0 warnings")
        return self.run_step()
@defer.inlineCallbacks
def test_warnings(self):
self.setup_step(python.Sphinx(sphinx_builddir="_build"))
self.expect_commands(
ExpectShell(workdir='wkdir',
command=['sphinx-build', '.', '_build'])
.stdout(log_output_warnings)
.exit(0)
)
self.expect_outcome(result=WARNINGS,
state_string="sphinx 2 warnings (warnings)")
self.expect_log_file("warnings", warnings)
yield self.run_step()
self.assertEqual(self.step.statistics, {'warnings': 2})
def test_constr_args(self):
self.setup_step(python.Sphinx(sphinx_sourcedir='src',
sphinx_builddir="bld",
sphinx_builder='css',
sphinx="/path/to/sphinx-build",
tags=['a', 'b'],
strict_warnings=True,
defines=dict(
empty=None, t=True, f=False, s="str"),
mode='full'))
self.expect_commands(
ExpectShell(workdir='wkdir',
command=['/path/to/sphinx-build', '-b', 'css',
'-t', 'a', '-t', 'b', '-D', 'empty',
'-D', 'f=0', '-D', 's=str', '-D', 't=1',
'-E', '-W', 'src', 'bld'])
.stdout(log_output_success)
.exit(0)
)
self.expect_outcome(result=SUCCESS, state_string="sphinx 0 warnings")
return self.run_step()
| buildbot/buildbot | master/buildbot/test/unit/steps/test_python.py | Python | gpl-2.0 | 25,470 |
# Copyright (C) 2001-2006 Quantum ESPRESSO group
# This file is distributed under the terms of the
# GNU General Public License. See the file `License'
# in the root directory of the present distribution,
# or http://www.gnu.org/copyleft/gpl.txt .
#
# Author: Filippo Spiga ([email protected])
# Date: September 29, 2012
# Version: 1.3
import os
import os.path
import sys
import shutil
s=raw_input()
while s.strip()!='':
input_file = os.path.abspath(s)
backup_file = os.path.join(os.path.dirname(input_file), '.' + os.path.basename(input_file) + '.orig')
tmp_file = os.path.join(os.path.dirname(input_file), '.' + os.path.basename(input_file)+'~')
print input_file+": ",
if os.path.exists(backup_file) :
print "skipped."
else:
# read the current contents of the file
f = open( input_file )
text = f.read()
f.close()
# backup original file
shutil.copy (input_file, backup_file)
# open a different file for writing
f = open(tmp_file, 'w')
f.write("""
#if defined(__CUDA) && defined(__PHIGEMM)
#define dgemm UDGEMM
#define zgemm UZGEMM
#define DGEMM UDGEMM
#define ZGEMM UZGEMM
#if defined(__PHIGEMM_PROFILE)
#define _STRING_LINE_(s) #s
#define _STRING_LINE2_(s) _STRING_LINE_(s)
#define __LINESTR__ _STRING_LINE2_(__LINE__)
#define UDGEMM(TRANSA,TRANSB,M,N,K,ALPHA,A,LDA,B,LDB,BETA,C,LDC) phidgemm(TRANSA,TRANSB,M,N,K,ALPHA,A,LDA,B,LDB,BETA,C,LDC,__FILE__,__LINESTR__)
#define UZGEMM(TRANSA,TRANSB,M,N,K,ALPHA,A,LDA,B,LDB,BETA,C,LDC) phizgemm(TRANSA,TRANSB,M,N,K,ALPHA,A,LDA,B,LDB,BETA,C,LDC,__FILE__,__LINESTR__)
#else
#define UDGEMM phidgemm
#define UZGEMM phizgemm
#endif
#endif
""")
# write the original contents
f.write(text)
f.close()
# overwrite
os.rename(tmp_file, input_file)
print "success."
try:
s=raw_input()
except:
exit()
| fspiga/phiGEMM | scripts/addPhigemmSymbs.py | Python | gpl-2.0 | 1,821 |
# -*- coding: utf-8 -*-
from django import template
from django.template.defaultfilters import stringfilter
register = template.Library()
@register.filter
@stringfilter
def ka_translit(value, arg):
    """Transliterate a Georgian string into Latin script when the target
    language (first two letters of *arg*) is written in the Latin
    alphabet; otherwise return the string unchanged."""
    lang = arg[:2]
    if lang not in latin_langs:
        return value
    return value.translate(CHARMAP_GEO2LAT)

# (Georgian letter, Latin transliteration) pairs; the translate() table
# is derived from them once at import time.
_GEO2LAT_PAIRS = (
    (u'ა', u'a'), (u'ბ', u'b'), (u'გ', u'g'), (u'დ', u'd'),
    (u'ე', u'e'), (u'ვ', u'v'), (u'ზ', u'z'), (u'თ', u't'),
    (u'ი', u'i'), (u'კ', u'k'), (u'ქ', u'k'), (u'ლ', u'l'),
    (u'მ', u'm'), (u'ნ', u'n'), (u'ო', u'o'), (u'პ', u'p'),
    (u'ჟ', u'zh'), (u'რ', u'r'), (u'ს', u's'), (u'ტ', u't'),
    (u'უ', u'u'), (u'ფ', u'p'), (u'ღ', u'gh'), (u'ყ', u'q'),
    (u'შ', u'sh'), (u'ჩ', u'ch'), (u'ც', u'ts'), (u'ძ', u'dz'),
    (u'წ', u'ts'), (u'ჭ', u'ch'), (u'ხ', u'kh'), (u'ჯ', u'j'),
    (u'ჰ', u'h'),
)

CHARMAP_GEO2LAT = {ord(geo): lat for geo, lat in _GEO2LAT_PAIRS}

# Languages written with the Latin alphabet that we transliterate for.
latin_langs = ("en", "de")
| tigeorgia/CorpSearch | apps/django_transliterate/templatetags/transliterate.py | Python | gpl-2.0 | 1,139 |
from functools import reduce
def toposort(data):
    """Topologically sort a dependency graph.

    *data* maps each item to the set of items it depends on.  Yields
    sets of items in topological order: the first set has no
    dependencies, and every later set depends only on items from the
    preceding sets.  *data* is mutated/consumed in the process.

    >>> [sorted(batch) for batch in toposort({
    ...     2: {11}, 9: {11, 8}, 10: {11, 3}, 11: {7, 5}, 8: {7, 3}})]
    [[3, 5, 7], [8, 11], [2, 9, 10]]

    Raises AssertionError if cyclic dependencies remain.

    Based on http://code.activestate.com/recipes/578272-topological-sort/
    Bug fix: the original imported reduce from functools (Python 3) but
    then used the Python-2-only itervalues()/iterkeys()/iteritems(), so
    it could not run on either interpreter as written; it also crashed
    on an empty input dict (reduce with no initializer).
    """
    # Ignore self-dependencies.
    for item, deps in data.items():
        deps.discard(item)
    # Items that appear only as dependencies get an empty dependency
    # set so they are emitted in the first batch.  The set() initializer
    # makes reduce safe for an empty dict.
    referenced = reduce(set.union, data.values(), set())
    for item in referenced - set(data.keys()):
        data[item] = set()
    while True:
        ready = {item for item, deps in data.items() if not deps}
        if not ready:
            break
        yield ready
        data = {item: deps - ready
                for item, deps in data.items() if item not in ready}
    assert not data, ("Cyclic dependencies exist among these items:\n%s"
                      % '\n'.join(repr(x) for x in data.items()))
| velezj/pods.velezj-pods-utils | src/produce-to-build.py | Python | gpl-2.0 | 1,388 |
#
# Copyright 2008 Sun Microsystems, Inc. All rights reserved.
# Use is subject to license terms.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
import platform
import os
import _util
class User(object):
    """A particular user account and the privileges it can exercise."""

    # Privilege identifiers accepted by has_priv().
    PRIV_CLONE = 1
    PRIV_NFS_MOUNT = 2
    PRIV_QEMU_SYSTEM = 3
    PRIV_CREATE_DOMAIN = 4
    PRIV_CREATE_NETWORK = 5

    _privs = [PRIV_CLONE, PRIV_NFS_MOUNT, PRIV_QEMU_SYSTEM,
              PRIV_CREATE_DOMAIN, PRIV_CREATE_NETWORK]

    def __init__(self, euid):
        self._euid = euid

    def get_euid(self):
        return self._euid
    euid = property(get_euid)

    def has_priv(self, priv, conn=None):
        """Return whether this user is privileged enough for *priv*.

        The answer is approximate, particularly on Solaris.  Raises
        ValueError for an unknown privilege identifier.
        """
        if priv not in self._privs:
            raise ValueError('unknown privilege %s' % priv)
        if priv == self.PRIV_QEMU_SYSTEM:
            return self._euid == 0
        if priv == self.PRIV_CREATE_NETWORK:
            return self._euid == 0 or _util.is_qemu_system(conn)
        if platform.system() == 'SunOS':
            return self._sun_has_priv(priv, conn)
        # Everywhere else, holding a connection is taken as sufficient.
        return True

    def _sun_has_priv(self, priv, conn=None):
        """Solaris check via the process's ucred privilege sets.

        Only the current process's credentials can be inspected, so any
        other euid falls back to a plain root check.
        """
        if self._euid != User.current().euid:
            return self._euid == 0
        import ucred
        cred = ucred.get(os.getpid())
        if priv in (self.PRIV_CLONE,
                    self.PRIV_CREATE_DOMAIN,
                    self.PRIV_CREATE_NETWORK):
            return cred.has_priv('Effective', 'virt_manage')
        if priv == self.PRIV_NFS_MOUNT:
            return (cred.has_priv('Effective', 'sys_mount') and
                    cred.has_priv('Effective', 'net_privaddr'))
        # Any other privilege falls through to an implicit None (falsy).

    @staticmethod
    def current():
        """Return a User for the process's current effective uid."""
        return User(os.geteuid())
| palli/python-virtinst | virtinst/User.py | Python | gpl-2.0 | 2,726 |
from socket import *
from argparse import ArgumentParser
from bowman.utils import Connection
from math import sqrt
import sys, os
UNIT_TYPES_SYMBOLS = ["r", "k", "h", "w", "s", "a", "dm", "lm", "dr"]
class bot():
def __init__(self, remote_ip, remote_port):
    """Connect to a bowman server and run the bot.

    Note: the constructor blocks until the game is over -- main() is
    invoked as the last step of initialisation.
    """
    self.remote_ip = remote_ip
    self.remote_port = remote_port
    self.connect()
    self.world_info = None   # not used in the visible code
    self.unit_type = self.choice_unit_type()
    # Team bookkeeping slots; never assigned elsewhere in this file.
    self.tr_nums = None
    self.tb_nums = None
    self.team = None
    self.main()
def clear_screen(self):
    """Wipe the terminal using the platform's native clear command."""
    command = "cls" if sys.platform == "win32" else "clear"
    os.system(command)
def connect(self):
    """Open the TCP connection and wait for the server handshake.

    Raises a generic Exception unless the server greets with "hello".
    """
    self.sock = socket(AF_INET, SOCK_STREAM)
    self.sock.connect((self.remote_ip, self.remote_port))
    self.connection = Connection(self.sock)
    data = self.connection.get_pack()
    if data == "hello":
        return
    raise Exception("Oops! There is an server error")
def choice_unit_type(self):
return 'r'
def lose(self):
    """Server opcode "lo": we were defeated."""
    print("You lose!")

def win(self):
    """Server opcode "wi": we won."""
    print("You win!")

def miss(self):
    """Server opcode "mi": our last shot missed."""
    print("You missed!")

def nb(self):
    """Server opcode "nb": a wall blocked our last move."""
    print("You have been stopped by wall!")
def get_matrix(self):
    """Receive the next battlefield dump (raw text) from the server."""
    return self.connection.get_pack()
def receive_matrix(self):
    """Fetch the battlefield dump from the server and display it.

    The raw text is stashed in the module-level global ``data`` because
    parse_matrix() re-reads it later.  Cells sent as '?<sym>'
    (presumably hidden/fog-of-war information -- confirm against the
    server) are rendered as a bare '?'.
    """
    global data
    data = self.get_matrix()
    self.clear_screen()
    world = data.splitlines()
    for i in range(len(world)):
        world[i] = world[i].split()
        for j in range(len(world[i])):
            if world[i][j][0] == '?':
                world[i][j] = '?'
        world[i] = ' '.join(world[i])
    world = '\n'.join(world)
    print("Type 'help' or 'h' for help.")
    print(world)
def parse_matrix(self):
    """Split the last dump (module global ``data``) into game state.

    Returns (own unit symbol, own health, enemy symbols, enemy healths,
    map rows as lists of cell symbols).  The parser walks the server's
    banner lines positionally, so any protocol change breaks it; the
    format notes below are inferred from the indexing and should be
    confirmed against the server output.
    """
    mas = [str(x) for x in range(0, 10)]   # digit symbols; unused here
    world = data.splitlines()
    l = len(world)                         # unused
    u_n = []
    un = world[0].split()
    # Own unit symbol: second character of the last word of line 0.
    u_n = un[-1][1]
    # Own health: digits of word 2 up to the '/' separator.
    u_h = ''
    for i in range(len(un[2])):
        if un[2][i] == '/':
            break
        else:
            u_h += un[2][i]
    u_h = int(u_h)
    # Opponent lines follow until the first empty line: collect each
    # opponent's symbol (word 1) and health (word 3, digits before '/').
    a, op_n, op_h = 1, [], []
    while True:
        if world[a]:
            oph = ''
            oh = world[a].split()
            op_n.append(oh[1])
            op_hel = []                    # unused
            for i in range(len(oh[3])):
                if oh[3][i] == '/':
                    break
                else:
                    oph += oh[3][i]
            op_h.append(int(oph))
        else:
            break
        a += 1
    a += 1
    # Optional team line: comma-separated teammate symbols from word 4.
    u_team = []
    if world[a]:
        b = world[a].split()
        i = 4
        c = True
        while c:
            u_team.append(b[i][0])
            if b[i][-1] != ',':
                c = False
            i += 1
        a += 1
    a += 1
    # Tokenise every line; strip the '?' prefix from fogged cells.
    for i in range(len(world)):
        world[i] = world[i].split()
    for i in range(len(world)):
        for j in range(len(world[i])):
            if world[i][j][0] == '?':
                world[i][j] = world[i][j][1]
    # Drop teammates from the opponent list so allies are never
    # targeted.
    # NOTE(review): the scan compares op_n[j - b] but pops index j --
    # looks off-by-one after the first removal; verify intent.
    for i in range(len(u_team)):
        b = 0
        for j in range(len(op_n)):
            if op_n[j - b] == u_team[i]:
                q = op_n.pop(j)
                b += 1
    return u_n, u_h, op_n, op_h, world[a:]
def end_game(self):
    """Server opcode "eg": normal end of game; release the socket."""
    print("Game finished!")
    self.sock.close()

def abort_game(self):
    """Server opcode "ag": the server hit a fatal error; release the
    socket."""
    print("Game aborted, because fatal error has been raised on the server!")
    self.sock.close()

def ally_fire(self):
    """Server opcode "af": we tried to shoot a teammate."""
    print("This player is your ally!")

def team_lose(self):
    """Server opcode "tl": our team was defeated."""
    print("Your team lose!")

def team_win(self):
    """Server opcode "tw": our team won."""
    print("Your team win!")
def bfs(self, y, x, op, m, n, n2, mas, go):
ma = [x for x in range(1000)]
mop = [str(x) for x in range(10)]
p = []
for i in range(10):
if mop[i] != op:
p.append(mop[i])
if x - 1 != -1:
if m[y][x - 1] != '*' and m[y][x - 1] not in p:
if m[y][x - 1] == op:
go[y][x - 1] = [y, x]
return m, mas, go, True, [y, x - 1], m[y][x] + 1
if m[y][x - 1] not in ma:
m[y][x - 1] = m[y][x] + 1
mas.append([y, x - 1])
go[y][x - 1] = [y, x]
if y - 1 != -1:
if m[y - 1][x] != '*' and m[y - 1][x] not in p:
if m[y - 1][x] == op:
go[y - 1][x] = [y, x]
return m, mas, go, True, [y - 1, x], m[y][x] + 1
if m[y - 1][x] not in ma:
m[y - 1][x] = m[y][x] + 1
mas.append([y - 1, x])
go[y - 1][x] = [y, x]
if x + 1 < n:
if m[y][x + 1] != '*' and m[y][x + 1] not in p:
if m[y][x + 1] == op:
go[y][x + 1] = [y, x]
return m, mas, go, True, [y, x + 1], m[y][x] + 1
if m[y][x + 1] not in ma:
m[y][x + 1] = m[y][x] + 1
mas.append([y, x + 1])
go[y][x + 1] = [y, x]
if y + 1 < n2:
if m[y + 1][x] != '*' and m[y + 1][x] not in p:
if m[y + 1][x] == op:
go[y + 1][x] = [y, x]
return m, mas, go, True, [y + 1, x], m[y][x] + 1
if m[y + 1][x] not in ma:
m[y + 1][x] = m[y][x] + 1
mas.append([y + 1, x])
go[y + 1][x] = [y, x]
if x + 1 < n and y + 1 < n2:
if m[y + 1][x + 1] != '*' and m[y + 1][x + 1] not in p:
if m[y + 1][x + 1] == op:
go[y + 1][x + 1] = [y, x]
return m, mas, go, True, [y + 1, x + 1], m[y][x] + 1
if m[y + 1][x + 1] not in ma:
m[y + 1][x + 1] = m[y][x] + 1
mas.append([y + 1, x + 1])
go[y + 1][x + 1] = [y, x]
if x - 1 > -1 and y - 1 > -1:
if m[y - 1][x - 1] != '*' and m[y - 1][x - 1] not in p:
if m[y - 1][x - 1] == op:
go[y - 1][x - 1] = [y, x]
return m, mas, go, True, [y - 1, x - 1], m[y][x] + 1
if m[y - 1][x - 1] not in ma:
m[y - 1][x - 1] = m[y][x] + 1
mas.append([y - 1, x - 1])
go[y - 1][x - 1] = [y, x]
if y - 1 > -1 and x + 1 < n:
if m[y - 1][x + 1] != '*' and m[y - 1][x + 1] not in p:
if m[y - 1][x + 1] == op:
go[y - 1][x + 1] = [y, x]
return m, mas, go, True, [y - 1, x + 1], m[y][x] + 1
if m[y - 1][x + 1] not in ma:
m[y - 1][x + 1] = m[y][x] + 1
mas.append([y - 1, x + 1])
go[y - 1][x + 1] = [y, x]
if x - 1 > -1 and y + 1 < n2:
if m[y + 1][x - 1] != '*' and m[y + 1][x - 1] not in p:
if m[y + 1][x - 1] == op:
go[y + 1][x - 1] = [y, x]
return m, mas, go, True, [y + 1, x - 1], m[y][x] + 1
if m[y + 1][x - 1] not in ma:
m[y + 1][x - 1] = m[y][x] + 1
mas.append([y + 1, x - 1])
go[y + 1][x - 1] = [y, x]
return m, mas, go, False, [], 0
def dfs(self, matrix, u, op):
    """Breadth-first search (despite the name) from symbol *u* to *op*.

    Seeds the frontier with the cell containing u, expands it through
    bfs() until the target is found, then walks the predecessor table
    back to build the path [target, ..., start].  Mutates *matrix* in
    place: visited cells become int distances.
    """
    m = matrix
    # go[y][x] remembers the cell from which (y, x) was first reached.
    go = [[[] for x in range(len(m[y]))] for y in range(len(m))]
    n2, n = len(m), len(m[0])
    for i in range(len(m)):
        for j in range(len(m[i])):
            if m[i][j] == str(u):
                mas = [[i, j]]
                m[i][j] = 0
    # NOTE(review): if u never occurs in the matrix, mas is unbound and
    # the loop below raises UnboundLocalError; callers guarantee the
    # unit symbol is present.
    for i in range(n * n2):
        m, mas, go, b, res, ras = self.bfs(mas[i][0], mas[i][1], op, m, n, n2, mas, go)
        if b == True:
            break
    # Walk ras predecessor links back from the target cell.
    r = [res]
    for i in range(ras):
        r.append(go[res[0]][res[1]])
        res = go[res[0]][res[1]]
    return r
def parse_r(self, r):
r.reverse()
res = []
for i in range(len(r) - 1):
if r[i + 1][0] > r[i][0] and r[i + 1][1] > r[i][1]:
res.append(['c', 1])
elif r[i + 1][0] > r[i][0] and r[i + 1][1] == r[i][1]:
res.append(['s', 1])
elif r[i + 1][0] < r[i][0] and r[i + 1][1] < r[i][1]:
res.append(['q', 1])
elif r[i + 1][0] > r[i][0] and r[i + 1][1] < r[i][1]:
res.append(['z', 1])
elif r[i + 1][0] < r[i][0] and r[i + 1][1] > r[i][1]:
res.append(['e', 1])
elif r[i + 1][0] < r[i][0] and r[i + 1][1] == r[i][1]:
res.append(['w', 1])
elif r[i + 1][0] == r[i][0] and r[i + 1][1] > r[i][1]:
res.append(['d', 1])
elif r[i + 1][0] == r[i][0] and r[i + 1][1] < r[i][1]:
res.append(['a', 1])
return res
def parse_res(self, r):
res, a = [], []
for i in range(len(r)):
if a == []:
a = r[i]
elif a[0] == r[i][0]:
a[1] += 1
else:
res.append(a)
a = r[i]
if res == []:
res.append(a)
return res
def sqrt_mi(self, y1, y2, x1, x2):
if y1 >= y2 and x1 >= x2:
return round(sqrt((y1 - y2) ** 2 + (x1 - x2) ** 2))
elif y1 >= y2 and x2 >= x1:
return round(sqrt((y1 - y2) ** 2 + (x2 - x1) ** 2))
elif y2 >= y1 and x1 >= x2:
return round(sqrt((y2 - y1) ** 2 + (x1 - x2) ** 2))
elif y2 >= y1 and x2 >= x1:
return round(sqrt((y2 - y1) ** 2 + (x2 - x1) ** 2))
def g(self, world, u_n, op):
r = self.dfs(world, u_n, op)
res = self.parse_r(r)
go = self.parse_res(res)
return go[0][0] + ' ' + str(go[0][1])
def prompt(self):
    """Decide the command for the current turn.

    Strategy: pick the nearest enemy (a weak enemy, < 500 hp, within
    firing range is always preferred); then either shoot it
    ("f <sym>") when within range 9, walk towards it, or detour to a
    '+' cell when our own hp makes that attractive.  Returns the
    command string to send to the server.
    """
    u_n, u_h, op_n, op_h, world = self.parse_matrix()
    plus = False        # is there a '+' cell anywhere on the map?
    m = 1000            # best (smallest) distance found so far
    ind = 0             # index of the chosen target in op_n/op_h
    # Locate our own unit and any '+' cell.
    for i in range(len(world)):
        for j in range(len(world[i])):
            if world[i][j] == u_n:
                uy, ux = i, j
            elif world[i][j] == '+':
                plus = True
    # Choose the target: nearest enemy; a weak enemy (< 500 hp) inside
    # firing range wins outright (its distance is forced to 0 so no
    # later enemy can displace it).
    for x in range(len(op_n)):
        for i in range(len(world)):
            for j in range(len(world[i])):
                if world[i][j] == op_n[x]:
                    rr = self.sqrt_mi(i, uy, j, ux)
                    if op_h[x] < 500 and rr < 10:
                        m = 0
                        ind = x
                        r = rr
                    elif rr < m:
                        m = rr
                        ind = x
                        r = rr
    # NOTE(review): if no enemy symbol is present on the map, r (and
    # possibly uy/ux) are never assigned and the code below raises
    # UnboundLocalError.
    op = op_n[ind]
    oh = op_h[ind]
    if u_h > 1000:
        # Healthy: close in and shoot.
        if r > 9:
            return self.g(world, u_n, op)
        else:
            return 'f' + ' ' + op
    elif u_h + 100 > oh:
        # Roughly even with the target: prefer a '+' pickup if we must
        # travel anyway, otherwise engage.
        if r > 9 and plus:
            return self.g(world, u_n, '+')
        elif r > 9:
            return self.g(world, u_n, op)
        else:
            return 'f' + ' ' + op
    elif oh < 1300 and oh > 950:
        if r > 9 and plus:
            return self.g(world, u_n, '+')
        elif r > 9:
            return self.g(world, u_n, op)
        else:
            return 'f' + ' ' + op
    else:
        # Outmatched: run for a bonus if one exists.
        if plus:
            return self.g(world, u_n, '+')
        elif r > 9:
            return self.g(world, u_n, op)
        else:
            return 'f' + ' ' + op
def main(self):
    """Protocol loop: announce our unit type, then dispatch on the
    two-letter opcodes pushed by the server until the game ends."""
    self.connection.send_pack(self.unit_type)
    n = self.connection.get_pack()
    n = int(n)
    self.n = n   # integer sent by the server right after the unit
                 # choice -- presumably the board size; confirm
    print("Waiting for game start...")
    while True:
        data = self.connection.get_pack()
        if data == "go":
            # Our turn: compute a command and send it.
            string = self.prompt()
            while not string:
                string = self.prompt()
            self.connection.send_pack(string)
        elif data == "lo":
            self.lose()
        elif data == "wi":
            self.win()
        elif data == "mi":
            self.miss()
        elif data == "nb":
            self.nb()
        elif data == "mx":
            # New battlefield snapshot.
            self.receive_matrix()
        elif data == "af":
            self.ally_fire()
        elif data == "tw":
            self.team_win()
        elif data == "tl":
            self.team_lose()
        elif data == "eg":
            # Normal end of game.
            self.end_game()
            break
        elif data == "ag":
            # Server aborted the game.
            self.abort_game()
            break
        elif data == "pr":
            # FIXME(review): bot defines no print() method, so the
            # "pr" opcode raises AttributeError here.
            self.print()
def main():
    """Parse the command line and start a bot against the given server."""
    arg_parser = ArgumentParser(description="Bowman is a client-server console game. "
                                "See more: https://github.com/carzil/bowman")
    arg_parser.add_argument("ip", help="server IP address")
    arg_parser.add_argument("--port", default=9999, type=int, help="server port")
    options = arg_parser.parse_args()
    # Constructing the bot connects and plays the whole game.
    bot(options.ip, options.port)
if __name__ == "__main__":
main() | carzil/bowman | bot.py | Python | gpl-2.0 | 13,595 |
#!/usr/bin/env python
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()

# Standard library
import os

# Third-party
import pymongo
from pymongo import ASCENDING, DESCENDING
# Pick the MongoDB host by environment: Darwin (developer Macs) talks
# to a local server, everything else uses the internal address.
# NOTE(review): this line needs "import os" at the top of the file --
# the original module used os.popen here without ever importing os,
# which raises NameError at import time.
if "Darwin" in os.popen("uname").read():
    MONGOSERVER = 'localhost'
else:
    MONGOSERVER = '192.168.167.192'
MONGOPORT = 27017
def getDB():
    """Open a connection and return the newsrivr MongoDB database."""
    connection = pymongo.Connection(MONGOSERVER, MONGOPORT)
    return connection.newsrivr
def getCollUsers():
    """Return the 'users' collection."""
    return getDB().users

def getCollUnprocessedTweets():
    """Return the 'tweets' collection (tweets awaiting processing)."""
    return getDB().tweets

def getCollDrops():
    """Return the 'drops' collection."""
    return getDB().drops

def getCollImageMd5s():
    """Return the 'imagemd5' collection."""
    return getDB().imagemd5
def main():
    """Report collection sizes, wipe the transient collections, and
    report again.  Destructive: tweets, drops and imagemd5 are dropped;
    users are deliberately kept (the drop was left commented out)."""
    def report():
        print("users:", getCollUsers().count())
        print("tweets:", getCollUnprocessedTweets().count())
        print("drops:", getCollDrops().count())
        print("imagemd5:", getCollImageMd5s().count())

    report()
    #getCollUsers().drop()
    getCollUnprocessedTweets().drop()
    getCollDrops().drop()
    getCollImageMd5s().drop()
    report()
main()
print("done")
| erikdejonge/newsrivr | daemons/clear_newsrivr.py | Python | gpl-2.0 | 1,356 |
# encoding: utf-8
# module gtk._gtk
# from /usr/lib/python2.7/dist-packages/gtk-2.0/gtk/_gtk.so
# by generator 1.135
# no doc
# imports
import atk as __atk
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
from DrawingArea import DrawingArea
class Curve(DrawingArea):
"""
Object GtkCurve
Signals from GtkCurve:
curve-type-changed ()
Properties from GtkCurve:
curve-type -> GtkCurveType: Curve type
Is this curve linear, spline interpolated, or free-form
min-x -> gfloat: Minimum X
Minimum possible value for X
max-x -> gfloat: Maximum X
Maximum possible X value
min-y -> gfloat: Minimum Y
Minimum possible value for Y
max-y -> gfloat: Maximum Y
Maximum possible value for Y
Signals from GtkWidget:
composited-changed ()
show ()
hide ()
map ()
unmap ()
realize ()
unrealize ()
size-request (GtkRequisition)
size-allocate (GdkRectangle)
state-changed (GtkStateType)
parent-set (GtkWidget)
hierarchy-changed (GtkWidget)
style-set (GtkStyle)
direction-changed (GtkTextDirection)
grab-notify (gboolean)
child-notify (GParam)
mnemonic-activate (gboolean) -> gboolean
grab-focus ()
focus (GtkDirectionType) -> gboolean
move-focus (GtkDirectionType)
event (GdkEvent) -> gboolean
event-after (GdkEvent)
button-press-event (GdkEvent) -> gboolean
button-release-event (GdkEvent) -> gboolean
scroll-event (GdkEvent) -> gboolean
motion-notify-event (GdkEvent) -> gboolean
keynav-failed (GtkDirectionType) -> gboolean
delete-event (GdkEvent) -> gboolean
destroy-event (GdkEvent) -> gboolean
expose-event (GdkEvent) -> gboolean
key-press-event (GdkEvent) -> gboolean
key-release-event (GdkEvent) -> gboolean
enter-notify-event (GdkEvent) -> gboolean
leave-notify-event (GdkEvent) -> gboolean
configure-event (GdkEvent) -> gboolean
focus-in-event (GdkEvent) -> gboolean
focus-out-event (GdkEvent) -> gboolean
map-event (GdkEvent) -> gboolean
unmap-event (GdkEvent) -> gboolean
property-notify-event (GdkEvent) -> gboolean
selection-clear-event (GdkEvent) -> gboolean
selection-request-event (GdkEvent) -> gboolean
selection-notify-event (GdkEvent) -> gboolean
selection-received (GtkSelectionData, guint)
selection-get (GtkSelectionData, guint, guint)
proximity-in-event (GdkEvent) -> gboolean
proximity-out-event (GdkEvent) -> gboolean
drag-leave (GdkDragContext, guint)
drag-begin (GdkDragContext)
drag-end (GdkDragContext)
drag-data-delete (GdkDragContext)
drag-failed (GdkDragContext, GtkDragResult) -> gboolean
drag-motion (GdkDragContext, gint, gint, guint) -> gboolean
drag-drop (GdkDragContext, gint, gint, guint) -> gboolean
drag-data-get (GdkDragContext, GtkSelectionData, guint, guint)
drag-data-received (GdkDragContext, gint, gint, GtkSelectionData, guint, guint)
visibility-notify-event (GdkEvent) -> gboolean
client-event (GdkEvent) -> gboolean
no-expose-event (GdkEvent) -> gboolean
window-state-event (GdkEvent) -> gboolean
damage-event (GdkEvent) -> gboolean
grab-broken-event (GdkEvent) -> gboolean
query-tooltip (gint, gint, gboolean, GtkTooltip) -> gboolean
popup-menu () -> gboolean
show-help (GtkWidgetHelpType) -> gboolean
accel-closures-changed ()
screen-changed (GdkScreen)
can-activate-accel (guint) -> gboolean
Properties from GtkWidget:
name -> gchararray: Widget name
The name of the widget
parent -> GtkContainer: Parent widget
The parent widget of this widget. Must be a Container widget
width-request -> gint: Width request
Override for width request of the widget, or -1 if natural request should be used
height-request -> gint: Height request
Override for height request of the widget, or -1 if natural request should be used
visible -> gboolean: Visible
Whether the widget is visible
sensitive -> gboolean: Sensitive
Whether the widget responds to input
app-paintable -> gboolean: Application paintable
Whether the application will paint directly on the widget
can-focus -> gboolean: Can focus
Whether the widget can accept the input focus
has-focus -> gboolean: Has focus
Whether the widget has the input focus
is-focus -> gboolean: Is focus
Whether the widget is the focus widget within the toplevel
can-default -> gboolean: Can default
Whether the widget can be the default widget
has-default -> gboolean: Has default
Whether the widget is the default widget
receives-default -> gboolean: Receives default
If TRUE, the widget will receive the default action when it is focused
composite-child -> gboolean: Composite child
Whether the widget is part of a composite widget
style -> GtkStyle: Style
The style of the widget, which contains information about how it will look (colors etc)
events -> GdkEventMask: Events
The event mask that decides what kind of GdkEvents this widget gets
extension-events -> GdkExtensionMode: Extension events
The mask that decides what kind of extension events this widget gets
no-show-all -> gboolean: No show all
Whether gtk_widget_show_all() should not affect this widget
has-tooltip -> gboolean: Has tooltip
Whether this widget has a tooltip
tooltip-markup -> gchararray: Tooltip markup
The contents of the tooltip for this widget
tooltip-text -> gchararray: Tooltip Text
The contents of the tooltip for this widget
window -> GdkWindow: Window
The widget's window if it is realized
double-buffered -> gboolean: Double Buffered
Whether or not the widget is double buffered
Signals from GtkObject:
destroy ()
Properties from GtkObject:
user-data -> gpointer: User Data
Anonymous User Data Pointer
Signals from GObject:
notify (GParam)
"""
@classmethod
def do_curve_type_changed(cls, *args, **kwargs): # real signature unknown
pass
def get_vector(self, *args, **kwargs): # real signature unknown
pass
def reset(self, *args, **kwargs): # real signature unknown
pass
def set_curve_type(self, *args, **kwargs): # real signature unknown
pass
def set_gamma(self, *args, **kwargs): # real signature unknown
pass
def set_range(self, *args, **kwargs): # real signature unknown
pass
def set_vector(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__gtype__ = None # (!) real value is ''
| ProfessorX/Config | .PyCharm30/system/python_stubs/-1247972723/gtk/_gtk/Curve.py | Python | gpl-2.0 | 7,072 |
# Copyright (C) 2004-2008 Paul Cochrane
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
## @file test_plot.py
import unittest
import sys,os
from string import *
here = os.getcwd() + '/../../'
sys.path.append(here)
import pyvisi # this should import all of the pyvisi stuff needed
"""
Class and functions for testing the Renderer class
"""
class TestRenderer(unittest.TestCase):
    """
    Test the renderer object at the base pyvisi level: argument
    checking, render-window dimensions, and the eval/init string stacks.
    """

    def setUp(self):
        # A fresh Renderer for every test case.
        self.ren = pyvisi.Renderer()

    def testInit(self):
        """
        Tests initialisation of the Renderer object
        """
        classStr = self.ren.__class__.__name__
        self.assertEqual('Renderer', classStr)

    def testExactlyOneArgument(self):
        """
        Check that method only accepts one argument
        """
        # check setRenderWindowHeight
        self.assertRaises(TypeError, self.ren.setRenderWindowHeight)
        self.assertRaises(TypeError, self.ren.setRenderWindowHeight, 10, 20)
        # check setRenderWindowWidth
        self.assertRaises(TypeError, self.ren.setRenderWindowWidth)
        self.assertRaises(TypeError, self.ren.setRenderWindowWidth, 10, 20)
        # check addToEvalStack
        self.assertRaises(TypeError, self.ren.addToEvalStack)
        self.assertRaises(TypeError, self.ren.addToEvalStack, "moo", "baa")
        # check addToInitStack
        self.assertRaises(TypeError, self.ren.addToInitStack)
        self.assertRaises(TypeError, self.ren.addToInitStack, "moo", "baa")

    def testExactlyTwoArguments(self):
        """
        Check that method only accepts two arguments
        """
        self.assertRaises(TypeError, \
                self.ren.setRenderWindowDimensions)
        self.assertRaises(TypeError, \
                self.ren.setRenderWindowDimensions, 12)
        self.assertRaises(TypeError, \
                self.ren.setRenderWindowDimensions, 12, 14, 16)

    def testRenderWindowDefaultDims(self):
        """
        Check render window default width and height
        """
        self.assertEqual(640, self.ren.renderWindowWidth)  # width
        self.assertEqual(480, self.ren.renderWindowHeight) # height

    def testGetRenderWindowWidth(self):
        """
        Check getting the render window width
        """
        width = self.ren.getRenderWindowWidth()
        self.assertEqual(self.ren.renderWindowWidth, width)

    def testSetRenderWindowWidth(self):
        """
        Test setting the render window width
        """
        width = 720
        self.ren.setRenderWindowWidth(width)
        self.assertEqual(self.ren.getRenderWindowWidth(), width)

    def testGetRenderWindowHeight(self):
        """
        Check getting the render window height
        """
        height = self.ren.getRenderWindowHeight()
        self.assertEqual(self.ren.renderWindowHeight, height)

    def testSetRenderWindowHeight(self):
        """
        Test setting the render window height
        """
        height = 593
        self.ren.setRenderWindowHeight(height)
        self.assertEqual(self.ren.getRenderWindowHeight(), height)

    def testGetRenderWindowDimensions(self):
        """
        Test getting the render window width and height
        """
        (width, height) = self.ren.getRenderWindowDimensions()
        self.assertEqual((640,480), (width, height))

    def testSetRenderWindowDimensions(self):
        """
        Test setting the render window width and height
        """
        width = 123
        height = 456
        self.ren.setRenderWindowDimensions(width, height)
        self.assertEqual(self.ren.getRenderWindowDimensions(), (width,height))

    def testIntegerArgsToSet(self):
        """
        Test setting of integer arguments
        """
        # Non-integer inputs must be rejected with AssertionError.
        self.assertRaises(AssertionError, \
                self.ren.setRenderWindowWidth, "moo")
        self.assertRaises(AssertionError, \
                self.ren.setRenderWindowHeight, "moo")
        self.assertRaises(AssertionError, \
                self.ren.setRenderWindowDimensions, "moo", "baa")

    def testStringArgsToSet(self):
        """
        Test setting of string arguments
        """
        # Non-string inputs must be rejected with AssertionError.
        self.assertRaises(AssertionError, self.ren.addToEvalStack, 10)
        self.assertRaises(AssertionError, self.ren.addToInitStack, 10)

    def testGetEvalStack(self):
        """
        Test getting the evaluation stack
        """
        # it should be the null string on initialisation
        self.assertEqual("", self.ren.getEvalStack())

    def testAddToEvalStack(self):
        """
        Test adding a string to the evaluation stack
        """
        # addToEvalStack appends a trailing newline to each entry.
        inString = "my string"
        outString = inString + '\n'
        self.ren.addToEvalStack(inString)
        self.assertEqual(self.ren.getEvalStack(), outString)

    def testGetInitStack(self):
        """
        Test getting the initialisation stack
        """
        # should be the null string initially
        self.assertEqual("", self.ren.getInitStack())

    def testAddToInitStack(self):
        """
        Test adding a string to the initialisation stack
        """
        # addToInitStack appends a trailing newline to each entry.
        inString = "my string"
        outString = inString + '\n'
        self.ren.addToInitStack(inString)
        self.assertEqual(self.ren.getInitStack(), outString)

    def testResetEvalStack(self):
        """
        Test resetting the evaluation stack
        """
        # set the stack to something
        inString = "my string"
        self.ren.addToEvalStack(inString)
        # reset the stack
        self.ren.resetEvalStack()
        # now check that it's the null string again
        self.assertEqual("", self.ren.getEvalStack())

    def testResetInitStack(self):
        """
        Test resetting the initialisation stack
        """
        # set the stack to something
        inString = "my string"
        self.ren.addToInitStack(inString)
        # reset the stack
        self.ren.resetInitStack()
        # now check that it's the null string again
        self.assertEqual("", self.ren.getInitStack())
if __name__ == '__main__':
unittest.main()
# vim: expandtab shiftwidth=4:
| paultcochrane/pyvisi | pyvisi/tests/test_renderer.py | Python | gpl-2.0 | 6,801 |
# fsm.py - Finite State Machine classes
# Azzurra IRC Services TLD DFA generator
# Copyright (C) 2011 Matteo Panella <[email protected]>
#
# This program is free but copyrighted software; please check COPYING
# for more details.
__all__ = ['DFA', 'Trie', 'State']
class Token(object):
    """A symbol paired with its position in the input stream."""

    def __init__(self, sym, index):
        self.sym = sym      # the symbol itself
        self.index = index  # position of the symbol in the input

    def __repr__(self):
        return '<Token({!r}, {!r})>'.format(self.sym, self.index)
class State(object):
    """A single DFA state: numeric id, start/final flags and outgoing transitions."""

    def __init__(self, statenum, is_start=False, is_final=False):
        self.statenum = statenum
        self.is_start = is_start
        self.is_final = is_final
        # Maps an input symbol to the State reached on that symbol.
        self.transitions = {}

    def add_transition(self, symbol, state):
        # A DFA may only have one transition per symbol (silly me...)
        assert self.transitions.get(symbol) is None, "Internal error: multiple transitions for single symbol"
        self.transitions[symbol] = state

    def __str__(self):
        # Header line: state number plus an optional start/final marker.
        if self.is_start:
            header = 'State: %d start state\n' % self.statenum
        elif self.is_final:
            header = 'State: %d final state\n' % self.statenum
        else:
            header = 'State: %d\n' % self.statenum
        chunks = [header]
        # One line per outgoing transition; NUL marks the accept pseudo-edge.
        for sym, nxt in self.transitions.items():
            if sym == '\x00':
                chunks.append(' NULL -> ACCEPT\n')
            else:
                chunks.append(' %s -> %d\n' % (sym, nxt.statenum))
        return ''.join(chunks)

    def __repr__(self):
        return '<State({0!r}, is_start={1!r}, is_final={2!r})>'.format(
            self.statenum, self.is_start, self.is_final)
class DFA(object):
    """A Deterministic Finite Automaton built from numbered State objects."""

    def __init__(self):
        self.start_state = None   # unique start state, set via add_state(start=True)
        self.states = set()       # every State belonging to this DFA
        self.statenum_map = {}    # state number -> State

    def add_state(self, statenum, start=False, final=False):
        """Add a new state to the DFA and return it."""
        new_state = State(statenum, start, final)
        self.states.add(new_state)
        self.statenum_map[statenum] = new_state
        if start:
            self.start_state = new_state
        return new_state

    def add_transition(self, curr_statenum, next_statenum, symbol):
        """Add a transition, creating either endpoint state on demand."""
        try:
            curr_state = self.statenum_map[curr_statenum]
        except KeyError:
            curr_state = self.add_state(curr_statenum)
        try:
            next_state = self.statenum_map[next_statenum]
        except KeyError:
            next_state = self.add_state(next_statenum)
        curr_state.add_transition(symbol, next_state)

    def get_next_state(self, curr_state, symbol):
        """Return the state reached from curr_state on symbol, or None."""
        return curr_state.transitions.get(symbol)

    def is_valid(self):
        """Validate this DFA.

        Requirements:
          * start state MUST be unique;
          * one or more final states;
          * all states MUST be reachable from start state.
        """
        if self.start_state is None:
            return False
        # Generator avoids building an intermediate list just for any().
        if not any(state.is_final for state in self.states):
            return False

        def visit_state(current, visited):
            # Depth-first reachability walk from the start state.
            if current not in visited:
                visited.add(current)
                for next_state in current.transitions.values():
                    if next_state not in visited:
                        visit_state(next_state, visited)

        visited = set()
        visit_state(self.start_state, visited)
        return len(visited) == len(self.states)

    def print_states(self, fd):
        """Write a human-readable dump of every state to file object fd."""
        # fd.write() instead of the Python 2-only `print >>fd` statement:
        # byte-identical output, but valid on both Python 2 and Python 3
        # (the old form was a SyntaxError under Python 3).
        fd.write('\nStates of DFA:\n\n')
        for statenum in sorted(self.statenum_map.keys()):
            fd.write(str(self.statenum_map[statenum]) + '\n')
class Trie(DFA):
    """A trie (backed by a DFA).

    All accepted strings funnel into one shared final state; the NUL symbol
    ('\\x00') marks the accept pseudo-transition.
    """

    def __init__(self):
        DFA.__init__(self)
        self.statenum = 0  # next unused state number

    def add_string(self, s):
        """Add a new string to the Trie."""
        # Create start state (and the shared final state) on first insertion.
        if self.start_state is None:
            self.add_state(self.statenum, start=True)
            self.statenum += 1
            # Final state
            self.final_state = self.add_state(self.statenum, final=True)
            self.statenum += 1
        # Find the last state for a prefix of the string.
        curr = self.start_state
        i = 0
        while i < len(s):
            sym = s[i]
            next_state = self.get_next_state(curr, sym)
            if next_state is None:
                break
            else:
                i += 1
                if next_state is self.final_state:
                    # A shorter word ends here: split this node so the shared
                    # final state keeps accepting the prefix via NUL.
                    next_state = self.add_state(self.statenum)
                    self.statenum += 1
                    self.add_transition(next_state.statenum, self.final_state.statenum, '\x00')
                    curr.transitions[sym] = next_state
                curr = next_state
        # Create new states for remaining characters.
        # range() instead of Python 2-only xrange(): identical behaviour here,
        # but the old name raised NameError on Python 3.
        for j in range(i, len(s) - 1):
            sym = s[j]
            new_state = self.add_state(self.statenum)
            self.statenum += 1
            self.add_transition(curr.statenum, new_state.statenum, sym)
            curr = new_state
        # Last symbol goes straight to final_state.
        self.add_transition(curr.statenum, self.final_state.statenum, s[-1])

    def get_language(self):
        """Return a list of strings accepted by this trie."""
        lang = []

        def get_lang(so_far, curr, lang):
            # Depth-first walk collecting the symbols on the path so far.
            if curr.is_final:
                lang.append(''.join(so_far))
            for (sym, next_state) in curr.transitions.items():
                if sym != '\x00':
                    so_far.append(sym)
                    get_lang(so_far, next_state, lang)
                    so_far.pop()
                else:
                    # NUL edge: an accepted word ends at this node.
                    lang.append(''.join(so_far))

        get_lang([], self.start_state, lang)
        return lang

    def _get_tokenmap_internal(self):
        """For internal use only: map each symbol of the language to a Token.

        Index 0 is reserved for the NUL accept symbol.
        """
        lang = self.get_language()
        tmap = {'\x00': Token('\x00', 0)}
        tidx = 1
        for s in lang:
            for sym in s:
                if sym not in tmap:
                    tmap[sym] = Token(sym, tidx)
                    tidx += 1
        return tmap

    def get_tokens(self):
        """Return a token map for the language accepted by this trie."""
        return sorted(self._get_tokenmap_internal().values(), key=lambda token: token.index)

    def get_state_matrix(self):
        """Get the state matrix for this trie.

        Returns (matrix, start statenum, final statenum, illegal statenum)
        where each matrix row is (is_final, tuple of next-state numbers per
        token index); missing transitions point at the illegal state.
        """
        states = sorted(self.statenum_map.values(), key=lambda state: state.statenum)
        stm = []
        illegal_state = self.statenum
        token_map = self._get_tokenmap_internal()
        for state in states:
            tlist = [illegal_state] * len(token_map)
            for (sym, next_state) in state.transitions.items():
                tridx = token_map[sym].index
                tlist[tridx] = next_state.statenum
            stm.append((state.is_final, tuple(tlist)))
        return (stm, self.start_state.statenum, self.final_state.statenum, illegal_state)
| rfc1459/tldgen | fsm.py | Python | gpl-2.0 | 7,214 |
import wpilib
class MyRobot(wpilib.IterativeRobot):
    # Minimal iterative-robot skeleton: only the drive motor is created;
    # the mode hooks below are intentionally left as no-ops for now.
    def robotInit(self):
        '''
        Initializes robot components
        '''
        #initialize motor
        self.motor = wpilib.Jaguar(0)  # Jaguar speed controller on channel 0
    def autonomousInit(self):
        # Called once when autonomous mode starts; nothing to set up yet.
        pass
    def autonomousPeriodic(self):
        # Called repeatedly during autonomous mode; no behaviour implemented.
        pass
    def teleopInit(self):
        # Called once when teleoperated mode starts; nothing to set up yet.
        pass
    def teleopPeriodic(self):
        # Called repeatedly during teleoperated mode; no behaviour implemented.
        pass
| frc2423/2015 | mike_practice/robot.py | Python | gpl-2.0 | 425 |
basis_set = \
{
"H": [
[
"S",
[
[
19.2406,
0.032828
],
[
2.8992,
0.231208
],
[
0.6534,
0.817238
]
]
],
[
"S",
[
[
0.1776,
1.0
]
]
]
],
"Li": [
[
"S",
[
[
921.3,
0.001367
],
[
138.7,
0.010425
],
[
31.94,
0.049859
],
[
9.353,
0.160701
],
[
3.158,
0.344604
],
[
1.157,
0.425197
]
]
],
[
"S",
[
[
0.4446,
1.0
]
]
],
[
"S",
[
[
0.07666,
1.0
]
]
],
[
"S",
[
[
0.02864,
1.0
]
]
],
[
"P",
[
[
1.488,
0.03877
],
[
0.2667,
0.236257
],
[
0.07201,
0.830448
]
]
],
[
"P",
[
[
0.0237,
1.0
]
]
]
],
"B": [
[
"S",
[
[
2788.41,
0.002122
],
[
419.039,
0.016171
],
[
96.4683,
0.078356
],
[
28.0694,
0.26325
],
[
9.376,
0.596729
],
[
1.3057,
0.230397
]
]
],
[
"S",
[
[
3.4062,
1.0
]
]
],
[
"S",
[
[
0.3245,
1.0
]
]
],
[
"S",
[
[
0.1022,
1.0
]
]
],
[
"P",
[
[
11.3413,
0.017987
],
[
2.436,
0.110339
],
[
0.6836,
0.383111
],
[
0.2134,
0.64786
]
]
],
[
"P",
[
[
0.0701,
1.0
]
]
]
],
"C": [
[
"S",
[
[
4232.61,
0.002029
],
[
634.882,
0.015535
],
[
146.097,
0.075411
],
[
42.4974,
0.257121
],
[
14.1892,
0.596555
],
[
1.9666,
0.242517
]
]
],
[
"S",
[
[
5.1477,
1.0
]
]
],
[
"S",
[
[
0.4962,
1.0
]
]
],
[
"S",
[
[
0.1533,
1.0
]
]
],
[
"P",
[
[
18.1557,
0.018534
],
[
3.9864,
0.115442
],
[
1.1429,
0.386206
],
[
0.3594,
0.640089
]
]
],
[
"P",
[
[
0.1146,
1.0
]
]
]
],
"N": [
[
"S",
[
[
5909.44,
0.002004
],
[
887.451,
0.01531
],
[
204.749,
0.074293
],
[
59.8376,
0.253364
],
[
19.9981,
0.600576
],
[
2.686,
0.245111
]
]
],
[
"S",
[
[
7.1927,
1.0
]
]
],
[
"S",
[
[
0.7,
1.0
]
]
],
[
"S",
[
[
0.2133,
1.0
]
]
],
[
"P",
[
[
26.786,
0.018257
],
[
5.9564,
0.116407
],
[
1.7074,
0.390111
],
[
0.5314,
0.637221
]
]
],
[
"P",
[
[
0.1654,
1.0
]
]
]
],
"O": [
[
"S",
[
[
7816.54,
0.002031
],
[
1175.82,
0.015436
],
[
273.188,
0.073771
],
[
81.1696,
0.247606
],
[
27.1836,
0.611832
],
[
3.4136,
0.241205
]
]
],
[
"S",
[
[
9.5322,
1.0
]
]
],
[
"S",
[
[
0.9398,
1.0
]
]
],
[
"S",
[
[
0.2846,
1.0
]
]
],
[
"P",
[
[
35.1832,
0.01958
],
[
7.904,
0.124189
],
[
2.3051,
0.394727
],
[
0.7171,
0.627375
]
]
],
[
"P",
[
[
0.2137,
1.0
]
]
]
],
"F": [
[
"S",
[
[
9994.79,
0.002017
],
[
1506.03,
0.015295
],
[
350.269,
0.07311
],
[
104.053,
0.24642
],
[
34.8432,
0.612593
],
[
4.3688,
0.242489
]
]
],
[
"S",
[
[
12.2164,
1.0
]
]
],
[
"S",
[
[
1.2078,
1.0
]
]
],
[
"S",
[
[
0.3634,
1.0
]
]
],
[
"P",
[
[
44.3555,
0.020868
],
[
10.082,
0.130092
],
[
2.9959,
0.396219
],
[
0.9383,
0.620368
]
]
],
[
"P",
[
[
0.2733,
1.0
]
]
]
],
"Ne": [
[
"S",
[
[
12100.0,
0.0012
],
[
1821.0,
0.009092
],
[
432.8,
0.041305
],
[
132.5,
0.137867
],
[
43.77,
0.362433
],
[
14.91,
0.472247
],
[
5.127,
0.130035
]
]
],
[
"S",
[
[
14.91,
1.0
]
]
],
[
"S",
[
[
1.491,
1.0
]
]
],
[
"S",
[
[
0.4468,
1.0
]
]
],
[
"P",
[
[
56.45,
0.020875
],
[
12.92,
0.130032
],
[
3.865,
0.395679
],
[
1.203,
0.62145
]
]
],
[
"P",
[
[
0.3444,
1.0
]
]
]
],
"Al": [
[
"S",
[
[
23490.0,
0.002509
],
[
3548.0,
0.018986
],
[
823.5,
0.092914
],
[
237.7,
0.335935
],
[
78.6,
0.647391
]
]
],
[
"S",
[
[
78.6,
0.111937
],
[
29.05,
0.655976
],
[
11.62,
0.283349
]
]
],
[
"S",
[
[
3.465,
1.0
]
]
],
[
"S",
[
[
1.233,
1.0
]
]
],
[
"S",
[
[
0.2018,
1.0
]
]
],
[
"S",
[
[
0.07805,
1.0
]
]
],
[
"P",
[
[
141.5,
0.017882
],
[
33.22,
0.120375
],
[
10.39,
0.41158
],
[
3.593,
0.595353
]
]
],
[
"P",
[
[
3.593,
0.211758
],
[
1.242,
0.837795
]
]
],
[
"P",
[
[
0.304,
1.0
]
]
],
[
"P",
[
[
0.07629,
1.0
]
]
]
],
"Si": [
[
"S",
[
[
26740.0,
0.002583
],
[
4076.0,
0.019237
],
[
953.3,
0.093843
],
[
274.6,
0.341235
],
[
90.68,
0.641675
]
]
],
[
"S",
[
[
90.68,
0.121439
],
[
33.53,
0.653143
],
[
13.46,
0.277624
]
]
],
[
"S",
[
[
4.051,
1.0
]
]
],
[
"S",
[
[
1.484,
1.0
]
]
],
[
"S",
[
[
0.2704,
1.0
]
]
],
[
"S",
[
[
0.09932,
1.0
]
]
],
[
"P",
[
[
163.7,
0.011498
],
[
38.35,
0.077726
],
[
12.02,
0.263595
],
[
4.185,
0.758269
]
]
],
[
"P",
[
[
4.185,
-1.173045
],
[
1.483,
1.438335
]
]
],
[
"P",
[
[
0.335,
1.0
]
]
],
[
"P",
[
[
0.09699,
1.0
]
]
]
],
"P": [
[
"S",
[
[
30630.0,
0.002619
],
[
4684.0,
0.019479
],
[
1094.0,
0.095207
],
[
315.3,
0.345742
],
[
104.1,
0.636288
]
]
],
[
"S",
[
[
104.1,
0.130706
],
[
38.42,
0.650274
],
[
15.45,
0.272308
]
]
],
[
"S",
[
[
4.656,
1.0
]
]
],
[
"S",
[
[
1.759,
1.0
]
]
],
[
"S",
[
[
0.3409,
1.0
]
]
],
[
"S",
[
[
0.1238,
1.0
]
]
],
[
"P",
[
[
187.7,
0.013158
],
[
43.63,
0.090494
],
[
13.6,
0.305054
],
[
4.766,
0.713579
]
]
],
[
"P",
[
[
4.766,
-0.792573
],
[
1.743,
1.429987
]
]
],
[
"P",
[
[
0.4192,
1.0
]
]
],
[
"P",
[
[
0.1245,
1.0
]
]
]
],
"S": [
[
"S",
[
[
35710.0,
0.002565
],
[
5397.0,
0.019405
],
[
1250.0,
0.095595
],
[
359.9,
0.345793
],
[
119.2,
0.635794
]
]
],
[
"S",
[
[
119.2,
0.130096
],
[
43.98,
0.651301
],
[
17.63,
0.271955
]
]
],
[
"S",
[
[
5.42,
1.0
]
]
],
[
"S",
[
[
2.074,
1.0
]
]
],
[
"S",
[
[
0.4246,
1.0
]
]
],
[
"S",
[
[
0.1519,
1.0
]
]
],
[
"P",
[
[
212.9,
0.014091
],
[
49.6,
0.096685
],
[
15.52,
0.323874
],
[
5.476,
0.691756
]
]
],
[
"P",
[
[
5.476,
-0.626737
],
[
2.044,
1.377051
]
]
],
[
"P",
[
[
0.5218,
1.0
]
]
],
[
"P",
[
[
0.1506,
1.0
]
]
]
],
"Cl": [
[
"S",
[
[
40850.0,
0.002532
],
[
6179.0,
0.019207
],
[
1425.0,
0.095257
],
[
409.2,
0.345589
],
[
135.5,
0.636401
]
]
],
[
"S",
[
[
135.5,
0.120956
],
[
50.13,
0.648511
],
[
20.21,
0.275487
]
]
],
[
"S",
[
[
6.283,
1.0
]
]
],
[
"S",
[
[
2.46,
1.0
]
]
],
[
"S",
[
[
0.5271,
1.0
]
]
],
[
"S",
[
[
0.1884,
1.0
]
]
],
[
"P",
[
[
240.8,
0.014595
],
[
56.56,
0.099047
],
[
17.85,
0.330562
],
[
6.35,
0.682874
]
]
],
[
"P",
[
[
6.35,
-0.561785
],
[
2.403,
1.351901
]
]
],
[
"P",
[
[
0.641,
1.0
]
]
],
[
"P",
[
[
0.1838,
1.0
]
]
]
]
}
def getOrbs(atom):
    """Return the list of basis shells for the given element symbol.

    Raises NameError when the element is absent from this basis set.
    """
    if atom not in basis_set:
        raise NameError('Element not supported by basis set!')
    return basis_set[atom]
# -*- coding: utf-8 -*-
"""
/***************************************************************************
DsgTools
A QGIS plugin
Brazilian Army Cartographic Production Tools
-------------------
begin : 2016-08-01
git sha : $Format:%H$
copyright : (C) 2016 by Philipe Borba - Cartographic Engineer @ Brazilian Army
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
import os
from qgis.core import QgsMessageLog
# Qt imports
from qgis.PyQt import QtWidgets, uic, QtCore
from qgis.PyQt.QtCore import pyqtSlot, pyqtSignal, QSettings, Qt
from qgis.PyQt.QtSql import QSqlQuery
# DSGTools imports
from DsgTools.gui.ServerTools.viewServers import ViewServers
from DsgTools.core.Factories.SqlFactory.sqlGeneratorFactory import SqlGeneratorFactory
from DsgTools.core.Factories.DbFactory.dbFactory import DbFactory
from DsgTools.gui.Misc.PostgisCustomization.CustomJSONTools.customJSONBuilder import CustomJSONBuilder
FORM_CLASS, _ = uic.loadUiType(os.path.join(
os.path.dirname(__file__), 'newAttributeWidget.ui'))
class NewAttributeWidget(QtWidgets.QWidget, FORM_CLASS):
    """Widget used to define a new attribute to be added to one geometric table
    (or to every geometric table) of the database held by abstractDb."""
    def __init__(self, abstractDb, uiParameterJsonDict = None, parent = None):
        """Constructor."""
        super(self.__class__, self).__init__(parent)
        self.abstractDb = abstractDb
        self.setupUi(self)
        self.addAttributeWidget.abstractDb = abstractDb
        self.jsonBuilder = CustomJSONBuilder()
        self.populateSchemaCombo()
        self.populateFromUiParameterJsonDict(uiParameterJsonDict)
    def populateFromUiParameterJsonDict(self, uiParameterJsonDict):
        """
        Restores widget state from a dict of the form:
        {
            'schemaComboBox': --current text of schemaComboBox --
            'tableComboBox': --current text of tableComboBox--
            'allTablesCheckBox': --state of allTablesCheckBox--
            'attrWidget' : -- uiParameterJson from addAttributeWidget--
        }
        """
        if uiParameterJsonDict:
            if uiParameterJsonDict['allTablesCheckBox']:
                self.allTablesCheckBox.setCheckState(Qt.Checked)
            else:
                schemaIdx = self.schemaComboBox.findText(uiParameterJsonDict['schemaComboBox'], flags = Qt.MatchExactly)
                self.schemaComboBox.setCurrentIndex(schemaIdx)
                tableIdx = self.tableComboBox.findText(uiParameterJsonDict['tableComboBox'], flags = Qt.MatchExactly)
                self.tableComboBox.setCurrentIndex(tableIdx)
            self.addAttributeWidget.populateFromUiParameterJsonDict(uiParameterJsonDict['attrWidget'])
    def getTitle(self):
        """Returns the human-readable title assigned to this widget."""
        return self.title
    def setTitle(self, title):
        """Sets the human-readable title assigned to this widget."""
        self.title = title
    def populateSchemaCombo(self):
        """Fills the schema combo with every geometric schema, keeping a
        'Select a schema' placeholder at index 0."""
        self.schemaComboBox.clear()
        self.schemaComboBox.addItem(self.tr('Select a schema'))
        schemaList = self.abstractDb.getGeometricSchemaList()
        for schema in schemaList:
            # views/validation are internal schemas and must not be altered.
            if schema not in ['views', 'validation']:
                self.schemaComboBox.addItem(schema)
    @pyqtSlot(int)
    def on_schemaComboBox_currentIndexChanged(self, idx):
        """Reloads the table combo whenever the selected schema changes."""
        if idx == 0:
            # Placeholder selected: no schema, so no tables to offer.
            self.tableComboBox.clear()
            self.tableComboBox.setEnabled(False)
        else:
            schema = self.schemaComboBox.currentText()
            self.tableComboBox.setEnabled(True)
            self.tableComboBox.clear()
            self.tableComboBox.addItem(self.tr('Select a table'))
            tableList = self.abstractDb.getGeometricTableListFromSchema(schema)
            for table in tableList:
                self.tableComboBox.addItem(table)
    @pyqtSlot(int)
    def on_allTablesCheckBox_stateChanged(self,idx):
        """Disables the schema/table combos while 'all tables' is checked
        (idx == 2 is Qt.Checked) and restores them otherwise."""
        if idx == 2:
            self.tableComboBox.clear()
            self.tableComboBox.setEnabled(False)
            self.schemaComboBox.clear()
            self.schemaComboBox.setEnabled(False)
        else:
            self.schemaComboBox.setEnabled(True)
            self.populateSchemaCombo()
    def validate(self):
        """Returns True when the widget state describes a valid new attribute."""
        if not self.allTablesCheckBox.isChecked():
            # Index 0 holds the 'Select a ...' placeholder (non-empty text!) and
            # -1 means the combo is empty/disabled; checking the index instead of
            # the text rejects both, consistently with validateDiagnosis().
            if self.tableComboBox.currentIndex() < 1:
                return False
            if self.schemaComboBox.currentIndex() < 1:
                return False
        return self.addAttributeWidget.validate()
    def validateDiagnosis(self):
        """Returns a human-readable description of every validation problem
        found (empty string when the widget is valid)."""
        invalidatedReason = ''
        if self.tableComboBox.currentIndex() == 0:
            invalidatedReason += self.tr('A table name must be chosen.\n')
        if self.schemaComboBox.currentIndex() == 0:
            invalidatedReason += self.tr('A schema must be chosen.\n')
        invalidatedReason += self.addAttributeWidget.validateDiagnosis()
        return invalidatedReason
    def getJSONTag(self):
        """Builds the JSON element(s) describing the attribute addition: one
        element for the chosen table (altering its inheritance blood line too),
        or one per parent geometric table when 'all tables' is checked."""
        if not self.validate():
            raise Exception(self.tr('Error in attribute ')+ self.title + ' : ' + self.validateDiagnosis())
        schema = self.schemaComboBox.currentText()
        tableName = self.tableComboBox.currentText()
        attrList = [self.addAttributeWidget.getJSONTag()]
        if not self.allTablesCheckBox.isChecked():
            # Children inheriting from tableName must receive the column as well.
            bloodLine = [i for i in self.abstractDb.getInheritanceBloodLine(tableName) if i != tableName]
            return [self.jsonBuilder.buildNewAttributeElement(schema, tableName, attrList, childrenToAlter = bloodLine)]
        else:
            attrModList = []
            classTuppleList = self.abstractDb.getParentGeomTables(getTupple = True)
            for tupple in classTuppleList:
                schema, tableName = tupple
                if schema not in ('views', 'validation'):
                    bloodLine = [i for i in self.abstractDb.getInheritanceBloodLine(tableName) if i != tableName]
                    attrModList.append(self.jsonBuilder.buildNewAttributeElement(schema, tableName, attrList, childrenToAlter = bloodLine))
            return attrModList
    def getUiParameterJsonDict(self):
        """
        builds a dict with the following format:
        {
            'schemaComboBox': --current text of schemaComboBox --
            'tableComboBox': --current text of tableComboBox--
            'allTablesCheckBox': --state of allTablesCheckBox--
            'attrWidget' : -- uiParameterJson from addAttributeWidget--
        }
        """
        uiParameterJsonDict = dict()
        uiParameterJsonDict['schemaComboBox'] = self.schemaComboBox.currentText()
        uiParameterJsonDict['tableComboBox'] = self.tableComboBox.currentText()
        uiParameterJsonDict['allTablesCheckBox'] = self.allTablesCheckBox.isChecked()
        uiParameterJsonDict['attrWidget'] = self.addAttributeWidget.getUiParameterJsonDict()
        return uiParameterJsonDict
from __future__ import unicode_literals
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from alliance_auth.hooks import get_hooks
from authentication.decorators import members_and_blues
from authentication.models import AuthServicesInfo
from eveonline.models import EveCharacter
from services.forms import FleetFormatterForm
import logging
logger = logging.getLogger(__name__)
@login_required
def fleet_formatter_view(request):
    """Render the fleet broadcast formatter tool.

    On a POST with a valid form the formatted broadcast text is generated and
    shown next to the form; otherwise an empty form is displayed.
    """
    logger.debug("fleet_formatter_view called by user %s", request.user)
    generated = ""
    if request.method == 'POST':
        form = FleetFormatterForm(request.POST)
        logger.debug("Received POST request containing form, valid: %s", form.is_valid())
        if form.is_valid():
            data = form.cleaned_data
            lines = [
                "Fleet Name: " + data['fleet_name'],
                "FC: " + data['fleet_commander'],
                "Comms: " + data['fleet_comms'],
                "Fleet Type: " + data['fleet_type'] + " || " + data['ship_priorities'],
                "Form Up: " + data['formup_location'] + " @ " + data['formup_time'],
                "Duration: " + data['expected_duration'],
                "Reimbursable: " + data['reimbursable'],
                "Important: " + data['important'],
            ]
            # The "Why" line is optional and only added when filled in.
            if data['comments'] != "":
                lines.append("Why: " + data['comments'])
            # Every line of the broadcast is newline-terminated, including the last.
            generated = "".join(line + "\n" for line in lines)
            logger.info("Formatted fleet broadcast for user %s", request.user)
    else:
        form = FleetFormatterForm()
        logger.debug("Returning empty form to user %s", request.user)
    context = {'form': form, 'generated': generated}
    return render(request, 'registered/fleetformattertool.html', context=context)
@login_required
def services_view(request):
    """Render the services page with one control block per hooked service."""
    logger.debug("services_view called by user %s" % request.user)
    auth = AuthServicesInfo.objects.get(user=request.user)
    char = None
    if auth.main_char_id:
        try:
            char = EveCharacter.objects.get(character_id=auth.main_char_id)
        except EveCharacter.DoesNotExist:
            # Main character id points at a character we no longer know about.
            messages.warning(request, _("There's a problem with your main character. Please select a new one."))
    # NOTE(review): `char` is only used to trigger the warning above; it is not
    # passed to the template.
    context = {'service_ctrls': []}
    for fn in get_hooks('services_hook'):
        # Render hooked services controls
        svc = fn()
        # Only show controls the hooked service deems visible for this user/state.
        if svc.show_service_ctrl(request.user, auth.state):
            context['service_ctrls'].append(svc.render_services_ctrl(request))
    return render(request, 'registered/services.html', context=context)
def superuser_test(user):
    """Predicate for user_passes_test-style checks: True only for superusers."""
    return user.is_superuser
| iAddz/allianceauth | services/views.py | Python | gpl-2.0 | 3,056 |
from amoco.config import conf
from amoco.system.core import DefineLoader
from amoco.system import elf
@DefineLoader("elf", elf.EM_X86_64)
def loader_x64(p):
    """Loader hook for 64-bit x86 ELF binaries (EM_X86_64)."""
    # Deferred import: only pull in the x64 system model when actually needed.
    from amoco.system.linux64.x64 import OS
    return OS.loader(p, conf.System)
@DefineLoader("elf", elf.EM_AARCH64)
def loader_aarch64(p):
    """Loader hook for AArch64 ELF binaries (EM_AARCH64)."""
    # Deferred import: only pull in the aarch64 system model when actually needed.
    from amoco.system.linux64.aarch64 import OS
    return OS.loader(p, conf.System)
| LRGH/amoco | amoco/system/linux64/__init__.py | Python | gpl-2.0 | 389 |
#!/usr/bin/python3
# This test needs Raspberry Pi hardware
# need export PYTHONPATH=/path/to/Vcourse/lib
import RPi.GPIO as GPIO
import time
# GPIO.cleanup()
# quit()
# Board pin numbers for each LED colour channel.
RED = 12
GREEN = 16
BLUE = 18 # not currently used
# Blink frequencies in Hz.
SLOW = 0.5
MEDIUM = 1.0 # can miss this with quick look
FAST = 2.0
VERYFAST = 4.0
CHANNELS = (RED, GREEN, BLUE)
GPIO.setmode(GPIO.BOARD) # use board pin numbers n
GPIO.setwarnings(True) # for warnings
GPIO.setup(CHANNELS, GPIO.OUT) # set CHANNELS pins as an output
GPIO.output(CHANNELS, GPIO.LOW) # initially set all off
# One software PWM driver per colour, all initialised at the SLOW frequency.
red = GPIO.PWM(RED, SLOW) # (channel, frequency)
green = GPIO.PWM(GREEN, SLOW)
blue = GPIO.PWM(BLUE, SLOW)
def off(x=''):
    """Stop all PWM channels and drive every LED pin low."""
    print('no light ' + str(x))
    for pwm in (red, green, blue):
        pwm.stop()
    GPIO.output(CHANNELS, GPIO.LOW)
    # Shutoff can be a bit slow and could land after the next "on" signal, so wait.
    time.sleep(0.5)
def bound(x=''):
    """Show the 'zone boundary' signal: red mostly on, pulsing very slowly."""
    print('zone red ' + str(x))
    off()
    red.start(99)            # duty cycle in percent; 99 = mostly on
    red.ChangeFrequency(0.1) # very slow pulse
def warn(x=''):
    """Show the warning signal: short red flashes at the FAST rate."""
    print('flash red ' + str(x))
    off()
    red.start(20)             # duty cycle in percent; 20 = short flashes
    red.ChangeFrequency(FAST) # new frequency in Hz
# --- Reproduction sequence --------------------------------------------------
# First exercise the helper functions (which intermittently segfault a few
# seconds later), then repeat the same GPIO/PWM calls inline, which does not.
bound()
warn()
off()
#after a few seconds
#>>> Segmentation fault
# without defined functions there is no segfault
#bound(x ='') :
red.stop()
green.stop()
blue.stop()
GPIO.output(CHANNELS, GPIO.LOW)
# this line with comment does something funny
red.start(99) # arg is duty cycle. 99=mostly on
red.start(99)
red.ChangeFrequency(0.1)
#warn(x ='') :
red.stop()
green.stop()
blue.stop()
GPIO.output(CHANNELS, GPIO.LOW)
red.start(20) # arg is duty cycle..
red.ChangeFrequency(FAST) # where freq is the new frequency in Hz
#off
red.stop()
green.stop()
blue.stop()
GPIO.output(CHANNELS, GPIO.LOW)
| pdgilbert/Vcourse | tests/segfault2.py | Python | gpl-2.0 | 1,924 |
# -*- coding: utf-8 -*-
import logging
import webbrowser
import wx
import widgetUtils
import output
from wxUI.tabs import video
from wxUI import commonMessages, menus
from controller import selector
from .wall import wallBuffer
log = logging.getLogger("controller.buffers.video")
class videoBuffer(wallBuffer):
	""" This buffer represents video elements, and it can be used for showing videos for the logged user or someone else."""
	def create_tab(self, parent):
		"""Build the video tab UI and disable posting when the wall is read-only."""
		self.tab = video.videoTab(parent)
		self.connect_events()
		self.tab.name = self.name
		if hasattr(self, "can_post") and self.can_post == False and hasattr(self.tab, "post"):
			self.tab.post.Enable(False)
	def connect_events(self):
		"""Wire the play button, then let wallBuffer hook the remaining events."""
		widgetUtils.connect_event(self.tab.play, widgetUtils.BUTTON_PRESSED, self.play_audio)
		super(videoBuffer, self).connect_events()
	def play_audio(self, *args, **kwargs):
		""" Due to inheritance this method should be called play_audio, but play the currently focused video.
		Opens a webbrowser pointing to the video's URL."""
		selected = self.tab.list.get_selected()
		if self.tab.list.get_count() == 0:
			return
		# With nothing focused, fall back to the first item in the list.
		if selected == -1:
			selected = 0
		output.speak(_("Opening video in webbrowser..."))
		webbrowser.open_new_tab(self.session.db[self.name]["items"][selected]["player"])
#		print self.session.db[self.name]["items"][selected]
		return True
	def open_post(self, *args, **kwargs):
		# Videos have no post detail dialog; deliberately a no-op.
		pass
	def remove_buffer(self, mandatory=False):
		"""Remove this buffer unless it is the user's own video buffer.

		When mandatory is False the user is asked for confirmation first.
		"""
		if "me_video" == self.name:
			output.speak(_("This buffer can't be deleted"))
			return False
		else:
			if mandatory == False:
				dlg = commonMessages.remove_buffer()
			else:
				dlg = widgetUtils.YES
			if dlg == widgetUtils.YES:
				self.session.db.pop(self.name)
				return True
			else:
				return False
	def get_more_items(self, *args, **kwargs):
		# Translators: Some buffers can't use the get previous item feature due to API limitations.
		output.speak(_("This buffer doesn't support getting more items."))
	def onFocus(self, event, *args, **kwargs):
		# Nothing buffer-specific to announce on focus; just pass the event on.
		event.Skip()
	def add_to_library(self, *args, **kwargs):
		"""Add the focused video to the logged user's video library via the VK API."""
		post = self.get_post()
		if post == None:
			return
		args = {}
		args["video_id"] = post["id"]
		if "album_id" in post:
			args["album_id"] = post["album_id"]
		args["owner_id"] = post["owner_id"]
		video = self.session.vk.client.video.add(**args)
		# NOTE(review): the `> 21` threshold on the returned id looks like a
		# magic value -- confirm against the VK video.add API contract.
		if video != None and int(video) > 21:
			output.speak(_("Video added to your library"))
	def remove_from_library(self, *args, **kwargs):
		"""Remove the focused video from the logged user's library and the list."""
		post = self.get_post()
		if post == None:
			return
		args = {}
		args["video_id"] = post["id"]
		args["owner_id"] = self.session.user_id
		result = self.session.vk.client.video.delete(**args)
		if int(result) == 1:
			output.speak(_("Removed video from library"))
			self.tab.list.remove_item(self.tab.list.get_selected())
	def move_to_album(self, *args, **kwargs):
		"""Ask the user for a target album and move the focused video into it."""
		if len(self.session.video_albums) == 0:
			return commonMessages.no_video_albums()
		post= self.get_post()
		if post == None:
			return
		album = selector.album(_("Select the album where you want to move this video"), self.session, "video_albums")
		# User cancelled the selector dialog.
		if album.item == None: return
		id = post["id"]
		response = self.session.vk.client.video.addToAlbum(album_ids=album.item, video_id=id, target_id=self.session.user_id, owner_id=self.get_post()["owner_id"])
		if response == 1:
			# Translators: Used when the user has moved an video to an album.
			output.speak(_("Moved"))
	def get_menu(self):
		""" We'll use the same menu that is used for audio items, as the options are exactly the same"""
		p = self.get_post()
		if p == None:
			return
		m = menus.audioMenu()
		widgetUtils.connect_event(m, widgetUtils.MENU, self.move_to_album, menuitem=m.move)
		# if owner_id is the current user, the audio is added to the user's audios.
		if p["owner_id"] == self.session.user_id:
			m.library.SetItemLabel(_("&Remove"))
			widgetUtils.connect_event(m, widgetUtils.MENU, self.remove_from_library, menuitem=m.library)
		else:
			widgetUtils.connect_event(m, widgetUtils.MENU, self.add_to_library, menuitem=m.library)
		return m
	def open_in_browser(self, *args, **kwargs):
		"""Open the focused video's public VK page in the default webbrowser."""
		post = self.get_post()
		if post == None:
			return
		url = "https://vk.com/video{user_id}_{video_id}".format(user_id=post["owner_id"], video_id=post["id"])
		webbrowser.open_new_tab(url)
| manuelcortez/socializer | src/controller/buffers/video.py | Python | gpl-2.0 | 4,961 |
from flask import Response
from flask.views import View
from bson import json_util
from ugca import mongo
class SurveyeeDistribution(View):
    """Flask view returning how surveyees distribute over a given property."""

    def dispatch_request(self, group):
        '''Get the count of surveyees grouped by the given surveyee property.

        :param group: surveyee property to group by:
            income, gender, municipality, maritalstatus, age, education,
            region, ethnicity, employment.position, employment.institution,
            and employment.level
        e.g.: /surveyee/distribution/gender
        '''
        # Employment sub-fields are addressed with dotted paths in Mongo.
        if group in ['position', 'institution', 'level']:
            group = 'employment.' + group
        # Group documents by the surveyee property and count each bucket,
        # sorted by the grouped value.
        aggregate_json = [
            {
                "$group": {
                    "_id": "$surveyee." + group,
                    "count": {
                        "$sum": 1
                    }
                }
            },
            {
                "$sort": {
                    "_id": 1
                }
            }
        ]
        response_json = mongo.db.gsc.aggregate(aggregate_json)
        # Serialise with bson.json_util so BSON types (ObjectId etc.) survive.
        # (The previous unused `answers_json = response_json['result']` local
        # has been dropped; the full aggregate response is returned as before.)
        resp = Response(
            response=json_util.dumps(response_json),
            mimetype='application/json')
        # Return response
        return resp
| opendatakosovo/undp-gsc-api | ugca/views/surveyeedistribution.py | Python | gpl-2.0 | 1,322 |
# -*- coding: utf-8 -*-
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from qgis.core import *
from ui_frmVisual import Ui_Dialog
import ftools_utils
import math
class VisualDialog( QDialog, Ui_Dialog ):
  def __init__( self, iface, function ):
    # iface: QGIS interface object. function: which tool this dialog drives
    # (1 check geometry, 2 unique values, 3 basic statistics, 4 nearest neighbour).
    QDialog.__init__( self )
    self.iface = iface
    self.setupUi( self )
    self.myFunction = function
    if self.myFunction == 2 or self.myFunction == 3:
      # These tools need the field combo refreshed when the layer changes.
      QObject.connect( self.inShape, SIGNAL( "currentIndexChanged(QString)" ), self.update )
    self.manageGui()
    self.cancel_close = self.buttonBox_2.button( QDialogButtonBox.Close )
    self.buttonOk = self.buttonBox_2.button( QDialogButtonBox.Ok )
    self.progressBar.setValue( 0 )
    # The per-part progress bar is only shown by tools that report sub-steps.
    self.partProgressBar.setValue( 0 )
    self.partProgressBar.setVisible( False )
  def keyPressEvent( self, e ):
    '''
    Reimplemented key press event:
    Ctrl+C (or Cmd+C) copies the whole result table to the clipboard.
    '''
    if ( e.modifiers() == Qt.ControlModifier or e.modifiers() == Qt.MetaModifier ) and e.key() == Qt.Key_C:
      #selection = self.tblUnique.selectedItems()
      items = QString()
      if self.myFunction in ( 1, 2 ):
        # Single-column results (geometry errors / unique values).
        for rec in range( self.tblUnique.rowCount() ):
          items.append( self.tblUnique.item( rec, 0 ).text() + "\n" )
      else:
        # Two-column results copied as "parameter:value" lines.
        for rec in range( self.tblUnique.rowCount() ):
          items.append( self.tblUnique.item( rec, 0 ).text() + ":" + self.tblUnique.item( rec, 1 ).text() + "\n" )
      if not items.isEmpty():
        clip_board = QApplication.clipboard()
        clip_board.setText( items )
    else:
      # Any other key: default dialog behaviour.
      QDialog.keyPressEvent( self, e )
  def update( self ):
    # Refresh the field combo (and selection checkbox) for the chosen layer.
    self.cmbField.clear()
    inputLayer = unicode( self.inShape.currentText() )
    if inputLayer != "":
      changedLayer = ftools_utils.getVectorLayerByName( inputLayer )
      changedField = changedLayer.dataProvider().fields()
      # for Basic statistics (with or without selection)
      if self.myFunction == 3:
        # Pre-check "use selected" only when the layer has a selection.
        if changedLayer.selectedFeatureCount() != 0:
          self.useSelected.setCheckState( Qt.Checked )
        else:
          self.useSelected.setCheckState( Qt.Unchecked )
      # add all fields in combobox because now we can work with text fields too
      for i in changedField:
        self.cmbField.addItem( unicode( changedField[i].name() ) )
  def accept( self ):
    # Validate the inputs, then kick off the worker via visual().
    if self.inShape.currentText() == "":
      QMessageBox.information( self, self.tr("Error!"), self.tr( "Please specify input vector layer" ) )
    elif self.cmbField.isVisible() and self.cmbField.currentText() == "":
      # The field combo is hidden for tools 1 and 4, so only check it when shown.
      QMessageBox.information( self, self.tr("Error!"), self.tr( "Please specify input field" ) )
    else:
      self.visual( self.inShape.currentText(), self.cmbField.currentText(), self.useSelected.checkState() )
  def manageGui( self ):
    # Adapt window title, labels and widget visibility to the selected tool,
    # then fill the layer combo with layers of the geometry types it accepts.
    if self.myFunction == 1: # Check geometry validity
      self.setWindowTitle( self.tr( "Check geometry validity" ) )
      self.cmbField.setVisible( False )
      self.label.setVisible( False )
      self.useSelected.setVisible( False )
      self.label_2.setText( self.tr( "Geometry errors" ) )
      self.label_4.setText( self.tr( "Total encountered errors" ) )
    elif self.myFunction == 2: # List unique values
      self.setWindowTitle( self.tr( "List unique values" ) )
      self.label_2.setText( self.tr( "Unique values" ) )
      self.label_4.setText(self.tr( "Total unique values" ) )
      self.useSelected.setVisible( False )
    elif self.myFunction == 3: # Basic statistics
      self.setWindowTitle( self.tr( "Basics statistics" ) )
      self.label_2.setText( self.tr( "Statistics output" ) )
      self.label_4.setVisible( False )
      self.lstCount.setVisible( False )
      self.resize( 381, 400 )
    elif self.myFunction == 4: # Nearest neighbour analysis
      self.setWindowTitle( self.tr( "Nearest neighbour analysis" ) )
      self.cmbField.setVisible( False )
      self.label.setVisible( False )
      self.useSelected.setVisible( False )
      self.label_2.setText( self.tr( "Nearest neighbour statistics" ) )
      self.label_4.setVisible( False )
      self.lstCount.setVisible( False )
      self.resize( 381, 200 )
    self.inShape.clear()
    # Geometry check only works on polygons, nearest neighbour on points;
    # the remaining tools accept any vector geometry type.
    if self.myFunction == 1:
      myList = ftools_utils.getLayerNames( [ QGis.Polygon ] )
    elif self.myFunction == 4:
      myList = ftools_utils.getLayerNames( [ QGis.Point ] )
    else:
      myList = ftools_utils.getLayerNames( [ QGis.Point, QGis.Line, QGis.Polygon ] )
    self.inShape.addItems( myList )
    return
#1: Check geometry
#2: List unique values
#3: Basic statistics
#4: Nearest neighbour analysis
  def visual( self, myLayer, myField, mySelection ):
    """Reset the result widgets and launch the analysis in a worker thread.

    myLayer -- name of the input vector layer
    myField -- attribute field name (ignored by tools that hide the combo)
    mySelection -- checkbox state: restrict processing to selected features
    Returns True after the worker thread has been started.
    """
    vlayer = ftools_utils.getVectorLayerByName( myLayer )
    # clear results of any previous run
    self.tblUnique.clearContents()
    self.tblUnique.setRowCount( 0 )
    self.lstCount.clear()
    self.buttonOk.setEnabled( False )
    # the worker reports progress and completion through these custom signals
    self.testThread = visualThread( self.iface.mainWindow(), self, self.myFunction, vlayer, myField, mySelection )
    QObject.connect( self.testThread, SIGNAL( "runFinished(PyQt_PyObject)" ), self.runFinishedFromThread )
    QObject.connect( self.testThread, SIGNAL( "runStatus(PyQt_PyObject)" ), self.runStatusFromThread )
    QObject.connect( self.testThread, SIGNAL( "runRange(PyQt_PyObject)" ), self.runRangeFromThread )
    QObject.connect( self.testThread, SIGNAL( "runPartRange(PyQt_PyObject)" ), self.runPartRangeFromThread )
    QObject.connect( self.testThread, SIGNAL( "runPartStatus(PyQt_PyObject)" ), self.runPartStatusFromThread )
    # the close button doubles as a cancel button while the thread runs
    self.cancel_close.setText( self.tr("Cancel") )
    QObject.connect( self.cancel_close, SIGNAL( "clicked()" ), self.cancelThread )
    QApplication.setOverrideCursor( Qt.WaitCursor )
    self.testThread.start()
    return True
  def cancelThread( self ):
    """Stop the running worker thread and restore the dialog state."""
    self.testThread.stop()
    QApplication.restoreOverrideCursor()
    self.buttonOk.setEnabled( True )
def runFinishedFromThread( self, output ):
self.testThread.stop()
QApplication.restoreOverrideCursor()
self.buttonOk.setEnabled( True )
result = output[ 0 ]
numRows = len( result )
self.tblUnique.setRowCount( numRows )
if self.myFunction in ( 1, 2 ):
self.tblUnique.setColumnCount( 1 )
for rec in range( numRows ):
item = QTableWidgetItem( result[ rec ] )
self.tblUnique.setItem( rec, 0, item )
else:
self.tblUnique.setColumnCount( 2 )
for rec in range( numRows ):
tmp = result[ rec ].split( ":" )
item = QTableWidgetItem( tmp[ 0 ] )
self.tblUnique.setItem( rec, 0, item )
item = QTableWidgetItem( tmp[ 1 ] )
self.tblUnique.setItem( rec, 1, item )
self.tblUnique.setHorizontalHeaderLabels( [ self.tr("Parameter"), self.tr("Value") ] )
self.tblUnique.horizontalHeader().setResizeMode( 1, QHeaderView.ResizeToContents )
self.tblUnique.horizontalHeader().show()
self.tblUnique.horizontalHeader().setResizeMode( 0, QHeaderView.Stretch )
self.tblUnique.resizeRowsToContents()
self.lstCount.insert( unicode( output[ 1 ] ) )
self.cancel_close.setText( "Close" )
QObject.disconnect( self.cancel_close, SIGNAL( "clicked()" ), self.cancelThread )
return True
  def runStatusFromThread( self, status ):
    """Advance the main progress bar to the feature count reported so far."""
    self.progressBar.setValue( status )
  def runRangeFromThread( self, range_vals ):
    """Set the main progress bar bounds to ( min, max )."""
    self.progressBar.setRange( range_vals[ 0 ], range_vals[ 1 ] )
  def runPartStatusFromThread( self, status ):
    """Advance the per-part progress bar; hide it once the part completes."""
    self.partProgressBar.setValue( status )
    if status >= self.part_max:
      self.partProgressBar.setVisible( False )
  def runPartRangeFromThread( self, range_vals ):
    """Show the per-part progress bar and set its bounds to ( min, max )."""
    self.part_max = range_vals[ 1 ]
    self.partProgressBar.setVisible( True )
    self.partProgressBar.setRange( range_vals[ 0 ], range_vals[ 1 ] )
class visualThread( QThread ):
  """Worker thread running one of the four fTools analyses off the GUI
  thread; results and progress are reported through custom PyQt signals."""
  def __init__( self, parentThread, parentObject, function, vlayer, myField, mySelection ):
    QThread.__init__( self, parentThread )
    self.parent = parentObject
    self.running = False
    self.myFunction = function    # 1..4, dispatched in run()
    self.vlayer = vlayer
    self.myField = myField
    self.mySelection = mySelection    # True: restrict to selected features
#    self.total = 0
#    self.currentCount = 0
  def run( self ):
    """Dispatch to the analysis selected by self.myFunction and emit the
    ( result list, count ) tuple via runFinished."""
    self.running = True
    if self.myFunction == 1: # Check geometry
      ( lst, cnt ) = self.check_geometry( self.vlayer )
    elif self.myFunction == 2: # List unique values
      ( lst, cnt ) = self.list_unique_values( self.vlayer, self.myField )
    elif self.myFunction == 3: # Basic statistics
      ( lst, cnt ) = self.basic_statistics( self.vlayer, self.myField )
    elif self.myFunction == 4: # Nearest neighbour analysis
      ( lst, cnt ) = self.nearest_neighbour_analysis( self.vlayer )
    # NOTE(review): a myFunction value outside 1..4 would leave lst/cnt
    # unbound and raise NameError here; the dialog only ever passes 1..4.
    self.emit( SIGNAL( "runFinished(PyQt_PyObject)" ), ( lst, cnt ) )
    self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), 0 )
  def stop(self):
    """Request the worker loop to stop (checked cooperatively)."""
    self.running = False
  def list_unique_values( self, vlayer, myField ):
    """Collect the unique values of myField in vlayer.

    Returns ( list of trimmed value strings, number of unique values );
    progress is reported through the runStatus/runRange signals.
    """
    vprovider = vlayer.dataProvider()
    allAttrs = vprovider.attributeIndexes()
    vprovider.select( allAttrs )
    fields = vprovider.fields()    # NOTE(review): unused, kept for parity with the other methods
    index = vprovider.fieldNameIndex( myField )
    unique = ftools_utils.getUniqueValues( vprovider, int( index ) )
    lstUnique = []
    nFeat = len( unique )
    nElement = 0
    if nFeat > 0:
      self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), 0 )
      self.emit( SIGNAL( "runRange(PyQt_PyObject)" ), ( 0, nFeat ) )
    for item in unique:
      nElement += 1
      self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), nElement )
      # QVariant -> trimmed QString for display
      lstUnique.append(item.toString().trimmed())
    lstCount = len( unique )
    return ( lstUnique, lstCount )
def basic_statistics( self, vlayer, myField ):
vprovider = vlayer.dataProvider()
allAttrs = vprovider.attributeIndexes()
vprovider.select( allAttrs )
fields = vprovider.fields()
index = vprovider.fieldNameIndex( myField )
feat = QgsFeature()
sumVal = 0.0
meanVal = 0.0
nVal = 0.0
values = []
first = True
nElement = 0
# determine selected field type
if ftools_utils.getFieldType( vlayer, myField ) in (
'String', 'varchar', 'char', 'text'):
fillVal = 0
emptyVal = 0
if self.mySelection: # only selected features
selection = vlayer.selectedFeatures()
nFeat = vlayer.selectedFeatureCount()
if nFeat > 0:
self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), 0 )
self.emit( SIGNAL( "runRange(PyQt_PyObject)" ), ( 0, nFeat ) )
for f in selection:
atMap = f.attributeMap()
lenVal = float( len( atMap[ index ].toString() ) )
if first:
minVal = lenVal
maxVal = lenVal
first = False
else:
if lenVal < minVal: minVal = lenVal
if lenVal > maxVal: maxVal = lenVal
if lenVal != 0.00:
fillVal += 1
else:
emptyVal += 1
values.append( lenVal )
sumVal = sumVal + lenVal
nElement += 1
self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), nElement )
else: # there is no selection, process the whole layer
nFeat = vprovider.featureCount()
if nFeat > 0:
self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), 0 )
self.emit( SIGNAL( "runRange(PyQt_PyObject)" ), ( 0, nFeat ) )
vprovider.select( allAttrs )
while vprovider.nextFeature( feat ):
atMap = feat.attributeMap()
lenVal = float( len( atMap[ index ].toString() ) )
if first:
minVal = lenVal
maxVal = lenVal
first = False
else:
if lenVal < minVal: minVal = lenVal
if lenVal > maxVal: maxVal = lenVal
if lenVal != 0.00:
fillVal += 1
else:
emptyVal += 1
values.append( lenVal )
sumVal = sumVal + lenVal
nElement += 1
self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), nElement )
nVal= float( len( values ) )
if nVal > 0:
meanVal = sumVal / nVal
lstStats = []
lstStats.append( self.tr( "Max. len:" ) + unicode( maxVal ) )
lstStats.append( self.tr( "Min. len:" ) + unicode( minVal ) )
lstStats.append( self.tr( "Mean. len:" ) + unicode( meanVal ) )
lstStats.append( self.tr( "Filled:" ) + unicode( fillVal ) )
lstStats.append( self.tr( "Empty:" ) + unicode( emptyVal ) )
lstStats.append( self.tr( "N:" ) + unicode( nVal ) )
return ( lstStats, [] )
else:
return ( ["Error:No features selected!"], [] )
else: # numeric field
stdVal = 0.00
cvVal = 0.00
rangeVal = 0.00
medianVal = 0.00
maxVal = 0.00
minVal = 0.00
if self.mySelection: # only selected features
selection = vlayer.selectedFeatures()
nFeat = vlayer.selectedFeatureCount()
uniqueVal = ftools_utils.getUniqueValuesCount( vlayer, index, True )
if nFeat > 0:
self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), 0 )
self.emit( SIGNAL( "runRange(PyQt_PyObject)" ), ( 0, nFeat ) )
for f in selection:
atMap = f.attributeMap()
value = float( atMap[ index ].toDouble()[ 0 ] )
if first:
minVal = value
maxVal = value
first = False
else:
if value < minVal: minVal = value
if value > maxVal: maxVal = value
values.append( value )
sumVal = sumVal + value
nElement += 1
self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), nElement )
else: # there is no selection, process the whole layer
nFeat = vprovider.featureCount()
uniqueVal = ftools_utils.getUniqueValuesCount( vlayer, index, False )
if nFeat > 0:
self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), 0 )
self.emit( SIGNAL( "runRange(PyQt_PyObject)" ), ( 0, nFeat ) )
vprovider.select( allAttrs )
while vprovider.nextFeature( feat ):
atMap = feat.attributeMap()
value = float( atMap[ index ].toDouble()[ 0 ] )
if first:
minVal = value
maxVal = value
first = False
else:
if value < minVal: minVal = value
if value > maxVal: maxVal = value
values.append( value )
sumVal = sumVal + value
nElement += 1
self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), nElement )
nVal= float( len( values ) )
if nVal > 0.00:
rangeVal = maxVal - minVal
meanVal = sumVal / nVal
if meanVal != 0.00:
for val in values:
stdVal += ( ( val - meanVal ) * ( val - meanVal ) )
stdVal = math.sqrt( stdVal / nVal )
cvVal = stdVal / meanVal
if nVal > 1:
lstVal = values
lstVal.sort()
if ( nVal % 2 ) == 0:
medianVal = 0.5 * ( lstVal[ int( ( nVal - 1 ) / 2 ) ] + lstVal[ int( ( nVal ) / 2 ) ] )
else:
medianVal = lstVal[ int( ( nVal + 1 ) / 2 ) ]
lstStats = []
lstStats.append( self.tr( "Mean:" ) + unicode( meanVal ) )
lstStats.append( self.tr( "StdDev:" ) + unicode( stdVal ) )
lstStats.append( self.tr( "Sum:" ) + unicode( sumVal) )
lstStats.append( self.tr( "Min:" ) + unicode( minVal ) )
lstStats.append( self.tr( "Max:" ) + unicode( maxVal ) )
lstStats.append( self.tr( "N:" ) + unicode( nVal ) )
lstStats.append( self.tr( "CV:" ) + unicode( cvVal ) )
lstStats.append( self.tr( "Number of unique values:" ) + unicode( uniqueVal ) )
lstStats.append( self.tr( "Range:" ) + unicode( rangeVal ) )
lstStats.append( self.tr( "Median:" ) + unicode( medianVal ) )
return ( lstStats, [] )
else:
return ( ["Error:No features selected!"], [] )
  def nearest_neighbour_analysis( self, vlayer ):
    """Clark-Evans nearest-neighbour statistics for a point layer.

    Compares the observed mean nearest-neighbour distance with the one
    expected for a random distribution over the layer's bounding box and
    returns ( list of "label:value" strings, [] ).
    NOTE(review): an empty layer (nVal == 0) or a degenerate extent
    (A == 0) would raise ZeroDivisionError below; the dialog only offers
    point layers, but confirm before reusing this elsewhere.
    """
    vprovider = vlayer.dataProvider()
    allAttrs = vprovider.attributeIndexes()
    vprovider.select( allAttrs )
    feat = QgsFeature()
    neighbour = QgsFeature()
    sumDist = 0.00
    distance = QgsDistanceArea()
    # A = area of the layer's bounding rectangle
    A = vlayer.extent()
    A = float( A.width() * A.height() )
    index = ftools_utils.createIndex( vprovider )
    vprovider.rewind()
    nFeat = vprovider.featureCount()
    nElement = 0
    if nFeat > 0:
      self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), 0 )
      self.emit( SIGNAL( "runRange(PyQt_PyObject)" ), ( 0, nFeat ) )
    while vprovider.nextFeature( feat ):
      # nearestNeighbor returns the feature itself first, so take [1]
      neighbourID = index.nearestNeighbor( feat.geometry().asPoint(), 2 )[ 1 ]
      vprovider.featureAtId( neighbourID, neighbour, True, [] )
      nearDist = distance.measureLine( neighbour.geometry().asPoint(), feat.geometry().asPoint() )
      sumDist += nearDist
      nElement += 1
      self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), nElement )
    nVal = vprovider.featureCount()
    # observed vs expected mean distance, index, standard error, z-score
    do = float( sumDist) / nVal
    de = float( 0.5 / math.sqrt( nVal / A ) )
    d = float( do / de )
    SE = float( 0.26136 / math.sqrt( ( nVal * nVal ) / A ) )
    zscore = float( ( do - de ) / SE )
    lstStats = []
    lstStats.append( self.tr( "Observed mean distance:" ) + unicode( do ) )
    lstStats.append( self.tr( "Expected mean distance:" ) + unicode( de ) )
    lstStats.append( self.tr( "Nearest neighbour index:" ) + unicode( d ) )
    lstStats.append( self.tr( "N:" ) + unicode( nVal ) )
    lstStats.append( self.tr( "Z-Score:" ) + unicode( zscore ) )
    return ( lstStats, [] )
  def check_geometry( self, vlayer ):
    """Run the four validity checks on every polygon feature of vlayer.

    Checks: holes nested inside the outer ring, rings closed,
    self-intersection, and ring orientation.  Returns
    ( list of error strings, total error count ).
    NOTE(review): assumes a polygon layer (asPolygon/asMultiPolygon);
    manageGui only offers polygon layers for this function.
    """
    vprovider = vlayer.dataProvider()
    allAttrs = vprovider.attributeIndexes()
    vprovider.select( allAttrs )
    feat = QgsFeature()
    geom = QgsGeometry()
    count = 0
    lstErrors = []
    nFeat = vprovider.featureCount()
    nElement = 0
    if nFeat > 0:
      self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), 0 )
      self.emit( SIGNAL( "runRange(PyQt_PyObject)" ), ( 0, nFeat ) )
    while vprovider.nextFeature( feat ):
      geom = QgsGeometry( feat.geometry() )
      self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), nElement )
      nElement += 1
      if geom.isMultipart():
        # run every check on each part of the multipolygon
        polygons = geom.asMultiPolygon()
        for polygon in polygons:
          if not self.isHoleNested( polygon ):
            lstErrors.append( self.tr( "Feature %1 contains an unnested hole" ).arg( unicode( feat.id() ) ) )
            count += 1
          if not self.isPolygonClosed( polygon ):
            lstErrors.append( self.tr( "Feature %1 is not closed" ).arg( unicode( feat.id() ) ) )
            count += 1
          if self.isSelfIntersecting( polygon ):
            lstErrors.append( self.tr( "Feature %1 is self intersecting" ).arg( unicode( feat.id() ) ) )
            count += 1
          if not self.isCorrectOrientation( polygon ):
            lstErrors.append( self.tr( "Feature %1 has incorrect node ordering" ).arg( unicode( feat.id() ) ) )
            count += 1
      else:
        geom = geom.asPolygon()
        if not self.isHoleNested( geom ):
          lstErrors.append( self.tr( "Feature %1 contains an unnested hole" ).arg( unicode( feat.id() ) ) )
          count += 1
        if not self.isPolygonClosed( geom ):
          lstErrors.append( self.tr( "Feature %1 is not closed" ).arg( unicode( feat.id() ) ) )
          count += 1
        if self.isSelfIntersecting( geom ):
          lstErrors.append( self.tr( "Feature %1 is self intersecting" ).arg( unicode( feat.id() ) ) )
          count += 1
        if not self.isCorrectOrientation( geom ):
          lstErrors.append( self.tr( "Feature %1 has incorrect node ordering" ).arg( unicode( feat.id() ) ) )
          count += 1
    self.emit( SIGNAL( "runStatus(PyQt_PyObject)" ), nFeat )
    return ( lstErrors, count )
def isHoleNested( self, polygon ):
if len( polygon ) <= 1:
return True
else:
outer = polygon[ 0 ]
for i in polygon[ 1: len( polygon ) ]:
if not self.arePointsInside( i, outer ):
return False
return True
def arePointsInside( self, inner, outer ):
outer = QgsGeometry().fromPolygon( [ outer ] )
for j in inner:
if not outer.contains(j):
return False
return True
def isPolygonClosed( self, polygon ):
for i in polygon:
first = i[ 0 ]
last = i[ len( i )-1 ]
if not first == last:
return False
return True
  def isSelfIntersecting( self, polygon ):
    """Return True when any ring of the polygon intersects itself.

    For every segment, counts intersections with the later segments of
    the same ring; adjacent segments legitimately touch at their shared
    vertex, so a small number of hits is tolerated (one extra for the
    first segment, which also touches the closing segment).
    Per-part progress is reported via runPartRange/runPartStatus.
    """
    cPart = 0
    for h in polygon:
      cPart += len(h)
    self.emit( SIGNAL( "runPartRange(PyQt_PyObject)" ), ( 0, cPart ) )
    nPart = 0
    for h in polygon:
      for i in range( 0, len(h)-1 ):
        self.emit( SIGNAL( "runPartStatus(PyQt_PyObject)" ), nPart )
        count = 0
        for j in range( i+1, len(h)-1 ):
          if QgsGeometry().fromPolyline( [ h[ i ], h[ i + 1 ] ] ).intersects( QgsGeometry().fromPolyline( [ h[ j ], h[ j + 1 ] ] ) ):
            count += 1
        # first segment touches both its successor and the closing
        # segment, so it may intersect twice without being an error
        if (i==0 and count>2) or (i>0 and count>1):
          self.emit( SIGNAL( "runPartStatus(PyQt_PyObject)" ), cPart )
          return True
        nPart += 1
    self.emit( SIGNAL( "runPartStatus(PyQt_PyObject)" ), cPart )
    return False
def isCorrectOrientation( self, polygon ):
outer = True
for h in polygon:
if outer:
outer = False
if not self.isClockwise( h ):
return False
else:
if self.isClockwise(h):
return False
return True
def isClockwise( self, temp ):
area = 0
for pt in range( 0, len( temp ) -1 ):
area += ( temp[ pt ].x() * temp[ pt + 1 ].y() - temp[ pt + 1 ].x() * temp[ pt ].y() )
area = area / 2
if area <= 0:
return True
else:
return False
| sourcepole/qgis | qgis/python/plugins/fTools/tools/doVisual.py | Python | gpl-2.0 | 21,682 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# EEG.py
#
# Copyright 2015 220 <220@WKH>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import struct
# talking about the brain...
class Channel ():
    """Per-signal metadata parsed from an EDF channel header block."""
    label = ""                # signal label, e.g. electrode name
    transducer = ""           # transducer type description
    physical_dimension = ""   # physical unit, e.g. uV
    physical_minimum = 0
    physical_maximum = 0
    digital_minimum = 0
    digital_maximum = 0
    pre_filtering = ""        # applied filter description
    samples_per_record = 0    # samples of this signal per data record
    reserved = ""
# your mind is your most important asset
class EEG ():
header_format = ('@8s80s80s8s8s8s44s8s8s4s')
header_length = struct.calcsize (header_format)
data = []
records = []
channels = []
patient_id = ""
recording_id = ""
start_date = ""
start_time = ""
reserved = ""
header_bytes = 0
data_records = -1
data_duration = 0
signal_channels = 0
samples_per_record = 0
digital_minimum = 0
digital_maximum = 0
# open your mind, there's much to be understood about it
def open (self, filename):
## FILE STREAM
fstream = open (filename, 'rb')
## HEADER
data = struct.unpack (self.header_format, fstream.read (self.header_length))
# basic info
self.patient_id = data [1].rstrip ()
self.recording_id = data [2].rstrip ()
self.start_date = data [3]
self.start_time = data [4]
self.header_bytes = int (data [5])
self.reserved = data[6]
self.data_records = int (data [7])
self.data_duration = int (data [8])
self.signal_channels = int (data [9])
#channels info
self.channels = []
for i in xrange (self.signal_channels):
c = Channel ()
self.channels.append (c)
for i in xrange (self.signal_channels):
self.channels [i].label = fstream.read (16).rstrip ()
for i in xrange (self.signal_channels):
self.channels [i].transducer = fstream.read (80).rstrip ()
for i in xrange (self.signal_channels):
self.channels [i].physical_dimension = fstream.read (8)
for i in xrange (self.signal_channels):
self.channels [i].physical_minimum = int (fstream.read (8))
for i in xrange (self.signal_channels):
self.channels [i].physical_maximum = int (fstream.read (8))
for i in xrange (self.signal_channels):
self.channels [i].digital_minimum = int (fstream.read (8))
for i in xrange (self.signal_channels):
self.channels [i].digital_maximum = int (fstream.read (8))
for i in xrange (self.signal_channels):
self.channels [i].pre_filtering = fstream.read (80).rstrip ()
for i in xrange (self.signal_channels):
self.channels [i].samples_per_record = int (fstream.read (8))
for i in xrange (self.signal_channels):
self.channels [i].reserved = fstream.read (32).rstrip ()
# read records
self.samples_per_record = int (self.channels [0].samples_per_record)
self.digital_maximum = int (self.channels [0].digital_maximum)
self.digital_minimum = int (self.channels [0].digital_minimum)
total_count = 0
self.records = []
# give it some time...
for h in xrange (self.data_records):
signals = []
for i in xrange (self.signal_channels):
#print "Signal: "+str (i)
samples = []
for j in xrange (self.samples_per_record):
y = int (struct.unpack ('@H', fstream.read (2)) [0])
samples.append (y)
total_count+= 1
signals.append (samples)
self.records.append (signals)
#print h
print "Total samples: "+str (total_count)
fstream.close ()
return 0
# no use of having a brain if you don't use it...
def consoleInfo (self):
print "[EEG basic info]"
print "Patient id: "+self.patient_id
print "Recording id: "+self.recording_id
print "Start date: "+self.start_date
print "Start time: "+self.start_time
print "Header bytes: "+str (self.header_bytes)
print "Data records: "+str (self.data_records)
print "Data duration: "+str (self.data_duration)
print "Signal channels: "+str (self.signal_channels)
def channelsInfo (self):
for i in xrange (self.signal_channels):
self.channelInfo (i)
print
def channelInfo (self, channel_index):
channel = self.channels [channel_index]
print str (channel_index)+" "+channel.label+"\t"+channel.transducer \
+" "+channel.physical_dimension+":"+str (channel.physical_minimum)+"/" \
+str (channel.physical_maximum)+" "+str (channel.digital_minimum)+"/" \
+str (channel.digital_maximum)+" | "+str (channel.samples_per_record)
def channelDetailInfo (self, channel_index):
channel = self.channels [channel_index]
print "[Channel "+str (channel_index)+" info]"
print "Label: "+channel.label
print "Transducer: "+channel.transducer
print "Physical dimension: "+channel.physical_dimension
print "Physical minimum: "+channel.physical_minimum
print "Physical maximum: "+channel.physical_maximum
print "Digital minimum: "+channel.digital_minimum
print "Digital maximum: "+channel.digital_maximum
print "Pre-filtering: "+channel.pre_filtering
print "Samples per record: "+channel.samples_per_record
def electrodesList (self):
for channel in self.channels:
print channel.label
| 2-2-0/EDFview | EEG.py | Python | gpl-2.0 | 5,672 |
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
# Register all ModelAdmin classes found in the installed apps.
admin.autodiscover()
# Routes: the polls app, the Django admin, and a catch-all that
# redirects anything unmatched to the poll list.
urlpatterns = patterns('',
    url(r'^polls/', include('polls.urls')),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^(.*)$', RedirectView.as_view(url='/polls/poll_list/')),
)
| gstiebler/offline_mobile_poll | django/mysite/urls.py | Python | gpl-2.0 | 347 |
from ProfileManager import ProfileManager
GLOBAL_SESSION = None
def _setSession(session):
    """Store *session* as the module-wide current session."""
    global GLOBAL_SESSION
    GLOBAL_SESSION = session
def _getSession():
    """Return the session stored by _setSession (None before any login)."""
    # 'global' is only required for assignment; a plain read already
    # resolves to the module-level name, so the declaration was redundant.
    return GLOBAL_SESSION
class SessionManager(object):
    """Static facade over the module-level session singleton."""
    @staticmethod
    def doLogin(name):
        # Delegate authentication to ProfileManager and cache the
        # resulting session object module-wide.
        profileManager = ProfileManager()
        _setSession(profileManager.doLogin(name))
    @staticmethod
    def getSession():
        # Returns None when doLogin has not been called yet.
        return _getSession()
| Neeneko/OKMiner | SessionManager.py | Python | gpl-2.0 | 463 |
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2006 Donald N. Allingham
# 2009 Benny Malengier
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# $Id$
#-------------------------------------------------------------------------
#
# Python classes
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
from gi.repository import GObject
#-------------------------------------------------------------------------
#
# Python classes
#
#-------------------------------------------------------------------------
from gramps.gen.const import GRAMPS_LOCALE as glocale
_ = glocale.get_translation().gettext
#-------------------------------------------------------------------------
#
# GRAMPS classes
#
#-------------------------------------------------------------------------
from gramps.gen.lib import Name, Surname
from gramps.gen.errors import WindowActiveError
from ...ddtargets import DdTargets
from .namemodel import NameModel
from .groupembeddedlist import GroupEmbeddedList
#-------------------------------------------------------------------------
#
#
#
#-------------------------------------------------------------------------
class NameEmbedList(GroupEmbeddedList):
    """Embedded editor tab listing a person's preferred and alternate names."""
    _HANDLE_COL = 2
    _DND_TYPE = DdTargets.NAME
    # alternate names are the editable group; the preferred name sits in
    # its own read-mostly group
    _WORKGROUP = NameModel.ALTINDEX
    # tooltips for the tab's toolbar buttons
    _MSG = {
        'add' : _('Create and add a new name'),
        'del' : _('Remove the existing name'),
        'edit' : _('Edit the selected name'),
        'up' : _('Move the selected name upwards'),
        'down' : _('Move the selected name downwards'),
    }
    #index = column in model. Value =
    # (name, sortcol in model, width, markup/text, weigth_col
    _column_names = [
        (_('Name'), -1, 250, 0, NameModel.COL_FONTWEIGHT[0]),
        (_('Type'), NameModel.COL_TYPE[0], 100, 0, -1),
        None,
        None,
        (_('Group As'), NameModel.COL_GROUPAS[0],100, 0, -1),
        (_('Source'), NameModel.COL_HASSOURCE[0],60, 0, -1),
        (_('Notes Preview'), NameModel.COL_NOTEPREVIEW[0], 250, 0, -1),
    ]
    def __init__(self, dbstate, uistate, track, data, person, callback):
        """callback is the function to call when preferred name changes
        on the namelist

        data -- list of the person's alternate Name objects (shared, mutated
        in place); person -- the Person being edited.
        """
        self.data = data
        self.person = person
        self.callback = callback
        GroupEmbeddedList.__init__(self, dbstate, uistate, track, _('_Names'),
                                   NameModel, move_buttons=True)
        # show both the preferred and the alternate group from the start
        self.tree.expand_all()
    def _cleanup_on_exit(self):
        """Unset all things that can block garbage collection.
        Finalize rest
        """
        self.person = None
        self.callback = None
        self.data = None
    def get_data(self):
        """Return the two name groups: [preferred name], alternate names."""
        return ([self.person.get_primary_name()],
                self.data)
    def groups(self):
        """
        Return the (group key, group name)s in the order as given by get_data()
        """
        return ((None, NameModel.DEFNAME), (None, NameModel.ALTNAME))
    def column_order(self):
        """
        The columns to show as a tuple of tuples containing
        tuples (show/noshow, model column)
        """
        return ((1, 0), (1, 1), (1, 4), (1, 5), (1, 6))
    def get_popup_menu_items(self):
        """Context-menu entries: full add/edit/remove/set-default inside the
        alternate-name group, add/edit only elsewhere (preferred name)."""
        if self._tmpgroup == self._WORKGROUP:
            return [
                (True, True, Gtk.STOCK_ADD, self.add_button_clicked),
                (False,True, Gtk.STOCK_EDIT, self.edit_button_clicked),
                (True, True, Gtk.STOCK_REMOVE, self.del_button_clicked),
                (True, False, _('Set as default name'), self.name_button_clicked),
            ]
        else:
            return [
                (True, True, Gtk.STOCK_ADD, self.add_button_clicked),
                (False,True, Gtk.STOCK_EDIT, self.edit_button_clicked),
            ]
    def name_button_clicked(self, obj):
        """Handler for 'Set as default name': promote the selected name."""
        name = self.get_selected()
        if name and name[1]:
            self.set_default_name(name[1])
def set_default_name(self, name):
pname = self.person.get_primary_name()
self.person.set_primary_name(name)
remove = [altname for altname in self.data if altname.is_equal(name)]
list(map(self.data.remove, remove))
#only non empty name should move to alternative names
if not name.is_equal(Name()):
self.data.append(pname)
self.rebuild()
self.callback()
    def update_defname(self):
        """
        callback from person editor if change to the preferred name happens
        """
        self.model.update_defname(self.person.get_primary_name())
        self.tree.expand_all()
    def add_button_clicked(self, obj):
        """Open the name editor on a fresh, empty Name."""
        name = Name()
        #the editor requires a surname
        name.add_surname(Surname())
        name.set_primary_surname(0)
        try:
            # local import avoids a circular dependency with the editors package
            from .. import EditName
            EditName(self.dbstate, self.uistate, self.track,
                     name, self.add_callback)
        except WindowActiveError:
            pass
    def add_callback(self, name):
        """Append the newly edited name to the alternate group and show it."""
        data = self.get_data()[self._WORKGROUP]
        data.append(name)
        self.rebuild()
        # scroll after idle so the row exists when the scroll happens
        GObject.idle_add(self.tree.scroll_to_cell,
                         (self._WORKGROUP, len(data) - 1))
    def edit_button_clicked(self, obj):
        """Open the name editor on the selected row; the preferred name gets
        a callback that also refreshes the person editor."""
        name = self.get_selected()
        if name and name[1] is not None:
            try:
                # local import avoids a circular dependency with the editors package
                from .. import EditName
                if name[0] == NameModel.ALTINDEX:
                    EditName(self.dbstate, self.uistate, self.track,
                             name[1], self.edit_callback)
                elif name[0] == NameModel.DEFINDEX:
                    EditName(self.dbstate, self.uistate, self.track,
                             name[1], self.editdef_callback)
            except WindowActiveError:
                pass
    def edit_callback(self, name):
        """Refresh the list after an alternate name was edited."""
        self.rebuild()
    def editdef_callback(self, name):
        """
        callback after default name has changed
        """
        self.rebuild()
        self.callback()
    def dropnotworkgroup(self, row, obj):
        """
        Drop of obj on row that is not WORKGROUP
        """
        if row[0] == NameModel.DEFINDEX:
            #drop on default name
            self.set_default_name(obj)
    def move_away_work(self, row_from, row_to, obj):
        """
        move from the workgroup to a not workgroup
        we allow to change the default name like this
        """
        if row_from[0] == self._WORKGROUP and row_to[0] == NameModel.DEFINDEX:
            self.set_default_name(obj)
def post_rebuild(self, prebuildpath):
"""
Allow post rebuild specific handling.
@param prebuildpath: path selected before rebuild, None if none
@type prebuildpath: tree path
"""
self.tree.expand_all()
if not prebuildpath is None:
self.selection.select_path(prebuildpath)
| Forage/Gramps | gramps/gui/editors/displaytabs/nameembedlist.py | Python | gpl-2.0 | 7,691 |
"""Test invalid __all__ format.
Tuples with one element MUST contain a comma! Otherwise it's a string.
"""
__all__ = ("CONST") # [invalid-all-format]
CONST = 42
| PyCQA/pylint | tests/functional/i/invalid/invalid_all_format.py | Python | gpl-2.0 | 164 |
# -*- coding: utf-8 -*-
"""
------
Views
------
Arquivo de configuração das views da aplicação blog
Autores:
* Alisson Barbosa Ferreira <[email protected]>
Data:
============== ==================
Criação Atualização
============== ==================
29/11/2014 25/03/2015
============== ==================
"""
import os
import smtplib
import json
import binascii
from django.core import paginator
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from rest_framework import status
from rest_framework.response import Response
from rest_framework.decorators import api_view, authentication_classes, permission_classes
from rest_framework.authentication import SessionAuthentication, BasicAuthentication, TokenAuthentication
from rest_framework.permissions import IsAuthenticated
from permissions import IsApi
from authentication import ApiAuthentication
from api_apple import get_apple_validate
from models import Post, Categoria, Cliente
from serializers import PostSerializer
from forms import FormUser, FormPost
def home(request):
    """Site front page: paginated list of posts, newest first, 2 per page.

    The requested page number comes from the 'pagina' query parameter;
    out-of-range and non-numeric values fall back to the last and first
    page respectively.
    """
    posts_lista = Post.objects.all().order_by('-created_on')
    paginacao = Paginator(posts_lista, 2)
    pagina = request.GET.get('pagina')
    try:
        posts = paginacao.page(pagina)
    except PageNotAnInteger:
        # page is not an integer: serve the first page
        posts = paginacao.page(1)
    except EmptyPage:
        # Bug fix: 'paginator' is the imported django.core.paginator
        # MODULE and has no num_pages attribute (AttributeError here);
        # the page count lives on the Paginator instance 'paginacao'.
        posts = paginacao.page(paginacao.num_pages)
    return render_to_response('blog/index.html',locals(),context_instance=RequestContext(request),)
def usuario(request):
    """User registration page: validates FormUser on POST, reports success."""
    if request.method == 'POST':
        form = FormUser(request.POST)
        if form.is_valid():
            novo_usuario = form.save()
            # 'mensagem' and 'usuario' reach the template through locals()
            mensagem = "Usuário cadastrado com sucesso!"
            usuario = User.objects.get(email=novo_usuario.email)
    else:
        form = FormUser()
    return render_to_response('blog/usuario.html',locals(),context_instance=RequestContext(request),)

@login_required
def post(request):
    """Post creation page (login required): validates FormPost on POST."""
    if request.method == 'POST':
        form = FormPost(request.POST)
        if form.is_valid():
            novo_post = form.save()
            # 'mensagem' reaches the template through locals()
            mensagem = "Post cadastrado com sucesso!"
    else:
        form = FormPost()
    return render_to_response('blog/post.html',locals(),context_instance=RequestContext(request),)
def enviar_email(request):
    """Send a fixed test e-mail through Gmail SMTP using STARTTLS.

    NOTE(review): account, password and addresses are hard-coded in
    source -- move them to settings/environment variables and rotate the
    exposed password before any real use.
    """
    smtpserver = smtplib.SMTP('smtp.gmail.com', 587)
    smtpserver.ehlo()
    smtpserver.starttls()
    smtpserver.ehlo()
    smtpserver.login('[email protected]', '6b16ae55')
    smtpserver.sendmail('[email protected]','[email protected]', 'Teste envio de email.')
    smtpserver.close()
    return render_to_response('blog/email.html',locals(),context_instance=RequestContext(request),)
@api_view(['GET', 'POST'])
def all_posts(request):
    """REST endpoint: GET lists every post, POST creates one.

    Responses wrap the serialized data as
    {"status": "success"/"failure", "shows"/"errors": ...}.
    """
    response = {
        "status": "failure",
    }
    if request.method == 'GET':
        posts = Post.objects.all()
        serializer = PostSerializer(posts, many=True)
        response = {
            "status": "success",
            "shows": serializer.data,
        }
        return Response(response)
    elif request.method == 'POST':
        serializer = PostSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save()
            response = {
                "status": "success",
                "shows": serializer.data,
            }
            return Response(response, status=status.HTTP_201_CREATED)
        response = {
            "status": "failure",
            "errors": serializer.errors,
        }
        return Response(response, status=status.HTTP_400_BAD_REQUEST)
    # NOTE(review): unreachable -- @api_view only admits GET/POST and both
    # branches above return.
    return Response(response)
@api_view(['GET', 'PUT', 'DELETE'])
def get_post(request, pk):
    """REST endpoint for a single post: GET retrieves, PUT updates,
    DELETE removes.  Responds 404 when the pk does not exist."""
    try:
        post = Post.objects.get(pk=pk)
    except Post.DoesNotExist:
        return Response(status=status.HTTP_404_NOT_FOUND)
    if request.method == 'GET':
        serializer = PostSerializer(post)
        return Response(serializer.data)
    elif request.method == 'PUT':
        serializer = PostSerializer(post, data=request.data)
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    elif request.method == 'DELETE':
        post.delete()
        return Response(status=status.HTTP_204_NO_CONTENT)
@api_view(['GET', 'POST'])
@authentication_classes((ApiAuthentication, ))
@permission_classes((IsApi,))
def api_auth(request):
"""
Autentica o usuario via rest framework
request -- Requisição Http
:return: response<json> - Retorna todos os posts
"""
posts = Post.objects.all()
serializer = PostSerializer(posts, many=True)
response = {
"status": "success",
"shows": serializer.data,
}
return Response(response)
@api_view(['GET', 'POST'])
@authentication_classes((TokenAuthentication, SessionAuthentication, BasicAuthentication))
@permission_classes((IsAuthenticated,))
def api_token(request):
    """Authenticate via token/session/basic auth and list every post.

    :param request: HTTP request
    :return: JSON response with all serialized posts
    """
    serialized = PostSerializer(Post.objects.all(), many=True)
    payload = {
        "status": "success",
        "shows": serialized.data,
    }
    return Response(payload)
@api_view(['GET', 'POST'])
def api_login(request):
    """
    Validate the purchase with Apple/Google and create an access token.
    :param apple_receipt <str>: Apple purchase receipt (in the JSON body)
    :param device <str>: device OS, 'IOS' or 'AND'
    :return: response<json> - the access token on success, failure status otherwise
    """
    response = {
        "status": "failure",
    }
    # Bug fix: `resposta` used to be read without ever being assigned when
    # 'device' was missing or neither 'IOS' nor 'AND' (NameError at runtime).
    # Default to a failed validation instead.
    resposta = {
        "status": False,
    }
    try:
        data = json.loads(request.body)
    except ValueError:
        # Malformed or empty body: answer with the failure payload rather
        # than letting the JSON error bubble up as a 500.
        return Response(response)
    device = data.get('device')
    if device == 'IOS':
        resposta = get_apple_validate(data.get('apple_receipt'))
    elif device == 'AND':
        # Google Play validation is not implemented yet.
        resposta = {
            "status": False,
        }
    if resposta['status']:
        # 20 random bytes -> 40 hex chars of token material.
        # (The former `if cliente:` check was dropped: a freshly constructed
        # model instance is always truthy.)
        cliente = Cliente(token=binascii.hexlify(os.urandom(20)).decode())
        cliente.save()
        response = {
            "status": "success",
            "token": cliente.token
        }
    return Response(response)
def autorelacionamento(request):
    """
    Show the categories built from the model's self-relationship.
    Only root categories (parent=None) are queried here; the template is
    expected to walk the children of each root.
    Reference: http://blog.naison.com.br/django/auto-relacionamento-no-django
    :param request: HTTP request
    :return: rendered page with the category tree
    """
    # NOTE: locals() is passed as the template context below, so the name
    # "categorias" is part of the template contract -- do not rename it.
    categorias = Categoria.objects.filter(parent=None)
    return render_to_response('blog/autorelacionamento.html', locals(), context_instance=RequestContext(request),)
| alissonbf/blog-teste | blog/views.py | Python | gpl-2.0 | 7,479 |
# Copyright (c) 2016 The GNOME Music Developers
#
# GNOME Music is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# GNOME Music is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with GNOME Music; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# The GNOME Music authors hereby grant permission for non-GPL compatible
# GStreamer plugins to be used and distributed together with GStreamer
# and GNOME Music. This permission is above and beyond the permissions
# granted by the GPL license by which GNOME Music is covered. If you
# modify this code, you may extend this exception to your version of the
# code, but you are not obligated to do so. If you do not wish to do so,
# delete this exception statement from your version.
from gi.repository import Gtk, Pango
from gnomemusic import log
from gnomemusic.grilo import grilo
from gnomemusic.playlists import Playlists
import gnomemusic.utils as utils
class PlaylistDialog():
    """Dialog for adding items to a playlist.

    Shows the user's non-static playlists in a tree view and lets the
    user either pick one or create a new playlist by typing a name.
    The dialog switches between a "normal" and an "empty" stack page
    depending on whether any playlists exist yet.
    """
    def __repr__(self):
        return '<PlaylistDialog>'
    @log
    def __init__(self, parent, playlist_todelete):
        """Build the dialog from its GtkBuilder resource.

        :param parent: window the dialog is made transient for
        :param playlist_todelete: playlist currently being deleted
            elsewhere; it is hidden from the list (may be None)
        """
        self._ui = Gtk.Builder()
        self._ui.add_from_resource('/org/gnome/Music/PlaylistDialog.ui')
        self._dialog_box = self._ui.get_object('dialog')
        self._dialog_box.set_transient_for(parent)
        self._add_playlist_stack = self._ui.get_object('add_playlist_stack')
        self._normal_state = self._ui.get_object('normal_state')
        self._empty_state = self._ui.get_object('empty_state')
        self._title_bar = self._ui.get_object('headerbar')
        self._dialog_box.set_titlebar(self._title_bar)
        self._setup_dialog()
        self._playlist_todelete = playlist_todelete
        self._playlist = Playlists.get_default()
    @log
    def run(self):
        """Run the playlist dialog"""
        return self._dialog_box.run()
    @log
    def destroy(self):
        """Destroy the playlist dialog"""
        return self._dialog_box.destroy()
    @log
    def _setup_dialog(self):
        """Wire up widgets, signal handlers and the initial dialog state."""
        self._view = self._ui.get_object('treeview')
        self._view.set_activate_on_single_click(False)
        self._selection = self._ui.get_object('treeview-selection')
        self._selection.connect('changed', self._on_selection_changed)
        self._add_list_renderers()
        self._view.connect('row-activated', self._on_item_activated)
        self._model = self._ui.get_object('liststore')
        self._populate()
        self._cancel_button = self._ui.get_object('cancel-button')
        self._select_button = self._ui.get_object('select-button')
        # Nothing is selected yet, so "Select" starts out insensitive.
        self._select_button.set_sensitive(False)
        self._cancel_button.connect('clicked', self._on_cancel_button_clicked)
        self._select_button.connect('clicked', self._on_selection)
        def playlists_available_cb(available):
            # Invoked (asynchronously) by grilo: choose the stack page and
            # the matching "new playlist" widgets depending on whether the
            # user already has any playlists.
            if available:
                self._add_playlist_stack.set_visible_child(self._normal_state)
                self._new_playlist_button = self._ui.get_object(
                    'new-playlist-button')
                self._new_playlist_entry = self._ui.get_object(
                    'new-playlist-entry')
            else:
                self._add_playlist_stack.set_visible_child(self._empty_state)
                self._new_playlist_button = self._ui.get_object(
                    'create-first-playlist-button')
                self._new_playlist_entry = self._ui.get_object(
                    'first-playlist-entry')
            # Creation is disabled until a non-empty name has been typed.
            self._new_playlist_button.set_sensitive(False)
            self._new_playlist_button.connect('clicked',
                                              self._on_editing_done)
            self._new_playlist_entry.connect(
                'changed', self._on_new_playlist_entry_changed)
            self._new_playlist_entry.connect('activate',
                                             self._on_editing_done)
            self._new_playlist_entry.connect(
                'focus-in-event', self._on_new_playlist_entry_focused)
            self._playlist.connect('playlist-created',
                                   self._on_playlist_created)
        grilo.playlists_available(playlists_available_cb)
    @log
    def get_selected(self):
        """Get the selected playlist"""
        _iter = self._selection.get_selected()[1]
        if not _iter:
            return None
        # Column 1 of the liststore holds the playlist (media) object.
        return self._model[_iter][1]
    @log
    def _add_list_renderers(self):
        """Add the single text column that renders playlist names."""
        type_renderer = Gtk.CellRendererText(
            xpad=8, ypad=8, ellipsize=Pango.EllipsizeMode.END, xalign=0.0)
        col = Gtk.TreeViewColumn("Name", type_renderer, text=0)
        self._view.append_column(col)
    @log
    def _populate(self):
        """Ask grilo for all playlists; _add_item receives them one by one."""
        grilo.populate_playlists(0, self._add_item)
    @log
    def _add_item(self, source, param, item, remaining=0, data=None):
        """Grilo callback: forward each received playlist to the model."""
        if item:
            self._add_item_to_model(item)
    @log
    def _add_item_to_model(self, item):
        """Adds (non-static only) playlists to the model"""
        # Don't show static playlists
        if self._playlist.is_static_playlist(item):
            return None
        # Hide playlist that is going to be deleted
        if (self._playlist_todelete is not None
                and item.get_id() == self._playlist_todelete.get_id()):
            return None
        # Column 0: display title, column 1: the playlist object itself.
        new_iter = self._model.insert_with_valuesv(
            -1, [0, 1], [utils.get_media_title(item), item])
        return new_iter
    @log
    def _on_selection(self, select_button):
        """'Select' clicked: close the dialog accepting the selection."""
        self._dialog_box.response(Gtk.ResponseType.ACCEPT)
    @log
    def _on_cancel_button_clicked(self, cancel_button):
        """'Cancel' clicked: close the dialog rejecting the selection."""
        self._dialog_box.response(Gtk.ResponseType.REJECT)
    @log
    def _on_item_activated(self, view, path, column):
        """Row activated (double-click/Enter): reset the new-playlist entry
        and either start in-place editing or accept the dialog."""
        self._new_playlist_entry.set_text("")
        self._new_playlist_button.set_sensitive(False)
        _iter = self._model.get_iter(path)
        if self._model[_iter][1]:
            # Row has a backing playlist object: set_cursor with
            # start_editing=True.
            self._view.set_cursor(path, column, True)
        else:
            # Row without a backing object: accept directly.
            self._dialog_box.response(Gtk.ResponseType.ACCEPT)
    @log
    def _on_selection_changed(self, selection):
        """Enable 'Select' only while a row is actually selected."""
        model, _iter = self._selection.get_selected()
        self._select_button.set_sensitive(_iter is not None)
    @log
    def _on_editing_done(self, sender, data=None):
        """Create the playlist named in the entry (ignores empty names).

        Completion is handled asynchronously via the 'playlist-created'
        signal (see _on_playlist_created)."""
        if self._new_playlist_entry.get_text() != '':
            self._playlist.create_playlist(self._new_playlist_entry.get_text())
    @log
    def _on_playlist_created(self, playlists, item):
        """'playlist-created' handler: show, select and auto-accept the
        freshly created playlist."""
        new_iter = self._add_item_to_model(item)
        if new_iter and self._view.get_columns():
            self._view.set_cursor(self._model.get_path(new_iter),
                                  self._view.get_columns()[0], False)
            self._view.row_activated(self._model.get_path(new_iter),
                                     self._view.get_columns()[0])
            self._dialog_box.response(Gtk.ResponseType.ACCEPT)
    @log
    def _on_new_playlist_entry_changed(self, editable, data=None):
        """Keep the create button sensitive only for non-empty names."""
        if editable.get_text() != '':
            self._new_playlist_button.set_sensitive(True)
        else:
            self._new_playlist_button.set_sensitive(False)
    @log
    def _on_new_playlist_entry_focused(self, editable, data=None):
        """Typing a new name and picking an existing playlist are mutually
        exclusive: clear the tree-view selection on entry focus."""
        self._selection.unselect_all()
| albfan/gnome-music | gnomemusic/widgets/playlistdialog.py | Python | gpl-2.0 | 7,810 |
#!usr/bin/python
# -*- coding: utf-8 -*-
#
from random import choice, uniform
import smtplib
import requests
from email.mime.text import MIMEText
from datetime import date, timedelta
import getpass
import settings as s
class TheWonderCouple:
    """Read-only record pairing a gifting couple with the couple they
    gift, plus the joke delivered alongside the assignment."""
    def __init__(self, gifter_email, gifted_email, joke):
        # Store the pairing verbatim; instances are plain data holders.
        self.gifter_email = gifter_email
        self.gifted_email = gifted_email
        self.joke = joke
class TheAwesomeCoupleMixer:
    """Draws the secret-santa pairings: every couple gifts exactly one
    other couple and nobody gifts themselves."""
    def __init__(self):
        # Accumulates TheWonderCouple records across calls, matching the
        # original behaviour (no reset between mixes).
        self.wonder_couples = []
    def magicMixer(self, emails):
        """Pair every couple in *emails* with a different couple.

        Bug fix: the previous draw-and-remove scheme could dead-lock --
        when the only remaining receiver was the gifter itself, the inner
        ``while`` kept re-drawing the same value forever. A
        rejection-sampled derangement (reshuffle until no index maps to
        itself) cannot get stuck.

        :param emails: list of couple e-mail addresses
        :raises ValueError: for exactly one couple (self-gifting is the
            only option, which the exchange forbids; the old code hung)
        """
        from random import shuffle  # module header only imports choice/uniform
        num_couples = len(emails)
        if num_couples == 1:
            raise ValueError("Need at least two couples for a gift exchange")
        gifteds = list(range(num_couples))
        # Reshuffle until the permutation has no fixed point; the expected
        # number of tries is ~e, independent of num_couples.
        while any(i == g for i, g in enumerate(gifteds)):
            shuffle(gifteds)
        for gifter, gifted in enumerate(gifteds):
            joke = CoJoTheEntertainer.getHilariousParticipation()
            self.wonder_couples.append(
                TheWonderCouple(emails[gifter], emails[gifted], joke))
    def getWonderCouples(self, emails):
        """Mix *emails* and return the accumulated pairing list."""
        self.magicMixer(emails)
        return self.wonder_couples
class TheFabulousMailer:
    """Delivers each pairing to its gifter couple over SMTP (STARTTLS)."""
    def __init__(self, smtp_server, smtp_port, from_email, from_email_pwd, subject, price, inflation_rate):
        self.smtp_server = smtp_server
        self.smtp_port = smtp_port
        self.from_email = from_email
        self.from_email_pwd = from_email_pwd
        self.subject = subject
        self.price = price
        self.inflation_rate = inflation_rate
    def performLegendarySending(self, wonder_couples):
        """Open a single SMTP session and mail every couple its match."""
        session = smtplib.SMTP(self.smtp_server, self.smtp_port)
        session.ehlo()
        session.starttls()
        session.ehlo()  # re-identify after the TLS upgrade
        session.login(self.from_email, self.from_email_pwd)
        for couple in wonder_couples:
            print("Sending to {gifter_email}".format(gifter_email=couple.gifter_email))
            msg_text = """
            Sois o casal {gifter_email}
            Ides ofertar presentes ao casal {gifted_email}
            um presente com valor exatamente igual a: {price}
            And I say to you: {joke}
            'mai nada!
            """.format(gifter_email=couple.gifter_email,
                       gifted_email=couple.gifted_email,
                       price=CoJoTheCapitalist.getCapitalistPrice(self.price, self.inflation_rate),
                       joke=couple.joke)
            mail = MIMEText(msg_text)
            mail['From'] = self.from_email
            mail['To'] = couple.gifter_email
            mail['Subject'] = self.subject
            session.sendmail(self.from_email, couple.gifter_email, mail.as_string())
        session.quit()
        print("All done!")
class TheIntrepidPriceFinder:
    """Draws the gift budget uniformly from a configured price band."""
    def __init__(self, min_price, max_price):
        self.min_price = min_price
        self.max_price = max_price
    def getThePriceForHappiness(self):
        """Return a random price in [min_price, max_price], rounded to
        two decimals and rendered as a string."""
        return str(round(uniform(self.min_price, self.max_price), 2))
class CoJoTheEntertainer:
    """Fetches a random joke from the ICNDb web API."""
    @staticmethod
    def getHilariousParticipation():
        """Return a random joke string, falling back to a canned answer.

        Fixes: the original call had no timeout (could hang forever) and
        let network errors or malformed JSON crash the whole mixer
        instead of reaching the intended "No joke for you" fallback.
        """
        try:
            r = requests.get('http://api.icndb.com/jokes/random', timeout=10)
            if r.status_code == 200:
                the_line = r.json()
                return the_line['value']['joke']
        except (requests.RequestException, ValueError, KeyError):
            # Network trouble or unexpected payload: fall through to the
            # canned answer rather than aborting the run.
            pass
        return "No joke for you"
class CoJoTheCapitalist:
    """Converts the base price into a random foreign currency using the
    exchange rate from a random day in the euro's trading history."""
    @staticmethod
    def getCapitalistPrice(price, inflation_rate):
        """Return e.g. ``'12.34 USD (taxa de 2005-03-17)'``: *price*
        marked up by *inflation_rate* percent and converted at the
        historical rate of a random date."""
        rate_date = CoJoTheCapitalist.getDayInSocialistHistory()
        rate = CoJoTheCapitalist.getCapitalisteRateForSocialistDate(rate_date)
        # (1 + rate/100) applies the inflation mark-up before conversion.
        foreign_price = round(float(price) * (1 + inflation_rate / 100.0) * float(rate['rate']), 2)
        return "{foreign_price} {currency} (taxa de {rate_date})".format(foreign_price=foreign_price,
                                                                         currency=rate['currency'],
                                                                         rate_date=rate_date.strftime("%Y-%m-%d"))
    @staticmethod
    def getDayInSocialistHistory():
        """Return a random date between 1999-01-04 (first euro reference
        rate) and today, inclusive."""
        first_date = date(1999, 1, 4)
        max_days = date.today() - first_date
        # round(..., 0) yields a float; timedelta accepts float days.
        days_delta = round(uniform(0, max_days.days), 0)
        return first_date + timedelta(days=days_delta)
    @staticmethod
    def getCapitalisteRateForSocialistDate(rate_date):
        """Fetch the rate table for *rate_date* and pick a random currency.

        Fixes: the original had no timeout and no exception handling, so a
        network error crashed instead of reaching the intended EUR/parity
        fallback; now any request failure or malformed payload falls back.
        """
        uri = "https://api.exchangeratesapi.io/{rate_date}".format(rate_date=rate_date.strftime("%Y-%m-%d"))
        try:
            r = requests.get(uri, timeout=10)
            if r.status_code == 200:
                exchange_rates = r.json()
                currency = choice(list(exchange_rates['rates'].keys()))
                rate = exchange_rates['rates'][currency]
                return {"currency": currency, "rate": rate}
        except (requests.RequestException, ValueError, KeyError):
            pass
        return {"currency": "EUR", "rate": 1}
if __name__ == '__main__':
    ## run the thing
    # 1) Draw the secret-santa pairings from the couples listed in settings.
    tacm = TheAwesomeCoupleMixer()
    wonder_couples = tacm.getWonderCouples(emails=s.couples)
    # 2) Pick one shared gift budget inside the configured price band.
    tipf = TheIntrepidPriceFinder(min_price=s.min_price, max_price=s.max_price)
    price = tipf.getThePriceForHappiness()
    # 3) Prompt for the SMTP password instead of storing it in settings.
    from_email_pwd = getpass.getpass()
    # 4) Mail every gifter couple its assignment.
    tfm = TheFabulousMailer(smtp_server=s.smtp_server, smtp_port=s.smtp_port,
                            from_email=s.from_email, from_email_pwd=from_email_pwd,
                            subject=s.subject, price=price, inflation_rate=s.inflation_rate)
    tfm.performLegendarySending(wonder_couples=wonder_couples)
| rphenriques/couple_mixer | couple_mixer_v2019.py | Python | gpl-2.0 | 5,612 |
# -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2020 CERN.
#
# Zenodo is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Support and contact module for Zenodo."""
from __future__ import absolute_import, print_function
import joblib
from celery.signals import celeryd_init
from flask import current_app
from . import config, current_spam
class ZenodoSpam(object):
    """Flask extension wiring the spam-detection model into Zenodo."""
    def __init__(self, app=None):
        """Extension initialization; registers on *app* when given."""
        if app:
            self.init_app(app)
    @property
    def model(self):
        """Spam-detection model, loaded lazily and cached per instance.

        Returns None when ``ZENODO_SPAM_MODEL_LOCATION`` is unset/empty.
        """
        cached = getattr(self, '_model', None)
        if not cached:
            location = current_app.config.get('ZENODO_SPAM_MODEL_LOCATION')
            cached = joblib.load(location) if location else None
            self._model = cached
        return cached
    def init_app(self, app):
        """Flask application initialization."""
        self.app = app
        self.init_config(app)
        app.extensions['zenodo-spam'] = self
    @staticmethod
    def init_config(app):
        """Seed every ZENODO_SPAM_* default from the config module."""
        for key in dir(config):
            if key.startswith('ZENODO_SPAM_'):
                app.config.setdefault(key, getattr(config, key))
    @celeryd_init.connect
    def warm_up_cache(instance, **kwargs):
        """Preload the spam model when a Celery worker boots.

        Signal handler rather than a bound method: *instance* is the
        Celery worker instance, not ``self``.
        """
        with instance.app.flask_app.app_context():
            current_spam.model
| zenodo/zenodo | zenodo/modules/spam/ext.py | Python | gpl-2.0 | 2,369 |
Subsets and Splits