# ---------------------------------------------------------------------------
# Nik0las1984/mudpyl :: mudpyl/net/nvt.py  (GPL-2.0)
# ---------------------------------------------------------------------------
"""This module contains tools for emulating a network virtual terminal. See
RFC 854 for details of the NVT commands, and VT100 documentation for the
colour codes.
"""
from mudpyl.metaline import Metaline, RunLengthList
from mudpyl.colours import NORMAL_CODES, fg_code, bg_code, WHITE, BLACK
import re
ALL_RESET = '0'
BOLDON = '1'
BOLDOFF = '22'
FG_FLAG = '3'
BG_FLAG = '4'
GROUND_RESET = '8'
colour_pattern = re.compile( "\x1b" + #ESC
r"\[" #open square bracket
r"(\d+" #open group, initial digits
r"(?:;\d{1,2})*" #following digits
r")" #close the group
"m" #just an 'm'
)
toremove = set('\000' #NUL
'\007' #BEL
'\013' #VT
'\014') #FF
BS = '\010'
HT = '\011' #AKA '\t' and tab.
HT_replacement = '    ' #four spaces
def make_string_sane(string):
"""Process (in most cases, this means 'ignore') the NVT characters in the
input string.
"""
#simple characters don't need any special machinery.
for char in toremove:
string = string.replace(char, '')
#do it backspace by backspace because otherwise, if there were multiple
#backspaces in a row, it gets confused and backspaces over backspaces.
while BS in string:
#take off leading backspaces so that the following regex doesn't get
#confused.
string = string.lstrip(BS)
string = re.sub('.' + BS, '', string, 1)
    #swap tabs for four spaces.
string = string.replace(HT, HT_replacement)
return string
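# Behaviour sketch (added for illustration, not in the original module),
# assuming HT_replacement is the four spaces its comment describes:
#
#     make_string_sane('abc\x08d\te')  ==>  'abd    e'
#
# The BS ('\x08') erases the preceding 'c'; the tab expands to spaces.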
class ColourCodeParser(object):
"""A stateful colour code parser."""
def __init__(self):
self.fore = WHITE
self.back = BLACK
self.bold = False
def _parseline(self, line):
"""Feed it lines of VT100-infested text, and it splits it all up.
This returns a threeple: a string, the foreground colours, and the
background colours. The string is simple enough. The background list
is a list of integers corresponding to WHITE, GREEN, etc. The
foreground list is made up of two-ples: the first is the integer
colour, and the second is whether bold is on or off.
        The lists of fore and back changes aren't redundant -- there are no
        changes that could be removed without losing colour information.
"""
#this is a performance hotspot, so minimise the number of attribute
#lookups and modifications
fore = self.fore
bold = self.bold
back = self.back
backs = [(0, back)]
fores = [(0, (fore, bold))]
text = ''
prev_end = 0
for match in colour_pattern.finditer(line):
text += line[prev_end:match.start()]
prev_end = match.end()
codes = match.group(1)
for code in codes.split(';'):
code = code.lstrip('0') #normalisation.
if not code:
                    #leading zeroes have been stripped from ALL_RESET
if fore != WHITE or bold:
fore = WHITE
bold = False
fores.append((len(text), (fore, bold)))
if back != BLACK:
back = BLACK
backs.append((len(text), back))
elif code == BOLDON and not bold:
bold = True
fores.append((len(text), (fore, bold)))
elif code == BOLDOFF and bold:
bold = False
fores.append((len(text), (fore, bold)))
elif code.startswith(FG_FLAG):
code = code[1:]
if code == GROUND_RESET:
code = WHITE
if code in NORMAL_CODES and code != fore:
fore = code
fores.append((len(text), (fore, bold)))
elif code.startswith(BG_FLAG):
code = code[1:]
if code == GROUND_RESET:
code = BLACK
if code in NORMAL_CODES and code != back:
back = code
backs.append((len(text), back))
#We don't really care about chopped colour codes. This class is
#actually going to be tossed whole lines (ie, \r\n or similar
#terminated), and any escape code of the form "\x1b[\r\n30m" or
#similar is broken anyway. I'll probably be proved wrong somehow
#on this one...
if len(line) - 1 > prev_end:
text += line[prev_end:]
self.fore = fore
self.back = back
self.bold = bold
return (fores, backs, text)
def parseline(self, line):
"""Interpret the VT100 codes in line and returns a Metaline, replete
with RunLengthLists, that splits the text, foreground and background
into three separate channels.
"""
fores, backs, cleanline = self._parseline(line)
rlfores = RunLengthList(((length, fg_code(colour, bold))
for (length, (colour, bold)) in fores),
_normalised = True)
rlbacks = RunLengthList(((length, bg_code(colour))
for (length, colour) in backs),
_normalised = True)
return Metaline(cleanline, rlfores, rlbacks)
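# Usage sketch (added for illustration; not part of the original module).
# Feeds one ANSI-coloured line through the parser defined above:
if __name__ == '__main__':
    _parser = ColourCodeParser()
    _line = make_string_sane('\x1b[1;31mbold red\x1b[0m plain')
    # Metaline carrying the text plus fore/background run-length channels
    print(_parser.parseline(_line))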
# ---------------------------------------------------------------------------
# RCAD/ringling-render-tools :: src/rrt/maya/ui/submit.py  (MIT)
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\src\rrt\maya\ui\submit.ui'
#
# Created: Wed Oct 24 16:19:16 2012
# by: PyQt4 UI code generator 4.7.7
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_SubmitMainWindow(object):
def setupUi(self, SubmitMainWindow):
SubmitMainWindow.setObjectName(_fromUtf8("SubmitMainWindow"))
SubmitMainWindow.setEnabled(True)
SubmitMainWindow.resize(445, 283)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(SubmitMainWindow.sizePolicy().hasHeightForWidth())
SubmitMainWindow.setSizePolicy(sizePolicy)
SubmitMainWindow.setMinimumSize(QtCore.QSize(445, 283))
SubmitMainWindow.setWindowTitle(_fromUtf8("hpc-submit-maya"))
self.verticalLayout = QtGui.QVBoxLayout(SubmitMainWindow)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.formLayout = QtGui.QFormLayout()
self.formLayout.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout.setLabelAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.formLayout.setHorizontalSpacing(6)
self.formLayout.setVerticalSpacing(8)
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.head_node_label = QtGui.QLabel(SubmitMainWindow)
self.head_node_label.setObjectName(_fromUtf8("head_node_label"))
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.head_node_label)
self.head_node_field = QtGui.QComboBox(SubmitMainWindow)
self.head_node_field.setObjectName(_fromUtf8("head_node_field"))
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.head_node_field)
self.title_label = QtGui.QLabel(SubmitMainWindow)
self.title_label.setLayoutDirection(QtCore.Qt.LeftToRight)
self.title_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.title_label.setObjectName(_fromUtf8("title_label"))
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.title_label)
self.project_label = QtGui.QLabel(SubmitMainWindow)
self.project_label.setMinimumSize(QtCore.QSize(0, 0))
self.project_label.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.project_label.setObjectName(_fromUtf8("project_label"))
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.project_label)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setSpacing(6)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.project_field = QtGui.QLineEdit(SubmitMainWindow)
self.project_field.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.project_field.sizePolicy().hasHeightForWidth())
self.project_field.setSizePolicy(sizePolicy)
self.project_field.setMinimumSize(QtCore.QSize(161, 26))
self.project_field.setReadOnly(True)
self.project_field.setObjectName(_fromUtf8("project_field"))
self.horizontalLayout.addWidget(self.project_field)
self.browse_button = QtGui.QPushButton(SubmitMainWindow)
self.browse_button.setMinimumSize(QtCore.QSize(85, 27))
self.browse_button.setObjectName(_fromUtf8("browse_button"))
self.horizontalLayout.addWidget(self.browse_button)
self.formLayout.setLayout(2, QtGui.QFormLayout.FieldRole, self.horizontalLayout)
self.scene_label = QtGui.QLabel(SubmitMainWindow)
self.scene_label.setObjectName(_fromUtf8("scene_label"))
self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.scene_label)
self.horizontalLayout1 = QtGui.QHBoxLayout()
self.horizontalLayout1.setSpacing(6)
self.horizontalLayout1.setObjectName(_fromUtf8("horizontalLayout1"))
self.scene_field = QtGui.QLineEdit(SubmitMainWindow)
self.scene_field.setEnabled(True)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.scene_field.sizePolicy().hasHeightForWidth())
self.scene_field.setSizePolicy(sizePolicy)
self.scene_field.setMinimumSize(QtCore.QSize(161, 26))
self.scene_field.setReadOnly(True)
self.scene_field.setObjectName(_fromUtf8("scene_field"))
self.horizontalLayout1.addWidget(self.scene_field)
self.scene_button = QtGui.QPushButton(SubmitMainWindow)
self.scene_button.setMinimumSize(QtCore.QSize(85, 27))
self.scene_button.setObjectName(_fromUtf8("scene_button"))
self.horizontalLayout1.addWidget(self.scene_button)
self.formLayout.setLayout(3, QtGui.QFormLayout.FieldRole, self.horizontalLayout1)
self.start_label = QtGui.QLabel(SubmitMainWindow)
self.start_label.setObjectName(_fromUtf8("start_label"))
self.formLayout.setWidget(4, QtGui.QFormLayout.LabelRole, self.start_label)
self.start_field = QtGui.QSpinBox(SubmitMainWindow)
self.start_field.setMinimum(1)
self.start_field.setMaximum(999999999)
self.start_field.setObjectName(_fromUtf8("start_field"))
self.formLayout.setWidget(4, QtGui.QFormLayout.FieldRole, self.start_field)
self.end_label = QtGui.QLabel(SubmitMainWindow)
self.end_label.setObjectName(_fromUtf8("end_label"))
self.formLayout.setWidget(5, QtGui.QFormLayout.LabelRole, self.end_label)
self.end_field = QtGui.QSpinBox(SubmitMainWindow)
self.end_field.setMinimum(1)
self.end_field.setMaximum(999999999)
self.end_field.setObjectName(_fromUtf8("end_field"))
self.formLayout.setWidget(5, QtGui.QFormLayout.FieldRole, self.end_field)
self.step_label = QtGui.QLabel(SubmitMainWindow)
self.step_label.setObjectName(_fromUtf8("step_label"))
self.formLayout.setWidget(6, QtGui.QFormLayout.LabelRole, self.step_label)
self.horizontalLayout_11 = QtGui.QHBoxLayout()
self.horizontalLayout_11.setObjectName(_fromUtf8("horizontalLayout_11"))
self.step_field = QtGui.QSpinBox(SubmitMainWindow)
self.step_field.setMinimum(1)
self.step_field.setMaximum(999999999)
self.step_field.setObjectName(_fromUtf8("step_field"))
self.horizontalLayout_11.addWidget(self.step_field)
spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.horizontalLayout_11.addItem(spacerItem)
spacerItem1 = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.horizontalLayout_11.addItem(spacerItem1)
spacerItem2 = QtGui.QSpacerItem(50, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.horizontalLayout_11.addItem(spacerItem2)
self.render_label = QtGui.QLabel(SubmitMainWindow)
self.render_label.setObjectName(_fromUtf8("render_label"))
self.horizontalLayout_11.addWidget(self.render_label)
self.render_field = QtGui.QComboBox(SubmitMainWindow)
self.render_field.setObjectName(_fromUtf8("render_field"))
self.horizontalLayout_11.addWidget(self.render_field)
spacerItem3 = QtGui.QSpacerItem(10, 10, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
self.horizontalLayout_11.addItem(spacerItem3)
self.formLayout.setLayout(6, QtGui.QFormLayout.FieldRole, self.horizontalLayout_11)
self.horizontalLayout_5 = QtGui.QHBoxLayout()
self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
self.rrt_debug = QtGui.QCheckBox(SubmitMainWindow)
self.rrt_debug.setLayoutDirection(QtCore.Qt.LeftToRight)
self.rrt_debug.setObjectName(_fromUtf8("rrt_debug"))
self.horizontalLayout_5.addWidget(self.rrt_debug)
self.pause = QtGui.QCheckBox(SubmitMainWindow)
self.pause.setLayoutDirection(QtCore.Qt.LeftToRight)
self.pause.setObjectName(_fromUtf8("pause"))
self.horizontalLayout_5.addWidget(self.pause)
self.formLayout.setLayout(7, QtGui.QFormLayout.FieldRole, self.horizontalLayout_5)
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.title_field = QtGui.QLineEdit(SubmitMainWindow)
self.title_field.setObjectName(_fromUtf8("title_field"))
self.horizontalLayout_4.addWidget(self.title_field)
self.formLayout.setLayout(1, QtGui.QFormLayout.FieldRole, self.horizontalLayout_4)
self.verticalLayout.addLayout(self.formLayout)
self.line = QtGui.QFrame(SubmitMainWindow)
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.verticalLayout.addWidget(self.line)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setSpacing(6)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
spacerItem4 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_2.addItem(spacerItem4)
self.submit_button = QtGui.QPushButton(SubmitMainWindow)
self.submit_button.setObjectName(_fromUtf8("submit_button"))
self.horizontalLayout_2.addWidget(self.submit_button)
self.cancel_button = QtGui.QPushButton(SubmitMainWindow)
self.cancel_button.setObjectName(_fromUtf8("cancel_button"))
self.horizontalLayout_2.addWidget(self.cancel_button)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.retranslateUi(SubmitMainWindow)
QtCore.QObject.connect(self.browse_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.browse)
QtCore.QObject.connect(self.cancel_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.quit)
QtCore.QObject.connect(self.submit_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.submit_job)
QtCore.QObject.connect(self.scene_button, QtCore.SIGNAL(_fromUtf8("clicked()")), SubmitMainWindow.scene)
QtCore.QMetaObject.connectSlotsByName(SubmitMainWindow)
def retranslateUi(self, SubmitMainWindow):
self.head_node_label.setToolTip(QtGui.QApplication.translate("SubmitMainWindow", "which cluster to use", None, QtGui.QApplication.UnicodeUTF8))
self.head_node_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Head Node", None, QtGui.QApplication.UnicodeUTF8))
self.head_node_field.setToolTip(QtGui.QApplication.translate("SubmitMainWindow", "Which cluster to submit to", None, QtGui.QApplication.UnicodeUTF8))
self.title_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Job Title", None, QtGui.QApplication.UnicodeUTF8))
self.project_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Project Folder", None, QtGui.QApplication.UnicodeUTF8))
self.browse_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Set", None, QtGui.QApplication.UnicodeUTF8))
self.scene_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Maya Scene File", None, QtGui.QApplication.UnicodeUTF8))
self.scene_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Browse", None, QtGui.QApplication.UnicodeUTF8))
self.start_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Start Frame", None, QtGui.QApplication.UnicodeUTF8))
self.end_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "End Frame", None, QtGui.QApplication.UnicodeUTF8))
self.step_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Frame Step", None, QtGui.QApplication.UnicodeUTF8))
self.render_label.setText(QtGui.QApplication.translate("SubmitMainWindow", "Renderer", None, QtGui.QApplication.UnicodeUTF8))
self.rrt_debug.setText(QtGui.QApplication.translate("SubmitMainWindow", "Show Debug Messages", None, QtGui.QApplication.UnicodeUTF8))
self.pause.setText(QtGui.QApplication.translate("SubmitMainWindow", "Pause before exit", None, QtGui.QApplication.UnicodeUTF8))
self.submit_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Submit Job", None, QtGui.QApplication.UnicodeUTF8))
self.cancel_button.setText(QtGui.QApplication.translate("SubmitMainWindow", "Cancel", None, QtGui.QApplication.UnicodeUTF8))
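# Usage sketch (added for illustration; not part of the generated file).
# Generated Ui_* classes are conventionally attached to a host widget; the
# host must provide the browse/quit/submit_job/scene slots that setupUi()
# connects above:
#
# class SubmitWindow(QtGui.QWidget):
#     def __init__(self):
#         QtGui.QWidget.__init__(self)
#         self.ui = Ui_SubmitMainWindow()
#         self.ui.setupUi(self)  # builds the widgets onto this window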
# ---------------------------------------------------------------------------
# kunalarya/simple-sat-solver :: satsolver/solver.py  (Apache-2.0)
# ---------------------------------------------------------------------------
from __future__ import print_function
import argparse
import logging
from collections import namedtuple
import satsolver.parser as parser
from satsolver.util import Success, Failure
from satsolver.state import Instance
class Node(object):
def __init__(self, lit, asg, level):
assert lit > 0
self.lit = lit
self.asg = asg
self.level = level
def __repr__(self):
return '<x_{} = {} @ {}>'.format(
self.lit, self.asg, self.level)
class ImplicationGraph(object):
"""Implication Graph"""
def __init__(self):
self.nodes = set()
self.lits = set() # set of literals in nodes
# map lit -> nodes w/assignments
# dict[int, list[Node]]
self.nodes_by_lit = {}
self.fwd_edges = {}
# maps (x -> y) tuple edge to clause
self.edge_annot = {}
def add_node(self, node):
self.nodes.add(node)
self.lits.add(node.lit)
self.fwd_edges[node] = []
self.nodes_by_lit[node.lit] = node
def del_node(self, node):
self.lits.remove(node.lit)
self.nodes.remove(node)
del self.fwd_edges[node]
del self.nodes_by_lit[node.lit]
def add_edge(self, src, dst, reason):
self.fwd_edges[src].append(dst)
self.edge_annot[src, dst] = reason
Decision = namedtuple('Decision', ['level', 'lit', 'value'])
Implication = namedtuple('Implication', ['clause', 'lit', 'value'])
class Solver(object):
"""Main Solver"""
def __init__(self, instance, recipe=None):
self.instance = instance
# Pick variables in this order, if given.
self.recipe = recipe
self.recipe_index = 0
# def new_var(self):
# pass
# def add_clause(self, lits):
# pass
# def simplify_db(self):
# pass
def solve(self):
result = self.decide([], 1)
return result
def determine_next_var(self):
"""Choose the next variable to assign.
        It will run the recipe if given; otherwise it selects an arbitrary
        unassigned variable.
Returns:
tuple(variable, value)
"""
if self.recipe is not None:
if len(self.recipe) > 0:
next_var_and_value = self.recipe[0]
self.recipe = self.recipe[1:]
return next_var_and_value
        # Otherwise, fall back to an arbitrary unassigned variable.
next_var = next(iter(self.instance.unasg_vars))
return next_var, 1
def bcp(self, decision_level, igraph):
"""Boolean Constrain Propagation
Returns:
Success | Failure
Success result:
{lit: Implication}
Failure means UNSAT
"""
any_unit = True
implications = {} # Keyed on int
while any_unit:
any_unit = False
for clause_index, clause in enumerate(self.instance.clauses):
r = self.instance.is_unit(clause)
if not r.success: return r
is_unit, implied = r.result
if is_unit:
lit = abs(implied)
if implied > 0:
r = self.instance.set_lit(lit, 1)
if not r.success: return r
implications[lit] = Implication(clause_index, lit, 1)
value = 1
else:
r = self.instance.set_lit(lit, 0)
if not r.success: return r
implications[lit] = Implication(clause_index, lit, 0)
value = 0
logging.debug('implied=%d -> %d', lit, value)
# Create a node in the ImplicationGraph if it doesn't yet exist.
if not lit in igraph.nodes_by_lit:
lit_node = Node(lit, value, decision_level)
igraph.add_node(lit_node)
# Create any edges
for implicating_lit in clause:
implicating_pair = self.instance.get_value(implicating_lit)
implicating_lit, implicating_value = implicating_pair
if implicating_lit != lit:
# create the implicating lit if needed
if implicating_lit not in igraph.lits:
inode = Node(implicating_lit, implicating_value,
decision_level)
igraph.add_node(inode)
else:
inode = igraph.nodes_by_lit[implicating_lit]
# create an edge for this node
lit_node = igraph.nodes_by_lit[lit]
igraph.add_edge(inode, lit_node, clause)
logging.debug('add edge %s->%s because of %s',
inode, lit_node, clause)
any_unit = True
return Success(implications)
def decide(self, decisions, level):
"""
Args:
decisions (list[Decision]):
level (int):
Returns:
Success | Failure
"""
# choose a variable to decide
print('.', end='')
logging.debug('______________________________')
logging.debug('[level: %d]', level)
# Choose a variable to set.
next_var, next_value = self.determine_next_var()
# Create a new copy of the decisions.
decisions = list(decisions)
decisions.append(Decision(level, next_var, next_value))
logging.debug('try_assignment(level=%d, %d->%d)', level, next_var,
next_value)
result = self.try_assignment(level, decisions, next_var, next_value)
if not result.success:
logging.debug('caused unsat: try_assignment(level=%d, %d->%d)',
level, next_var, next_value)
# try the other branch
inverted_value = 1 - next_value
# remove last decision
decisions = decisions[:-1]
# add new decision
decisions.append(Decision(level, next_var, inverted_value))
r = self.try_assignment(level, decisions, next_var, inverted_value)
            # If we reach UNSAT here too, there is no solution along this
            # branch, so propagate the failure up.
if not r.success:
return r
else:
# If all variables have been assigned, store this as a solution.
if len(self.instance.unasg_vars) == 0:
if self.instance.verify():
self.instance.save_solution()
print('satisfied!')
else:
raise ValueError('All variables assigned, but UNSAT')
return Success()
def try_assignment(self, level, decisions, lit, value):
logging.debug('try_assignment: lit = %d -- setting to %d', lit, value)
# assign it True
r = self.instance.set_lit(lit, value)
if not r.success:
return r
igraph = ImplicationGraph()
# build the graph
for decision in decisions:
# create a node for each decision
node = Node(decision.lit, decision.value, decision.level)
igraph.add_node(node)
logging.debug('adding node %s', node)
logging.debug('running bcp...')
r = self.bcp(level, igraph)
if not r.success: # Meaning UNSAT:
logging.debug('decision led to UNSAT. unsetting')
self.instance.unset_lit(lit)
# If it's UNSAT, we need to backtrack
return Failure('Unsat!')
# Otherwise it was a Success
implications = r.result
if len(self.instance.unasg_vars) > 0:
# increase the decision level
r = self.decide(decisions, level+1)
self.instance.unset_lit(lit)
return r
# otherwise, return igraph
return Success(result=(igraph, None))
def solve(instance):
"""
Args:
instance (Instance): parsed SAT instance
Returns:
Success | Failure
"""
solver = Solver(instance)
result = solver.solve()
if not result.success:
print('Unsatisfiable')
return result
def main():
cmdline_parser = argparse.ArgumentParser()
cmdline_parser.add_argument('filename', action='store', type=str)
args = cmdline_parser.parse_args()
file_parser = parser.CNFFileParser(args.filename)
inst = Instance(var_count=file_parser.var_count, clauses=file_parser.clauses)
result = solve(inst)
if result.success:
# Print the solutions
print('Satisfying solutions:')
for solution in inst.solutions:
print(solution)
if __name__ == '__main__':
main()
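# Usage sketch (added for illustration; not part of the original file).
# Builds a tiny formula in memory instead of parsing a DIMACS file; the
# clause encoding (lists of signed integer literals) is assumed to mirror
# what CNFFileParser produces for main() above.
def _demo():
    inst = Instance(var_count=3, clauses=[[1, -2], [2, 3]])  # (x1 | ~x2) & (x2 | x3)
    result = solve(inst)
    if result.success:
        for solution in inst.solutions:
            print(solution)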
# ---------------------------------------------------------------------------
# luyijun/evennia_worldloader :: worldloader/example_tutorial_world/worlddata/migrations/0001_initial.py  (BSD-3-Clause)
# ---------------------------------------------------------------------------
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='personal_objects',
fields=[
('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
('name', models.CharField(max_length=255)),
('alias', models.CharField(max_length=255, blank=True)),
('typeclass', models.CharField(max_length=255)),
('desc', models.TextField(blank=True)),
('location', models.CharField(max_length=255, blank=True)),
('home', models.CharField(max_length=255, blank=True)),
('lock', models.CharField(max_length=255, blank=True)),
('attributes', models.TextField(blank=True)),
('tutorial_info', models.TextField(blank=True)),
('destination', models.CharField(max_length=255, blank=True)),
],
options={
'verbose_name': 'Personal Object List',
'verbose_name_plural': 'Personal Object List',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='world_details',
fields=[
('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
('name', models.CharField(max_length=255)),
('desc', models.TextField(blank=True)),
('location', models.CharField(max_length=255, blank=True)),
],
options={
'verbose_name': 'World Detail List',
'verbose_name_plural': 'World Detail List',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='world_exits',
fields=[
('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
('name', models.CharField(max_length=255)),
('alias', models.CharField(max_length=255, blank=True)),
('typeclass', models.CharField(max_length=255)),
('desc', models.TextField(blank=True)),
('location', models.CharField(max_length=255, blank=True)),
('home', models.CharField(max_length=255, blank=True)),
('lock', models.CharField(max_length=255, blank=True)),
('attributes', models.TextField(blank=True)),
('tutorial_info', models.TextField(blank=True)),
('destination', models.CharField(max_length=255, blank=True)),
],
options={
'verbose_name': 'World Exit List',
'verbose_name_plural': 'World Exit List',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='world_objects',
fields=[
('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
('name', models.CharField(max_length=255)),
('alias', models.CharField(max_length=255, blank=True)),
('typeclass', models.CharField(max_length=255)),
('desc', models.TextField(blank=True)),
('location', models.CharField(max_length=255, blank=True)),
('home', models.CharField(max_length=255, blank=True)),
('lock', models.CharField(max_length=255, blank=True)),
('attributes', models.TextField(blank=True)),
('tutorial_info', models.TextField(blank=True)),
('destination', models.CharField(max_length=255, blank=True)),
],
options={
'verbose_name': 'World Object List',
'verbose_name_plural': 'World Object List',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='world_rooms',
fields=[
('key', models.CharField(max_length=255, serialize=False, primary_key=True)),
('name', models.CharField(max_length=255)),
('alias', models.CharField(max_length=255, blank=True)),
('typeclass', models.CharField(max_length=255)),
('desc', models.TextField(blank=True)),
('location', models.CharField(max_length=255, blank=True)),
('home', models.CharField(max_length=255, blank=True)),
('lock', models.CharField(max_length=255, blank=True)),
('attributes', models.TextField(blank=True)),
('tutorial_info', models.TextField(blank=True)),
('destination', models.CharField(max_length=255, blank=True)),
],
options={
'verbose_name': 'World Room List',
'verbose_name_plural': 'World Room List',
},
bases=(models.Model,),
),
]
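# Note (added): once this app is installed in a Django project, the tables
# above are created by running `python manage.py migrate` (the app label is
# assumed to be `worlddata` from the directory name).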
# ---------------------------------------------------------------------------
# ProfessorX/Config :: .PyCharm30/system/python_stubs/-1247971765/PyQt4/QtGui/QAbstractSpinBox.py  (GPL-2.0)
# ---------------------------------------------------------------------------
# encoding: utf-8
# module PyQt4.QtGui
# from /usr/lib/python3/dist-packages/PyQt4/QtGui.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyQt4.QtCore as __PyQt4_QtCore
from .QWidget import QWidget
class QAbstractSpinBox(QWidget):
""" QAbstractSpinBox(QWidget parent=None) """
def alignment(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.alignment() -> Qt.Alignment """
pass
def buttonSymbols(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.buttonSymbols() -> QAbstractSpinBox.ButtonSymbols """
pass
def changeEvent(self, QEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.changeEvent(QEvent) """
pass
def clear(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.clear() """
pass
def closeEvent(self, QCloseEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.closeEvent(QCloseEvent) """
pass
def contextMenuEvent(self, QContextMenuEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.contextMenuEvent(QContextMenuEvent) """
pass
def correctionMode(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.correctionMode() -> QAbstractSpinBox.CorrectionMode """
pass
def editingFinished(self, *args, **kwargs): # real signature unknown
""" QAbstractSpinBox.editingFinished [signal] """
pass
def event(self, QEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.event(QEvent) -> bool """
return False
def fixup(self, p_str): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.fixup(str) -> str """
return ""
def focusInEvent(self, QFocusEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.focusInEvent(QFocusEvent) """
pass
def focusOutEvent(self, QFocusEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.focusOutEvent(QFocusEvent) """
pass
def hasAcceptableInput(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.hasAcceptableInput() -> bool """
return False
def hasFrame(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.hasFrame() -> bool """
return False
def hideEvent(self, QHideEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.hideEvent(QHideEvent) """
pass
def initStyleOption(self, QStyleOptionSpinBox): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.initStyleOption(QStyleOptionSpinBox) """
pass
def inputMethodQuery(self, Qt_InputMethodQuery): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.inputMethodQuery(Qt.InputMethodQuery) -> object """
return object()
def interpretText(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.interpretText() """
pass
def isAccelerated(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.isAccelerated() -> bool """
return False
def isReadOnly(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.isReadOnly() -> bool """
return False
def keyboardTracking(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.keyboardTracking() -> bool """
return False
def keyPressEvent(self, QKeyEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.keyPressEvent(QKeyEvent) """
pass
def keyReleaseEvent(self, QKeyEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.keyReleaseEvent(QKeyEvent) """
pass
def lineEdit(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.lineEdit() -> QLineEdit """
return QLineEdit
def minimumSizeHint(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.minimumSizeHint() -> QSize """
pass
def mouseMoveEvent(self, QMouseEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.mouseMoveEvent(QMouseEvent) """
pass
def mousePressEvent(self, QMouseEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.mousePressEvent(QMouseEvent) """
pass
def mouseReleaseEvent(self, QMouseEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.mouseReleaseEvent(QMouseEvent) """
pass
def paintEvent(self, QPaintEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.paintEvent(QPaintEvent) """
pass
def resizeEvent(self, QResizeEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.resizeEvent(QResizeEvent) """
pass
def selectAll(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.selectAll() """
pass
def setAccelerated(self, bool): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setAccelerated(bool) """
pass
def setAlignment(self, Qt_Alignment): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setAlignment(Qt.Alignment) """
pass
def setButtonSymbols(self, QAbstractSpinBox_ButtonSymbols): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setButtonSymbols(QAbstractSpinBox.ButtonSymbols) """
pass
def setCorrectionMode(self, QAbstractSpinBox_CorrectionMode): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setCorrectionMode(QAbstractSpinBox.CorrectionMode) """
pass
def setFrame(self, bool): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setFrame(bool) """
pass
def setKeyboardTracking(self, bool): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setKeyboardTracking(bool) """
pass
def setLineEdit(self, QLineEdit): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setLineEdit(QLineEdit) """
pass
def setReadOnly(self, bool): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setReadOnly(bool) """
pass
def setSpecialValueText(self, p_str): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setSpecialValueText(str) """
pass
def setWrapping(self, bool): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.setWrapping(bool) """
pass
def showEvent(self, QShowEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.showEvent(QShowEvent) """
pass
def sizeHint(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.sizeHint() -> QSize """
pass
def specialValueText(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.specialValueText() -> str """
return ""
def stepBy(self, p_int): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.stepBy(int) """
pass
def stepDown(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.stepDown() """
pass
def stepEnabled(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.stepEnabled() -> QAbstractSpinBox.StepEnabled """
pass
def stepUp(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.stepUp() """
pass
def text(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.text() -> str """
return ""
def timerEvent(self, QTimerEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.timerEvent(QTimerEvent) """
pass
def validate(self, p_str, p_int): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.validate(str, int) -> (QValidator.State, str, int) """
pass
def wheelEvent(self, QWheelEvent): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.wheelEvent(QWheelEvent) """
pass
def wrapping(self): # real signature unknown; restored from __doc__
""" QAbstractSpinBox.wrapping() -> bool """
return False
def __init__(self, QWidget_parent=None): # real signature unknown; restored from __doc__
pass
ButtonSymbols = None # (!) real value is ''
CorrectionMode = None # (!) real value is ''
CorrectToNearestValue = 1
CorrectToPreviousValue = 0
NoButtons = 2
PlusMinus = 1
StepDownEnabled = 2
StepEnabled = None # (!) real value is ''
StepEnabledFlag = None # (!) real value is ''
StepNone = 0
StepUpEnabled = 1
UpDownArrows = 0
# ---------------------------------------------------------------------------
# spencerpomme/coconuts-on-fire :: person.py  (Apache-2.0)
# ---------------------------------------------------------------------------
from classtools import AttrDisplay
class Person(AttrDisplay):
'''
Create and process person records
'''
def __init__(self, name, job=None, pay=0):
self.name = name
self.job = job
self.pay = pay
def lastName(self):
return self.name.split()[-1]
def giveRaise(self, percent):
self.pay = int(self.pay *(1 + percent))
class Manager(Person):
def __init__(self, name, pay):
Person.__init__(self, name, 'mgr', pay)
def giveRaise(self, percent, bonus=.10):
Person.giveRaise(self, percent+bonus)
class Department:
def __init__(self, *args):
self.members = list(args)
def addMember(self, person):
self.members.append(person)
def giveRaise(self, percent):
for person in self.members:
person.giveRaise(percent)
def showAll(self):
for person in self.members:
print(person)
if __name__ == '__main__':
bob = Person('Bob Smith')
sue = Person('Sue Jones', job='dev', pay=100000)
tom = Manager('Tom Jones', pay=50000)
development = Department(bob, sue)
development.addMember(tom)
development.giveRaise(.10)
development.showAll()
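# For reference (added): the classtools.AttrDisplay mixin imported above is
# not included in this snapshot. A common minimal version (after Lutz's
# "Learning Python") looks like this; the real module may differ:
#
# class AttrDisplay:
#     """Provide a generic __repr__ that lists instance attributes."""
#     def gatherAttrs(self):
#         return ', '.join('%s=%s' % (key, getattr(self, key))
#                          for key in sorted(self.__dict__))
#     def __repr__(self):
#         return '[%s: %s]' % (self.__class__.__name__, self.gatherAttrs())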
# ---------------------------------------------------------------------------
# octopicorn/cloudbrain :: cloudbrain/connectors/MockConnector.py  (AGPL-3.0)
# ---------------------------------------------------------------------------
import time
import random
from cloudbrain.connectors.ConnectorInterface import Connector
from cloudbrain.utils.metadata_info import get_num_channels
class MockConnector(Connector):
def __init__(self, publishers, buffer_size, device_name, device_port='mock_port', device_mac=None):
"""
:return:
"""
super(MockConnector, self).__init__(publishers, buffer_size, device_name, device_port, device_mac)
self.data_generators = [self.data_generator_factory(metric, get_num_channels(self.device_name, metric)) for metric in self.metrics]
def connect_device(self):
"""
Mock connector so actually, don't do anything there :-)
:return:
"""
pass
def start(self):
while 1:
for data_generator in self.data_generators:
data_generator()
time.sleep(1)
def data_generator_factory(self, metric_name, num_channels):
def data_generator():
message = {"channel_%s" % i: random.random() * 10 for i in xrange(num_channels)}
message['timestamp'] = int(time.time() * 1000000) # micro seconds
print message
self.buffers[metric_name].write(message)
return data_generator
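# Usage sketch (added for illustration; not part of the original file).
# The publisher list and device name below are placeholders; a real caller
# passes publisher objects and a device registered in metadata_info:
#
# connector = MockConnector(publishers=[my_publisher], buffer_size=10,
#                           device_name='openbci')  # hypothetical device
# connector.connect_device()  # no-op for the mock
# connector.start()           # loops forever, one sample per metric per second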
# ---------------------------------------------------------------------------
# insequent/quark :: quark/cache/security_groups_client.py  (Apache-2.0)
# ---------------------------------------------------------------------------
# Copyright 2014 Openstack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import json
import netaddr
from oslo_log import log as logging
from quark.cache import redis_base
from quark import exceptions as q_exc
from quark import protocols
from quark import utils
LOG = logging.getLogger(__name__)
SECURITY_GROUP_RULE_KEY = "rules"
SECURITY_GROUP_HASH_ATTR = "security group rules"
SECURITY_GROUP_ACK = "security group ack"
ALL_V4 = netaddr.IPNetwork("::ffff:0.0.0.0/96")
ALL_V6 = netaddr.IPNetwork("::/0")
class SecurityGroupsClient(redis_base.ClientBase):
def _convert_remote_network(self, remote_ip_prefix):
# NOTE(mdietz): RM11364 - While a /0 is valid and should be supported,
# it breaks OVS to apply a /0 as the source or
# destination network.
net = netaddr.IPNetwork(remote_ip_prefix).ipv6()
if net.cidr == ALL_V4 or net.cidr == ALL_V6:
return ''
return str(net)
def serialize_rules(self, rules):
"""Creates a payload for the redis server."""
# TODO(mdietz): If/when we support other rule types, this comment
# will have to be revised.
# Action and direction are static, for now. The implementation may
# support 'deny' and 'egress' respectively in the future. We allow
# the direction to be set to something else, technically, but current
# plugin level call actually raises. It's supported here for unit
# test purposes at this time
serialized = []
for rule in rules:
direction = rule["direction"]
source = ''
destination = ''
if rule.get("remote_ip_prefix"):
prefix = rule["remote_ip_prefix"]
if direction == "ingress":
source = self._convert_remote_network(prefix)
else:
destination = self._convert_remote_network(prefix)
optional_fields = {}
# NOTE(mdietz): this will expand as we add more protocols
protocol_map = protocols.PROTOCOL_MAP[rule["ethertype"]]
if rule["protocol"] == protocol_map["icmp"]:
optional_fields["icmp type"] = rule["port_range_min"]
optional_fields["icmp code"] = rule["port_range_max"]
else:
optional_fields["port start"] = rule["port_range_min"]
optional_fields["port end"] = rule["port_range_max"]
payload = {"ethertype": rule["ethertype"],
"protocol": rule["protocol"],
"source network": source,
"destination network": destination,
"action": "allow",
"direction": direction}
payload.update(optional_fields)
serialized.append(payload)
return serialized
def serialize_groups(self, groups):
"""Creates a payload for the redis server
The rule schema is the following:
REDIS KEY - port_device_id.port_mac_address/sg
REDIS VALUE - A JSON dump of the following:
port_mac_address must be lower-cased and stripped of non-alphanumeric
characters
{"id": "<arbitrary uuid>",
"rules": [
{"ethertype": <hexademical integer>,
"protocol": <integer>,
"port start": <integer>, # optional
"port end": <integer>, # optional
"icmp type": <integer>, # optional
"icmp code": <integer>, # optional
"source network": <string>,
"destination network": <string>,
"action": <string>,
"direction": <string>},
],
"security groups ack": <boolean>
}
Example:
{"id": "004c6369-9f3d-4d33-b8f5-9416bf3567dd",
"rules": [
{"ethertype": 0x800,
"protocol": "tcp",
"port start": 1000,
"port end": 1999,
"source network": "10.10.10.0/24",
"destination network": "",
"action": "allow",
"direction": "ingress"},
],
"security groups ack": "true"
}
port start/end and icmp type/code are mutually exclusive pairs.
"""
rules = []
for group in groups:
rules.extend(self.serialize_rules(group.rules))
return rules
def get_rules_for_port(self, device_id, mac_address):
rules = self.get_field(
self.vif_key(device_id, mac_address), SECURITY_GROUP_HASH_ATTR)
if rules:
return json.loads(rules)
def apply_rules(self, device_id, mac_address, rules):
"""Writes a series of security group rules to a redis server."""
LOG.info("Applying security group rules for device %s with MAC %s" %
(device_id, mac_address))
if not self._use_master:
raise q_exc.RedisSlaveWritesForbidden()
rule_dict = {SECURITY_GROUP_RULE_KEY: rules}
redis_key = self.vif_key(device_id, mac_address)
# TODO(mdietz): Pipeline these. Requires some rewriting
self.set_field(redis_key, SECURITY_GROUP_HASH_ATTR, rule_dict)
self.set_field_raw(redis_key, SECURITY_GROUP_ACK, False)
def delete_vif_rules(self, device_id, mac_address):
# Redis HDEL command will ignore key safely if it doesn't exist
self.delete_field(self.vif_key(device_id, mac_address),
SECURITY_GROUP_HASH_ATTR)
self.delete_field(self.vif_key(device_id, mac_address),
SECURITY_GROUP_ACK)
def delete_vif(self, device_id, mac_address):
# Redis DEL command will ignore key safely if it doesn't exist
self.delete_key(self.vif_key(device_id, mac_address))
@utils.retry_loop(3)
def get_security_group_states(self, interfaces):
"""Gets security groups for interfaces from Redis
Returns a dictionary of xapi.VIFs with values of the current
acknowledged status in Redis.
States not explicitly handled:
* ack key, no rules - This is the same as just tagging the VIF,
the instance will be inaccessible
* rules key, no ack - Nothing will happen, the VIF will
not be tagged.
"""
LOG.debug("Getting security groups from Redis for {0}".format(
interfaces))
interfaces = tuple(interfaces)
vif_keys = [self.vif_key(vif.device_id, vif.mac_address)
for vif in interfaces]
security_groups = self.get_fields(vif_keys, SECURITY_GROUP_ACK)
ret = {}
for vif, security_group_ack in zip(interfaces, security_groups):
if security_group_ack:
security_group_ack = security_group_ack.lower()
if "true" in security_group_ack:
ret[vif] = True
elif "false" in security_group_ack:
ret[vif] = False
else:
LOG.debug("Skipping bad ack value %s" % security_group_ack)
return ret
@utils.retry_loop(3)
def update_group_states_for_vifs(self, vifs, ack):
"""Updates security groups by setting the ack field"""
if not self._use_master:
raise q_exc.RedisSlaveWritesForbidden()
vif_keys = [self.vif_key(vif.device_id, vif.mac_address)
for vif in vifs]
self.set_fields(vif_keys, SECURITY_GROUP_ACK, ack)
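# Worked example (added for illustration; not part of the original file).
# What serialize_rules() yields for one ingress TCP rule -- the 'IPv4'
# ethertype key into PROTOCOL_MAP is an assumption, but the payload layout
# matches the docstring above:
#
#   rule = {"direction": "ingress", "ethertype": "IPv4", "protocol": "tcp",
#           "remote_ip_prefix": "10.10.10.0/24",
#           "port_range_min": 1000, "port_range_max": 1999}
#
#   client.serialize_rules([rule])  ==>
#       [{"ethertype": "IPv4", "protocol": "tcp",
#         "source network": "::ffff:10.10.10.0/120",  # v4 prefix mapped to v6
#         "destination network": "", "action": "allow",
#         "direction": "ingress", "port start": 1000, "port end": 1999}]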
# ---------------------------------------------------------------------------
# maaaks/andreas :: andreas/db/model.py  (MIT)
# ---------------------------------------------------------------------------
from typing import Dict, List, Optional, Tuple, Type
from playhouse import signals
from andreas.db.database import db
class Model(signals.Model):
class Meta:
database = db
schema = 'andreas'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._models_to_save_after_myself: List[Tuple[Model,Dict]] = []
@classmethod
def table(cls) -> str:
return f'{cls._meta.schema}.{cls._meta.table_name}'
@classmethod
def triggers(cls) -> Optional[Dict[str,str]]:
return None
@classmethod
def create_table(cls, fail_silently=False):
"""
        Creates a table for the given model and creates/recreates all the triggers on it.
"""
super().create_table(fail_silently=fail_silently)
if cls.triggers():
# Remove the old triggers
for event in 'insert', 'update', 'delete', 'truncate':
for when in 'before', 'after', 'instead_of':
db.execute_sql(f'drop trigger if exists {when}_{event} on {cls.table()}')
db.execute_sql(f'drop function if exists on_{cls.table()}_{when}_{event}()')
# Create new triggers
for when, code in cls.triggers().items():
trigger_name = when.replace(' ', '_')
                code = code.rstrip('; \t\n')
db.execute_sql(
f'create or replace function {cls.table()}_{trigger_name}() returns trigger '
f'as $$ begin {code}; end $$ language plpgsql')
db.execute_sql(
f'create trigger {trigger_name} {when} on {cls.table()} '
f'for each row execute procedure {cls.table()}_{trigger_name}()')
def reload(self):
"""
Updates all the fields from the database.
"""
newer_self = self.get(self._pk_expr())
for field_name in self._meta.fields.keys():
val = getattr(newer_self, field_name)
setattr(self, field_name, val)
self._dirty.clear()
def save_after(self, dependency: 'Model', **kwargs) -> None:
"""
        Registers a handler that will automatically save this model as soon as `dependency` is saved.
This handler works only once and unregisters itself after finishing its work.
"""
dependency._models_to_save_after_myself.append((self, kwargs))
@classmethod
def create_after(cls, dependency: 'Model', **kwargs) -> 'Model':
"""
        Creates an instance and registers a handler that will automatically save it as soon as `dependency` is saved.
This handler works only once and unregisters itself after finishing its work.
"""
instance = cls(**kwargs)
dependency._models_to_save_after_myself.append((instance, {}))
return instance
@signals.post_save()
def post_save(model_class: Type[Model], instance: Model, created: bool):
"""
After an object is saved, all other models that waited for it will be automatically saved, too.
"""
for model, kwargs in instance._models_to_save_after_myself:
model.save(**kwargs)
instance._models_to_save_after_myself = [] | mit | 6,112,106,448,212,380,000 | 37.302326 | 117 | 0.580929 | false | 4.189567 | false | false | false |
# ---------------------------------------------------------------------------
# enavarro222/bblamp :: webserver.py  (AGPL-3.0)
# ---------------------------------------------------------------------------
#!/usr/bin/python
#-*- coding:utf-8 -*-
import os
import sys
import json
# Make sure your gevent version is >= 1.0
import gevent
from gevent.wsgi import WSGIServer
from gevent.queue import Queue
from flask import Flask, Response
from flask import render_template, jsonify
from utils import ServerSentEvent
from api import lapps
from api import get_lapp_status
from errors import BBLampException
#TODO: hardware ?
from simulate import simu
import config
# the Flask app
bblamp_app = Flask(__name__)
bblamp_app.debug = True
# app API
bblamp_app.register_blueprint(lapps, url_prefix="/v1")
# lamp simulation API
bblamp_app.register_blueprint(simu, url_prefix="/simu/v1")
# app shared state variables
subscriptions = []
#-------------------------------------------------------------------------------
@bblamp_app.errorhandler(BBLampException)
def handle_invalid_lapp_name(error):
""" BBLampException handler
"""
response = jsonify(error.to_dict())
response.status_code = error.status_code
return response
#-------------------------------------------------------------------------------
# lapp log API (push)
@bblamp_app.route("/log/debug")
def log_debug():
return "Currently %d subscriptions" % len(subscriptions)
@bblamp_app.route("/log/subscribe")
def log_subscribe():
def gen():
q = Queue()
subscriptions.append(q)
try:
while True:
result = q.get()
ev = ServerSentEvent(str(result))
yield ev.encode()
except GeneratorExit: # Or maybe use flask signals
subscriptions.remove(q)
return Response(gen(), mimetype="text/event-stream")
def send_data(dtype, data):
""" Send data to the clients
"""
output = {
"dtype": dtype,
"data": data
}
for sub in subscriptions[:]:
print("%s : %s" % (dtype, data))
sub.put(json.dumps(output))
def new_lapp_output(msg):
send_data("output", msg)
def new_lapp_logmsg(msg):
send_data("log", msg)
def new_lapp_status():
send_data("status", get_lapp_status())
def monitor_logging_file(filename, output_fct):
""" pseudo therad (gevent) that monitor a log file
"""
while True:
try:
with open(filename, "r") as in_file:
#seek to the end
# in order to not send all already in the file lines
in_file.seek(0, os.SEEK_END)
while True:
# check the file still exist
# cf: http://stackoverflow.com/a/12690767
if os.fstat(in_file.fileno()).st_nlink == 0:
break # try to reopen it if it has been deleted
# try to read next line
log_line = in_file.readline()
if log_line != "":
# Search if next lines are for the same log "line"
## wait short time to be sure to not miss next "same log line"
gevent.sleep(2e-3)
last_pos = in_file.tell()
nextline = in_file.readline()
while not (nextline == "" or nextline.startswith("LampApp")): # = not a new log line
log_line += nextline
# wait short time to be sure to not miss next "same log line"
gevent.sleep(2e-3)
last_pos = in_file.tell()
nextline = in_file.readline()
# push log_line
output_fct(log_line)
# and seek back to the next log line (seek to the same position)
in_file.seek(last_pos)
gevent.sleep(0.1)
except IOError as error:
            # errno 2 (ENOENT): the log file doesn't exist yet
if error.errno == 2:
#TODO: add logging
gevent.sleep(1)
else:
raise
def monitor_lapp_logfile():
monitor_logging_file(config.LAPP_LOGFILE, new_lapp_logmsg)
def monitor_lapp_outfile():
monitor_logging_file(config.LAPP_OUTFILE, new_lapp_output)
def monitor_lapp_status():
while True:
last_status = get_lapp_status()
while last_status["hash"] == get_lapp_status()["hash"]:
gevent.sleep(0.4)
new_lapp_status()
gevent.sleep(0.4)
#-------------------------------------------------------------------------------
# single page app getter
@bblamp_app.route("/")
@bblamp_app.route("/<string:lapp_name>")
def main_page(lapp_name=None):
return render_template("index.html")
@bblamp_app.route("/ltest")
def logging_test():
return render_template("log_test.html")
#-------------------------------------------------------------------------------
def main():
print("<run>")
# file monitoring
monitor_log_worker = gevent.spawn(monitor_lapp_logfile)
monitor_output_worker = gevent.spawn(monitor_lapp_outfile)
monitor_status_worker = gevent.spawn(monitor_lapp_status)
# web server
server = WSGIServer(("0.0.0.0", 5000), bblamp_app)
server.serve_forever()
print("<run_done>")
return 0
if __name__ == "__main__":
sys.exit(main())
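# Client-side sketch (added for illustration; not part of the original file).
# Consuming the /log/subscribe stream from Python, assuming ServerSentEvent
# frames each message as a conventional `data: <json>` SSE line and that the
# `requests` package is installed:
#
# import requests
# resp = requests.get('http://localhost:5000/log/subscribe', stream=True)
# for raw in resp.iter_lines():
#     if raw.startswith(b'data:'):
#         payload = json.loads(raw[len(b'data:'):].decode())
#         print(payload['dtype'], payload['data'])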
# ---------------------------------------------------------------------------
# jantman/awslimitchecker :: awslimitchecker/tests/test_utils.py  (AGPL-3.0)
# ---------------------------------------------------------------------------
"""
awslimitchecker/tests/test_utils.py
The latest version of this package is available at:
<https://github.com/jantman/awslimitchecker>
##############################################################################
Copyright 2015-2018 Jason Antman <[email protected]>
This file is part of awslimitchecker, also known as awslimitchecker.
awslimitchecker is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
awslimitchecker is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with awslimitchecker. If not, see <http://www.gnu.org/licenses/>.
The Copyright and Authors attributions contained herein may not be removed or
otherwise altered, except to add the Author attribution of a contributor to
this work. (Additional Terms pursuant to Section 7b of the AGPL v3)
##############################################################################
While not legally required, I sincerely request that anyone who finds
bugs please submit them at <https://github.com/jantman/awslimitchecker> or
to me via email, and that you send any contributions or improvements
either as a pull request on GitHub, or to me via email.
##############################################################################
AUTHORS:
Jason Antman <[email protected]> <http://www.jasonantman.com>
##############################################################################
"""
import argparse
import pytest
import sys
import termcolor
from awslimitchecker.limit import AwsLimit, AwsLimitUsage
from awslimitchecker.utils import (
StoreKeyValuePair, dict2cols, paginate_dict, _get_dict_value_by_path,
_set_dict_value_by_path, _get_latest_version, color_output,
issue_string_tuple
)
# https://code.google.com/p/mock/issues/detail?id=249
# py>=3.4 should use unittest.mock not the mock package on pypi
if (
sys.version_info[0] < 3 or
sys.version_info[0] == 3 and sys.version_info[1] < 4
):
from mock import call, Mock, patch
else:
from unittest.mock import call, Mock, patch
pbm = 'awslimitchecker.utils'
class TestStoreKeyValuePair(object):
def test_argparse_works(self):
parser = argparse.ArgumentParser()
parser.add_argument('--foo', action='store', type=str)
res = parser.parse_args(['--foo=bar'])
assert res.foo == 'bar'
def test_long(self):
parser = argparse.ArgumentParser()
parser.add_argument('--one', action=StoreKeyValuePair)
res = parser.parse_args(['--one=foo=bar'])
assert res.one == {'foo': 'bar'}
def test_short(self):
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--one', action=StoreKeyValuePair)
res = parser.parse_args(['-o', 'foo=bar'])
assert res.one == {'foo': 'bar'}
def test_multi_long(self):
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--one', action=StoreKeyValuePair)
res = parser.parse_args(['--one=foo=bar', '--one=baz=blam'])
assert res.one == {'foo': 'bar', 'baz': 'blam'}
def test_multi_short(self):
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--one', action=StoreKeyValuePair)
res = parser.parse_args(['-o', 'foo=bar', '-o', 'baz=blam'])
assert res.one == {'foo': 'bar', 'baz': 'blam'}
def test_no_equals(self):
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--one', action=StoreKeyValuePair)
with pytest.raises(SystemExit) as excinfo:
parser.parse_args(['-o', 'foobar'])
if sys.version_info[0] > 2:
msg = excinfo.value.args[0]
else:
msg = excinfo.value.message
assert msg == 2
def test_quoted(self):
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--one', action=StoreKeyValuePair)
res = parser.parse_args([
'-o',
'"foo some"=bar',
'--one="baz other"=blam'
])
assert res.one == {'foo some': 'bar', 'baz other': 'blam'}
class Test_dict2cols(object):
def test_simple(self):
d = {'foo': 'bar', 'baz': 'blam'}
res = dict2cols(d)
assert res == 'baz blam\nfoo bar\n'
def test_spaces(self):
d = {'foo': 'bar', 'baz': 'blam'}
res = dict2cols(d, spaces=4)
assert res == 'baz blam\nfoo bar\n'
def test_less_simple(self):
d = {
'zzz': 'bar',
'aaa': 'blam',
'abcdefghijklmnopqrstuv': 'someothervalue',
}
res = dict2cols(d)
assert res == '' + \
'aaa blam\n' + \
'abcdefghijklmnopqrstuv someothervalue\n' + \
'zzz bar\n'
def test_separator(self):
d = {'foo': 'bar', 'baz': 'blam'}
res = dict2cols(d, spaces=4, separator='.')
assert res == 'baz....blam\nfoo....bar\n'
def test_empty(self):
d = {}
res = dict2cols(d)
assert res == ''
class TestPaginateDict(object):
def test_no_marker_path(self):
func = Mock()
with pytest.raises(Exception) as excinfo:
paginate_dict(func)
ex_str = "alc_marker_path must be specified for queries " \
"that return a dict."
assert ex_str in str(excinfo.value)
def test_no_data_path(self):
func = Mock()
with pytest.raises(Exception) as excinfo:
paginate_dict(func, alc_marker_path=[])
ex_str = "alc_data_path must be specified for queries " \
"that return a dict."
assert ex_str in str(excinfo.value)
def test_no_marker_param(self):
func = Mock()
with pytest.raises(Exception) as excinfo:
paginate_dict(
func,
alc_marker_path=[],
alc_data_path=[]
)
ex_str = "alc_marker_param must be specified for queries " \
"that return a dict."
assert ex_str in str(excinfo.value)
def test_bad_path(self):
result = {
'k1': {
'badpath': {}
}
}
func = Mock()
func.return_value = result
res = paginate_dict(
func,
alc_marker_path=['k1', 'k2', 'Marker'],
alc_data_path=['k1', 'k2', 'Data'],
alc_marker_param='Marker'
)
assert res == result
assert func.mock_calls == [call()]
def test_no_marker(self):
result = {
'k1': {
'k2': {
'Data': []
}
}
}
func = Mock()
func.return_value = result
res = paginate_dict(
func,
alc_marker_path=['k1', 'k2', 'Marker'],
alc_data_path=['k1', 'k2', 'Data'],
alc_marker_param='Marker'
)
assert res == result
assert func.mock_calls == [call()]
def test_two_iterations(self):
e1 = Mock()
e2 = Mock()
e3 = Mock()
e4 = Mock()
e5 = Mock()
e6 = Mock()
func = Mock()
res1 = {
'k1': {
'k2': {
'Data': [e1, e2],
'Foo1': 'bar1',
'Marker': 'marker1'
}
}
}
res2 = {
'k1': {
'k2': {
'Data': [e3, e4],
'Foo2': 'bar2',
'Marker': 'marker2'
}
}
}
res3 = {
'k1': {
'k2': {
'Data': [e5, e6],
'Foo3': 'bar3'
}
}
}
expected = {
'k1': {
'k2': {
'Data': [e1, e2, e3, e4, e5, e6],
'Foo3': 'bar3'
}
}
}
func.side_effect = [res1, res2, res3]
res = paginate_dict(
func,
'foo',
bar='baz',
alc_marker_path=['k1', 'k2', 'Marker'],
alc_data_path=['k1', 'k2', 'Data'],
alc_marker_param='MarkerParam'
)
assert res == expected
assert func.mock_calls == [
call('foo', bar='baz'),
call(
'foo',
bar='baz',
MarkerParam='marker1'
),
call(
'foo',
bar='baz',
MarkerParam='marker2'
)
]
class TestDictFuncs(object):
def test_get_dict_value_by_path(self):
d = {
'foo': {
'bar': {
'baz': 'bazval'
}
}
}
path = ['foo', 'bar', 'baz']
res = _get_dict_value_by_path(d, path)
assert res == 'bazval'
# make sure we don't modify inputs
assert path == ['foo', 'bar', 'baz']
assert d == {
'foo': {
'bar': {
'baz': 'bazval'
}
}
}
def test_get_dict_value_by_path_obj(self):
e1 = Mock()
e2 = Mock()
d = {
'k1': {
'k2': {
'Marker': 'marker2',
'Data': [e1, e2],
'Foo2': 'bar2'
}
}
}
res = _get_dict_value_by_path(d, ['k1', 'k2', 'Data'])
assert res == [e1, e2]
def test_get_dict_value_by_path_none(self):
d = {
'foo': {
'bar': {
'blam': 'blarg'
}
}
}
res = _get_dict_value_by_path(d, ['foo', 'bar', 'baz'])
assert res is None
def test_get_dict_value_by_path_deep_none(self):
d = {'baz': 'blam'}
res = _get_dict_value_by_path(d, ['foo', 'bar', 'baz'])
assert res is None
def test_set_dict_value_by_path(self):
d = {
'foo': {
'bar': {
'baz': 'bazval'
}
}
}
path = ['foo', 'bar', 'baz']
res = _set_dict_value_by_path(d, 'blam', path)
assert res == {
'foo': {
'bar': {
'baz': 'blam'
}
}
}
# make sure we don't modify inputs
assert path == ['foo', 'bar', 'baz']
assert d == {
'foo': {
'bar': {
'baz': 'bazval'
}
}
}
def test_set_dict_value_by_path_none(self):
d = {
'foo': {
'bar': {
'blam': 'blarg'
}
}
}
res = _set_dict_value_by_path(d, 'blam', ['foo', 'bar', 'baz'])
assert res == {
'foo': {
'bar': {
'baz': 'blam',
'blam': 'blarg'
}
}
}
def test_set_dict_value_by_path_deep_none(self):
d = {'foo': 'bar'}
with pytest.raises(TypeError):
_set_dict_value_by_path(d, 'blam', ['foo', 'bar', 'baz'])
def test_set_dict_value_by_path_empty(self):
d = {'foo': 'bar'}
res = _set_dict_value_by_path(d, 'baz', [])
assert res == d
class TestGetCurrentVersion(object):
def test_exception(self):
mock_http = Mock()
with patch('%s._VERSION_TUP' % pbm, (0, 2, 3)):
with patch('%s.urllib3.PoolManager' % pbm, autospec=True) as m_pm:
with patch('%s.logger' % pbm, autospec=True) as mock_logger:
m_pm.return_value = mock_http
mock_http.request.side_effect = RuntimeError()
res = _get_latest_version()
assert res is None
assert mock_logger.mock_calls == [
call.debug('Error getting latest version from PyPI', exc_info=True)
]
def test_older(self):
mock_http = Mock()
mock_resp = Mock(
status=200, data='{"info": {"version": "1.0.1"}}'
)
with patch('%s._VERSION_TUP' % pbm, (0, 2, 3)):
with patch('%s.urllib3.PoolManager' % pbm, autospec=True) as m_pm:
with patch('%s.logger' % pbm, autospec=True) as mock_logger:
m_pm.return_value = mock_http
mock_http.request.return_value = mock_resp
res = _get_latest_version()
assert res == '1.0.1'
assert mock_logger.mock_calls == []
def test_equal(self):
mock_http = Mock()
mock_resp = Mock(
status=200, data='{"info": {"version": "0.2.3"}}'
)
with patch('%s._VERSION_TUP' % pbm, (0, 2, 3)):
with patch('%s.urllib3.PoolManager' % pbm, autospec=True) as m_pm:
with patch('%s.logger' % pbm, autospec=True) as mock_logger:
m_pm.return_value = mock_http
mock_http.request.return_value = mock_resp
res = _get_latest_version()
assert res is None
assert mock_logger.mock_calls == []
def test_newer(self):
mock_http = Mock()
mock_resp = Mock(
status=200, data='{"info": {"version": "0.1.2"}}'
)
with patch('%s._VERSION_TUP' % pbm, (0, 2, 3)):
with patch('%s.urllib3.PoolManager' % pbm, autospec=True) as m_pm:
with patch('%s.logger' % pbm, autospec=True) as mock_logger:
m_pm.return_value = mock_http
mock_http.request.return_value = mock_resp
res = _get_latest_version()
assert res is None
assert mock_logger.mock_calls == []
class TestColorOutput(object):
def test_colored(self):
assert color_output('foo', 'yellow') == termcolor.colored(
'foo', 'yellow')
def test_not_colored(self):
assert color_output(
'foo', 'yellow', colorize=False
) == 'foo'
class TestIssueStringTuple(object):
def test_crit_one(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 12
c1 = AwsLimitUsage(mock_limit, 56)
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[c1],
[]
)
assert res == ('svcname/limitname',
'(limit 12) xXCRITICAL: 56Xx')
assert m_co.mock_calls == [
call('CRITICAL: 56', 'red', colorize=True)
]
def test_crit_multi(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 5
c1 = AwsLimitUsage(mock_limit, 10)
c2 = AwsLimitUsage(mock_limit, 12, resource_id='c2id')
c3 = AwsLimitUsage(mock_limit, 8)
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[c1, c2, c3],
[]
)
assert res == ('svcname/limitname',
'(limit 5) xXCRITICAL: 8, 10, c2id=12Xx')
assert m_co.mock_calls == [
call('CRITICAL: 8, 10, c2id=12', 'red', colorize=True)
]
def test_warn_one(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 12
w1 = AwsLimitUsage(mock_limit, 11)
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[],
[w1]
)
assert res == ('svcname/limitname', '(limit 12) xXWARNING: 11Xx')
assert m_co.mock_calls == [
call('WARNING: 11', 'yellow', colorize=True)
]
def test_warn_multi(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 12
w1 = AwsLimitUsage(mock_limit, 11)
w2 = AwsLimitUsage(mock_limit, 10, resource_id='w2id')
w3 = AwsLimitUsage(mock_limit, 10, resource_id='w3id')
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[],
[w1, w2, w3]
)
assert res == ('svcname/limitname',
'(limit 12) xXWARNING: w2id=10, w3id=10, 11Xx')
assert m_co.mock_calls == [
call('WARNING: w2id=10, w3id=10, 11', 'yellow', colorize=True)
]
def test_both_one(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 12
c1 = AwsLimitUsage(mock_limit, 10)
w1 = AwsLimitUsage(mock_limit, 10, resource_id='w3id')
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[c1],
[w1],
colorize=False
)
assert res == ('svcname/limitname',
'(limit 12) xXCRITICAL: 10Xx xXWARNING: w3id=10Xx')
assert m_co.mock_calls == [
call('CRITICAL: 10', 'red', colorize=False),
call('WARNING: w3id=10', 'yellow', colorize=False)
]
def test_both_multi(self):
mock_limit = Mock(spec_set=AwsLimit)
type(mock_limit).name = 'limitname'
mock_limit.get_limit.return_value = 12
c1 = AwsLimitUsage(mock_limit, 10)
c2 = AwsLimitUsage(mock_limit, 12, resource_id='c2id')
c3 = AwsLimitUsage(mock_limit, 8)
w1 = AwsLimitUsage(mock_limit, 11)
w2 = AwsLimitUsage(mock_limit, 10, resource_id='w2id')
w3 = AwsLimitUsage(mock_limit, 10, resource_id='w3id')
def se_color(s, c, colorize=True):
return 'xX%sXx' % s
with patch('%s.color_output' % pbm) as m_co:
m_co.side_effect = se_color
res = issue_string_tuple(
'svcname',
mock_limit,
[c1, c2, c3],
[w1, w2, w3]
)
assert res == ('svcname/limitname',
'(limit 12) xXCRITICAL: 8, 10, c2id=12Xx '
'xXWARNING: w2id=10, w3id=10, 11Xx')
assert m_co.mock_calls == [
call('CRITICAL: 8, 10, c2id=12', 'red', colorize=True),
call('WARNING: w2id=10, w3id=10, 11', 'yellow', colorize=True)
]
| agpl-3.0 | 8,122,354,500,113,535,000 | 30.225352 | 79 | 0.481481 | false | 3.67053 | true | false | false |
DOAJ/doaj | portality/forms/application_processors.py | 1 | 46290 | import uuid
from datetime import datetime
import portality.notifications.application_emails as emails
from portality.core import app
from portality import models, constants, app_email
from portality.lib.formulaic import FormProcessor
from portality.ui.messages import Messages
from portality.crosswalks.application_form import ApplicationFormXWalk
from portality.crosswalks.journal_form import JournalFormXWalk
from portality.formcontext.choices import Choices
from portality.bll import exceptions
from flask import url_for, request, has_request_context
from flask_login import current_user
from wtforms import FormField, FieldList
class ApplicationProcessor(FormProcessor):
def pre_validate(self):
# to bypass WTForms insistence that choices on a select field match the value, outside of the actual validation
# chain
super(ApplicationProcessor, self).pre_validate()
def _carry_fixed_aspects(self):
if self.source is None:
raise Exception("Cannot carry data from a non-existent source")
now = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
# copy over any important fields from the previous version of the object
created_date = self.source.created_date if self.source.created_date else now
self.target.set_created(created_date)
if "id" in self.source.data:
self.target.data['id'] = self.source.data['id']
try:
if self.source.date_applied is not None:
self.target.date_applied = self.source.date_applied
except AttributeError:
# fixme: should there always be a date_applied? Only true for applications
pass
try:
if self.source.current_application:
self.target.set_current_application(self.source.current_application)
except AttributeError:
# this means that the source doesn't know about current_applications, which is fine
pass
try:
if self.source.current_journal:
self.target.set_current_journal(self.source.current_journal)
except AttributeError:
# this means that the source doesn't know about current_journals, which is fine
pass
try:
if self.source.related_journal:
self.target.set_related_journal(self.source.related_journal)
except AttributeError:
# this means that the source doesn't know about related_journals, which is fine
pass
try:
if self.source.related_applications:
related = self.source.related_applications
for rel in related:
self.target.add_related_application(rel.get("application_id"), rel.get("date_accepted"))
except AttributeError:
# this means that the source doesn't know about related_applications, which is fine
pass
# if the source is a journal, we need to carry the in_doaj flag
if isinstance(self.source, models.Journal):
self.target.set_in_doaj(self.source.is_in_doaj())
def _merge_notes_forward(self, allow_delete=False):
if self.source is None:
raise Exception("Cannot carry data from a non-existent source")
if self.target is None:
raise Exception("Cannot carry data on to a non-existent target - run the xwalk first")
# first off, get the notes (by reference) in the target and the notes from the source
tnotes = self.target.notes
snotes = self.source.notes
# if there are no notes, we might not have the notes by reference, so later will
# need to set them by value
apply_notes_by_value = len(tnotes) == 0
# for each of the target notes we need to get the original dates from the source notes
for n in tnotes:
for sn in snotes:
if n.get("id") == sn.get("id"):
n["date"] = sn.get("date")
# record the positions of any blank notes
i = 0
removes = []
for n in tnotes:
if n.get("note").strip() == "":
removes.append(i)
i += 1
# actually remove all the notes marked for deletion
removes.sort(reverse=True)
for r in removes:
tnotes.pop(r)
# finally, carry forward any notes that aren't already in the target
if not allow_delete:
for sn in snotes:
found = False
for tn in tnotes:
if sn.get("id") == tn.get("id"):
found = True
if not found:
tnotes.append(sn)
if apply_notes_by_value:
self.target.set_notes(tnotes)
def _carry_continuations(self):
if self.source is None:
raise Exception("Cannot carry data from a non-existent source")
try:
sbj = self.source.bibjson()
tbj = self.target.bibjson()
if sbj.replaces:
tbj.replaces = sbj.replaces
if sbj.is_replaced_by:
tbj.is_replaced_by = sbj.is_replaced_by
if sbj.discontinued_date:
tbj.discontinued_date = sbj.discontinued_date
except AttributeError:
# this means that the source doesn't know about current_applications, which is fine
pass
class NewApplication(ApplicationProcessor):
"""
Public Application Form Context. This is also a sort of demonstrator as to how to implement
one, so it will do unnecessary things like override methods that don't actually need to be overridden.
This should be used in a context where an unauthenticated user is making a request to put a journal into the
DOAJ. It does not have any edit capacity (i.e. the form can only be submitted once), and it does not provide
    any form fields other than the essential journal bibliographic, application bibliographic, and contact information
for the suggester. On submission, it will set the status to "pending" and the item will be available for review
by the editors
"""
############################################################
# PublicApplicationForm versions of FormProcessor lifecycle functions
############################################################
def draft(self, account, id=None, *args, **kwargs):
# check for validity
valid = self.validate()
# FIXME: if you can only save a valid draft, you cannot save a draft
# the draft to be saved needs to be valid
#if not valid:
# return None
def _resetDefaults(form):
for field in form:
if field.errors:
if isinstance(field, FormField):
_resetDefaults(field.form)
elif isinstance(field, FieldList):
for sub in field:
if isinstance(sub, FormField):
_resetDefaults(sub)
else:
sub.data = sub.default
else:
field.data = field.default
# if not valid, then remove all fields which have validation errors
if not valid:
_resetDefaults(self.form)
self.form2target()
draft_application = models.DraftApplication(**self.target.data)
if id is not None:
draft_application.set_id(id)
draft_application.set_application_status("draft")
draft_application.set_owner(account.id)
draft_application.save()
return draft_application
def finalise(self, account, save_target=True, email_alert=True, id=None):
super(NewApplication, self).finalise()
# set some administrative data
now = datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ")
self.target.date_applied = now
self.target.set_application_status(constants.APPLICATION_STATUS_PENDING)
self.target.set_owner(account.id)
self.target.set_last_manual_update()
if id:
replacing = models.Application.pull(id)
if replacing is None:
self.target.set_id(id)
else:
if replacing.application_status == constants.APPLICATION_STATUS_PENDING and replacing.owner == account.id:
self.target.set_id(id)
self.target.set_created(replacing.created_date)
# Finally save the target
if save_target:
self.target.save()
# a draft may have been saved, so also remove that
if id:
models.DraftApplication.remove_by_id(id)
if email_alert:
try:
emails.send_received_email(self.target)
except app_email.EmailException as e:
self.add_alert(Messages.FORMS__APPLICATION_PROCESSORS__NEW_APPLICATION__FINALISE__USER_EMAIL_ERROR)
app.logger.exception(Messages.FORMS__APPLICATION_PROCESSORS__NEW_APPLICATION__FINALISE__LOG_EMAIL_ERROR)
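# Hedged usage sketch (not part of the original module): the typical lifecycle
# of a public application, assuming `processor` is a NewApplication instance
# produced by the Formulaic form context and `account` is the logged-in user.
# The `draft_id` name below is illustrative only.
#
#   draft = processor.draft(account, id=draft_id)    # tolerant save of a work-in-progress
#   processor.finalise(account, id=draft_id)         # validate, save, set "pending", email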
class AdminApplication(ApplicationProcessor):
"""
Managing Editor's Application Review form. Should be used in a context where the form warrants full
    admin privileges. It will permit conversion of applications to journals, and assignment of owner account
as well as assignment to editorial group.
"""
def pre_validate(self):
# to bypass WTForms insistence that choices on a select field match the value, outside of the actual validation
# chain
super(AdminApplication, self).pre_validate()
self.form.editor.choices = [(self.form.editor.data, self.form.editor.data)]
# TODO: Should quick_reject be set through this form at all?
self.form.quick_reject.choices = [(self.form.quick_reject.data, self.form.quick_reject.data)]
def patch_target(self):
super(AdminApplication, self).patch_target()
# This patches the target with things that shouldn't change from the source
self._carry_fixed_aspects()
self._merge_notes_forward(allow_delete=True)
# NOTE: this means you can't unset an owner once it has been set. But you can change it.
if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None):
self.target.set_owner(self.source.owner)
def finalise(self, account, save_target=True, email_alert=True):
"""
account is the administrator account carrying out the action
"""
if self.source is None:
raise Exception("You cannot edit a not-existent application")
if self.source.application_status == constants.APPLICATION_STATUS_ACCEPTED:
raise Exception("You cannot edit applications which have been accepted into DOAJ.")
# if we are allowed to finalise, kick this up to the superclass
super(AdminApplication, self).finalise()
# TODO: should these be a BLL feature?
# If we have changed the editors assigned to this application, let them know.
is_editor_group_changed = ApplicationFormXWalk.is_new_editor_group(self.form, self.source)
is_associate_editor_changed = ApplicationFormXWalk.is_new_editor(self.form, self.source)
# record the event in the provenance tracker
models.Provenance.make(account, "edit", self.target)
# delayed import of the DOAJ BLL
from portality.bll.doaj import DOAJ
applicationService = DOAJ.applicationService()
# if the application is already rejected, and we are moving it back into a non-rejected status
if self.source.application_status == constants.APPLICATION_STATUS_REJECTED and self.target.application_status != constants.APPLICATION_STATUS_REJECTED:
try:
applicationService.unreject_application(self.target, current_user._get_current_object(), disallow_status=[])
except exceptions.DuplicateUpdateRequest as e:
self.add_alert(Messages.FORMS__APPLICATION_PROCESSORS__ADMIN_APPLICATION__FINALISE__COULD_NOT_UNREJECT)
return
# if this application is being accepted, then do the conversion to a journal
if self.target.application_status == constants.APPLICATION_STATUS_ACCEPTED:
j = applicationService.accept_application(self.target, account)
# record the url the journal is available at in the admin are and alert the user
if has_request_context(): # fixme: if we handle alerts via a notification service we won't have to toggle on request context
jurl = url_for("doaj.toc", identifier=j.toc_id)
if self.source.current_journal is not None: # todo: are alerts displayed?
self.add_alert('<a href="{url}" target="_blank">Existing journal updated</a>.'.format(url=jurl))
else:
self.add_alert('<a href="{url}" target="_blank">New journal created</a>.'.format(url=jurl))
# Add the journal to the account and send the notification email
try:
owner = models.Account.pull(j.owner)
self.add_alert('Associating the journal with account {username}.'.format(username=owner.id))
owner.add_journal(j.id)
if not owner.has_role('publisher'):
owner.add_role('publisher')
owner.save()
# for all acceptances, send an email to the owner of the journal
if email_alert:
self._send_application_approved_email(j.bibjson().title, owner.name, owner.email, self.source.current_journal is not None)
except AttributeError:
raise Exception("Account {owner} does not exist".format(owner=j.owner))
except app_email.EmailException:
self.add_alert("Problem sending email to suggester - probably address is invalid")
app.logger.exception("Acceptance email to owner failed.")
# if the application was instead rejected, carry out the rejection actions
elif self.source.application_status != constants.APPLICATION_STATUS_REJECTED and self.target.application_status == constants.APPLICATION_STATUS_REJECTED:
# remember whether this was an update request or not
is_update_request = self.target.current_journal is not None
# reject the application
applicationService.reject_application(self.target, current_user._get_current_object())
# if this was an update request, send an email to the owner
if is_update_request and email_alert:
sent = False
send_report = []
try:
send_report = emails.send_publisher_reject_email(self.target, update_request=is_update_request)
sent = True
except app_email.EmailException as e:
pass
if sent:
self.add_alert(Messages.SENT_REJECTED_UPDATE_REQUEST_EMAIL.format(user=self.target.owner, email=send_report[0].get("email"), name=send_report[0].get("name")))
else:
self.add_alert(Messages.NOT_SENT_REJECTED_UPDATE_REQUEST_EMAIL.format(user=self.target.owner))
# the application was neither accepted or rejected, so just save it
else:
self.target.set_last_manual_update()
self.target.save()
if email_alert:
# if revisions were requested, email the publisher
if self.source.application_status != constants.APPLICATION_STATUS_REVISIONS_REQUIRED and self.target.application_status == constants.APPLICATION_STATUS_REVISIONS_REQUIRED:
try:
emails.send_publisher_update_request_revisions_required(self.target)
self.add_alert(Messages.SENT_REJECTED_UPDATE_REQUEST_REVISIONS_REQUIRED_EMAIL.format(user=self.target.owner))
except app_email.EmailException as e:
self.add_alert(Messages.NOT_SENT_REJECTED_UPDATE_REQUEST_REVISIONS_REQUIRED_EMAIL.format(user=self.target.owner))
# if we need to email the editor and/or the associate, handle those here
if is_editor_group_changed:
try:
emails.send_editor_group_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to editor - probably address is invalid")
app.logger.exception("Email to associate failed.")
if is_associate_editor_changed:
try:
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception("Email to associate failed.")
# If this is the first time this application has been assigned to an editor, notify the publisher.
old_ed = self.source.editor
if (old_ed is None or old_ed == '') and self.target.editor is not None:
is_update_request = self.target.current_journal is not None
if is_update_request:
alerts = emails.send_publisher_update_request_editor_assigned_email(self.target)
else:
alerts = emails.send_publisher_application_editor_assigned_email(self.target)
for alert in alerts:
self.add_alert(alert)
# Inform editor and associate editor if this application was 'ready' or 'completed', but has been changed to 'in progress'
if (self.source.application_status == constants.APPLICATION_STATUS_READY or self.source.application_status == constants.APPLICATION_STATUS_COMPLETED) and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS:
# First, the editor
try:
emails.send_editor_inprogress_email(self.target)
self.add_alert('An email has been sent to notify the editor of the change in status.')
except AttributeError:
magic = str(uuid.uuid1())
self.add_alert('Couldn\'t find a recipient for this email - check editor groups are correct. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('No editor recipient for failed review email - ' + magic)
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Sending the failed review email to editor didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending review failed email to editor - ' + magic)
# Then the associate
try:
emails.send_assoc_editor_inprogress_email(self.target)
self.add_alert('An email has been sent to notify the assigned associate editor of the change in status.')
except AttributeError:
magic = str(uuid.uuid1())
self.add_alert('Couldn\'t find a recipient for this email - check an associate editor is assigned. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('No associate editor recipient for failed review email - ' + magic)
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Sending the failed review email to associate editor didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending review failed email to associate editor - ' + magic)
# email other managing editors if this was newly set to 'ready'
if self.source.application_status != constants.APPLICATION_STATUS_READY and self.target.application_status == constants.APPLICATION_STATUS_READY:
            # this template requires the name of whoever made the change; say it was an administrator
ed_id = 'an administrator'
try:
emails.send_admin_ready_email(self.target, editor_id=ed_id)
self.add_alert('A confirmation email has been sent to the Managing Editors.')
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert('Sending the ready status to managing editors didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending ready status email to managing editors - ' + magic)
def _send_application_approved_email(self, journal_title, publisher_name, email, update_request=False):
"""Email the publisher when an application is accepted (it's here because it's too troublesome to factor out)"""
url_root = request.url_root
if url_root.endswith("/"):
url_root = url_root[:-1]
to = [email]
fro = app.config.get('SYSTEM_EMAIL_FROM', '[email protected]')
if update_request:
subject = app.config.get("SERVICE_NAME", "") + " - update request accepted"
else:
subject = app.config.get("SERVICE_NAME", "") + " - journal accepted"
publisher_name = publisher_name if publisher_name is not None else "Journal Owner"
try:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
msg = Messages.SENT_ACCEPTED_APPLICATION_EMAIL.format(email=email)
template = "email/publisher_application_accepted.txt"
if update_request:
msg = Messages.SENT_ACCEPTED_UPDATE_REQUEST_EMAIL.format(email=email)
template = "email/publisher_update_request_accepted.txt"
jn = journal_title
app_email.send_mail(to=to,
fro=fro,
subject=subject,
template_name=template,
journal_title=jn,
publisher_name=publisher_name,
url_root=url_root
)
self.add_alert(msg)
else:
msg = Messages.NOT_SENT_ACCEPTED_APPLICATION_EMAIL.format(email=email)
if update_request:
msg = Messages.NOT_SENT_ACCEPTED_UPDATE_REQUEST_EMAIL.format(email=email)
self.add_alert(msg)
except Exception as e:
magic = str(uuid.uuid1())
self.add_alert('Sending the journal acceptance information email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending application approved email failed - ' + magic)
def validate(self):
_statuses_not_requiring_validation = ['rejected', 'pending', 'in progress', 'on hold']
# make use of the ability to disable validation, otherwise, let it run
if self.form is not None:
if self.form.application_status.data in _statuses_not_requiring_validation:
self.pre_validate()
return True
return super(AdminApplication, self).validate()
class EditorApplication(ApplicationProcessor):
"""
Editors Application Review form. This should be used in a context where an editor who owns an editorial group
is accessing an application. This prevents re-assignment of Editorial group, but permits assignment of associate
editor. It also permits change in application state, except to "accepted"; therefore this form context cannot
be used to create journals from applications. Deleting notes is not allowed, but adding is.
"""
def pre_validate(self):
# Call to super sets all the basic disabled fields
super(EditorApplication, self).pre_validate()
# although the editor_group field is handled by the general pre-validator, we still need to set the choices
# self.form.editor_group.data = self.source.editor_group
self.form.editor.choices = [(self.form.editor.data, self.form.editor.data)]
        # This is no longer necessary; it is handled by the main pre_validate function
#if self._formulaic.get('application_status').is_disabled:
# self.form.application_status.data = self.source.application_status
# but we do still need to add the overwritten status to the choices for validation
if self.form.application_status.data not in [c[0] for c in self.form.application_status.choices]:
self.form.application_status.choices.append((self.form.application_status.data, self.form.application_status.data))
def patch_target(self):
super(EditorApplication, self).patch_target()
self._carry_fixed_aspects()
self._merge_notes_forward()
self._carry_continuations()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
def finalise(self):
if self.source is None:
raise Exception("You cannot edit a not-existent application")
if self.source.application_status == constants.APPLICATION_STATUS_ACCEPTED:
raise Exception("You cannot edit applications which have been accepted into DOAJ.")
# if we are allowed to finalise, kick this up to the superclass
super(EditorApplication, self).finalise()
# Check the status change is valid
# TODO: we want to rid ourselves of the Choices module
Choices.validate_status_change('editor', self.source.application_status, self.target.application_status)
# FIXME: may want to factor this out of the suggestionformxwalk
new_associate_assigned = ApplicationFormXWalk.is_new_editor(self.form, self.source)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# record the event in the provenance tracker
models.Provenance.make(current_user, "edit", self.target)
# if we need to email the associate because they have just been assigned, handle that here.
if new_associate_assigned:
try:
self.add_alert("New editor assigned - email with confirmation has been sent")
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception('Error sending associate assigned email')
# If this is the first time this application has been assigned to an editor, notify the publisher.
old_ed = self.source.editor
if (old_ed is None or old_ed == '') and self.target.editor is not None:
is_update_request = self.target.current_journal is not None
if is_update_request:
alerts = emails.send_publisher_update_request_editor_assigned_email(self.target)
else:
alerts = emails.send_publisher_application_editor_assigned_email(self.target)
for alert in alerts:
self.add_alert(alert)
# Email the assigned associate if the application was reverted from 'completed' to 'in progress' (failed review)
if self.source.application_status == constants.APPLICATION_STATUS_COMPLETED and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS:
try:
emails.send_assoc_editor_inprogress_email(self.target)
self.add_alert(
'An email has been sent to notify the assigned associate editor of the change in status.')
except AttributeError as e:
magic = str(uuid.uuid1())
self.add_alert(
'Couldn\'t find a recipient for this email - check an associate editor is assigned. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('No associate editor recipient for failed review email - ' + magic)
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert(
'Sending the failed review email to associate editor didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending failed review email to associate editor - ' + magic)
# email managing editors if the application was newly set to 'ready'
if self.source.application_status != constants.APPLICATION_STATUS_READY and self.target.application_status == constants.APPLICATION_STATUS_READY:
# Tell the ManEds who has made the status change - the editor in charge of the group
editor_group_name = self.target.editor_group
editor_group_id = models.EditorGroup.group_exists_by_name(name=editor_group_name)
editor_group = models.EditorGroup.pull(editor_group_id)
editor_acc = editor_group.get_editor_account()
# record the event in the provenance tracker
models.Provenance.make(current_user, "status:ready", self.target)
editor_id = editor_acc.id
try:
emails.send_admin_ready_email(self.target, editor_id=editor_id)
self.add_alert('A confirmation email has been sent to the Managing Editors.')
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert(
'Sending the ready status to managing editors didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending ready status email to managing editors - ' + magic)
class AssociateApplication(ApplicationProcessor):
"""
Associate Editors Application Review form. This is to be used in a context where an associate editor (fewest rights)
needs to access an application for review. This editor cannot change the editorial group or the assigned editor.
They also cannot change the owner of the application. They cannot set an application to "Accepted" so this form can't
be used to create a journal from an application. They cannot delete, only add notes.
"""
def pre_validate(self):
# Call to super sets all the basic disabled fields
super(AssociateApplication, self).pre_validate()
# no longer necessary, handled by superclass pre_validate
#if self._formulaic.get('application_status').is_disabled:
# self.form.application_status.data = self.source.application_status
# but we do still need to add the overwritten status to the choices for validation
if self.form.application_status.data not in [c[0] for c in self.form.application_status.choices]:
self.form.application_status.choices.append(
(self.form.application_status.data, self.form.application_status.data))
def patch_target(self):
if self.source is None:
raise Exception("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self._merge_notes_forward()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self.target.set_editor(self.source.editor)
self.target.set_seal(self.source.has_seal())
self._carry_continuations()
def finalise(self):
# if we are allowed to finalise, kick this up to the superclass
super(AssociateApplication, self).finalise()
# Check the status change is valid
Choices.validate_status_change('associate', self.source.application_status, self.target.application_status)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# record the event in the provenance tracker
models.Provenance.make(current_user, "edit", self.target)
# inform publisher if this was set to 'in progress' from 'pending'
if self.source.application_status == constants.APPLICATION_STATUS_PENDING and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
is_update_request = self.target.current_journal is not None
if is_update_request:
alerts = emails.send_publisher_update_request_inprogress_email(self.target)
else:
alerts = emails.send_publisher_application_inprogress_email(self.target)
for alert in alerts:
self.add_alert(alert)
else:
self.add_alert(Messages.IN_PROGRESS_NOT_SENT_EMAIL_DISABLED)
# inform editor if this was newly set to 'completed'
if self.source.application_status != constants.APPLICATION_STATUS_COMPLETED and self.target.application_status == constants.APPLICATION_STATUS_COMPLETED:
# record the event in the provenance tracker
models.Provenance.make(current_user, "status:completed", self.target)
try:
emails.send_editor_completed_email(self.target)
self.add_alert('A confirmation email has been sent to notify the editor of the change in status.')
except app_email.EmailException:
magic = str(uuid.uuid1())
self.add_alert(
'Sending the ready status to editor email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.exception('Error sending completed status email to editor - ' + magic)
class PublisherUpdateRequest(ApplicationProcessor):
def pre_validate(self):
if self.source is None:
raise Exception("You cannot validate a form from a non-existent source")
super(ApplicationProcessor, self).pre_validate()
# no longer required, handled by call to superclass pre_validate
# carry forward the disabled fields
#bj = self.source.bibjson()
#self.form.title.data = bj.title
#self.form.alternative_title.data = bj.alternative_title
#pissn = bj.pissn
#if pissn == "": pissn = None
#self.form.pissn.data = pissn
#eissn = bj.eissn
#if eissn == "": eissn = None
#self.form.eissn.data = eissn
def patch_target(self):
if self.source is None:
raise Exception("You cannot patch a target from a non-existent source")
self._carry_subjects_and_seal()
self._carry_fixed_aspects()
self._merge_notes_forward()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self.target.set_editor(self.source.editor)
self._carry_continuations()
# we carry this over for completeness, although it will be overwritten in the finalise() method
self.target.set_application_status(self.source.application_status)
def finalise(self, save_target=True, email_alert=True):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise Exception("You cannot edit a not-existent application")
# if we are allowed to finalise, kick this up to the superclass
super(PublisherUpdateRequest, self).finalise()
# set the status to update_request (if not already)
self.target.set_application_status(constants.APPLICATION_STATUS_UPDATE_REQUEST)
# Save the target
self.target.set_last_manual_update()
if save_target:
saved = self.target.save()
if saved is None:
raise Exception("Save on application failed")
# obtain the related journal, and attach the current application id to it
journal_id = self.target.current_journal
from portality.bll.doaj import DOAJ
journalService = DOAJ.journalService()
if journal_id is not None:
journal, _ = journalService.journal(journal_id)
if journal is not None:
journal.set_current_application(self.target.id)
if save_target:
saved = journal.save()
if saved is None:
raise Exception("Save on journal failed")
else:
self.target.remove_current_journal()
# email the publisher to tell them we received their update request
if email_alert:
try:
self._send_received_email()
except app_email.EmailException as e:
self.add_alert("We were unable to send you an email confirmation - possible problem with your email address")
app.logger.exception('Error sending reapplication received email to publisher')
def _carry_subjects_and_seal(self):
# carry over the subjects
source_subjects = self.source.bibjson().subject
self.target.bibjson().subject = source_subjects
# carry over the seal
self.target.set_seal(self.source.has_seal())
def _send_received_email(self):
acc = models.Account.pull(self.target.owner)
if acc is None:
self.add_alert("Unable to locate account for specified owner")
return
journal_name = self.target.bibjson().title #.encode('utf-8', 'replace')
to = [acc.email]
fro = app.config.get('SYSTEM_EMAIL_FROM', '[email protected]')
subject = app.config.get("SERVICE_NAME","") + " - update request received"
try:
if app.config.get("ENABLE_PUBLISHER_EMAIL", False):
app_email.send_mail(to=to,
fro=fro,
subject=subject,
template_name="email/publisher_update_request_received.txt",
journal_name=journal_name,
username=self.target.owner
)
self.add_alert('A confirmation email has been sent to ' + acc.email + '.')
except app_email.EmailException as e:
magic = str(uuid.uuid1())
self.add_alert('Hm, sending the "update request received" email didn\'t work. Please quote this magic number when reporting the issue: ' + magic + ' . Thank you!')
app.logger.error(magic + "\n" + repr(e))
raise e
class PublisherUpdateRequestReadOnly(ApplicationProcessor):
"""
Read Only Application form for publishers. Nothing can be changed. Useful to show publishers what they
currently have submitted for review
"""
def finalise(self):
raise Exception("You cannot edit applications using the read-only form")
###############################################
### Journal form processors
###############################################
class ManEdJournalReview(ApplicationProcessor):
"""
Managing Editor's Journal Review form. Should be used in a context where the form warrants full
admin privileges. It will permit doing every action.
"""
def patch_target(self):
if self.source is None:
raise Exception("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self._merge_notes_forward(allow_delete=True)
# NOTE: this means you can't unset an owner once it has been set. But you can change it.
if (self.target.owner is None or self.target.owner == "") and (self.source.owner is not None):
self.target.set_owner(self.source.owner)
def finalise(self):
# FIXME: this first one, we ought to deal with outside the form context, but for the time being this
# can be carried over from the old implementation
if self.source is None:
raise Exception("You cannot edit a not-existent journal")
# if we are allowed to finalise, kick this up to the superclass
super(ManEdJournalReview, self).finalise()
# FIXME: may want to factor this out of the suggestionformxwalk
        # If we have changed the editors assigned to this application, let them know.
is_editor_group_changed = JournalFormXWalk.is_new_editor_group(self.form, self.source)
is_associate_editor_changed = JournalFormXWalk.is_new_editor(self.form, self.source)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# if we need to email the editor and/or the associate, handle those here
if is_editor_group_changed:
try:
emails.send_editor_group_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to editor - probably address is invalid")
app.logger.exception('Error sending assignment email to editor.')
if is_associate_editor_changed:
try:
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception('Error sending assignment email to associate.')
def validate(self):
# make use of the ability to disable validation, otherwise, let it run
if self.form is not None:
if self.form.make_all_fields_optional.data:
self.pre_validate()
return True
return super(ManEdJournalReview, self).validate()
class EditorJournalReview(ApplicationProcessor):
"""
Editors Journal Review form. This should be used in a context where an editor who owns an editorial group
is accessing a journal. This prevents re-assignment of Editorial group, but permits assignment of associate
editor.
"""
def patch_target(self):
if self.source is None:
raise Exception("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self._merge_notes_forward()
self._carry_continuations()
def pre_validate(self):
# call to super handles all the basic disabled field
super(EditorJournalReview, self).pre_validate()
# although the superclass sets the value of the disabled field, we still need to set the choices
# self.form.editor_group.data = self.source.editor_group
self.form.editor.choices = [(self.form.editor.data, self.form.editor.data)]
def finalise(self):
if self.source is None:
raise Exception("You cannot edit a not-existent journal")
# if we are allowed to finalise, kick this up to the superclass
super(EditorJournalReview, self).finalise()
email_associate = ApplicationFormXWalk.is_new_editor(self.form, self.source)
# Save the target
self.target.set_last_manual_update()
self.target.save()
# if we need to email the associate, handle that here.
if email_associate:
try:
emails.send_assoc_editor_email(self.target)
except app_email.EmailException:
self.add_alert("Problem sending email to associate editor - probably address is invalid")
app.logger.exception('Error sending assignment email to associate.')
class AssEdJournalReview(ApplicationProcessor):
"""
Associate Editors Journal Review form. This is to be used in a context where an associate editor (fewest rights)
needs to access a journal for review. This editor cannot change the editorial group or the assigned editor.
They also cannot change the owner of the journal. They cannot delete, only add notes.
"""
def patch_target(self):
if self.source is None:
raise Exception("You cannot patch a target from a non-existent source")
self._carry_fixed_aspects()
self._merge_notes_forward()
self.target.set_owner(self.source.owner)
self.target.set_editor_group(self.source.editor_group)
self.target.set_editor(self.source.editor)
self._carry_continuations()
def finalise(self):
if self.source is None:
raise Exception("You cannot edit a not-existent journal")
# if we are allowed to finalise, kick this up to the superclass
super(AssEdJournalReview, self).finalise()
# Save the target
self.target.set_last_manual_update()
self.target.save()
class ReadOnlyJournal(ApplicationProcessor):
"""
Read Only Journal form. Nothing can be changed. Useful for reviewing a journal and an application
(or update request) side by side in 2 browser windows or tabs.
"""
def form2target(self):
pass # you can't edit objects using this form
def patch_target(self):
pass # you can't edit objects using this form
def finalise(self):
raise Exception("You cannot edit journals using the read-only form")
class ManEdBulkEdit(ApplicationProcessor):
"""
    Managing Editor's Bulk Edit form. Should be used in a context where the form warrants full
admin privileges. It will permit doing every action.
"""
pass
| apache-2.0 | 428,683,396,306,092,600 | 47.62395 | 241 | 0.631475 | false | 4.450106 | false | false | false |
brainix/social-butterfly | channels.py | 1 | 6608 | #-----------------------------------------------------------------------------#
# channels.py #
# #
# Copyright (c) 2010-2012, Code A La Mode, original authors. #
# #
# This file is part of Social Butterfly. #
# #
# Social Butterfly is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License as #
# published by the Free Software Foundation, either version 3 of the #
# License, or (at your option) any later version. #
# #
# Social Butterfly is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with Social Butterfly. If not, see: #
# <http://www.gnu.org/licenses/>. #
#-----------------------------------------------------------------------------#
"""Datastore model and public API for Google App Engine channel management."""
import datetime
import logging
import random
from google.appengine.api import channel
from google.appengine.ext import db
from google.appengine.ext import deferred
from google.appengine.runtime import DeadlineExceededError
from config import NUM_RETRIES
_log = logging.getLogger(__name__)
class Channel(db.Model):
"""Datastore model and public API for Google App Engine channel management.
Google App Engine implements channels (similar to Comet or WebSockets) for
real-time cloud to browser communication. But App Engine only provides the
communication primitives. We need to persist additional data about the
open channels, so that we know who to broadcast the messages to.
"""
name = db.StringProperty()
datetime = db.DateTimeProperty(required=True, auto_now=True)
@classmethod
def create(cls, name=None):
"""Create a channel and return its token."""
_log.info('creating channel')
def txn():
for retry in range(NUM_RETRIES):
client_id = 'client' + str(random.randint(0, 10 ** 8 - 1))
chan = cls.get_by_key_name(client_id)
if chan is None:
chan = cls(key_name=client_id, name=name)
chan.put()
return client_id
        client_id = db.run_in_transaction(txn)
        token = None  # stays None if no client ID could be allocated
        if client_id is None:
            _log.warning("couldn't create channel; couldn't allocate ID")
else:
token = channel.create_channel(client_id)
_countdown = 2 * 60 * 60
deferred.defer(cls.destroy, client_id, _countdown=_countdown)
_log.info('created channel %s, token %s' % (client_id, token))
return token
@classmethod
def destroy(cls, client_id):
"""Destroy the specified channel."""
_log.info('destroying channel %s' % client_id)
chan = cls.get_by_key_name(client_id)
if chan is None:
body = "couldn't destroy channel %s; already destroyed" % client_id
_log.info(body)
else:
db.delete(chan)
_log.info('destroyed channel %s' % client_id)
@classmethod
def broadcast(cls, json, name=None):
"""Schedule broadcasting the specified JSON string to all channels."""
_log.info('deferring broadcasting JSON to all connected channels')
channels = cls.all()
if name is not None:
channels = channels.filter('name =', name)
channels = channels.count(1)
if channels:
deferred.defer(cls._broadcast, json, name=name, cursor=None)
_log.info('deferred broadcasting JSON to all connected channels')
else:
body = 'not deferring broadcasting JSON (no connected channels)'
_log.info(body)
@classmethod
def _broadcast(cls, json, name=None, cursor=None):
"""Broadcast the specified JSON string to all channels."""
_log.info('broadcasting JSON to all connected channels')
keys = cls.all(keys_only=True)
if name is not None:
keys = keys.filter('name = ', name)
if cursor is not None:
keys = keys.with_cursor(cursor)
num_channels = 0
try:
for key in keys:
client_id = key.name()
channel.send_message(client_id, json)
# There's a chance that Google App Engine will throw the
# DeadlineExceededError exception at this point in the flow of
# execution. In this case, the current channel will have
# already received our JSON broadcast, but the cursor will not
# have been updated. So on the next go-around, the current
# channel will receive our JSON broadcast again. I'm just
# documenting this possibility, but it shouldn't be a big deal.
cursor = keys.cursor()
num_channels += 1
except DeadlineExceededError:
_log.info('broadcasted JSON to %s channels' % num_channels)
_log.warning("deadline; deferring broadcast to remaining channels")
deferred.defer(cls._broadcast, json, name=name, cursor=cursor)
else:
_log.info('broadcasted JSON to %s channels' % num_channels)
_log.info('broadcasted JSON to all connected channels')
@classmethod
def flush(cls):
"""Destroy all channels created over two hours ago."""
_log.info('destroying all channels over two hours old')
now = datetime.datetime.now()
timeout = datetime.timedelta(hours=2)
expiry = now - timeout
keys = cls.all(keys_only=True).filter('datetime <=', expiry)
db.delete(keys)
_log.info('destroyed all channels over two hours old')
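# Hedged usage sketch (not part of the original module). Assuming a request
# handler on App Engine imports `Channel` from this module, the intended
# lifecycle looks roughly like this; the channel name and JSON payload are
# illustrative only:
#
#   token = Channel.create(name='lobby')              # token goes to the browser
#   Channel.broadcast('{"msg": "hello"}', name='lobby')
#   Channel.flush()                                   # reap channels > 2 hours old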
| gpl-3.0 | -4,261,593,968,478,345,000 | 45.20979 | 79 | 0.544492 | false | 4.730136 | false | false | false |
eykd/fuzzy-octo-bear | tests/test_map_loader.py | 1 | 1227 | from unittest import TestCase
from ensure import ensure
from path import path
from fuzzy.map import load_game_map
from fuzzy.rooms import Room
from fuzzy.exits import Exit
PATH = path(__file__).abspath().dirname()
class MapLoaderTests(TestCase):
def setUp(self):
self.filename = PATH / 'rooms.yaml'
def test_it_should_construct_a_map_from_the_yaml_file(self):
start_room = load_game_map(self.filename)
ensure(start_room).is_a(Room)
ensure(start_room.exits).has_length(2)
ensure(start_room.exits).is_a(list).of(Exit)
ensure(start_room.exits[0].target).is_a(Room)
ensure(start_room.exits[0].target).is_not(start_room)
room_3 = start_room.exits[1].target
ensure(room_3.exits).has_length(4)
ensure(room_3.exits).is_a(list).of(Exit)
room_6 = room_3.exits[2].target
ensure(room_6).is_a(Room)
ensure(room_6.exits).has_length(2)
ensure(room_6.description).equals("A nondescript room")
room_7 = room_3.exits[3].target
ensure(room_7).is_a(Room)
ensure(room_7.exits).has_length(2)
ensure(room_7.description).equals("A nondescript room")
ensure(room_6).is_not(room_7)
| gpl-2.0 | -8,923,510,156,966,951,000 | 29.675 | 64 | 0.647107 | false | 3.05985 | false | false | false |
adamsd5/yavalath | memorycontrol.py | 1 | 4959 | """This holds a routine for restricting the current process memory on Windows."""
import multiprocessing
import ctypes
def set_memory_limit(memory_limit):
"""Creates a new unnamed job object and assigns the current process to it.
The job object will have the given memory limit in bytes: the given process
together with its descendant processes will not be allowed to exceed
the limit. If purge_pid_on_exit is true, when the *calling* process exits
(the calling process can be the same or different from the given process),
the given process and all its descendant processes will be killed."""
import os
pid = os.getpid()
purge_pid_on_exit = True
# Windows API constants, used for OpenProcess and SetInformationJobObject.
PROCESS_TERMINATE = 0x1
PROCESS_SET_QUOTA = 0x100
JobObjectExtendedLimitInformation = 9
JOB_OBJECT_LIMIT_PROCESS_MEMORY = 0x100
JOB_OBJECT_LIMIT_JOB_MEMORY = 0x200
JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE = 0x2000
class JOBOBJECT_EXTENDED_LIMIT_INFORMATION(ctypes.Structure):
"""Windows API structure, used as input to SetInformationJobObject."""
class JOBOBJECT_BASIC_LIMIT_INFORMATION(ctypes.Structure):
_fields_ = [("PerProcessUserTimeLimit", ctypes.c_int64),
("PerJobUserTimeLimit", ctypes.c_int64),
("LimitFlags", ctypes.c_uint32),
("MinimumWorkingSetSize", ctypes.c_void_p),
("MaximumWorkingSetSize", ctypes.c_void_p),
("ActiveProcessLimit", ctypes.c_uint32),
("Affinity", ctypes.c_void_p),
("PriorityClass", ctypes.c_uint32),
("SchedulingClass", ctypes.c_uint32)]
class IO_COUNTERS(ctypes.Structure):
_fields_ = [("ReadOperationCount", ctypes.c_uint64),
("WriteOperationCount", ctypes.c_uint64),
("OtherOperationCount", ctypes.c_uint64),
("ReadTransferCount", ctypes.c_uint64),
("WriteTransferCount", ctypes.c_uint64),
("OtherTransferCount", ctypes.c_uint64)]
_fields_ = [("BasicLimitInformation", JOBOBJECT_BASIC_LIMIT_INFORMATION),
("IoInfo", IO_COUNTERS),
("ProcessMemoryLimit", ctypes.c_void_p),
("JobMemoryLimit", ctypes.c_void_p),
("PeakProcessMemoryUsed", ctypes.c_void_p),
("PeakJobMemoryUsed", ctypes.c_void_p)]
job_info = JOBOBJECT_EXTENDED_LIMIT_INFORMATION()
job_info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_JOB_MEMORY
if purge_pid_on_exit:
job_info.BasicLimitInformation.LimitFlags |= JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE
job_info.JobMemoryLimit = memory_limit
kernel = ctypes.windll.kernel32
job = kernel.CreateJobObjectA(None, None)
if job == 0:
raise RuntimeError("CreateJobObjectA failed")
keep_job_handle = False
try:
if not kernel.SetInformationJobObject(
job,
JobObjectExtendedLimitInformation,
ctypes.POINTER(JOBOBJECT_EXTENDED_LIMIT_INFORMATION)(job_info),
ctypes.sizeof(JOBOBJECT_EXTENDED_LIMIT_INFORMATION)):
raise RuntimeError("SetInformationJobObject failed")
        process = kernel.OpenProcess(PROCESS_SET_QUOTA | PROCESS_TERMINATE, False, pid)
if process == 0:
raise RuntimeError("OpenProcess failed")
try:
if not kernel.AssignProcessToJobObject(job, process):
raise RuntimeError("AssignProcessToJobObject failed")
# If purge_pid_on_exit is true, we kill process pid and all its
# descendants when the job handle is closed. So, we keep the handle
# dangling, and it will be closed when *this* process terminates.
keep_job_handle = purge_pid_on_exit
finally:
if not kernel.CloseHandle(process):
raise RuntimeError("CloseHandle failed")
finally:
if not (keep_job_handle or kernel.CloseHandle(job)):
raise RuntimeError("CloseHandle failed")
def allocate(num_bytes):
    """Try to allocate num_bytes bytes of memory; used to exercise the limit."""
    import numpy
    try:
        # keep a reference so the buffer is actually committed and held
        result = numpy.zeros(shape=(num_bytes,), dtype='i1')
        print("allocation done:", num_bytes)
    except Exception as ex:
        print("Failed to allocate:", ex)
        raise
def runner(thunk, memory_limit, *args):
set_memory_limit(memory_limit)
thunk(*args)
def run_in_process_with_memory_limit(thunk, memory_limit, test_bytes):
p = multiprocessing.Process(target=runner, args=(thunk, memory_limit, test_bytes))
p.start()
p.join()
def main():
memory_limit = 1000*1000*100
run_in_process_with_memory_limit(allocate, memory_limit=memory_limit, test_bytes=memory_limit)
if __name__ == "__main__":
main()
| mit | -2,975,399,561,159,198,700 | 39.647541 | 98 | 0.626336 | false | 4.125624 | false | false | false |
j-dasilva/COMP4350 | apartment/messaging/message.py | 1 | 1169 | from django.conf import settings
import time
class Message(object):
def __init__(self, *args, **kwargs):
vals = self.process_args(args, kwargs)
self.sender = vals['sender']
self.recipient = vals['recipient']
self.urgency = int(vals['urgency'])
self.content = vals['content']
self.timestamp = int(vals['timestamp'])
self.read = (vals['read'] == 'True')
def process_args(self, args, kwargs):
if len(kwargs) == 6:
return kwargs
elif len(args) == 1:
return args[0]
elif settings.CREATE_STUBS:
# CREATE A STUB MESSAGE
return self.create_stub()
else:
raise MessageException()
def create_stub(self):
return {
"sender": "StubSender",
"recipient": "StubRecipient",
"urgency": "1",
"content": "Stub Message Body",
"timestamp": time.time(),
"read": "False"
}
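# Usage sketch (illustrative): a Message is built either from six keyword
# arguments or from a single positional dict carrying the same keys, e.g.
#
#     msg = Message(sender="alice", recipient="bob", urgency="2",
#                   content="hi", timestamp=time.time(), read="False")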
class MessageException(BaseException):
def __init__(self):
        super(MessageException, self).__init__("Failed to create Message. Please refer to constructor.")
| gpl-2.0 | 587,018,529,972,457,200 | 29 | 104 | 0.551754 | false | 4.220217 | false | false | false |
nature-python/youcai-contest | application/utils/cipherutils.py | 1 | 1513 | #!/usr/bin/python
#encoding:utf-8
#
#author:xin.xin
#since:14-5-19 10:35 AM
#
#
from binascii import b2a_hex, a2b_hex
from Crypto.Cipher import AES
from application import app
class CipherUtils(object):
    # Encryption function: if text is shorter than 16 bytes it is padded
    # with spaces to 16 bytes; if it is longer than 16 and not a multiple
    # of 16, it is padded up to the next multiple of 16.
@staticmethod
def encrypt(text):
cryptor = AES.new(app.config['PASSWORD_CIPHER_KEY'], AES.MODE_CBC, '0000000000000000')
        # The key length must be 16 (AES-128),
        # 24 (AES-192) or 32 (AES-256) bytes.
        # AES-128 is currently sufficient for our use.
length = 16
count = len(text)
if count < length:
            add = (length - count)
            # pad short text with spaces
            text = text + (' ' * add)
elif count > length:
            # pad with '\0' up to the next multiple of the block size; the
            # outer modulo avoids appending a whole extra block when count
            # is already a multiple of 16
            add = (length - (count % length)) % length
text = text + ('\0' * add)
ciphertext = cryptor.encrypt(text)
        # The string produced by AES encryption is not necessarily ASCII and
        # may cause problems when printed or stored, so the encrypted string
        # is uniformly converted to a hexadecimal string here.
return b2a_hex(ciphertext)
    # After decryption, strip the padding that encrypt() appended.
@staticmethod
def decrypt(text):
cryptor = AES.new(app.config['PASSWORD_CIPHER_KEY'], AES.MODE_CBC, '0000000000000000')
plain_text = cryptor.decrypt(a2b_hex(text))
        # strip both NUL and space padding (encrypt() uses one or the other,
        # depending on the original text length)
        return plain_text.rstrip('\0').rstrip(' ')
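# Usage sketch (illustrative; assumes app.config['PASSWORD_CIPHER_KEY'] holds
# a 16-byte key such as 'sixteen byte key'):
#
#     token = CipherUtils.encrypt('my-password')   # hex-encoded ciphertext
#     assert CipherUtils.decrypt(token) == 'my-password'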
| apache-2.0 | 8,200,681,271,139,766,000 | 24.5625 | 94 | 0.612062 | false | 2.263838 | false | false | false |
gobstones/PyGobstones | pygobstones/gui/views/gobstonesMain.py | 1 | 21498 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gobstonesMain.ui'
#
# Created by: PyQt4 UI code generator 4.9.6
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import sys
import resources
sys.path.append('..')
from pygobstones.commons.i18n import *
from pygobstones.gui.textEditor import *
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8('MainWindow'))
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setObjectName(_fromUtf8('centralwidget'))
self.verticalLayout = QtGui.QVBoxLayout(self.centralwidget)
self.verticalLayout.setObjectName(_fromUtf8('verticalLayout'))
self.tabWidgetEditors = QtGui.QTabWidget(self.centralwidget)
self.tabWidgetEditors.setObjectName(_fromUtf8('tabWidgetEditors'))
self.tabWidgetEditors.setStyleSheet("border:2px solid #4682b4; border-color:'#4682b4';")
self.tabWidgetEditors.tabBar().setStyleSheet("background-color:'white'; color:'#4682b4'; border:2px solid #4682b4; font-size:15px")
self.tabWidgetEditors.tabBar().setAttribute(QtCore.Qt.WA_TranslucentBackground)
self.tabFile = QtGui.QWidget()
self.tabFile.setStyleSheet("border-color:white")
self.tabFile.setObjectName(_fromUtf8('tabFile'))
self.verticalLayout_3 = QtGui.QVBoxLayout(self.tabFile)
self.verticalLayout_3.setObjectName(_fromUtf8('verticalLayout_3'))
self.textEditFile = GobstonesTextEditor(self.tabFile)
self.textEditFile.setObjectName(_fromUtf8('textEditFile'))
self.textEditFile.setStyleSheet("selection-color: white; selection-background-color:#008080")
self.verticalLayout_3.addWidget(self.textEditFile)
self.tabWidgetEditors.addTab(self.tabFile, _fromUtf8(''))
self.tabLibrary = QtGui.QWidget()
self.tabLibrary.setStyleSheet("border-color:white")
self.tabLibrary.setObjectName(_fromUtf8('tabLibrary'))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.tabLibrary)
self.verticalLayout_2.setObjectName(_fromUtf8('verticalLayout_2'))
self.textEditLibrary = GobstonesTextEditor(self.tabLibrary)
self.textEditLibrary.setObjectName(_fromUtf8('textEditLibrary'))
self.textEditLibrary.setStyleSheet("selection-color: white; selection-background-color:#008080")
self.verticalLayout_2.addWidget(self.textEditLibrary)
self.tabWidgetEditors.addTab(self.tabLibrary, _fromUtf8(''))
self.set_highlighter(GobstonesHighlighter)
self.logger = QtGui.QTextEdit()
self.logger.setObjectName(_fromUtf8('logger'))
self.logger.setReadOnly(True)
self.logger.setStyleSheet("font-family: Monospace, Consolas, 'Courier New'; font-weight: 100; font-size: 10pt")
self.grid = QtGui.QGridLayout()
self.grid.setSpacing(1)
self.verticalLayout.addLayout(self.grid)
self.splitter = QtGui.QSplitter(QtCore.Qt.Vertical, self.centralwidget)
self.splitter.addWidget(self.tabWidgetEditors)
self.splitter.addWidget(self.logger)
self.verticalLayout.addWidget(self.splitter)
MainWindow.setCentralWidget(self.centralwidget)
self.statusbar = QtGui.QStatusBar(MainWindow)
self.statusbar.setObjectName(_fromUtf8('statusbar'))
MainWindow.setStatusBar(self.statusbar)
self.toolBar = QtGui.QToolBar(MainWindow)
self.toolBar.setObjectName(_fromUtf8('toolBar'))
self.toolBar.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
MainWindow.addToolBar(QtCore.Qt.TopToolBarArea, self.toolBar)
self.menuBar = QtGui.QMenuBar(MainWindow)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 703, 20))
self.menuBar.setObjectName(_fromUtf8('menuBar'))
self.menuFile = QtGui.QMenu(self.menuBar)
self.menuFile.setObjectName(_fromUtf8('menuFile'))
self.menuEdit = QtGui.QMenu(self.menuBar)
self.menuEdit.setObjectName(_fromUtf8('menuEdit'))
self.menuGobstones = QtGui.QMenu(self.menuBar)
self.menuGobstones.setObjectName(_fromUtf8('menuGobstones'))
self.menuBoard = QtGui.QMenu(self.menuBar)
self.menuBoard.setObjectName(_fromUtf8('menuBoard'))
self.menuSelectResultView = QtGui.QMenu(self.menuBoard)
self.menuSelectResultView.setObjectName(_fromUtf8
('menuSelectResultView'))
self.menuHelp = QtGui.QMenu(self.menuBar)
self.menuHelp.setObjectName(_fromUtf8('menuHelp'))
MainWindow.setMenuBar(self.menuBar)
self.actionChangeLang = QtGui.QAction(MainWindow)
icon = QtGui.QIcon(":/logoGobstones.png")
self.actionChangeLang.setIcon(icon)
self.actionChangeLang.setObjectName(_fromUtf8('actionChangeLang'))
self.actionNewFile = QtGui.QAction(MainWindow)
icon = QtGui.QIcon(":/new.png")
self.actionNewFile.setIcon(icon)
self.actionNewFile.setObjectName(_fromUtf8('actionNewFile'))
self.actionCloseFile = QtGui.QAction(MainWindow)
icon = QtGui.QIcon(":/close.png")
self.actionCloseFile.setIcon(icon)
self.actionCloseFile.setObjectName(_fromUtf8('actionCloseFile'))
self.actionOpenFile = QtGui.QAction(MainWindow)
icon1 = QtGui.QIcon(":/open.png")
self.actionOpenFile.setIcon(icon1)
self.actionOpenFile.setObjectName(_fromUtf8('actionOpenFile'))
self.actionSave = QtGui.QAction(MainWindow)
icon2 = QtGui.QIcon(":/save.png")
self.actionSave.setIcon(icon2)
self.actionSave.setObjectName(_fromUtf8('actionSave'))
self.actionSaveAs = QtGui.QAction(MainWindow)
icon3 = QtGui.QIcon(":/save-as.png")
self.actionSaveAs.setIcon(icon3)
self.actionSaveAs.setObjectName(_fromUtf8('actionSaveAs'))
self.actionUndo = QtGui.QAction(MainWindow)
icon5 = QtGui.QIcon(":/undo.png")
self.actionUndo.setIcon(icon5)
self.actionUndo.setObjectName(_fromUtf8('actionUndo'))
self.actionRedo = QtGui.QAction(MainWindow)
icon6 = QtGui.QIcon(":/redo.png")
self.actionRedo.setIcon(icon6)
self.actionRedo.setObjectName(_fromUtf8('actionRedo'))
self.actionCut = QtGui.QAction(MainWindow)
icon7 = QtGui.QIcon(":/cut.png")
self.actionCut.setIcon(icon7)
self.actionCut.setObjectName(_fromUtf8('actionCut'))
self.actionCopy = QtGui.QAction(MainWindow)
icon8 = QtGui.QIcon(":/copy.png")
self.actionCopy.setIcon(icon8)
self.actionCopy.setObjectName(_fromUtf8('actionCopy'))
self.actionPaste = QtGui.QAction(MainWindow)
icon9 = QtGui.QIcon(":/paste.png")
self.actionPaste.setIcon(icon9)
self.actionPaste.setObjectName(_fromUtf8('actionPaste'))
self.actionSelectAll = QtGui.QAction(MainWindow)
icon10 = QtGui.QIcon(":/select-all.png")
self.actionSelectAll.setIcon(icon10)
self.actionSelectAll.setObjectName(_fromUtf8('actionSelectAll'))
self.actionFind = QtGui.QAction(MainWindow)
icon11 = QtGui.QIcon(":/find.png")
self.actionFind.setIcon(icon11)
self.actionFind.setObjectName(_fromUtf8('actionFind'))
self.actionReplace = QtGui.QAction(MainWindow)
icon20 = QtGui.QIcon(":/find.png")
self.actionReplace.setIcon(icon20)
self.actionReplace.setObjectName(_fromUtf8('actionReplace'))
self.actionFonts = QtGui.QAction(MainWindow)
icon21 = QtGui.QIcon(":/select-font.png")
self.actionFonts.setIcon(icon21)
self.actionFonts.setObjectName(_fromUtf8('actionFonts'))
self.actionPreferences = QtGui.QAction(MainWindow)
self.actionPreferences.setObjectName(_fromUtf8('actionFonts'))
self.actionCheck = QtGui.QAction(MainWindow)
icon14 = QtGui.QIcon(":/check.png")
self.actionCheck.setIcon(icon14)
self.actionCheck.setObjectName(_fromUtf8('actionCheck'))
self.actionRun = QtGui.QAction(MainWindow)
icon12 = QtGui.QIcon(":/start.png")
self.actionRun.setIcon(icon12)
self.actionRun.setObjectName(_fromUtf8('actionRun'))
self.actionStop = QtGui.QAction(MainWindow)
icon13 = QtGui.QIcon(":/stop.png")
self.actionStop.setIcon(icon13)
self.actionStop.setObjectName(_fromUtf8('actionStop'))
self.actionManual = QtGui.QAction(MainWindow)
icon15 = QtGui.QIcon(":/help.png")
self.actionManual.setIcon(icon15)
self.actionManual.setObjectName(_fromUtf8('actionManual'))
self.actionLicense = QtGui.QAction(MainWindow)
icon16 = QtGui.QIcon(":/manual.png")
self.actionLicense.setIcon(icon16)
self.actionLicense.setObjectName(_fromUtf8('actionLicense'))
self.actionAbout = QtGui.QAction(MainWindow)
icon17 = QtGui.QIcon(":/about.png")
self.actionAbout.setIcon(icon17)
self.actionAbout.setObjectName(_fromUtf8('actionAbout'))
self.actionExit = QtGui.QAction(MainWindow)
icon18 = QtGui.QIcon(":/exit.png")
self.actionExit.setIcon(icon18)
self.actionExit.setObjectName(_fromUtf8('actionExit'))
self.actionOpenBoardEditor = QtGui.QAction(MainWindow)
icon19 = QtGui.QIcon(":/board-random.png")
self.actionOpenBoardEditor.setIcon(icon19)
self.actionOpenBoardEditor.setObjectName(_fromUtf8
('actionOpenBoardEditor'))
self.actionBoardOptions = QtGui.QAction(MainWindow)
icon20 = QtGui.QIcon(":/board-size.png")
self.actionBoardOptions.setIcon(icon20)
self.actionBoardOptions.setObjectName(_fromUtf8
('actionBoardOptions'))
self.actionLoadBoard = QtGui.QAction(MainWindow)
icon20 = QtGui.QIcon(":/board-new.png")
self.actionLoadBoard.setIcon(icon20)
self.actionLoadBoard.setObjectName(_fromUtf8
('actionLoadBoard'))
self.toolBar.addAction(self.actionChangeLang)
self.toolBar.addAction(self.actionNewFile)
self.toolBar.addAction(self.actionOpenFile)
self.toolBar.addAction(self.actionSave)
self.toolBar.addAction(self.actionCloseFile)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionUndo)
self.toolBar.addAction(self.actionRedo)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionOpenBoardEditor)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionCheck)
self.toolBar.addAction(self.actionRun)
self.toolBar.addAction(self.actionStop)
self.toolBar.addSeparator()
self.toolBar.addAction(self.actionManual)
self.toolBar.addAction(self.actionAbout)
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionChangeLang)
self.menuFile.addAction(self.actionNewFile)
self.menuFile.addAction(self.actionOpenFile)
self.menuFile.addAction(self.actionSave)
self.menuFile.addAction(self.actionSaveAs)
self.menuFile.addAction(self.actionCloseFile)
self.menuFile.addSeparator()
self.menuFile.addAction(self.actionExit)
self.menuEdit.addSeparator()
self.menuEdit.addAction(self.actionUndo)
self.menuEdit.addAction(self.actionRedo)
self.menuEdit.addSeparator()
self.menuEdit.addAction(self.actionCut)
self.menuEdit.addAction(self.actionCopy)
self.menuEdit.addAction(self.actionPaste)
self.menuEdit.addAction(self.actionSelectAll)
self.menuEdit.addSeparator()
self.menuEdit.addAction(self.actionFind)
self.menuEdit.addAction(self.actionReplace)
self.menuEdit.addSeparator()
self.menuEdit.addAction(self.actionFonts)
self.menuEdit.addSeparator()
self.menuEdit.addAction(self.actionPreferences)
self.menuGobstones.addSeparator()
self.menuGobstones.addAction(self.actionRun)
self.menuGobstones.addAction(self.actionStop)
self.menuGobstones.addAction(self.actionCheck)
self.menuBoard.addSeparator()
self.menuBoard.addAction(self.actionLoadBoard)
self.menuBoard.addAction(self.actionBoardOptions)
self.menuBoard.addAction(self.actionOpenBoardEditor)
self.menuBoard.addSeparator()
self.menuBoard.addAction(self.menuSelectResultView.menuAction())
self.menuHelp.addSeparator()
self.menuHelp.addAction(self.actionManual)
self.menuHelp.addAction(self.actionLicense)
self.menuHelp.addAction(self.actionAbout)
self.menuBar.addAction(self.menuFile.menuAction())
self.menuBar.addAction(self.menuEdit.menuAction())
self.menuBar.addAction(self.menuGobstones.menuAction())
self.menuBar.addAction(self.menuBoard.menuAction())
self.menuBar.addAction(self.menuHelp.menuAction())
self.retranslateUi(MainWindow)
self.tabWidgetEditors.setCurrentIndex(0)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def set_highlighter(self, highlighter_class):
        """Attach a syntax highlighter of the given class to both the file
        and library editors, detaching any previously installed ones."""
if hasattr(self, "highlighter"):
self.highlighter["main"].setDocument(None)
self.highlighter["library"].setDocument(None)
else:
self.highlighter = {}
self.highlighter["main"] = highlighter_class(self.textEditFile.edit.document())
self.highlighter["library"] = highlighter_class(self.textEditLibrary.edit.document())
def retranslateUi(self, MainWindow):
self.tabWidgetEditors.setTabText(
self.tabWidgetEditors.indexOf(self.tabFile),
_translate('MainWindow', i18n('Untitled'), None))
self.tabWidgetEditors.setTabText(
self.tabWidgetEditors.indexOf(self.tabLibrary),
_translate('MainWindow', i18n('Untitled'), None))
self.toolBar.setWindowTitle(_translate('MainWindow', 'toolBar', None))
self.menuFile.setTitle(_translate('MainWindow', i18n('File'), None))
self.menuEdit.setTitle(_translate('MainWindow', i18n('Edit'), None))
self.menuGobstones.setTitle(_translate('MainWindow', 'Gobstones',
None))
self.menuBoard.setTitle(_translate('MainWindow', i18n('Board'), None))
self.menuSelectResultView.setTitle(_translate('MainWindow',
i18n('Select view results'), None))
self.menuHelp.setTitle(_translate('MainWindow', i18n('Help'), None))
self.actionChangeLang.setText(_translate('MainWindow',
'Gobstones ', None))
self.actionChangeLang.setToolTip(_translate('MainWindow',
i18n('Change the Gobstones Language'), None))
self.actionChangeLang.setShortcut(_translate('MainWindow', 'F11', None))
self.actionNewFile.setText(_translate('MainWindow', i18n('New'), None))
self.actionNewFile.setToolTip(_translate('MainWindow',
i18n('Create new file'), None))
self.actionNewFile.setShortcut(_translate('MainWindow', 'Ctrl+N',
None))
self.actionCloseFile.setText(_translate('MainWindow', i18n('Close'), None))
self.actionCloseFile.setToolTip(_translate('MainWindow',
i18n('Close the current file and the library'), None))
self.actionCloseFile.setShortcut(_translate('MainWindow', 'Ctrl+R',
None))
self.actionOpenFile.setText(_translate('MainWindow', i18n('Open'), None))
self.actionOpenFile.setToolTip(_translate('MainWindow',
i18n('Open an existent file'), None))
self.actionOpenFile.setShortcut(_translate('MainWindow', 'Ctrl+O',
None))
self.actionSave.setText(_translate('MainWindow', i18n('Save'), None))
self.actionSave.setToolTip(_translate('MainWindow',
i18n('Save the current file'), None))
self.actionSave.setShortcut(_translate('MainWindow', 'Ctrl+S', None))
self.actionSaveAs.setText(_translate('MainWindow', i18n('Save as...'),
None))
self.actionSaveAs.setToolTip(_translate('MainWindow',
i18n('Save the current file and allows put a name and choose the location'),
None))
self.actionUndo.setText(_translate('MainWindow', i18n('Undo'), None))
self.actionUndo.setShortcut(_translate('MainWindow', 'Ctrl+Z', None))
self.actionRedo.setText(_translate('MainWindow', i18n('Redo'), None))
self.actionRedo.setShortcut(_translate('MainWindow', 'Ctrl+Shift+Z',
None))
self.actionCut.setText(_translate('MainWindow', i18n('Cut'), None))
self.actionCut.setShortcut(_translate('MainWindow', 'Ctrl+X', None))
self.actionCopy.setText(_translate('MainWindow', i18n('Copy'), None))
self.actionCopy.setShortcut(_translate('MainWindow', 'Ctrl+C', None))
self.actionPaste.setText(_translate('MainWindow', i18n('Paste'), None))
self.actionPaste.setShortcut(_translate('MainWindow', 'Ctrl+V', None))
self.actionSelectAll.setText(_translate('MainWindow',
i18n('Select all'), None))
self.actionSelectAll.setShortcut(_translate('MainWindow', 'Ctrl+A',
None))
self.actionFind.setText(_translate('MainWindow', i18n('Search'), None))
self.actionFind.setShortcut(_translate('MainWindow', 'Ctrl+F', None))
self.actionReplace.setText(_translate('MainWindow', i18n('Search and replace'), None))
self.actionReplace.setShortcut(_translate('MainWindow', 'Ctrl+H', None))
self.actionFonts.setText(_translate('MainWindow', i18n('Select fonts'), None))
self.actionFonts.setShortcut(_translate('MainWindow', 'Ctrl+T', None))
self.actionPreferences.setText(_translate('MainWindow', i18n('Preferences'), None))
self.actionPreferences.setShortcut(_translate('MainWindow', 'Ctrl+P', None))
self.actionRun.setText(_translate('MainWindow', i18n('Run'), None))
self.actionRun.setToolTip(_translate('MainWindow',
i18n('Executes the current program'), None))
self.actionRun.setShortcut(_translate('MainWindow', 'F5', None))
self.actionStop.setText(_translate('MainWindow', i18n('Stop'), None))
self.actionStop.setToolTip(_translate('MainWindow',
i18n('Stops execution of the current program'), None))
self.actionStop.setShortcut(_translate('MainWindow', 'F6', None))
self.actionCheck.setText(_translate('MainWindow', i18n('Check'), None))
self.actionCheck.setToolTip(_translate('MainWindow',
i18n('Checks if the program is well-formed'), None))
self.actionCheck.setShortcut(_translate('MainWindow', 'F10', None))
self.actionManual.setText(_translate('MainWindow', i18n('Manual'), None))
self.actionManual.setToolTip(_translate('MainWindow',
i18n('Open the Gobstones\'s manual'), None))
self.actionLicense.setText(_translate('MainWindow', i18n('Licence'), None))
self.actionAbout.setText(_translate('MainWindow', i18n('About...'),
None))
self.actionExit.setText(_translate('MainWindow', i18n('Exit'), None))
self.actionExit.setToolTip(_translate('MainWindow',
i18n('Closes the application'), None))
self.actionExit.setShortcut(_translate('MainWindow', 'Ctrl+Q', None))
self.actionOpenBoardEditor.setText(_translate('MainWindow',
i18n('Board editor'), None))
self.actionOpenBoardEditor.setToolTip(_translate('MainWindow',
i18n('Open board editor'), None))
self.actionBoardOptions.setText(_translate('MainWindow',
i18n('Options Board'), None))
self.actionBoardOptions.setToolTip(_translate('MainWindow',
i18n('Select board options'), None))
self.actionLoadBoard.setText(_translate('MainWindow',
i18n('Load board'), None))
self.actionLoadBoard.setToolTip(_translate('MainWindow',
i18n('Open a board from existing .gbb file'), None))
| gpl-3.0 | 4,872,066,273,332,599,000 | 54.123077 | 139 | 0.651084 | false | 4.150193 | false | false | false |
davy39/eric | Helpviewer/AdBlock/AdBlockSubscription.py | 1 | 24766 | # -*- coding: utf-8 -*-
# Copyright (c) 2009 - 2014 Detlev Offenbach <[email protected]>
#
"""
Module implementing the AdBlock subscription class.
"""
from __future__ import unicode_literals
import os
import re
import hashlib
import base64
from PyQt5.QtCore import pyqtSignal, Qt, QObject, QByteArray, QDateTime, \
QUrl, QCryptographicHash, QFile, QIODevice, QTextStream, QDate, QTime, \
qVersion
from PyQt5.QtNetwork import QNetworkReply
from E5Gui import E5MessageBox
import Utilities
import Preferences
class AdBlockSubscription(QObject):
"""
Class implementing the AdBlock subscription.
@signal changed() emitted after the subscription has changed
@signal rulesChanged() emitted after the subscription's rules have changed
@signal enabledChanged(bool) emitted after the enabled state was changed
"""
changed = pyqtSignal()
rulesChanged = pyqtSignal()
enabledChanged = pyqtSignal(bool)
def __init__(self, url, custom, parent=None, default=False):
"""
Constructor
@param url AdBlock URL for the subscription (QUrl)
@param custom flag indicating a custom subscription (boolean)
@param parent reference to the parent object (QObject)
@param default flag indicating a default subscription (boolean)
"""
super(AdBlockSubscription, self).__init__(parent)
self.__custom = custom
self.__url = url.toEncoded()
self.__enabled = False
self.__downloading = None
self.__defaultSubscription = default
self.__title = ""
self.__location = QByteArray()
self.__lastUpdate = QDateTime()
self.__requiresLocation = ""
self.__requiresTitle = ""
self.__updatePeriod = 0 # update period in hours, 0 = use default
self.__remoteModified = QDateTime()
self.__rules = [] # list containing all AdBlock rules
self.__networkExceptionRules = []
self.__networkBlockRules = []
self.__domainRestrictedCssRules = []
self.__elementHidingRules = ""
self.__documentRules = []
self.__elemhideRules = []
self.__checksumRe = re.compile(
r"""^\s*!\s*checksum[\s\-:]+([\w\+\/=]+).*\n""",
re.IGNORECASE | re.MULTILINE)
self.__expiresRe = re.compile(
r"""(?:expires:|expires after)\s*(\d+)\s*(hour|h)?""",
re.IGNORECASE)
self.__remoteModifiedRe = re.compile(
r"""!\s*(?:Last modified|Updated):\s*(\d{1,2})\s*"""
r"""(Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec)\s*"""
r"""(\d{2,4})\s*((\d{1,2}):(\d{2}))?""",
re.IGNORECASE)
self.__monthNameToNumber = {
"Jan": 1,
"Feb": 2,
"Mar": 3,
"Apr": 4,
"May": 5,
"Jun": 6,
"Jul": 7,
"Aug": 8,
"Sep": 9,
"Oct": 10,
"Nov": 11,
"Dec": 12
}
self.__parseUrl(url)
def __parseUrl(self, url):
"""
Private method to parse the AdBlock URL for the subscription.
@param url AdBlock URL for the subscription (QUrl)
"""
if url.scheme() != "abp":
return
if url.path() != "subscribe":
return
if qVersion() >= "5.0.0":
from PyQt5.QtCore import QUrlQuery
urlQuery = QUrlQuery(url)
self.__title = urlQuery.queryItemValue("title")
self.__enabled = urlQuery.queryItemValue("enabled") != "false"
self.__location = QByteArray(urlQuery.queryItemValue("location"))
# Check for required subscription
self.__requiresLocation = urlQuery.queryItemValue(
"requiresLocation")
self.__requiresTitle = urlQuery.queryItemValue("requiresTitle")
if self.__requiresLocation and self.__requiresTitle:
import Helpviewer.HelpWindow
Helpviewer.HelpWindow.HelpWindow.adBlockManager()\
.loadRequiredSubscription(self.__requiresLocation,
self.__requiresTitle)
lastUpdateString = urlQuery.queryItemValue("lastUpdate")
self.__lastUpdate = QDateTime.fromString(lastUpdateString,
Qt.ISODate)
else:
self.__title = \
QUrl.fromPercentEncoding(url.encodedQueryItemValue("title"))
self.__enabled = QUrl.fromPercentEncoding(
url.encodedQueryItemValue("enabled")) != "false"
self.__location = QByteArray(QUrl.fromPercentEncoding(
url.encodedQueryItemValue("location")))
# Check for required subscription
self.__requiresLocation = QUrl.fromPercentEncoding(
url.encodedQueryItemValue("requiresLocation"))
self.__requiresTitle = QUrl.fromPercentEncoding(
url.encodedQueryItemValue("requiresTitle"))
if self.__requiresLocation and self.__requiresTitle:
import Helpviewer.HelpWindow
Helpviewer.HelpWindow.HelpWindow.adBlockManager()\
.loadRequiredSubscription(self.__requiresLocation,
self.__requiresTitle)
lastUpdateByteArray = url.encodedQueryItemValue("lastUpdate")
lastUpdateString = QUrl.fromPercentEncoding(lastUpdateByteArray)
self.__lastUpdate = QDateTime.fromString(lastUpdateString,
Qt.ISODate)
self.__loadRules()
def url(self):
"""
Public method to generate the URL for this subscription.
@return AdBlock URL for the subscription (QUrl)
"""
url = QUrl()
url.setScheme("abp")
url.setPath("subscribe")
queryItems = []
queryItems.append(("location", bytes(self.__location).decode()))
queryItems.append(("title", self.__title))
if self.__requiresLocation and self.__requiresTitle:
queryItems.append(("requiresLocation", self.__requiresLocation))
queryItems.append(("requiresTitle", self.__requiresTitle))
if not self.__enabled:
queryItems.append(("enabled", "false"))
if self.__lastUpdate.isValid():
queryItems.append(("lastUpdate",
self.__lastUpdate.toString(Qt.ISODate)))
if qVersion() >= "5.0.0":
from PyQt5.QtCore import QUrlQuery
query = QUrlQuery()
query.setQueryItems(queryItems)
url.setQuery(query)
else:
url.setQueryItems(queryItems)
return url
def isEnabled(self):
"""
Public method to check, if the subscription is enabled.
@return flag indicating the enabled status (boolean)
"""
return self.__enabled
def setEnabled(self, enabled):
"""
Public method to set the enabled status.
@param enabled flag indicating the enabled status (boolean)
"""
if self.__enabled == enabled:
return
self.__enabled = enabled
self.enabledChanged.emit(enabled)
def title(self):
"""
Public method to get the subscription title.
@return subscription title (string)
"""
return self.__title
def setTitle(self, title):
"""
Public method to set the subscription title.
@param title subscription title (string)
"""
if self.__title == title:
return
self.__title = title
self.changed.emit()
def location(self):
"""
Public method to get the subscription location.
@return URL of the subscription location (QUrl)
"""
return QUrl.fromEncoded(self.__location)
def setLocation(self, url):
"""
Public method to set the subscription location.
@param url URL of the subscription location (QUrl)
"""
if url == self.location():
return
self.__location = url.toEncoded()
self.__lastUpdate = QDateTime()
self.changed.emit()
def requiresLocation(self):
"""
Public method to get the location of a required subscription.
@return location of a required subscription (string)
"""
return self.__requiresLocation
def lastUpdate(self):
"""
Public method to get the date and time of the last update.
@return date and time of the last update (QDateTime)
"""
return self.__lastUpdate
def rulesFileName(self):
"""
Public method to get the name of the rules file.
@return name of the rules file (string)
"""
if self.location().scheme() == "file":
return self.location().toLocalFile()
if self.__location.isEmpty():
return ""
sha1 = bytes(QCryptographicHash.hash(
self.__location, QCryptographicHash.Sha1).toHex()).decode()
dataDir = os.path.join(
Utilities.getConfigDir(), "browser", "subscriptions")
if not os.path.exists(dataDir):
os.makedirs(dataDir)
fileName = os.path.join(
dataDir, "adblock_subscription_{0}".format(sha1))
return fileName
def __loadRules(self):
"""
Private method to load the rules of the subscription.
"""
fileName = self.rulesFileName()
f = QFile(fileName)
if f.exists():
if not f.open(QIODevice.ReadOnly):
E5MessageBox.warning(
None,
self.tr("Load subscription rules"),
self.tr(
"""Unable to open adblock file '{0}' for reading.""")
.format(fileName))
else:
textStream = QTextStream(f)
header = textStream.readLine(1024)
if not header.startswith("[Adblock"):
E5MessageBox.warning(
None,
self.tr("Load subscription rules"),
self.tr("""AdBlock file '{0}' does not start"""
""" with [Adblock.""")
.format(fileName))
f.close()
f.remove()
self.__lastUpdate = QDateTime()
else:
from .AdBlockRule import AdBlockRule
self.__updatePeriod = 0
self.__remoteModified = QDateTime()
self.__rules = []
self.__rules.append(AdBlockRule(header, self))
while not textStream.atEnd():
line = textStream.readLine()
self.__rules.append(AdBlockRule(line, self))
expires = self.__expiresRe.search(line)
if expires:
period, kind = expires.groups()
if kind:
# hours
self.__updatePeriod = int(period)
else:
# days
self.__updatePeriod = int(period) * 24
remoteModified = self.__remoteModifiedRe.search(line)
if remoteModified:
day, month, year, time, hour, minute = \
remoteModified.groups()
self.__remoteModified.setDate(
QDate(int(year),
self.__monthNameToNumber[month],
int(day))
)
if time:
self.__remoteModified.setTime(
QTime(int(hour), int(minute)))
self.__populateCache()
self.changed.emit()
elif not fileName.endswith("_custom"):
self.__lastUpdate = QDateTime()
self.checkForUpdate()
def checkForUpdate(self):
"""
Public method to check for an update.
"""
if self.__updatePeriod:
updatePeriod = self.__updatePeriod
else:
updatePeriod = Preferences.getHelp("AdBlockUpdatePeriod") * 24
if not self.__lastUpdate.isValid() or \
(self.__remoteModified.isValid() and
self.__remoteModified.addSecs(updatePeriod * 3600) <
QDateTime.currentDateTime()) or \
self.__lastUpdate.addSecs(updatePeriod * 3600) < \
QDateTime.currentDateTime():
self.updateNow()
def updateNow(self):
"""
Public method to update the subscription immediately.
"""
if self.__downloading is not None:
return
if not self.location().isValid():
return
if self.location().scheme() == "file":
self.__lastUpdate = QDateTime.currentDateTime()
self.__loadRules()
return
import Helpviewer.HelpWindow
from Helpviewer.Network.FollowRedirectReply import FollowRedirectReply
self.__downloading = FollowRedirectReply(
self.location(),
Helpviewer.HelpWindow.HelpWindow.networkAccessManager())
self.__downloading.finished.connect(self.__rulesDownloaded)
def __rulesDownloaded(self):
"""
Private slot to deal with the downloaded rules.
"""
reply = self.sender()
response = reply.readAll()
reply.close()
self.__downloading = None
if reply.error() != QNetworkReply.NoError:
if not self.__defaultSubscription:
# don't show error if we try to load the default
E5MessageBox.warning(
None,
self.tr("Downloading subscription rules"),
self.tr(
"""<p>Subscription rules could not be"""
""" downloaded.</p><p>Error: {0}</p>""")
.format(reply.errorString()))
else:
# reset after first download attempt
self.__defaultSubscription = False
return
if response.isEmpty():
E5MessageBox.warning(
None,
self.tr("Downloading subscription rules"),
self.tr("""Got empty subscription rules."""))
return
fileName = self.rulesFileName()
QFile.remove(fileName)
f = QFile(fileName)
if not f.open(QIODevice.ReadWrite):
E5MessageBox.warning(
None,
self.tr("Downloading subscription rules"),
self.tr(
"""Unable to open adblock file '{0}' for writing.""")
                .format(fileName))
return
f.write(response)
f.close()
self.__lastUpdate = QDateTime.currentDateTime()
if self.__validateCheckSum(fileName):
self.__loadRules()
else:
QFile.remove(fileName)
self.__downloading = None
def __validateCheckSum(self, fileName):
"""
Private method to check the subscription file's checksum.
@param fileName name of the file containing the subscription (string)
@return flag indicating a valid file (boolean). A file is considered
            valid if the checksum is OK or the file does not contain a
checksum (i.e. cannot be checked).
"""
try:
f = open(fileName, "r", encoding="utf-8")
data = f.read()
f.close()
except (IOError, OSError):
return False
match = re.search(self.__checksumRe, data)
if match:
expectedChecksum = match.group(1)
else:
# consider it as valid
return True
# normalize the data
data = re.sub(r"\r", "", data) # normalize eol
data = re.sub(r"\n+", "\n", data) # remove empty lines
data = re.sub(self.__checksumRe, "", data) # remove checksum line
# calculate checksum
md5 = hashlib.md5()
md5.update(data.encode("utf-8"))
calculatedChecksum = base64.b64encode(md5.digest()).decode()\
.rstrip("=")
if calculatedChecksum == expectedChecksum:
return True
else:
res = E5MessageBox.yesNo(
None,
self.tr("Downloading subscription rules"),
self.tr(
"""<p>AdBlock subscription <b>{0}</b> has a wrong"""
""" checksum.<br/>"""
"""Found: {1}<br/>"""
"""Calculated: {2}<br/>"""
"""Use it anyway?</p>""")
.format(self.__title, expectedChecksum,
calculatedChecksum))
return res
def saveRules(self):
"""
Public method to save the subscription rules.
"""
fileName = self.rulesFileName()
if not fileName:
return
f = QFile(fileName)
if not f.open(QIODevice.ReadWrite | QIODevice.Truncate):
E5MessageBox.warning(
None,
self.tr("Saving subscription rules"),
self.tr(
"""Unable to open adblock file '{0}' for writing.""")
.format(fileName))
return
textStream = QTextStream(f)
if not self.__rules or not self.__rules[0].isHeader():
textStream << "[Adblock Plus 1.1.1]\n"
for rule in self.__rules:
textStream << rule.filter() << "\n"
def match(self, req, urlDomain, urlString):
"""
Public method to check the subscription for a matching rule.
@param req reference to the network request (QNetworkRequest)
@param urlDomain domain of the URL (string)
@param urlString URL (string)
@return reference to the rule object or None (AdBlockRule)
"""
for rule in self.__networkExceptionRules:
if rule.networkMatch(req, urlDomain, urlString):
return None
for rule in self.__networkBlockRules:
if rule.networkMatch(req, urlDomain, urlString):
return rule
return None
def adBlockDisabledForUrl(self, url):
"""
Public method to check, if AdBlock is disabled for the given URL.
@param url URL to check (QUrl)
@return flag indicating disabled state (boolean)
"""
for rule in self.__documentRules:
if rule.urlMatch(url):
return True
return False
def elemHideDisabledForUrl(self, url):
"""
Public method to check, if element hiding is disabled for the given
URL.
@param url URL to check (QUrl)
@return flag indicating disabled state (boolean)
"""
if self.adBlockDisabledForUrl(url):
return True
for rule in self.__elemhideRules:
if rule.urlMatch(url):
return True
return False
def elementHidingRules(self):
"""
Public method to get the element hiding rules.
@return element hiding rules (string)
"""
return self.__elementHidingRules
def elementHidingRulesForDomain(self, domain):
"""
Public method to get the element hiding rules for the given domain.
@param domain domain name (string)
@return element hiding rules (string)
"""
rules = ""
for rule in self.__domainRestrictedCssRules:
if rule.matchDomain(domain):
rules += rule.cssSelector() + ","
return rules
def rule(self, offset):
"""
Public method to get a specific rule.
@param offset offset of the rule (integer)
@return requested rule (AdBlockRule)
"""
if offset >= len(self.__rules):
return None
return self.__rules[offset]
def allRules(self):
"""
Public method to get the list of rules.
@return list of rules (list of AdBlockRule)
"""
return self.__rules[:]
def addRule(self, rule):
"""
Public method to add a rule.
@param rule reference to the rule to add (AdBlockRule)
@return offset of the rule (integer)
"""
self.__rules.append(rule)
self.__populateCache()
self.rulesChanged.emit()
return len(self.__rules) - 1
def removeRule(self, offset):
"""
Public method to remove a rule given the offset.
@param offset offset of the rule to remove (integer)
"""
        if offset < 0 or offset >= len(self.__rules):
return
del self.__rules[offset]
self.__populateCache()
self.rulesChanged.emit()
def replaceRule(self, rule, offset):
"""
Public method to replace a rule given the offset.
@param rule reference to the rule to set (AdBlockRule)
@param offset offset of the rule to remove (integer)
@return requested rule (AdBlockRule)
"""
if offset >= len(self.__rules):
return None
self.__rules[offset] = rule
self.__populateCache()
self.rulesChanged.emit()
return self.__rules[offset]
def __populateCache(self):
"""
Private method to populate the various rule caches.
"""
self.__networkExceptionRules = []
self.__networkBlockRules = []
self.__domainRestrictedCssRules = []
self.__elementHidingRules = ""
self.__documentRules = []
self.__elemhideRules = []
for rule in self.__rules:
if not rule.isEnabled():
continue
if rule.isCSSRule():
if rule.isDomainRestricted():
self.__domainRestrictedCssRules.append(rule)
else:
self.__elementHidingRules += rule.cssSelector() + ","
elif rule.isDocument():
self.__documentRules.append(rule)
elif rule.isElementHiding():
self.__elemhideRules.append(rule)
elif rule.isException():
self.__networkExceptionRules.append(rule)
else:
self.__networkBlockRules.append(rule)
def canEditRules(self):
"""
Public method to check, if rules can be edited.
@return flag indicating rules may be edited (boolean)
"""
return self.__custom
def canBeRemoved(self):
"""
Public method to check, if the subscription can be removed.
@return flag indicating removal is allowed (boolean)
"""
return not self.__custom and not self.__defaultSubscription
def setRuleEnabled(self, offset, enabled):
"""
Public method to enable a specific rule.
@param offset offset of the rule (integer)
@param enabled new enabled state (boolean)
@return reference to the changed rule (AdBlockRule)
"""
if offset >= len(self.__rules):
return None
rule = self.__rules[offset]
rule.setEnabled(enabled)
if rule.isCSSRule():
import Helpviewer.HelpWindow
self.__populateCache()
Helpviewer.HelpWindow.HelpWindow.mainWindow()\
.reloadUserStyleSheet()
return rule
| gpl-3.0 | -8,359,129,584,118,204,000 | 33.493036 | 78 | 0.519503 | false | 4.956174 | false | false | false |
ariegg/webiopi-drivers | chips/sensor/ina219/ina219.py | 1 | 22877 | # Copyright 2017 Andreas Riegg - t-h-i-n-x.net
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Changelog
#
# 1.0 2017/01/03 Initial release
#
# Config parameters
#
# - slave 8 bit Value of the I2C slave address for the chip.
# Defaults to 0x40. Possible values are from 0x40 to 0x4F.
# - shunt Float Value of the shunt resistor in Ohms. Default is 0.1.
# - vrange Integer Vrange value of the chip. Valid values are 16 or 32.
# Default is 32.
# - gaindiv Integer Gain divider (PGA) value of the chip. Valid values
# are from (1, 2, 4 , 8). Default is 8.
# - mode Integer Value of the chip mode. Possible values are from
# 0x0 to 0x7. Default is 0x7.
# - badc Integer Value of the voltage bus ADC settings. Possible
# values are from 0x0 to 0xF. Default is 0x3.
# - sadc Integer Value of the shunt voltage ADC settings. Possible
# values are from 0x0 to 0xF. Default is 0x3.
# - vmax Float Value of the desired vmax value for automatic
# calibration. Default is None. This parameter will
# only be used of imax is also not None.
# - imax Float Value of the desired imax value for automatic
# calibration. Default is None. If imax is given,
# the values for vrange, gaindiv and currentLSB will be
# ignored and calculated instead. If imax is higher than
# possible, then the highest possible value will be
# used instead and overflow may occur.
# - currentLSB Float Value of the current LSB to use. Default is None.
# If you mistrust the automatic calibration you can
# set the current LSB manual with this parameter. If
# used, make sure to manual set the desired gaindiv also.
# - bus String Name of the I2C bus
#
# Usage remarks
#
# - The default values of this driver are valid for a 32 V Bus range, a maximum
# possible current of 3.2 A and a current resolution of around 98 microAmperes/Bit.
# If you are fine with this you can just use those defaults.
# - If you want to have some more configuration while keeping it still simple you
# can provide parameters for vmax and imax and the driver will do its best to
# automatically calculate vrange, gaindiv and calibration with a very good resolution.
# - If you prefer complete manual setup you should set vrange, gaindiv, currentLSB and
# optional fine-tuned calibration (in this order).
# - Setting the calibration register via setCalibration() is to be used for the final
# calibration as explained in the chip spec for the final fine tuning. It must not
# be used for the currentLSB setting as this is calculated automatically by this
# driver based on the values of shunt and gaindiv.
# - This driver implements an automatical calibration feature calibrate(vmax, imax)
# that can be used during device creation and also at runtime. The value for vmax
# is used to set vrange within the allowed limits. The value for imax is used to
# set gaindiv so that the maximal desired current can be measured at the highest
# possible resolution for current LSB. If the desired imax is higher than the
# possible imax based on the value of shunt, then the maximum possible imax will
# be used. You get the choosen values via the response of the calibrate(...) call.
# In this case, sending a higher current through the shunt will result in overflow
# which will generate a debugging message (only when reading the bus voltage).
# - If values for vmax and imax are given at device creation they will override the
# init values for vrange and gaindiv as those will be ignored then and calculated via
# the automatic calibration feature instead.
# - All chip parameters with the exception of shunt can be changed at runtime. If
# an updated parameter has an influence on the currentLSB and/or calibration value,
# then this/these will be re-calculated automatically and the calibration register
# will be set also. If you use setCalibration() for final fine-tuning you have to
# repeat that step again if automatic calibration has taken place.
# - Updating of the mode value at runtime allows triggered conversions and power-down
# of the chip.
# - If you are unsure about the calculated values set debugging to "True" and look at
# the debugging messages as they will notify you about all resulting values. Or
# call getConfiguration() to see all values.
# - If you encounter overflow (getting the overflow error) try to increase the
# gaindiv value or reduce the shunt value (please as real hardware change).
#
# Implementation remarks
#
# - This driver is implemented based on the specs from Intel.
# - The default value for the shunt resistor of 0.1 Ohms is appropriate for the
# breakout board from Adafruit for this chip (Adafruit PRODUCT ID: 904).
# - The parameter value for shunt can't be changed at runtime after device
# creation because it is very unlikely to modify the shunt resistor during operation
# of the chip. Please provide the correct value via the config options or at
# device creation if the default value does not suit your hardware setup.
# - This driver uses floating point calculation and takes no care about integer
# only arithmetics. For that reason, the mathematical lowest possible LSB value is
# calculated automatically and used for best resolution with the exception when you
# manual set your own current LSB value.
# - If you want to override/select the current LSB value manual you can do that
# via config parameter or at runtime. In this case make sure to use the correct
# corresponding gaindiv value otherwise the value readings will be wrong.
# - If for some reason (e.g. an impropriate setting of the currentLSB) the value
# of the calibration register would be out of its allowed bounds it will be set
# to zero so that all current and power readings will also be zero to avoid wrong
# measurements until the calibration register is set again to an allowed range.
# - This driver does not use the shunt adc register as this value is not needed
# for operation if the calibration register is used.
#
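# Worked example (illustrative) of the calibration math for the default
# setup (shunt=0.1 Ohm, gaindiv=8), matching the "around 98 microAmperes/Bit"
# figure quoted above:
#
#     currentLSB  = 0.04 * gaindiv / shunt / 2**15
#                 = 0.04 * 8 / 0.1 / 32768           ~= 9.766e-05 A/bit
#     calibration = trunc(0.04096 / currentLSB / shunt)
#                 = trunc(0.04096 / 9.766e-05 / 0.1)  = 4194
#     imax        = 0.04 * gaindiv / shunt            = 3.2 A
#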
from webiopi.utils.logger import debug
from webiopi.decorators.rest import request, response, api
from webiopi.utils.types import toint, signInteger, M_JSON
from webiopi.devices.i2c import I2C
from webiopi.devices.sensor import Current, Voltage, Power
#---------- Class definition ----------
class INA219(I2C, Current, Voltage, Power):
CONFIGURATION_ADDRESS = 0x00
#SHUNTADC_ADDRESS = 0x01
BUSADC_ADDRESS = 0x02
POWER_ADDRESS = 0x03
CURRENT_ADDRESS = 0x04
CALIBRATION_ADDRESS = 0x05
RESET_FLAG = 0b1 << 15
BRNG_16_VALUE = 0b0 << 13
BRNG_32_VALUE = 0b1 << 13
BRNG_MASK = 0b0010000000000000
GAINDIV_1_VALUE = 0b00 << 11
GAINDIV_2_VALUE = 0b01 << 11
GAINDIV_4_VALUE = 0b10 << 11
GAINDIV_8_VALUE = 0b11 << 11
GAINDIV_MASK = 0b0001100000000000
BADC_MASK = 0b0000011110000000
SADC_MASK = 0b0000000001111000
MODE_MASK = 0b0000000000000111
OVERFLOW_MASK = 0b0000000000000001
CALIBRATION_MASK = 0b1111111111111110
VSHUNT_FULL_SCALE_BASE_VALUE = 0.04 # always fixed to 40mV
CALIBRATION_CONSTANT_VALUE = 0.04096 # fixed value from data sheet
BUS_VOLTAGE_LSB_VALUE = 0.004 # always fixed to 4mV
CURRENT_LSB_TO_POWER_LSB_VALUE = 20 # always 20 times the currentLSB value
#---------- Class initialisation ----------
def __init__(self, slave=0x40, shunt=0.1, vrange=32, gaindiv=8, mode=0x7, badc=0x3, sadc=0x3, vmax=None, imax=None, currentLSB=None, bus=None):
I2C.__init__(self, toint(slave), bus)
self.__setShunt__(float(shunt))
self.__reset__()
        if imax is not None:
            if vmax is None:
                vmax = toint(vrange)
else:
vmax = float(vmax)
imax = float(imax)
self.__calibrate__(vmax, imax)
else:
self.__setVrange__(toint(vrange))
self.__setGaindiv__(toint(gaindiv))
        if currentLSB is not None:
self.__setCurrentLSB__(float(currentLSB))
self.__setMode__(toint(mode))
self.__setBadc__(toint(badc))
self.__setSadc__(toint(sadc))
#---------- Abstraction framework contracts ----------
def __str__(self):
return "INA219(slave=0x%02X, dev=%s, shunt=%f Ohm)" % (self.slave, self.device(), self._shunt)
def __family__(self):
return [Current.__family__(self), Voltage.__family__(self), Power.__family__(self)]
#---------- Current abstraction related methods ----------
def __getMilliampere__(self):
rawCurrent = self.__read16BitRegister__(self.CURRENT_ADDRESS)
debug("%s: raw current=%s" % (self.__str__(), bin(rawCurrent)))
return signInteger(rawCurrent, 16) * self._currentLSB * 1000 # scale from Amperes to milliAmperes
#---------- Voltage abstraction related methods ----------
def __getVolt__(self):
rawVoltage = self.__read16BitRegister__(self.BUSADC_ADDRESS)
debug("%s: raw voltage=%s" % (self.__str__(), bin(rawVoltage)))
overflow = rawVoltage & self.OVERFLOW_MASK
if overflow:
debug("%s: overflow condition" % self.__str__())
return (rawVoltage >> 3) * self.BUS_VOLTAGE_LSB_VALUE
#---------- Power abstraction related methods ----------
def __getWatt__(self):
rawWatt = self.__read16BitRegister__(self.POWER_ADDRESS)
debug("%s: raw watt=%s" % (self.__str__(), bin(rawWatt)))
return rawWatt * self.CURRENT_LSB_TO_POWER_LSB_VALUE * self._currentLSB
#---------- Device methods that implement features including additional REST mappings ----------
@api("Device", 3, "feature", "driver")
@request("POST", "run/calibrate/%(pars)s")
@response(contentType=M_JSON)
def calibrate(self, pars):
(vmax, imax) = pars.split(",")
vmax = float(vmax)
if vmax <= 0 or vmax > 32:
raise ValueError("Calibration parameter error, vmax:%f out of allowed range [0 < vmax <= 32]" % vmax)
imax = float(imax)
self.__calibrate__(vmax, imax)
values = self.getConfiguration()
values["vmax required"] = "%f" % vmax
values["imax required"] = "%f" % imax
return values
def __calibrate__(self, vmax, imax):
if vmax > 16:
self.setVrange(32)
else:
self.setVrange(16)
gaindiv = 1
shuntdiv = 1 / self._shunt
while True:
imaxpossible = self.__calculateImaxpossible__(gaindiv, shuntdiv)
if gaindiv == 8:
break
if imax > imaxpossible:
gaindiv *= 2
else:
break
self.setGaindiv(gaindiv)
debug("%s: auto-calibrated, max possible current=%f A" % (self.__str__(), imaxpossible))
@api("Device", 3, "feature", "driver")
@request("POST", "run/reset")
@response("%s")
def reset(self):
self.__reset__()
return "Chip is reset."
def __reset__(self):
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, self.RESET_FLAG)
debug("%s: chip reset" % self.__str__())
@api("Device", 3, "feature", "driver")
@request("POST", "run/recalibrate")
@response("%d")
def reCalibrate(self):
self.__reCalibrate__()
return self.__getCalibration__()
#---------- Device methods that implement chip configuration settings including additional REST mappings ----------
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/*")
@response(contentType=M_JSON)
def getConfiguration(self):
values = {}
values["vmax possible"] = "%d" % self._vrange
values["imax possible"] = "%f" % self.__calculateImaxpossible__(self._gaindiv, 1 / self._shunt)
values["current LSB"] = "%f" % self._currentLSB
values["calibration"] = "%d" % self._cal
values["gaindiv"] = "%d" % self._gaindiv
values["shunt"] = "%f" % self._shunt
return values
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/calibration")
@response("%d")
def getCalibration(self):
return self.__getCalibration__()
def __getCalibration__(self):
return self.__read16BitRegister__(self.CALIBRATION_ADDRESS)
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/calibration/%(calibration)d")
@response("%d")
def setCalibration(self, calibration):
self.__setCalibration__(calibration)
return self.__getCalibration__()
def __setCalibration__(self, calibration):
if calibration not in range(0, 65535):
self.__write16BitRegister__(self.CALIBRATION_ADDRESS, 0) # zero out calibration register to avoid wrong measurements
self._cal = 0
debug("%s: set calibration=0" % self.__str__())
raise ValueError("Parameter calibration:%d not in the allowed range [0 .. 65534]" % calibration)
calibration = calibration & self.CALIBRATION_MASK
self.__write16BitRegister__(self.CALIBRATION_ADDRESS, calibration)
self._cal = calibration
debug("%s: set calibration=%d" % (self.__str__(), self._cal))
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/vrange/%(vrange)d")
@response("%d")
def setVrange(self, vrange):
self.__setVrange__(vrange)
return self.__getVrange__()
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/vrange")
@response("%d")
def getVrange(self):
return self.__getVrange__()
def __setVrange__(self, vrange):
if vrange not in (16, 32):
raise ValueError("Parameter vrange:%d not one of the allowed values (16, 32)" % vrange)
if vrange == 16:
bitsVrange = self.BRNG_16_VALUE
elif vrange == 32:
bitsVrange = self.BRNG_32_VALUE
currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
newValue = (currentValue & ~self.BRNG_MASK) | bitsVrange
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
self._vrange = vrange
debug("%s: set vrange=%d V" % (self.__str__(), vrange))
def __getVrange__(self):
bitsVrange = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.BRNG_MASK) >> 13
if bitsVrange == self.BRNG_16_VALUE:
self._vrange = 16
elif bitsVrange == self.BRNG_32_VALUE:
self._vrange = 32
return self._vrange
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/gaindiv/%(gaindiv)d")
@response("%d")
def setGaindiv(self, gaindiv):
self.__setGaindiv__(gaindiv)
return self.__getGaindiv__()
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/gaindiv")
@response("%d")
def getGaindiv(self):
return self.__getGaindiv__()
def __setGaindiv__(self, gaindiv):
if gaindiv not in (1, 2, 4, 8):
raise ValueError("Parameter gaindiv:%d not one of the allowed values (1, 2, 4, 8)" % gaindiv)
if gaindiv == 1:
bitsGaindiv = self.GAINDIV_1_VALUE
elif gaindiv == 2:
bitsGaindiv = self.GAINDIV_2_VALUE
elif gaindiv == 4:
bitsGaindiv = self.GAINDIV_4_VALUE
elif gaindiv == 8:
bitsGaindiv = self.GAINDIV_8_VALUE
currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
newValue = (currentValue & ~self.GAINDIV_MASK) | bitsGaindiv
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
self._gaindiv = gaindiv
debug("%s: set gaindiv=%d" % (self.__str__(), gaindiv))
self.__reCalculate__()
def __getGaindiv__(self):
bitsGaindiv = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.GAINDIV_MASK) >> 11
if bitsGaindiv == self.GAINDIV_1_VALUE:
self._gaindiv = 1
elif bitsGaindiv == self.GAINDIV_2_VALUE:
self._gaindiv = 2
elif bitsGaindiv == self.GAINDIV_4_VALUE:
self._gaindiv = 4
elif bitsGaindiv == self.GAINDIV_8_VALUE:
self._gaindiv = 8
return self._gaindiv
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/mode/%(mode)d")
@response("%d")
def setMode(self, mode):
self.__setMode__(mode)
return self.__getMode__()
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/mode")
@response("%d")
def getMode(self):
return self.__getMode__()
def __setMode__(self, mode):
if mode not in range(0, 0x8):
raise ValueError("Parameter mode:0x%1X not in the allowed range [0x0 .. 0x7]" % mode)
currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
newValue = (currentValue & ~self.MODE_MASK) | mode
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
debug("%s: set mode=0x%1X" % (self.__str__(), mode))
def __getMode__(self):
bitsMode = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.MODE_MASK)
return bitsMode
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/badc/%(badc)d")
@response("%d")
def setBadc(self, badc):
self.__setBadc__(badc)
return self.__getBadc__()
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/badc")
@response("%d")
def getBadc(self):
return self.__getBadc__()
def __setBadc__(self, badc):
if badc not in range(0, 0x10):
raise ValueError("Parameter badc:0x%1X not in the allowed range [0x0 .. 0xF]" % badc)
currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
newValue = (currentValue & ~self.BADC_MASK) | badc << 7
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
debug("%s: set badc=0x%1X" % (self.__str__(), badc))
def __getBadc__(self):
bitsBadc = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.BADC_MASK) >> 7
return bitsBadc
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/sadc/%(sadc)d")
@response("%d")
def setSadc(self, sadc):
self.__setSadc__(sadc)
return self.__getSadc__()
@api("Device", 3, "configuration", "driver")
@request("GET", "configure/sadc")
@response("%d")
def getSadc(self):
return self.__getSadc__()
def __setSadc__(self, sadc):
if sadc not in range(0, 0x10):
raise ValueError("Parameter sadc:0x%1X not in the allowed range [0x0 .. 0xF]" % sadc)
currentValue = self.__read16BitRegister__(self.CONFIGURATION_ADDRESS)
newValue = (currentValue & ~self.SADC_MASK) | sadc << 3
self.__write16BitRegister__(self.CONFIGURATION_ADDRESS, newValue)
debug("%s: set sadc=0x%1X" % (self.__str__(), sadc))
def __getSadc__(self):
bitsSadc = (self.__read16BitRegister__(self.CONFIGURATION_ADDRESS) & self.SADC_MASK) >> 3
return bitsSadc
@api("Device", 3, "configuration", "driver")
@request("POST", "configure/currentlsb/%(currentLSB)f")
@response("%f")
def setCurrentLSB(self, currentLSB):
self.__setCurrentLSB__(currentLSB)
return self._currentLSB
#---------- Device methods that implement chip configuration settings ----------
def __setShunt__(self, shunt):
self._shunt = shunt
def __setCurrentLSB__(self, currentLSB):
self._currentLSB = currentLSB
debug("%s: set current LSB=%f mA" % (self.__str__(), self._currentLSB * 1000))
self.__setCalibration__(self.__calculateCalibration__())
#---------- Calibration helper methods ----------
def __reCalculate__(self):
self.__setCurrentLSB__(self.__calculateCurrentLSB__())
def __reCalibrate__(self):
self.__setCalibration__(self._cal)
def __calculateCurrentLSB__(self):
calCurrentLSB = self.VSHUNT_FULL_SCALE_BASE_VALUE * self._gaindiv / self._shunt / 2**15 # in Amperes
debug("%s: calculated current LSB=%f mA" % (self.__str__(), calCurrentLSB * 1000))
return calCurrentLSB
def __calculateCalibration__(self):
calCal = int(self.CALIBRATION_CONSTANT_VALUE / self._currentLSB / self._shunt) # this does trunc
debug("%s: calculated calibration=%d" % (self.__str__(), calCal))
return calCal
def __calculateImaxpossible__(self, gaindiv, shuntdiv):
return self.VSHUNT_FULL_SCALE_BASE_VALUE * gaindiv * shuntdiv
#---------- Register helper methods ----------
def __read16BitRegister__(self, addr):
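        # Registers are 16 bits wide; the device returns the most significant byte first.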
regBytes = self.readRegisters(addr, 2)
return regBytes[0] << 8 | regBytes[1]
def __write16BitRegister__(self, addr, word):
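        # Split the 16-bit word into two bytes, most significant byte first.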
data = bytearray(2)
data[0] = (word >> 8) & 0xFF
data[1] = word & 0xFF
self.writeRegisters(addr , data)
| apache-2.0 | -8,523,589,605,352,649,000 | 42.856863 | 147 | 0.604362 | false | 3.776952 | true | false | false |
mhorn71/StarbaseMini | instument_builder/builder.py | 1 | 1671 | __author__ = 'mark'
# StarbaseMini Staribus/Starinet Client for the British Astronomical Association Staribus Protocol
# Copyright (C) 2015 Mark Horn
#
# This file is part of StarbaseMini.
#
# StarbaseMini is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# StarbaseMini is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with StarbaseMini. If not, see <http://www.gnu.org/licenses/>.
import sys
import logging
from PyQt5 import QtWidgets
from ui import Ui_InstrumentBuilderDialog
logger = logging.getLogger('instrument.builder')
class InstrumentBuilder(QtWidgets.QDialog, Ui_InstrumentBuilderDialog):
def __init__(self):
QtWidgets.QDialog.__init__(self)
self.setupUi(self)
# Style sheets
stylebool = False
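        # Select the stylesheet matching the current platform; unknown platforms run unstyled.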
if sys.platform.startswith('darwin'):
stylesheet = 'css/macStyle.css'
stylebool = True
elif sys.platform.startswith('win32'):
stylesheet = 'css/winStyle.css'
stylebool = True
elif sys.platform.startswith('linux'):
stylesheet = 'css/nixStyle.css'
stylebool = True
if stylebool:
with open(stylesheet, 'r') as style:
self.setStyleSheet(style.read()) | gpl-2.0 | -8,834,807,194,172,752,000 | 32.44 | 98 | 0.692998 | false | 4.055825 | false | false | false |
mganeva/mantid | Framework/PythonInterface/test/python/plugins/algorithms/WorkflowAlgorithms/SANSILLReductionTest.py | 1 | 6050 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source
# & Institut Laue - Langevin
# SPDX - License - Identifier: GPL - 3.0 +
from __future__ import (absolute_import, division, print_function)
import unittest
from mantid.api import MatrixWorkspace
from mantid.simpleapi import SANSILLReduction, config, mtd
class SANSILLReductionTest(unittest.TestCase):
_facility = None
def setUp(self):
self._facility = config['default.facility']
config.appendDataSearchSubDir('ILL/D11/')
config.appendDataSearchSubDir('ILL/D33/')
config['default.facility'] = 'ILL'
def tearDown(self):
config['default.facility'] = self._facility
mtd.clear()
def test_absorber(self):
SANSILLReduction(Run='010462', ProcessAs='Absorber', OutputWorkspace='Cd')
self._check_output(mtd['Cd'], True, 1, 128*128)
self._check_process_flag(mtd['Cd'], 'Absorber')
def test_beam(self):
SANSILLReduction(Run='010414', ProcessAs='Beam', OutputWorkspace='Db', FluxOutputWorkspace='Fl')
self._check_output(mtd['Db'], True, 1, 128*128)
self._check_process_flag(mtd['Db'], 'Beam')
run = mtd['Db'].getRun()
self.assertAlmostEqual(run.getLogData('BeamCenterX').value, -0.0048, delta=1e-4)
self.assertAlmostEqual(run.getLogData('BeamCenterY').value, -0.0027, delta=1e-4)
self._check_output(mtd['Fl'], False, 1, 128*128)
self._check_process_flag(mtd['Fl'], 'Beam')
self.assertAlmostEqual(mtd['Fl'].readY(0)[0], 6628249, delta=1)
self.assertAlmostEqual(mtd['Fl'].readE(0)[0], 8566, delta=1)
def test_transmission(self):
SANSILLReduction(Run='010414', ProcessAs='Beam', OutputWorkspace='Db')
SANSILLReduction(Run='010585', ProcessAs='Transmission', BeamInputWorkspace='Db', OutputWorkspace='Tr')
self.assertAlmostEqual(mtd['Tr'].readY(0)[0], 0.640, delta=1e-3)
self.assertAlmostEqual(mtd['Tr'].readE(0)[0], 0.0019, delta=1e-4)
self._check_process_flag(mtd['Tr'], 'Transmission')
def test_container(self):
SANSILLReduction(Run='010460', ProcessAs='Container', OutputWorkspace='can')
self._check_output(mtd['can'], True, 1, 128*128)
self._check_process_flag(mtd['can'], 'Container')
def test_reference(self):
SANSILLReduction(Run='010453', ProcessAs='Reference', SensitivityOutputWorkspace='sens', OutputWorkspace='water')
self._check_output(mtd['water'], True, 1, 128*128)
self._check_output(mtd['sens'], False, 1, 128*128)
self._check_process_flag(mtd['water'], 'Reference')
self._check_process_flag(mtd['sens'], 'Sensitivity')
def test_sample(self):
SANSILLReduction(Run='010569', ProcessAs='Sample', OutputWorkspace='sample')
self._check_output(mtd['sample'], True, 1, 128*128)
self._check_process_flag(mtd['sample'], 'Sample')
def test_absorber_tof(self):
# D33 VTOF
# actually this is a container run, not an absorber, but is fine for this test
SANSILLReduction(Run='093409', ProcessAs='Absorber', OutputWorkspace='absorber')
self._check_output(mtd['absorber'], True, 30, 256*256)
self._check_process_flag(mtd['absorber'], 'Absorber')
def test_beam_tof(self):
# D33 VTOF
SANSILLReduction(Run='093406', ProcessAs='Beam', OutputWorkspace='beam', FluxOutputWorkspace='flux')
self._check_output(mtd['beam'], True, 30, 256*256)
self._check_process_flag(mtd['beam'], 'Beam')
run = mtd['beam'].getRun()
self.assertAlmostEqual(run.getLogData('BeamCenterX').value, -0.0025, delta=1e-4)
self.assertAlmostEqual(run.getLogData('BeamCenterY').value, 0.0009, delta=1e-4)
self._check_output(mtd['flux'], False, 30, 256*256)
self._check_process_flag(mtd['flux'], 'Beam')
def test_transmission_tof(self):
# D33 VTOF
SANSILLReduction(Run='093406', ProcessAs='Beam', OutputWorkspace='beam')
SANSILLReduction(Run='093407', ProcessAs='Transmission', BeamInputWorkspace='beam', OutputWorkspace='ctr')
self._check_output(mtd['ctr'], False, 75, 1)
def test_container_tof(self):
# D33 VTOF
# this is actually a sample run, not water, but is fine for this test
SANSILLReduction(Run='093410', ProcessAs='Reference', OutputWorkspace='ref')
self._check_output(mtd['ref'], True, 30, 256*256)
self._check_process_flag(mtd['ref'], 'Reference')
def test_sample_tof(self):
# D33 VTOF, Pluronic F127
SANSILLReduction(Run='093410', ProcessAs='Sample', OutputWorkspace='sample')
self._check_output(mtd['sample'], True, 30, 256*256)
self._check_process_flag(mtd['sample'], 'Sample')
def _check_process_flag(self, ws, value):
        self.assertEqual(ws.getRun().getLogData('ProcessedAs').value, value)
def _check_output(self, ws, logs, blocksize, spectra):
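        # Sanity checks shared by the outputs of all ProcessAs modes.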
self.assertTrue(ws)
self.assertTrue(isinstance(ws, MatrixWorkspace))
self.assertTrue(ws.isHistogramData())
self.assertTrue(not ws.isDistribution())
self.assertEqual(ws.getAxis(0).getUnit().unitID(), "Wavelength")
self.assertEqual(ws.blocksize(), blocksize)
self.assertEqual(ws.getNumberHistograms(), spectra)
self.assertTrue(ws.getInstrument())
self.assertTrue(ws.getRun())
self.assertTrue(ws.getHistory())
if logs:
self.assertTrue(ws.getRun().hasProperty('qmin'))
self.assertTrue(ws.getRun().hasProperty('qmax'))
self.assertTrue(ws.getRun().hasProperty('l2'))
self.assertTrue(ws.getRun().hasProperty('pixel_height'))
self.assertTrue(ws.getRun().hasProperty('pixel_width'))
self.assertTrue(ws.getRun().hasProperty('collimation.actual_position'))
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | 7,514,569,693,448,580,000 | 45.899225 | 121 | 0.652231 | false | 3.395062 | true | false | false |
openstack/tempest | tempest/cmd/cleanup_service.py | 1 | 39207 | # Copyright 2015 Dell Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from urllib import parse as urllib
from oslo_log import log as logging
from tempest import clients
from tempest.common import credentials_factory as credentials
from tempest.common import identity
from tempest.common import utils
from tempest.common.utils import net_info
from tempest import config
from tempest.lib import exceptions
LOG = logging.getLogger('tempest.cmd.cleanup')
CONF = config.CONF
CONF_FLAVORS = None
CONF_IMAGES = None
CONF_NETWORKS = []
CONF_PRIV_NETWORK_NAME = None
CONF_PUB_NETWORK = None
CONF_PUB_ROUTER = None
CONF_PROJECTS = None
CONF_USERS = None
IS_CINDER = None
IS_GLANCE = None
IS_NEUTRON = None
IS_NOVA = None
def init_conf():
global CONF_FLAVORS
global CONF_IMAGES
global CONF_NETWORKS
global CONF_PRIV_NETWORK
global CONF_PRIV_NETWORK_NAME
global CONF_PUB_NETWORK
global CONF_PUB_ROUTER
global CONF_PROJECTS
global CONF_USERS
global IS_CINDER
global IS_GLANCE
global IS_NEUTRON
global IS_NOVA
IS_CINDER = CONF.service_available.cinder
IS_GLANCE = CONF.service_available.glance
IS_NEUTRON = CONF.service_available.neutron
IS_NOVA = CONF.service_available.nova
CONF_FLAVORS = [CONF.compute.flavor_ref, CONF.compute.flavor_ref_alt]
CONF_IMAGES = [CONF.compute.image_ref, CONF.compute.image_ref_alt]
CONF_PRIV_NETWORK_NAME = CONF.compute.fixed_network_name
CONF_PUB_NETWORK = CONF.network.public_network_id
CONF_PUB_ROUTER = CONF.network.public_router_id
CONF_PROJECTS = [CONF.auth.admin_project_name]
CONF_USERS = [CONF.auth.admin_username]
if IS_NEUTRON:
CONF_PRIV_NETWORK = _get_network_id(CONF.compute.fixed_network_name,
CONF.auth.admin_project_name)
CONF_NETWORKS = [CONF_PUB_NETWORK, CONF_PRIV_NETWORK]
def _get_network_id(net_name, project_name):
am = clients.Manager(
credentials.get_configured_admin_credentials())
net_cl = am.networks_client
pr_cl = am.projects_client
networks = net_cl.list_networks()
project = identity.get_project_by_name(pr_cl, project_name)
p_id = project['id']
n_id = None
for net in networks['networks']:
if (net['project_id'] == p_id and net['name'] == net_name):
n_id = net['id']
break
return n_id
class BaseService(object):
def __init__(self, kwargs):
self.client = None
for key, value in kwargs.items():
setattr(self, key, value)
self.tenant_filter = {}
if hasattr(self, 'tenant_id'):
self.tenant_filter['project_id'] = self.tenant_id
def _filter_by_tenant_id(self, item_list):
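        # Pass items through unchanged unless a tenant id is configured and the items expose one.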
if (item_list is None or
not item_list or
not hasattr(self, 'tenant_id') or
self.tenant_id is None or
'tenant_id' not in item_list[0]):
return item_list
return [item for item in item_list
if item['tenant_id'] == self.tenant_id]
def list(self):
pass
def delete(self):
pass
def dry_run(self):
pass
def save_state(self):
pass
def run(self):
try:
if self.is_dry_run:
self.dry_run()
elif self.is_save_state:
self.save_state()
else:
self.delete()
except exceptions.NotImplemented as exc:
# Many OpenStack services use extensions logic to implement the
# features or resources. Tempest cleanup tries to clean up the test
# resources without having much logic of extensions checks etc.
# If any of the extension is missing then, service will return
# NotImplemented error.
msg = ("Got NotImplemented error in %s, full exception: %s" %
(str(self.__class__), str(exc)))
LOG.exception(msg)
self.got_exceptions.append(exc)
class SnapshotService(BaseService):
def __init__(self, manager, **kwargs):
super(SnapshotService, self).__init__(kwargs)
self.client = manager.snapshots_client_latest
def list(self):
client = self.client
snaps = client.list_snapshots()['snapshots']
if not self.is_save_state:
# recreate list removing saved snapshots
snaps = [snap for snap in snaps if snap['id']
not in self.saved_state_json['snapshots'].keys()]
LOG.debug("List count, %s Snapshots", len(snaps))
return snaps
def delete(self):
snaps = self.list()
client = self.client
for snap in snaps:
try:
LOG.debug("Deleting Snapshot with id %s", snap['id'])
client.delete_snapshot(snap['id'])
except Exception:
LOG.exception("Delete Snapshot %s exception.", snap['id'])
def dry_run(self):
snaps = self.list()
self.data['snapshots'] = snaps
def save_state(self):
snaps = self.list()
self.data['snapshots'] = {}
for snap in snaps:
self.data['snapshots'][snap['id']] = snap['name']
class ServerService(BaseService):
def __init__(self, manager, **kwargs):
super(ServerService, self).__init__(kwargs)
self.client = manager.servers_client
self.server_groups_client = manager.server_groups_client
def list(self):
client = self.client
servers_body = client.list_servers()
servers = servers_body['servers']
if not self.is_save_state:
# recreate list removing saved servers
servers = [server for server in servers if server['id']
not in self.saved_state_json['servers'].keys()]
LOG.debug("List count, %s Servers", len(servers))
return servers
def delete(self):
client = self.client
servers = self.list()
for server in servers:
try:
LOG.debug("Deleting Server with id %s", server['id'])
client.delete_server(server['id'])
except Exception:
LOG.exception("Delete Server %s exception.", server['id'])
def dry_run(self):
servers = self.list()
self.data['servers'] = servers
def save_state(self):
servers = self.list()
self.data['servers'] = {}
for server in servers:
self.data['servers'][server['id']] = server['name']
class ServerGroupService(ServerService):
def list(self):
client = self.server_groups_client
sgs = client.list_server_groups()['server_groups']
if not self.is_save_state:
# recreate list removing saved server_groups
sgs = [sg for sg in sgs if sg['id']
not in self.saved_state_json['server_groups'].keys()]
LOG.debug("List count, %s Server Groups", len(sgs))
return sgs
def delete(self):
client = self.server_groups_client
sgs = self.list()
for sg in sgs:
try:
LOG.debug("Deleting Server Group with id %s", sg['id'])
client.delete_server_group(sg['id'])
except Exception:
LOG.exception("Delete Server Group %s exception.", sg['id'])
def dry_run(self):
sgs = self.list()
self.data['server_groups'] = sgs
def save_state(self):
sgs = self.list()
self.data['server_groups'] = {}
for sg in sgs:
self.data['server_groups'][sg['id']] = sg['name']
class KeyPairService(BaseService):
def __init__(self, manager, **kwargs):
super(KeyPairService, self).__init__(kwargs)
self.client = manager.keypairs_client
def list(self):
client = self.client
keypairs = client.list_keypairs()['keypairs']
if not self.is_save_state:
# recreate list removing saved keypairs
keypairs = [keypair for keypair in keypairs
if keypair['keypair']['name']
not in self.saved_state_json['keypairs'].keys()]
LOG.debug("List count, %s Keypairs", len(keypairs))
return keypairs
def delete(self):
client = self.client
keypairs = self.list()
for k in keypairs:
name = k['keypair']['name']
try:
LOG.debug("Deleting keypair %s", name)
client.delete_keypair(name)
except Exception:
LOG.exception("Delete Keypair %s exception.", name)
def dry_run(self):
keypairs = self.list()
self.data['keypairs'] = keypairs
def save_state(self):
keypairs = self.list()
self.data['keypairs'] = {}
for keypair in keypairs:
keypair = keypair['keypair']
self.data['keypairs'][keypair['name']] = keypair
class VolumeService(BaseService):
def __init__(self, manager, **kwargs):
super(VolumeService, self).__init__(kwargs)
self.client = manager.volumes_client_latest
def list(self):
client = self.client
vols = client.list_volumes()['volumes']
if not self.is_save_state:
# recreate list removing saved volumes
vols = [vol for vol in vols if vol['id']
not in self.saved_state_json['volumes'].keys()]
LOG.debug("List count, %s Volumes", len(vols))
return vols
def delete(self):
client = self.client
vols = self.list()
for v in vols:
try:
LOG.debug("Deleting volume with id %s", v['id'])
client.delete_volume(v['id'])
except Exception:
LOG.exception("Delete Volume %s exception.", v['id'])
def dry_run(self):
vols = self.list()
self.data['volumes'] = vols
def save_state(self):
vols = self.list()
self.data['volumes'] = {}
for vol in vols:
self.data['volumes'][vol['id']] = vol['name']
class VolumeQuotaService(BaseService):
def __init__(self, manager, **kwargs):
super(VolumeQuotaService, self).__init__(kwargs)
self.client = manager.volume_quotas_client_latest
def delete(self):
client = self.client
try:
LOG.debug("Deleting Volume Quotas for project with id %s",
self.project_id)
client.delete_quota_set(self.project_id)
except Exception:
LOG.exception("Delete Volume Quotas exception for 'project %s'.",
self.project_id)
def dry_run(self):
quotas = self.client.show_quota_set(
self.project_id, params={'usage': True})['quota_set']
self.data['volume_quotas'] = quotas
class NovaQuotaService(BaseService):
def __init__(self, manager, **kwargs):
super(NovaQuotaService, self).__init__(kwargs)
self.client = manager.quotas_client
self.limits_client = manager.limits_client
def delete(self):
client = self.client
try:
LOG.debug("Deleting Nova Quotas for project with id %s",
self.project_id)
client.delete_quota_set(self.project_id)
except Exception:
LOG.exception("Delete Nova Quotas exception for 'project %s'.",
self.project_id)
def dry_run(self):
client = self.limits_client
quotas = client.show_limits()['limits']
self.data['compute_quotas'] = quotas['absolute']
class NetworkQuotaService(BaseService):
def __init__(self, manager, **kwargs):
super(NetworkQuotaService, self).__init__(kwargs)
self.client = manager.network_quotas_client
def delete(self):
client = self.client
try:
LOG.debug("Deleting Network Quotas for project with id %s",
self.project_id)
client.reset_quotas(self.project_id)
except Exception:
LOG.exception("Delete Network Quotas exception for 'project %s'.",
self.project_id)
def dry_run(self):
resp = [quota for quota in self.client.list_quotas()['quotas']
if quota['project_id'] == self.project_id]
self.data['network_quotas'] = resp
# Begin network service classes
class BaseNetworkService(BaseService):
def __init__(self, manager, **kwargs):
super(BaseNetworkService, self).__init__(kwargs)
self.networks_client = manager.networks_client
self.subnets_client = manager.subnets_client
self.ports_client = manager.ports_client
self.floating_ips_client = manager.floating_ips_client
self.metering_labels_client = manager.metering_labels_client
self.metering_label_rules_client = manager.metering_label_rules_client
self.security_groups_client = manager.security_groups_client
self.routers_client = manager.routers_client
self.subnetpools_client = manager.subnetpools_client
def _filter_by_conf_networks(self, item_list):
if not item_list or not all(('network_id' in i for i in item_list)):
return item_list
return [item for item in item_list if item['network_id']
not in CONF_NETWORKS]
class NetworkService(BaseNetworkService):
def list(self):
client = self.networks_client
networks = client.list_networks(**self.tenant_filter)
networks = networks['networks']
if not self.is_save_state:
# recreate list removing saved networks
networks = [network for network in networks if network['id']
not in self.saved_state_json['networks'].keys()]
# filter out networks declared in tempest.conf
if self.is_preserve:
networks = [network for network in networks
if network['id'] not in CONF_NETWORKS]
LOG.debug("List count, %s Networks", len(networks))
return networks
def delete(self):
client = self.networks_client
networks = self.list()
for n in networks:
try:
LOG.debug("Deleting Network with id %s", n['id'])
client.delete_network(n['id'])
except Exception:
LOG.exception("Delete Network %s exception.", n['id'])
def dry_run(self):
networks = self.list()
self.data['networks'] = networks
def save_state(self):
networks = self.list()
self.data['networks'] = {}
for network in networks:
self.data['networks'][network['id']] = network
class NetworkFloatingIpService(BaseNetworkService):
def list(self):
client = self.floating_ips_client
flips = client.list_floatingips(**self.tenant_filter)
flips = flips['floatingips']
if not self.is_save_state:
# recreate list removing saved flips
flips = [flip for flip in flips if flip['id']
not in self.saved_state_json['floatingips'].keys()]
LOG.debug("List count, %s Network Floating IPs", len(flips))
return flips
def delete(self):
client = self.floating_ips_client
flips = self.list()
for flip in flips:
try:
LOG.debug("Deleting Network Floating IP with id %s",
flip['id'])
client.delete_floatingip(flip['id'])
except Exception:
LOG.exception("Delete Network Floating IP %s exception.",
flip['id'])
def dry_run(self):
flips = self.list()
self.data['floatingips'] = flips
def save_state(self):
flips = self.list()
self.data['floatingips'] = {}
for flip in flips:
self.data['floatingips'][flip['id']] = flip
class NetworkRouterService(BaseNetworkService):
def list(self):
client = self.routers_client
routers = client.list_routers(**self.tenant_filter)
routers = routers['routers']
if not self.is_save_state:
# recreate list removing saved routers
routers = [router for router in routers if router['id']
not in self.saved_state_json['routers'].keys()]
if self.is_preserve:
routers = [router for router in routers
if router['id'] != CONF_PUB_ROUTER]
LOG.debug("List count, %s Routers", len(routers))
return routers
def delete(self):
client = self.routers_client
ports_client = self.ports_client
routers = self.list()
for router in routers:
rid = router['id']
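            # Router interfaces must be detached before the router itself can be deleted.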
ports = [port for port
in ports_client.list_ports(device_id=rid)['ports']
if net_info.is_router_interface_port(port)]
for port in ports:
try:
LOG.debug("Deleting port with id %s of router with id %s",
port['id'], rid)
client.remove_router_interface(rid, port_id=port['id'])
except Exception:
LOG.exception("Delete Router Interface exception for "
"'port %s' of 'router %s'.", port['id'], rid)
try:
LOG.debug("Deleting Router with id %s", rid)
client.delete_router(rid)
except Exception:
LOG.exception("Delete Router %s exception.", rid)
def dry_run(self):
routers = self.list()
self.data['routers'] = routers
def save_state(self):
routers = self.list()
self.data['routers'] = {}
for router in routers:
self.data['routers'][router['id']] = router['name']
class NetworkMeteringLabelRuleService(NetworkService):
def list(self):
client = self.metering_label_rules_client
rules = client.list_metering_label_rules()
rules = rules['metering_label_rules']
rules = self._filter_by_tenant_id(rules)
if not self.is_save_state:
saved_rules = self.saved_state_json['metering_label_rules'].keys()
# recreate list removing saved rules
rules = [rule for rule in rules if rule['id'] not in saved_rules]
LOG.debug("List count, %s Metering Label Rules", len(rules))
return rules
def delete(self):
client = self.metering_label_rules_client
rules = self.list()
for rule in rules:
try:
LOG.debug("Deleting Metering Label Rule with id %s",
rule['id'])
client.delete_metering_label_rule(rule['id'])
except Exception:
LOG.exception("Delete Metering Label Rule %s exception.",
rule['id'])
def dry_run(self):
rules = self.list()
self.data['metering_label_rules'] = rules
def save_state(self):
rules = self.list()
self.data['metering_label_rules'] = {}
for rule in rules:
self.data['metering_label_rules'][rule['id']] = rule
class NetworkMeteringLabelService(BaseNetworkService):
def list(self):
client = self.metering_labels_client
labels = client.list_metering_labels()
labels = labels['metering_labels']
labels = self._filter_by_tenant_id(labels)
if not self.is_save_state:
# recreate list removing saved labels
labels = [label for label in labels if label['id']
not in self.saved_state_json['metering_labels'].keys()]
LOG.debug("List count, %s Metering Labels", len(labels))
return labels
def delete(self):
client = self.metering_labels_client
labels = self.list()
for label in labels:
try:
LOG.debug("Deleting Metering Label with id %s", label['id'])
client.delete_metering_label(label['id'])
except Exception:
LOG.exception("Delete Metering Label %s exception.",
label['id'])
def dry_run(self):
labels = self.list()
self.data['metering_labels'] = labels
def save_state(self):
labels = self.list()
self.data['metering_labels'] = {}
for label in labels:
self.data['metering_labels'][label['id']] = label['name']
class NetworkPortService(BaseNetworkService):
def list(self):
client = self.ports_client
ports = [port for port in
client.list_ports(**self.tenant_filter)['ports']
if port["device_owner"] == "" or
port["device_owner"].startswith("compute:")]
if not self.is_save_state:
# recreate list removing saved ports
ports = [port for port in ports if port['id']
not in self.saved_state_json['ports'].keys()]
if self.is_preserve:
ports = self._filter_by_conf_networks(ports)
LOG.debug("List count, %s Ports", len(ports))
return ports
def delete(self):
client = self.ports_client
ports = self.list()
for port in ports:
try:
LOG.debug("Deleting port with id %s", port['id'])
client.delete_port(port['id'])
except Exception:
LOG.exception("Delete Port %s exception.", port['id'])
def dry_run(self):
ports = self.list()
self.data['ports'] = ports
def save_state(self):
ports = self.list()
self.data['ports'] = {}
for port in ports:
self.data['ports'][port['id']] = port['name']
class NetworkSecGroupService(BaseNetworkService):
def list(self):
client = self.security_groups_client
        sg_filter = self.tenant_filter
        # cannot delete default sec group so never show it.
        secgroups = [secgroup for secgroup in
                     client.list_security_groups(**sg_filter)['security_groups']
if secgroup['name'] != 'default']
if not self.is_save_state:
# recreate list removing saved security_groups
secgroups = [secgroup for secgroup in secgroups if secgroup['id']
not in self.saved_state_json['security_groups'].keys()
]
if self.is_preserve:
secgroups = [secgroup for secgroup in secgroups
if secgroup['security_group_rules'][0]['project_id']
not in CONF_PROJECTS]
LOG.debug("List count, %s security_groups", len(secgroups))
return secgroups
def delete(self):
client = self.security_groups_client
secgroups = self.list()
for secgroup in secgroups:
try:
LOG.debug("Deleting security_group with id %s", secgroup['id'])
client.delete_security_group(secgroup['id'])
except Exception:
LOG.exception("Delete security_group %s exception.",
secgroup['id'])
def dry_run(self):
secgroups = self.list()
self.data['security_groups'] = secgroups
def save_state(self):
secgroups = self.list()
self.data['security_groups'] = {}
for secgroup in secgroups:
self.data['security_groups'][secgroup['id']] = secgroup['name']
class NetworkSubnetService(BaseNetworkService):
def list(self):
client = self.subnets_client
subnets = client.list_subnets(**self.tenant_filter)
subnets = subnets['subnets']
if not self.is_save_state:
# recreate list removing saved subnets
subnets = [subnet for subnet in subnets if subnet['id']
not in self.saved_state_json['subnets'].keys()]
if self.is_preserve:
subnets = self._filter_by_conf_networks(subnets)
LOG.debug("List count, %s Subnets", len(subnets))
return subnets
def delete(self):
client = self.subnets_client
subnets = self.list()
for subnet in subnets:
try:
LOG.debug("Deleting subnet with id %s", subnet['id'])
client.delete_subnet(subnet['id'])
except Exception:
LOG.exception("Delete Subnet %s exception.", subnet['id'])
def dry_run(self):
subnets = self.list()
self.data['subnets'] = subnets
def save_state(self):
subnets = self.list()
self.data['subnets'] = {}
for subnet in subnets:
self.data['subnets'][subnet['id']] = subnet['name']
class NetworkSubnetPoolsService(BaseNetworkService):
def list(self):
client = self.subnetpools_client
pools = client.list_subnetpools(**self.tenant_filter)['subnetpools']
if not self.is_save_state:
# recreate list removing saved subnet pools
pools = [pool for pool in pools if pool['id']
not in self.saved_state_json['subnetpools'].keys()]
if self.is_preserve:
pools = [pool for pool in pools if pool['project_id']
not in CONF_PROJECTS]
LOG.debug("List count, %s Subnet Pools", len(pools))
return pools
def delete(self):
client = self.subnetpools_client
pools = self.list()
for pool in pools:
try:
LOG.debug("Deleting Subnet Pool with id %s", pool['id'])
client.delete_subnetpool(pool['id'])
except Exception:
LOG.exception("Delete Subnet Pool %s exception.", pool['id'])
def dry_run(self):
pools = self.list()
self.data['subnetpools'] = pools
def save_state(self):
pools = self.list()
self.data['subnetpools'] = {}
for pool in pools:
self.data['subnetpools'][pool['id']] = pool['name']
# begin global services
class RegionService(BaseService):
def __init__(self, manager, **kwargs):
super(RegionService, self).__init__(kwargs)
self.client = manager.regions_client
def list(self):
client = self.client
regions = client.list_regions()
if not self.is_save_state:
regions = [region for region in regions['regions'] if region['id']
not in self.saved_state_json['regions'].keys()]
LOG.debug("List count, %s Regions", len(regions))
return regions
else:
LOG.debug("List count, %s Regions", len(regions['regions']))
return regions['regions']
def delete(self):
client = self.client
regions = self.list()
for region in regions:
try:
LOG.debug("Deleting region with id %s", region['id'])
client.delete_region(region['id'])
except Exception:
LOG.exception("Delete Region %s exception.", region['id'])
def dry_run(self):
regions = self.list()
self.data['regions'] = {}
for region in regions:
self.data['regions'][region['id']] = region
def save_state(self):
regions = self.list()
self.data['regions'] = {}
for region in regions:
self.data['regions'][region['id']] = region
class FlavorService(BaseService):
def __init__(self, manager, **kwargs):
super(FlavorService, self).__init__(kwargs)
self.client = manager.flavors_client
def list(self):
client = self.client
flavors = client.list_flavors({"is_public": None})['flavors']
if not self.is_save_state:
# recreate list removing saved flavors
flavors = [flavor for flavor in flavors if flavor['id']
not in self.saved_state_json['flavors'].keys()]
if self.is_preserve:
flavors = [flavor for flavor in flavors
if flavor['id'] not in CONF_FLAVORS]
LOG.debug("List count, %s Flavors after reconcile", len(flavors))
return flavors
def delete(self):
client = self.client
flavors = self.list()
for flavor in flavors:
try:
LOG.debug("Deleting flavor with id %s", flavor['id'])
client.delete_flavor(flavor['id'])
except Exception:
LOG.exception("Delete Flavor %s exception.", flavor['id'])
def dry_run(self):
flavors = self.list()
self.data['flavors'] = flavors
def save_state(self):
flavors = self.list()
self.data['flavors'] = {}
for flavor in flavors:
self.data['flavors'][flavor['id']] = flavor['name']
class ImageService(BaseService):
def __init__(self, manager, **kwargs):
super(ImageService, self).__init__(kwargs)
self.client = manager.image_client_v2
def list(self):
client = self.client
response = client.list_images()
images = []
images.extend(response['images'])
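        # The image API paginates; follow the 'next' links until every page has been consumed.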
while 'next' in response:
parsed = urllib.urlparse(response['next'])
marker = urllib.parse_qs(parsed.query)['marker'][0]
response = client.list_images(params={"marker": marker})
images.extend(response['images'])
if not self.is_save_state:
images = [image for image in images if image['id']
not in self.saved_state_json['images'].keys()]
if self.is_preserve:
images = [image for image in images
if image['id'] not in CONF_IMAGES]
LOG.debug("List count, %s Images after reconcile", len(images))
return images
def delete(self):
client = self.client
images = self.list()
for image in images:
try:
LOG.debug("Deleting image with id %s", image['id'])
client.delete_image(image['id'])
except Exception:
LOG.exception("Delete Image %s exception.", image['id'])
def dry_run(self):
images = self.list()
self.data['images'] = images
def save_state(self):
self.data['images'] = {}
images = self.list()
for image in images:
self.data['images'][image['id']] = image['name']
class UserService(BaseService):
def __init__(self, manager, **kwargs):
super(UserService, self).__init__(kwargs)
self.client = manager.users_v3_client
def list(self):
users = self.client.list_users()['users']
if not self.is_save_state:
users = [user for user in users if user['id']
not in self.saved_state_json['users'].keys()]
if self.is_preserve:
users = [user for user in users if user['name']
not in CONF_USERS]
elif not self.is_save_state: # Never delete admin user
users = [user for user in users if user['name'] !=
CONF.auth.admin_username]
LOG.debug("List count, %s Users after reconcile", len(users))
return users
def delete(self):
users = self.list()
for user in users:
try:
LOG.debug("Deleting user with id %s", user['id'])
self.client.delete_user(user['id'])
except Exception:
LOG.exception("Delete User %s exception.", user['id'])
def dry_run(self):
users = self.list()
self.data['users'] = users
def save_state(self):
users = self.list()
self.data['users'] = {}
for user in users:
self.data['users'][user['id']] = user['name']
class RoleService(BaseService):
def __init__(self, manager, **kwargs):
super(RoleService, self).__init__(kwargs)
self.client = manager.roles_v3_client
def list(self):
try:
roles = self.client.list_roles()['roles']
# reconcile roles with saved state and never list admin role
if not self.is_save_state:
roles = [role for role in roles if
(role['id'] not in
self.saved_state_json['roles'].keys() and
role['name'] != CONF.identity.admin_role)]
LOG.debug("List count, %s Roles after reconcile", len(roles))
return roles
except Exception:
LOG.exception("Cannot retrieve Roles.")
return []
def delete(self):
roles = self.list()
for role in roles:
try:
LOG.debug("Deleting role with id %s", role['id'])
self.client.delete_role(role['id'])
except Exception:
LOG.exception("Delete Role %s exception.", role['id'])
def dry_run(self):
roles = self.list()
self.data['roles'] = roles
def save_state(self):
roles = self.list()
self.data['roles'] = {}
for role in roles:
self.data['roles'][role['id']] = role['name']
class ProjectService(BaseService):
def __init__(self, manager, **kwargs):
super(ProjectService, self).__init__(kwargs)
self.client = manager.projects_client
def list(self):
projects = self.client.list_projects()['projects']
if not self.is_save_state:
project_ids = self.saved_state_json['projects']
projects = [project
for project in projects
if (project['id'] not in project_ids and
project['name'] != CONF.auth.admin_project_name)]
if self.is_preserve:
projects = [project
for project in projects
if project['name'] not in CONF_PROJECTS]
LOG.debug("List count, %s Projects after reconcile", len(projects))
return projects
def delete(self):
projects = self.list()
for project in projects:
try:
LOG.debug("Deleting project with id %s", project['id'])
self.client.delete_project(project['id'])
except Exception:
LOG.exception("Delete project %s exception.", project['id'])
def dry_run(self):
projects = self.list()
self.data['projects'] = projects
def save_state(self):
projects = self.list()
self.data['projects'] = {}
for project in projects:
self.data['projects'][project['id']] = project['name']
class DomainService(BaseService):
def __init__(self, manager, **kwargs):
super(DomainService, self).__init__(kwargs)
self.client = manager.domains_client
def list(self):
client = self.client
domains = client.list_domains()['domains']
if not self.is_save_state:
domains = [domain for domain in domains if domain['id']
not in self.saved_state_json['domains'].keys()]
LOG.debug("List count, %s Domains after reconcile", len(domains))
return domains
def delete(self):
client = self.client
domains = self.list()
for domain in domains:
try:
LOG.debug("Deleting domain with id %s", domain['id'])
client.update_domain(domain['id'], enabled=False)
client.delete_domain(domain['id'])
except Exception:
LOG.exception("Delete Domain %s exception.", domain['id'])
def dry_run(self):
domains = self.list()
self.data['domains'] = domains
def save_state(self):
domains = self.list()
self.data['domains'] = {}
for domain in domains:
self.data['domains'][domain['id']] = domain['name']
def get_project_associated_cleanup_services():
"""Returns list of project service classes.
The list contains services whose resources need to be deleted prior,
the project they are associated with, deletion. The resources cannot be
most likely deleted after the project is deleted first.
"""
project_associated_services = []
# TODO(gmann): Tempest should provide some plugin hook for cleanup
# script extension to plugin tests also.
if IS_NOVA:
project_associated_services.append(NovaQuotaService)
if IS_CINDER:
project_associated_services.append(VolumeQuotaService)
if IS_NEUTRON:
project_associated_services.append(NetworkQuotaService)
return project_associated_services
def get_resource_cleanup_services():
"""Returns list of project related classes.
The list contains services whose resources are associated with a project,
however, their deletion is possible also after the project is deleted
first.
"""
resource_cleanup_services = []
# TODO(gmann): Tempest should provide some plugin hook for cleanup
# script extension to plugin tests also.
if IS_NOVA:
resource_cleanup_services.append(ServerService)
resource_cleanup_services.append(KeyPairService)
resource_cleanup_services.append(ServerGroupService)
if IS_NEUTRON:
resource_cleanup_services.append(NetworkFloatingIpService)
if utils.is_extension_enabled('metering', 'network'):
resource_cleanup_services.append(NetworkMeteringLabelRuleService)
resource_cleanup_services.append(NetworkMeteringLabelService)
resource_cleanup_services.append(NetworkRouterService)
resource_cleanup_services.append(NetworkPortService)
resource_cleanup_services.append(NetworkSubnetService)
resource_cleanup_services.append(NetworkService)
resource_cleanup_services.append(NetworkSecGroupService)
resource_cleanup_services.append(NetworkSubnetPoolsService)
if IS_CINDER:
resource_cleanup_services.append(SnapshotService)
resource_cleanup_services.append(VolumeService)
return resource_cleanup_services
def get_global_cleanup_services():
global_services = []
if IS_NOVA:
global_services.append(FlavorService)
if IS_GLANCE:
global_services.append(ImageService)
global_services.append(UserService)
global_services.append(ProjectService)
global_services.append(DomainService)
global_services.append(RoleService)
global_services.append(RegionService)
return global_services
| apache-2.0 | -4,164,139,727,326,428,000 | 33.635159 | 79 | 0.578264 | false | 4.240887 | false | false | false |
haihala/modman | cli.py | 1 | 12445 | #!/usr/bin/env python3
try:
import requests
except ImportError:
print("It looks like requests is not installed.")
print("Try: pip3 install requests")
exit(1)
import os
import sys
import subprocess
from getpass import getpass
import mod_manager
from mod_manager import server
from mod_manager.exceptions import LoginError
def open_gui_editor(filename):
"""Opens default GUI text editor."""
if sys.platform == "win32":
os.startfile(filename)
elif sys.platform.startswith("darwin"):
try:
subprocess.call(["open", filename])
except FileNotFoundError:
print("Your default editor \"{}\" could not be opened.")
print("You can manually open \"{}\" if you want to edit it.".format(filename))
elif sys.platform.startswith("linux"):
try:
subprocess.call(["xdg-open", filename])
except FileNotFoundError:
print("Your default editor \"{}\" could not be opened.")
print("You can manually open \"{}\" if you want to edit it.".format(filename))
else:
print("Could not determine text editor.")
print("You can manually open \"{}\" if you want to edit it.".format(filename))
def open_editor(filename):
"""Opens default text editor, preferring CLI editors to GUI editors."""
if sys.platform.startswith("win32"):
open_gui_editor(filename)
elif sys.platform.startswith("darwin") or sys.platform.startswith("linux"):
default_editor = os.environ.get("EDITOR", None)
if default_editor:
try:
subprocess.call([default_editor, filename])
except FileNotFoundError:
# could not use default editor
print("Your default editor \"{}\" could not be opened.")
print("You can manually open \"{}\" if you want to edit it.".format(filename))
else:
open_gui_editor(filename)
class CLI(object):
ACTIONS = [
"help [action]",
"list",
"contents <packname> [packname2]...",
"edit <packname>",
"compress <packname>",
"decompress <base64>",
"install <packname>",
"match <server_address>",
"enabled",
"enable <modname> [version]",
"disable <modname>",
"search <query> [-n <integer>]",
"credentials <action> [args]",
"cache <action>",
"apicache <action>",
"serv_install <modpacks> [experimental]",
]
HELP = {
"help": "If action is present, prints detailed information of the action, otherwise this help message is printed",
"list": "Lists all available modpacks",
"contents": "Lists all mods in a modpack",
"edit": "Opens the specified pack in default text editor",
"compress": "Makes a base64 digest of the mentioned modpack",
"decompress": "Unpacks a mod from base64 digest (overrides existing modpacks with the same name)",
"install": "Despite what is in the mod folder, downloads the newest mods into the specified folder",
"match": "Match your mod configuration to one in a server, using exactly same versions",
"enabled": "List enabled mods",
"enable": "Enables a single mod by name and optionally a version number",
"disable": "Disable a single mod",
"search": "Search for mods from the Factorio mod portal. Specify the amount of results with -n parameter. By default 5 results are displayed.",
"credentials": "Manage mod portal credentials. Actions: set, set [username] [password], clear",
"cache": "Manage cache. Actions: reset, list",
"apicache": "Manage api call cache. Actions: reset",
"serv_install": "Installs the newest server with the chosen modpacks. If '-experimental' or '-e' are present in the command, the newest experimental release is installed."
}
ACTION_NAMES = [a.split()[0] for a in ACTIONS]
def __init__(self):
self.mod_manager = mod_manager.ModManager(login_callback=self.login)
def print_progress_message(self, step):
print(step.message, end="")
sys.stdout.flush()
def print_2col_table(self, rows, indent=0, empty_msg=None):
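        # Pad the first column to the width of its longest entry so the second column lines up.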
if rows:
c1_max_width = max([len(c1) for c1, c2 in rows])
for c1, c2 in rows:
print("".join([" "*2*indent, c1, " "*(c1_max_width - len(c1) + 2), c2]))
elif empty_msg:
print("({})".format(empty_msg))
def prompt_credentials(self):
print("")
print("Logging in to Factorio mod portal")
print("(Password will not be displayed.)")
username = input("Username: ")
password = getpass("Password: ")
print("")
return mod_manager.credentials.Credentials(username, password)
def login(self):
if not mod_manager.credentials.Keyring.credentials_stored:
cred = self.prompt_credentials()
else:
cred = None
try:
self.mod_manager.mod_portal.login(cred)
except LoginError:
print("Could not log in to the mod portal.")
exit(1)
def cmd_help(self, args):
if args == []:
print("")
print("Usage: {} [action] [args]".format(sys.argv[0]))
print("")
self.print_2col_table([(action, self.HELP[action.split()[0]]) for action in self.ACTIONS], indent=1)
print("")
elif args[0] in self.ACTION_NAMES:
action = [a for a in self.ACTIONS if a.startswith(args[0])][0]
print(action+": "+self.HELP[args[0]])
else:
print("Invalid action \"{}\"".format(args[0]))
exit(1)
def cmd_list(self, args):
if len(args) != 0:
print("Invalid argument count")
exit(1)
for p in self.mod_manager.modpacks:
print(p.name)
def cmd_contents(self, args):
if len(args) == 0:
print("Invalid argument count")
exit(1)
packs = {p.name: p for p in self.mod_manager.modpacks}
        matching = []
        for arg in args:
if arg in packs:
pack = packs[arg]
if pack not in matching:
matching.append(pack)
else:
print("Mod pack \"{}\" does not exist.".format(arg))
exit(1)
lengths = [len(mod.name) for pack in matching for mod in pack.contents]
if lengths:
maxlen = max(lengths)
for pack in matching:
print(pack.name)
if pack.empty:
print(" (modpack is empty)")
else:
for mod in pack.contents:
ver = mod.version + " (" + ("fixed" if mod.fixed_version else "floating") + ")"
print(" "*2 + mod.name + " "*((maxlen-len(mod.name))+2) + ver)
def cmd_edit(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
mp = self.mod_manager.get_pack(args[0])
open_editor(mp.path)
def cmd_compress(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
mp = self.mod_manager.get_pack(args[0])
if mp.exists:
print(mp.compress())
else:
print("Mod pack \"{}\" does not exist.".format(args[0]))
exit(1)
def cmd_decompress(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
self.mod_manager.decompress_modpack(args[0]).save()
def cmd_install(self, args):
if args:
packs = []
for p in args:
mp = self.mod_manager.get_pack(p)
if mp.exists:
packs.append(mp)
else:
print("Mod pack \"{}\" does not exist.".format(p))
exit(1)
self.mod_manager.install_packs(packs, self.print_progress_message)
else:
print("Invalid argument count")
exit(1)
def cmd_match(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
try:
self.mod_manager.install_matching(args[0], callback=self.print_progress_message)
except ConnectionRefusedError:
print("Could not connect to the server. Is it running?")
exit(1)
except BrokenPipeError:
print("Could not communicate with the server. Are you using same Factorio version?")
exit(1)
def cmd_enabled(self, args):
if len(args) != 0:
print("Invalid argument count")
exit(1)
self.print_2col_table(
[(mod.name, mod.version) for mod in self.mod_manager.installed_mods],
empty_msg="no mods enabled"
)
def cmd_search(self, args):
search_args = " ".join(args)
        wanted_responses = 5
        length_param = search_args.rsplit(" -n ", 1)
        if len(length_param) == 2 and len(length_param[1]):
            try:
                wanted_responses = int(length_param[1])
                wanted_responses = min(max(wanted_responses, 0), 25)
                search_args = " ".join(args[:-2])
            except ValueError:
                pass
        results = self.mod_manager.mod_portal.search(search_args, n=wanted_responses)
for i,s in enumerate(results):
print("{}. {}: {} ({} downloads)".format(i+1, s.name, s.title, s.downloads_count))
def cmd_credentials(self, args):
if len(args) not in [1,3]:
print("Invalid argument count")
exit(1)
if args[0] == "clear":
if len(args) != 1:
print("Invalid arguments: clear doesn't take any")
exit(1)
mod_manager.credentials.Keyring.clear()
elif args[0] == "set":
if len(args) == 1:
c = self.prompt_credentials()
else:
c = mod_manager.credentials.Credentials(*args[1:])
print("Verifying... ", end="")
sys.stdout.flush()
try:
self.mod_manager.mod_portal.login(c)
except LoginError:
print("invalid credentials")
exit(1)
else:
print("ok")
mod_manager.credentials.Keyring.set_credentials(c)
else:
print("Invalid action \"{}\"".format(args[0]))
exit(1)
def cmd_cache(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
if args[0] == "reset":
self.mod_manager.mod_cache.reset()
elif args[0] == "list":
self.print_2col_table(
[(cmod.name, cmod.version) for cmod in self.mod_manager.mod_cache.mods],
empty_msg="no cached mods"
)
else:
print("Invalid arguments")
print("Usage: cache <action>")
print("Actions: reset, list")
exit(1)
def cmd_apicache(self, args):
if len(args) != 1:
print("Invalid argument count")
exit(1)
if args[0] == "reset":
self.mod_manager.mod_portal.api_cache.reset()
else:
print("Invalid arguments")
print("Usage: apicache reset")
exit(1)
def cmd_serv_install(self, args):
experimental = args[-1] in ["-e", "-experimental"]
if experimental:
modpacks = args[:-1]
else:
modpacks = args[:]
mod_manager.server.create_server(modpacks, experimental, self.mod_manager, self.print_progress_message)
def run(self, cmd):
if cmd == []:
cmd = ["help"]
if cmd[0] in self.ACTION_NAMES:
try:
# get function in this folder named "cmd_<action>"
fn = getattr(self, "cmd_"+cmd[0])
except AttributeError:
print("Action not implemented yet.")
exit(1)
fn(cmd[1:])
else:
print("Invalid action \"{}\"".format(cmd[0]))
exit(1)
def main():
CLI().run(sys.argv[1:])
if __name__ == '__main__':
main()
| mit | 3,436,079,789,954,197,000 | 32.910082 | 179 | 0.538931 | false | 4.104551 | false | false | false |
awesto/django-shop | shop/transition.py | 1 | 3695 | from urllib.parse import urlparse
from django.contrib.auth.models import AnonymousUser
from django.db import models
from django.http.request import HttpRequest
from post_office import mail
from post_office.models import EmailTemplate
from shop.conf import app_settings
from shop.models.order import BaseOrder
from shop.models.notification import Notification
from shop.serializers.delivery import DeliverySerializer
from shop.serializers.order import OrderDetailSerializer
from shop.signals import email_queued
class EmulateHttpRequest(HttpRequest):
"""
    Use this class to emulate an HttpRequest object when templates must be rendered
    asynchronously, for instance when an email must be generated out of an Order object.
"""
def __init__(self, customer, stored_request):
super().__init__()
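        # Rebuild just enough request metadata from the stored request so that
        # build_absolute_uri() and language resolution work outside a real request cycle.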
parsedurl = urlparse(stored_request.get('absolute_base_uri'))
self.path = self.path_info = parsedurl.path
self.environ = {}
self.META['PATH_INFO'] = parsedurl.path
self.META['SCRIPT_NAME'] = ''
self.META['HTTP_HOST'] = parsedurl.netloc
self.META['HTTP_X_FORWARDED_PROTO'] = parsedurl.scheme
self.META['QUERY_STRING'] = parsedurl.query
self.META['HTTP_USER_AGENT'] = stored_request.get('user_agent')
self.META['REMOTE_ADDR'] = stored_request.get('remote_ip')
self.method = 'GET'
self.LANGUAGE_CODE = self.COOKIES['django_language'] = stored_request.get('language')
self.customer = customer
self.user = customer.is_anonymous and AnonymousUser or customer.user
self.current_page = None
def transition_change_notification(order):
"""
    This function shall be called after an Order object has performed a transition change.
"""
if not isinstance(order, BaseOrder):
raise TypeError("Object order must inherit from class BaseOrder")
emails_in_queue = False
for notification in Notification.objects.filter(transition_target=order.status):
recipient = notification.get_recipient(order)
if recipient is None:
continue
        # emulate a request object which behaves similarly to the one used when the customer submitted the order
emulated_request = EmulateHttpRequest(order.customer, order.stored_request)
customer_serializer = app_settings.CUSTOMER_SERIALIZER(order.customer)
render_context = {'request': emulated_request, 'render_label': 'email'}
order_serializer = OrderDetailSerializer(order, context=render_context)
language = order.stored_request.get('language')
context = {
'customer': customer_serializer.data,
'order': order_serializer.data,
'ABSOLUTE_BASE_URI': emulated_request.build_absolute_uri().rstrip('/'),
'render_language': language,
}
try:
latest_delivery = order.delivery_set.latest()
context['latest_delivery'] = DeliverySerializer(latest_delivery, context=render_context).data
except (AttributeError, models.ObjectDoesNotExist):
pass
try:
template = notification.mail_template.translated_templates.get(language=language)
except EmailTemplate.DoesNotExist:
template = notification.mail_template
attachments = {}
for notiatt in notification.notificationattachment_set.all():
attachments[notiatt.attachment.original_filename] = notiatt.attachment.file.file
mail.send(recipient, template=template, context=context,
attachments=attachments, render_on_delivery=True)
emails_in_queue = True
if emails_in_queue:
email_queued()
| bsd-3-clause | 40,270,448,760,447,544 | 44.617284 | 107 | 0.689851 | false | 4.39881 | false | false | false |
UKPLab/semeval2017-scienceie | code/convNet.py | 1 | 7292 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
from extras import VSM, read_and_map
from representation import VeryStupidCBOWMapper, CharMapper
import sys
import os
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import confusion_matrix
from sklearn.metrics import precision_recall_fscore_support
from keras.layers import Dense, Dropout, Activation, Embedding
from keras.models import Sequential
from keras.utils.np_utils import to_categorical
from keras.layers import Convolution1D, GlobalMaxPooling1D, Lambda, Merge
from keras.preprocessing import sequence
from keras import backend as K
maxlen=50
maxlen=100
maxlen=150
maxlen=50+2*30
try:
L = int(sys.argv[5])
M = int(sys.argv[6])
R = int(sys.argv[7])
except IndexError:
L = 30
M = 50
R = 30
maxlen=L+M+R
# this is a simple cnn
# if you would want to use it below, you would have to do
# X_train = X_train.reshape(len(X_train),input_shape[0],input_shape[1])
def build_cnn(input_shape, output_dim,nb_filter):
clf = Sequential()
clf.add(Convolution1D(nb_filter=nb_filter,
filter_length=4,border_mode="valid",activation="relu",subsample_length=1,input_shape=input_shape))
clf.add(GlobalMaxPooling1D())
clf.add(Dense(100))
clf.add(Dropout(0.2))
clf.add(Activation("tanh"))
clf.add(Dense(output_dim=output_dim, activation='softmax'))
clf.compile(optimizer='adagrad',
loss='categorical_crossentropy',
metrics=['accuracy'])
return clf
# just one filter
def build_cnn_char(input_dim, output_dim,nb_filter):
clf = Sequential()
clf.add(Embedding(input_dim,
32, # character embedding size
input_length=maxlen,
dropout=0.2))
clf.add(Convolution1D(nb_filter=nb_filter,
filter_length=3,border_mode="valid",activation="relu",subsample_length=1))
clf.add(GlobalMaxPooling1D())
clf.add(Dense(100))
clf.add(Dropout(0.2))
clf.add(Activation("tanh"))
clf.add(Dense(output_dim=output_dim, activation='softmax'))
clf.compile(optimizer='adagrad',
loss='categorical_crossentropy',
metrics=['accuracy'])
return clf
# just one filter
def build_cnn_char_threeModels(input_dim, output_dim,nb_filter,filter_size=3):
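    # Three parallel character-CNN towers over the left, center and right context
    # windows; their outputs are concatenated before the final softmax layer.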
left = Sequential()
left.add(Embedding(input_dim,
32, # character embedding size
input_length=L,
dropout=0.2))
left.add(Convolution1D(nb_filter=nb_filter,
filter_length=filter_size,border_mode="valid",activation="relu",subsample_length=1))
left.add(GlobalMaxPooling1D())
left.add(Dense(100))
left.add(Dropout(0.2))
left.add(Activation("tanh"))
center = Sequential()
center.add(Embedding(input_dim,
32, # character embedding size
input_length=M,
dropout=0.2))
center.add(Convolution1D(nb_filter=nb_filter,
filter_length=filter_size,border_mode="valid",activation="relu",subsample_length=1))
center.add(GlobalMaxPooling1D())
center.add(Dense(100))
center.add(Dropout(0.2))
center.add(Activation("tanh"))
right = Sequential()
right.add(Embedding(input_dim,
32, # character embedding size
input_length=R,
dropout=0.2))
right.add(Convolution1D(nb_filter=nb_filter,
filter_length=filter_size,border_mode="valid",activation="relu",subsample_length=1))
right.add(GlobalMaxPooling1D())
right.add(Dense(100))
right.add(Dropout(0.2))
right.add(Activation("tanh"))
clf = Sequential()
clf.add(Merge([left,center,right],mode="concat"))
clf.add(Dense(output_dim=output_dim, activation='softmax'))
clf.compile(optimizer='adagrad',
loss='categorical_crossentropy',
metrics=['accuracy'])
return clf
def max_1d(X):
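    # Global max pooling over the time axis, for use inside a Lambda layer.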
return K.max(X,axis=1)
# multiple filters
def build_cnn_char_complex(input_dim, output_dim,nb_filter):
randomEmbeddingLayer = Embedding(input_dim,32, input_length=maxlen,dropout=0.1)
poolingLayer = Lambda(max_1d, output_shape=(nb_filter,))
conv_filters = []
for n_gram in range(2,4):
ngramModel = Sequential()
ngramModel.add(randomEmbeddingLayer)
ngramModel.add(Convolution1D(nb_filter=nb_filter,
filter_length=n_gram,
border_mode="valid",
activation="relu",
subsample_length=1))
ngramModel.add(poolingLayer)
conv_filters.append(ngramModel)
clf = Sequential()
clf.add(Merge(conv_filters,mode="concat"))
clf.add(Activation("relu"))
clf.add(Dense(100))
clf.add(Dropout(0.1))
clf.add(Activation("tanh"))
clf.add(Dense(output_dim=output_dim, activation='softmax'))
clf.compile(optimizer='adagrad',
loss='categorical_crossentropy',
metrics=['accuracy'])
return clf
def acc(correct, total):
return 1.0*correct/total
# example argline:
# python convNet.py ../scienceie2017_train/train2 ../scienceie2017_dev/dev ../resources/vsm/glove.6B/glove.6B.100d.txt
if __name__=="__main__":
train_src = sys.argv[1]
dev_src = sys.argv[2]
# vsm_path = sys.argv[3]
vsm_path = None
print("Loading VSM")
vsm = VSM(vsm_path)
    try:
        csize = int(sys.argv[4])
    except IndexError:
        csize = 2
try:
n_filter = int(sys.argv[8])
except IndexError:
n_filter = 250
try:
filter_size = int(sys.argv[9])
except IndexError:
filter_size = 3
if len(sys.argv)>10 and sys.argv[10]=="document":
SB = False
else:
SB = True
mapper = CharMapper(vsm,csize,L=L,M=M,R=R,sentence_boundaries=SB)
print("Reading training data")
X_train, y_train, y_values, _ = read_and_map(train_src, mapper)
X_dev, y_dev_gold, _, estrings = read_and_map(dev_src, mapper, y_values)
vocabSize = mapper.curVal
print(X_train.shape)
print(y_train.shape)
#sys.exit(1)
print("Trainig a model")
timesteps = 2*csize + 1 # left, right, center
context_dim = 100
input_shape = (timesteps,context_dim)
clf = build_cnn_char(vocabSize+1, len(y_values)+1,n_filter)
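    # The single-tower model above is immediately replaced by the three-tower model below.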
clf = build_cnn_char_threeModels(vocabSize+1, len(y_values)+1,n_filter)
X_left = X_train[:,:L]
X_center = X_train[:,L:L+M]
X_right = X_train[:,L+M:L+M+R]
print L,M,R,X_train.shape,X_left.shape,X_center.shape,X_right.shape,y_train,y_values
clf.fit([X_left,X_center,X_right], to_categorical(y_train, len(y_values)+1), verbose=1, nb_epoch=15)
print("Reading test data")
print("Testing")
X_dev_left = X_dev[:,:L]
X_dev_center = X_dev[:,L:L+M]
X_dev_right = X_dev[:,L+M:L+M+R]
print(X_dev.shape,X_dev_left.shape,X_dev_center.shape,X_dev_right.shape)
y_dev_auto = clf.predict_classes([X_dev_left,X_dev_center,X_dev_right]) # for LogisticRegression just do predict()
print "==PREDICTING=="
for i in xrange(len(y_dev_auto)):
print y_values[y_dev_auto[i]]
| apache-2.0 | 7,097,266,528,719,987,000 | 31.995475 | 124 | 0.623149 | false | 3.310032 | false | false | false |
JaneliaSciComp/Neuroptikon | Source/library/library.py | 1 | 1804 | # Copyright (c) 2010 Howard Hughes Medical Institute.
# All rights reserved.
# Use is subject to Janelia Farm Research Campus Software Copyright 1.1 license terms.
# http://license.janelia.org/license/jfrc_copyright_1_1.html
from library_item import LibraryItem
from library_frame import LibraryFrame
from pydispatch import dispatcher
from itertools import groupby
class Library(object):
def __init__(self):
self._library = {}
self._frame = LibraryFrame()
def add(self, item):
if not issubclass(item.__class__, LibraryItem):
raise ValueError, gettext('Library items must be instances of a subclass of LibraryItem')
if item.__class__.__name__ in self._library:
# This class of item has been added before.
dict = self._library[item.__class__.__name__]
else:
# Create and retain a new dictionary for this class of item.
dict = {}
self._library[item.__class__.__name__] = dict
# Add a method to ourself that returns the full list of items of this class.
setattr(self, item.__class__.listProperty(), lambda: sorted([value for value, group in groupby(dict.values())], cmp=lambda x,y: cmp(x.name.lower(), y.name.lower())))
# Add a method to ourself that performs a lookup of items of this class.
setattr(self, item.__class__.lookupProperty(), lambda itemId: dict.get(itemId, None))
self._frame.addItemClass(item.__class__)
dict[item.identifier] = item
for synonym in item.synonyms:
dict[synonym] = item
dispatcher.send(('addition', item.__class__), self)
def browse(self):
self._frame.Show()
self._frame.Raise()
| bsd-3-clause | 8,292,867,889,160,056,000 | 39.088889 | 177 | 0.616408 | false | 4.254717 | false | false | false |
3bot/3bot-hook | threebot_hook/models.py | 1 | 1773 | # -*- coding: utf-8 -*-
from django import dispatch
from django.contrib.sites.models import Site
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from rest_framework.authtoken.models import Token
from threebot.models import Workflow
from threebot.models import Worker
from threebot.models import ParameterList
@python_2_unicode_compatible
class Hook(models.Model):
slug = models.SlugField(max_length=255)
user = models.CharField(max_length=255, blank=True, null=True)
repo = models.CharField(
max_length=255, blank=True, null=True,
help_text=u'Leave blank. Field is not used in the current version.')
secret = models.CharField(
max_length=255, blank=True, null=True,
help_text=u'Leave blank. Field is not used in the current version.')
workflow = models.ForeignKey(Workflow)
worker = models.ForeignKey(Worker)
param_list = models.ForeignKey(ParameterList)
def get_hook_url(self):
return "%d-%d-%d-%s" % (self.workflow.id, self.worker.id, self.param_list.id, self.slug)
def __str__(self):
return "%s (%d)" % (self.get_hook_url(), self.pk)
def make_full_url(self, user):
token, created = Token.objects.get_or_create(user=user)
return "https://%s/hooks/%s/%s-%s-%s/" % (Site.objects.get_current().domain, token, self.workflow.id, self.worker.id, self.param_list.id)
class Meta():
verbose_name = _("Hook")
verbose_name_plural = _("Hooks")
db_table = 'threebot_hook'
unique_together = ("workflow", "worker", "param_list")
class HookSignal(dispatch.Signal):
pass
pre_hook_signal = HookSignal()
post_hook_signal = HookSignal()
| bsd-3-clause | -1,663,933,699,646,561,500 | 34.46 | 145 | 0.685279 | false | 3.524851 | false | false | false |
borg-project/borg | borg/tools/get_features.py | 1 | 2297 | """@author: Bryan Silverthorn <[email protected]>"""
import os.path
import csv
import borg
import borg.distributors
logger = borg.get_logger(__name__, default_level = "INFO")
def features_for_path(domain, task_path):
# bring back relevant globals
import os.path
import borg
logger = borg.get_logger(__name__, default_level = "INFO")
# collect features
logger.info("getting features of %s", os.path.basename(task_path))
with domain.task_from_path(task_path) as task:
with borg.accounting() as accountant:
(names, values) = domain.compute_features(task)
return (
task_path,
["cpu_cost"] + list(names),
[accountant.total.cpu_seconds] + list(values))
@borg.annotations(
domain_name = ("suite path, or name of the problem domain", "positional"),
instances_root = ("path to instances files", "positional", None, os.path.abspath),
suffix = ("file suffix to apply", "positional"),
skip_existing = ("skip existing features?", "flag"),
distributor_name = ("name of task distributor", "option"),
workers = ("submit jobs?", "option", "w", int),
)
def main(
domain_name,
instances_root,
suffix = ".features.csv",
skip_existing = False,
distributor_name = "ipython",
workers = 0):
"""Collect task features."""
def yield_runs():
if os.path.exists(domain_name):
domain = borg.load_solvers(domain_name).domain
else:
domain = borg.get_domain(domain_name)
paths = list(borg.util.files_under(instances_root, domain.extensions))
count = 0
for path in paths:
if skip_existing and os.path.exists(path + suffix):
continue
count += 1
yield (features_for_path, [domain, path])
logger.info("collecting features for %i instances", count)
distributor = borg.distributors.make(
distributor_name,
workers=workers)
for (cnf_path, names, values) in distributor.do(yield_runs()):
csv_path = cnf_path + suffix
with open(csv_path, "wb") as csv_file:
csv.writer(csv_file).writerow(names)
csv.writer(csv_file).writerow(values)
if __name__ == "__main__":
borg.script(main)
| mit | 5,014,328,690,648,508,000 | 28.831169 | 86 | 0.610361 | false | 3.686998 | false | false | false |
qsnake/gpaw | config.py | 1 | 17421 | # Copyright (C) 2006 CSC-Scientific Computing Ltd.
# Please see the accompanying LICENSE file for further information.
import os
import sys
import re
import distutils.util
from distutils.sysconfig import get_config_var, get_config_vars
from distutils.command.config import config
from glob import glob
from os.path import join
from stat import ST_MTIME
def check_packages(packages, msg, include_ase, import_numpy):
"""Check the python version and required extra packages
If ASE is not installed, the `packages` list is extended with the
ASE modules if they are found."""
if sys.version_info < (2, 3, 0, 'final', 0):
raise SystemExit('Python 2.3.1 or later is required!')
if import_numpy:
try:
import numpy
except ImportError:
raise SystemExit('numpy is not installed!')
else:
msg += ['* numpy is not installed.',
' "include_dirs" in your customize.py must point to "numpy/core/include".']
if not include_ase:
if import_numpy:
try:
import ase
except ImportError:
import_ase = True
else:
import_ase = False
else:
import_ase = False
if include_ase or import_ase:
# Find ASE directories:
# include_ase works in case:
# cd gpaw # top-level gpaw source directory
# tar zxf ~/python-ase-3.1.0.846.tar.gz
# ln -s python-ase-3.1.0.846/ase .
ase_root = 'ase'
if include_ase:
assert os.path.isdir(ase_root), ase_root+': No such file or directory'
ase = []
for root, dirs, files in os.walk(ase_root):
if 'CVS' in dirs:
dirs.remove('CVS')
if '.svn' in dirs:
dirs.remove('.svn')
if '__init__.py' in files:
ase.append(root.replace('/', '.'))
if len(ase) == 0:
msg += ['* ASE is not installed! You may be able to install',
" gpaw, but you can't use it without ASE!"]
else:
packages += ase
def find_file(arg, dir, files):
#looks if the first element of the list arg is contained in the list files
# and if so, appends dir to to arg. To be used with the os.path.walk
if arg[0] in files:
arg.append(dir)
def get_system_config(define_macros, undef_macros,
include_dirs, libraries, library_dirs, extra_link_args,
extra_compile_args, runtime_library_dirs, extra_objects,
msg, import_numpy):
undef_macros += ['NDEBUG']
if import_numpy:
import numpy
include_dirs += [numpy.get_include()]
include_dirs += ['c/libxc']
machine = os.uname()[4]
if machine == 'sun4u':
# _
# |_ | ||\ |
# _||_|| \|
#
extra_compile_args += ['-Kpic', '-fast']
# Suppress warning from -fast (-xarch=native):
f = open('cc-test.c', 'w')
f.write('int main(){}\n')
f.close()
stderr = os.popen3('cc cc-test.c -fast')[2].read()
arch = re.findall('-xarch=(\S+)', stderr)
os.remove('cc-test.c')
if len(arch) > 0:
extra_compile_args += ['-xarch=%s' % arch[-1]]
# We need the -Bstatic before the -lsunperf and -lfsu:
# http://forum.java.sun.com/thread.jspa?threadID=5072537&messageID=9265782
extra_link_args += ['-Bstatic', '-lsunperf', '-lfsu', '-Bdynamic']
cc_version = os.popen3('cc -V')[2].readline().split()[3]
if cc_version > '5.6':
libraries.append('mtsk')
else:
extra_link_args.append('-lmtsk')
#define_macros.append(('NO_C99_COMPLEX', '1'))
msg += ['* Using SUN high performance library']
elif sys.platform in ['aix5', 'aix6']:
#
# o|_ _ _
# ||_)| | |
#
extra_compile_args += ['-qlanglvl=stdc99']
# setting memory limit is necessary on aix5
if sys.platform == 'aix5':
extra_link_args += ['-bmaxdata:0x80000000',
'-bmaxstack:0x80000000']
libraries += ['f', 'lapack', 'essl']
define_macros.append(('GPAW_AIX', '1'))
elif machine == 'x86_64':
# _
# \/|_||_ |_ |_|
# /\|_||_| _ |_| |
#
extra_compile_args += ['-Wall', '-std=c99']
# Look for ACML libraries:
acml = glob('/opt/acml*/g*64/lib')
if len(acml) > 0:
library_dirs += [acml[-1]]
libraries += ['acml']
if acml[-1].find('gfortran') != -1: libraries.append('gfortran')
if acml[-1].find('gnu') != -1: libraries.append('g2c')
extra_link_args += ['-Wl,-rpath=' + acml[-1]]
msg += ['* Using ACML library']
else:
atlas = False
for dir in ['/usr/lib', '/usr/local/lib']:
if glob(join(dir, 'libatlas.a')) != []:
atlas = True
break
if atlas:
libraries += ['lapack', 'atlas', 'blas']
library_dirs += [dir]
msg += ['* Using ATLAS library']
else:
libraries += ['blas', 'lapack']
msg += ['* Using standard lapack']
elif machine =='ia64':
# _ _
# |_ | o
# _||_||
#
extra_compile_args += ['-Wall', '-std=c99']
libraries += ['mkl','mkl_lapack64']
elif machine == 'i686':
# _
# o|_ |_||_
# ||_||_||_|
#
extra_compile_args += ['-Wall', '-std=c99']
if 'MKL_ROOT' in os.environ:
mklbasedir = [os.environ['MKL_ROOT']]
else:
mklbasedir = glob('/opt/intel/mkl*')
libs = ['libmkl_ia32.a']
if mklbasedir != []:
os.path.walk(mklbasedir[0],find_file, libs)
libs.pop(0)
if libs != []:
libs.sort()
libraries += ['mkl_lapack',
'mkl_ia32', 'guide', 'pthread', 'mkl']#, 'mkl_def']
library_dirs += libs
msg += ['* Using MKL library: %s' % library_dirs[-1]]
#extra_link_args += ['-Wl,-rpath=' + library_dirs[-1]]
else:
atlas = False
for dir in ['/usr/lib', '/usr/local/lib']:
if glob(join(dir, 'libatlas.a')) != []:
atlas = True
break
if atlas:
libraries += ['lapack', 'atlas', 'blas']
library_dirs += [dir]
msg += ['* Using ATLAS library']
else:
libraries += ['blas', 'lapack']
msg += ['* Using standard lapack']
# add libg2c if available
g2c=False
for dir in ['/usr/lib', '/usr/local/lib']:
if glob(join(dir, 'libg2c.so')) != []:
g2c=True
break
if glob(join(dir, 'libg2c.a')) != []:
g2c=True
break
if g2c: libraries += ['g2c']
elif sys.platform == 'darwin':
extra_compile_args += ['-Wall', '-std=c99']
include_dirs += ['/usr/include/malloc']
if glob('/System/Library/Frameworks/vecLib.framework') != []:
extra_link_args += ['-framework vecLib']
msg += ['* Using vecLib']
else:
libraries += ['blas', 'lapack']
msg += ['* Using standard lapack']
return msg
def get_parallel_config(mpi_libraries,mpi_library_dirs,mpi_include_dirs,
mpi_runtime_library_dirs,mpi_define_macros):
globals = {}
execfile('gpaw/mpi/config.py', globals)
mpi = globals['get_mpi_implementation']()
if mpi == '':
mpicompiler = None
elif mpi == 'sun':
mpi_include_dirs += ['/opt/SUNWhpc/include']
mpi_libraries += ['mpi']
mpi_library_dirs += ['/opt/SUNWhpc/lib']
mpi_runtime_library_dirs += ['/opt/SUNWhpc/lib']
mpicompiler = get_config_var('CC')
elif mpi == 'poe':
mpicompiler = 'mpcc_r'
else:
#Try to use mpicc
mpicompiler = 'mpicc'
return mpicompiler
def get_scalapack_config(define_macros):
# check ScaLapack settings
define_macros.append(('GPAW_WITH_SL', '1'))
def mtime(path, name, mtimes):
"""Return modification time.
The modification time of a source file is returned. If one of its
dependencies is newer, the mtime of that file is returned.
This function fails if two include files with the same name
are present in different directories."""
include = re.compile('^#\s*include "(\S+)"', re.MULTILINE)
if mtimes.has_key(name):
return mtimes[name]
t = os.stat(os.path.join(path, name))[ST_MTIME]
for name2 in include.findall(open(os.path.join(path, name)).read()):
path2, name22 = os.path.split(name2)
if name22 != name:
t = max(t, mtime(os.path.join(path, path2), name22, mtimes))
mtimes[name] = t
return t
def check_dependencies(sources):
# Distutils does not do deep dependencies correctly. We take care of
# that here so that "python setup.py build_ext" always does the right
# thing!
mtimes = {} # modification times
# Remove object files if any dependencies have changed:
plat = distutils.util.get_platform() + '-' + sys.version[0:3]
remove = False
for source in sources:
path, name = os.path.split(source)
t = mtime(path + '/', name, mtimes)
o = 'build/temp.%s/%s.o' % (plat, source[:-2]) # object file
if os.path.exists(o) and t > os.stat(o)[ST_MTIME]:
print 'removing', o
os.remove(o)
remove = True
so = 'build/lib.%s/_gpaw.so' % plat
if os.path.exists(so) and remove:
# Remove shared object C-extension:
# print 'removing', so
os.remove(so)
def test_configuration():
raise NotImplementedError
def write_configuration(define_macros, include_dirs, libraries, library_dirs,
extra_link_args, extra_compile_args,
runtime_library_dirs, extra_objects, mpicompiler,
mpi_libraries, mpi_library_dirs, mpi_include_dirs,
mpi_runtime_library_dirs, mpi_define_macros):
# Write the compilation configuration into a file
try:
out = open('configuration.log', 'w')
except IOError, x:
print x
return
print >> out, "Current configuration"
print >> out, "libraries", libraries
print >> out, "library_dirs", library_dirs
print >> out, "include_dirs", include_dirs
print >> out, "define_macros", define_macros
print >> out, "extra_link_args", extra_link_args
print >> out, "extra_compile_args", extra_compile_args
print >> out, "runtime_library_dirs", runtime_library_dirs
print >> out, "extra_objects", extra_objects
if mpicompiler is not None:
print >> out
print >> out, "Parallel configuration"
print >> out, "mpicompiler", mpicompiler
print >> out, "mpi_libraries", mpi_libraries
print >> out, "mpi_library_dirs", mpi_library_dirs
print >> out, "mpi_include_dirs", mpi_include_dirs
print >> out, "mpi_define_macros", mpi_define_macros
print >> out, "mpi_runtime_library_dirs", mpi_runtime_library_dirs
out.close()
def build_interpreter(define_macros, include_dirs, libraries, library_dirs,
extra_link_args, extra_compile_args,
runtime_library_dirs, extra_objects,
mpicompiler, mpilinker, mpi_libraries, mpi_library_dirs,
mpi_include_dirs, mpi_runtime_library_dirs,
mpi_define_macros):
#Build custom interpreter which is used for parallel calculations
cfgDict = get_config_vars()
plat = distutils.util.get_platform() + '-' + sys.version[0:3]
cfiles = glob('c/[a-zA-Z_]*.c') + ['c/bmgs/bmgs.c']
cfiles += glob('c/libxc/src/*.c')
if ('HDF5', 1) in define_macros:
cfiles += glob('h5py/c/*.c')
cfiles += glob('h5py/c/lzf/*.c')
cfiles2remove = ['c/libxc/src/test.c',
'c/libxc/src/xc_f.c',
'c/libxc/src/work_gga_x.c',
'c/libxc/src/work_lda.c'
]
for c2r in glob('c/libxc/src/funcs_*.c'): cfiles2remove.append(c2r)
for c2r in cfiles2remove: cfiles.remove(c2r)
sources = ['c/bc.c', 'c/localized_functions.c', 'c/mpi.c', 'c/_gpaw.c',
'c/operators.c', 'c/transformers.c', 'c/compiled_WITH_SL.c',
'c/blacs.c', 'c/utilities.c']
objects = ' '.join(['build/temp.%s/' % plat + x[:-1] + 'o'
for x in cfiles])
if not os.path.isdir('build/bin.%s/' % plat):
os.makedirs('build/bin.%s/' % plat)
exefile = 'build/bin.%s/' % plat + '/gpaw-python'
libraries += mpi_libraries
library_dirs += mpi_library_dirs
define_macros += mpi_define_macros
include_dirs += mpi_include_dirs
runtime_library_dirs += mpi_runtime_library_dirs
define_macros.append(('PARALLEL', '1'))
define_macros.append(('GPAW_INTERPRETER', '1'))
macros = ' '.join(['-D%s=%s' % x for x in define_macros if x[0].strip()])
include_dirs.append(cfgDict['INCLUDEPY'])
include_dirs.append(cfgDict['CONFINCLUDEPY'])
includes = ' '.join(['-I' + incdir for incdir in include_dirs])
library_dirs.append(cfgDict['LIBPL'])
lib_dirs = ' '.join(['-L' + lib for lib in library_dirs])
libs = ' '.join(['-l' + lib for lib in libraries if lib.strip()])
libs += ' -lpython%s' % cfgDict['VERSION']
libs = ' '.join([libs, cfgDict['LIBS'], cfgDict['LIBM']])
#Hack taken from distutils to determine option for runtime_libary_dirs
if sys.platform[:6] == 'darwin':
# MacOSX's linker doesn't understand the -R flag at all
runtime_lib_option = '-L'
elif sys.platform[:5] == 'hp-ux':
runtime_lib_option = '+s -L'
elif os.popen('mpicc --showme 2> /dev/null', 'r').read()[:3] == 'gcc':
runtime_lib_option = '-Wl,-R'
elif os.popen('mpicc -show 2> /dev/null', 'r').read()[:3] == 'gcc':
runtime_lib_option = '-Wl,-R'
else:
runtime_lib_option = '-R'
runtime_libs = ' '.join([ runtime_lib_option + lib for lib in runtime_library_dirs])
extra_link_args.append(cfgDict['LDFLAGS'])
if sys.platform in ['aix5', 'aix6']:
extra_link_args.append(cfgDict['LINKFORSHARED'].replace('Modules', cfgDict['LIBPL']))
elif sys.platform == 'darwin':
pass
else:
extra_link_args.append(cfgDict['LINKFORSHARED'])
if ('IO_WRAPPERS', 1) in define_macros:
extra_link_args += ['-Wl,-wrap,fread',
'-Wl,-wrap,_IO_getc',
'-Wl,-wrap,getc_unlocked',
'-Wl,-wrap,fgets',
'-Wl,-wrap,ungetc',
'-Wl,-wrap,feof',
'-Wl,-wrap,ferror',
'-Wl,-wrap,fflush',
'-Wl,-wrap,fseek',
'-Wl,-wrap,rewind',
# '-Wl,-wrap,fileno',
'-Wl,-wrap,flockfile',
'-Wl,-wrap,funlockfile',
'-Wl,-wrap,clearerr',
'-Wl,-wrap,fgetpos',
'-Wl,-wrap,fsetpos',
'-Wl,-wrap,setbuf',
'-Wl,-wrap,setvbuf',
'-Wl,-wrap,ftell',
'-Wl,-wrap,fstat',
'-Wl,-wrap,fstat64',
'-Wl,-wrap,fgetc',
# '-Wl,-wrap,fputc',
# '-Wl,-wrap,fputs',
# '-Wl,-wrap,fwrite',
# '-Wl,-wrap,_IO_putc',
'-Wl,-wrap,fopen',
'-Wl,-wrap,fopen64',
'-Wl,-wrap,fclose',
]
# Compile the parallel sources
for src in sources:
obj = 'build/temp.%s/' % plat + src[:-1] + 'o'
cmd = ('%s %s %s %s -o %s -c %s ' ) % \
(mpicompiler,
macros,
' '.join(extra_compile_args),
includes,
obj,
src)
print cmd
if '--dry-run' not in sys.argv:
error=os.system(cmd)
if error != 0:
msg = ['* compiling FAILED! Only serial version of code will work.']
break
# Link the custom interpreter
cmd = ('%s -o %s %s %s %s %s %s %s' ) % \
(mpilinker,
exefile,
objects,
' '.join(extra_objects),
lib_dirs,
libs,
runtime_libs,
' '.join(extra_link_args))
msg = ['* Building a custom interpreter']
print cmd
if '--dry-run' not in sys.argv:
error=os.system(cmd)
if error != 0:
msg += ['* linking FAILED! Only serial version of code will work.']
return error, msg
| gpl-3.0 | 1,044,954,531,353,206,300 | 33.772455 | 93 | 0.508983 | false | 3.680752 | true | false | false |
jamesabel/osnap | osnap/osnapy.py | 1 | 2772 |
import argparse
from osnap import default_python_version, get_logger, init_logger_from_args, __application_name__
import osnap.osnapy_win
import osnap.osnapy_mac
import osnap.util
LOGGER = get_logger(__application_name__)
def make_osnapy(
python_version,
application_name = None,
clean_cache = False,
use_pyrun = False, # support for eGenix™ PyRun™ has been removed
force_app_uninstall = False,
architecture = '64bit',
):
LOGGER.debug('creating osnapy Python environment using python %s' % python_version)
if osnap.util.is_mac() and application_name is None:
raise Exception('must specify the application name on mac')
osnapy = None
if osnap.util.is_windows():
osnapy = osnap.osnapy_win.OsnapyWin(python_version, application_name, clean_cache, architecture=architecture)
elif osnap.util.is_mac():
if use_pyrun:
LOGGER.critical('pyrun capability has been removed')
else:
osnapy = osnap.osnapy_mac.OsnapyMac(python_version, application_name, clean_cache, force_app_uninstall)
else:
raise NotImplementedError
osnapy.create_python()
osnapy.pip('pip')
osnapy.pip('setuptools')
osnapy.pip('Cython') # e.g. for kivy
osnapy.pip(None) # install all from requirements.txt
def main():
parser = argparse.ArgumentParser(description='create the osnapy Python environment',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-a', '--application', default=None, help='application name (required for OSX/MacOS)')
parser.add_argument('-A', '--architecture', default='64bit', choices=['64bit', '32bit'], help='The architecture to use for the launcher')
parser.add_argument('-p', '--python_version', default=default_python_version, help='python version')
parser.add_argument('-c', '--clear', action='store_true', default=False, help='clear cache')
parser.add_argument('-f', '--force_uninstall', action='store_true', default=False,
help='force application uninstalls if necessary')
parser.add_argument('-v', '--verbose', action='store_true', default=False, help='print more verbose messages')
args = parser.parse_args()
init_logger_from_args(args)
make_osnapy(
python_version = args.python_version,
application_name = args.application,
clean_cache = args.clear,
use_pyrun = False, # support for eGenix™ PyRun™ has been removed
force_app_uninstall = args.force_uninstall,
architecture = args.architecture
)
if __name__ == '__main__':
main()
| mit | 8,053,201,480,406,695,000 | 40.253731 | 141 | 0.643632 | false | 3.81768 | false | false | false |
oblank/pydjango-froum | xp/settings.py | 1 | 7211 | # coding: utf-8
# Django settings for xp project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'forum', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': 'root',
'PASSWORD': '123456',
'HOST': '127.0.0.1', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '3306', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'Asia/Shanghai'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'zh-CN'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True # 只有用admin的时候需要开启
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = False
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'h6=yzee&jze#4p1@twhksg1wg6hv%pzwomw(!o($qsly%lzlhe'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.cache.UpdateCacheMiddleware', # 缓存中间件,必须放在开头
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware', # 开启了CSRF,记得在POST表单中加{% csrf_token %},使用RequestContext
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware', # 缓存中间件,必须放在最后
)
ROOT_URLCONF = 'xp.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'xp.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
TEMPLATE_CONTEXT_PROCESSORS = ( # F2E中有current_user对象和request对象,这里设置可在模板中使用RquestContext
'django.contrib.auth.context_processors.auth', # user对象等等
'django.core.context_processors.request', # request对象等等
'django.core.context_processors.static', # 在模板中使用{{ STATIC_URL }}获取静态文件路径
'forum.context_processors.custom_proc', # 自定义模板上下文处理器
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'django.contrib.sitemaps', # Django sitemap framework
'forum',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'django.db.backends': {
'level': 'DEBUG',
'handlers': ['console'],
},
}
}
# CACHES = { # memcached缓存设置
# 'default': {
# 'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
# 'LOCATION': '127.0.0.1:11211',
# }
# }
# SESSION_ENGINE = 'django.contrib.sessions.backends.cache' # 使用memcached存储session
# 自定义User类
AUTH_USER_MODEL = 'forum.ForumUser'
# 用户认证BackEnds
AUTHENTICATION_BACKENDS = ('forum.backends.EmailAuthBackend',)
# 默认登陆uri
LOGIN_URL = '/login/'
# 发送邮件设置
EMAIL_HOST = 'smtp.163.com'
EMAIL_PORT = 25
EMAIL_HOST_USER= 'a135689110'
EMAIL_HOST_PASSWORD= '8804183'
DEFAULT_FROM_EMAIL = '[email protected]'
# 注册用户保留关键字,非Django设置
RESERVED = ["user", "topic", "home", "setting", "forgot", "login", "logout", "register", "admin"]
| mit | -2,053,450,417,403,817,500 | 33.014706 | 136 | 0.682951 | false | 3.309013 | false | false | false |
aldanor/blox | blox/utils.py | 1 | 1944 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import six
import struct
import functools
import numpy as np
try:
import ujson as json
json_dumps = json.dumps
except ImportError:
import json
json_dumps = functools.partial(json.dumps, separators=',:')
def flatten_dtype(dtype):
dtype = np.dtype(dtype)
if dtype.fields is not None:
if dtype.type is np.record:
return ('record', list(dtype.descr))
return list(dtype.descr)
return str(dtype)
def restore_dtype(dtype):
def _convert_dtype(dt):
# workaround for a long-standing bug in numpy:
# https://github.com/numpy/numpy/issues/2407
is_string = lambda s: isinstance(s, (six.text_type, six.string_types))
if isinstance(dt, list):
if len(dt) == 2 and is_string(dt[0]):
return _convert_dtype(tuple(dt))
return [_convert_dtype(subdt) for subdt in dt]
elif isinstance(dt, tuple):
return tuple(_convert_dtype(subdt) for subdt in dt)
elif isinstance(dt, six.text_type) and six.PY2:
return dt.encode('ascii')
return dt
dtype = _convert_dtype(dtype)
if isinstance(dtype, (list, tuple)) and len(dtype) == 2 and dtype[0] == 'record':
return np.dtype((np.record, np.dtype(dtype[1])))
return np.dtype(dtype)
def write_i64(stream, *values):
for value in values:
stream.write(struct.pack('<Q', value))
def read_i64(stream, count=None):
if count is None:
return struct.unpack('<Q', stream.read(8))[0]
return tuple(struct.unpack('<Q', stream.read(8))[0] for _ in range(count))
def write_json(stream, data):
payload = json_dumps(data).encode('utf-8')
write_i64(stream, len(payload))
stream.write(payload)
return len(payload) + 8
def read_json(stream):
length = read_i64(stream)
return json.loads(stream.read(length).decode('utf-8'))
| mit | 1,654,692,354,838,072,600 | 28.014925 | 85 | 0.629115 | false | 3.471429 | false | false | false |
basimr/snoop-dogg-number | filter_graph_by_sdn.py | 1 | 1253 | #!/usr/bin/python2
# TODO: Add description.
import psycopg2
import networkx as nx
# TODO: Create a class for storing artists' SDN and path to avoid doing this.
SDN = 0
PATH = 1
# Load graph from disk
graph = nx.read_gexf("graph/sdn-unweighted.gexf")
# Initialize dictionary with the Snoop Dogg as the base case
artists = {"Snoop Dogg" : (0, ["Snoop Dogg"])}
# Traverse the graph breadth-first and compute every artist's Snoop Dogg Number in O(V + E)
for edge in nx.bfs_edges(graph, "Snoop Dogg"):
parent = edge[0]
child = edge[1]
dist_to_snoopdogg = artists[parent][SDN] + 1
path_to_snoopdogg = artists[parent][PATH] + [child]
artists[child] = (dist_to_snoopdogg, path_to_snoopdogg)
# Remove artists far from Snoop Dogg and save a separate graph for each iteration
# TODO: Can I use comprehensions to simplify these loops?
for sdn in [5, 4, 3, 2, 1]:
distant_artists = []
for a in artists:
if artists[a][SDN] > sdn:
distant_artists.append(a)
for a in distant_artists:
del artists[a]
graph.remove_node(a)
filename = "graph/sdn-" + str(sdn) + ".gexf"
nx.write_gexf(graph, filename)
print("Wrote graph of artists with SDN of " + sdn + " or less at " + filename)
print(nx.info(graph))
| mit | 2,550,880,907,891,530,000 | 29.560976 | 91 | 0.676776 | false | 2.887097 | false | false | false |
kurokid/connme | connme/connme.py | 1 | 2726 | #!/usr/bin/env python2
import sip
sip.setapi('QString', 2)
from PyQt4 import QtGui, QtCore, QtNetwork
from connmeMain import connme
import sys,os
class SingleApplicationWithMessaging(QtGui.QApplication):
messageAvailable = QtCore.pyqtSignal(object)
def __init__(self, argv, key):
QtGui.QApplication.__init__(self, argv)
self._key = key
self._memory = QtCore.QSharedMemory(self)
self._memory.setKey(self._key)
if self._memory.attach():
self._running = True
else:
self._running = False
if not self._memory.create(1):
raise RuntimeError(self._memory.errorString())
self._timeout = 1000
self._server = QtNetwork.QLocalServer(self)
if not self.isRunning():
self._server.newConnection.connect(self.handleMessage)
self._server.listen(self._key)
def handleMessage(self):
socket = self._server.nextPendingConnection()
if socket.waitForReadyRead(self._timeout):
self.messageAvailable.emit(
socket.readAll().data().decode('utf-8'))
socket.disconnectFromServer()
else:
QtCore.qDebug(socket.errorString())
def isRunning(self):
return self._running
def sendMessage(self, message):
if self.isRunning():
socket = QtNetwork.QLocalSocket(self)
socket.connectToServer(self._key, QtCore.QIODevice.WriteOnly)
if not socket.waitForConnected(self._timeout):
print(socket.errorString())
return False
if not isinstance(message, bytes):
message = message.encode('utf-8')
socket.write(message)
if not socket.waitForBytesWritten(self._timeout):
print(socket.errorString())
return False
socket.disconnectFromServer()
return True
return False
def main():
key = 'connme'
app = SingleApplicationWithMessaging(sys.argv, key)
if app.isRunning():
app.sendMessage(' '.join(sys.argv[1:]))
sys.exit(1)
gui = connme()
gui.address = os.path.realpath(__file__)
app.messageAvailable.connect(gui.processClient)
gui.showGui()
sys.exit(app.exec_())
if __name__ == '__main__':
euid = os.geteuid()
os.chdir(sys.path[0])
if euid != 0:
if os.path.exists("/usr/bin/gksu"):
args = ['gksu', sys.executable] + sys.argv + [os.environ]
os.execlpe('gksu', *args)
elif os.path.exists("/usr/bin/kdesudo"):
args = ['kdesudo', sys.executable] + sys.argv + [os.environ]
os.execlpe('kdesudo', *args)
main() | gpl-3.0 | -4,815,941,542,223,123,000 | 32.256098 | 73 | 0.591709 | false | 3.916667 | false | false | false |
BaseBot/Triangula | src/python/triangula/navigation.py | 1 | 1101 | class TaskWaypoint:
"""
Consists of a target Pose defining a location and orientation, and a Task which should be run when the robot reaches
the target position. The task can be None, in which case the robot won't attempt to do anything at the target point.
"""
def __init__(self, pose, task=None, stop=False):
"""
Constructor
:param triangula.chassis.Pose pose:
The target Pose, defining the location and orientation of this waypoint
:param triangula.task.Task task:
A Task to run when the target point is reached. The task will be run until a non-None value is returned from
the poll method. Defaults to None, in which case no task will be invoked and the robot will proceed
immediately to the next waypoint.
:param stop:
Defaults to False, if this is set to True then the robot will come to a complete stop before either running
the sub-task or proceeding to the next waypoint.
"""
self.pose = pose
self.task = task
self.stop = stop | apache-2.0 | -6,752,106,085,634,482,000 | 46.913043 | 120 | 0.656676 | false | 4.493878 | false | false | false |
ivanlai/Kaggle-Planet-Amazon | PyTorch_models.py | 1 | 5862 | # Reference and ideas from http://pytorch.org/tutorials/beginner/transfer_learning_tutorial.html
from __future__ import print_function
import torch.nn as nn
import torchvision.models as models
import warnings
warnings.filterwarnings("ignore")
##################################################################
## PyTorch Model implementations in
## /usr/local/lib/python2.7/dist-packages/torchvision/models ##
##################################################################
def resnet18(num_classes, pretrained=True, freeze=False):
model = models.resnet18( pretrained=True)
if freeze:
model = freeze_all_layers(model)
# Parameters of newly constructed modules have requires_grad=True by default
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Resnet18'
def resnet34(num_classes, pretrained=True, freeze=False):
model = models.resnet34( pretrained=True)
if freeze:
model = freeze_all_layers(model)
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Resnet34'
def resnet50(num_classes, pretrained=True, freeze=False):
model = models.resnet50( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Resnet50'
def resnet101(num_classes, pretrained=True, freeze=False):
model = models.resnet101( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Resnet101'
def resnet152(num_classes, pretrained=True, freeze=False):
model = models.resnet152( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Resnet152'
##################################################################
def densenet121(num_classes, pretrained=True, freeze=False):
model = models.densenet121( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.classifier.in_features
model.classifier = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Densenet121'
def densenet161(num_classes, pretrained=True, freeze=False):
model = models.densenet161( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.classifier.in_features
model.classifier = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Densenet161'
def densenet169(num_classes, pretrained=True, freeze=False):
model = models.densenet169(pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.classifier.in_features
model.classifier = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Densenet169'
def densenet201(num_classes, pretrained=True, freeze=False):
model = models.densenet201( pretrained=pretrained)
if freeze:
model = freeze_all_layers(model)
num_features = model.classifier.in_features
model.classifier = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Densenet201'
##################################################################
def inception_v3(num_classes, pretrained=True, freeze=False):
model = models.inception_v3(pretrained=pretrained)
model.aux_logits = False
if freeze:
model = freeze_all_layers(model)
num_features = model.fc.in_features
model.fc = nn.Linear(num_features, num_classes)
model = Add_Sigmoid(model)
return model, 'Inception_v3'
##################################################################
def vgg16(num_classes, pretrained=True, freeze=False):
# Credit: https://discuss.pytorch.org/t/how-to-perform-finetuning-in-pytorch/419/10
model = models.vgg16(pretrained=True)
if freeze:
model = freeze_all_layers(model)
mod = list(model.classifier.children())
mod.pop()
mod.append(nn.Linear(4096, 17))
new_classifier = nn.Sequential(*mod)
model.classifier = new_classifier
model = Add_Sigmoid(model)
return model, 'VGG16'
##################################################################
def vgg19(num_classes, pretrained=True, freeze=False):
# Credit: https://discuss.pytorch.org/t/how-to-perform-finetuning-in-pytorch/419/10
model = models.vgg19(pretrained=True)
if freeze:
model = freeze_all_layers(model)
mod = list(model.classifier.children())
mod.pop()
mod.append(nn.Linear(4096, 17))
new_classifier = nn.Sequential(*mod)
model.classifier = new_classifier
model = Add_Sigmoid(model)
return model, 'VGG19'
##################################################################
class Add_Sigmoid(nn.Module):
def __init__(self, pretrained_model):
super(Add_Sigmoid, self).__init__()
self.pretrained_model = pretrained_model
self.sigmoid = nn.Sigmoid()
def forward(self, x):
return self.sigmoid(self.pretrained_model(x))
##################################################################
def freeze_all_layers(model):
#Freeze all layers except last during training (last layer training set to true when it get redefined)
for param in model.parameters():
param.requires_grad = False
return model
################################################################## | mit | 1,453,137,191,912,408,600 | 27.740196 | 106 | 0.624019 | false | 3.75048 | false | false | false |
trozet/python-tackerclient | tackerclient/shell.py | 1 | 32048 | # Copyright 2012 OpenStack Foundation.
# All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Command-line interface to the Tacker APIs
"""
from __future__ import print_function
import argparse
import getpass
import inspect
import itertools
import logging
import os
import sys
from keystoneclient.auth.identity import v2 as v2_auth
from keystoneclient.auth.identity import v3 as v3_auth
from keystoneclient import discover
from keystoneclient.openstack.common.apiclient import exceptions as ks_exc
from keystoneclient import session
from oslo_utils import encodeutils
import six.moves.urllib.parse as urlparse
from cliff import app
from cliff import commandmanager
from tackerclient.common import clientmanager
from tackerclient.common import command as openstack_command
from tackerclient.common import exceptions as exc
from tackerclient.common import extension as client_extension
from tackerclient.common import utils
from tackerclient.i18n import _
from tackerclient.tacker.v1_0 import extension
from tackerclient.tacker.v1_0.vm import device
from tackerclient.tacker.v1_0.vm import device_template
from tackerclient.tacker.v1_0.vm import vnf
from tackerclient.tacker.v1_0.vm import vnfd
from tackerclient.version import __version__
VERSION = '1.0'
TACKER_API_VERSION = '1.0'
def run_command(cmd, cmd_parser, sub_argv):
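    """Parse sub_argv with cmd_parser and run cmd with the parsed args.
    Unrecognized arguments, or everything after a literal '--', are kept
    aside in cmd.values_specs so the command can interpret them itself.
    """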
_argv = sub_argv
index = -1
values_specs = []
if '--' in sub_argv:
index = sub_argv.index('--')
_argv = sub_argv[:index]
values_specs = sub_argv[index:]
known_args, _values_specs = cmd_parser.parse_known_args(_argv)
cmd.values_specs = (index == -1 and _values_specs or values_specs)
return cmd.run(known_args)
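# Illustrative example of the '--' handling above (the flags are
# hypothetical):
#   tacker ext-list -- --foo bar
# parses 'ext-list' normally while ['--', '--foo', 'bar'] is preserved in
# cmd.values_specs for the command to consume.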
def env(*_vars, **kwargs):
"""Search for the first defined of possibly many env vars.
Returns the first environment variable defined in vars, or
returns the default defined in kwargs.
"""
for v in _vars:
value = os.environ.get(v, None)
if value:
return value
return kwargs.get('default', '')
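# Illustrative example: with OS_TENANT_NAME unset and OS_PROJECT_NAME set to
# 'demo', env('OS_TENANT_NAME', 'OS_PROJECT_NAME', default='x') returns
# 'demo'; with neither variable set it returns the default, 'x'.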
def check_non_negative_int(value):
try:
value = int(value)
except ValueError:
raise argparse.ArgumentTypeError(_("invalid int value: %r") % value)
if value < 0:
raise argparse.ArgumentTypeError(_("input value %d is negative") %
value)
return value
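# Illustrative example: check_non_negative_int('3') returns 3, while '-1' and
# 'abc' both raise argparse.ArgumentTypeError, which argparse turns into a
# usage error for the --retries option below.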
class BashCompletionCommand(openstack_command.OpenStackCommand):
"""Prints all of the commands and options for bash-completion."""
resource = "bash_completion"
COMMAND_V1 = {
'bash-completion': BashCompletionCommand,
'ext-list': extension.ListExt,
'ext-show': extension.ShowExt,
'device-template-create': device_template.CreateDeviceTemplate,
'device-template-list': device_template.ListDeviceTemplate,
'device-template-show': device_template.ShowDeviceTemplate,
'device-template-update': device_template.UpdateDeviceTemplate,
'device-template-delete': device_template.DeleteDeviceTemplate,
'device-create': device.CreateDevice,
'device-list': device.ListDevice,
'device-show': device.ShowDevice,
'device-update': device.UpdateDevice,
'device-delete': device.DeleteDevice,
'interface-attach': device.AttachInterface,
'interface-detach': device.DetachInterface,
# MANO lingo
'vnfd-create': vnfd.CreateVNFD,
'vnfd-delete': vnfd.DeleteVNFD,
'vnfd-list': vnfd.ListVNFD,
'vnfd-show': vnfd.ShowVNFD,
'vnf-create': vnf.CreateVNF,
'vnf-update': vnf.UpdateVNF,
'vnf-delete': vnf.DeleteVNF,
'vnf-list': vnf.ListVNF,
'vnf-show': vnf.ShowVNF,
# 'vnf-config-create'
# 'vnf-config-push'
}
COMMANDS = {'1.0': COMMAND_V1}
class HelpAction(argparse.Action):
"""Provide a custom action so the -h and --help options
to the main app will print a list of the commands.
The commands are determined by checking the CommandManager
instance, passed in as the "default" value for the action.
"""
def __call__(self, parser, namespace, values, option_string=None):
outputs = []
max_len = 0
app = self.default
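        # 'default' is the TackerShell application itself (registered with
        # default=self in build_option_parser), giving access to its
        # command_manager and api_version here.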
parser.print_help(app.stdout)
app.stdout.write(_('\nCommands for API v%s:\n') % app.api_version)
command_manager = app.command_manager
for name, ep in sorted(command_manager):
factory = ep.load()
cmd = factory(self, None)
one_liner = cmd.get_description().split('\n')[0]
outputs.append((name, one_liner))
max_len = max(len(name), max_len)
for (name, one_liner) in outputs:
app.stdout.write(' %s %s\n' % (name.ljust(max_len), one_liner))
sys.exit(0)
class TackerShell(app.App):
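    """cliff application implementing the 'tacker' command-line interface."""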
# verbose logging levels
WARNING_LEVEL = 0
INFO_LEVEL = 1
DEBUG_LEVEL = 2
CONSOLE_MESSAGE_FORMAT = '%(message)s'
DEBUG_MESSAGE_FORMAT = '%(levelname)s: %(name)s %(message)s'
log = logging.getLogger(__name__)
def __init__(self, apiversion):
super(TackerShell, self).__init__(
description=__doc__.strip(),
version=VERSION,
command_manager=commandmanager.CommandManager('tacker.cli'), )
self.commands = COMMANDS
for k, v in self.commands[apiversion].items():
self.command_manager.add_command(k, v)
self._register_extensions(VERSION)
        # Pop 'complete' so it is not listed in the output of 'tacker help'.
self.command_manager.commands.pop('complete')
# This is instantiated in initialize_app() only when using
# password flow auth
self.auth_client = None
self.api_version = apiversion
def build_option_parser(self, description, version):
"""Return an argparse option parser for this application.
Subclasses may override this method to extend
the parser with more global options.
:param description: full description of the application
:paramtype description: str
:param version: version number for the application
:paramtype version: str
"""
parser = argparse.ArgumentParser(
description=description,
add_help=False, )
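        # add_help=False: the default argparse help is replaced below by
        # HelpAction, which also lists the available commands.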
parser.add_argument(
'--version',
action='version',
version=__version__, )
parser.add_argument(
'-v', '--verbose', '--debug',
action='count',
dest='verbose_level',
default=self.DEFAULT_VERBOSE_LEVEL,
help=_('Increase verbosity of output and show tracebacks on'
' errors. You can repeat this option.'))
parser.add_argument(
'-q', '--quiet',
action='store_const',
dest='verbose_level',
const=0,
help=_('Suppress output except warnings and errors.'))
parser.add_argument(
'-h', '--help',
action=HelpAction,
nargs=0,
default=self, # tricky
help=_("Show this help message and exit."))
parser.add_argument(
'-r', '--retries',
metavar="NUM",
type=check_non_negative_int,
default=0,
help=_("How many times the request to the Tacker server should "
"be retried if it fails."))
# FIXME(bklei): this method should come from python-keystoneclient
self._append_global_identity_args(parser)
return parser
def _append_global_identity_args(self, parser):
# FIXME(bklei): these are global identity (Keystone) arguments which
# should be consistent and shared by all service clients. Therefore,
# they should be provided by python-keystoneclient. We will need to
# refactor this code once this functionality is available in
# python-keystoneclient.
#
# Note: At that time we'll need to decide if we can just abandon
# the deprecated args (--service-type and --endpoint-type).
parser.add_argument(
'--os-service-type', metavar='<os-service-type>',
default=env('OS_SERVICEVM_SERVICE_TYPE', default='servicevm'),
help=_('Defaults to env[OS_SERVICEVM_SERVICE_TYPE] or servicevm.'))
parser.add_argument(
'--os-endpoint-type', metavar='<os-endpoint-type>',
default=env('OS_ENDPOINT_TYPE', default='publicURL'),
help=_('Defaults to env[OS_ENDPOINT_TYPE] or publicURL.'))
# FIXME(bklei): --service-type is deprecated but kept in for
# backward compatibility.
parser.add_argument(
'--service-type', metavar='<service-type>',
default=env('OS_SERVICEVM_SERVICE_TYPE', default='servicevm'),
help=_('DEPRECATED! Use --os-service-type.'))
# FIXME(bklei): --endpoint-type is deprecated but kept in for
# backward compatibility.
parser.add_argument(
'--endpoint-type', metavar='<endpoint-type>',
default=env('OS_ENDPOINT_TYPE', default='publicURL'),
help=_('DEPRECATED! Use --os-endpoint-type.'))
parser.add_argument(
'--os-auth-strategy', metavar='<auth-strategy>',
default=env('OS_AUTH_STRATEGY', default='keystone'),
help=_('DEPRECATED! Only keystone is supported.'))
parser.add_argument(
'--os_auth_strategy',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-auth-url', metavar='<auth-url>',
default=env('OS_AUTH_URL'),
help=_('Authentication URL, defaults to env[OS_AUTH_URL].'))
parser.add_argument(
'--os_auth_url',
help=argparse.SUPPRESS)
project_name_group = parser.add_mutually_exclusive_group()
project_name_group.add_argument(
'--os-tenant-name', metavar='<auth-tenant-name>',
default=env('OS_TENANT_NAME'),
help=_('Authentication tenant name, defaults to '
'env[OS_TENANT_NAME].'))
project_name_group.add_argument(
'--os-project-name',
metavar='<auth-project-name>',
default=utils.env('OS_PROJECT_NAME'),
            help=_('Another way to specify tenant name. '
                   'This option is mutually exclusive with '
                   '--os-tenant-name. '
                   'Defaults to env[OS_PROJECT_NAME].'))
parser.add_argument(
'--os_tenant_name',
help=argparse.SUPPRESS)
project_id_group = parser.add_mutually_exclusive_group()
project_id_group.add_argument(
'--os-tenant-id', metavar='<auth-tenant-id>',
default=env('OS_TENANT_ID'),
help=_('Authentication tenant ID, defaults to '
'env[OS_TENANT_ID].'))
project_id_group.add_argument(
'--os-project-id',
metavar='<auth-project-id>',
default=utils.env('OS_PROJECT_ID'),
            help=_('Another way to specify tenant ID. '
                   'This option is mutually exclusive with '
                   '--os-tenant-id. '
                   'Defaults to env[OS_PROJECT_ID].'))
parser.add_argument(
'--os-username', metavar='<auth-username>',
default=utils.env('OS_USERNAME'),
help=_('Authentication username, defaults to env[OS_USERNAME].'))
parser.add_argument(
'--os_username',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-id', metavar='<auth-user-id>',
default=env('OS_USER_ID'),
            help=_('Authentication user ID, defaults to env[OS_USER_ID].'))
parser.add_argument(
'--os_user_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-domain-id',
metavar='<auth-user-domain-id>',
default=utils.env('OS_USER_DOMAIN_ID'),
            help=_('OpenStack user domain ID. '
                   'Defaults to env[OS_USER_DOMAIN_ID].'))
parser.add_argument(
'--os_user_domain_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-user-domain-name',
metavar='<auth-user-domain-name>',
default=utils.env('OS_USER_DOMAIN_NAME'),
            help=_('OpenStack user domain name. '
                   'Defaults to env[OS_USER_DOMAIN_NAME].'))
parser.add_argument(
'--os_user_domain_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os_project_id',
help=argparse.SUPPRESS)
parser.add_argument(
'--os_project_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-project-domain-id',
metavar='<auth-project-domain-id>',
default=utils.env('OS_PROJECT_DOMAIN_ID'),
            help=_('Defaults to env[OS_PROJECT_DOMAIN_ID].'))
parser.add_argument(
'--os-project-domain-name',
metavar='<auth-project-domain-name>',
default=utils.env('OS_PROJECT_DOMAIN_NAME'),
            help=_('Defaults to env[OS_PROJECT_DOMAIN_NAME].'))
parser.add_argument(
'--os-cert',
metavar='<certificate>',
default=utils.env('OS_CERT'),
help=_("Path of certificate file to use in SSL "
"connection. This file can optionally be "
"prepended with the private key. Defaults "
"to env[OS_CERT]."))
parser.add_argument(
'--os-cacert',
metavar='<ca-certificate>',
default=env('OS_CACERT', default=None),
help=_("Specify a CA bundle file to use in "
"verifying a TLS (https) server certificate. "
"Defaults to env[OS_CACERT]."))
parser.add_argument(
'--os-key',
metavar='<key>',
default=utils.env('OS_KEY'),
help=_("Path of client key to use in SSL "
"connection. This option is not necessary "
"if your key is prepended to your certificate "
"file. Defaults to env[OS_KEY]."))
parser.add_argument(
'--os-password', metavar='<auth-password>',
default=utils.env('OS_PASSWORD'),
help=_('Authentication password, defaults to env[OS_PASSWORD].'))
parser.add_argument(
'--os_password',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-region-name', metavar='<auth-region-name>',
default=env('OS_REGION_NAME'),
help=_('Authentication region name, defaults to '
'env[OS_REGION_NAME].'))
parser.add_argument(
'--os_region_name',
help=argparse.SUPPRESS)
parser.add_argument(
'--os-token', metavar='<token>',
default=env('OS_TOKEN'),
help=_('Authentication token, defaults to env[OS_TOKEN].'))
parser.add_argument(
'--os_token',
help=argparse.SUPPRESS)
parser.add_argument(
'--http-timeout', metavar='<seconds>',
default=env('OS_NETWORK_TIMEOUT', default=None), type=float,
help=_('Timeout in seconds to wait for an HTTP response. Defaults '
'to env[OS_NETWORK_TIMEOUT] or None if not specified.'))
parser.add_argument(
'--os-url', metavar='<url>',
default=env('OS_URL'),
help=_('Defaults to env[OS_URL].'))
parser.add_argument(
'--os_url',
help=argparse.SUPPRESS)
parser.add_argument(
'--insecure',
action='store_true',
default=env('TACKERCLIENT_INSECURE', default=False),
help=_("Explicitly allow tackerclient to perform \"insecure\" "
"SSL (https) requests. The server's certificate will "
"not be verified against any certificate authorities. "
"This option should be used with caution."))
def _bash_completion(self):
"""Prints all of the commands and options for bash-completion."""
commands = set()
options = set()
for option, _action in self.parser._option_string_actions.items():
options.add(option)
for command_name, command in self.command_manager:
commands.add(command_name)
cmd_factory = command.load()
cmd = cmd_factory(self, None)
cmd_parser = cmd.get_parser('')
for option, _action in cmd_parser._option_string_actions.items():
options.add(option)
print(' '.join(commands | options))
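        # The space-separated words printed above are suitable for bash's
        # programmable completion, e.g. (assumed shell setup, not provided
        # here): complete -W "$(tacker bash-completion)" tacker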
def _register_extensions(self, version):
for name, module in itertools.chain(
client_extension._discover_via_entry_points()):
self._extend_shell_commands(module, version)
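    # Minimal sketch of what _extend_shell_commands() looks for (an assumed
    # extension shape, not shipped with this module):
    #
    #   class FooList(client_extension.TackerClientExtension):
    #       shell_command = 'foo-list'
    #       versions = ['1.0']
    #
    # Such a class, discovered via entry points, becomes 'tacker foo-list'.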
def _extend_shell_commands(self, module, version):
classes = inspect.getmembers(module, inspect.isclass)
for cls_name, cls in classes:
if (issubclass(cls, client_extension.TackerClientExtension) and
hasattr(cls, 'shell_command')):
cmd = cls.shell_command
if hasattr(cls, 'versions'):
if version not in cls.versions:
continue
try:
self.command_manager.add_command(cmd, cls)
self.commands[version][cmd] = cls
except TypeError:
pass
def run(self, argv):
"""Equivalent to the main program for the application.
:param argv: input arguments and options
:paramtype argv: list of str
"""
try:
index = 0
command_pos = -1
help_pos = -1
help_command_pos = -1
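            # Single scan of argv: remember where the first subcommand,
            # the first -h/--help and the first bare 'help' occur; their
            # relative positions decide how help output is routed below.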
for arg in argv:
if arg == 'bash-completion' and help_command_pos == -1:
self._bash_completion()
return 0
if arg in self.commands[self.api_version]:
if command_pos == -1:
command_pos = index
elif arg in ('-h', '--help'):
if help_pos == -1:
help_pos = index
elif arg == 'help':
if help_command_pos == -1:
help_command_pos = index
index = index + 1
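            # 'tacker <cmd> -h' becomes 'tacker help <cmd>' (full command
            # help); a bare 'tacker help' with no command becomes '--help'
            # (global help via HelpAction).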
if command_pos > -1 and help_pos > command_pos:
argv = ['help', argv[command_pos]]
if help_command_pos > -1 and command_pos == -1:
argv[help_command_pos] = '--help'
self.options, remainder = self.parser.parse_known_args(argv)
self.configure_logging()
self.interactive_mode = not remainder
self.initialize_app(remainder)
except Exception as err:
if self.options.verbose_level >= self.DEBUG_LEVEL:
self.log.exception(err)
raise
else:
self.log.error(err)
return 1
if self.interactive_mode:
_argv = [sys.argv[0]]
sys.argv = _argv
return self.interact()
return self.run_subcommand(remainder)
def run_subcommand(self, argv):
subcommand = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = subcommand
cmd = cmd_factory(self, self.options)
try:
self.prepare_to_run_command(cmd)
full_name = (cmd_name
if self.interactive_mode
else ' '.join([self.NAME, cmd_name])
)
cmd_parser = cmd.get_parser(full_name)
return run_command(cmd, cmd_parser, sub_argv)
except Exception as e:
if self.options.verbose_level >= self.DEBUG_LEVEL:
self.log.exception("%s", e)
raise
self.log.error("%s", e)
return 1
def authenticate_user(self):
"""Make sure the user has provided all of the authentication
info we need.
"""
if self.options.os_auth_strategy == 'keystone':
if self.options.os_token or self.options.os_url:
# Token flow auth takes priority
if not self.options.os_token:
raise exc.CommandError(
_("You must provide a token via"
" either --os-token or env[OS_TOKEN]"
" when providing a service URL"))
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"
" when providing a token"))
else:
# Validate password flow auth
project_info = (self.options.os_tenant_name or
self.options.os_tenant_id or
(self.options.os_project_name and
(self.options.os_project_domain_name or
self.options.os_project_domain_id)) or
self.options.os_project_id)
if (not self.options.os_username
and not self.options.os_user_id):
raise exc.CommandError(
_("You must provide a username or user ID via"
" --os-username, env[OS_USERNAME] or"
" --os-user-id, env[OS_USER_ID]"))
if not self.options.os_password:
                # No password; if we've got a tty, try prompting for it
if hasattr(sys.stdin, 'isatty') and sys.stdin.isatty():
                    # Check for Ctrl-D
try:
self.options.os_password = getpass.getpass(
'OS Password: ')
except EOFError:
pass
                # Still no password: either there was no tty or the
                # user pressed Ctrl-D when prompted.
if not self.options.os_password:
raise exc.CommandError(
_("You must provide a password via"
" either --os-password or env[OS_PASSWORD]"))
                if not project_info:
                    # "tenant" is deprecated in Keystone v3; use the newer
                    # project terminology instead.
raise exc.CommandError(
_("You must provide a project_id or project_name ("
"with project_domain_name or project_domain_id) "
"via "
" --os-project-id (env[OS_PROJECT_ID])"
" --os-project-name (env[OS_PROJECT_NAME]),"
" --os-project-domain-id "
"(env[OS_PROJECT_DOMAIN_ID])"
" --os-project-domain-name "
"(env[OS_PROJECT_DOMAIN_NAME])"))
if not self.options.os_auth_url:
raise exc.CommandError(
_("You must provide an auth url via"
" either --os-auth-url or via env[OS_AUTH_URL]"))
auth_session = self._get_keystone_session()
auth = auth_session.auth
else: # not keystone
if not self.options.os_url:
raise exc.CommandError(
_("You must provide a service URL via"
" either --os-url or env[OS_URL]"))
auth_session = None
auth = None
self.client_manager = clientmanager.ClientManager(
token=self.options.os_token,
url=self.options.os_url,
auth_url=self.options.os_auth_url,
tenant_name=self.options.os_tenant_name,
tenant_id=self.options.os_tenant_id,
username=self.options.os_username,
user_id=self.options.os_user_id,
password=self.options.os_password,
region_name=self.options.os_region_name,
api_version=self.api_version,
auth_strategy=self.options.os_auth_strategy,
# FIXME (bklei) honor deprecated service_type and
# endpoint type until they are removed
service_type=self.options.os_service_type or
self.options.service_type,
endpoint_type=self.options.os_endpoint_type or self.endpoint_type,
insecure=self.options.insecure,
ca_cert=self.options.os_cacert,
timeout=self.options.http_timeout,
retries=self.options.retries,
raise_errors=False,
session=auth_session,
auth=auth,
log_credentials=True)
return
def initialize_app(self, argv):
"""Global app init bits:
* set up API versions
* validate authentication info
"""
super(TackerShell, self).initialize_app(argv)
self.api_version = {'servicevm': self.api_version}
# If the user is not asking for help, make sure they
# have given us auth.
cmd_name = None
if argv:
cmd_info = self.command_manager.find_command(argv)
cmd_factory, cmd_name, sub_argv = cmd_info
if self.interactive_mode or cmd_name != 'help':
self.authenticate_user()
def configure_logging(self):
"""Create logging handlers for any log output."""
root_logger = logging.getLogger('')
# Set up logging to a file
root_logger.setLevel(logging.DEBUG)
# Send higher-level messages to the console via stderr
console = logging.StreamHandler(self.stderr)
console_level = {self.WARNING_LEVEL: logging.WARNING,
self.INFO_LEVEL: logging.INFO,
self.DEBUG_LEVEL: logging.DEBUG,
}.get(self.options.verbose_level, logging.DEBUG)
        # When the requested verbosity leaves the level at INFO, drop the
        # console handler to WARNING to avoid noisy output. This is
        # equivalent to passing "--quiet".
if console_level == logging.INFO:
console.setLevel(logging.WARNING)
else:
console.setLevel(console_level)
if logging.DEBUG == console_level:
formatter = logging.Formatter(self.DEBUG_MESSAGE_FORMAT)
else:
formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT)
logging.getLogger('iso8601.iso8601').setLevel(logging.WARNING)
logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
console.setFormatter(formatter)
root_logger.addHandler(console)
return
def get_v2_auth(self, v2_auth_url):
return v2_auth.Password(
v2_auth_url,
username=self.options.os_username,
password=self.options.os_password,
tenant_id=self.options.os_tenant_id,
tenant_name=self.options.os_tenant_name)
def get_v3_auth(self, v3_auth_url):
project_id = self.options.os_project_id or self.options.os_tenant_id
project_name = (self.options.os_project_name or
self.options.os_tenant_name)
return v3_auth.Password(
v3_auth_url,
username=self.options.os_username,
password=self.options.os_password,
user_id=self.options.os_user_id,
user_domain_name=self.options.os_user_domain_name,
user_domain_id=self.options.os_user_domain_id,
project_id=project_id,
project_name=project_name,
project_domain_name=self.options.os_project_domain_name,
project_domain_id=self.options.os_project_domain_id
)
def _discover_auth_versions(self, session, auth_url):
        # Discover the API versions the server supports, based on the
        # given URL.
try:
ks_discover = discover.Discover(session=session, auth_url=auth_url)
return (ks_discover.url_for('2.0'), ks_discover.url_for('3.0'))
except ks_exc.ClientException:
            # The Identity service may not support API version discovery.
            # Let's try to figure out the API version from the original URL.
url_parts = urlparse.urlparse(auth_url)
(scheme, netloc, path, params, query, fragment) = url_parts
path = path.lower()
if path.startswith('/v3'):
return (None, auth_url)
elif path.startswith('/v2'):
return (auth_url, None)
else:
# not enough information to determine the auth version
msg = _('Unable to determine the Keystone version '
'to authenticate with using the given '
'auth_url. Identity service may not support API '
'version discovery. Please provide a versioned '
'auth_url instead.')
raise exc.CommandError(msg)
def _get_keystone_session(self):
# first create a Keystone session
cacert = self.options.os_cacert or None
cert = self.options.os_cert or None
key = self.options.os_key or None
insecure = self.options.insecure or False
ks_session = session.Session.construct(dict(cacert=cacert,
cert=cert,
key=key,
insecure=insecure))
# discover the supported keystone versions using the given url
(v2_auth_url, v3_auth_url) = self._discover_auth_versions(
session=ks_session,
auth_url=self.options.os_auth_url)
# Determine which authentication plugin to use. First inspect the
# auth_url to see the supported version. If both v3 and v2 are
# supported, then use the highest version if possible.
user_domain_name = self.options.os_user_domain_name or None
user_domain_id = self.options.os_user_domain_id or None
project_domain_name = self.options.os_project_domain_name or None
project_domain_id = self.options.os_project_domain_id or None
domain_info = (user_domain_name or user_domain_id or
project_domain_name or project_domain_id)
if (v2_auth_url and not domain_info) or not v3_auth_url:
ks_session.auth = self.get_v2_auth(v2_auth_url)
else:
ks_session.auth = self.get_v3_auth(v3_auth_url)
return ks_session
def main(argv=sys.argv[1:]):
try:
return TackerShell(TACKER_API_VERSION).run(
list(map(encodeutils.safe_decode, argv)))
except KeyboardInterrupt:
print("... terminating tacker client", file=sys.stderr)
return 130
except exc.TackerClientException:
return 1
except Exception as e:
print(e)
return 1
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
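# --- Illustrative sketch (not part of tackerclient) -------------------------
# authenticate_user() above accepts two credential sets; they are expressed
# here as example environment mappings. All values are placeholders.
EXAMPLE_TOKEN_FLOW_ENV = {
    'OS_TOKEN': 'abc123',
    'OS_URL': 'https://tacker.example:9890/',
}
EXAMPLE_PASSWORD_FLOW_ENV = {
    'OS_AUTH_URL': 'https://keystone.example:5000/v3',
    'OS_USERNAME': 'demo',
    'OS_PASSWORD': 'secret',
    'OS_PROJECT_NAME': 'demo',
    'OS_PROJECT_DOMAIN_NAME': 'Default',
}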
| apache-2.0 | -3,938,062,603,018,916,000 | 38.419434 | 79 | 0.564185 | false | 4.262836 | false | false | false |
demisto/content | Packs/DNSDB/Integrations/DNSDB_v2/DNSDB_v2.py | 1 | 30136 | # Copyright (c) 2020 by Farsight Security, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import locale
from typing import Iterator, Dict, List, Tuple, Union, Any, Callable, Iterable
import urllib
import urllib.parse
from CommonServerPython import * # noqa: E402 lgtm [py/polluting-import]
import datetime # type: ignore[no-redef]
import json
import re
import requests
# Disable insecure warnings
requests.packages.urllib3.disable_warnings()
INTEGRATION_NAME = 'Farsight DNSDB'
INTEGRATION_COMMAND_NAME = 'dnsdb'
INTEGRATION_CONTEXT_NAME = 'DNSDB'
RECORD_SUBCONTEXT_NAME = 'Record'
SUMMARY_SUBCONTEXT_NAME = 'Summary'
RATE_SUBCONTEXT_NAME = 'Rate'
# CONSTANTS
DEFAULT_DNSDB_SERVER = 'https://api.dnsdb.info'
TIMEOUT = 60
SWCLIENT = "demisto"
VERSION = "v2.1.2"
PATH_PREFIX = 'dnsdb/v2'
IDN_REGEX = re.compile(r'(?:^|(?<=[\s=.:@]))xn--[a-z0-9\-]+\.')
FALSE_REGEX = re.compile(r'^(?i:f(alse)?)$')
COND_BEGIN = 'begin'
COND_ONGOING = 'ongoing'
COND_SUCCEEDED = 'succeeded'
COND_LIMITED = 'limited'
COND_FAILED = 'failed'
locale.setlocale(locale.LC_ALL, '')
''' HELPER FUNCTIONS '''
class QueryError(Exception):
pass
class timeval(int):
pass
class Client(BaseClient):
def __init__(self, base_url: str, apikey: str, verify=None, proxy=None):
BaseClient.__init__(
self,
base_url,
verify=verify,
headers={
'Accept': 'application/x-ndjson',
'X-Api-Key': apikey,
},
proxy=proxy,
ok_codes=(200, ),
)
self.apikey = apikey
@staticmethod
def base_params() -> dict:
return {
'swclient': SWCLIENT,
'version': VERSION,
}
def rate_limit(self) -> Dict:
params = self.base_params()
url_suffix = 'dnsdb/v2/rate_limit'
return self._http_request('GET', url_suffix=url_suffix, params=params)
def lookup_rrset(self, owner_name: str, rrtype: str = None, bailiwick: str = None, limit: int = None,
time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None) -> Iterator[Dict]:
return self._query_rrset("lookup",
owner_name=owner_name,
rrtype=rrtype,
bailiwick=bailiwick,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
offset=offset)
def summarize_rrset(self, owner_name: str, rrtype: str = None, bailiwick: str = None, limit: int = None,
time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, max_count: int = None) -> dict:
try:
return next(self._query_rrset("summarize",
owner_name=owner_name,
rrtype=rrtype,
bailiwick=bailiwick,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
max_count=max_count))
except StopIteration:
raise QueryError("no data")
def _query_rrset(self, mode: str, owner_name: str, rrtype: str = None, bailiwick: str = None, limit: int = None,
time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None, max_count: int = None) -> Iterator[Dict]:
owner_name = quote(to_ascii(owner_name))
if bailiwick:
if not rrtype:
rrtype = 'ANY'
bailiwick = quote(to_ascii(bailiwick))
path = f'{PATH_PREFIX}/{mode}/rrset/name/{owner_name}/{rrtype}/{bailiwick}'
elif rrtype:
path = f'{PATH_PREFIX}/{mode}/rrset/name/{owner_name}/{rrtype}'
else:
path = f'{PATH_PREFIX}/{mode}/rrset/name/{owner_name}'
return self._query(path, limit=limit, time_first_before=time_first_before, time_first_after=time_first_after,
time_last_before=time_last_before, time_last_after=time_last_after,
aggr=aggr, offset=offset, max_count=max_count)
def lookup_rdata_name(self, value: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None) -> Iterator[Dict]:
return self._query_rdata_name("lookup",
name=value,
rrtype=rrtype,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
offset=offset)
def summarize_rdata_name(self, value: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, max_count: int = None) -> dict:
try:
return next(self._query_rdata_name("summarize",
name=value,
rrtype=rrtype,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
max_count=max_count))
except StopIteration:
raise QueryError("no data")
def _query_rdata_name(self, mode: str, name: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None, max_count: int = None) -> Iterator[Dict]:
rdata_name = quote(to_ascii(name))
if rrtype:
path = f'{PATH_PREFIX}/{mode}/rdata/name/{rdata_name}/{rrtype}'
else:
path = f'{PATH_PREFIX}/{mode}/rdata/name/{rdata_name}'
return self._query(path, limit=limit, time_first_before=time_first_before, time_first_after=time_first_after,
time_last_before=time_last_before, time_last_after=time_last_after,
aggr=aggr, offset=offset, max_count=max_count)
def lookup_rdata_ip(self, value: str, limit: int = None,
time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None) -> Iterator[Dict]:
return self._query_rdata_ip("lookup",
ip=value,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
offset=offset)
def summarize_rdata_ip(self, value: str, limit: int = None,
time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, max_count: int = None) -> dict:
try:
return next(self._query_rdata_ip("summarize",
ip=value,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
max_count=max_count))
except StopIteration:
raise QueryError("no data")
def _query_rdata_ip(self, mode: str, ip: str,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None, max_count: int = None) -> Iterator[Dict]:
ip = ip.replace('/', ',')
path = f'{PATH_PREFIX}/{mode}/rdata/ip/{ip}'
return self._query(path, limit=limit, time_first_before=time_first_before, time_first_after=time_first_after,
time_last_before=time_last_before, time_last_after=time_last_after,
aggr=aggr, offset=offset, max_count=max_count)
def lookup_rdata_raw(self, value: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None) -> Iterator[Dict]:
return self._query_rdata_raw("lookup",
raw=value,
rrtype=rrtype,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
offset=offset)
def summarize_rdata_raw(self, value: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, max_count: int = None) -> dict:
try:
return next(self._query_rdata_raw("summarize",
raw=value,
rrtype=rrtype,
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
max_count=max_count))
except StopIteration:
raise QueryError("no data")
def _query_rdata_raw(self, mode: str, raw: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None, max_count: int = None) -> Iterator[Dict]:
if rrtype:
path = f'{PATH_PREFIX}/{mode}/rdata/raw/{quote(raw)}/{rrtype}'
else:
path = f'{PATH_PREFIX}/{mode}/rdata/raw/{quote(raw)}'
return self._query(path, limit=limit, time_first_before=time_first_before, time_first_after=time_first_after,
time_last_before=time_last_before, time_last_after=time_last_after,
aggr=aggr, offset=offset, max_count=max_count)
def flex(self, method: str, key: str, value: str, rrtype: str = None,
limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None):
path = f'{PATH_PREFIX}/{method}/{key}/{quote(value)}'
if rrtype:
path += f'/{rrtype}'
return self._query(path, limit=limit, time_first_before=time_first_before, time_first_after=time_first_after,
time_last_before=time_last_before, time_last_after=time_last_after)
def _query(self, path: str, limit: int = None, time_first_before: timeval = None, time_first_after: timeval = None,
time_last_before: timeval = None, time_last_after: timeval = None,
aggr: bool = None, offset: int = None, max_count: int = None) -> Iterator[Dict]:
params = self.base_params()
params.update(
assign_params(
limit=limit,
time_first_before=time_first_before,
time_first_after=time_first_after,
time_last_before=time_last_before,
time_last_after=time_last_after,
aggr=aggr,
offset=offset,
max_count=max_count,
)
)
res = self._http_request('GET', path,
params=params,
stream=True,
resp_type='response',
timeout=TIMEOUT)
return _handle_saf(res.iter_lines(decode_unicode=True))
def _handle_saf(i: Iterable[str]):
for line in i:
if not line:
continue
try:
saf_msg = json.loads(line)
except json.JSONDecodeError as e:
raise DemistoException(f'saf protocol error: could not decode json: {line}') from e
cond = saf_msg.get('cond')
obj = saf_msg.get('obj')
msg = saf_msg.get('msg')
if cond == COND_BEGIN:
continue
elif cond == COND_SUCCEEDED:
return
if obj:
yield obj
if cond == COND_ONGOING or not cond:
continue
elif cond == COND_LIMITED:
return
elif cond == COND_FAILED:
raise QueryError(f'saf query failed: {msg}')
else:
raise QueryError(f'saf protocol error: invalid cond: {cond}')
raise QueryError('saf query truncated')
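# Illustrative sketch (assumption: run standalone, outside the integration):
# _handle_saf consumes a Streaming-API-Framing (SAF) stream and yields only
# the 'obj' payloads, stopping cleanly on a 'succeeded' condition.
_EXAMPLE_SAF_STREAM = [
    '{"cond": "begin"}',
    '{"obj": {"rrname": "example.com.", "rrtype": "A"}}',
    '{"cond": "succeeded"}',
]
# list(_handle_saf(_EXAMPLE_SAF_STREAM))
# -> [{'rrname': 'example.com.', 'rrtype': 'A'}]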
def quote(path: str) -> str:
return urllib.parse.quote(path, safe='')
@logger
def _run_query(f, args):
sig = inspect.signature(f)
kwargs = {} # type: Dict[str, Any]
for name, p in sig.parameters.items():
if name in args:
if p.annotation != p.empty:
if p.annotation == bool:
if FALSE_REGEX.match(args[name]):
kwargs[name] = False
else:
kwargs[name] = True
elif p.annotation == timeval:
try:
kwargs[name] = int(args[name])
except ValueError:
kwargs[name] = date_to_timestamp(args[name])
else:
kwargs[name] = p.annotation(args[name])
else:
kwargs[name] = args[name]
elif p.kind == p.POSITIONAL_ONLY:
raise Exception(f'Missing argument: {name}')
return f(**kwargs)
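# Illustrative sketch (_example_query is hypothetical, for demonstration
# only): _run_query binds string command arguments to a function's annotated
# parameters, so 'limit' is coerced to int and 'aggr' to bool via FALSE_REGEX.
def _example_query(owner_name: str, limit: int = None, aggr: bool = None):
    return owner_name, limit, aggr
# _run_query(_example_query, {'owner_name': 'example.com',
#                             'limit': '5', 'aggr': 'false'})
# -> ('example.com', 5, False)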
def to_unicode(domain: str) -> str:
try:
return domain.encode('utf8').decode('idna')
except UnicodeError:
return domain
def to_ascii(domain: str) -> str:
try:
return domain.encode('idna').decode('utf8')
except UnicodeError:
return domain
def format_name_for_context(domain: str) -> str:
return domain.rstrip('.')
def format_name_for_markdown(domain: str) -> str:
return to_unicode(domain.rstrip('.'))
def parse_rdata(rdata: Union[str, List[str]]):
if isinstance(rdata, list):
return [parse_rdata(entry) for entry in rdata] # pragma: no cover
def f(m):
return to_unicode(m.group(0))
return str(IDN_REGEX.sub(f, rdata))
def format_rdata_for_markdown(rdata: Union[str, List[str]]):
rdata = parse_rdata(rdata)
if isinstance(rdata, str):
return rdata
return '<br>'.join(rdata)
def parse_rate_limit_int(i):
try:
return int(i)
except ValueError:
return i
def parse_unix_time(ts) -> str:
try:
return datetime.datetime.utcfromtimestamp(ts).strftime("%Y-%m-%dT%H:%M:%SZ") # type: ignore[attr-defined]
except TypeError:
return ts
def nop(x):
return x
@logger
def build_result_context(results: Dict) -> Dict:
ctx = {}
for ckey, rkey, f in (
('RRName', 'rrname', format_name_for_context),
('RRType', 'rrtype', str),
('Bailiwick', 'bailiwick', format_name_for_context),
('RData', 'rdata', nop),
('RawRData', 'raw_rdata', nop),
('Count', 'count', int),
('NumResults', 'num_results', int),
('TimeFirst', 'time_first', parse_unix_time),
('TimeLast', 'time_last', parse_unix_time),
('TimeFirst', 'zone_time_first', parse_unix_time),
('TimeLast', 'zone_time_last', parse_unix_time),
):
if rkey in results:
ctx[ckey] = f(results[rkey]) # type: ignore[operator]
if 'zone_time_first' in results or 'time_first' in results:
ctx['FromZoneFile'] = 'zone_time_first' in results
return ctx
@logger
def build_rate_limits_context(results: Dict) -> Dict:
"""Formatting results from Rate Limit API to Demisto Context"""
rate = results.get('rate')
if rate is None:
raise ValueError("Missing rate key")
ctx = {}
if rate['limit'] == 'unlimited':
return {
'Unlimited': True
}
for ckey, rkey, f in (
('Limit', 'limit', parse_rate_limit_int),
('Remaining', 'remaining', parse_rate_limit_int),
('Expires', 'expires', parse_unix_time),
('ResultsMax', 'results_max', parse_rate_limit_int),
('BurstSize', 'burst_size', parse_rate_limit_int),
('BurstWindow', 'burst_window', parse_rate_limit_int),
):
if rkey in rate:
ctx[ckey] = f(rate[rkey])
if 'reset' in rate:
if rate['reset'] == "n/a":
ctx['NeverResets'] = True
else:
ctx['Reset'] = parse_unix_time(rate['reset'])
if 'offset_max' in rate:
if rate['offset_max'] == "n/a":
ctx['OffsetNotAllowed'] = True
else:
ctx['OffsetMax'] = parse_rate_limit_int(rate['offset_max'])
return ctx
@logger
def lookup_to_markdown(results: List[Dict], title: str = 'Farsight DNSDB Lookup', want_bailiwick=True, header_filter=None) -> str:
# TODO this should be more specific, include arguments?
out = []
keys = [
('RRName', 'rrname', format_name_for_context),
('RRType', 'rrtype', str),
('Bailiwick', 'bailiwick', format_name_for_context),
('RData', 'rdata', format_rdata_for_markdown),
('Count', 'count', str),
] # type: List[Tuple[str, str, Callable]]
if not want_bailiwick:
keys = list(filter(lambda r: r[1] != 'bailiwick', keys))
headers = [k[0] for k in keys] + ['TimeFirst', 'TimeLast', 'FromZoneFile']
if header_filter:
headers = list(filter(header_filter, headers))
for result in results:
row = dict() # type: Dict[str, Any]
for ckey, rkey, f in keys:
if rkey in result:
row[ckey] = f(result[rkey])
if 'time_first' in result:
row['TimeFirst'] = parse_unix_time(result['time_first'])
elif 'zone_time_first' in result:
row['TimeFirst'] = parse_unix_time(result['zone_time_first'])
if 'time_last' in result:
row['TimeLast'] = parse_unix_time(result['time_last'])
elif 'zone_time_last' in result:
row['TimeLast'] = parse_unix_time(result['zone_time_last'])
row['FromZoneFile'] = str("zone_time_first" in result)
out.append(row)
return tableToMarkdown(title, out, headers=headers)
@logger
def summarize_to_markdown(summary: Dict) -> str:
headers = []
out = dict() # type: Dict[str, Any]
for ckey, rkey, f in (
('Count', 'count', int),
('NumResults', 'num_results', int),
('TimeFirst', 'time_first', parse_unix_time),
('TimeLast', 'time_last', parse_unix_time),
('ZoneTimeFirst', 'zone_time_first', parse_unix_time),
('ZoneTimeLast', 'zone_time_last', parse_unix_time),
):
if rkey in summary:
headers.append(ckey)
out[ckey] = f(summary[rkey]) # type: ignore[operator]
return tableToMarkdown('Farsight DNSDB Summarize', out, headers=headers)
@logger
def rate_limit_to_markdown(results: Dict) -> str:
rate = results.get('rate')
if rate is None:
return '### Error'
out = dict() # type: Dict[str, Any]
headers = []
if rate['limit'] != "unlimited":
for ckey, rkey, f in (
('Limit', 'limit', parse_rate_limit_int),
('Remaining', 'remaining', parse_rate_limit_int),
('Reset', 'reset', parse_unix_time),
('Expires', 'expires', parse_unix_time),
('ResultsMax', 'results_max', parse_rate_limit_int),
('OffsetMax', 'offset_max', parse_rate_limit_int),
('BurstSize', 'burst_size', parse_rate_limit_int),
('BurstWindow', 'burst_window', parse_rate_limit_int),
):
if rkey in rate:
headers.append(ckey)
if rkey == 'reset':
if rate[rkey] == "n/a":
NEVER_RESETS = 'NeverResets'
out[NEVER_RESETS] = True
headers.append(NEVER_RESETS)
else:
out[f'{ckey}'] = f(rate[rkey])
elif rkey == 'offset_max':
if rate[rkey] == "n/a":
OFFSET_NOT_ALLOWED = 'OffsetNotAllowed'
out[OFFSET_NOT_ALLOWED] = True
headers.append(OFFSET_NOT_ALLOWED)
else:
out[f'{ckey}'] = f(rate[rkey])
else:
out[f'{ckey}'] = f(rate[rkey])
else:
UNLIMITED = 'Unlimited'
out[UNLIMITED] = True
headers.append(UNLIMITED)
return tableToMarkdown('Farsight DNSDB Service Limits', out, headers=headers)
''' COMMANDS '''
@logger
def test_module(client, _):
try:
client.rate_limit()
except DemistoException as e:
if 'forbidden' in str(e):
return 'Authorization Error: make sure API Key is correctly set'
else:
raise e
return 'ok'
@logger
def dnsdb_flex(client, args):
res = list(_run_query(client.flex, args))
def skip_rrname(header) -> bool:
return header.lower() not in ('rrname', 'fromzonefile')
def skip_rdata(header) -> bool:
return header.lower() not in ('rdata', 'fromzonefile')
if args.get('key') == 'rdata':
skip = skip_rrname
else:
skip = skip_rdata
return CommandResults(
readable_output=lookup_to_markdown(res, title='Farsight DNSDB Flex Search', want_bailiwick=False,
header_filter=skip),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{RECORD_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=[build_result_context(r) for r in res],
)
@logger
def dnsdb_rdata(client, args):
type = args.get('type')
if type == 'name':
res = list(_run_query(client.lookup_rdata_name, args))
elif type == 'ip':
res = list(_run_query(client.lookup_rdata_ip, args))
elif type == 'raw':
res = list(_run_query(client.lookup_rdata_raw, args))
else:
raise Exception(f'Invalid rdata query type: {type}')
return CommandResults(
readable_output=lookup_to_markdown(res, want_bailiwick=False),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{RECORD_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=[build_result_context(r) for r in res],
)
@logger
def dnsdb_summarize_rdata(client, args):
type = args.get('type')
if type == 'name':
res = _run_query(client.summarize_rdata_name, args)
elif type == 'ip':
res = _run_query(client.summarize_rdata_ip, args)
elif type == 'raw':
res = _run_query(client.summarize_rdata_raw, args)
else:
raise Exception(f'Invalid rdata query type: {type}')
return CommandResults(
readable_output=summarize_to_markdown(res),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{SUMMARY_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=build_result_context(res),
)
@logger
def dnsdb_rrset(client, args):
q = _run_query(client.lookup_rrset, args)
res = list(q)
return CommandResults(
readable_output=lookup_to_markdown(res),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{RECORD_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=[build_result_context(r) for r in res],
)
@logger
def dnsdb_summarize_rrset(client, args):
res = _run_query(client.summarize_rrset, args)
return CommandResults(
readable_output=summarize_to_markdown(res),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{SUMMARY_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=build_result_context(res),
)
@logger
def dnsdb_rate_limit(client, _):
res = client.rate_limit()
return CommandResults(
readable_output=rate_limit_to_markdown(res),
outputs_prefix=f'{INTEGRATION_CONTEXT_NAME}.{RATE_SUBCONTEXT_NAME}',
outputs_key_field='',
outputs=build_rate_limits_context(res),
)
def main():
"""
PARSE AND VALIDATE INTEGRATION PARAMS
"""
apikey = demisto.params().get('apikey')
base_url = demisto.params().get('url')
if not base_url:
base_url = DEFAULT_DNSDB_SERVER
verify_certificate = not demisto.params().get('insecure', False)
proxy = demisto.params().get('proxy', False)
client = Client(
base_url,
apikey,
verify=verify_certificate,
proxy=proxy)
command = demisto.command()
LOG(f'Command being called is {command}')
try:
if command == 'test-module':
return_results(test_module(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-flex':
return_results(dnsdb_flex(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-rdata':
return_results(dnsdb_rdata(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-summarize-rdata':
return_results(dnsdb_summarize_rdata(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-rrset':
return_results(dnsdb_rrset(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-summarize-rrset':
return_results(dnsdb_summarize_rrset(client, demisto.args()))
elif command == f'{INTEGRATION_COMMAND_NAME}-rate-limit':
return_results(dnsdb_rate_limit(client, demisto.args()))
# Log exceptions
except Exception as e:
demisto.error(traceback.format_exc()) # print the traceback
err_msg = f'Error in {INTEGRATION_NAME} Integration [{e}]'
return_error(err_msg, error=e)
if __name__ in ('__main__', '__builtin__', 'builtins'):
main()
| mit | -323,106,621,309,568,450 | 37.935401 | 130 | 0.525684 | false | 3.943986 | false | false | false |
codeforsanjose/calischools | schools/serializers.py | 1 | 1123 | from rest_framework import serializers
from .models import County, District, School
class CountySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = County
class CountyMixin(serializers.Serializer):
county = CountySerializer(read_only=True)
class DistrictCompactSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = District
exclude = ('county',)
class DistrictSerializer(CountyMixin,
serializers.HyperlinkedModelSerializer):
class Meta:
model = District
class DistrictCompactMixin(serializers.Serializer):
district = DistrictCompactSerializer(read_only=True)
class SchoolCompactSerializer(serializers.HyperlinkedModelSerializer):
short_code = serializers.ReadOnlyField()
class Meta:
model = School
fields = ('url', 'short_code', 'name',)
class SchoolSerializer(DistrictCompactMixin,
CountyMixin,
serializers.HyperlinkedModelSerializer):
short_code = serializers.ReadOnlyField()
class Meta:
model = School
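# Illustrative sketch (not part of the app): a hypothetical read-only viewset
# wiring the serializers above into an API endpoint.
from rest_framework import viewsets
class SchoolViewSet(viewsets.ReadOnlyModelViewSet):
    queryset = School.objects.all()
    serializer_class = SchoolSerializer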
| mit | -468,564,356,021,282,600 | 23.955556 | 72 | 0.700801 | false | 4.221805 | false | false | false |
mdavoodi/konkourse-python | conversation/views.py | 1 | 4095 | from django.http import HttpResponse
from django.utils import simplejson
from account.models import UserProfile
from conversation.models import ConversationPost, ConvoWall, ConversationComment
from notification.views import notifyComment, notifyPost
def post(request):
results = {'success': False}
if request.user.is_authenticated() and request.user.is_active:
if request.method == 'POST':
POST = request.POST
wall = ConvoWall.objects.get(id=POST['id'])
message = POST['message']
if message == '':
results = {'success': False}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
elif len(message) > 5000:
results = {'success': False, 'error': 'invalid length'}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
post_type = POST['type']
wallPost = ConversationPost(creator=request.user, wall=wall, message=message, post_type=post_type)
wallPost.save()
notifyPost(request=request, wall=wall, post=wallPost)
results = {'success': True}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
def comment(request):
results = {'success': False}
if request.user.is_authenticated() and request.user.is_active:
if request.method == 'POST':
POST = request.POST
convoPost = ConversationPost.objects.get(id=POST['id'])
message = POST['message']
if message == '':
results = {'success': False}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
elif len(message) > 5000:
results = {'success': False, 'error': 'invalid length'}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
comment = ConversationComment(creator=request.user, message=message, post=convoPost)
comment.save()
convoPost.comments.add(comment)
convoPost.save()
notifyComment(request=request, post=convoPost, comment=comment)
results = {'success': True}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
def deletePost(request):
results = {'success': False}
if request.user.is_authenticated() and request.user.is_active:
if request.method == 'POST':
POST = request.POST
convoPost = ConversationPost.objects.get(id=POST['id'])
parent = convoPost.wall.getParent
if convoPost.creator != request.user or (isinstance(parent, UserProfile) and parent.user != request.user):
results = {'success': False}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
convoPost.deleted = True
convoPost.save()
results = {'success': True}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
def deleteComment(request):
results = {'success': False}
if request.user.is_authenticated() and request.user.is_active:
if request.method == 'POST':
POST = request.POST
comment = ConversationComment.objects.get(id=POST['id'])
parent = comment.post.wall.getParent
            if comment.creator != request.user or comment.post.creator != request.user or (isinstance(parent, UserProfile) and parent.user != request.user):
results = {'success': False}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
comment.deleted = True
comment.save()
results = {'success': True}
json = simplejson.dumps(results)
return HttpResponse(json, mimetype='application/json')
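# Illustrative sketch (assumption, not part of the original app): the
# serialize-and-respond tail repeated in every view above could be factored
# into a single helper like this.
def _json_response(results):
    return HttpResponse(simplejson.dumps(results), mimetype='application/json')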
| mit | -4,804,500,232,000,305,000 | 44.5 | 148 | 0.615873 | false | 4.539911 | false | false | false |
XENON1T/processing | montecarlo/fax_waveform/CreateFakeCSV.py | 1 | 4806 | #################################
## Sub-code used in WF simulation
## It creates a csv file for the input of fax
## by Qing Lin
## @ 2016-09-12
##
## HARDCODE WARNING: The FV dimensions below need to be modified
## according to the detector you wish to simulate
##
## Ref: http://xenon1t.github.io/pax/simulator.html#instruction-file-format
## Code: https://github.com/XENON1T/pax/blob/master/pax/plugins/io/WaveformSimulator.py#L244
##
#################################
import sys
import numpy as np
import scipy as sp
if len(sys.argv) < 10:  # the script requires the 9 arguments listed below
print("========= Syntax ==========")
print("python CreateFakeCSV.py ..... ")
print("<detector: XENON100, XENON1T>")
print("<number of events>")
print("<photon number lower>")
print("<photon number upper>")
print("<electron number lower>")
print("<electron number upper>")
print("<recoil type: ER, NR>")
print("<output file (abs. path)>")
print("<If force S1-S2 correlation (0 for no; 1 for yes)>")
exit()
Detector = sys.argv[1]
NumEvents = int(sys.argv[2])
PhotonNumLower = float(sys.argv[3])
PhotonNumUpper = float(sys.argv[4])
ElectronNumLower = float(sys.argv[5])
ElectronNumUpper = float(sys.argv[6])
DefaultType = sys.argv[7]
OutputFilename = sys.argv[8]
IfS1S2Correlation = True
if int(sys.argv[9])==0:
IfS1S2Correlation = False
####################################
## Some nuisance parameters (HARDCODE WARNING):
####################################
MaxDriftTime = 650. # us
####################################
## Some functions (HARDCODE WARNING):
####################################
# Current FV cut for Xe1T
scalecmtomm=1
def radius2_cut(zpos):
return 1400*scalecmtomm**2+(zpos+100*scalecmtomm)*(2250-1900)*scalecmtomm/100
def IfPassFV(x,y,z):
if Detector == "XENON100":
# check if the x,y,z passing X48kg0
I = np.power( (z+15.)/14.6, 4.)
I += np.power( (x**2+y**2)/20000., 4.)
if I<1:
return True
elif Detector == "XENON1T": # NEED TO UPDATE THIS
Zlower, Zupper = -90*scalecmtomm, -15*scalecmtomm
Zcut = ((z>=Zlower) & (z<=Zupper))
R2upper=radius2_cut(z)
Rcut = (x**2+y**2<R2upper)
if(Zcut & Rcut):
return True
return False
def RandomizeFV():
# randomize the X, Y, Z according to X48kg FV
if Detector == "XENON100":
Zlower, Zupper = -14.6-15.0, -14.6+15.0
Rlower, Rupper = -np.sqrt(200.), np.sqrt(200.)
elif Detector == "XENON1T": # NEED TO UPDATE THIS
Zlower, Zupper = -90*scalecmtomm, -15*scalecmtomm
Rlower, Rupper = -46*scalecmtomm, 46*scalecmtomm
for i in range(100000):
x = np.random.uniform(Rlower,Rupper)
y = np.random.uniform(Rlower,Rupper)
z = np.random.uniform(Zlower,Zupper)
if IfPassFV(x,y,z):
return (x,y,z)
return (0,0,0)
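# Quick self-check sketch (assumption: pasted into an interactive session
# after the globals above are set; the production script never calls it).
def _check_fv_sampling(n=100):
    for _ in range(n):
        x, y, z = RandomizeFV()
        # RandomizeFV falls back to (0, 0, 0) after 100000 failed draws.
        assert IfPassFV(x, y, z) or (x, y, z) == (0, 0, 0)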
####################################
## Starts to create
####################################
# Some default
DefaultEventTime = MaxDriftTime*1000.
##########
fout = open(OutputFilename, 'w')
# headers
fout.write("instruction,recoil_type,x,y,depth,s1_photons,s2_electrons,t\n")
if IfS1S2Correlation:
# events loop
for i in range(NumEvents):
fout.write(str(i)+",")
fout.write(DefaultType+",")
X, Y, Z = RandomizeFV()
fout.write(str(X)+",")
fout.write(str(Y)+",")
#fout.write("random,")
#fout.write("random,")
fout.write(str(-Z)+",")
NumPhoton = int( np.random.uniform(PhotonNumLower, PhotonNumUpper) )
fout.write(str(NumPhoton)+",")
NumElectron = int( np.random.uniform(ElectronNumLower, ElectronNumUpper) )
fout.write(str(NumElectron)+",")
fout.write(str(DefaultEventTime)+"\n")
else:
# events loop S1-S2 no correlation
for i in range(NumEvents):
# first for S1
fout.write(str(i)+",")
fout.write(DefaultType+",")
X, Y, Z = RandomizeFV()
fout.write(str(X)+",")
fout.write(str(Y)+",")
fout.write(str(-Z)+",")
NumPhoton = int( np.random.uniform(PhotonNumLower, PhotonNumUpper) )
fout.write(str(NumPhoton)+",")
fout.write("0,")
fout.write(str(DefaultEventTime)+"\n")
# second for S2
fout.write(str(i)+",")
fout.write(DefaultType+",")
X, Y, Z = RandomizeFV()
fout.write(str(X)+",")
fout.write(str(Y)+",")
fout.write(str(-Z)+",")
fout.write("0,")
NumElectron = int( np.random.uniform(ElectronNumLower, ElectronNumUpper) )
fout.write(str(NumElectron)+",")
TimeOffset = np.random.uniform(-MaxDriftTime*1000., MaxDriftTime*1000.)
S2EventTime = DefaultEventTime+TimeOffset
fout.write(str(S2EventTime)+"\n")
fout.close()
| apache-2.0 | -7,343,586,964,277,585,000 | 31.04 | 92 | 0.572201 | false | 3.114712 | false | false | false |
teheavy/AMA3D | Nh3D/3_CathTopo_uploader.py | 1 | 1726 | # Script Version: 1.0
# Author: Te Chen
# Project: AMA3D
# Task Step: 1
# This script is specifically for loading the CATH node-name file and recording all topology-level entries into the database.
# CathDomainList File format: Cath Names File (CNF) Format 2.0, to find more info, please visit www.cathdb.info
import MySQLdb
import os
import sys
# Connect to Database by reading Account File.
with open("Account", "r") as file:
parsed = file.readline().split()
DB = MySQLdb.connect(host=parsed[0], user=parsed[1], passwd=parsed[2], db=parsed[3])
cursor = DB.cursor()
# Read the node list and register CATH topology into database.
os.getcwd()
node_file = open("./Nh3D/CathNames", "r")
line = node_file.readline()
trigger = ''
while line:
if line.startswith("#") == False and line != "":
node_info = line.split(" ")
if len(node_info) == 3:
if node_info[0].count('.') == 2:
print "Working on Node: " + node_info[0]
cursor.execute("""INSERT INTO Topology(Node, Description, Comment, Representative) VALUES (\'%s\', \'%s\', \'%s\', \'%s\')"""
% (node_info[0], str(MySQLdb.escape_string(node_info[2][1:-1])), 'from CathNames', node_info[1]))
# print """INSERT INTO Topology(Node, Description, Comment, Representative) VALUES (\'%s\', \'%s\', \'%s\', \'%s\')"""\
# % (node_info[0], (node_info[2][1:-1]).replace(";", ""), 'from CathNames', node_info[1])
# Trigger a new TC
print trigger
sys.stdout.flush()
trigger = "trigger\t%s\t%d\t%d"%(node_info[0], 4, 0)
elif node_info[0].count('.') == 3:
# Trigger a new TC but leave last flag on.
print trigger[:-1] + "1"
sys.stdout.flush()
break
line = node_file.readline()
# Wrap up and close connection.
DB.commit()
DB.close() | gpl-2.0 | -5,206,685,651,074,116,000 | 33.54 | 129 | 0.637891 | false | 3.001739 | false | false | false |
pmghalvorsen/gramps_branch | gramps/plugins/view/view.gpr.py | 1 | 8180 | # encoding:utf-8
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2009 Benny Malengier
# Copyright (C) 2009 Douglas S. Blank
# Copyright (C) 2009 Nick Hall
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
MODULE_VERSION="4.2"
#------------------------------------------------------------------------
#
# default views of Gramps
#
#------------------------------------------------------------------------
register(VIEW,
id = 'eventview',
name = _("Events"),
description = _("The view showing all the events"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'eventview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Events", _("Events")),
viewclass = 'EventView',
order = START,
)
register(VIEW,
id = 'familyview',
name = _("Families"),
description = _("The view showing all families"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'familyview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Families", _("Families")),
viewclass = 'FamilyView',
order = START,
)
register(VIEW,
id = 'dashboardview',
name = _("Dashboard"),
description = _("The view showing Gramplets"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'dashboardview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Dashboard", _("Dashboard")),
viewclass = 'DashboardView',
order = START,
)
register(VIEW,
id = 'mediaview',
name = _("Media"),
description = _("The view showing all the media objects"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'mediaview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Media", _("Media")),
viewclass = 'MediaView',
order = START,
)
register(VIEW,
id = 'noteview',
name = _("Notes"),
description = _("The view showing all the notes"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'noteview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Notes", _("Notes")),
viewclass = 'NoteView',
order = START,
)
register(VIEW,
id = 'relview',
name = _("Relationships"),
description = _("The view showing all relationships of the selected person"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'relview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Relationships", _("Relationships")),
viewclass = 'RelationshipView',
order = START,
)
register(VIEW,
id = 'pedigreeview',
name = _("Pedigree"),
description = _("The view showing an ancestor pedigree of the selected person"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'pedigreeview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Ancestry", _("Charts")),
viewclass = 'PedigreeView',
order = START,
stock_icon = 'gramps-pedigree',
)
register(VIEW,
id = 'fanchartview',
name = _("Fan Chart"),
category = ("Ancestry", _("Charts")),
description = _("A view showing parents through a fanchart"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'fanchartview.py',
authors = ["Douglas S. Blank", "B. Malengier"],
authors_email = ["[email protected]", "[email protected]"],
viewclass = 'FanChartView',
stock_icon = 'gramps-fanchart',
)
register(VIEW,
id = 'fanchartdescview',
name = _("Descendant Fan"),
category = ("Ancestry", _("Charts")),
description = _("Showing descendants through a fanchart"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'fanchartdescview.py',
authors = ["B. Malengier"],
authors_email = ["[email protected]"],
viewclass = 'FanChartDescView',
stock_icon = 'gramps-fanchartdesc',
)
register(VIEW,
id = 'personview',
name = _("Grouped People"),
description = _("The view showing all people in the Family Tree grouped per"
" family name"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'persontreeview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("People", _("People")),
viewclass = 'PersonTreeView',
order = START,
stock_icon = 'gramps-tree-group',
)
register(VIEW,
id = 'personlistview',
name = _("People"),
description = _("The view showing all people in the Family Tree"
" in a flat list"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'personlistview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("People", _("People")),
viewclass = 'PersonListView',
order = START,
stock_icon = 'gramps-tree-list',
)
register(VIEW,
id = 'placelistview',
name = _("Places"),
description = _("The view showing all the places of the Family Tree"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'placelistview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Places", _("Places")),
viewclass = 'PlaceListView',
order = START,
stock_icon = 'gramps-tree-list',
)
register(VIEW,
id = 'placetreeview',
name = _("Place Tree"),
description = _("A view displaying places in a tree format."),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'placetreeview.py',
authors = ["Donald N. Allingham", "Gary Burton", "Nick Hall"],
authors_email = [""],
category = ("Places", _("Places")),
viewclass = 'PlaceTreeView',
stock_icon = 'gramps-tree-group',
)
register(VIEW,
id = 'repoview',
name = _("Repositories"),
description = _("The view showing all the repositories"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'repoview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Repositories", _("Repositories")),
viewclass = 'RepositoryView',
order = START,
)
register(VIEW,
id = 'sourceview',
name = _("Sources"),
description = _("The view showing all the sources"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'sourceview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Sources", _("Sources")),
viewclass = 'SourceView',
order = START,
stock_icon = 'gramps-tree-list',
)
register(VIEW,
id = 'citationlistview',
name = _("Citations"),
description = _("The view showing all the citations"),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'citationlistview.py',
authors = ["The Gramps project"],
authors_email = ["http://gramps-project.org"],
category = ("Citations", _("Citations")),
viewclass = 'CitationListView',
order = START,
)
register(VIEW,
id = 'citationtreeview',
name = _("Citation Tree"),
description = _("A view displaying citations and sources in a tree format."),
version = '1.0',
gramps_target_version = MODULE_VERSION,
status = STABLE,
fname = 'citationtreeview.py',
authors = ["Tim G L Lyons", "Nick Hall"],
authors_email = [""],
category = ("Sources", _("Sources")),
viewclass = 'CitationTreeView',
stock_icon = 'gramps-tree-select',
)
| gpl-2.0 | -9,066,584,488,088,675,000 | 26.918089 | 81 | 0.665037 | false | 3.249901 | false | false | false |
minghuascode/pyj | library/pyjamas/ui/BuilderPanel.py | 1 | 3810 | """ Pyjamas UI BuilderPanel: takes a PyJsGlade builder spec and adds widgets
requested using the methods just like in any other Panel class.
Copyright (C) 2010 Luke Kenneth Casson Leighton <[email protected]>
The purpose of this class is to be able to set up a Panel of any type
that can be dynamically created using Builder, and then add child widgets
once again by their name as specified in the Builder spec file.
This class therefore has all of the usual Panel functions (add,
remove, insert, __iter__, getWidget) as well as those required
for it to be instantiable via Builder itself (!) such as
addIndexedItem, getIndex and getIndexedChild.
"""
from pyjamas.ui.BuilderWidget import BuilderWidget
class BuilderPanel(BuilderWidget):
def __init__(self, **kwargs):
self.panel_instance_name = None
BuilderWidget.__init__(self, **kwargs)
def add(self, child_instance_name, *args, **kwargs):
""" versatile adding-function, copes with:
HTMLPanel.add(widget, id)
HTMLTable.add(item, row, col)
HorizontalPanel.add(item)
VerticalPanel.add(item)
VerticalSplitPanel.add(item)
HorizontalSplitPanel.add(item)
DeckPanel.add(item)
TabPanel.add(item)
DockPanel.add(widget, direction)
StackPanel.add(widget, stackText, asHTML)
AbsolutePanel.add(widget, left, top)
FlowPanel.add(widget)
CaptionPanel.add(widget)
ScrollPanel.add(widget)
"""
widget = self.b.createInstance(child_instance_name, self.event_receiver)
self.getPanel().add(widget, *args, **kwargs)
return widget
def insert(self, child_instance_name, *args, **kwargs):
widget = self.b.createInstance(child_instance_name, self.event_receiver)
self.getPanel().insert(widget, *args, **kwargs)
return widget
def remove(self, widget, *args, **kwargs):
""" versatile removing-function, copes with:
HTMLPanel.remove(widget) # if it had one
HTMLTable.remove(item)
HorizontalPanel.remove(item)
VerticalPanel.remove(item)
VerticalSplitPanel.remove(item) # if it had one
HorizontalSplitPanel.remove(item) # if it had one
DeckPanel.remove(item)
TabPanel.remove(item)
DockPanel.remove(item)
StackPanel.remove(item, index=None)
AbsolutePanel.remove(item)
FlowPanel.add(widget)
"""
self.getPanel().remove(widget, *args, **kwargs)
def __iter__(self):
return self.b.__iter__()
def getChildren(self):
return self.b.getChildren()
def setPanelInstanceName(self, panel_instance_name):
self.panel_instance_name = panel_instance_name
def getPanel(self):
if self.panel_instance_name is None:
return self.widget
wids = self.b.widget_instances[self.instance_name]
return wids[self.panel_instance_name]
# these next three functions are part of the standard Builder API
# and are required for panels to be manageable by PyJsGlade.
def addIndexedItem(self, index, instance_name):
widget = self.b.createInstance(child_instance_name, self.event_receiver)
self.getPanel().addIndexedItem(index, widget)
def getIndexedChild(self, index):
return self.getPanel().getIndexedChild(index)
def getWidgetIndex(self, widget):
return self.getPanel().getWidgetIndex(widget)
def getWidget(self, *args):
return self.getPanel().getWidget(*args)
def getWidgetCount(self):
return self.getPanel().getWidgetCount()
def setWidgetPosition(self, *args):
return self.getPanel().setWidgetPosition(*args)
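# Illustrative usage sketch (assumption: "main_panel" and "ok_button" are
# instance names from a hypothetical PyJsGlade spec; constructor kwargs
# follow BuilderWidget, so the exact call may differ).
# panel = BuilderPanel(instance_name="main_panel")
# panel.setPanelInstanceName("main_panel")
# button = panel.add("ok_button")  # child created by its Builder name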
| apache-2.0 | 5,191,970,841,784,223,000 | 35.990291 | 80 | 0.655643 | false | 3.915725 | false | false | false |
mostaphaRoudsari/Honeybee | src/Honeybee_EnergyPlus Window Air Gap.py | 1 | 3032 | #
# Honeybee: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Honeybee.
#
# Copyright (c) 2013-2020, Mostapha Sadeghipour Roudsari <[email protected]>
# Honeybee is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Honeybee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Honeybee; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
Use this component to create a custom material for a window air gap, which can be plugged into the "Honeybee_EnergyPlus Construction" component.
_
It is important to note that this component only creates gaps of air and not other gases.
Also, the material out of this component represents only a single layer of air, which can be combined with the "Honeybee_EnergyPlus Glass Material" to make multi-pane windows.
If you have specifications for a whole window element and not individual panes of glass and gas, you are better off using the "Honeybee_EnergyPlus Window Material" component instead of this one.
-
Provided by Honeybee 0.0.66
Args:
_name: A text name for your window air gap material.
_thickness_: A number that represents the thickness of the air gap in meters. The default is set to 0.0125 meters (1.25 cm).
Returns:
EPMaterial: A window air gap material that can be plugged into the "Honeybee_EnergyPlus Construction" component.
"""
ghenv.Component.Name = "Honeybee_EnergyPlus Window Air Gap"
ghenv.Component.NickName = 'EPWindowAirGap'
ghenv.Component.Message = 'VER 0.0.66\nJUL_07_2020'
ghenv.Component.IconDisplayMode = ghenv.Component.IconDisplayMode.application
ghenv.Component.Category = "HB-Legacy"
ghenv.Component.SubCategory = "06 | Energy | Material | Construction"
#compatibleHBVersion = VER 0.0.56\nFEB_01_2015
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
try: ghenv.Component.AdditionalHelpFromDocStrings = "0"
except: pass
def main(name, thickness):
if name == None: name = "AIRGAP"
gasType = "AIR"
if thickness == None: thickness = .0125
values = [name.upper(), gasType, thickness]
comments = ["Name", "Gas type", "Thickness {m}"]
materialStr = "WindowMaterial:Gas,\n"
for count, (value, comment) in enumerate(zip(values, comments)):
        if count != len(values) - 1:
materialStr += str(value) + ", !-" + str(comment) + "\n"
else:
materialStr += str(value) + "; !-" + str(comment)
return materialStr
EPMaterial = main(_name, _thickness_)
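# Illustrative result (assumption): main("AIRGAP", 0.0125) returns the IDF text
#   WindowMaterial:Gas,
#   AIRGAP, !-Name
#   AIR, !-Gas type
#   0.0125; !-Thickness {m}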
| gpl-3.0 | 6,590,567,128,053,553,000 | 41.704225 | 194 | 0.716689 | false | 3.465143 | false | false | false |
jawrainey/sris | settings.py | 1 | 1602 | import os
class Config(object):
"""
The shared configuration settings for the flask app.
"""
# Service settings
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__)))
SERVICE_ONTOLOGY = PROJECT_ROOT + '/sris/config/ontology.json'
# Database settings
CLIENT_NAME = 'client'
SERVICE_NAME = 'service'
# These need to be set by you!
ACCOUNT_SID = os.environ.get('ACCOUNT_SID', None)
AUTH_TOKEN = os.environ.get('AUTH_TOKEN', None)
NUM = os.environ.get('NUM', None)
class ProdConfig(Config):
"""
Setup the production configuration for the flask app.
Args:
Config (object): Inherit the default shared configuration settings.
"""
DEBUG = False
# These are set server-side for ease-of-use when using PaaS.
SQLALCHEMY_BINDS = {
Config.CLIENT_NAME: os.environ.get('CLIENT_DATABASE_URL', None),
Config.SERVICE_NAME: os.environ.get('SERVICE_DATABASE_URL', None)
}
class DevConfig(Config):
"""
Setup the development configuration for the flask app.
Args:
Config (object): Inherit the default shared configuration settings.
"""
DEBUG = True
# Store these in the root directly.
CLIENT_DB = os.path.join(Config.PROJECT_ROOT, Config.CLIENT_NAME + '.db')
SERVICE_DB = os.path.join(Config.PROJECT_ROOT, Config.SERVICE_NAME + '.db')
# Support for multiple databases (client & service)
SQLALCHEMY_BINDS = {
Config.CLIENT_NAME: 'sqlite:///{0}'.format(CLIENT_DB),
Config.SERVICE_NAME: 'sqlite:///{0}'.format(SERVICE_DB)
}
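# A minimal usage sketch (assumed wiring, not part of this module): the config
# class would typically be selected by an environment flag and handed to Flask.
# The PRODUCTION flag name below is hypothetical.
#
#   import os
#   from flask import Flask
#
#   app = Flask(__name__)
#   app.config.from_object(ProdConfig if os.environ.get('PRODUCTION') else DevConfig)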
| mit | -7,199,926,662,232,172,000 | 29.807692 | 79 | 0.651685 | false | 3.657534 | true | false | false |
kostya0shift/SyncToGit | synctogit/Config.py | 1 | 1728 | from __future__ import absolute_import
try:
    import configparser
except ImportError:
    import ConfigParser as configparser
class _NotSet(object):
pass
class ConfigException(Exception):
pass
class Config:
def __init__(self, conffile):
self.conffile = conffile
self.conf = configparser.ConfigParser()
with open(self.conffile, 'r') as f:
self.conf.readfp(f)
def _get(self, section, key, getter, default=_NotSet()):
if not self.conf.has_section(section):
if isinstance(default, _NotSet):
raise ConfigException('Section %s is missing' % section)
else:
return default
if not self.conf.has_option(section, key):
if isinstance(default, _NotSet):
raise ConfigException('Key %s from section %s is missing' % (key, section))
else:
v = default
else:
v = getter(section, key)
return v
def get_int(self, section, key, default=_NotSet()):
v = self._get(section, key, self.conf.getint, default)
return int(v)
def get_string(self, section, key, default=_NotSet()):
v = self._get(section, key, self.conf.get, default)
return "" + v
def get_boolean(self, section, key, default=_NotSet()):
v = self._get(section, key, self.conf.getboolean, default)
return bool(v)
def _write(self):
with open(self.conffile, 'w') as f:
self.conf.write(f)
def set(self, section, key, value):
self.conf.set(section, key, value)
self._write()
def unset(self, section, key):
self.conf.remove_option(section, key)
self._write()
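# A minimal usage sketch, assuming a "settings.ini" file with a [service]
# section (the file name and keys here are hypothetical):
#
#   config = Config('settings.ini')
#   port = config.get_int('service', 'port', default=8080)
#   debug = config.get_boolean('service', 'debug', default=False)
#   config.set('service', 'last_run', '2016-01-01')  # persisted to disk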
| mit | 587,310,397,621,717,500 | 25.584615 | 91 | 0.58044 | false | 3.945205 | true | false | false |
FiveEye/ml-notebook | dlp/ch6_2_pretrained_embedding.py | 1 | 2633 | import os
imdb_dir = '/home/han/code/data/aclImdb'
train_dir = os.path.join(imdb_dir, 'train')
# Processing the labels of the raw IMDB data
labels = []
texts = []
for label_type in ['neg', 'pos']:
dir_name = os.path.join(train_dir, label_type)
for fname in os.listdir(dir_name):
if fname[-4:] == '.txt':
f = open(os.path.join(dir_name, fname))
texts.append(f.read())
f.close()
if label_type == 'neg':
labels.append(0)
else:
labels.append(1)
# Tokenizing the text of the raw IMDB data
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
import numpy as np
maxlen = 100
training_samples = 10000
validation_samples = 10000
max_words = 10000
tokenizer = Tokenizer(num_words=max_words)
tokenizer.fit_on_texts(texts)
sequences = tokenizer.texts_to_sequences(texts)
word_index = tokenizer.word_index
data = pad_sequences(sequences, maxlen=maxlen)
labels = np.asarray(labels)
indices = np.arange(data.shape[0])
np.random.shuffle(indices)
data = data[indices]
labels = labels[indices]
x_train = data[:training_samples]
y_train = labels[:training_samples]
x_val = data[training_samples : training_samples + validation_samples]
y_val = labels[training_samples : training_samples + validation_samples]
# Parsing the GloVe word-embedding file
glove_dir = '/home/han/code/models/glove.6B'
embeddings_index = {}
f = open(os.path.join(glove_dir, 'glove.6B.100d.txt'))
for line in f:
values = line.split()
word = values[0]
coefs = np.asarray(values[1:], dtype='float32')
embeddings_index[word] = coefs
f.close()
# preparing the glove matrix
embedding_dim = 100
embedding_matrix = np.zeros((max_words, embedding_dim))
for word, i in word_index.items():
if i < max_words:
embedding_vector = embeddings_index.get(word)
if embedding_vector is not None:
embedding_matrix[i] = embedding_vector
else:
print("Not found ", word)
# build model
from keras.models import Sequential
from keras.layers import Embedding, Dense, Flatten, LSTM
model = Sequential()
model.add(Embedding(max_words, embedding_dim, input_length=maxlen))
model.add(LSTM(32))
#model.add(Flatten())
#model.add(Dense(32, activation='relu'))
model.add(Dense(1, activation='sigmoid'))
model.layers[0].set_weights([embedding_matrix])
model.layers[0].trainable = False
print(model.summary())
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['acc'])
history = model.fit(x_train, y_train, epochs=10, batch_size=32, validation_data=(x_val, y_val))
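# A minimal follow-up sketch (assumed): score the trained model and run one
# prediction on the held-out data.
#
#   loss, acc = model.evaluate(x_val, y_val)
#   print('val acc:', acc)
#   print('p(positive):', model.predict(x_val[:1])[0][0])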
| mit | -4,351,887,156,097,140,700 | 23.813725 | 95 | 0.692366 | false | 3.022962 | false | false | false |
seanfisk/buzzword-bingo-server | djangorestframework/status.py | 1 | 1455 | """
Descriptive HTTP status codes, for code readability.
See RFC 2616 - Sec 10: http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html
Also see django.core.handlers.wsgi.STATUS_CODE_TEXT
"""
HTTP_100_CONTINUE = 100
HTTP_101_SWITCHING_PROTOCOLS = 101
HTTP_200_OK = 200
HTTP_201_CREATED = 201
HTTP_202_ACCEPTED = 202
HTTP_203_NON_AUTHORITATIVE_INFORMATION = 203
HTTP_204_NO_CONTENT = 204
HTTP_205_RESET_CONTENT = 205
HTTP_206_PARTIAL_CONTENT = 206
HTTP_300_MULTIPLE_CHOICES = 300
HTTP_301_MOVED_PERMANENTLY = 301
HTTP_302_FOUND = 302
HTTP_303_SEE_OTHER = 303
HTTP_304_NOT_MODIFIED = 304
HTTP_305_USE_PROXY = 305
HTTP_306_RESERVED = 306
HTTP_307_TEMPORARY_REDIRECT = 307
HTTP_400_BAD_REQUEST = 400
HTTP_401_UNAUTHORIZED = 401
HTTP_402_PAYMENT_REQUIRED = 402
HTTP_403_FORBIDDEN = 403
HTTP_404_NOT_FOUND = 404
HTTP_405_METHOD_NOT_ALLOWED = 405
HTTP_406_NOT_ACCEPTABLE = 406
HTTP_407_PROXY_AUTHENTICATION_REQUIRED = 407
HTTP_408_REQUEST_TIMEOUT = 408
HTTP_409_CONFLICT = 409
HTTP_410_GONE = 410
HTTP_411_LENGTH_REQUIRED = 411
HTTP_412_PRECONDITION_FAILED = 412
HTTP_413_REQUEST_ENTITY_TOO_LARGE = 413
HTTP_414_REQUEST_URI_TOO_LONG = 414
HTTP_415_UNSUPPORTED_MEDIA_TYPE = 415
HTTP_416_REQUESTED_RANGE_NOT_SATISFIABLE = 416
HTTP_417_EXPECTATION_FAILED = 417
HTTP_500_INTERNAL_SERVER_ERROR = 500
HTTP_501_NOT_IMPLEMENTED = 501
HTTP_502_BAD_GATEWAY = 502
HTTP_503_SERVICE_UNAVAILABLE = 503
HTTP_504_GATEWAY_TIMEOUT = 504
HTTP_505_HTTP_VERSION_NOT_SUPPORTED = 505
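# A minimal usage sketch (hypothetical view code): the named constants read
# better than bare integers, e.g.
#
#   return Response(content, status=HTTP_201_CREATED)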
| bsd-3-clause | 6,804,221,208,206,848,000 | 29.3125 | 77 | 0.768385 | false | 2.724719 | false | false | false |
dlebauer/plantcv | lib/plantcv/dev/roi_multi_objects.py | 1 | 3484 | import cv2
import numpy as np
from plantcv import print_image, fatal_error
### Find Objects Partially Inside Region of Interest or Cut Objects to Region of Interest
def roi_objects(img,roi_type,roi_contour, roi_hierarchy,object_contour, obj_hierarchy, device, debug=False):
# img = img to display kept objects
# roi_type = 'cutto' or 'partial' (for partially inside)
  # roi_contour = contour of roi, output from "View and Adjust ROI" function
  # roi_hierarchy = hierarchy of roi, output from "View and Adjust ROI" function
  # object_contour = contours of objects, output from "Identifying Objects" function
  # obj_hierarchy = hierarchy of objects, output from "Identifying Objects" function
# device = device number. Used to count steps in the pipeline
device +=1
if len(np.shape(img))==3:
ix,iy,iz=np.shape(img)
else:
ix,iy=np.shape(img)
size = ix,iy,3
background = np.zeros(size, dtype=np.uint8)
ori_img=np.copy(img)
w_back=background+255
background1 = np.zeros(size, dtype=np.uint8)
background2 = np.zeros(size, dtype=np.uint8)
# Allows user to find all objects that are completely inside or overlapping with ROI
if roi_type=='partial':
for c,cnt in enumerate(object_contour):
length=(len(cnt)-1)
stack=np.vstack(cnt)
test=[]
keep=False
for i in range(0,length):
pptest=cv2.pointPolygonTest(roi_contour[0], (stack[i][0],stack[i][1]), False)
if int(pptest)!=-1:
keep=True
if keep==True:
if obj_hierarchy[0][c][3]>-1:
cv2.drawContours(w_back,object_contour,c, (255,255,255),-1, lineType=8,hierarchy=obj_hierarchy)
else:
cv2.drawContours(w_back,object_contour,c, (0,0,0),-1, lineType=8,hierarchy=obj_hierarchy)
else:
cv2.drawContours(w_back,object_contour,c, (255,255,255),-1, lineType=8,hierarchy=obj_hierarchy)
kept=cv2.cvtColor(w_back, cv2.COLOR_RGB2GRAY )
kept_obj= cv2.bitwise_not(kept)
mask=np.copy(kept_obj)
obj_area=cv2.countNonZero(kept_obj)
kept_cnt,hierarchy=cv2.findContours(kept_obj,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE)
cv2.drawContours(ori_img,kept_cnt,-1, (0,255,0),-1, lineType=8,hierarchy=hierarchy)
cv2.drawContours(ori_img,roi_contour,-1, (255,0,0),5, lineType=8,hierarchy=roi_hierarchy)
  # Allows user to cut objects to the ROI (all objects completely outside ROI will not be kept)
elif roi_type=='cutto':
cv2.drawContours(background1,object_contour,-1, (255,255,255),-1, lineType=8,hierarchy=obj_hierarchy)
roi_points=np.vstack(roi_contour[0])
cv2.fillPoly(background2,[roi_points], (255,255,255))
obj_roi=cv2.multiply(background1,background2)
kept_obj=cv2.cvtColor(obj_roi, cv2.COLOR_RGB2GRAY)
mask=np.copy(kept_obj)
obj_area=cv2.countNonZero(kept_obj)
kept_cnt,hierarchy = cv2.findContours(kept_obj,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE)
cv2.drawContours(w_back,kept_cnt,-1, (0,0,0),-1)
cv2.drawContours(ori_img,kept_cnt,-1, (0,255,0),-1, lineType=8,hierarchy=hierarchy)
cv2.drawContours(ori_img,roi_contour,-1, (255,0,0),5, lineType=8,hierarchy=roi_hierarchy)
else:
fatal_error('ROI Type' + str(roi_type) + ' is not "cutto" or "partial"!')
if debug:
print_image(w_back, (str(device) + '_roi_objects.png'))
print_image(ori_img, (str(device) + '_obj_on_img.png'))
print_image(mask, (str(device) + '_roi_mask.png'))
#print ('Object Area=', obj_area)
  return device, kept_cnt, hierarchy, mask, obj_area
| gpl-2.0 | -8,480,578,486,346,879,000 | 44.25974 | 108 | 0.683984 | false | 2.834825 | false | false | false
jeffFranklin/iam-resttools | resttools/dao_implementation/nws.py | 1 | 2539 | """
Contains NWS DAO implementations.
"""
from resttools.mock.mock_http import MockHTTP
import re
from resttools.dao_implementation.live import get_con_pool, get_live_url
from resttools.dao_implementation.mock import get_mockdata_url
import logging
logger = logging.getLogger(__name__)
class File(object):
"""
The File DAO implementation returns generally static content. Use this
DAO with this configuration:
"""
_max_pool_size = 5
def __init__(self, conf):
self._conf = conf
if 'MAX_POOL_SIZE' in conf:
self._max_pool_size = conf['MAX_POOL_SIZE']
def getURL(self, url, headers):
logger.debug('file nws get url: ' + url)
response = get_mockdata_url("nws", self._conf, url, headers)
if response.status == 404:
logger.debug('status 404')
response.data = '{"error": {"code": "7000","message": "No record matched"}}'
return response
def postURL(self, url, headers, body):
logger.debug('file nws post url: ' + url)
response = get_mockdata_url("nws", self._conf, url, headers)
if response.status == 404:
logger.debug('status 404')
response.data = '{"error": {"code": "7000","message": "No record matched"}}'
return response
class Live(object):
"""
    This DAO provides real data. It requires further configuration via the conf dict (see the sketch at the end of this module).
"""
_max_pool_size = 5
def __init__(self, conf):
self._conf = conf
if 'MAX_POOL_SIZE' in conf:
self._max_pool_size = conf['MAX_POOL_SIZE']
pool = None
def getURL(self, url, headers):
if Live.pool is None:
Live.pool = self._get_pool()
return get_live_url(Live.pool, 'GET',
self._conf['HOST'],
url, headers=headers,
service_name='nws')
def postURL(self, url, headers, body):
if Live.pool is None:
Live.pool = self._get_pool()
return get_live_url(Live.pool, 'POST',
self._conf['HOST'],
url, headers=headers, body=body,
service_name='nws')
def _get_pool(self):
return get_con_pool(self._conf['HOST'],
self._conf['KEY_FILE'],
self._conf['CERT_FILE'],
self._conf['CA_FILE'],
max_pool_size=self._max_pool_size, verify_https=False)
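# A minimal configuration sketch (keys inferred from the classes above; the
# host, file paths and URL are placeholders, not real endpoints or credentials):
#
#   conf = {
#       'HOST': 'nws.example.edu',
#       'KEY_FILE': '/path/to/client.key',
#       'CERT_FILE': '/path/to/client.crt',
#       'CA_FILE': '/path/to/ca.pem',
#       'MAX_POOL_SIZE': 5,
#   }
#   dao = Live(conf)
#   response = dao.getURL('/some/resource', {'Accept': 'application/json'})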
| apache-2.0 | 8,144,495,644,167,200,000 | 29.963415 | 88 | 0.541552 | false | 3.954829 | false | false | false |
anaruse/chainer | chainer/links/connection/deformable_convolution_2d.py | 1 | 5529 | from chainer.functions import deformable_convolution_2d_sampler
from chainer import initializers
from chainer.initializers import constant
from chainer import link
from chainer.links.connection.convolution_2d import Convolution2D
from chainer import variable
class DeformableConvolution2D(link.Chain):
"""Two-dimensional deformable convolutional layer.
This link wraps the
convolution layer for offset prediction and
the :func:`~chainer.functions.deformable_convolution_2d_sampler`
function.
This also holds the filter weights and bias vectors of two
convolution layers as parameters.
Args:
in_channels (int): Number of channels of input arrays. If ``None``,
parameter initialization will be deferred until the first forward
data pass at which time the size will be determined.
        out_channels (int): Number of channels of output arrays.
ksize (int or pair of ints): Size of filters (a.k.a. kernels).
``ksize=k`` and ``ksize=(k, k)`` are equivalent.
stride (int or pair of ints): Stride of filter applications.
``stride=s`` and ``stride=(s, s)`` are equivalent.
pad (int or pair of ints): Spatial padding width for input arrays.
``pad=p`` and ``pad=(p, p)`` are equivalent.
offset_nobias (bool): If ``True``, then this link does not use the
bias term for the first convolution layer.
offset_initialW (:ref:`initializer <initializer>`): Initializer to
initialize the weight of the first convolution layer.
When it is :class:`numpy.ndarray`, its ``ndim`` should be 4.
offset_initial_bias (:ref:`initializer <initializer>`): Initializer to
initialize the bias of the first convolution layer.
If ``None``, the bias will be initialized to
zero. When it is :class:`numpy.ndarray`, its ``ndim`` should be 1.
deform_nobias (bool): If ``True``, then this link does not use the
bias term for the second convolution layer.
deform_initialW (:ref:`initializer <initializer>`): Initializer to
initialize the weight for the second convolution layer.
When it is :class:`numpy.ndarray`,
its ``ndim`` should be 4.
deform_initial_bias (:ref:`initializer <initializer>`): Initializer to
initialize the bias for the second convolution layer.
If ``None``, the bias will be initialized to
zero. When it is :class:`numpy.ndarray`, its ``ndim`` should be 1.
.. seealso::
See :func:`chainer.functions.deformable_convolution_2d_sampler`.
"""
def __init__(self, in_channels, out_channels, ksize, stride=1, pad=0,
offset_nobias=False, offset_initialW=None,
offset_initial_bias=None,
deform_nobias=False,
deform_initialW=None, deform_initial_bias=None):
super(DeformableConvolution2D, self).__init__()
kh, kw = _pair(ksize)
with self.init_scope():
self.offset_conv = Convolution2D(
in_channels, 2 * kh * kw, ksize, stride, pad,
offset_nobias, offset_initialW, offset_initial_bias)
self.deform_conv = DeformableConvolution2DSampler(
in_channels, out_channels, ksize, stride, pad,
deform_nobias, deform_initialW, deform_initial_bias)
def __call__(self, x):
"""Applies the deformable convolution.
Args:
x (~chainer.Variable): Input image.
Returns:
~chainer.Variable: Output of the deformable convolution.
"""
offset = self.offset_conv(x)
return self.deform_conv(x, offset)
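# A minimal usage sketch (shapes are illustrative): 3 input channels,
# 16 output channels, 3x3 kernel; pad=1 keeps the spatial size.
#
#   import numpy as np
#   conv = DeformableConvolution2D(3, 16, 3, stride=1, pad=1)
#   x = np.random.rand(2, 3, 32, 32).astype(np.float32)
#   y = conv(x)  # shape: (2, 16, 32, 32)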
class DeformableConvolution2DSampler(link.Link):
"""Apply a two-dimensional deformable convolution layer using offsets"""
def __init__(self, in_channels, out_channels, ksize, stride=1, pad=0,
nobias=False, initialW=None, initial_bias=None):
super(DeformableConvolution2DSampler, self).__init__()
self.ksize = ksize
self.stride = _pair(stride)
self.pad = _pair(pad)
self.out_channels = out_channels
self.initialW = initialW
if initialW is None:
initialW = constant.Zero()
with self.init_scope():
W_initializer = initializers._get_initializer(initialW)
self.W = variable.Parameter(W_initializer)
if nobias:
self.b = None
else:
if initial_bias is None:
initial_bias = initializers.Constant(0)
bias_initializer = initializers._get_initializer(initial_bias)
self.b = variable.Parameter(bias_initializer)
if in_channels is not None:
self._initialize_params(in_channels)
def _initialize_params(self, in_channels):
kh, kw = _pair(self.ksize)
W_shape = (self.out_channels, in_channels, kh, kw)
self.W.initialize(W_shape)
if self.b is not None:
self.b.initialize(self.out_channels)
def __call__(self, x, offset):
if self.W.data is None:
self._initialize_params(x.shape[1])
return deformable_convolution_2d_sampler(
x, offset, self.W, self.b, self.stride, self.pad)
def _pair(x):
if hasattr(x, '__getitem__'):
return x
return x, x
| mit | -8,680,863,434,744,701,000 | 40.261194 | 78 | 0.621993 | false | 4.157143 | false | false | false |
agermanidis/Pattern | graph/__init__.py | 1 | 46675 | #### PATTERN | GRAPH #################################################################################
# Copyright (c) 2010 University of Antwerp, Belgium
# Author: Tom De Smedt <[email protected]>
# License: BSD (see LICENSE.txt for details).
# http://www.clips.ua.ac.be/pages/pattern
######################################################################################################
# This module can benefit greatly from loading psyco.
from math import sqrt, pow
from math import sin, cos, atan2, degrees, radians, pi
from random import random
from heapq import heappush, heappop
from warnings import warn
from codecs import open
# float("inf") doesn't work on windows.
INFINITE = 1e20
# This module is standalone, line(), ellipse() and Text.draw()
# must be either implemented or patched:
def line(x1, y1, x2, y2, stroke=(0,0,0,1), strokewidth=1):
pass
def ellipse(x, y, width, height, fill=(0,0,0,1), stroke=None, strokewidth=1):
pass
class Text:
def __init__(self, string, **kwargs):
self.string = string
self.__dict__.update(kwargs)
def copy(self):
k = self.__dict__.copy()
k.pop("string")
return Text(self.string, **k)
def draw(self):
pass
class Vector(object):
def __init__(self, x=0, y=0):
self.x = x
self.y = y
class Base(object):
pass
#--- NODE --------------------------------------------------------------------------------------------
def _copy(x):
# A color can be represented as a tuple or as a nodebox.graphics.Color object,
# in which case it needs to be copied by invoking Color.copy().
return hasattr(x, "copy") and x.copy() or x
class Node(object):
def __init__(self, id="", radius=5, **kwargs):
""" A node with a unique id in the graph.
Node.id is drawn as a text label, unless optional parameter text=False.
Optional parameters include: fill, stroke, strokewidth, text, font, fontsize, fontweight.
"""
self.graph = None
self.links = Links()
self.id = id
self._x = 0 # Calculated by Graph.layout.update().
self._y = 0 # Calculated by Graph.layout.update().
self.force = Vector(0,0)
self.radius = radius
self.fill = kwargs.get("fill", None)
self.stroke = kwargs.get("stroke", (0,0,0,1))
self.strokewidth = kwargs.get("strokewidth", 1)
self.text = kwargs.get("text", True) and \
Text(unicode(id),
width = 85,
fill = kwargs.pop("text", (0,0,0,1)),
fontsize = kwargs.pop("fontsize", 11), **kwargs) or None
self._weight = None # Calculated by Graph.eigenvector_centrality().
self._centrality = None # Calculated by Graph.betweenness_centrality().
@property
def _distance(self):
# Graph.distance controls the (x,y) spacing between nodes.
return self.graph and float(self.graph.distance) or 1.0
def _get_x(self):
return self._x * self._distance
def _get_y(self):
return self._y * self._distance
def _set_x(self, v):
self._x = v / self._distance
def _set_y(self, v):
self._y = v / self._distance
x = property(_get_x, _set_x)
y = property(_get_y, _set_y)
@property
def edges(self):
return self.graph is not None \
           and [e for e in self.graph.edges if self.id in (e.node1.id, e.node2.id)] \
or []
@property
def weight(self):
if self.graph and self._weight is None:
self.graph.eigenvector_centrality()
return self._weight
@property
def centrality(self):
if self.graph and self._centrality is None:
self.graph.betweenness_centrality()
return self._centrality
def flatten(self, depth=1, _visited=None):
""" Recursively lists the node and nodes linked to it.
Depth 0 returns a list with the node.
Depth 1 returns a list with the node and all the directly linked nodes.
Depth 2 includes the linked nodes' links, and so on.
"""
_visited = _visited or {}
_visited[self.id] = (self, depth)
if depth >= 1:
for n in self.links:
if n.id not in _visited or _visited[n.id][1] < depth-1:
n.flatten(depth-1, _visited)
return [n for n,d in _visited.values()] # Fast, but not order-preserving.
def draw(self, weighted=False):
""" Draws the node as a circle with the given radius, fill, stroke and strokewidth.
Draws the node centrality as a shadow effect when weighted=True.
Draws the node text label.
Override this method in a subclass for custom drawing.
"""
# Draw the node weight as a shadow (based on node betweenness centrality).
if weighted is not False and self.centrality > (weighted==True and -1 or weighted):
w = self.centrality * 35
ellipse(
self.x,
self.y,
self.radius*2 + w,
self.radius*2 + w, fill=(0,0,0,0.2), stroke=None)
# Draw the node.
ellipse(
self.x,
self.y,
self.radius*2,
self.radius*2, fill=self.fill, stroke=self.stroke, strokewidth=self.strokewidth)
# Draw the node text label.
if self.text:
self.text.draw(
self.x + self.radius,
self.y + self.radius)
def contains(self, x, y):
return abs(self.x - x) < self.radius*2 and \
abs(self.y - y) < self.radius*2
def __repr__(self):
return "%s(id=%s)" % (self.__class__.__name__, repr(self.id))
def __eq__(self, node):
return isinstance(node, Node) and self.id == node.id
def __ne__(self, node):
return not self.__eq__(node)
def copy(self):
""" Returns a shallow copy of the node (i.e. linked nodes are not copied).
"""
n = Node(self.id, self.radius,
text = None,
fill = _copy(self.fill),
stroke = _copy(self.stroke),
strokewidth = self.strokewidth)
if self.text:
n.text = self.text.copy()
n.__class__ = self.__class__
return n
class Links(list):
def __init__(self):
""" A list in which each node has an associated edge.
The edge() method returns the edge for a given node id.
"""
self.edges = dict()
def append(self, node, edge=None):
if node.id not in self.edges:
list.append(self, node)
self.edges[node.id] = edge
def remove(self, node):
list.remove(self, node)
self.edges.pop(node.id, None)
def edge(self, node):
return self.edges.get(isinstance(node, Node) and node.id or node)
#--- EDGE --------------------------------------------------------------------------------------------
coordinates = lambda x, y, d, a: (x + d*cos(radians(a)), y + d*sin(radians(a)))
class Edge(object):
def __init__(self, node1, node2, weight=0.0, length=1.0, type=None, stroke=(0,0,0,1), strokewidth=1):
""" A connection between two nodes.
Its weight indicates the importance (not the cost) of the connection.
Its type is useful in a semantic network (e.g. "is-a", "is-part-of", ...)
"""
self.node1 = node1
self.node2 = node2
self.weight = weight
self.length = length
self.type = type
self.stroke = stroke
self.strokewidth = strokewidth
def draw(self, weighted=False, directed=False):
""" Draws the edge as a line with the given stroke and strokewidth (increased with Edge.weight).
Override this method in a subclass for custom drawing.
"""
w = weighted and self.weight or 0
line(
self.node1.x,
self.node1.y,
self.node2.x,
self.node2.y, stroke=self.stroke, strokewidth=self.strokewidth+w)
if directed:
self.draw_arrow(stroke=self.stroke, strokewidth=self.strokewidth+w)
def draw_arrow(self, **kwargs):
""" Draws the direction of the edge as an arrow on the rim of the receiving node.
"""
x0, y0 = self.node1.x, self.node1.y
x1, y1 = self.node2.x, self.node2.y
# Find the edge's angle based on node1 and node2 position.
a = degrees(atan2(y1-y0, x1-x0))
        # The arrow points to node2's rim instead of its center.
r = self.node2.radius
d = sqrt(pow(x1-x0, 2) + pow(y1-y0, 2))
x01, y01 = coordinates(x0, y0, d-r-1, a)
# Find the two other arrow corners under the given angle.
r = max(kwargs.get("strokewidth", 1) * 3, 6)
dx1, dy1 = coordinates(x01, y01, -r, a-20)
dx2, dy2 = coordinates(x01, y01, -r, a+20)
line(x01, y01, dx1, dy1, **kwargs)
line(x01, y01, dx2, dy2, **kwargs)
line(dx1, dy1, dx2, dy2, **kwargs)
def __repr__(self):
return "%s(id1=%s, id2=%s)" % (self.__class__.__name__, repr(self.node1.id), repr(self.node2.id))
def copy(self, node1, node2):
e = Edge(node1, node2, self.weight, self.length, self.type, _copy(self.stroke), self.strokewidth)
e.__class__ = self.__class__
return e
#--- GRAPH -------------------------------------------------------------------------------------------
def unique(list):
u, b = [], {}
for item in list:
if item not in b: u.append(item); b[item]=1
return u
# Graph layouts:
SPRING = "spring"
# Graph node sort order:
WEIGHT, CENTRALITY = "weight", "centrality"
ALL = "all"
class Graph(dict):
def __init__(self, layout=SPRING, distance=10.0):
""" A network of nodes connected by edges that can be drawn with a given layout.
"""
self.nodes = []
self.edges = []
self.root = None
self.distance = distance
self.layout = layout==SPRING and GraphSpringLayout(self) or GraphLayout(self)
    def append(self, type, *args, **kwargs):
        """ Appends a Node or Edge to the graph: Graph.append(Node, id="rabbit").
            Node and Edge instances (e.g., copies) are registered directly.
        """
        if type is Node:
            return self.add_node(*args, **kwargs)
        if type is Edge:
            return self.add_edge(*args, **kwargs)
        if isinstance(type, Node):
            return self.add_node(type, *args, **kwargs)
        if isinstance(type, Edge):
            # Register a pre-built (copied) edge and keep Node.links in sync.
            self.edges.append(type)
            type.node1.links.append(type.node2, edge=type)
            type.node2.links.append(type.node1, edge=type)
            return type
def add_node(self, id, *args, **kwargs):
""" Appends a new Node to the graph.
"""
n = isinstance(id, Node) and id or self.get(id) or Node(id, *args, **kwargs)
if n.id not in self:
self.nodes.append(n)
self[n.id] = n; n.graph = self
self.root = kwargs.get("root", False) and n or self.root
return n
def add_edge(self, id1, id2, *args, **kwargs):
""" Appends a new Edge to the graph.
"""
# Create nodes that are not yet part of the graph.
n1 = self.add_node(id1)
n2 = self.add_node(id2)
# Creates an Edge instance.
# If an edge (in the same direction) already exists, yields that edge instead.
e1 = n1.links.edge(n2)
if e1 and e1.node1 == n1 and e1.node2 == n2:
return e1
e2 = Edge(n1, n2, *args, **kwargs)
self.edges.append(e2)
# Synchronizes Node.links:
# A.links.edge(B) yields edge A->B
# B.links.edge(A) yields edge B->A
n1.links.append(n2, edge=e2)
n2.links.append(n1, edge=e1 or e2)
return e2
def remove(self, x):
""" Removes the given Node (and all its edges) or Edge from the graph.
Note: removing Edge a->b does not remove Edge b->a.
"""
if isinstance(x, Node) and x.id in self:
self.pop(x.id)
self.nodes.remove(x); x.graph = None
# Remove all edges involving the given node.
for e in list(self.edges):
if x in (e.node1, e.node2):
if x in e.node1.links: e.node1.links.remove(x)
if x in e.node2.links: e.node2.links.remove(x)
self.edges.remove(e)
if isinstance(x, Edge):
self.edges.remove(x)
def node(self, id):
""" Returns the node in the graph with the given id.
"""
return self.get(id)
def edge(self, id1, id2):
""" Returns the edge between the nodes with given id1 and id2.
"""
return id1 in self and id2 in self and self[id1].links.edge(id2) or None
def shortest_path(self, node1, node2, heuristic=None, directed=False):
""" Returns a list of nodes connecting the two nodes.
"""
try:
p = dijkstra_shortest_path(self, node1.id, node2.id, heuristic, directed)
p = [self[id] for id in p]
return p
except IndexError:
return None
def eigenvector_centrality(self, normalized=True, reversed=True, rating={}, iterations=100, tolerance=0.0001):
""" Calculates eigenvector centrality and returns a node => weight dictionary.
Node.weight is updated in the process.
Node.weight is higher for nodes with a lot of (indirect) incoming traffic.
"""
ec = eigenvector_centrality(self, normalized, reversed, rating, iterations, tolerance)
ec = dict([(self[id], w) for id, w in ec.items()])
for n, w in ec.items():
n._weight = w
return ec
def betweenness_centrality(self, normalized=True, directed=False):
""" Calculates betweenness centrality and returns a node => weight dictionary.
Node.centrality is updated in the process.
Node.centrality is higher for nodes with a lot of passing traffic.
"""
bc = brandes_betweenness_centrality(self, normalized, directed)
bc = dict([(self[id], w) for id, w in bc.items()])
for n, w in bc.items():
n._centrality = w
return bc
def sorted(self, order=WEIGHT, threshold=0.0):
""" Returns a list of nodes sorted by WEIGHT or CENTRALITY.
Nodes with a lot of traffic will be at the start of the list.
"""
o = lambda node: getattr(node, order)
nodes = [(o(n), n) for n in self.nodes if o(n) > threshold]
nodes = reversed(sorted(nodes))
return [n for w, n in nodes]
def prune(self, depth=0):
""" Removes all nodes with less or equal links than depth.
"""
for n in [n for n in self.nodes if len(n.links) <= depth]:
self.remove(n)
def fringe(self, depth=0):
""" For depth=0, returns the list of leaf nodes (nodes with only one connection).
For depth=1, returns the list of leaf nodes and their connected nodes, and so on.
"""
u = []; [u.extend(n.flatten(depth)) for n in self.nodes if len(n.links) == 1]
return unique(u)
@property
def density(self):
# Number of edges vs. maximum number of possible edges.
# E.g. <0.35 => sparse, >0.65 => dense, 1.0 => complete.
return 2.0*len(self.edges) / (len(self.nodes) * (len(self.nodes)-1))
def split(self):
return partition(self)
def update(self, iterations=10, **kwargs):
""" Graph.layout.update() is called the given number of iterations.
"""
for i in range(iterations):
self.layout.update(**kwargs)
def draw(self, weighted=False, directed=False):
""" Draws all nodes and edges.
"""
for e in self.edges:
e.draw(weighted, directed)
for n in reversed(self.nodes): # New nodes (with Node._weight=None) first.
n.draw(weighted)
def node_at(self, x, y):
""" Returns the node at (x,y) or None.
"""
for n in self.nodes:
if n.contains(x, y): return n
def copy(self, nodes=ALL):
""" Returns a copy of the graph with the given list of nodes (and connecting edges).
The layout will be reset.
"""
g = Graph(layout=None, distance=self.distance)
g.layout = self.layout.copy(graph=g)
for n in (nodes==ALL and self.nodes or nodes):
g.append(n.copy(), root=self.root==n)
for e in self.edges:
if e.node1.id in g and e.node2.id in g:
g.append(e.copy(
node1=g[e.node1.id],
node2=g[e.node2.id]))
return g
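# A minimal usage sketch (node ids are illustrative): build a small graph,
# iterate the spring layout, then query weight/centrality.
#
#   g = Graph()
#   g.add_edge("cat", "tail")
#   g.add_edge("cat", "purr")
#   g.add_edge("purr", "sound")
#   g.update(iterations=50)
#   print g["purr"].weight, g["purr"].centrality
#   print [n.id for n in g.sorted(order=CENTRALITY)]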
#--- GRAPH LAYOUT ------------------------------------------------------------------------------------
# Graph drawing or graph layout, as a branch of graph theory,
# applies topology and geometry to derive two-dimensional representations of graphs.
class GraphLayout:
def __init__(self, graph):
""" Calculates node positions iteratively when GraphLayout.update() is called.
"""
self.graph = graph
self.iterations = 0
def update(self):
self.iterations += 1
def reset(self):
self.iterations = 0
for n in self.graph.nodes:
n._x = 0
n._y = 0
n.force = Vector(0,0)
@property
def bounds(self):
""" Returns a (x, y, width, height)-tuple of the approximate layout dimensions.
"""
x0, y0 = +INFINITE, +INFINITE
x1, y1 = -INFINITE, -INFINITE
for n in self.graph.nodes:
if (n.x < x0): x0 = n.x
if (n.y < y0): y0 = n.y
if (n.x > x1): x1 = n.x
if (n.y > y1): y1 = n.y
return (x0, y0, x1-x0, y1-y0)
def copy(self, graph):
return GraphLayout(self, graph)
class GraphSpringLayout(GraphLayout):
def __init__(self, graph):
""" A force-based layout in which edges are regarded as springs.
The forces are applied to the nodes, pulling them closer or pushing them apart.
"""
# Based on: http://snipplr.com/view/1950/graph-javascript-framework-version-001/
GraphLayout.__init__(self, graph)
self.k = 4.0 # Force constant.
self.force = 0.01 # Force multiplier.
self.repulsion = 15 # Maximum repulsive force radius.
def _distance(self, node1, node2):
# Yields a tuple with distances (dx, dy, d, d**2).
# Ensures that the distance is never zero (which deadlocks the animation).
dx = node2._x - node1._x
dy = node2._y - node1._y
d2 = dx*dx + dy*dy
if d2 < 0.01:
dx = random() * 0.1 + 0.1
dy = random() * 0.1 + 0.1
d2 = dx*dx + dy*dy
return dx, dy, sqrt(d2), d2
def _repulse(self, node1, node2):
# Updates Node.force with the repulsive force.
dx, dy, d, d2 = self._distance(node1, node2)
if d < self.repulsion:
f = self.k**2 / d2
node2.force.x += f * dx
node2.force.y += f * dy
node1.force.x -= f * dx
node1.force.y -= f * dy
def _attract(self, node1, node2, weight=0, length=1.0):
# Updates Node.force with the attractive edge force.
dx, dy, d, d2 = self._distance(node1, node2)
d = min(d, self.repulsion)
f = (d2 - self.k**2) / self.k * length
f *= weight * 0.5 + 1
f /= d
node2.force.x -= f * dx
node2.force.y -= f * dy
node1.force.x += f * dx
node1.force.y += f * dy
def update(self, weight=10.0, limit=0.5):
""" Updates the position of nodes in the graph.
The weight parameter determines the impact of edge weight.
The limit parameter determines the maximum movement each update().
"""
GraphLayout.update(self)
# Forces on all nodes due to node-node repulsions.
for i, n1 in enumerate(self.graph.nodes):
for j, n2 in enumerate(self.graph.nodes[i+1:]):
self._repulse(n1, n2)
# Forces on nodes due to edge attractions.
for e in self.graph.edges:
self._attract(e.node1, e.node2, weight*e.weight, 1.0/(e.length or 0.01))
# Move nodes by given force.
for n in self.graph.nodes:
n._x += max(-limit, min(self.force * n.force.x, limit))
n._y += max(-limit, min(self.force * n.force.y, limit))
n.force.x = 0
n.force.y = 0
def copy(self, graph):
g = GraphSpringLayout(graph)
g.k, g.force, g.repulsion = self.k, self.force, self.repulsion
return g
#--- GRAPH THEORY ------------------------------------------------------------------------------------
def depth_first_search(node, visit=lambda node: False, traversable=lambda node, edge: True, _visited=None):
""" Visits all the nodes connected to the given root node, depth-first.
The visit function is called on each node.
Recursion will stop if it returns True, and subsequently dfs() will return True.
The traversable function takes the current node and edge,
and returns True if we are allowed to follow this connection to the next node.
        For example, the traversable for directed edges is as follows:
lambda node, edge: node == edge.node1
"""
stop = visit(node)
_visited = _visited or {}
_visited[node.id] = True
for n in node.links:
if stop: return True
if not traversable(node, node.links.edge(n)): continue
if not n.id in _visited:
stop = depth_first_search(n, visit, traversable, _visited)
return stop
dfs = depth_first_search
def breadth_first_search(node, visit=lambda node: False, traversable=lambda node, edge: True):
""" Visits all the nodes connected to the given root node, breadth-first.
"""
q = [node]
_visited = {}
while q:
node = q.pop(0)
if not node.id in _visited:
if visit(node):
return True
q.extend((n for n in node.links if traversable(node, node.links.edge(n))))
_visited[node.id] = True
return False
bfs = breadth_first_search
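# A minimal traversal sketch (continuing the graph g sketched above): visit()
# can stop the search early by returning True; restricting traversable() to
# edges leaving the current node makes the walk respect edge direction
# (cfr. the depth_first_search docstring).
#
#   def visit(node):
#       print node.id  # returning None keeps the traversal going
#   bfs(g["cat"], visit, traversable=lambda n, e: n == e.node1)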
def adjacency(graph, directed=False, reversed=False, stochastic=False, heuristic=None):
""" Returns a dictionary indexed by node id1's,
in which each value is a dictionary of connected node id2's linking to the edge weight.
If directed=True, edges go from id1 to id2, but not the other way.
If stochastic=True, all the weights for the neighbors of a given node sum to 1.
A heuristic function can be given that takes two node id's and returns
an additional cost for movement between the two nodes.
"""
map = {}
for n in graph.nodes:
map[n.id] = {}
for e in graph.edges:
id1, id2 = not reversed and (e.node1.id, e.node2.id) or (e.node2.id, e.node1.id)
map[id1][id2] = 1.0 - 0.5 * e.weight
if heuristic:
map[id1][id2] += heuristic(id1, id2)
if not directed:
map[id2][id1] = map[id1][id2]
if stochastic:
for id1 in map:
n = sum(map[id1].values())
for id2 in map[id1]:
map[id1][id2] /= n
return map
def dijkstra_shortest_path(graph, id1, id2, heuristic=None, directed=False):
""" Dijkstra algorithm for finding shortest paths.
Raises an IndexError between nodes on unconnected graphs.
"""
# Based on: Connelly Barnes, http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/119466
def flatten(list):
# Flattens a linked list of the form [0,[1,[2,[]]]]
while len(list) > 0:
yield list[0]; list=list[1]
G = adjacency(graph, directed=directed, heuristic=heuristic)
q = [(0, id1, ())] # Heap of (cost, path_head, path_rest).
visited = set() # Visited nodes.
while True:
(cost1, n1, path) = heappop(q)
if n1 not in visited:
visited.add(n1)
if n1 == id2:
return list(flatten(path))[::-1] + [n1]
path = (n1, path)
for (n2, cost2) in G[n1].iteritems():
if n2 not in visited:
heappush(q, (cost1 + cost2, n2, path))
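# A minimal sketch: Graph.shortest_path() wraps this function; a heuristic can
# penalize certain hops (the node ids and the +0.5 penalty are illustrative,
# reusing the graph g sketched earlier).
#
#   path = g.shortest_path(g["cat"], g["sound"],
#       heuristic=lambda id1, id2: id2 == "purr" and 0.5 or 0)
#   print [n.id for n in path]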
def brandes_betweenness_centrality(graph, normalized=True, directed=False):
""" Betweenness centrality for nodes in the graph.
Betweenness centrality is a measure of the number of shortests paths that pass through a node.
Nodes in high-density areas will get a good score.
"""
# Ulrik Brandes, A Faster Algorithm for Betweenness Centrality,
# Journal of Mathematical Sociology 25(2):163-177, 2001,
# http://www.inf.uni-konstanz.de/algo/publications/b-fabc-01.pdf
# Based on: Dijkstra's algorithm for shortest paths modified from Eppstein.
# Based on: NetworkX 1.0.1: Aric Hagberg, Dan Schult and Pieter Swart.
# http://python-networkx.sourcearchive.com/documentation/1.0.1/centrality_8py-source.html
G = graph.keys()
W = adjacency(graph, directed=directed)
betweenness = dict.fromkeys(G, 0.0) # b[v]=0 for v in G
for s in G:
S = []
P = {}
for v in G: P[v] = []
sigma = dict.fromkeys(G, 0) # sigma[v]=0 for v in G
D = {}
sigma[s] = 1
seen = {s: 0}
Q = [] # use Q as heap with (distance, node id) tuples
heappush(Q, (0, s, s))
while Q:
(dist, pred, v) = heappop(Q)
if v in D: continue # already searched this node
sigma[v] = sigma[v] + sigma[pred] # count paths
S.append(v)
D[v] = dist
for w in W[v].keys():
vw_dist = D[v] + W[v][w]
if w not in D and (w not in seen or vw_dist < seen[w]):
seen[w] = vw_dist
heappush(Q, (vw_dist, v, w))
sigma[w] = 0
P[w] = [v]
elif vw_dist == seen[w]: # handle equal paths
sigma[w] = sigma[w] + sigma[v]
P[w].append(v)
delta = dict.fromkeys(G,0)
while S:
w = S.pop()
for v in P[w]:
delta[v] = delta[v] + (float(sigma[v]) / float(sigma[w])) * (1.0 + delta[w])
if w != s:
betweenness[w] = betweenness[w] + delta[w]
if normalized:
# Normalize between 0.0 and 1.0.
m = max(betweenness.values())
if m == 0: m = 1
else:
m = 1
betweenness = dict([(id, w/m) for id, w in betweenness.iteritems()])
return betweenness
def eigenvector_centrality(graph, normalized=True, reversed=True, rating={}, iterations=100, tolerance=0.0001):
""" Eigenvector centrality for nodes in the graph (cfr. Google's PageRank).
Eigenvector centrality is a measure of the importance of a node in a directed network.
It rewards nodes with a high potential of (indirectly) connecting to high-scoring nodes.
Nodes with no incoming connections have a score of zero.
If you want to measure outgoing connections, reversed should be False.
"""
# Based on: NetworkX, Aric Hagberg ([email protected])
# http://python-networkx.sourcearchive.com/documentation/1.0.1/centrality_8py-source.html
def normalize(vector):
w = 1.0 / (sum(vector.values()) or 1)
for node in vector:
vector[node] *= w
return vector
G = adjacency(graph, directed=True, reversed=reversed)
v = normalize(dict([(n, random()) for n in graph])) # Node ID => weight vector.
# Eigenvector calculation using the power iteration method: y = Ax.
# It has no guarantee of convergence.
for i in range(iterations):
v0 = v
v = dict.fromkeys(v0.keys(), 0)
for n1 in v:
for n2 in G[n1]:
v[n1] += 0.01 + v0[n2] * G[n1][n2] * rating.get(n1, 1)
normalize(v)
e = sum([abs(v[n]-v0[n]) for n in v]) # Check for convergence.
if e < len(G) * tolerance:
if normalized:
# Normalize between 0.0 and 1.0.
m = max(v.values()) or 1
v = dict([(id, w/m) for id, w in v.items()])
return v
warn("node weight is 0 because eigenvector_centrality() did not converge.", Warning)
return dict([(n, 0) for n in G])
# a | b => all elements from a and all the elements from b.
# a & b => elements that appear in a as well as in b.
# a - b => elements that appear in a but not in b.
def union(a, b):
return [x for x in a] + [x for x in b if x not in a]
def intersection(a, b):
return [x for x in a if x in b]
def difference(a, b):
return [x for x in a if x not in b]
def partition(graph):
""" Returns a list of unconnected subgraphs.
"""
# Creates clusters of nodes and directly connected nodes.
# Iteratively merges two clusters if they overlap.
# Optimized: about 2x faster than original implementation.
g = []
for n in graph.nodes:
g.append(dict.fromkeys([n.id for n in n.flatten()], True))
for i in reversed(range(len(g))):
for j in reversed(range(i+1, len(g))):
if g[i] and g[j] and len(intersection(g[i], g[j])) > 0:
g[i] = union(g[i], g[j])
g[j] = []
g = [graph.copy(nodes=[graph[id] for id in n]) for n in g if n]
g.sort(lambda a, b: len(b) - len(a))
return g
#--- GRAPH MAINTENANCE -------------------------------------------------------------------------------
# Utility commands for safe linking and unlinking of nodes,
# with respect for the surrounding nodes.
def unlink(graph, node1, node2=None):
""" Removes the edges between node1 and node2.
If only node1 is given, removes all edges to and from it.
This does not remove node1 from the graph.
"""
for e in list(graph.edges):
if node1 in (e.node1, e.node2) and node2 in (e.node1, e.node2, None):
graph.edges.remove(e)
try:
node1.links.remove(node2)
node2.links.remove(node1)
except: # 'NoneType' object has no attribute 'links'
pass
def redirect(graph, node1, node2):
""" Connects all of node1's edges to node2 and unlinks node1.
"""
    for e in list(graph.edges):
        if node1 in (e.node1, e.node2):
if e.node1 == node1 and e.node2 != node2:
graph.append(e.copy(node2, e.node2))
if e.node2 == node1 and e.node1 != node2:
graph.append(e.copy(e.node1, node2))
unlink(graph, node1)
def cut(graph, node):
""" Unlinks the given node, but keeps edges intact by connecting the surrounding nodes.
If A, B, C, D are nodes and A->B, B->C, B->D, if we then cut B: A->C, A->D.
"""
    for e in list(graph.edges):
if node in (e.node1, e.node2):
for n in node.links:
if e.node1 == node and e.node2 != n:
graph.append(e.copy(n, e.node2))
if e.node2 == node and e.node1 != n:
graph.append(e.copy(e.node1, n))
unlink(graph, node)
def insert(graph, node, a, b):
""" Inserts the given node between node a and node b.
If A, B, C are nodes and A->B, if we then insert C: A->C, C->B.
"""
    for e in list(graph.edges):
for (n1,n2) in ((a,b), (b,a)):
if e.node1 == n1 and e.node2 == n2:
graph.append(e.copy(node, n2))
if e.node1 == n2 and e.node2 == n1:
graph.append(e.copy(n2, node))
unlink(graph, a, b)
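# A minimal sketch of the maintenance helpers on a chain A->B->C
# (node ids are illustrative):
#
#   g = Graph()
#   g.add_edge("A", "B"); g.add_edge("B", "C")
#   cut(g, g["B"])                               # A->C; B keeps no edges
#   insert(g, g.add_node("D"), g["A"], g["C"])   # A->D, D->C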
#--- HTML CANVAS RENDERER ----------------------------------------------------------------------------
import os, shutil, glob
try:
MODULE = os.path.dirname(__file__)
except:
MODULE = ""
DEFAULT, INLINE = "default", "inline"
HTML, CANVAS, STYLE, SCRIPT, DATA = "html", "canvas", "style", "script", "data"
class HTMLCanvasRenderer:
def __init__(self, graph, **kwargs):
self.graph = graph
self._source = \
"<!DOCTYPE html>\n" \
"<html>\n" \
"<head>\n" \
"\t<title>%s</title>\n" \
"\t<meta http-equiv=\"Content-Type\" content=\"text/html; charset=utf-8\" />\n" \
"\t%s\n" \
"\t<!--[if lte IE 8]><script type=\"text/javascript\" src=\"%sexcanvas.js\"></script><![endif]-->\n" \
"\t<script type=\"text/javascript\" src=\"%sgraph.js\"></script>\n" \
"\t%s\n" \
"</head>\n" \
"<body onload=\"javascript:init_%s();\">\n" \
"\t<div id=\"%s\" style=\"width:%spx; height:%spx;\">\n" \
"\t\t<canvas id=\"%s\" width=\"%s\" height=\"%s\">\n" \
"\t\t</canvas>\n" \
"\t</div>\n" \
"\t<p>Generated with " \
"<a href=\"http://www.clips.ua.ac.be/pages/pattern\">Pattern</a>.</p>\n" \
"</body>\n" \
"</html>"
# HTML
self.title = "Graph" # <title>Graph</title>
self.javascript = "js/" # Path to excanvas.js + graph.js.
self.stylesheet = INLINE # Either None, INLINE, DEFAULT (screen.css) or a custom path.
self.id = "graph" # <div id="graph">
self.ctx = "_ctx" # <canvas id="_ctx" width=700 height=500>
self.width = 700 # Canvas width in pixels.
self.height = 500 # Canvas height in pixels.
# Javascript:Graph
self.frames = 500 # Number of frames of animation.
self.fps = 20 # Frames per second.
self.ipf = 2 # Iterations per frame.
self.weighted = False # Indicate betweenness centrality as a shadow?
self.directed = False # Indicate edge direction with an arrow?
self.prune = None # None or int, calls Graph.prune() in Javascript.
self.pack = True # Shortens leaf edges, adds eigenvector weight to node radius.
# Javascript:GraphLayout
self.distance = 10 # Node spacing.
self.k = 4.0 # Force constant.
self.force = 0.01 # Force dampener.
self.repulsion = 50 # Repulsive force radius.
# Data
self.weight = [WEIGHT, CENTRALITY] # Calculate these in Python, or True (in Javascript).
self.href = {} # Dictionary of Node.id => URL.
self.css = {} # Dictionary of Node.id => CSS classname.
# Default options.
# If a Node or Edge has one of these settings,
# it is not passed to Javascript to save bandwidth.
self.default = {
"radius": 5,
"fill": None,
"stroke": (0,0,0,1),
"strokewidth": 1,
"text": (0,0,0,1),
"fontsize": 11,
}
def _escape(self, s):
return s.replace("\"", "\\\"")
def _rgba(self, clr):
# Color or tuple to a CSS "rgba(255,255,255,1.0)" string.
return "\"rgba(%s,%s,%s,%.2f)\"" % (int(clr[0]*255), int(clr[1]*255), int(clr[2]*255), clr[3])
@property
def data(self):
""" Yields a string of Javascript code that loads the nodes and edges into variable g,
which is a Javascript Graph object (see graph.js).
This can be the response to a XMLHttpRequest, after wich you move g into your own variable.
"""
return "".join(self._data())
def _data(self):
if self.graph.nodes and isinstance(self.weight, (list, tuple)):
if WEIGHT in self.weight and self.graph.nodes[-1]._weight is None:
self.graph.eigenvector_centrality()
if CENTRALITY in self.weight and self.graph.nodes[-1]._centrality is None:
self.graph.betweenness_centrality()
s = []
s.append("var g = new Graph(document.getElementById(\"%s\"), %s);\n" % (self.ctx, self.distance))
s.append("var n = {")
if len(self.graph.nodes) > 0:
s.append("\n")
# Translate node properties to Javascript dictionary (var n).
for n in self.graph.nodes:
p = []
if n._x != 0:
p.append("x:%i" % n._x) # 0
if n._y != 0:
p.append("y:%i" % n._y) # 0
if n.radius != self.default["radius"]:
p.append("radius:%.1f" % n.radius) # 5.0
if n._weight is not None:
p.append("weight:%.2f" % n.weight) # 0.00
if n._centrality is not None:
p.append("centrality:%.2f" % n.centrality) # 0.00
if n.fill != self.default["fill"]:
p.append("fill:%s" % self._rgba(n.fill)) # [0,0,0,1.0]
if n.stroke != self.default["stroke"]:
p.append("stroke:%s" % self._rgba(n.stroke)) # [0,0,0,1.0]
if n.strokewidth != self.default["strokewidth"]:
p.append("strokewidth:%.1f" % n.strokewidth) # 0.5
if n.text and n.text.fill != self.default["text"]:
p.append("text:%s" % self._rgba(n.text.fill)) # [0,0,0,1.0]
if n.text and "font" in n.text.__dict__:
p.append("font:\"%s\"" % n.text.__dict__["font"]) # "sans-serif"
if n.text and n.text.__dict__.get("fontsize", self.default["fontsize"]) != self.default["fontsize"]:
p.append("fontsize:%i" % int(max(1, n.text.fontsize)))
if n.text and "fontweight" in n.text.__dict__: # "bold"
p.append("fontweight:\"%s\"" % n.text.__dict__["fontweight"])
if n.text and n.text.string != n.id:
p.append("label:\"%s\"" % n.text.string)
if n.id in self.href:
p.append("href:\"%s\"" % self.href[n.id])
if n.id in self.css:
p.append("css:\"%s\"" % self.css[n.id])
s.append("\t\"%s\": {%s},\n" % (self._escape(n.id), ", ".join(p)))
s.append("};\n")
s.append("var e = [")
if len(self.graph.edges) > 0:
s.append("\n")
# Translate edge properties to Javascript dictionary (var e).
for e in self.graph.edges:
id1, id2 = self._escape(e.node1.id), self._escape(e.node2.id)
p = []
if e.weight != 0:
p.append("weight:%.2f" % e.weight) # 0.00
if e.length != 1:
p.append("length:%.2f" % e.length) # 1.00
if e.type is not None:
p.append("type:\"%s\"" % self.type) # "is-part-of"
if e.stroke != self.default["stroke"]:
p.append("stroke:%s" % self._rgba(e.stroke)) # [0,0,0,1.0]
if e.strokewidth != self.default["strokewidth"]:
p.append("strokewidth:%.2f" % e.strokewidth) # 0.5
s.append("\t[\"%s\", \"%s\", {%s}],\n" % (id1, id2, ", ".join(p)))
s.append("];\n")
# Append the nodes to graph g.
s.append("for (var id in n) {\n"
"\tg.addNode(id, n[id]);\n"
"}\n")
# Append the edges to graph g.
s.append("for (var i=0; i < e.length; i++) {\n"
"\tvar n1 = g.nodeset[e[i][0]];\n"
"\tvar n2 = g.nodeset[e[i][1]];\n"
"\tg.addEdge(n1, n2, e[i][2]);\n"
"}")
return s
@property
def script(self):
""" Yields a string of Javascript code that loads the nodes and edges into variable g (Graph),
and starts the animation of the visualization by calling g.loop().
"""
return "".join(self._script())
def _script(self):
s = self._data()
s.append("\n")
# Apply node weight to node radius.
if self.pack:
s.append(
"for (var i=0; i < g.nodes.length; i++) {\n"
"\tvar n = g.nodes[i];\n"
"\tn.radius = n.radius + n.radius * n.weight;\n"
"}\n")
# Apply edge length (leaves get shorter edges).
if self.pack:
s.append(
"for (var i=0; i < g.nodes.length; i++) {\n"
"\tvar e = g.nodes[i].edges();\n"
"\tif (e.length == 1) {\n"
"\t\te[0].length *= 0.2;\n"
"\t}\n"
"}\n")
# Apply eigenvector and betweenness centrality.
if self.weight is True:
s.append(
"g.eigenvectorCentrality();\n"
"g.betweennessCentrality();\n")
# Apply pruning.
if self.prune is not None:
s.append(
"g.prune(%s);\n" % self.prune)
# Include the layout settings (for clarity).
s.append("g.layout.k = %s; // Force constant (= edge length).\n"
"g.layout.force = %s; // Repulsive strength.\n"
"g.layout.repulsion = %s; // Repulsive radius.\n" % (
self.k, self.force, self.repulsion))
# Start the graph animation loop.
s.append("// Start the animation loop.\n")
s.append("g.loop({frames:%s, fps:%s, ipf:%s, weighted:%s, directed:%s});" % (
int(self.frames),
int(self.fps),
int(self.ipf),
str(self.weighted).lower(),
str(self.directed).lower()))
return s
@property
def canvas(self):
""" Yields a string of HTML with a <div id="graph"> containing a HTML5 <canvas> element.
"""
s = [
"<div id=\"%s\" style=\"width:%spx; height:%spx;\">\n" % (self.id, self.width, self.height),
"\t<canvas id=\"%s\" width=\"%s\" height=\"%s\">\n" % (self.ctx, self.width, self.height),
"\t</canvas>\n",
"</div>"
]
#s.append("\n<script type=\"text/javascript\">\n")
#s.append("".join(self._script()).replace("\n", "\n\t"))
#s.append("\n</script>")
return "".join(s)
@property
def style(self):
""" Yields a string of CSS for <div id="graph">.
"""
return \
"body { font: 11px sans-serif; }\n" \
"a { color: dodgerblue; }\n" \
"#%s {\n" \
"\tdisplay: block;\n" \
"\tposition: relative;\n" \
"\toverflow: hidden;\n" \
"\tborder: 1px solid #ccc;\n" \
"}\n" \
"#%s canvas { }\n" \
".node-label { font-size: 11px; }" % (self.id, self.id)
@property
def html(self):
""" Yields a string of HTML to visualize the graph using a force-based spring layout.
The js parameter sets the path to graph.js and excanvas.js (by default, "./").
"""
js = self.javascript.rstrip("/")
js = (js and js or ".")+"/"
if self.stylesheet == INLINE:
css = self.style.replace("\n","\n\t\t").rstrip("\t")
css = "<style type=\"text/css\">\n\t\t%s\n\t</style>" % css
elif self.stylesheet == DEFAULT:
css = "<link rel=\"stylesheet\" href=\"screen.css\" type=\"text/css\" media=\"screen\" />"
        elif self.stylesheet is not None:
            css = "<link rel=\"stylesheet\" href=\"%s\" type=\"text/css\" media=\"screen\" />" % self.stylesheet
        else:
            css = ""
s = self._script()
s = "".join(s)
s = s.replace("\n", "\n\t\t")
s = "<script type=\"text/javascript\">\n\tfunction init_%s() {\n\t\t%s\n\t}\n\t</script>" % (self.id, s)
s = s.rstrip()
s = self._source % (
self.title,
css,
js,
js,
s,
self.id,
self.id,
self.width,
self.height,
self.ctx,
self.width,
self.height)
return s
def render(self, type=HTML):
if type == HTML:
return self.html
if type == CANVAS:
return self.canvas
if type == STYLE:
return self.style
if type == SCRIPT:
return self.script
if type == DATA:
return self.data
def export(self, path, overwrite=False, encoding="utf-8"):
""" Generates a folder at the given path containing an index.html
that visualizes the graph using the HTML5 <canvas> tag.
"""
if overwrite and os.path.exists(path):
shutil.rmtree(path)
os.mkdir(path) # With overwrite=False, raises OSError if the path already exists.
os.mkdir(os.path.join(path, "js"))
# Copy js/graph.js + js/excanvas.js (unless a custom path is given.)
if self.javascript == "js/":
for f in glob.glob(os.path.join(MODULE, "js", "*.js")):
shutil.copy(f, os.path.join(path, "js", os.path.basename(f)))
# Create screen.css.
if self.stylesheet == DEFAULT:
f = open(os.path.join(path, "screen.css"), "w")
f.write(self.style)
f.close()
# Create index.html.
f = open(os.path.join(path, "index.html"), "w", encoding=encoding)
f.write(self.html)
f.close()
def render(graph, type=HTML, **kwargs):
renderer = HTMLCanvasRenderer(graph)
renderer.default.update(kwargs.get("default", {}))
kwargs["default"] = renderer.default
kwargs["stylesheet"] = kwargs.get("stylesheet", INLINE)
for k,v in kwargs.items():
if k in renderer.__dict__:
renderer.__dict__[k] = v
return renderer.render(type)
def export(graph, path, overwrite=False, encoding="utf-8", **kwargs):
renderer = HTMLCanvasRenderer(graph)
renderer.default.update(kwargs.get("default", {}))
kwargs["default"] = renderer.default
kwargs["stylesheet"] = kwargs.get("stylesheet", DEFAULT)
for k,v in kwargs.items():
if k in renderer.__dict__:
renderer.__dict__[k] = v
return renderer.export(path, overwrite)
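# A minimal usage sketch (the output folder name is illustrative): writes
# index.html, screen.css and js/ into a new folder.
#
#   export(g, "graph_demo", overwrite=True, width=700, height=500, directed=True)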
| bsd-3-clause | -9,090,473,286,160,558,000 | 39.693112 | 118 | 0.526213 | false | 3.600077 | false | false | false |
pragle/craft | web/model/db_model.py | 1 | 1193 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Michal Szczepanski'
from sqlalchemy.sql.schema import Column, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Integer, String, Boolean, Binary
Base = declarative_base()
class DatabaseConnection(Base):
__tablename__ = 'database_connection'
connection_name = Column(String, unique=True, primary_key=True)
name = Column(String)
host = Column(String)
port = Column(Integer)
username = Column(String)
password = Column(String)
database = Column(String)
'''
class SSHConnection(Base):
__tablename__ = 'ssh_connection'
id = Column(Integer, primary_key=True)
name = Column(String)
host = Column(String)
port = Column(String)
auth_id = Column(Integer, ForeignKey('ssh_connection_auth.id'))
class SSHConnectionAuth(Base):
__tablename__ = 'ssh_connection_auth'
id = Column(Integer, primary_key=True)
key = Column(Boolean, default=False)
key_data = Column(Binary)
username = Column(String)
password = Column(String)
connections = relationship('SSHConnection')
'''
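# A minimal wiring sketch (assumed, not part of this module): create the table
# in a local SQLite database and persist one connection profile.
#
#   from sqlalchemy import create_engine
#   from sqlalchemy.orm import sessionmaker
#
#   engine = create_engine('sqlite:///craft.db')
#   Base.metadata.create_all(engine)
#   session = sessionmaker(bind=engine)()
#   session.add(DatabaseConnection(connection_name='local', name='pg-local',
#                                  host='127.0.0.1', port=5432, username='craft',
#                                  password='secret', database='craft'))
#   session.commit()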
| bsd-3-clause | -1,577,213,707,850,402,800 | 23.346939 | 67 | 0.694049 | false | 3.811502 | false | false | false |
wasade/picrust | picrust/ace.py | 1 | 3860 | #!/usr/bin/env python
# Author: Morgan Langille ([email protected])
# count_wagner.py
""" Application controller for the `ace' function within the R package `ape`.
File created on Feb 2012.
"""
from __future__ import division
from cogent.util.table import Table
from os.path import split, splitext
from os import remove, environ
from glob import glob
from cogent.app.util import CommandLineApplication, ResultPath, get_tmp_filename, ApplicationError
from cogent.app.parameters import ValuedParameter, FilePath
from cogent import LoadTree
from cogent import LoadTable
from picrust.util import get_picrust_project_dir
from os.path import join
__author__ = "Morgan Langille"
__copyright__ = "Copyright 2011-2013, The PICRUSt Project"
__credits__ = ["Morgan Langille", "Greg Caporaso"]
__license__ = "GPL"
__version__ = "1.0.0-dev"
__maintainer__ = "Morgan Langille"
__email__ = "[email protected]"
__status__ = "Development"
class Ace(CommandLineApplication):
""" Application controller for 'ace' fucntion within the 'ape' R package."""
ace_script_fp = join(get_picrust_project_dir(),'picrust','support_files','R','ace.R')
_command = ace_script_fp
_input_handler = '_input_as_string'
_suppress_stdout = False
_suppress_stderr = False
# Overridden to call script with R rather than directly - this is useful
    # because permissions on the script are set to 644 when PICRUSt is installed
# with setup.py. This is fine if we're executing it with R, but not if we're
# trying to execute it directly.
def _get_base_command(self):
""" Returns the full command string
input_arg: the argument to the command which represents the input
to the program, this will be a string, either
representing input or a filename to get input from
"""
command_parts = []
# Append a change directory to the beginning of the command to change
# to self.WorkingDir before running the command
# WorkingDir should be in quotes -- filenames might contain spaces
cd_command = ''.join(['cd ',str(self.WorkingDir),';'])
if self._command is None:
raise ApplicationError, '_command has not been set.'
command = self._command
parameters = self.Parameters
command_parts.append(cd_command)
command_parts.append("R")
command_parts.append("-f")
command_parts.append(command)
command_parts.append("--args")
command_parts.append(self._command_delimiter.join(filter(\
None,(map(str,parameters.values())))))
return self._command_delimiter.join(command_parts).strip()
BaseCommand = property(_get_base_command)
def ace_for_picrust(tree_path,trait_table_path,method='pic',HALT_EXEC=False):
'''Runs the Ace application controller given path of tree and trait table and returns a Table'''
#initialize Ace app controller
ace=Ace(HALT_EXEC=HALT_EXEC)
tmp_output_count_path=get_tmp_filename()
tmp_output_prob_path=get_tmp_filename()
#quote file names
tree_path='"{0}"'.format(tree_path)
trait_table_path='"{0}"'.format(trait_table_path)
as_string = " ".join([tree_path,trait_table_path,method,tmp_output_count_path,tmp_output_prob_path])
#Run ace here
result = ace(data=as_string)
#Load the output into Table objects
try:
asr_table=LoadTable(filename=tmp_output_count_path,header=True,sep='\t')
except IOError:
raise RuntimeError,\
("R reported an error on stderr:"
" %s" % "\n".join(result["StdErr"].readlines()))
asr_prob_table=LoadTable(filename=tmp_output_prob_path,header=True,sep='\t')
#Remove tmp files
remove(tmp_output_count_path)
remove(tmp_output_prob_path)
return asr_table,asr_prob_table
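# Usage sketch (file paths are hypothetical; assumes R plus the 'ape' package
# are installed and the PICRUSt support_files are in place):
#
#   asr_table, asr_prob_table = ace_for_picrust('study.tree',
#                                               'trait_table.tab',
#                                               method='pic')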
| gpl-3.0 | -2,905,388,587,920,334,000 | 36.475728 | 104 | 0.673834 | false | 3.648393 | false | false | false |
frederick623/HTI | fa_util_py/HTI_ExportSIToMSS.py | 1 | 16771 | import ael
import acm
import time
import datetime
import os
import stat
import smtplib
import shutil
import string
#import HTI_DB_Functions
from datetime import date
from datetime import datetime
import shlex
#import HTI_MailFunction
SEQNBR = 0
TRDNBR = 1
SETTTYPE = 2
VALUEDAY = 3
TEXT1 = 4
PRFID = 5
AMOUNT = 6
ISIN = 7
INSTYPE = 8
UI_ISIN = 9
'''
select s.seqnbr, t.trdnbr, s.type, s.value_day, t.text1
from settlement s, trade t, instrument i
where s.trdnbr = t.trdnbr
and t.insaddr = i.insaddr
and s.status = 'Released'
and s.updat_time >= Today and s.updat_time < Today + 1
and s.type in ('Security Nominal', 'End Security')
'''
dsn = "HTIConnString"
msse_fa_acc_mapping = {'Trading Book 5': '02-0238771-22',
'Trading Book 6': '02-0228640-30',
'Trading Book 7': '02-0228640-30',
'Trading Book 8': '02-0228640-30',
'Trading Book 13': '02-0263880-22',
'Trading Book 14': '02-0228640-30',
'Trading Book 17': '02-0238771-22'}
pfs = acm.FPhysicalPortfolio.Select('')
def get_dates():
dates = []
dates.append("TODAY")
dates.append(ael.date('2015-05-28'))
return dates
def get_all_setttypes():
	settType = []
	settType.append('Coupon')
	settType.append('Coupon Transfer')
	settType.sort()
	return settType
def get_all_instypes():
insType = []
insType.append('BasketRepo/Reverse')
insType.append('BasketSecurityLoan')
insType.append('Bill')
insType.append('Bond')
insType.append('BondIndex')
insType.append('BuySellback')
insType.append('CallAccount')
insType.append('Cap')
insType.append('CashCollateral')
insType.append('CD')
insType.append('Certificate')
insType.append('CFD')
insType.append('CLN')
insType.append('Collar')
insType.append('Collateral')
insType.append('Combination')
insType.append('Commodity')
insType.append('Commodity Index')
insType.append('Commodity Variant')
insType.append('Convertible')
insType.append('Credit Balance')
insType.append('CreditDefaultSwap')
insType.append('CreditIndex')
insType.append('Curr')
insType.append('CurrSwap')
insType.append('Deposit')
insType.append('Depositary Receipt')
insType.append('Dividend Point Index')
insType.append('DualCurrBond')
insType.append('EquityIndex')
insType.append('EquitySwap')
insType.append('ETF')
insType.append('Flexi Bond')
insType.append('Floor')
insType.append('FRA')
insType.append('FreeDefCF')
insType.append('FRN')
insType.append('Fund')
insType.append('Future/Forward')
insType.append('Fx Rate')
insType.append('FXOptionDatedFwd')
insType.append('FxSwap')
insType.append('IndexLinkedBond')
insType.append('IndexLinkedSwap')
insType.append('LEPO')
insType.append('MBS/ABS')
insType.append('MultiAsset')
insType.append('MultiOption')
insType.append('None')
insType.append('Option')
insType.append('Portfolio Swap')
insType.append('PriceIndex')
insType.append('PriceSwap')
insType.append('PromisLoan')
insType.append('RateIndex')
insType.append('Repo/Reverse')
insType.append('SecurityLoan')
insType.append('Stock')
insType.append('StockRight')
insType.append('Swap')
insType.append('TotalReturnSwap')
insType.append('UnKnown')
insType.append('VarianceSwap')
insType.append('VolatilitySwap')
insType.append('Warrant')
insType.append('Zero')
insType.sort()
return insType
def get_all_portfolios():
portfolios = []
for port in ael.Portfolio.select():
portfolios.append(port.display_id())
portfolios.sort()
return portfolios
def get_all_acquirers():
acquirers = []
for acq in ael.Party.select("type = 'Intern Dept'"):
acquirers.append(acq.display_id())
acquirers.sort()
return acquirers
def get_all_fileMsgType():
msgType = []
msgType.append("SI") # Sec In/Out
msgType.sort()
return msgType
def disable_variables(variables, enable = 0):
for i in variables:
for j in ael_variables:
if i == j[0]:
j[9] = enable
def get_all_status():
status = []
status.append('Released')
status.append('Pending Closure')
status.append('Closed')
status.sort()
return status
ael_variables = [['acquirers', 'Acquirers', 'string', get_all_acquirers(), 'HTIFP', 1, 1, 'Acquirers', None, 1], \
['sett_status', 'Settlement Status', 'string', get_all_status(), 'Released', 1, 1, 'Settlement Status', None, 1], \
['instypes', 'Instrument Types', 'string', get_all_instypes(), 'Bond', 1, 1, 'Instrument Types', None, 1], \
['not_setttypes', 'Not Settlement Types', 'string', get_all_setttypes(), 'Coupon,Coupon Transfer', 1, 1, 'Not Settlement Types', None, 1], \
['pf', 'Portfolio', 'string', get_all_portfolios(), None, 1, 1, 'Portfolio', None, 1], \
['filePath', 'File Path', 'string', None, 'c:\\temp', 1, 0, 'File Name', None, 1], \
['fileName', 'File Name', 'string', None, '<FileMsgType>_<YYYYMMDDhhmmss>.csv', 1, 0, 'File Name', None, 0], \
['participant_id', 'Participant Id', 'string', None, 'B01143', 1, 0, 'Haitong Participant Id', None, 1], \
['asofdate', 'Date', 'string', get_dates(), "TODAY", 1, 0, 'Date', None, 1], \
['fileMsgType', 'File Message Type', 'string', get_all_fileMsgType(), 'SI', 1, 0, 'File Message Type', None, 0]]
def EmailNotify(subject, messg, RECIPIENTS):
session = smtplib.SMTP(smtpserver)
BODY = string.join((
"From: %s" % SENDER,
"To: %s" % RECIPIENTS,
"Subject: %s" % subject,
"",
messg
), "\r\n")
#print BODY
if AUTHREQUIRED:
session.login(smtpuser, smtppass)
smtpresult = session.sendmail(SENDER, RECIPIENTS, BODY)
if smtpresult:
errstr = ''
for recip in smtpresult.keys():
			errstr = 'Could not deliver mail to: %s Server said: %s %s' % (recip, smtpresult[recip][0], smtpresult[recip][1])
raise smtplib.SMTPException, errstr
session.quit()
def ValidPortfolio(array_pf, portfolio):
for pf in array_pf:
if portfolio == pf:
return True
return False
def getExecBroker(ptyid):
p = ael.Party[ptyid]
for ai in p.additional_infos():
if ai.addinf_specnbr.field_name == 'Broker Ref':
return ai.value.strip()
return ''
def ConvertDateToYYYYMMDD(dt):
d = ael.date(dt).to_ymd()
yy = str(d[0])
mm = str(d[1])
if d[1] < 10:
mm = "0" + mm
dd = str(d[2])
if d[2] < 10:
dd = "0" + dd
return yy+mm+dd
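# Worked example (illustrative): ConvertDateToYYYYMMDD('2015-05-28') returns
# '20150528' -- to_ymd() yields (2015, 5, 28) and month/day are zero-padded.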
def getChildPortfolio(pPf, pfarr):
if (pPf == None):
return pfarr
for child in pPf.children():
pfid = child.display_id()
cPf = ael.Portfolio[pfid]
if cPf != None:
if cPf.compound == True:
pfarr = getChildPortfolio(cPf, pfarr)
else:
pfarr.append(pfid)
return pfarr
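# Usage sketch (the portfolio id below is hypothetical): collect every leaf
# portfolio under a compound portfolio, depth first:
#
#   leaves = getChildPortfolio(ael.Portfolio['EQUITY_ALL'], [])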
def ael_main(dict):
# Acquirers
acq_array_list = dict['acquirers']
acq_list = ''
for acq in acq_array_list:
if acq_list == '':
acq_list = "'" + acq + "'"
else:
acq_list = acq_list + ",'" + acq + "'"
# instypes
instype_array_list = dict['instypes']
instype_list = ''
for instype in instype_array_list:
if instype_list == '':
instype_list = "'" + instype + "'"
else:
instype_list = instype_list + ",'" + instype + "'"
# settlement status
sett_status_array_list = dict['sett_status']
sett_status_list = ''
for sett_status in sett_status_array_list:
if sett_status_list == '':
sett_status_list = "'" + sett_status + "'"
else:
sett_status_list = sett_status_list + ",'" + sett_status + "'"
# Portfolios
pf_array_list = dict['pf']
pf_list = ''
for pf in pf_array_list:
if pf_list == '':
pf_list = "'" + pf + "'"
else:
pf_list = pf_list + ",'" + pf + "'"
# sett_types
not_setttype_array_list = dict['not_setttypes']
not_setttype_list = ''
for setttype in not_setttype_array_list:
if not_setttype_list == '':
not_setttype_list = "'" + setttype + "'"
else:
not_setttype_list = not_setttype_list + ",'" + setttype + "'"
participant_id = dict['participant_id']
print 'pf_list', pf_list
print 'acq_list', acq_list
print 'sett_status_list', sett_status_list
print 'not_setttype_list', not_setttype_list
print 'instype_list', instype_list
# File Message Type
fileMsgType = dict['fileMsgType']
# Asof Date
asofdate = dict['asofdate']
if asofdate == 'TODAY':
d = ael.date_today().to_ymd()
d1 = ael.date_today().add_days(1).to_ymd()
else:
d = ael.date(asofdate).to_ymd()
d1 = ael.date(asofdate).add_days(1).to_ymd()
yy = str(d[0])
mm = str(d[1])
mm = "%02d" % int(mm)
dd = str(d[2])
dd = "%02d" % int(dd)
asofdate = yy+'-'+mm+'-'+dd
yy = str(d1[0])
mm = str(d1[1])
mm = "%02d" % int(mm)
dd = str(d1[2])
dd = "%02d" % int(dd)
d1_date = yy+'-'+mm+'-'+dd
# File Name
filePath = dict['filePath']
fileName = dict['fileName']
fileName = filePath + '\\' + fileName
genDate = ael.date_today()
timeStamp = time.strftime("%Y%m%d%H%M%S")
fileName = fileName.replace("<YYYYMMDDhhmmss>", timeStamp)
fileName = fileName.replace("<FileMsgType>", fileMsgType)
errMsg = ''
print fileName
f = open(fileName, "w")
# trade details
if fileMsgType == 'SI':
# Header
headerLine = "settleDate,instructionType,settleMethod,haitongParticipantId,market,stockCode,shares,payment,ccassClientAccountNo,haitongClientAccountNo"
headerLine = str(headerLine) + '\n'
print headerLine
f.write(headerLine)
strSql = """select s.seqnbr, t.trdnbr, s.type, s.value_day, t.text1, pf.prfid, s.amount, i.isin, i.instype, ui.isin
from settlement s, trade t, instrument i, party acq, portfolio pf, instrument ui
where s.trdnbr = t.trdnbr
and t.insaddr = i.insaddr
and t.acquirer_ptynbr = acq.ptynbr
and t.prfnbr = pf.prfnbr
and acq.ptyid in (%s)
and s.status in (%s)
and s.updat_time >= '%s' and s.updat_time < '%s'
and i.instype in (%s)
and t.category ~= 'Collateral'
and pf.prfid in (%s)
and i.und_insaddr *= ui.insaddr
and s.type in ('Security Nominal', 'End Security')""" % (acq_list, sett_status_list, asofdate, d1_date, instype_list, pf_list)
print strSql
recCnt = 0
rs = ael.asql(strSql)
columns, buf = rs
for table in buf:
for row in table:
print row
seqnbr = str(row[SEQNBR]).strip()
trdnbr = str(row[TRDNBR]).strip()
setttype = str(row[SETTTYPE]).strip()
valueday = str(row[VALUEDAY]).strip()
text1 = str(row[TEXT1]).strip()
sec_amount = str(row[AMOUNT]).strip()
instype = str(row[INSTYPE]).strip()
if instype == 'Repo/Reverse':
if text1 == '':
prfid = str(row[PRFID]).strip()
else:
prfid = text1
isin = str(row[UI_ISIN]).strip()
else:
prfid = str(row[PRFID]).strip()
isin = str(row[ISIN]).strip()
accountId = ''
try:
accountId = msse_fa_acc_mapping[prfid]
except:
print 'cannot get accountId'
settledt = ael.date(valueday).to_string("%Y-%m-%d")
if float(sec_amount) >= 0:
instructionType = 'DELIVER'
else:
instructionType = 'RECEIVE'
settlemethod = 'FOP'
marketcode = 'OTC'
payment = '0.00'
sec_amount = str(abs(float(sec_amount)))
payment_strSql = """select sum(s.amount) 'amount'
from settlement s, trade t, instrument i, party acq, portfolio pf, instrument ui
where s.trdnbr = t.trdnbr
and t.insaddr = i.insaddr
and t.acquirer_ptynbr = acq.ptynbr
and t.prfnbr = pf.prfnbr
and acq.ptyid in (%s)
and s.status in (%s)
and i.instype in (%s)
and t.category ~= 'Collateral'
and pf.prfid in (%s)
and i.und_insaddr *= ui.insaddr
and s.type not in ('Security Nominal', 'End Security')
and s.type not in (%s)
and s.value_day = '%s'
and t.trdnbr = %s""" % (acq_list, sett_status_list, instype_list, pf_list, not_setttype_list, settledt, int(trdnbr))
print payment_strSql
payment_rs = ael.asql(payment_strSql)
payment_columns, payment_buf = payment_rs
for payment_table in payment_buf:
for payment_row in payment_table:
payment = str(abs(float(str(payment_row[0]).strip())))
settlemethod = 'DVP'
print 'payment', payment
detailLine = settledt + ',' + instructionType + ',' + settlemethod + ',' + participant_id + ',' + marketcode + ',' + isin + ',' + sec_amount + ',' + payment + ',' + '' + ',' + accountId
detailLine = str(detailLine) + '\n'
recCnt = recCnt + 1
print detailLine
f.write(detailLine)
else:
recCnt = 0
f.close()
mb = acm.GetFunction("msgBox", 3)
if mb != None:
mb("Message", "File has been generated successfully at " + fileName, 0)
mb = None
return
| apache-2.0 | -2,448,934,486,967,091,700 | 35.458696 | 245 | 0.476179 | false | 3.801224 | false | false | false |
awesto/django-shop | shop/views/auth.py | 1 | 8484 | from django.contrib.auth import logout, get_user_model
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.tokens import default_token_generator
from django.core.exceptions import NON_FIELD_ERRORS
from django.utils.encoding import force_str
from django.utils.translation import gettext_lazy as _
from rest_framework import status
from rest_framework.views import APIView
from rest_framework.exceptions import ErrorDetail, ValidationError
from rest_framework.generics import GenericAPIView
from rest_framework.permissions import AllowAny
from rest_framework.renderers import JSONRenderer, BrowsableAPIRenderer
from rest_framework.response import Response
from rest_framework.settings import api_settings
from rest_auth.views import LoginView as OriginalLoginView, PasswordChangeView as OriginalPasswordChangeView
from shop.models.cart import CartModel
from shop.models.customer import CustomerModel
from shop.rest.renderers import CMSPageRenderer
from shop.serializers.auth import PasswordResetRequestSerializer, PasswordResetConfirmSerializer
from shop.signals import email_queued
class AuthFormsView(GenericAPIView):
"""
Generic view to handle authentication related forms such as user registration
"""
serializer_class = None
form_class = None
def post(self, request, *args, **kwargs):
if request.customer.is_visitor:
customer = CustomerModel.objects.get_or_create_from_request(request)
else:
customer = request.customer
form_data = request.data.get(self.form_class.scope_prefix, {})
form = self.form_class(data=form_data, instance=customer)
if form.is_valid():
form.save(request=request)
response_data = {form.form_name: {
'success_message': _("Successfully registered yourself."),
}}
return Response(response_data, status=status.HTTP_200_OK)
errors = dict(form.errors)
if 'email' in errors:
errors.update({NON_FIELD_ERRORS: errors.pop('email')})
return Response({form.form_name: errors}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
class LoginView(OriginalLoginView):
form_name = 'login_form'
def login(self):
"""
Logs in as the given user, and moves the items from the current to the new cart.
"""
try:
anonymous_cart = CartModel.objects.get_from_request(self.request)
except CartModel.DoesNotExist:
anonymous_cart = None
if self.request.customer.user.is_anonymous or self.request.customer.is_registered:
previous_user = None
else:
previous_user = self.request.customer.user
super().login() # this rotates the session_key
if not self.serializer.data.get('stay_logged_in'):
self.request.session.set_expiry(0) # log out when the browser is closed
authenticated_cart = CartModel.objects.get_from_request(self.request)
if anonymous_cart:
# an anonymous customer logged in, now merge his current cart with a cart,
# which previously might have been created under his account.
authenticated_cart.merge_with(anonymous_cart)
if previous_user and previous_user.is_active is False and previous_user != self.request.user:
# keep the database clean and remove this anonymous entity
if previous_user.customer.orders.count() == 0:
previous_user.delete()
def post(self, request, *args, **kwargs):
self.request = request
if request.user.is_anonymous:
form_data = request.data.get('form_data', {})
self.serializer = self.get_serializer(data=form_data)
if self.serializer.is_valid():
self.login()
return self.get_response()
exc = ValidationError({self.form_name: self.serializer.errors})
else:
message = ErrorDetail("Please log out before signing in again.")
exc = ValidationError({self.form_name: {api_settings.NON_FIELD_ERRORS_KEY: [message]}})
response = self.handle_exception(exc)
self.response = self.finalize_response(request, response, *args, **kwargs)
return self.response
class LogoutView(APIView):
"""
    Calls Django's logout method and deletes the auth token assigned to the current User object.
"""
permission_classes = (AllowAny,)
form_name = 'logout_form'
def post(self, request):
try:
request.user.auth_token.delete()
        except Exception:
            # anonymous users have no auth token to delete
            pass
logout(request)
request.user = AnonymousUser()
response_data = {self.form_name: {'success_message': _("Successfully logged out.")}}
return Response(response_data)
class PasswordChangeView(OriginalPasswordChangeView):
form_name = 'password_change_form'
def post(self, request, *args, **kwargs):
form_data = request.data.get('form_data', {})
serializer = self.get_serializer(data=form_data)
if serializer.is_valid():
serializer.save()
response_data = {self.form_name: {
'success_message': _("Password has been changed successfully."),
}}
return Response(response_data)
return Response({self.form_name: serializer.errors}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
class PasswordResetRequestView(GenericAPIView):
"""
Calls Django Auth PasswordResetRequestForm save method.
Accepts the following POST parameters: email
Returns the success/fail message.
"""
serializer_class = PasswordResetRequestSerializer
permission_classes = (AllowAny,)
form_name = 'password_reset_request_form'
def post(self, request, *args, **kwargs):
form_data = request.data.get('form_data', {})
serializer = self.get_serializer(data=form_data)
if not serializer.is_valid():
return Response({self.form_name: serializer.errors}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
# send email containing a reset link
serializer.save()
# trigger async email queue
email_queued()
# Return the success message with OK HTTP status
msg = _("Instructions on how to reset the password have been sent to '{email}'.")
response_data = {self.form_name: {
'success_message': msg.format(**serializer.data),
}}
return Response(response_data)
class PasswordResetConfirmView(GenericAPIView):
"""
Password reset e-mail link points onto a CMS page with the Page ID = 'password-reset-confirm'.
This page then shall render the CMS plugin as provided by the **ShopAuthenticationPlugin** using
the form "Confirm Password Reset".
"""
renderer_classes = (CMSPageRenderer, JSONRenderer, BrowsableAPIRenderer)
serializer_class = PasswordResetConfirmSerializer
permission_classes = (AllowAny,)
token_generator = default_token_generator
form_name = 'password_reset_confirm_form'
def get(self, request, uidb64=None, token=None):
data = {'uid': uidb64, 'token': token}
serializer_class = self.get_serializer_class()
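        # build a throwaway password so the serializer's required fields are
        # satisfied while we only care about validating the uid/token pair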
password = get_user_model().objects.make_random_password()
data.update(new_password1=password, new_password2=password)
serializer = serializer_class(data=data, context=self.get_serializer_context())
if not serializer.is_valid():
return Response({'validlink': False})
return Response({
'validlink': True,
'user_name': force_str(serializer.user),
'form_name': 'password_reset_form',
})
def post(self, request, uidb64=None, token=None):
try:
data = dict(request.data['form_data'], uid=uidb64, token=token)
except (KeyError, TypeError, ValueError):
errors = {'non_field_errors': [_("Invalid POST data.")]}
else:
serializer = self.get_serializer(data=data)
if serializer.is_valid():
serializer.save()
response_data = {self.form_name: {
'success_message': _("Password has been reset with the new password."),
}}
return Response(response_data)
else:
errors = serializer.errors
return Response({self.form_name: errors}, status=status.HTTP_422_UNPROCESSABLE_ENTITY)
| bsd-3-clause | 8,623,765,009,324,266,000 | 41.208955 | 109 | 0.660655 | false | 4.289181 | false | false | false |
charlesthk/django-nightmare-pdf | nightmare_pdf/generators.py | 1 | 2545 | import subprocess
import os
from django.core.validators import URLValidator
from nightmare_pdf.settings import pdf_settings
from django.http import (
HttpResponse,
Http404
)
from django.core.files.base import ContentFile
from .models import PdfDoc
from .utils import get_random_filename
validate_url = URLValidator(schemes=['https', 'http'])
class PDFGenerator(object):
def __init__(self, url, timeout=1000, page_size='A4', landscape=0,
print_background=1, margins_type=1, script=pdf_settings.DEFAULT_RENDER_SCRIPT,
temp_dir=pdf_settings.DEFAULT_TEMP_DIR):
validate_url(url)
self.url = url
self.filename = self.__get_random_filename()
self.filepath = self.__get_filepath()
self.timeout = timeout
self.page_size = page_size
self.landscape = landscape
self.print_background = print_background
self.margins_type = margins_type
self.script = script
self.temp_dir = temp_dir
self.pdf_data = None
self.__generate()
self.__set_pdf_data()
self.__remove_source_file()
def __get_random_filename(self):
name = get_random_filename(20)
return "%s.pdf" % name
def __get_filepath(self):
return os.path.join(pdf_settings.DEFAULT_TEMP_DIR, self.filename)
def __generate(self):
"""
call the following command:
node render_pdf.js [url] [filepath]
--timeout [timeout]
--pageSize [page_size]
--landscape [landscape]
--printBackground [print_background]
--marginsType [margins_type]
"""
command = [
pdf_settings.NODE_PATH,
self.script,
self.url,
self.filepath,
'--timeout',
str(self.timeout),
'--pageSize',
self.page_size,
'--landscape',
str(self.landscape),
'--printBackground',
str(self.print_background),
'--marginsType',
str(self.margins_type)
]
return subprocess.call(command)
def __set_pdf_data(self):
		# a PDF is binary data; read it back in 'rb' mode (required on Python 3)
		with open(self.filepath, 'rb') as pdf:
			self.pdf_data = pdf.read()
def get_content_file(self, filename):
return ContentFile(self.pdf_data, name=filename)
def get_data(self):
return self.pdf_data
def get_http_response(self, filename):
response = HttpResponse(self.pdf_data, content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="%s.pdf"' % filename
return response
def __remove_source_file(self):
return subprocess.call(['rm', self.filepath])
def save(self, filename, title='', description=''):
file = self.get_content_file(filename)
document = PdfDoc(
title=title,
description=description,
document=file)
document.save()
return document
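# Usage sketch (URL and names are hypothetical; assumes the node render script
# configured in nightmare_pdf.settings is available):
#
#   pdf = PDFGenerator('https://example.com/invoice/42', landscape=1)
#   response = pdf.get_http_response('invoice-42')  # downloadable HttpResponse
#   doc = pdf.save('invoice-42.pdf', title='Invoice 42')  # persist as PdfDoc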
| mit | 8,103,723,720,229,453,000 | 22.564815 | 83 | 0.697053 | false | 3.134236 | false | false | false |
zerotired/kotori | kotori/daq/decoder/__init__.py | 1 | 1246 | # -*- coding: utf-8 -*-
# (c) 2019-2020 Andreas Motl <[email protected]>
from kotori.daq.decoder.airrohr import AirrohrDecoder
from kotori.daq.decoder.tasmota import TasmotaSensorDecoder, TasmotaStateDecoder
from kotori.daq.decoder.schema import MessageType
class DecoderInfo:
def __init__(self):
self.message_type = None
self.decoder = None
class DecoderManager:
def __init__(self, topology):
self.topology = topology
self.info = DecoderInfo()
def probe(self):
if 'slot' not in self.topology:
return False
# Airrohr
if self.topology.slot.endswith('airrohr.json'):
self.info.message_type = MessageType.DATA_CONTAINER
self.info.decoder = AirrohrDecoder
return True
# Tasmota Sensor
if self.topology.slot.endswith('SENSOR'):
self.info.message_type = MessageType.DATA_CONTAINER
self.info.decoder = TasmotaSensorDecoder
return True
# Tasmota State
if self.topology.slot.endswith('STATE'):
self.info.message_type = MessageType.DATA_CONTAINER
self.info.decoder = TasmotaStateDecoder
return True
return False
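# Usage sketch (the topic below is hypothetical; assumes `topology` is the
# attribute-accessible mapping Kotori passes in):
#
#   dm = DecoderManager(topology)      # e.g. topology.slot ends with 'SENSOR'
#   if dm.probe():
#       decoder = dm.info.decoder      # -> TasmotaSensorDecoder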
| agpl-3.0 | -397,541,410,420,240,700 | 27.318182 | 80 | 0.638042 | false | 3.869565 | false | false | false |
tinyms/ArchiveX | tinyms/core/common.py | 1 | 14690 | __author__ = 'tinyms'
#coding=UTF8
import os
import sys
import re
import codecs
import hashlib
import json
#import urllib.request
#import urllib.parse
import time
import datetime
import decimal
import uuid
from imp import find_module, load_module, acquire_lock, release_lock
from tornado.template import Template
# import psycopg2
# import psycopg2.extras
#
#
# class Postgres():
# DATABASE_NAME = "postgres"
# USER_NAME = "postgres"
# PASSWORD = ""
#
# @staticmethod
# #Connect to Postgres Database
# def open():
# return psycopg2.connect(database=Postgres.DATABASE_NAME,
# user=Postgres.USER_NAME,
# password=Postgres.PASSWORD)
#
# @staticmethod
# def update(sql, params, return_col_name=None):
#
# """
# for Insert,Update,Delete
# :param sql:
# :param params:
# :param return_col_name: last insert row id etc.
# :return:
# """
# if return_col_name:
# sql += " RETURNING %s" % return_col_name
# cnn = None
# try:
# cnn = Postgres.open()
# cur = cnn.cursor()
# cur.execute(sql, params)
# if return_col_name:
# result = cur.fetchone()[0]
# else:
# result = True
# cnn.commit()
# except psycopg2.DatabaseError as e:
# print("Error %s" % e)
# cnn.rollback()
# result = False
# finally:
# if cnn:
# cnn.close()
#
# return result
#
# @staticmethod
# #Batch Insert,Update,Delete
# def update_many(sql, arr_params):
# try:
# cnn = Postgres.open()
# cur = cnn.cursor()
# cur.executemany(sql, arr_params)
# cnn.commit()
# except psycopg2.DatabaseError as e:
# print("Error %s" % e)
# finally:
# if cnn:
# cnn.close()
#
# @staticmethod
# #Query DataSet
# def many(sql, params=(), callback=None):
# dataset = list()
# cnn = None
# try:
# cnn = Postgres.open()
# cur = cnn.cursor(cursor_factory=psycopg2.extras.DictCursor)
# cur.execute(sql, params)
# rows = cur.fetchall()
# for row in rows:
# c = row.copy()
# if callback:
# callback(c)
# dataset.append(c)
# cur.close()
# except psycopg2.DatabaseError as e:
# print("Error %s" % e)
# finally:
# if cnn:
# cnn.close()
# return dataset
#
# @staticmethod
# #First Row Data
# def row(sql, params, callback=None):
# items = Postgres.many(sql, params, callback)
# if len(items) > 0:
# return items[0]
# return None
#
# @staticmethod
# #First Column Data
# def col(sql, params, callback=None):
# items = Postgres.many(sql, params, callback)
# cols = list()
# for item in items:
# values = [i for i in item.values()]
# if len(values) > 0:
# cols.append(values[0])
# return cols
#
# @staticmethod
# #First Row And First Column
# def one(sql, params=(), callback=None):
# first_col = Postgres.col(sql, params, callback)
# if len(first_col) > 0:
# return first_col[0]
# return None
#
# @staticmethod
# #Store Proc, Return Single Result
# def proc_one(name, params, callback=None):
# first_col = Postgres.proc_many(name, params, callback)
# if len(first_col) > 0:
# return first_col[0]
# return None
#
# @staticmethod
# #Store Proc, Return DataSet
# def proc_many(name, params, callback=None):
# dataset = list()
# cnn = None
# try:
# cnn = Postgres.open()
# cur = cnn.cursor(cursor_factory=psycopg2.extras.DictCursor)
# rows = cur.callproc(name, params)
# for row in rows:
# c = row.copy()
# if callback:
# callback(c)
# dataset.append(c)
# cur.close()
# except psycopg2.DatabaseError as e:
# print("Error %s" % e)
# finally:
# if cnn:
# cnn.close()
# return dataset
#
# @staticmethod
# #Return all cols name from current Query cursor
# def col_names(cur):
# names = list()
# for col in cur.description:
# names.append(col.name)
# return names
class JsonEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, decimal.Decimal):
return float(o)
elif isinstance(o, datetime.date):
return Utils.format_datetime_short(o)
elif isinstance(o, datetime.datetime):
return Utils.format_datetime_short(o)
elif isinstance(o, datetime.time):
return Utils.format_time(o)
        return super(JsonEncoder, self).default(o)
class Utils():
def __init__(self):
pass
@staticmethod
def text_read(f_name, join=True):
if not os.path.exists(f_name):
return ""
f = codecs.open(f_name, "r", "utf-8")
all_ = f.readlines()
f.close()
        if join:
            return "".join(all_)
        return all_
@staticmethod
def text_write(f_name, lines=list(), suffix="\n"):
f = codecs.open(f_name, "w+", "utf-8")
if isinstance(lines, list):
for line in lines:
f.write(line + suffix)
else:
f.write(lines)
f.write(suffix)
f.close()
# @staticmethod
# def url_with_params(url):
# r1 = urllib.parse.urlsplit(url)
# if r1.query != "":
# return True
# return False
@staticmethod
def trim(text):
return "".join(text.split())
@staticmethod
def uniq_index():
return uuid.uuid1()
@staticmethod
def render(tpl_text, context):
"""
render a template
:param tpl_text: template text
:param context: dict object
:return: str
"""
tpl = Template(tpl_text)
return tpl.generate(context)
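    # Worked example (illustrative):
    #   Utils.render('Hello {{ name }}!', {'name': 'Ada'}) -> b'Hello Ada!'
    # (Tornado's Template.generate returns the rendered output as bytes.)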
@staticmethod
def md5(s):
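        # note: despite the method name, this computes a RIPEMD-160 digest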
h = hashlib.new('ripemd160')
h.update(bytearray(s.encode("utf8")))
return h.hexdigest()
@staticmethod
def current_datetime():
from datetime import datetime as tmp
return tmp.now()
@staticmethod
def mkdirs(path):
isexists = os.path.exists(path)
if not isexists:
os.makedirs(path)
return True
else:
return False
@staticmethod
def parse_int(text):
nums = Utils.parse_int_array(text)
if len(nums) > 0:
return int(nums[0])
return None
@staticmethod
def parse_int_array(text):
arr = list()
p = re.compile("[-]?\\d+", re.M)
nums = p.findall(text)
if len(nums) > 0:
arr = [int(s) for s in nums]
return arr
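    # Worked example (illustrative):
    #   Utils.parse_int_array('a1 b-2 c3') -> [1, -2, 3]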
@staticmethod
def parse_time_text(text):
if not text:
return ""
p = re.compile("\\d{2}:\\d{2}")
dates = p.findall(text)
if len(dates) > 0:
return dates[0]
return ""
@staticmethod
def parse_time(text):
time_text = Utils.parse_time_text(text)
if not time_text:
return None
time_struct = time.strptime(time_text, "%H:%M")
return datetime.time(time_struct.tm_hour, time_struct.tm_min)
@staticmethod
def parse_date_text(text):
if not text:
return ""
p = re.compile("\\d{4}-\\d{2}-\\d{2}")
dates = p.findall(text)
if len(dates) > 0:
return dates[0]
return ""
@staticmethod
def parse_date(text):
date_text = Utils.parse_date_text(text)
if not date_text:
return None
from datetime import datetime
return datetime.strptime(date_text, "%Y-%m-%d").date()
@staticmethod
def parse_datetime_text(text):
if not text:
return ""
p = "\\d{4}-\\d{2}-\\d{2}\\s{1}\\d{2}:\\d{2}"
r = re.compile(p)
matchs = r.findall(text)
if len(matchs) > 0:
return matchs[0]
return ""
@staticmethod
def parse_datetime(text):
datetime_text = Utils.parse_datetime_text(text)
if not datetime_text:
return None
from datetime import datetime
return datetime.strptime(datetime_text, "%Y-%m-%d %H:%M")
@staticmethod
def parse_float(text):
floats = Utils.parse_float_array(text)
if len(floats) > 0:
return float(floats[0])
return None
@staticmethod
def parse_float_array(text):
p = re.compile("[-]?\\d+\\.\\d+", re.M)
return [float(s) for s in p.findall(text)]
@staticmethod
def parse_number_array(text):
"""
int or float
:param text:
:return:
"""
p = re.compile("[-]?\\d+[\\.]?[\\d]*", re.M)
return [float(s) for s in p.findall(text)]
@staticmethod
def encode(obj):
return json.dumps(obj, cls=JsonEncoder)
@staticmethod
def decode(text):
return json.loads(text)
# @staticmethod
# def download(url, save_path):
# try:
# f = urllib.request.urlopen(url, timeout=15)
# data = f.read()
# with open(save_path, "wb") as cache:
# cache.write(data)
# except urllib.error.URLError as ex:
# info = sys.exc_info()
# print(info[0], ":", info[1], ex)
@staticmethod
def matrix_reverse(arr):
"""
        Transpose a matrix: rows become columns.
:param arr:
:return:
"""
return [[r[col] for r in arr] for col in range(len(arr[0]))]
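    # Worked example (illustrative):
    #   Utils.matrix_reverse([[1, 2], [3, 4], [5, 6]]) -> [[1, 3, 5], [2, 4, 6]]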
@staticmethod
def combine_text_files(folder, target_file_name):
text = Utils.text_read(os.path.join(folder, "combine.list"))
cfg = json.loads(text)
for key in cfg.keys():
files = cfg[key]
if len(files) > 0:
combine_file = os.path.join(folder, target_file_name + "." + key)
if os.path.exists(combine_file):
os.remove(combine_file)
all_ = list()
for file_ in files:
path = os.path.join(folder, file_)
all_.append(Utils.text_read(path))
Utils.text_write(combine_file, all_)
pass
@staticmethod
def is_email(s):
p = r"[^@]+@[^@]+\.[^@]+"
if re.match(p, s):
return True
return False
@staticmethod
def email_account_name(s):
        # match the substring before the '@'
p = r".*(?=@)"
r = re.compile(p)
matchs = r.findall(s)
if len(matchs) > 0:
return matchs[0]
return ""
@staticmethod
def format_year_month(date_obj):
if not date_obj:
return ""
return date_obj.strftime('%Y-%m')
@staticmethod
def format_datetime(date_obj):
if not date_obj:
return ""
return date_obj.strftime('%Y-%m-%d %H:%M:%S')
@staticmethod
def format_datetime_short(date_obj):
if not date_obj:
return ""
return date_obj.strftime('%Y-%m-%d %H:%M')
@staticmethod
def format_date(date_obj):
if not date_obj:
return ""
return date_obj.strftime('%Y-%m-%d')
@staticmethod
def format_time(datetime_obj):
if not datetime_obj:
return ""
if isinstance(datetime_obj, datetime.time):
curr_date = Utils.current_datetime()
dt = datetime.datetime.combine(curr_date, datetime_obj)
return dt.strftime('%H:%M')
elif isinstance(datetime_obj, datetime.datetime):
return datetime_obj.strftime('%H:%M')
return ""
class Plugin():
def __init__(self):
pass
ObjectPool = dict()
@staticmethod
def one(type_):
plugins = Plugin.get(type_)
if len(plugins) > 0:
return plugins[0]
return None
@staticmethod
def get(type_, class_full_name=""):
"""
get plugin class object instance
:param type_: extends plugin interface
:param class_full_name: class name with module name
:return: a object
"""
if not class_full_name:
return Plugin.ObjectPool.get(type_)
else:
arr = Plugin.ObjectPool.get(type_)
for t in arr:
name = "%s.%s" % (t.__class__.__module__, t.__class__.__name__)
if name.lower() == class_full_name.lower():
return t
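    # Usage sketch (the interface name below is hypothetical): after
    # Plugin.load() has scanned the config folder, look up implementations:
    #
    #   handler = Plugin.one(SomePluginInterface)    # first implementation
    #   handlers = Plugin.get(SomePluginInterface)   # all implementations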
@staticmethod
def load():
Plugin.ObjectPool.clear()
path = os.path.join(os.getcwd(), "config")
wid = os.walk(path)
plugins = []
print("Search config modules..")
for rootDir, pathList, fileList in wid:
if rootDir.find("__pycache__") != -1:
continue
for file_ in fileList:
if file_.find("__init__.py") != -1:
continue
#re \\.py[c]?$
if file_.endswith(".py") or file_.endswith(".pyc"):
plugins.append((os.path.splitext(file_)[0], rootDir))
print(plugins)
print("Instance all Config class.")
for (name, dir_) in plugins:
try:
acquire_lock()
file_, filename, desc = find_module(name, [dir_])
prev = sys.modules.get(name)
if prev:
del sys.modules[name]
module_ = load_module(name, file_, filename, desc)
finally:
release_lock()
if hasattr(module_, "__export__"):
attrs = [getattr(module_, x) for x in module_.__export__]
for attr in attrs:
parents = attr.__bases__
if len(parents) > 0:
parent = parents[0]
if not Plugin.ObjectPool.get(parent):
Plugin.ObjectPool[parent] = [attr()]
else:
Plugin.ObjectPool[parent].append(attr())
print("Config init completed.") | bsd-3-clause | -1,749,095,310,060,765,400 | 27.260116 | 81 | 0.50375 | false | 3.775032 | false | false | false |
YantaiGao/learn_Python_The-Hard-Way | No38_ListOp.py | 1 | 1059 | # -*- coding:utf-8 -*-
# Note how a class is declared:
class Thing(object):
    # 'self' is required, otherwise an error is raised
def test(self,hi):
print hi
a = Thing()  # create an instance of the class
a.test("hahaha")
print "---------------------------------"
test_things = "Apple Orange Crows Telephone Light Suger"
print "There is not 10 things in that list,let's fix it."
stuff = test_things.split(' ')
more_stuff = ["Mon","Tues","Wed","Thris","Fir","Sat","Sun","MOON"]
while len(stuff)!=10:
    # Note: pop() removes items from the end, so the last element comes out first
next = more_stuff.pop()
print "Adding ", next
    # append() adds an element to the end of the list
stuff.append(next)
print "There are %d elements in list " %len(stuff)
print "Here we go: ",stuff
# Note: indexing starts at 0!!!
print stuff[1]
# Note: -1 is the last element; negative indices count backwards from the end
print "stuff[-1] == ",stuff[-1]
print "stuff[-2] == ",stuff[-2]
print stuff.pop()
# Note: join() does not modify the list's actual elements
print ' '.join(stuff)
# stuff[3:5] is a slice; its bounds work like range()
print '#'.join(stuff[3:5])
print stuff
| mit | -114,587,832,266,722,030 | 18.545455 | 66 | 0.630966 | false | 1.926009 | false | false | false
jr-garcia/Engendro3D | e3d/model_management/interpolation.py | 1 | 1562 | # import numpy
# from cycgkit.cgtypes import vec3, quat
def getClosest(keys, time, chrid, sortedKeys):
def getfrom(keys1, time, ch):
try:
if ch == 'p':
return keys1[time].position
elif ch == 's':
return keys1[time].scale
else:
return keys1[time].rotation
except KeyError:
return None
a = None
b = None
a1 = -1
b1 = -1
for i in range(len(keys) - 1, -1, -1):
if sortedKeys[i] < time:
a = getfrom(keys, sortedKeys[i], chrid)
a1 = sortedKeys[i]
break
for j in range(len(keys)):
if sortedKeys[j] > time:
b = getfrom(keys, sortedKeys[j], chrid)
b1 = sortedKeys[j]
break
if a is None:
if b is not None:
return b, None, time
else:
return getfrom(keys, 0, chrid), None, time
t = 1.0 - ((b1 - time) / (b1 - a1))
return a, b, t
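# Worked example (illustrative): with sorted key times [0, 10] and time=2.5,
# getClosest returns (key@0, key@10, 0.25): t = 1.0 - ((10 - 2.5) / (10 - 0)).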
# -----------
# http://keithmaggio.wordpress.com/2011/02/15/math-magician-lerp-slerp-and-nlerp/
def Lerp(percent, start, end):
return start + (percent * (end - start))
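# Worked example (illustrative): Lerp(0.25, 0.0, 8.0) == 2.0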
# def Nlerp(percent, start, end):
# res = Lerp(percent, start, end)
# if res.shape[0] == 3:
# return numpy.array(vec3(res).normalize())
# else:
# na = numpy.zeros(shape=(4,))
# tres = quat(res).normalize()
# # na = res
# na[0] = tres.w
# na[1] = tres.x
# na[2] = tres.y
# na[3] = tres.z
#         return na
| mit | -1,189,140,796,025,778,700 | 23.809524 | 80 | 0.493598 | false | 3.099206 | false | false | false
NetApp/manila | manila/tests/share/drivers/emc/plugins/vnx/test_object_manager.py | 1 | 125573 | # Copyright (c) 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import ddt
from lxml import builder
import mock
from oslo_concurrency import processutils
from manila.common import constants as const
from manila import exception
from manila.share.drivers.emc.plugins.vnx import connector
from manila.share.drivers.emc.plugins.vnx import constants
from manila.share.drivers.emc.plugins.vnx import object_manager as manager
from manila.share.drivers.emc.plugins.vnx import xml_api_parser as parser
from manila import test
from manila.tests.share.drivers.emc.plugins.vnx import fakes
from manila.tests.share.drivers.emc.plugins.vnx import utils
class StorageObjectManagerTestCase(test.TestCase):
@mock.patch.object(connector, "XMLAPIConnector", mock.Mock())
@mock.patch.object(connector, "SSHConnector", mock.Mock())
def setUp(self):
super(StorageObjectManagerTestCase, self).setUp()
emd_share_driver = fakes.FakeEMCShareDriver()
self.manager = manager.StorageObjectManager(
emd_share_driver.configuration)
def test_get_storage_context(self):
type_map = {
'FileSystem': manager.FileSystem,
'StoragePool': manager.StoragePool,
'MountPoint': manager.MountPoint,
'Mover': manager.Mover,
'VDM': manager.VDM,
'Snapshot': manager.Snapshot,
'MoverInterface': manager.MoverInterface,
'DNSDomain': manager.DNSDomain,
'CIFSServer': manager.CIFSServer,
'CIFSShare': manager.CIFSShare,
'NFSShare': manager.NFSShare,
}
for key, value in type_map.items():
self.assertTrue(
isinstance(self.manager.getStorageContext(key), value))
for key in self.manager.context.keys():
self.assertTrue(key in type_map)
def test_get_storage_context_invalid_type(self):
fake_type = 'fake_type'
self.assertRaises(exception.EMCVnxXMLAPIError,
self.manager.getStorageContext,
fake_type)
class StorageObjectTestCaseBase(test.TestCase):
@mock.patch.object(connector, "XMLAPIConnector", mock.Mock())
@mock.patch.object(connector, "SSHConnector", mock.Mock())
def setUp(self):
super(StorageObjectTestCaseBase, self).setUp()
emd_share_driver = fakes.FakeEMCShareDriver()
self.manager = manager.StorageObjectManager(
emd_share_driver.configuration)
self.base = fakes.StorageObjectTestData()
self.pool = fakes.PoolTestData()
self.vdm = fakes.VDMTestData()
self.mover = fakes.MoverTestData()
self.fs = fakes.FileSystemTestData()
self.mount = fakes.MountPointTestData()
self.snap = fakes.SnapshotTestData()
self.cifs_share = fakes.CIFSShareTestData()
self.nfs_share = fakes.NFSShareTestData()
self.cifs_server = fakes.CIFSServerTestData()
self.dns = fakes.DNSDomainTestData()
class StorageObjectTestCase(StorageObjectTestCaseBase):
def test_xml_api_retry(self):
hook = utils.RequestSideEffect()
hook.append(self.base.resp_need_retry())
hook.append(self.base.resp_task_succeed())
elt_maker = builder.ElementMaker(nsmap={None: constants.XML_NAMESPACE})
xml_parser = parser.XMLAPIParser()
storage_object = manager.StorageObject(self.manager.connectors,
elt_maker, xml_parser,
self.manager)
storage_object.conn['XML'].request = utils.EMCMock(side_effect=hook)
fake_req = storage_object._build_task_package(
elt_maker.StartFake(name='foo')
)
resp = storage_object._send_request(fake_req)
self.assertEqual('ok', resp['maxSeverity'])
expected_calls = [
mock.call(self.base.req_fake_start_task()),
mock.call(self.base.req_fake_start_task())
]
storage_object.conn['XML'].request.assert_has_calls(expected_calls)
class FileSystemTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
self.ssh_hook = utils.SSHSideEffect()
def test_create_file_system_on_vdm(self):
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.fs.resp_task_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.fs.filesystem_name,
size=self.fs.filesystem_size,
pool_name=self.pool.pool_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.fs.req_create_on_vdm()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_file_system_on_mover(self):
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.fs.resp_task_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.fs.filesystem_name,
size=self.fs.filesystem_size,
pool_name=self.pool.pool_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.fs.req_create_on_mover()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_file_system_but_already_exist(self):
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.fs.resp_create_but_already_exist())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.fs.filesystem_name,
size=self.fs.filesystem_size,
pool_name=self.pool.pool_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.fs.req_create_on_vdm()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_file_system_invalid_mover_id(self, sleep_mock):
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.fs.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.fs.resp_task_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.fs.filesystem_name,
size=self.fs.filesystem_size,
pool_name=self.pool.pool_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.fs.req_create_on_mover()),
mock.call(self.mover.req_get_ref()),
mock.call(self.fs.req_create_on_mover()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_file_system_with_error(self):
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.fs.resp_task_error())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
name=self.fs.filesystem_name,
size=self.fs.filesystem_size,
pool_name=self.pool.pool_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.fs.req_create_on_vdm()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_file_system(self):
self.hook.append(self.fs.resp_get_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.fs.filesystem_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.fs.filesystem_name, context.filesystem_map)
property_map = [
'name',
'pools_id',
'volume_id',
'size',
'id',
'type',
'dataServicePolicies',
]
for prop in property_map:
self.assertIn(prop, out)
id = context.get_id(self.fs.filesystem_name)
self.assertEqual(self.fs.filesystem_id, id)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_file_system_but_not_found(self):
self.hook.append(self.fs.resp_get_but_not_found())
self.hook.append(self.fs.resp_get_without_value())
self.hook.append(self.fs.resp_get_error())
self.hook.append(self.fs.resp_get_but_not_found())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.fs.filesystem_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
status, out = context.get(self.fs.filesystem_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
status, out = context.get(self.fs.filesystem_name)
self.assertEqual(constants.STATUS_ERROR, status)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get_id,
self.fs.filesystem_name)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.fs.req_get()),
mock.call(self.fs.req_get()),
mock.call(self.fs.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_file_system_but_miss_property(self):
self.hook.append(self.fs.resp_get_but_miss_property())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.fs.filesystem_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.fs.filesystem_name, context.filesystem_map)
property_map = [
'name',
'pools_id',
'volume_id',
'size',
'id',
'type',
'dataServicePolicies',
]
for prop in property_map:
self.assertIn(prop, out)
self.assertIsNone(out['dataServicePolicies'])
id = context.get_id(self.fs.filesystem_name)
self.assertEqual(self.fs.filesystem_id, id)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_file_system(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.fs.resp_task_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.fs.filesystem_name)
self.assertNotIn(self.fs.filesystem_name, context.filesystem_map)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.fs.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertNotIn(self.fs.filesystem_name, context.filesystem_map)
def test_delete_file_system_but_not_found(self):
self.hook.append(self.fs.resp_get_but_not_found())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.fs.filesystem_name)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_file_system_but_get_file_system_error(self):
self.hook.append(self.fs.resp_get_error())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.fs.filesystem_name)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_file_system_with_error(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.fs.resp_delete_but_failed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.fs.filesystem_name)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.fs.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertIn(self.fs.filesystem_name, context.filesystem_map)
def test_extend_file_system(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.fs.resp_task_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.extend(name=self.fs.filesystem_name,
pool_name=self.pool.pool_name,
new_size=self.fs.filesystem_new_size)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.pool.req_get()),
mock.call(self.fs.req_extend()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_extend_file_system_but_not_found(self):
self.hook.append(self.fs.resp_get_but_not_found())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.extend,
name=self.fs.filesystem_name,
pool_name=self.fs.pool_name,
new_size=self.fs.filesystem_new_size)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_extend_file_system_with_small_size(self):
self.hook.append(self.fs.resp_get_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.extend,
name=self.fs.filesystem_name,
pool_name=self.pool.pool_name,
new_size=1)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_extend_file_system_with_same_size(self):
self.hook.append(self.fs.resp_get_succeed())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.extend(name=self.fs.filesystem_name,
pool_name=self.pool.pool_name,
new_size=self.fs.filesystem_size)
expected_calls = [mock.call(self.fs.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_extend_file_system_with_error(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.pool.resp_get_succeed())
self.hook.append(self.fs.resp_extend_but_error())
context = self.manager.getStorageContext('FileSystem')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.extend,
name=self.fs.filesystem_name,
pool_name=self.pool.pool_name,
new_size=self.fs.filesystem_new_size)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.pool.req_get()),
mock.call(self.fs.req_extend()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_filesystem_from_snapshot(self):
self.ssh_hook.append()
self.ssh_hook.append()
self.ssh_hook.append(self.fs.output_copy_ckpt)
self.ssh_hook.append(self.fs.output_info())
self.ssh_hook.append()
self.ssh_hook.append()
self.ssh_hook.append()
context = self.manager.getStorageContext('FileSystem')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.create_from_snapshot(self.fs.filesystem_name,
self.snap.src_snap_name,
self.fs.src_fileystems_name,
self.pool.pool_name,
self.vdm.vdm_name,
self.mover.interconnect_id,)
ssh_calls = [
mock.call(self.fs.cmd_create_from_ckpt(), False),
mock.call(self.mount.cmd_server_mount('ro'), False),
mock.call(self.fs.cmd_copy_ckpt(), True),
mock.call(self.fs.cmd_nas_fs_info(), False),
mock.call(self.mount.cmd_server_umount(), False),
mock.call(self.fs.cmd_delete(), False),
mock.call(self.mount.cmd_server_mount('rw'), False),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_create_filesystem_from_snapshot_with_error(self):
self.ssh_hook.append()
self.ssh_hook.append()
self.ssh_hook.append(ex=processutils.ProcessExecutionError(
stdout=self.fs.fake_output, stderr=None))
self.ssh_hook.append(self.fs.output_info())
self.ssh_hook.append()
self.ssh_hook.append()
self.ssh_hook.append()
context = self.manager.getStorageContext('FileSystem')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.create_from_snapshot(
self.fs.filesystem_name,
self.snap.src_snap_name,
self.fs.src_fileystems_name,
self.pool.pool_name,
self.vdm.vdm_name,
self.mover.interconnect_id, )
ssh_calls = [
mock.call(self.fs.cmd_create_from_ckpt(), False),
mock.call(self.mount.cmd_server_mount('ro'), False),
mock.call(self.fs.cmd_copy_ckpt(), True),
mock.call(self.fs.cmd_nas_fs_info(), False),
mock.call(self.mount.cmd_server_umount(), False),
mock.call(self.fs.cmd_delete(), False),
mock.call(self.mount.cmd_server_mount('rw'), False),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
class MountPointTestCase(StorageObjectTestCaseBase):
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_create_mount_point_on_vdm(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mount_path=self.mount.path,
fs_name=self.fs.filesystem_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_create(self.vdm.vdm_id, True)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_mount_point_on_mover(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mount_path=self.mount.path,
fs_name=self.fs.filesystem_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_create(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_mount_point_but_already_exist(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_create_but_already_exist())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mount_path=self.mount.path,
fs_name=self.fs.filesystem_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_create(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_mount_point_invalid_mover_id(self, sleep_mock):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mount_path=self.mount.path,
fs_name=self.fs.filesystem_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_create(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_create(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_mount_point_with_error(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_task_error())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
mount_path=self.mount.path,
fs_name=self.fs.filesystem_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_create(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_mount_point_on_vdm(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mount_path=self.mount.path,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_delete(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_mount_point_on_mover(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mount_path=self.mount.path,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_delete(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_mount_point_but_nonexistent(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_delete_but_nonexistent())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mount_path=self.mount.path,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_delete(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_delete_mount_point_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_task_succeed())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mount_path=self.mount.path,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_delete(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_delete(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_delete_mount_point_with_error(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_task_error())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
mount_path=self.mount.path,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_delete(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mount_points(self):
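        # Mount points are listed twice: once for a VDM, once for a
        # physical mover, and both results expose the same properties.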
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.mount.resp_get_succeed(self.vdm.vdm_id))
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_get_succeed(self.mover.mover_id,
False))
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_OK, status)
property_map = [
'path',
'mover',
'moverIdIsVdm',
'fileSystem',
]
for item in out:
for prop in property_map:
self.assertIn(prop, item)
status, out = context.get(self.mover.mover_name, False)
self.assertEqual(constants.STATUS_OK, status)
property_map = [
'path',
'mover',
'moverIdIsVdm',
'fileSystem',
]
for item in out:
for prop in property_map:
self.assertIn(prop, item)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.mount.req_get(self.vdm.vdm_id)),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mount_points_but_not_found(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_get_without_value())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.mover.mover_name, False)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_get_mount_points_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_get_succeed(self.mover.mover_id,
False))
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.mover.mover_name, False)
self.assertEqual(constants.STATUS_OK, status)
property_map = [
'path',
'mover',
'moverIdIsVdm',
'fileSystem',
]
for item in out:
for prop in property_map:
self.assertIn(prop, item)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_get(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_get_mount_points_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mount.resp_get_error())
context = self.manager.getStorageContext('MountPoint')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.mover.mover_name, False)
self.assertEqual(constants.STATUS_ERROR, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mount.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
class VDMTestCase(StorageObjectTestCaseBase):
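    """Tests for VDM (Virtual Data Mover) operations over XML and SSH."""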
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
self.ssh_hook = utils.SSHSideEffect()
def test_create_vdm(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.vdm.resp_task_succeed())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(self.vdm.vdm_name, self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.vdm.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_vdm_but_already_exist(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.vdm.resp_create_but_already_exist())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Create VDM which already exists.
context.create(self.vdm.vdm_name, self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.vdm.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_vdm_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.vdm.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.vdm.resp_task_succeed())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Create VDM with invalid mover ID
context.create(self.vdm.vdm_name, self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.vdm.req_create()),
mock.call(self.mover.req_get_ref()),
mock.call(self.vdm.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_vdm_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.vdm.resp_task_error())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
        # Create VDM when the backend reports a task error.
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
name=self.vdm.vdm_name,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.vdm.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_vdm(self):
self.hook.append(self.vdm.resp_get_succeed())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.vdm.vdm_name, context.vdm_map)
property_map = [
'name',
'id',
'state',
'host_mover_id',
'interfaces',
]
for prop in property_map:
self.assertIn(prop, out)
expected_calls = [mock.call(self.vdm.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_vdm_with_error(self):
self.hook.append(self.vdm.resp_get_error())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Get VDM with error
status, out = context.get(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_ERROR, status)
expected_calls = [mock.call(self.vdm.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_vdm_but_not_found(self):
self.hook.append(self.vdm.resp_get_without_value())
self.hook.append(self.vdm.resp_get_succeed('fake'))
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Get VDM which does not exist
status, out = context.get(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
status, out = context.get(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.vdm.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_vdm_id_with_error(self):
self.hook.append(self.vdm.resp_get_error())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get_id,
self.vdm.vdm_name)
expected_calls = [mock.call(self.vdm.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_vdm(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.vdm.resp_task_succeed())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.vdm.vdm_name)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.vdm.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_vdm_but_not_found(self):
self.hook.append(self.vdm.resp_get_but_not_found())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.vdm.vdm_name)
expected_calls = [mock.call(self.vdm.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_vdm_but_failed_to_get_vdm(self):
self.hook.append(self.vdm.resp_get_error())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.vdm.vdm_name)
expected_calls = [mock.call(self.vdm.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_vdm_with_error(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.vdm.resp_task_error())
context = self.manager.getStorageContext('VDM')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.vdm.vdm_name)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.vdm.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_attach_detach_nfs_interface(self):
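        # Attach and then detach an NFS interface on the VDM via SSH.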
self.ssh_hook.append()
self.ssh_hook.append()
context = self.manager.getStorageContext('VDM')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.attach_nfs_interface(self.vdm.vdm_name,
self.mover.interface_name2)
context.detach_nfs_interface(self.vdm.vdm_name,
self.mover.interface_name2)
ssh_calls = [
mock.call(self.vdm.cmd_attach_nfs_interface(), False),
mock.call(self.vdm.cmd_detach_nfs_interface(), True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_detach_nfs_interface_with_error(self):
self.ssh_hook.append(ex=processutils.ProcessExecutionError(
stdout=self.vdm.fake_output))
self.ssh_hook.append(self.vdm.output_get_interfaces(
self.mover.interface_name2))
self.ssh_hook.append(ex=processutils.ProcessExecutionError(
stdout=self.vdm.fake_output))
self.ssh_hook.append(self.vdm.output_get_interfaces(
nfs_interface=fakes.FakeData.interface_name1))
context = self.manager.getStorageContext('VDM')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.detach_nfs_interface,
self.vdm.vdm_name,
self.mover.interface_name2)
context.detach_nfs_interface(self.vdm.vdm_name,
self.mover.interface_name2)
ssh_calls = [
mock.call(self.vdm.cmd_detach_nfs_interface(), True),
mock.call(self.vdm.cmd_get_interfaces(), False),
mock.call(self.vdm.cmd_detach_nfs_interface(), True),
mock.call(self.vdm.cmd_get_interfaces(), False),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_get_cifs_nfs_interface(self):
self.ssh_hook.append(self.vdm.output_get_interfaces())
context = self.manager.getStorageContext('VDM')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
interfaces = context.get_interfaces(self.vdm.vdm_name)
self.assertIsNotNone(interfaces['cifs'])
self.assertIsNotNone(interfaces['nfs'])
ssh_calls = [mock.call(self.vdm.cmd_get_interfaces(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
class StoragePoolTestCase(StorageObjectTestCaseBase):
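    """Tests for storage pool lookups through the XML API."""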
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_get_pool(self):
self.hook.append(self.pool.resp_get_succeed())
context = self.manager.getStorageContext('StoragePool')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.pool.pool_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.pool.pool_name, context.pool_map)
property_map = [
'name',
'movers_id',
'total_size',
'used_size',
'diskType',
'dataServicePolicies',
'id',
]
for prop in property_map:
self.assertIn(prop, out)
expected_calls = [mock.call(self.pool.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_pool_with_error(self):
self.hook.append(self.pool.resp_get_error())
self.hook.append(self.pool.resp_get_without_value())
self.hook.append(self.pool.resp_get_succeed(name='other'))
context = self.manager.getStorageContext('StoragePool')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.pool.pool_name)
self.assertEqual(constants.STATUS_ERROR, status)
status, out = context.get(self.pool.pool_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
status, out = context.get(self.pool.pool_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [
mock.call(self.pool.req_get()),
mock.call(self.pool.req_get()),
mock.call(self.pool.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_pool_id_with_error(self):
self.hook.append(self.pool.resp_get_error())
context = self.manager.getStorageContext('StoragePool')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get_id,
self.pool.pool_name)
expected_calls = [mock.call(self.pool.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
class MoverTestCase(StorageObjectTestCaseBase):
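    """Tests for physical Data Mover queries over XML and SSH."""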
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
self.ssh_hook = utils.SSHSideEffect()
def test_get_mover(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_succeed())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.mover.mover_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.mover.mover_name, context.mover_map)
property_map = [
'name',
'id',
'Status',
'version',
'uptime',
'role',
'interfaces',
'devices',
'dns_domain',
]
for prop in property_map:
self.assertIn(prop, out)
status, out = context.get(self.mover.mover_name)
self.assertEqual(constants.STATUS_OK, status)
status, out = context.get(self.mover.mover_name, True)
self.assertEqual(constants.STATUS_OK, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_ref_not_found(self):
self.hook.append(self.mover.resp_get_ref_succeed(name='other'))
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get_ref(self.mover.mover_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [mock.call(self.mover.req_get_ref())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_ref_with_error(self):
self.hook.append(self.mover.resp_get_error())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get_ref(self.mover.mover_name)
self.assertEqual(constants.STATUS_ERROR, status)
expected_calls = [mock.call(self.mover.req_get_ref())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_ref_and_mover(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_succeed())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get_ref(self.mover.mover_name)
self.assertEqual(constants.STATUS_OK, status)
property_map = ['name', 'id']
for prop in property_map:
self.assertIn(prop, out)
status, out = context.get(self.mover.mover_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.mover.mover_name, context.mover_map)
property_map = [
'name',
'id',
'Status',
'version',
'uptime',
'role',
'interfaces',
'devices',
'dns_domain',
]
for prop in property_map:
self.assertIn(prop, out)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_failed_to_get_mover_ref(self):
self.hook.append(self.mover.resp_get_error())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get,
self.mover.mover_name)
expected_calls = [mock.call(self.mover.req_get_ref())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_but_not_found(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_without_value())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(name=self.mover.mover_name, force=True)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_error())
context = self.manager.getStorageContext('Mover')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.mover.mover_name)
self.assertEqual(constants.STATUS_ERROR, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_interconnect_id(self):
self.ssh_hook.append(self.mover.output_get_interconnect_id())
context = self.manager.getStorageContext('Mover')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
conn_id = context.get_interconnect_id(self.mover.mover_name,
self.mover.mover_name)
self.assertEqual(self.mover.interconnect_id, conn_id)
ssh_calls = [mock.call(self.mover.cmd_get_interconnect_id(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_get_physical_devices(self):
self.ssh_hook.append(self.mover.output_get_physical_devices())
context = self.manager.getStorageContext('Mover')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
devices = context.get_physical_devices(self.mover.mover_name)
self.assertIn(self.mover.device_name, devices)
ssh_calls = [mock.call(self.mover.cmd_get_physical_devices(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
class SnapshotTestCase(StorageObjectTestCaseBase):
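    """Tests for snapshot (checkpoint) create/get/delete operations."""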
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_create_snapshot(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.snap.resp_task_succeed())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.snap.snapshot_name,
fs_name=self.fs.filesystem_name,
pool_id=self.pool.pool_id)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.snap.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_snapshot_but_already_exist(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.snap.resp_create_but_already_exist())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.snap.snapshot_name,
fs_name=self.fs.filesystem_name,
pool_id=self.pool.pool_id,
ckpt_size=self.snap.snapshot_size)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.snap.req_create_with_size()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_snapshot_with_error(self):
self.hook.append(self.fs.resp_get_succeed())
self.hook.append(self.snap.resp_task_error())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
name=self.snap.snapshot_name,
fs_name=self.fs.filesystem_name,
pool_id=self.pool.pool_id,
ckpt_size=self.snap.snapshot_size)
expected_calls = [
mock.call(self.fs.req_get()),
mock.call(self.snap.req_create_with_size()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_snapshot(self):
self.hook.append(self.snap.resp_get_succeed())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.snap.snapshot_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.snap.snapshot_name, context.snap_map)
property_map = [
'name',
'id',
'checkpointOf',
'state',
]
for prop in property_map:
self.assertIn(prop, out)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_snapshot_but_not_found(self):
self.hook.append(self.snap.resp_get_without_value())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.snap.snapshot_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_snapshot_with_error(self):
self.hook.append(self.snap.resp_get_error())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(self.snap.snapshot_name)
self.assertEqual(constants.STATUS_ERROR, status)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_snapshot(self):
self.hook.append(self.snap.resp_get_succeed())
self.hook.append(self.snap.resp_task_succeed())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.snap.snapshot_name)
self.assertNotIn(self.snap.snapshot_name, context.snap_map)
expected_calls = [
mock.call(self.snap.req_get()),
mock.call(self.snap.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_snapshot_failed_to_get_snapshot(self):
self.hook.append(self.snap.resp_get_error())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.snap.snapshot_name)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_snapshot_but_not_found(self):
self.hook.append(self.snap.resp_get_without_value())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(self.snap.snapshot_name)
self.assertNotIn(self.snap.snapshot_name, context.snap_map)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_snapshot_with_error(self):
self.hook.append(self.snap.resp_get_succeed())
self.hook.append(self.snap.resp_task_error())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
self.snap.snapshot_name)
expected_calls = [
mock.call(self.snap.req_get()),
mock.call(self.snap.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_snapshot_id(self):
self.hook.append(self.snap.resp_get_succeed())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
        snap_id = context.get_id(self.snap.snapshot_name)
        self.assertEqual(self.snap.snapshot_id, snap_id)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_snapshot_id_with_error(self):
self.hook.append(self.snap.resp_get_error())
context = self.manager.getStorageContext('Snapshot')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get_id,
self.snap.snapshot_name)
expected_calls = [mock.call(self.snap.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
@ddt.ddt
class MoverInterfaceTestCase(StorageObjectTestCaseBase):
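    """Tests for mover network interface create/get/delete."""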
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_create_mover_interface(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_succeed())
self.hook.append(self.mover.resp_task_succeed())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
context.create(interface)
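        # A name longer than 31 characters should be truncated by the
        # driver, as reflected in the expected request below.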
interface['name'] = self.mover.long_interface_name
context.create(interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
mock.call(self.mover.req_create_interface(
self.mover.long_interface_name[:31])),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_mover_interface_name_already_exist(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(
self.mover.resp_create_interface_but_name_already_exist())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
context.create(interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_create_mover_interface_ip_already_exist(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(
self.mover.resp_create_interface_but_ip_already_exist())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
context.create(interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@ddt.data(fakes.MoverTestData().resp_task_succeed(),
fakes.MoverTestData().resp_task_error())
def test_create_mover_interface_with_conflict_vlan_id(self, xml_resp):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(
self.mover.resp_create_interface_with_conflicted_vlan_id())
self.hook.append(xml_resp)
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
mock.call(self.mover.req_delete_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_mover_interface_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_succeed())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
context.create(interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_mover_interface_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_error())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
interface = {
'name': self.mover.interface_name1,
'device_name': self.mover.device_name,
'ip': self.mover.ip_address1,
'mover_name': self.mover.mover_name,
'net_mask': self.mover.net_mask,
'vlan_id': self.mover.vlan_id,
}
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
interface)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_create_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_interface(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_succeed())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(name=self.mover.interface_name1,
mover_name=self.mover.mover_name)
self.assertEqual(constants.STATUS_OK, status)
property_map = [
'name',
'device',
'up',
'ipVersion',
'netMask',
'ipAddress',
'vlanid',
]
for prop in property_map:
self.assertIn(prop, out)
context.get(name=self.mover.long_interface_name,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_mover_interface_not_found(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_get_without_value())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(name=self.mover.interface_name1,
mover_name=self.mover.mover_name)
self.assertEqual(constants.STATUS_NOT_FOUND, status)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_mover_interface(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_succeed())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(ip_addr=self.mover.ip_address1,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_delete_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_mover_interface_but_nonexistent(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_delete_interface_but_nonexistent())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(ip_addr=self.mover.ip_address1,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_delete_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_delete_mover_interface_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_succeed())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(ip_addr=self.mover.ip_address1,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_delete_interface()),
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_delete_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_delete_mover_interface_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.mover.resp_task_error())
context = self.manager.getStorageContext('MoverInterface')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
ip_addr=self.mover.ip_address1,
mover_name=self.mover.mover_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_delete_interface()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
class DNSDomainTestCase(StorageObjectTestCaseBase):
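    """Tests for DNS domain configuration on a mover."""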
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_create_dns_domain(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_task_succeed())
context = self.manager.getStorageContext('DNSDomain')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mover_name=self.mover.mover_name,
name=self.dns.domain_name,
servers=self.dns.dns_ip_address)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_dns_domain_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_task_succeed())
context = self.manager.getStorageContext('DNSDomain')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(mover_name=self.mover.mover_name,
name=self.dns.domain_name,
servers=self.dns.dns_ip_address)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_create()),
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_dns_domain_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_task_error())
context = self.manager.getStorageContext('DNSDomain')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
mover_name=self.mover.mover_name,
name=self.mover.domain_name,
servers=self.dns.dns_ip_address)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_create()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_dns_domain(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_task_succeed())
self.hook.append(self.dns.resp_task_error())
context = self.manager.getStorageContext('DNSDomain')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mover_name=self.mover.mover_name,
name=self.mover.domain_name)
context.delete(mover_name=self.mover.mover_name,
name=self.mover.domain_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_delete()),
mock.call(self.dns.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_delete_dns_domain_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.dns.resp_task_succeed())
context = self.manager.getStorageContext('DNSDomain')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(mover_name=self.mover.mover_name,
name=self.mover.domain_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_delete()),
mock.call(self.mover.req_get_ref()),
mock.call(self.dns.req_delete()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
class CIFSServerTestCase(StorageObjectTestCaseBase):
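    """Tests for CIFS server create/get/modify/delete on movers and VDMs."""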
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
def test_create_cifs_server(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
self.hook.append(self.cifs_server.resp_task_error())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True))
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Create CIFS server on mover
cifs_server_args = {
'name': self.cifs_server.cifs_server_name,
'interface_ip': self.cifs_server.ip_address1,
'domain_name': self.cifs_server.domain_name,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.mover.mover_name,
'is_vdm': False,
}
context.create(cifs_server_args)
# Create CIFS server on VDM
cifs_server_args = {
'name': self.cifs_server.cifs_server_name,
'interface_ip': self.cifs_server.ip_address1,
'domain_name': self.cifs_server.domain_name,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
'is_vdm': True,
}
context.create(cifs_server_args)
        # Create the same CIFS server on the VDM again; the task error is
        # tolerated because the follow-up get shows the server already exists.
cifs_server_args = {
'name': self.cifs_server.cifs_server_name,
'interface_ip': self.cifs_server.ip_address1,
'domain_name': self.cifs_server.domain_name,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
'is_vdm': True,
}
context.create(cifs_server_args)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_create(self.mover.mover_id, False)),
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_create(self.vdm.vdm_id)),
mock.call(self.cifs_server.req_create(self.vdm.vdm_id)),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
    def test_create_cifs_server_already_exist(self):
        self.hook.append(self.vdm.resp_get_succeed())
        self.hook.append(self.cifs_server.resp_task_error())
        self.hook.append(self.cifs_server.resp_get_succeed(
            mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True))
        context = self.manager.getStorageContext('CIFSServer')
        context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
        # Assumed exercise step: the hooks above imply create() tolerates the
        # task error once the follow-up get confirms the server exists.
        context.create({'name': self.cifs_server.cifs_server_name,
                        'interface_ip': self.cifs_server.ip_address1,
                        'domain_name': self.cifs_server.domain_name,
                        'user_name': self.cifs_server.domain_user,
                        'password': self.cifs_server.domain_password,
                        'mover_name': self.vdm.vdm_name,
                        'is_vdm': True})
@mock.patch('time.sleep')
def test_create_cifs_server_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Create CIFS server on mover
cifs_server_args = {
'name': self.cifs_server.cifs_server_name,
'interface_ip': self.cifs_server.ip_address1,
'domain_name': self.cifs_server.domain_name,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.mover.mover_name,
'is_vdm': False,
}
context.create(cifs_server_args)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_create(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_create(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_create_cifs_server_with_error(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_task_error())
self.hook.append(self.cifs_server.resp_get_error())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
# Create CIFS server on VDM
cifs_server_args = {
'name': self.cifs_server.cifs_server_name,
'interface_ip': self.cifs_server.ip_address1,
'domain_name': self.cifs_server.domain_name,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
'is_vdm': True,
}
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
cifs_server_args)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_create(self.vdm.vdm_id)),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_all_cifs_server(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True))
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True))
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get_all(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.vdm.vdm_name, context.cifs_server_map)
# Get CIFS server from the cache
status, out = context.get_all(self.vdm.vdm_name)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.vdm.vdm_name, context.cifs_server_map)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_get_all_cifs_server_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True))
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get_all(self.mover.mover_name, False)
self.assertEqual(constants.STATUS_OK, status)
self.assertIn(self.mover.mover_name, context.cifs_server_map)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_get(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_get_cifs_server(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True))
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
status, out = context.get(name=self.cifs_server.cifs_server_name,
mover_name=self.vdm.vdm_name)
self.assertEqual(constants.STATUS_OK, status)
        property_map = [
            'name',
            'compName',
            'Aliases',
            'type',
            'interfaces',
            'domain',
            'domainJoined',
            'mover',
            'moverIdIsVdm',
        ]
for prop in property_map:
self.assertIn(prop, out)
context.get(name=self.cifs_server.cifs_server_name,
mover_name=self.vdm.vdm_name)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_modify_cifs_server(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
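        # Only the last 14 characters of the server name are used here,
        # presumably to fit the NetBIOS name-length limit.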
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': True,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.mover.mover_name,
'is_vdm': False,
}
context.modify(cifs_server_args)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': False,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
}
context.modify(cifs_server_args)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_modify(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True)),
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_modify(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_modify_cifs_server_but_unjoin_domain(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_modify_but_unjoin_domain())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': False,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
}
context.modify(cifs_server_args)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_modify(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_modify_cifs_server_but_already_join_domain(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(
self.cifs_server.resp_modify_but_already_join_domain())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': True,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
}
context.modify(cifs_server_args)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_modify(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=True)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_modify_cifs_server_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_task_succeed())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': True,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.mover.mover_name,
'is_vdm': False,
}
context.modify(cifs_server_args)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_modify(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_modify(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_modify_cifs_server_with_error(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_task_error())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
cifs_server_args = {
'name': self.cifs_server.cifs_server_name[-14:],
'join_domain': False,
'user_name': self.cifs_server.domain_user,
'password': self.cifs_server.domain_password,
'mover_name': self.vdm.vdm_name,
}
self.assertRaises(exception.EMCVnxXMLAPIError,
context.modify,
cifs_server_args)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_modify(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_cifs_server(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True))
self.hook.append(self.cifs_server.resp_task_succeed())
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.vdm.vdm_id, is_vdm=True, join_domain=False))
self.hook.append(self.cifs_server.resp_task_succeed())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(computer_name=self.cifs_server.cifs_server_name,
mover_name=self.mover.mover_name,
is_vdm=False)
context.delete(computer_name=self.cifs_server.cifs_server_name,
mover_name=self.vdm.vdm_name)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_get(self.mover.mover_id, False)),
mock.call(self.cifs_server.req_delete(self.mover.mover_id, False)),
mock.call(self.vdm.req_get()),
mock.call(self.cifs_server.req_get(self.vdm.vdm_id)),
mock.call(self.cifs_server.req_delete(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_cifs_server_but_not_found(self):
self.hook.append(self.mover.resp_get_without_value())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_get_without_value())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(computer_name=self.cifs_server.cifs_server_name,
mover_name=self.mover.mover_name,
is_vdm=False)
context.delete(computer_name=self.cifs_server.cifs_server_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_get(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_cifs_server_with_error(self):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_server.resp_get_succeed(
mover_id=self.mover.mover_id, is_vdm=False, join_domain=True))
self.hook.append(self.cifs_server.resp_task_error())
context = self.manager.getStorageContext('CIFSServer')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
computer_name=self.cifs_server.cifs_server_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_server.req_get(self.mover.mover_id, False)),
mock.call(self.cifs_server.req_delete(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
class CIFSShareTestCase(StorageObjectTestCaseBase):
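    """Tests for CIFS share create/delete over XML and SSH."""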
def setUp(self):
super(self.__class__, self).setUp()
self.hook = utils.RequestSideEffect()
self.ssh_hook = utils.SSHSideEffect()
def test_create_cifs_share(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.cifs_share.share_name,
server_name=self.cifs_share.cifs_server_name[-14:],
mover_name=self.vdm.vdm_name,
is_vdm=True)
context.create(name=self.cifs_share.share_name,
server_name=self.cifs_share.cifs_server_name[-14:],
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_share.req_create(self.vdm.vdm_id)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_create(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_create_cifs_share_invalid_mover_id(self, sleep_mock):
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.create(name=self.cifs_share.share_name,
server_name=self.cifs_share.cifs_server_name[-14:],
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_create(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_create(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
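    # Descriptive note: the test above (and its delete counterpart further
    # down) exercises an assumed stale-id recovery path in the driver. When
    # the backend answers with an "invalid mover id" fault, the driver is
    # expected to refresh the mover reference and retry the request once,
    # sleeping in between, which is why the req_get_ref/req_create pair
    # appears twice and time.sleep is asserted to have been called.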
def test_create_cifs_share_with_error(self):
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_share.resp_task_error())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
name=self.cifs_share.share_name,
server_name=self.cifs_share.cifs_server_name[-14:],
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.vdm.req_get()),
mock.call(self.cifs_share.req_create(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_cifs_share(self):
self.hook.append(self.cifs_share.resp_get_succeed(self.vdm.vdm_id))
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
self.hook.append(self.cifs_share.resp_get_succeed(self.mover.mover_id,
False))
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
context.delete(name=self.cifs_share.share_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.cifs_share.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.cifs_share.req_delete(self.vdm.vdm_id)),
mock.call(self.cifs_share.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_delete(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_delete_cifs_share_not_found(self):
self.hook.append(self.cifs_share.resp_get_error())
self.hook.append(self.cifs_share.resp_get_without_value())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
context.delete(name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.cifs_share.req_get()),
mock.call(self.cifs_share.req_get()),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
@mock.patch('time.sleep')
def test_delete_cifs_share_invalid_mover_id(self, sleep_mock):
self.hook.append(self.cifs_share.resp_get_succeed(self.mover.mover_id,
False))
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_invalid_mover_id())
self.hook.append(self.mover.resp_get_ref_succeed())
self.hook.append(self.cifs_share.resp_task_succeed())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.delete(name=self.cifs_share.share_name,
mover_name=self.mover.mover_name,
is_vdm=False)
expected_calls = [
mock.call(self.cifs_share.req_get()),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_delete(self.mover.mover_id, False)),
mock.call(self.mover.req_get_ref()),
mock.call(self.cifs_share.req_delete(self.mover.mover_id, False)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
self.assertTrue(sleep_mock.called)
def test_delete_cifs_share_with_error(self):
self.hook.append(self.cifs_share.resp_get_succeed(self.vdm.vdm_id))
self.hook.append(self.vdm.resp_get_succeed())
self.hook.append(self.cifs_share.resp_task_error())
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name,
is_vdm=True)
expected_calls = [
mock.call(self.cifs_share.req_get()),
mock.call(self.vdm.req_get()),
mock.call(self.cifs_share.req_delete(self.vdm.vdm_id)),
]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_get_cifs_share(self):
self.hook.append(self.cifs_share.resp_get_succeed(self.vdm.vdm_id))
context = self.manager.getStorageContext('CIFSShare')
context.conn['XML'].request = utils.EMCMock(side_effect=self.hook)
context.get(self.cifs_share.share_name)
expected_calls = [mock.call(self.cifs_share.req_get())]
context.conn['XML'].request.assert_has_calls(expected_calls)
def test_disable_share_access(self):
self.ssh_hook.append('Command succeeded')
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.disable_share_access(share_name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.cifs_share.cmd_disable_access(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_disable_share_access_with_error(self):
self.ssh_hook.append(ex=processutils.ProcessExecutionError(
stdout=self.cifs_share.fake_output))
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.disable_share_access,
share_name=self.cifs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.cifs_share.cmd_disable_access(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_allow_share_access(self):
self.ssh_hook.append(self.cifs_share.output_allow_access())
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.allow_share_access(mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [mock.call(self.cifs_share.cmd_change_access(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_allow_share_access_duplicate_ACE(self):
expt_dup_ace = processutils.ProcessExecutionError(
stdout=self.cifs_share.output_allow_access_but_duplicate_ace())
self.ssh_hook.append(ex=expt_dup_ace)
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.allow_share_access(mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [mock.call(self.cifs_share.cmd_change_access(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_allow_share_access_with_error(self):
expt_err = processutils.ProcessExecutionError(
self.cifs_share.fake_output)
self.ssh_hook.append(ex=expt_err)
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.allow_share_access,
mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [mock.call(self.cifs_share.cmd_change_access(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_share_access(self):
self.ssh_hook.append('Command succeeded')
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.deny_share_access(mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [
mock.call(self.cifs_share.cmd_change_access(action='revoke'),
True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_share_access_no_ace(self):
expt_no_ace = processutils.ProcessExecutionError(
stdout=self.cifs_share.output_deny_access_but_no_ace())
self.ssh_hook.append(ex=expt_no_ace)
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.deny_share_access(mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [
mock.call(self.cifs_share.cmd_change_access(action='revoke'),
True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_share_access_but_no_user_found(self):
expt_no_user = processutils.ProcessExecutionError(
stdout=self.cifs_share.output_deny_access_but_no_user_found())
self.ssh_hook.append(ex=expt_no_user)
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.deny_share_access(mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [
mock.call(self.cifs_share.cmd_change_access(action='revoke'),
True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_share_access_with_error(self):
expt_err = processutils.ProcessExecutionError(
self.cifs_share.fake_output)
self.ssh_hook.append(ex=expt_err)
context = self.manager.getStorageContext('CIFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.deny_share_access,
mover_name=self.vdm.vdm_name,
share_name=self.cifs_share.share_name,
user_name=self.cifs_server.domain_user,
domain=self.cifs_server.domain_name,
access=constants.CIFS_ACL_FULLCONTROL)
ssh_calls = [
mock.call(self.cifs_share.cmd_change_access(action='revoke'),
True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
class NFSShareTestCase(StorageObjectTestCaseBase):
def setUp(self):
        super(NFSShareTestCase, self).setUp()
self.ssh_hook = utils.SSHSideEffect()
def test_create_nfs_share(self):
self.ssh_hook.append(self.nfs_share.output_create())
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.create(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_create(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_create_nfs_share_with_error(self):
expt_err = processutils.ProcessExecutionError(
stdout=self.nfs_share.fake_output)
self.ssh_hook.append(ex=expt_err)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.create,
name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_create(), True)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_delete_nfs_share(self):
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
self.ssh_hook.append(self.nfs_share.output_delete_succeed())
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.delete(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get(), False),
mock.call(self.nfs_share.cmd_delete(), True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_delete_nfs_share_not_found(self):
expt_not_found = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_get_but_not_found())
self.ssh_hook.append(ex=expt_not_found)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.delete(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_get(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
@mock.patch('time.sleep')
def test_delete_nfs_share_locked(self, sleep_mock):
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
expt_locked = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_delete_but_locked())
self.ssh_hook.append(ex=expt_locked)
self.ssh_hook.append(self.nfs_share.output_delete_succeed())
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.delete(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get(), False),
mock.call(self.nfs_share.cmd_delete(), True),
mock.call(self.nfs_share.cmd_delete(), True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
self.assertTrue(sleep_mock.called)
def test_delete_nfs_share_with_error(self):
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
expt_err = processutils.ProcessExecutionError(
stdout=self.nfs_share.fake_output)
self.ssh_hook.append(ex=expt_err)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.delete,
name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get(), False),
mock.call(self.nfs_share.cmd_delete(), True),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_get_nfs_share(self):
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.get(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
# Get NFS share from cache
context.get(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_get(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_get_nfs_share_not_found(self):
expt_not_found = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_get_but_not_found())
self.ssh_hook.append(ex=expt_not_found)
self.ssh_hook.append(self.nfs_share.output_get_but_not_found())
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
context.get(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
context.get(name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get(), False),
mock.call(self.nfs_share.cmd_get(), False),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_get_nfs_share_with_error(self):
expt_err = processutils.ProcessExecutionError(
stdout=self.nfs_share.fake_output)
self.ssh_hook.append(ex=expt_err)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = mock.Mock(side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.get,
name=self.nfs_share.share_name,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_get(), False)]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
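    # Descriptive note for the access-control tests below: NFS permissions
    # on the backend are assumed to be managed as whole host lists. Each
    # test copies rw_hosts/ro_hosts, appends the client IP to the list that
    # matches the requested access level, and expects cmd_set_access() to
    # rewrite the export with the updated lists; the cmd_get() calls in
    # between model the driver re-reading the share state.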
def test_allow_share_access(self):
rw_hosts = copy.deepcopy(self.nfs_share.rw_hosts)
rw_hosts.append(self.nfs_share.nfs_host_ip)
ro_hosts = copy.deepcopy(self.nfs_share.ro_hosts)
ro_hosts.append(self.nfs_share.nfs_host_ip)
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
self.ssh_hook.append(self.nfs_share.output_set_access_success())
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts))
self.ssh_hook.append(self.nfs_share.output_set_access_success())
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts, ro_hosts=ro_hosts))
self.ssh_hook.append(self.nfs_share.output_set_access_success())
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts))
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
context.allow_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name,
access_level=const.ACCESS_LEVEL_RW)
context.allow_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name,
access_level=const.ACCESS_LEVEL_RO)
context.allow_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name,
access_level=const.ACCESS_LEVEL_RW)
context.allow_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name,
access_level=const.ACCESS_LEVEL_RW)
ssh_calls = [
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts)),
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(
rw_hosts=self.nfs_share.rw_hosts, ro_hosts=ro_hosts)),
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts)),
mock.call(self.nfs_share.cmd_get()),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_allow_share_access_not_found(self):
expt_not_found = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_get_but_not_found())
self.ssh_hook.append(ex=expt_not_found)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.allow_share_access,
share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name,
access_level=const.ACCESS_LEVEL_RW)
ssh_calls = [mock.call(self.nfs_share.cmd_get())]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_rw_share_access(self):
rw_hosts = copy.deepcopy(self.nfs_share.rw_hosts)
rw_hosts.append(self.nfs_share.nfs_host_ip)
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts))
self.ssh_hook.append(self.nfs_share.output_set_access_success())
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
context.deny_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(self.nfs_share.rw_hosts,
self.nfs_share.ro_hosts)),
mock.call(self.nfs_share.cmd_get()),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_ro_share_access(self):
ro_hosts = copy.deepcopy(self.nfs_share.ro_hosts)
ro_hosts.append(self.nfs_share.nfs_host_ip)
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts, ro_hosts=ro_hosts))
self.ssh_hook.append(self.nfs_share.output_set_access_success())
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=self.nfs_share.rw_hosts,
ro_hosts=self.nfs_share.ro_hosts))
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
context.deny_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name)
context.deny_share_access(share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(self.nfs_share.rw_hosts,
self.nfs_share.ro_hosts)),
mock.call(self.nfs_share.cmd_get()),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_share_not_found(self):
expt_not_found = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_get_but_not_found())
self.ssh_hook.append(ex=expt_not_found)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.deny_share_access,
share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name)
ssh_calls = [mock.call(self.nfs_share.cmd_get())]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
def test_deny_rw_share_with_error(self):
rw_hosts = copy.deepcopy(self.nfs_share.rw_hosts)
rw_hosts.append(self.nfs_share.nfs_host_ip)
self.ssh_hook.append(self.nfs_share.output_get_succeed(
rw_hosts=rw_hosts, ro_hosts=self.nfs_share.ro_hosts))
expt_not_found = processutils.ProcessExecutionError(
stdout=self.nfs_share.output_get_but_not_found())
self.ssh_hook.append(ex=expt_not_found)
context = self.manager.getStorageContext('NFSShare')
context.conn['SSH'].run_ssh = utils.EMCNFSShareMock(
side_effect=self.ssh_hook)
self.assertRaises(exception.EMCVnxXMLAPIError,
context.deny_share_access,
share_name=self.nfs_share.share_name,
host_ip=self.nfs_share.nfs_host_ip,
mover_name=self.vdm.vdm_name)
ssh_calls = [
mock.call(self.nfs_share.cmd_get()),
mock.call(self.nfs_share.cmd_set_access(self.nfs_share.rw_hosts,
self.nfs_share.ro_hosts)),
]
context.conn['SSH'].run_ssh.assert_has_calls(ssh_calls)
| apache-2.0 | 3,176,923,940,610,800,600 | 39.678005 | 79 | 0.601164 | false | 3.488042 | true | false | false |
Agiliza/AgilizaFramework | tests/agiliza/core/utils/patterns/test_singleton.py | 1 | 1894 | """
This file is part of Agiliza.
Agiliza is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Agiliza is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Agiliza. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) 2012 Vicente Ruiz <[email protected]>
"""
import unittest
from agiliza.core.utils.patterns import Singleton
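# For orientation only -- a minimal sketch of the Singleton contract these
# tests assume (the real class lives in agiliza.core.utils.patterns and may
# differ in detail):
#
#     class Singleton(object):
#         _instance = None
#
#         def __new__(cls, *args, **kwargs):
#             if cls._instance is None:
#                 cls._instance = super(Singleton, cls).__new__(cls)
#             return cls._instance
#
#         @classmethod
#         def getInstance(cls, *args, **kwargs):
#             return cls(*args, **kwargs)
#
#         @classmethod
#         def invalidateInstance(cls):
#             cls._instance = None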
class SingletonTest(unittest.TestCase):
def test_must_retrieve_the_same_instance(self):
class SingletonExample(Singleton): pass
instance1 = SingletonExample()
instance2 = SingletonExample.getInstance()
self.assertEqual(
instance1, instance2,
"Singleton makes different instances"
)
def test_must_retrieve_the_same_instance_multiple_times(self):
class SingletonExample(Singleton): pass
instance1 = SingletonExample()
SingletonExample()
SingletonExample()
instance2 = SingletonExample()
self.assertEqual(
instance1, instance2,
"Singleton makes different instances"
)
def test_must_invalidate_a_instance(self):
class SingletonExample(Singleton): pass
instance1 = SingletonExample.getInstance()
SingletonExample.invalidateInstance()
instance2 = SingletonExample()
self.assertNotEqual(
instance1, instance2,
"Singleton does not invalidate instances"
)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 | -4,045,939,752,745,946,600 | 27.69697 | 68 | 0.69377 | false | 4.354023 | true | false | false |
dhalleine/tensorflow | tensorflow/contrib/learn/python/learn/experiment.py | 1 | 6100 | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Experiment class collecting information needed for a single training run."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
from tensorflow.python.platform import tf_logging as logging
class Experiment(object):
"""Experiment is a class containing all information needed to train a model.
"""
def __init__(self,
estimator,
train_input_fn,
eval_input_fn,
eval_metrics=None,
train_steps=None,
eval_steps=100,
train_monitors=None):
"""Constructor for `Experiment`.
Args:
estimator: `Estimator` object.
train_input_fn: function, returns features and targets for training.
eval_input_fn: function, returns features and targets for evaluation. If
        `eval_steps` is `None`, this should be configured only to produce a
finite number of batches (generally, 1 epoch over the evaluation data).
eval_metrics: `dict` of string, metric function. If `None`, default set
is used.
train_steps: Perform this many steps of training. `None`, the default,
means train forever.
eval_steps: `evaluate` runs until input is exhausted (or another exception
is raised), or for `eval_steps` steps, if specified.
train_monitors: A list of monitors to pass to the `Estimator`'s `fit`
function.
"""
super(Experiment, self).__init__()
self._estimator = estimator
self._train_input_fn = train_input_fn
self._eval_input_fn = eval_input_fn
self._eval_metrics = eval_metrics
self._train_steps = train_steps
self._eval_steps = eval_steps
self._train_monitors = train_monitors
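  # Illustrative usage sketch (comment only; `my_estimator` and the two
  # input_fn callables are placeholders, not part of this module):
  #
  #   experiment = Experiment(estimator=my_estimator,
  #                           train_input_fn=my_train_input_fn,
  #                           eval_input_fn=my_eval_input_fn,
  #                           train_steps=10000,
  #                           eval_steps=100)
  #   experiment.local_run()  # fit the estimator, then run one evaluation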
def train(self, delay_secs=0):
"""Fit the estimator using the training data.
    Train the estimator for `train_steps` steps, after waiting for
    `delay_secs` seconds. If `train_steps` is `None`, train forever.
Args:
delay_secs: Start training after this many seconds.
Returns:
The trained estimator.
"""
if delay_secs:
logging.info("Waiting %d secs before starting training.", delay_secs)
time.sleep(delay_secs)
return self._estimator.fit(input_fn=self._train_input_fn,
steps=self._train_steps,
monitors=self._train_monitors)
def evaluate(self, delay_secs=0):
"""Evaluate on the evaluation data.
    Runs evaluation on the evaluation data and returns the result. If
    `eval_steps` is given, only run for this many steps. Otherwise run until input is
exhausted, or another exception is raised. Start the evaluation after
`delay_secs` seconds.
Args:
delay_secs: Start evaluating after waiting for this many seconds.
Returns:
The result of the `evaluate` call to the `Estimator`.
"""
if delay_secs:
logging.info("Waiting %d secs before starting eval.", delay_secs)
time.sleep(delay_secs)
return self._estimator.evaluate(input_fn=self._eval_input_fn,
steps=self._eval_steps,
metrics=self._eval_metrics,
name="one_pass")
def local_run(self):
"""Run when called on local machine.
Returns:
The result of the `evaluate` call to the `Estimator`.
"""
# TODO(ipolosukhin): Add a ValidationMonitor to run in-training evaluation.
self.train()
return self.evaluate()
def _continuous_eval(self,
input_fn,
name,
delay_secs=0,
throttle_delay_secs=60):
"""Run continuous eval.
    Run `eval_steps` steps of evaluation on the evaluation data set. This function
starts evaluating after `delay_secs` seconds and then runs no more than one
evaluation per `throttle_delay_secs`. It never returns.
Args:
input_fn: The input to use for this eval.
name: A string appended to the folder name of evaluation results.
delay_secs: Start evaluating after this many seconds.
throttle_delay_secs: Do not re-evaluate unless the last evaluation was
started at least this many seconds ago.
"""
if delay_secs:
logging.info("Waiting %f secs before starting eval.", delay_secs)
time.sleep(delay_secs)
while True:
start = time.time()
self._estimator.evaluate(input_fn=input_fn,
steps=self._eval_steps,
metrics=self._eval_metrics,
name=name)
duration = time.time() - start
if duration < throttle_delay_secs:
difference = throttle_delay_secs - duration
logging.info("Waiting %f secs before starting next eval run.",
difference)
time.sleep(difference)
def continuous_eval(self, delay_secs=0, throttle_delay_secs=60):
self._continuous_eval(self._eval_input_fn,
name="continuous",
delay_secs=delay_secs,
throttle_delay_secs=throttle_delay_secs)
def continuous_eval_on_train_data(self, delay_secs=0, throttle_delay_secs=60):
self._continuous_eval(self._train_input_fn,
name="continuous_on_train_data",
delay_secs=delay_secs,
throttle_delay_secs=throttle_delay_secs)
| apache-2.0 | 2,442,779,366,369,398,300 | 36.423313 | 80 | 0.627705 | false | 4.379038 | false | false | false |
faroit/loudness | python/tests/test_OME.py | 1 | 2084 | import numpy as np
import matplotlib.pyplot as plt
import loudness as ln
def plotResponse(freqPoints, dataPoints,
freqsInterp, responseInterp,
ylim=(-40, 10), title = ""):
if np.any(dataPoints):
plt.semilogx(freqPoints, dataPoints, 'o')
plt.semilogx(freqsInterp, responseInterp)
plt.xlim(20, 20e3)
plt.ylim(ylim)
plt.xlabel("Frequency, Hz")
plt.ylabel("Response, dB")
plt.title(title)
plt.show()
def plotMiddleEar(filterType, ylim=(-40, 0)):
freqs = np.arange(20, 20000, 2)
ome = ln.OME(filterType, ln.OME.NONE)
ome.interpolateResponse(freqs)
response = ome.getResponse()
freqPoints = ome.getMiddleEarFreqPoints()
dataPoints = ome.getMiddleEardB()
plotResponse(freqPoints, dataPoints,
freqs, response, ylim)
def plotOuterEar(filterType, ylim=(-40, 0)):
freqs = np.arange(20, 20000, 2)
ome = ln.OME(ln.OME.NONE, filterType)
ome.interpolateResponse(freqs)
response = ome.getResponse()
freqPoints = ome.getOuterEarFreqPoints()
dataPoints = ome.getOuterEardB()
plotResponse(freqPoints, dataPoints,
freqs, response, ylim)
def plotCombined(middleFilterType, outerFilterType, ylim=(-40, 10)):
freqs = np.arange(20, 20000, 2)
ome = ln.OME(middleFilterType, outerFilterType)
ome.interpolateResponse(freqs)
response = ome.getResponse()
plotResponse(None, None,
freqs, response, ylim)
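# Script section: each filter type gets its own figure below. plotResponse
# calls plt.show() for every figure, so the plots appear one at a time and
# the next one is drawn after the current window is closed. The ylim tuples
# are hand-picked display ranges for each response, not library defaults.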
plt.figure(1)
plotMiddleEar(ln.OME.ANSIS342007_MIDDLE_EAR, (-40, 0))
plt.figure(2)
plotMiddleEar(ln.OME.CHGM2011_MIDDLE_EAR, (-40, 10))
plt.figure(3)
plotMiddleEar(ln.OME.ANSIS342007_MIDDLE_EAR_HPF, (-40, 0))
plt.figure(4)
plotOuterEar(ln.OME.ANSIS342007_FREEFIELD, (-5, 20))
plt.figure(5)
plotOuterEar(ln.OME.ANSIS342007_DIFFUSEFIELD, (-5, 20))
plt.figure(6)
plotOuterEar(ln.OME.BD_DT990, (-10, 10))
plt.figure(7)
plotCombined(ln.OME.ANSIS342007_MIDDLE_EAR,
             ln.OME.ANSIS342007_FREEFIELD, (-40, 10))
plt.figure(8)
plotCombined(ln.OME.ANSIS342007_MIDDLE_EAR, ln.OME.BD_DT990, (-40, 10))
| gpl-3.0 | -6,410,065,352,474,172,000 | 30.104478 | 71 | 0.678503 | false | 2.843111 | false | false | false |
Boussadia/SimpleScraper | scraper.py | 1 | 6078 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import time
import logging
import sys
import urllib
import mechanize
import cookielib
import urlparse
class Singleton(object):
_instances = {}
def __new__(class_, *args, **kwargs):
if class_ not in class_._instances:
class_._instances[class_] = super(Singleton, class_).__new__(class_, *args, **kwargs)
return class_._instances[class_]
class BaseScraper(Singleton):
"""
Base Crawler class.
The Crawler has to perform:
- GET methods
- POST methods
	- cookie handling
	In order to account for network failures, it will retry a certain number of times.
	Every time a page is fetched, a status code is returned as well; here are the definitions of the codes:
	- -1 : network failure (after the maximum number of retries)
	- 200 : everything is OK
	- 404 : page not found
	- 500 : server error
"""
# The number of times the crawler has to retry to fetch html page when a network failure error occurs
MAX_NETWORK_FAILURE_TRIES = 10
INTERVAL = 2
	# minimum interval between two http requests, in seconds
def __init__(self):
# Mechanize Browser
self.browser = mechanize.Browser()
# Cookie Jar
self.jar = cookielib.LWPCookieJar()
self.browser.set_cookiejar(self.jar)
# Browser options
self.browser.set_handle_equiv(True)
# self.browser.set_handle_gzip(True)
self.browser.set_handle_redirect(True)
self.browser.set_handle_referer(True)
self.browser.set_handle_robots(False)
# Follows refresh 0 but not hangs on refresh > 0
self.browser.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
self.__network_failures_retry__ = 0
# time of last http request
self.last_time = 0
def do_request(self, url ='', data = {}, request = None, is_post = False, url_fix = True):
"""
Base method to perform a request to a url.
Input :
			- url (string) : url of the page to retrieve
			- data (hash {'param': 'value_param'}) : data to send to the server; if empty, it is not taken into account
			- request (mechanize.Request) : optional pre-built request object, used instead of url when given
			- is_post (bool) : if True, send a POST instead of a GET
			- url_fix (bool) : if True, sanitize the url before requesting
Output:
- (html, code) : html as string and code as defined in the class docstring.
"""
if url_fix:
# Making sure it is utf8 encoded
url = self.url_fix(url)
		# Requests cannot happen within a certain lapse of time (INTERVAL seconds between two requests)
now = time.time()
if now-self.last_time<BaseScraper.INTERVAL:
print 'Waiting %d ms in order not to flood server'%((BaseScraper.INTERVAL+self.last_time-now)*1000)
time.sleep(BaseScraper.INTERVAL+self.last_time-now)
return self.do_request( url, data, request, is_post= is_post, url_fix = url_fix)
self.last_time = now
# Encapsulating request in try block in order to catch HTTPError
try:
if request is not None:
self.jar.add_cookie_header(request)
response = self.browser.open(request)
print "Fetching page from "+response.geturl()
print "Using personalized Request"
html = response.read()
elif not is_post:
print "Fetching page from "+url
print "GET method used"
response = self.browser.open(url)
html = response.read()
else:
print "Fetching page from "+url
print "POST method used"
form_data = urllib.urlencode(data)
response = self.browser.open(url, form_data)
html = response.read()
self.__network_failures_retry__ = 0 # Everything went OK, setting variable for network failure to 0
return html, 200
except mechanize.HTTPError, e:
if e.code == 404:
print "Error when retrieving "+url+" : page not found."
return None, 404
else:
print 'Error : %s'%(e)
self.__network_failures_retry__ = self.__network_failures_retry__ + 1
if self.__network_failures_retry__ < BaseScraper.MAX_NETWORK_FAILURE_TRIES:
print "Error occured, retrying in "+str(self.__network_failures_retry__)+" s"
time.sleep(self.__network_failures_retry__)
return self.do_request(url, data, is_post = is_post, url_fix = url_fix)
else:
print "Error when retrieving "+url
return None, e.code
except mechanize.URLError, e:
print 'Error : %s'%(e)
self.__network_failures_retry__ = self.__network_failures_retry__ + 1
if self.__network_failures_retry__ < BaseScraper.MAX_NETWORK_FAILURE_TRIES:
print "Error occured, retrying in "+str(self.__network_failures_retry__)+" s"
time.sleep(self.__network_failures_retry__)
return self.do_request(url, data, is_post = is_post, url_fix = url_fix)
else:
print "Error when retrieving "+url
return None, -1
except Exception, e:
			print 'Unexpected error occurred.'
print e
return None, -1
def get(self,url, url_fix = True):
"""
Executes a GET url fetch.
"""
return self.do_request(url, url_fix = url_fix)
def post(self, url, data = {}, url_fix = True):
"""
Executes a POST url fetch.
"""
return self.do_request(url, data = data, is_post=True, url_fix = url_fix)
def empty_cookie_jar(self):
"""
Removing all cookies from cookie jar
"""
self.jar.clear()
def get_cookie(self, name = None):
"""
Get cookie by name
Input :
- name (string) : name of cookie.
Output :
- hash : {
'name': ...,
'value': ...
}
"""
cookie = {}
if name:
for c in self.jar:
if name == c.name:
cookie['name'] = c.name
cookie['value'] = c.value
return cookie
def url_fix(self, s, charset='utf-8'):
"""
		Sometimes you get a URL from a user that just isn't a real
URL because it contains unsafe characters like ' ' and so on. This
function can fix some of the problems in a similar way browsers
handle data entered by the user:
>>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffsklärung)')
'http://de.wikipedia.org/wiki/Elf%20%28Begriffskl%C3%A4rung%29'
:param charset: The target charset for the URL if the url was
given as unicode string.
"""
if isinstance(s, unicode):
s = s.encode(charset, 'ignore')
scheme, netloc, path, qs, anchor = urlparse.urlsplit(s)
path = urllib.quote(path, '/%')
qs = urllib.quote_plus(qs, ':&=')
return urlparse.urlunsplit((scheme, netloc, path, qs, anchor))
| mit | 2,890,036,489,779,167,000 | 29.385 | 115 | 0.666776 | false | 3.190026 | false | false | false |
awemulya/fieldsight-kobocat | onadata/apps/fsforms/models.py | 1 | 37515 | from __future__ import unicode_literals
import datetime
import os
import json
import re
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import GenericRelation
from django.contrib.postgres.fields import ArrayField
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models import Max
from django.db.models.signals import post_save, pre_delete
from django.utils.translation import ugettext_lazy as _
from django.dispatch import receiver
from jsonfield import JSONField
from pyxform import create_survey_from_xls, SurveyElementBuilder
from pyxform.xform2json import create_survey_element_from_xml
from xml.dom import Node
from onadata.apps.fieldsight.models import Site, Project, Organization
from onadata.apps.fsforms.fieldsight_models import IntegerRangeField
from onadata.apps.fsforms.utils import send_message, send_message_project_form, check_version
from onadata.apps.logger.models import XForm, Instance
from onadata.apps.logger.xform_instance_parser import clean_and_parse_xml
from onadata.apps.viewer.models import ParsedInstance
from onadata.apps.fsforms.fsxform_responses import get_instances_for_field_sight_form
from onadata.settings.local_settings import XML_VERSION_MAX_ITER
# To get the domain, so complete URLs can be handed to app developers.
from django.contrib.sites.models import Site as DjangoSite
from onadata.libs.utils.model_tools import set_uuid
SHARED_LEVEL = [(0, 'Global'), (1, 'Organization'), (2, 'Project'),]
SCHEDULED_LEVEL = [(0, 'Daily'), (1, 'Weekly'), (2, 'Monthly'),]
FORM_STATUS = [(0, 'Pending'), (1, 'Rejected'), (2, 'Flagged'), (3, 'Approved'), ]
class FormGroup(models.Model):
name = models.CharField(max_length=256, unique=True)
description = models.TextField(blank=True, null=True)
date_created = models.DateTimeField(auto_now_add=True)
date_modified = models.DateTimeField(auto_now=True)
creator = models.ForeignKey(User, related_name="form_group")
is_global = models.BooleanField(default=False)
organization = models.ForeignKey(Organization, null=True, blank=True)
project = models.ForeignKey(Project, null=True, blank=True)
logs = GenericRelation('eventlog.FieldSightLog')
class Meta:
db_table = 'fieldsight_forms_group'
verbose_name = _("FieldSight Form Group")
verbose_name_plural = _("FieldSight Form Groups")
ordering = ("-date_modified",)
def __unicode__(self):
return getattr(self, "name", "")
class Stage(models.Model):
name = models.CharField(max_length=256)
description = models.TextField(blank=True, null=True)
group = models.ForeignKey(FormGroup,related_name="stage", null=True, blank=True)
order = IntegerRangeField(min_value=0, max_value=30,default=0)
stage = models.ForeignKey('self', blank=True, null=True, related_name="parent")
shared_level = models.IntegerField(default=2, choices=SHARED_LEVEL)
date_created = models.DateTimeField(auto_now_add=True)
date_modified = models.DateTimeField(auto_now=True)
site = models.ForeignKey(Site, related_name="stages", null=True, blank=True)
project = models.ForeignKey(Project, related_name="stages", null=True, blank=True)
ready = models.BooleanField(default=False)
project_stage_id = models.IntegerField(default=0)
weight = models.IntegerField(default=0)
tags = ArrayField(models.IntegerField(), default=[])
logs = GenericRelation('eventlog.FieldSightLog')
class Meta:
db_table = 'fieldsight_forms_stage'
verbose_name = _("FieldSight Form Stage")
verbose_name_plural = _("FieldSight Form Stages")
ordering = ("order",)
def save(self, *args, **kwargs):
if self.stage:
self.group = self.stage.group
super(Stage, self).save(*args, **kwargs)
def get_display_name(self):
return "Stage" if not self.stage else "SubStage"
def is_main_stage(self):
return True if not self.stage else False
def sub_stage_count(self):
if not self.stage:
return Stage.objects.filter(stage=self).count()
return 0
def form_exists(self):
return True if FieldSightXF.objects.filter(stage=self).count() > 0 else False
def form_name(self):
if not FieldSightXF.objects.filter(stage=self).count():
return ""
return FieldSightXF.objects.filter(stage=self)[0].xf.title
def form(self):
if not FieldSightXF.objects.filter(stage=self).count():
return None
return FieldSightXF.objects.filter(stage=self)[0]
def active_substages(self):
return self.parent.filter(stage_forms__isnull=False)
def get_sub_stage_list(self):
if not self.stage:
return Stage.objects.filter(stage=self).values('stage_forms__id','name','stage_id')
return []
@property
def xf(self):
return FieldSightXF.objects.filter(stage=self)[0].xf.pk if self.form_exists() else None
@property
def form_status(self):
status = 0
if self.stage_forms.site_form_instances.filter(form_status=3).exists():
status = 1
return status
@property
def form_count(self):
return self.stage_forms.site_form_instances.all().count()
@staticmethod
def site_submission_count(id, site_id):
return Stage.objects.get(pk=id).stage_forms.project_form_instances.filter(site_id=site_id).count()
@staticmethod
def rejected_submission_count(id, site_id):
return Stage.objects.get(pk=id).stage_forms.project_form_instances.filter(form_status=1, site_id=site_id).count()
@staticmethod
def flagged_submission_count(id, site_id):
return Stage.objects.get(pk=id).stage_forms.project_form_instances.filter(form_status=2, site_id=site_id).count()
@classmethod
def get_order(cls, site, project, stage):
if site:
if not Stage.objects.filter(site=site).exists():
return 1
elif stage is not None:
if not Stage.objects.filter(stage=stage).exists():
return 1
else:
mo = Stage.objects.filter(stage=stage).aggregate(Max('order'))
order = mo.get('order__max', 0)
return order + 1
else:
mo = Stage.objects.filter(site=site, stage__isnull=True).aggregate(Max('order'))
order = mo.get('order__max', 0)
return order + 1
else:
if not Stage.objects.filter(project=project).exists():
return 1
elif stage is not None:
if not Stage.objects.filter(stage=stage).exists():
return 1
else:
mo = Stage.objects.filter(stage=stage).aggregate(Max('order'))
order = mo.get('order__max', 0)
return order + 1
else:
mo = Stage.objects.filter(project=project, stage__isnull=True).aggregate(Max('order'))
order = mo.get('order__max', 0)
return order + 1
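    # Worked example of get_order() above (hypothetical values): if a
    # project's main stages currently have orders 1 and 2, the next main
    # stage gets order 3; if a main stage's substages have orders 1..4, the
    # next substage gets 5. Orders are max(existing order) + 1 per parent
    # and start at 1 when the parent has no stages yet.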
def __unicode__(self):
return getattr(self, "name", "")
class Days(models.Model):
day = models.CharField(max_length=9)
index = models.IntegerField()
def __unicode__(self):
return getattr(self, "day", "")
class Schedule(models.Model):
name = models.CharField("Schedule Name", max_length=256, blank=True, null=True)
site = models.ForeignKey(Site, related_name="schedules", null=True, blank=True)
project = models.ForeignKey(Project, related_name="schedules", null=True, blank=True)
date_range_start = models.DateField(default=datetime.date.today)
date_range_end = models.DateField(default=datetime.date.today)
selected_days = models.ManyToManyField(Days, related_name='days', blank=True,)
shared_level = models.IntegerField(default=2, choices=SHARED_LEVEL)
schedule_level_id = models.IntegerField(default=0, choices=SCHEDULED_LEVEL)
date_created = models.DateTimeField(auto_now_add=True)
logs = GenericRelation('eventlog.FieldSightLog')
class Meta:
db_table = 'fieldsight_forms_schedule'
verbose_name = _("Form Schedule")
verbose_name_plural = _("Form Schedules")
ordering = ('-date_range_start', 'date_range_end')
def form_exists(self):
return True if FieldSightXF.objects.filter(schedule=self).count() > 0 else False
def form(self):
return FieldSightXF.objects.filter(schedule=self)[0] if self.form_exists() else None
@property
def xf(self):
return FieldSightXF.objects.filter(schedule=self)[0].xf.pk if self.form_exists() else None
def __unicode__(self):
return getattr(self, "name", "")
class DeletedXForm(models.Model):
xf = models.OneToOneField(XForm, related_name="deleted_xform")
date_created = models.DateTimeField(auto_now=True)
class FieldSightXF(models.Model):
xf = models.ForeignKey(XForm, related_name="field_sight_form")
site = models.ForeignKey(Site, related_name="site_forms", null=True, blank=True)
project = models.ForeignKey(Project, related_name="project_forms", null=True, blank=True)
is_staged = models.BooleanField(default=False)
is_scheduled = models.BooleanField(default=False)
date_created = models.DateTimeField(auto_now=True)
date_modified = models.DateTimeField(auto_now=True)
schedule = models.OneToOneField(Schedule, blank=True, null=True, related_name="schedule_forms")
stage = models.OneToOneField(Stage, blank=True, null=True, related_name="stage_forms")
shared_level = models.IntegerField(default=2, choices=SHARED_LEVEL)
form_status = models.IntegerField(default=0, choices=FORM_STATUS)
fsform = models.ForeignKey('self', blank=True, null=True, related_name="parent")
is_deployed = models.BooleanField(default=False)
is_deleted = models.BooleanField(default=False)
is_survey = models.BooleanField(default=False)
from_project = models.BooleanField(default=True)
default_submission_status = models.IntegerField(default=0, choices=FORM_STATUS)
logs = GenericRelation('eventlog.FieldSightLog')
class Meta:
db_table = 'fieldsight_forms_data'
# unique_together = (("xf", "site"), ("xf", "is_staged", "stage"),("xf", "is_scheduled", "schedule"))
verbose_name = _("XForm")
verbose_name_plural = _("XForms")
ordering = ("-date_created",)
def url(self):
return reverse(
"download_fild_sight_form",
kwargs={
"site": self.site.username,
"id_string": self.id_string
}
)
def getname(self):
return '{0} form {1}'.format(self.form_type(),
self.xf.title,)
def getresponces(self):
return get_instances_for_field_sight_form(self.pk)
def getlatestsubmittiondate(self):
if self.site is not None:
return self.site_form_instances.order_by('-pk').values('date')[:1]
else:
return self.project_form_instances.order_by('-pk').values('date')[:1]
def get_absolute_url(self):
if self.project:
# return reverse('forms:project_html_export', kwargs={'fsxf_id': self.pk})
return reverse('forms:setup-forms', kwargs={'is_project':1, 'pk':self.project_id})
else:
# return reverse('forms:formpack_html_export', kwargs={'fsxf_id': self.pk})
return reverse('forms:setup-forms', kwargs={'is_project':0, 'pk':self.site_id})
def form_type(self):
if self.is_scheduled:
return "scheduled"
if self.is_staged:
return "staged"
if self.is_survey:
return "survey"
if not self.is_scheduled and not self.is_staged:
return "general"
def form_type_id(self):
if self.is_scheduled and self.schedule: return self.schedule.id
if self.is_staged and self.stage: return self.stage.id
return None
def stage_name(self):
if self.stage: return self.stage.name
def schedule_name(self):
if self.schedule: return self.schedule.name
def clean(self):
if self.is_staged:
if FieldSightXF.objects.filter(stage=self.stage).exists():
if not FieldSightXF.objects.filter(stage=self.stage).pk == self.pk:
raise ValidationError({
'xf': ValidationError(_('Duplicate Stage Data')),
})
if self.is_scheduled:
if FieldSightXF.objects.filter(schedule=self.schedule).exists():
if not FieldSightXF.objects.filter(schedule=self.schedule)[0].pk == self.pk:
raise ValidationError({
'xf': ValidationError(_('Duplicate Schedule Data')),
})
if not self.is_scheduled and not self.is_staged:
if self.site:
if FieldSightXF.objects.filter(xf=self.xf, is_scheduled=False, is_staged=False,project=self.site.project).exists():
raise ValidationError({
'xf': ValidationError(_('Form Already Used in Project Level')),
})
else:
if FieldSightXF.objects.filter(xf=self.xf, is_scheduled=False, is_staged=False,
site=self.site, project=self.project).exists():
if not FieldSightXF.objects.filter(xf=self.xf, is_scheduled=False, is_staged=False,
site=self.site, project=self.project)[0].pk == self.pk:
raise ValidationError({
'xf': ValidationError(_('Duplicate General Form Data')),
})
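    # Descriptive summary of clean() above: it enforces one form per stage,
    # one form per schedule, and -- for general (non-staged, non-scheduled)
    # forms -- that a site-level form does not reuse an XForm already used
    # at its project level, nor duplicate an existing general form on the
    # same site/project pair.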
@staticmethod
def get_xform_id_list(site_id):
fs_form_list = FieldSightXF.objects.filter(site__id=site_id).order_by('xf__id').distinct('xf__id')
return [fsform.xf.pk for fsform in fs_form_list]
@property
def site_name(self):
if self.site is not None:
            return u'{}'.format(self.site.name)
@property
def site_or_project_display(self):
if self.site is not None:
return u'{}'.format(self.site.name)
return u'{}'.format(self.project.name)
@property
def project_info(self):
if self.fsform:
            return self.fsform.pk
return None
@property
def has_versions(self):
return self.xf.fshistory.exists()
def __unicode__(self):
return u'{}- {}- {}'.format(self.xf, self.site, self.is_staged)
@receiver(post_save, sender=FieldSightXF)
def create_messages(sender, instance, created, **kwargs):
if instance.project is not None and created and not instance.is_staged:
send_message_project_form(instance)
elif created and instance.site is not None and not instance.is_staged:
send_message(instance)
@receiver(pre_delete, sender=FieldSightXF)
def send_delete_message(sender, instance, using, **kwargs):
if instance.project is not None:
pass
elif instance.is_staged:
pass
else:
fxf = instance
send_message(fxf)
# NOTE: create_messages is already connected via the @receiver decorator
# above; an additional post_save.connect() here would register the handler
# twice and send duplicate messages per save.
class FieldSightParsedInstance(ParsedInstance):
_update_fs_data = None
class Meta:
proxy = True
def save(self, *args, **kwargs):
self._update_fs_data = kwargs.pop('update_fs_data', {})
super(FieldSightParsedInstance, self).save(*args, **kwargs)
def to_dict_for_mongo(self):
mongo_dict = super(FieldSightParsedInstance, self).to_dict_for_mongo()
mongo_dict.update(self._update_fs_data)
return mongo_dict
@staticmethod
def get_or_create(instance, update_data=None):
if update_data is None:
update_data = {}
created = False
try:
fspi = FieldSightParsedInstance.objects.get(instance__pk=instance.pk)
fspi.save(update_fs_data=update_data, async=False)
except FieldSightParsedInstance.DoesNotExist:
created = True
fspi = FieldSightParsedInstance(instance=instance)
fspi.save(update_fs_data=update_data, async=False)
return fspi, created
class FInstanceManager(models.Manager):
def get_queryset(self):
return super(FInstanceManager, self).get_queryset().filter(is_deleted=False)
class FInstanceDeletedManager(models.Manager):
def get_queryset(self):
return super(FInstanceDeletedManager, self).get_queryset().filter(is_deleted=True)
class FInstance(models.Model):
instance = models.OneToOneField(Instance, related_name='fieldsight_instance')
site = models.ForeignKey(Site, null=True, related_name='site_instances')
project = models.ForeignKey(Project, null=True, related_name='project_instances')
site_fxf = models.ForeignKey(FieldSightXF, null=True, related_name='site_form_instances', on_delete=models.SET_NULL)
project_fxf = models.ForeignKey(FieldSightXF, null=True, related_name='project_form_instances')
form_status = models.IntegerField(null=True, blank=True, choices=FORM_STATUS)
date = models.DateTimeField(auto_now=True)
submitted_by = models.ForeignKey(User, related_name="supervisor")
is_deleted = models.BooleanField(default=False)
version = models.CharField(max_length=255, default=u'')
objects = FInstanceManager()
deleted_objects = FInstanceDeletedManager()
logs = GenericRelation('eventlog.FieldSightLog')
@property
def get_version(self):
return self.instance.json['__version__']
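    # Descriptive note: save() below stamps the submission with its XForm
    # version, updates the parent site's progress (staged project forms) or
    # status (other site submissions), and falls back to the owning form's
    # default_submission_status when the submission carries no explicit one.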
def save(self, *args, **kwargs):
self.version = self.get_version
if self.project_fxf is not None and self.project_fxf.is_staged and self.site is not None:
self.site.update_current_progress()
elif self.site is not None:
self.site.update_status()
if self.form_status is None:
if self.site_fxf:
self.form_status = self.site_fxf.default_submission_status
else:
self.form_status = self.project_fxf.default_submission_status
super(FInstance, self).save(*args, **kwargs) # Call the "real" save() method.
@property
def fsxfid(self):
if self.project_fxf:
return self.project_fxf.id
else:
            return self.site_fxf.id
@property
def fsxf(self):
if self.project_fxf:
return self.project_fxf
else:
return self.site_fxf
def get_absolute_url(self):
if self.site_fxf:
fxf_id = self.site_fxf_id
else:
fxf_id = self.project_fxf_id
return "/forms/forms/" + str(fxf_id) + "#/" + str(self.instance.id)
def get_abr_form_status(self):
return dict(FORM_STATUS)[self.form_status]
def getname(self):
if self.site_fxf is None:
return '{0} form {1}'.format(self.project_fxf.form_type(), self.project_fxf.xf.title,)
return '{0} form {1}'.format(self.site_fxf.form_type(),
self.site_fxf.xf.title,)
def __unicode__(self):
if self.site_fxf is None:
return u"%s" % str(self.submitted_by) + "---" + self.project_fxf.xf.title
return u"%s" % str(self.submitted_by) + "---" + self.site_fxf.xf.title
def instance_json(self):
return json.dumps(self.instance.json)
def get_responces(self):
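        # Flattens this submission into display rows. Each row is a dict of
        # the shape {'type': ..., 'question': ..., 'answer': ...}; repeat
        # groups additionally push the bare group name into the list as a
        # separator entry. Media answers are rewritten into absolute
        # attachment URLs on the current Django site (descriptive comment
        # only; the logic below is unchanged).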
        data = []
json_answer = self.instance.json
json_question = json.loads(self.instance.xform.json)
base_url = DjangoSite.objects.get_current().domain
media_folder = self.instance.xform.user.username
def parse_repeat(r_object):
r_question = r_object['name']
data.append(r_question)
if r_question in json_answer:
for gnr_answer in json_answer[r_question]:
for first_children in r_object['children']:
question_type = first_children['type']
question = first_children['name']
group_answer = json_answer[r_question]
answer = ''
if r_question+"/"+question in gnr_answer:
if first_children['type'] == 'note':
answer= ''
elif first_children['type'] == 'photo' or first_children['type'] == 'audio' or first_children['type'] == 'video':
                                answer = 'http://'+base_url+'/attachment/medium?media_file=/'+ media_folder +'/attachments/'+gnr_answer[r_question+"/"+question]
else:
answer = gnr_answer[r_question+"/"+question]
if 'label' in first_children:
question = first_children['label']
row={'type':question_type, 'question':question, 'answer':answer}
data.append(row)
else:
for first_children in r_object['children']:
question_type = first_children['type']
question = first_children['name']
answer = ''
if 'label' in first_children:
question = first_children['label']
row={'type':question_type, 'question':question, 'answer':answer}
data.append(row)
def parse_group(prev_groupname, g_object):
g_question = prev_groupname+g_object['name']
for first_children in g_object['children']:
question = first_children['name']
question_type = first_children['type']
if question_type == 'group':
parse_group(g_question+"/",first_children)
continue
answer = ''
if g_question+"/"+question in json_answer:
if question_type == 'note':
answer= ''
elif question_type == 'photo' or question_type == 'audio' or question_type == 'video':
                        answer = 'http://'+base_url+'/attachment/medium?media_file=/'+ media_folder +'/attachments/'+json_answer[g_question+"/"+question]
else:
answer = json_answer[g_question+"/"+question]
if 'label' in first_children:
question = first_children['label']
row={'type':question_type, 'question':question, 'answer':answer}
data.append(row)
def parse_individual_questions(parent_object):
for first_children in parent_object:
if first_children['type'] == "repeat":
parse_repeat(first_children)
elif first_children['type'] == 'group':
parse_group("",first_children)
else:
question = first_children['name']
question_type = first_children['type']
answer= ''
if question in json_answer:
if first_children['type'] == 'note':
answer= ''
elif first_children['type'] == 'photo' or first_children['type'] == 'audio' or first_children['type'] == 'video':
                            answer = 'http://'+base_url+'/attachment/medium?media_file=/'+ media_folder +'/attachments/'+json_answer[question]
else:
answer = json_answer[question]
if 'label' in first_children:
question = first_children['label']
row={"type":question_type, "question":question, "answer":answer}
data.append(row)
submitted_by={'type':'submitted_by','question':'Submitted by', 'answer':json_answer['_submitted_by']}
        submittion_time={'type':'submittion_time','question':'Submission Time', 'answer':json_answer['_submission_time']}
data.append(submitted_by)
data.append(submittion_time)
parse_individual_questions(json_question['children'])
return data
class InstanceStatusChanged(models.Model):
finstance = models.ForeignKey(FInstance, related_name="comments")
message = models.TextField(null=True, blank=True)
date = models.DateTimeField(auto_now=True)
old_status = models.IntegerField(default=0, choices=FORM_STATUS)
new_status = models.IntegerField(default=0, choices=FORM_STATUS)
user = models.ForeignKey(User, related_name="submission_comments")
logs = GenericRelation('eventlog.FieldSightLog')
class Meta:
ordering = ['-date']
def get_absolute_url(self):
return reverse('forms:alter-status-detail', kwargs={'pk': self.pk})
def getname(self):
return '{0} form {1}'.format(self.finstance.site_fxf.form_type(), self.finstance.site_fxf.xf.title)
class InstanceImages(models.Model):
instance_status = models.ForeignKey(InstanceStatusChanged, related_name="images")
image = models.ImageField(upload_to="submission-feedback-images",
verbose_name='Status Changed Images',)
class FieldSightFormLibrary(models.Model):
xf = models.ForeignKey(XForm)
is_global = models.BooleanField(default=False)
shared_date = models.DateTimeField(auto_now=True)
organization = models.ForeignKey(Organization, null=True, blank=True)
project = models.ForeignKey(Project, null=True, blank=True)
logs = GenericRelation('eventlog.FieldSightLog')
class Meta:
verbose_name = _("Library")
verbose_name_plural = _("Library")
ordering = ("-shared_date",)
class EducationMaterial(models.Model):
is_pdf = models.BooleanField(default=False)
pdf = models.FileField(upload_to="education-material-pdf", null=True, blank=True)
title = models.CharField(max_length=31, blank=True, null=True)
text = models.TextField(blank=True, null=True)
stage = models.OneToOneField(Stage, related_name="em", null=True, blank=True)
fsxf = models.OneToOneField(FieldSightXF, related_name="em", null=True, blank=True)
class EducationalImages(models.Model):
educational_material = models.ForeignKey(EducationMaterial, related_name="em_images")
image = models.ImageField(upload_to="education-material-images",
verbose_name='Education Images',)
# @receiver(post_save, sender=Site)
# def copy_stages_from_project(sender, **kwargs):
# site = kwargs.get('instance')
# created = kwargs.get('created')
# if created:
# project = site.project
# project_main_stages = project.stages.filter(stage__isnull=True)
# for pms in project_main_stages:
# project_sub_stages = Stage.objects.filter(stage__id=pms.pk, stage_forms__is_deleted=False, stage_forms__is_deployed=True)
# if not project_sub_stages:
# continue
# site_main_stage = Stage(name=pms.name, order=pms.order, site=site, description=pms.description,
# project_stage_id=pms.id, weight=pms.weight)
# site_main_stage.save()
# for pss in project_sub_stages:
# if pss.tags and site.type:
# if site.type.id not in pss.tags:
# continue
# site_sub_stage = Stage(name=pss.name, order=pss.order, site=site,
# description=pss.description, stage=site_main_stage, project_stage_id=pss.id, weight=pss.weight)
# site_sub_stage.save()
# if FieldSightXF.objects.filter(stage=pss).exists():
# fsxf = pss.stage_forms
# site_form = FieldSightXF(is_staged=True, default_submission_status=fsxf.default_submission_status, xf=fsxf.xf, site=site,fsform=fsxf, stage=site_sub_stage, is_deployed=True)
# site_form.save()
# general_forms = project.project_forms.filter(is_staged=False, is_scheduled=False, is_deployed=True, is_deleted=False)
# for general_form in general_forms:
# FieldSightXF.objects.create(is_staged=False, default_submission_status=general_form.default_submission_status, is_scheduled=False, is_deployed=True, site=site,
# xf=general_form.xf, fsform=general_form)
#
# schedule_forms = project.project_forms.filter(is_scheduled=True, is_deployed=True, is_deleted=False)
# for schedule_form in schedule_forms:
# schedule = schedule_form.schedule
# selected_days = tuple(schedule.selected_days.all())
# s = Schedule.objects.create(name=schedule.name, site=site, date_range_start=schedule.date_range_start,
# date_range_end=schedule.date_range_end)
# s.selected_days.add(*selected_days)
# s.save()
# FieldSightXF.objects.create(is_scheduled=True, default_submission_status=schedule_form.default_submission_status, xf=schedule_form.xf, site=site, fsform=schedule_form,
# schedule=s, is_deployed=True)
class DeployEvent(models.Model):
form_changed = models.BooleanField(default=True)
    data = JSONField(default=dict)
date = models.DateTimeField(auto_now=True)
site = models.ForeignKey(Site, related_name="deploy_data", null=True)
project = models.ForeignKey(Project, related_name="deploy_data", null=True)
def upload_to(instance, filename):
return os.path.join(
'versions', str(instance.pk),
'xls',
os.path.split(filename)[1])
class XformHistory(models.Model):
class Meta:
unique_together = ('xform', 'version')
def _set_uuid_in_xml(self, file_name=None):
"""
Add bind to automatically set UUID node in XML.
"""
if not file_name:
file_name = self.file_name()
file_name, file_ext = os.path.splitext(file_name)
doc = clean_and_parse_xml(self.xml)
model_nodes = doc.getElementsByTagName("model")
if len(model_nodes) != 1:
raise Exception(u"xml contains multiple model nodes")
model_node = model_nodes[0]
instance_nodes = [node for node in model_node.childNodes if
node.nodeType == Node.ELEMENT_NODE and
node.tagName.lower() == "instance" and
not node.hasAttribute("id")]
if len(instance_nodes) != 1:
raise Exception(u"Multiple instance nodes without the id "
u"attribute, can't tell which is the main one")
instance_node = instance_nodes[0]
# get the first child whose id attribute matches our id_string
survey_nodes = [node for node in instance_node.childNodes
if node.nodeType == Node.ELEMENT_NODE and
(node.tagName == file_name or
node.attributes.get('id'))]
if len(survey_nodes) != 1:
raise Exception(
u"Multiple survey nodes with the id '%s'" % self.id_string)
survey_node = survey_nodes[0]
formhub_nodes = [n for n in survey_node.childNodes
if n.nodeType == Node.ELEMENT_NODE and
n.tagName == "formhub"]
if len(formhub_nodes) > 1:
raise Exception(
u"Multiple formhub nodes within main instance node")
elif len(formhub_nodes) == 1:
formhub_node = formhub_nodes[0]
else:
formhub_node = survey_node.insertBefore(
doc.createElement("formhub"), survey_node.firstChild)
uuid_nodes = [node for node in formhub_node.childNodes if
node.nodeType == Node.ELEMENT_NODE and
node.tagName == "uuid"]
if len(uuid_nodes) == 0:
formhub_node.appendChild(doc.createElement("uuid"))
if len(formhub_nodes) == 0:
# append the calculate bind node
calculate_node = doc.createElement("bind")
calculate_node.setAttribute(
"nodeset", "/%s/formhub/uuid" % file_name)
calculate_node.setAttribute("type", "string")
calculate_node.setAttribute("calculate", "'%s'" % self.uuid)
model_node.appendChild(calculate_node)
self.xml = doc.toprettyxml(indent=" ", encoding='utf-8')
# hack
# http://ronrothman.com/public/leftbraned/xml-dom-minidom-toprettyxml-\
# and-silly-whitespace/
text_re = re.compile('>\n\s+([^<>\s].*?)\n\s+</', re.DOTALL)
output_re = re.compile('\n.*(<output.*>)\n( )*')
prettyXml = text_re.sub('>\g<1></', self.xml.decode('utf-8'))
inlineOutput = output_re.sub('\g<1>', prettyXml)
inlineOutput = re.compile('<label>\s*\n*\s*\n*\s*</label>').sub(
'<label></label>', inlineOutput)
self.xml = inlineOutput
xform = models.ForeignKey(XForm, related_name="fshistory")
date = models.DateTimeField(auto_now=True)
xls = models.FileField(upload_to=upload_to, null=True)
json = models.TextField(default=u'')
description = models.TextField(default=u'', null=True)
xml = models.TextField()
id_string = models.CharField(editable=False, max_length=255)
title = models.CharField(editable=False, max_length=255)
uuid = models.CharField(max_length=32, default=u'')
version = models.CharField(max_length=255, default=u'')
@property
def get_version(self):
import re
n = XML_VERSION_MAX_ITER
xml = self.xml
p = re.compile('version="(.*)">')
m = p.search(xml)
if m:
return m.group(1)
        version = self.check_version(xml, n)
if version:
return version
else:
p = re.compile("""<bind calculate="\'(.*)\'" nodeset="/(.*)/_version_" """)
m = p.search(xml)
if m:
return m.group(1)
p1 = re.compile("""<bind calculate="(.*)" nodeset="/(.*)/_version_" """)
            m1 = p1.search(xml)
if m1:
return m1.group(1)
p1 = re.compile("""<bind calculate="\'(.*)\'" nodeset="/(.*)/__version__" """)
m1 = p1.search(xml)
if m1:
return m1.group(1)
p1 = re.compile("""<bind calculate="(.*)" nodeset="/(.*)/__version__" """)
m1 = p1.search(xml)
if m1:
return m1.group(1)
return None
    @staticmethod
    def check_version(xml, n):
for i in range(n, 0, -1):
p = re.compile("""<bind calculate="\'(.*)\'" nodeset="/(.*)/_version__00{0}" """.format(i))
m = p.search(xml)
if m:
return m.group(1)
p = re.compile("""<bind calculate="(.*)" nodeset="/(.*)/_version__00{0}" """.format(i))
m1 = p.search(xml)
if m1:
return m1.group(1)
return None
def save(self, *args, **kwargs):
if self.xls and not self.xml:
survey = create_survey_from_xls(self.xls)
self.json = survey.to_json()
self.xml = survey.to_xml()
self._mark_start_time_boolean()
# set_uuid(self)
# self._set_uuid_in_xml()
if not self.version:
self.version = self.get_version
super(XformHistory, self).save(*args, **kwargs)
def file_name(self):
return os.path.split(self.xls.name)[-1]
def _mark_start_time_boolean(self):
starttime_substring = 'jr:preloadParams="start"'
if self.xml.find(starttime_substring) != -1:
self.has_start_time = True
else:
self.has_start_time = False
def get_survey(self):
if not hasattr(self, "_survey"):
try:
builder = SurveyElementBuilder()
self._survey = \
builder.create_survey_element_from_json(self.json)
except ValueError:
xml = bytes(bytearray(self.xml, encoding='utf-8'))
self._survey = create_survey_element_from_xml(xml)
return self._survey
survey = property(get_survey)
class SubmissionOfflineSite(models.Model):
offline_site_id = models.CharField(max_length=20)
temporary_site = models.ForeignKey(Site, related_name="offline_submissions")
instance = models.OneToOneField(FInstance, blank=True, null=True, related_name="offline_submission")
    fieldsight_form = models.ForeignKey(FieldSightXF, related_name="offline_submissions", null=True, blank=True)
def __unicode__(self):
if self.instance:
return u"%s ---------------%s" % (str(self.instance.id) ,self.offline_site_id)
return u"%s" % str(self.offline_site_id)
| bsd-2-clause | 6,981,882,587,208,460,000 | 41.057175 | 195 | 0.599547 | false | 3.895639 | false | false | false |
antworteffekt/EDeN | graphprot/prepare_graphprot_seqs.py | 1 | 10394 | #!/usr/bin/env python
# draft implementation
# * TODO:
# * centering should be optional
# * viewpoint should be optional
# * check for nonunique ids and warn
# * check for bedtools version
# * write bed files for sequence coordinates
# * set rnd init for shuffling to have reproducible results
# * use my own temporary sequence files, properly clean up afterwards
# * check if seq length and core length arguments match or handle properly
# * handle input/output error gracefully
# * check if input bed coordinates are stranded
from __future__ import print_function
import argparse
from csv import reader
from itertools import izip
import logging
from eden.util import configure_logging
from pybedtools.featurefuncs import midpoint
from pybedtools.helpers import get_chromsizes_from_ucsc
from pybedtools import BedTool
# parse command line arguments
# positional arguments
parser = argparse.ArgumentParser(
description="Create coordinates and fasta sequences for use with GraphProt.")
parser.add_argument(
"bsites_fn", help="Path to binding site coordiantes in bed format")
parser.add_argument(
"genome_id", help="Genome UCSC id")
parser.add_argument(
"genome_fa_fn", help="Genome fasta sequences")
# optional arguments
parser.add_argument(
"--seq_length",
type=int,
default=150,
help="Length of sequences to create")
parser.add_argument(
"--core_length",
type=int,
default=48,
help="Length of viewpoint region at center of sequence")
parser.add_argument(
"--output_file_prefix",
default="",
help="Prefix to use for output filenames")
parser.add_argument(
"--chromosome_limits",
help="Path to file containing chromosome limites as required by bedtools. Use this parameter disables automatic lookup via the genome id.")
parser.add_argument(
"--negative_site_candidate_regions_fn",
help="Path to regions considered for placement of negatives in bed format")
parser.add_argument(
"-v", "--verbosity",
action="count",
help="Increase output verbosity")
args = parser.parse_args()
logger = logging.getLogger()
configure_logging(logger, verbosity=args.verbosity)
# fixed global variables
npeek = 2
# check chromsizes retrieval
if (args.chromosome_limits is None):
# check if genome_id can be found,
chromsizes = get_chromsizes_from_ucsc(args.genome_id)
logging.debug("Number of chromosomes: {}.".format(len(chromsizes)))
# otherwise request manual definition of chromosome limits
if (len(chromsizes) == 0):
logging.error("Error: retrieving chromosome sizes from UCSC failed. Please specify manually using parameter --chromosome_limits")
exit(1)
# output file arguments
pos_core_bed_fn = args.output_file_prefix + ".positives_core.bed"
neg_core_bed_fn = args.output_file_prefix + ".negatives_core.bed"
# TODO: use
pos_seq_bed_fn = args.output_file_prefix + ".positives_seq.bed"
# TODO: use
neg_seq_bed_fn = args.output_file_prefix + ".negatives_seq.bed"
pos_seq_fa_fn = args.output_file_prefix + ".positives.fa"
neg_seq_fa_fn = args.output_file_prefix + ".negatives.fa"
# calculate flank lengths
flank_length = args.seq_length - args.core_length
flank_upstream_length = int(flank_length / 2)
flank_downstream_length = int(flank_length / 2) + (flank_length % 2)
if (args.core_length + flank_upstream_length + flank_downstream_length != args.seq_length):
raise Exception("Error: bad length calculation.")
def dbg_head(sites, description="", n=npeek, run=args.debug):
"""Print the first few bed entries."""
if run:
logging.debug(description)
for i in sites[0:n]:
logging.debug(i)
def prefix_neg(feature, prefix="negative_from_"):
"""Modify BedTool feature by adding a prefix."""
feature.name = prefix + feature.name
return feature
def offset_zero_by_one(feature):
"""Sets the start coordinate to 1 if it is actually 0.
Required for the flanking to work properly in those cases.
"""
if feature.start == 0:
feature.start += 1
return feature
def get_flanks(cores,
flank_upstream_length=flank_upstream_length,
flank_downstream_length=flank_downstream_length):
"""Calculate flanking regions of a core region."""
if args.chromosome_limits is not None:
logging.debug("using chromosome_limits " + args.chromosome_limits)
# get upstream flanks
flanks_upstream = cores.flank(
s=True,
l=flank_upstream_length,
r=0,
g=args.chromosome_limits).saveas()
# get downstream flanks
flanks_downstream = cores.flank(
s=True,
r=flank_downstream_length,
l=0,
g=args.chromosome_limits).saveas()
else:
# get upstream flanks
flanks_upstream = cores.flank(
s=True,
l=flank_upstream_length,
r=0,
genome=args.genome_id).saveas()
# get downstream flanks
flanks_downstream = cores.flank(
s=True,
r=flank_downstream_length,
l=0,
genome=args.genome_id).saveas()
# check if sites and flanks have the same number of entries
if cores.count() == flanks_upstream.count() == flanks_downstream.count():
return flanks_upstream, flanks_downstream
else:
if args.debug:
cores.saveas("debug_cores.bed")
flanks_upstream.saveas("debug_upstream.bed")
flanks_downstream.saveas("debug_downstream.bed")
else:
cores.saveas()
flanks_upstream.saveas()
flanks_downstream.saveas()
raise Exception("Error: numbers of cores and flanks don't match: got " + str(cores.count()) + " cores, " + str(
flanks_upstream.count()) + " upstream flanks and " + str(flanks_downstream.count()) + " downstream flanks.")
def get_seqs(cores,
flanks_upstream,
flanks_downstream,
viewpointfa_fn,
genome_fa_fn=args.genome_fa_fn):
"""Prepare sequences and write them to disk."""
# get sequences
genome_fa = BedTool(genome_fa_fn)
cores = cores.sequence(
fi=genome_fa,
s=True,
tab=True, name=True).save_seqs(cores.fn + ".tabseq")
flanks_upstream = flanks_upstream.sequence(
fi=genome_fa,
s=True,
tab=True,
name=True).save_seqs(flanks_upstream.fn + ".tabseq")
flanks_downstream = flanks_downstream.sequence(
fi=genome_fa,
s=True,
tab=True,
name=True).save_seqs(flanks_downstream.fn + ".tabseq")
# write sequences to disk
fup_seq_fn = flanks_upstream.seqfn
cores_seq_fn = cores.seqfn
fdown_seq_fn = flanks_downstream.seqfn
viewpointfa = open(viewpointfa_fn, "wb")
with open(fup_seq_fn, "rb") as fup_tabseq, open(cores_seq_fn, "rb") as core_tabseq, open(fdown_seq_fn, "rb") as fdown_tabseq:
fup_reader = reader(fup_tabseq, delimiter="\t")
core_reader = reader(core_tabseq, delimiter="\t")
fdown_reader = reader(fdown_tabseq, delimiter="\t")
for fup, core, fdown in izip(fup_reader, core_reader, fdown_reader):
assert fup[0] == core[0] == fdown[0], "Error: sequence ids of cores and flanks don't match."
# setup fasta headers and sequences
fa_header = ">" + core[0]
seq_viewpoint = fup[1].lower() + core[1].upper() + fdown[1].lower()
# seq_normal = fup[1].upper() + core[1].upper() + fdown[1].upper()
viewpointfa.write(fa_header + "\n")
viewpointfa.write(seq_viewpoint + "\n")
viewpointfa.close()
# prepare input coordinates
bsites = BedTool(args.bsites_fn).sort().saveas()
centers = bsites.each(midpoint).saveas()
# prepare positive instances
logging.info("preparing positive instances")
if (args.chromosome_limits):
logging.debug("using chromosome_limits " + args.chromosome_limits)
cores = centers.slop(s=True,
l=int(args.core_length / 2),
# -1 to account for the center nucleotide!
r=int(args.core_length / 2) +
(args.core_length % 2) - 1,
g=args.chromosome_limits).each(offset_zero_by_one).saveas(pos_core_bed_fn)
else:
cores = centers.slop(s=True,
l=int(args.core_length / 2),
# -1 to account for the center nucleotide!
r=int(args.core_length / 2) +
(args.core_length % 2) - 1,
genome=args.genome_id).each(offset_zero_by_one).saveas(pos_core_bed_fn)
flanks_upstream, flanks_downstream = get_flanks(cores)
get_seqs(cores, flanks_upstream, flanks_downstream, pos_seq_fa_fn)
# prepare negative sites if requested
if args.negative_site_candidate_regions_fn:
# get negative candidate regions
negative_site_candidate_regions = BedTool(
args.negative_site_candidate_regions_fn)
# remove input binding sites from negative candidate regions
processed_negative_site_candidate_regions = negative_site_candidate_regions.subtract(
bsites,
s=True).saveas()
# create negative core sites by placing within candidate regions
logging.info("preparing negative instances")
logging.info("starting from " + str(cores.count()) + " positive cores")
if args.chromosome_limits:
logging.debug("using chromosome_limits " + args.chromosome_limits)
neg_cores = cores.shuffle(
g=args.chromosome_limits,
chrom=True,
incl=processed_negative_site_candidate_regions.fn,
noOverlapping=True).each(prefix_neg).saveas(neg_core_bed_fn)
logging.info("derived negative cores: " + str(neg_cores.count()))
neg_fup, neg_fdown = get_flanks(neg_cores)
get_seqs(neg_cores, neg_fup, neg_fdown, neg_seq_fa_fn)
else:
neg_cores = cores.shuffle(
genome=args.genome_id,
chrom=True,
incl=processed_negative_site_candidate_regions.fn,
noOverlapping=True).each(prefix_neg).saveas(neg_core_bed_fn)
logging.info("derived negative cores: " + str(neg_cores.count()))
neg_fup, neg_fdown = get_flanks(neg_cores)
get_seqs(neg_cores, neg_fup, neg_fdown, neg_seq_fa_fn)
| mit | -1,839,607,395,871,399,200 | 38.075188 | 143 | 0.646527 | false | 3.510301 | false | false | false |
rwightman/tensorflow-litterbox | litterbox/models/sdc/model_sdc.py | 1 | 8350 | # Copyright (C) 2016 Ross Wightman. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
# ==============================================================================
"""Model wrapper for Google's tensorflow/model/slim models.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import fabric
import tensorflow as tf
from .build_inception_resnet_sdc import *
from .build_resnet_sdc import *
from .build_nvidia_sdc import *
slim = tf.contrib.slim
sdc_default_params = {
'outputs': {'steer': 1, 'xyz': 2},
'network': 'inception_resnet_v2', # or one of other options in network_map
'regression_loss': 'mse', # or huber
'version': 2,
'bayesian': False,
'lock_root': False,
}
network_map = {
'inception_resnet_v2': build_inception_resnet_sdc_regression,
'resnet_v1_50': build_resnet_v1_50_sdc,
'resnet_v1_101': build_resnet_v1_101_sdc,
'resnet_v1_152': build_resnet_v1_152_sdc,
'nvidia_sdc': build_nvidia_sdc,
}
arg_scope_map = {
'inception_resnet_v2': inception_resnet_v2_arg_scope,
'resnet_v1_50': resnet_arg_scope,
'resnet_v1_101': resnet_arg_scope,
'resnet_v1_152': resnet_arg_scope,
'nvidia_sdc': nvidia_style_arg_scope,
}
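# Editor's sketch (hypothetical usage of the maps above; `inputs` is an
# assumed tensor supplied by the surrounding fabric training loop):
#     model = ModelSdc(params={'network': 'resnet_v1_50', 'version': 1})
#     outputs = model.build_tower(inputs, is_training=True, scope='tower_0')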
class ModelSdc(fabric.model.Model):
def __init__(self, params={}):
super(ModelSdc, self).__init__()
params = fabric.model.merge_params(sdc_default_params, params)
print("ModelSdc params", params)
self.output_cfg = params['outputs']
# model variable scope needs to match google net for pretrained weight compat
if (params['network'] == 'resnet_v1_152' or
params['network'] == 'resnet_v1_101' or
params['network'] == 'resnet_v1_50'):
self.network = params['network']
self.model_variable_scope = params['network']
elif params['network'] == 'inception_resnet_v2':
self.network = 'inception_resnet_v2'
self.model_variable_scope = "InceptionResnetV2"
else:
assert params['network'] == 'nvidia_sdc'
self.network = 'nvidia_sdc'
self.model_variable_scope = "NvidiaSdc"
self.version = params['version']
self.bayesian = params['bayesian']
self.lock_root = params['lock_root']
if params['regression_loss'] == 'huber':
self.regression_loss = fabric.loss.loss_huber_with_aux
else:
self.regression_loss = fabric.loss.loss_mse_with_aux
self.disable_summaries = False
def build_tower(self, inputs, is_training=False, summaries=True, scope=None):
with slim.arg_scope(arg_scope_map[self.network]()):
output, endpoints = network_map[self.network](
inputs,
output_cfg=self.output_cfg,
version=self.version,
bayesian=self.bayesian,
lock_root=self.lock_root,
is_training=is_training)
aux_output = None
if 'AuxOutput' in endpoints:
aux_output = endpoints['AuxOutput']
self.add_tower(
scope,
endpoints=endpoints,
outputs=output,
aux_outputs=aux_output,
)
# Add summaries for viewing model statistics on TensorBoard.
if summaries:
self.activation_summaries()
return output
def add_tower_loss(self, targets, scope=None):
tower = self.tower(scope)
assert 'xyz' in self.output_cfg or 'steer' in self.output_cfg
if 'xyz' in self.output_cfg:
target_xyz = targets[1]
aux_output_xyz = None
if tower.aux_outputs:
aux_output_xyz = tower.aux_outputs['xyz']
self.regression_loss(
tower.outputs['xyz'], target_xyz, aux_predictions=aux_output_xyz)
if 'steer' in self.output_cfg:
target_steer = targets[0]
aux_output_steer = None
if tower.aux_outputs:
aux_output_steer = tower.aux_outputs['steer']
if self.output_cfg['steer'] > 1:
# steer is integer target, one hot output, use softmax
fabric.loss_softmax_cross_entropy_with_aux(
tower.outputs['steer'], target_steer, aux_logits=aux_output_steer)
else:
assert self.output_cfg['steer'] == 1
# steer is float target/output, use regression /w huber loss
self.regression_loss(
tower.outputs['steer'], target_steer, aux_predictions=aux_output_steer)
def get_predictions(self, outputs, processor=None):
if processor:
for k, v in outputs.items():
outputs[k] = processor.decode_output(v, key=k)
return outputs
def _remap_variable_names(self, variables, checkpoint_variable_set, prefix_scope):
def _strip_name(prefix, name):
name = name[len(prefix):] if name.startswith(prefix) else name
return name
if prefix_scope:
# strip our network prefix scope and remap accordingly
prefix_scope += '/'
restore_variables = {_strip_name(prefix_scope, v.op.name): v for v in variables}
return restore_variables
else:
return variables
def output_scopes(self, prefix_scope=''):
rel_scopes = ['logits', 'Logits', 'Output', 'Output/OutputXYZ', 'Output/OutputSteer', 'Output/Fc1',
'AuxLogits/OutputXYZ', 'AuxLogits/OutputSteer', 'AuxLogits/Fc1']
prefix = prefix_scope + '/' if prefix_scope else ''
prefix += self.model_variable_scope + '/'
abs_scopes = [prefix + x for x in rel_scopes]
return abs_scopes
@staticmethod
def eval_ops(predictions, labels, processor=None):
"""Generate a simple (non tower based) loss op for use in evaluation.
"""
ops = {}
if 'steer' in predictions:
steer_label = labels[0]
steer_prediction = predictions['steer']
if steer_prediction.get_shape()[-1].value > 1:
# one hot steering loss (non reduced)
steer_loss = tf.nn.sparse_softmax_cross_entropy_with_logits(
steer_prediction, steer_label, name='steer_xentropy_eval')
# decode non-linear mapping before mse
steer_prediction = tf.cast(tf.argmax(steer_prediction, dimension=1), tf.int32)
if processor:
steer_prediction = processor.decode_output(steer_prediction, key='steer')
steer_label = processor.decode_output(steer_label, key='steer')
else:
# linear regression steering loss
assert steer_prediction.get_shape()[-1].value == 1
steer_loss = fabric.loss.metric_huber(steer_prediction, steer_label)
if processor:
steer_prediction = processor.decode_output(steer_prediction, key='steer')
steer_label = processor.decode_output(steer_label, key='steer')
steer_mse = tf.squared_difference(
steer_prediction, steer_label, name='steer_mse_eval')
ops['steer_loss'] = steer_loss
ops['steer_mse'] = steer_mse
#ops['steer_prediction'] = steer_prediction
#ops['steer_label'] = steer_label
if 'xyz' in predictions:
xyz_labels = labels[1]
xyz_predictions = predictions['xyz']
if processor:
xyz_labels = processor.decode_output(xyz_labels, key='xyz')
xyz_predictions = processor.decode_output(xyz_predictions, key='xyz')
xyz_loss = fabric.loss.metric_huber(xyz_predictions, xyz_labels)
xyz_mse = tf.squared_difference(xyz_predictions, xyz_labels, name='xyz_mse_eval')
ops['xyz_loss'] = xyz_loss
ops['xyz_mse'] = xyz_mse
ops['xyz_prediction'] = xyz_predictions
ops['xyz_label'] = xyz_labels
return ops
| apache-2.0 | 1,256,829,688,703,866,600 | 38.761905 | 107 | 0.582395 | false | 3.802368 | false | false | false |
mwiencek/picard | picard/ui/cdlookup.py | 1 | 3277 | # -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
# Copyright (C) 2006 Lukáš Lalinský
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from PyQt4 import QtCore, QtGui
from picard.ui.ui_cdlookup import Ui_Dialog
from picard.mbxml import artist_credit_from_node, label_info_from_node
class CDLookupDialog(QtGui.QDialog):
def __init__(self, releases, disc, parent=None):
QtGui.QDialog.__init__(self, parent)
self.releases = releases
self.disc = disc
self.ui = Ui_Dialog()
self.ui.setupUi(self)
self.ui.release_list.setSortingEnabled(True)
self.ui.release_list.setHeaderLabels([_(u"Album"), _(u"Artist"), _(u"Date"), _(u"Country"),
_(u"Labels"), _(u"Catalog #s"), _(u"Barcode")])
if self.releases:
for release in self.releases:
labels, catalog_numbers = label_info_from_node(release.label_info_list[0])
date = release.date[0].text if "date" in release.children else ""
country = release.country[0].text if "country" in release.children else ""
barcode = release.barcode[0].text if "barcode" in release.children else ""
item = QtGui.QTreeWidgetItem(self.ui.release_list)
item.setText(0, release.title[0].text)
item.setText(1, artist_credit_from_node(release.artist_credit[0], self.config)[0])
item.setText(2, date)
item.setText(3, country)
item.setText(4, ", ".join(labels))
item.setText(5, ", ".join(catalog_numbers))
item.setText(6, barcode)
item.setData(0, QtCore.Qt.UserRole, QtCore.QVariant(release.id))
self.ui.release_list.setCurrentItem(self.ui.release_list.topLevelItem(0))
self.ui.ok_button.setEnabled(True)
        for i in range(self.ui.release_list.columnCount() - 1):
            self.ui.release_list.resizeColumnToContents(i)
# Sort by descending date, then ascending country
self.ui.release_list.sortByColumn(3, QtCore.Qt.AscendingOrder)
self.ui.release_list.sortByColumn(2, QtCore.Qt.DescendingOrder)
self.ui.lookup_button.clicked.connect(self.lookup)
def accept(self):
release_id = str(self.ui.release_list.currentItem().data(0, QtCore.Qt.UserRole).toString())
self.tagger.load_album(release_id, discid=self.disc.id)
QtGui.QDialog.accept(self)
def lookup(self):
lookup = self.tagger.get_file_lookup()
lookup.discLookup(self.disc.submission_url)
QtGui.QDialog.accept(self)
| gpl-2.0 | 8,501,564,173,859,651,000 | 48.606061 | 111 | 0.66066 | false | 3.670404 | false | false | false |
p2pu/mechanical-mooc | twitter/views.py | 1 | 1825 | from django import http
from django.conf import settings
from django.views.decorators.http import require_http_methods
import json
from twitter import utils
@require_http_methods(['POST'])
def get_data(request):
    if 'twitter_handle' not in request.POST:
return http.HttpResponseServerError()
twitter_handle = request.POST.get('twitter_handle')
creds = (settings.TWITTER_ACCESS_TOKEN, settings.TWITTER_ACCESS_TOKEN_SECRET)
try:
user_data = utils.get_user_data(twitter_handle, creds)
bio_data = {
'avatar': user_data['profile_image_url'],
'name': user_data['name'],
'bio': user_data['description']
}
if '_normal' in bio_data['avatar']:
bio_data['avatar'] = bio_data['avatar'].replace('_normal', '')
return http.HttpResponse(json.dumps(bio_data))
    except Exception:
return http.HttpResponseNotFound()
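# Editor's sketch: on success the view returns a JSON body shaped like
# {"avatar": ..., "name": ..., "bio": ...}; unknown handles yield a 404.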
def old(request):
request_token_dict = utils.get_request_token()
request.session['oauth_token'] = request_token_dict['oauth_token']
request.session['oauth_token_secret'] = request_token_dict['oauth_token_secret']
redirect_url = 'https://api.twitter.com/oauth/authenticate?oauth_token={0}'.format(
request_token_dict['oauth_token']
)
return http.HttpResponseRedirect(redirect_url)
def oauth_callback(request):
oauth_token = request.GET.get('oauth_token')
oauth_verifier = request.GET.get('oauth_verifier')
oauth_token_secret = request.session['oauth_token_secret']
access_token_dict = utils.get_access_token(oauth_verifier, (oauth_token, oauth_token_secret))
user = utils.get_user_data(
access_token_dict['screen_name'],
(access_token_dict['oauth_token'], access_token_dict['oauth_token_secret'])
)
raise Exception()
| mit | 2,535,445,939,260,978,000 | 34.096154 | 97 | 0.667397 | false | 3.70935 | false | false | false |
OCA/carrier-delivery | base_delivery_carrier_label/models/delivery_carrier.py | 1 | 1276 | # -*- coding: utf-8 -*-
# Copyright 2012 Akretion <http://www.akretion.com>.
# Copyright 2013-2016 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, fields, models
class DeliveryCarrier(models.Model):
_inherit = 'delivery.carrier'
@api.model
def _get_carrier_type_selection(self):
""" To inherit to add carrier type """
return []
carrier_type = fields.Selection(
selection='_get_carrier_type_selection',
string='Type',
help="Carrier type (combines several delivery methods)",
oldname='type',
)
code = fields.Char(
help="Delivery Method Code (according to carrier)",
)
description = fields.Text()
available_option_ids = fields.One2many(
comodel_name='delivery.carrier.option',
inverse_name='carrier_id',
string='Option',
)
@api.multi
def default_options(self):
""" Returns default and available options for a carrier """
options = self.env['delivery.carrier.option'].browse()
for available_option in self.available_option_ids:
if (available_option.mandatory or available_option.by_default):
options |= available_option
return options
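    # Editor's sketch (hypothetical usage): for a carrier record,
    #     options = carrier.default_options()
    # yields the option records flagged mandatory or by_default, ready to
    # preload on a picking.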
| agpl-3.0 | -6,199,342,651,234,206,000 | 31.717949 | 75 | 0.636364 | false | 3.962733 | false | false | false |
heatherleaf/MCFParser.py | test/convert_mcfg_to_py.py | 1 | 1550 |
import re
import json
import fileinput
# F --> AO a [0,0;1,0] (* C --> /T,C T [0,0;1,0] *)
# A --> AL H [0,0;1,0] (* D,-case --> /N,D,-case N [0,0;1,0] *)
# I --> m [0,2;0,0][0,1] (* PastPart,-aux;-case --> +v,PastPart,-aux;-case;-v [0,2;0,0][0,1] *)
# I --> p [0,1;0,0][0,2] (* PastPart,-aux;-case --> +v,PastPart,-aux;-v;-case [0,1;0,0][0,2] *)
# E --> "laugh" (* /D,V,-v --> "laugh" *)
# E --> "love" (* /D,V,-v --> "love" *)
_rule_re = re.compile(r'''
^ (\w+) \s+ --> \s+ ([\w\s]+?) \s+ \[ ([][\d,;]+) \]
''', re.VERBOSE)
_lex_re = re.compile(r'''
^ (\w+) \s+ --> \s+ "([^"]*)"
''', re.VERBOSE)
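# Example (editor's sketch): on the line 'F --> AO a [0,0;1,0]', _rule_re
# captures ('F', 'AO a', '0,0;1,0'); the loop below turns that into
# args = ('AO', 'a') and rhss = [[(0, 0), (1, 0)]].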
# >>> grammar = [('f', 'S', ['A'], [[(0,0), (0,1)]]),
# ... ('g', 'A', ['A'], [['a', (0,0), 'b'], ['c', (0,1), 'd']]),
# ... ('h', 'A', [], [['a', 'b'], ['c', 'd']])]
# fun, cat, args, rhss = split_mcfrule(mcfrule)
grammar = []
functr = 1
for line in fileinput.input():
mrule = re.match(_rule_re, line)
mlex = re.match(_lex_re, line)
if mrule:
cat, args, rhss = mrule.groups()
args = tuple(args.split())
rhss = [[tuple(int(i) for i in sym.split(',')) for sym in rhs.split(';')] for rhs in rhss.split('][')]
elif mlex:
args = ()
cat, token = mlex.groups()
if token:
rhss = [[token]]
else:
rhss = [[]]
else:
continue
fun = f"{cat}-{functr:05d}"
grammar.append((fun, cat, args, rhss))
functr += 1
print('grammar = [')
for rule in grammar:
print(f" {rule},")
print(']')
| gpl-3.0 | -5,696,885,453,429,928,000 | 28.245283 | 110 | 0.425161 | false | 2.359209 | false | false | false |
meredith-digops/awsops | volumecleanup/volumecleanup.py | 1 | 3967 | #!/usr/bin/env python
from __future__ import print_function
import boto3
from botocore.exceptions import ClientError
from datetime import datetime
from datetime import timedelta
from datetime import tzinfo
DEFAULT_RETENTION_DAYS = None
"""If None, no default retention is applied"""
ZERO = timedelta(0)
class UTC(tzinfo):
"""
Implements UTC timezone for datetime interaction
"""
def utcoffset(self, dt):
return ZERO
def tzname(self, dt):
return "UTC"
def dst(self, dt):
return ZERO
def fetch_available_volumes(ec2, filters=None):
"""
Generator of available EBS volumes
:param ec2: EC2 resource
:type ec2: boto3.resources.factory.ec2.ServiceResource
:param filters: Optional list of filters
:type filters: None|list
:returns: volumes collection
:rtype: boto3.resources.collection.ec2.volumesCollection
"""
# Set an empty filter set if none provided
if filters is None:
filters = []
# Append the filter for finding only volumes that are in the 'available'
# state.
# Ref: http://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVolumes.html
filters.append({
'Name': 'status',
'Values': ['available'],
})
return ec2.volumes.filter(
Filters=filters
)
def get_abandoned_volumes(since, *args, **kwargs):
"""
Generate of available EBS volumes created some time ago
:param since: Datetime where all volumes created prior to are considered abandoned
:type since: datetime.datetime
:returns: (iterator) of volumes
:rtype: boto3.resources.factory.ec2.Volume
"""
for vol in fetch_available_volumes(*args, **kwargs):
# Ignore volumes created after `since` parameter
if vol.meta.data['CreateTime'] > since:
continue
yield vol
def lambda_handler(event, context):
"""
Delete abandoned EBS snapshots that exceed reasonable retention
"""
# Set the default retention period if none was provided to the lambda
# invocation
if 'Retention' not in event:
event['Retention'] = DEFAULT_RETENTION_DAYS
if event['Retention'] is None:
# Don't delete anything
raise AttributeError("No Retention specified")
if 'DryRun' not in event:
event['DryRun'] = False
if 'Filters' not in event:
event['Filters'] = [{
'Name': 'tag-key',
'Values': [
'ops:retention'
]
}]
since = datetime.now(UTC()) - timedelta(float(event['Retention']))
ec2 = boto3.resource('ec2')
old_volumes = get_abandoned_volumes(since,
ec2=ec2,
filters=event['Filters'])
for volume in old_volumes:
print("Deleting: {id}".format(
id=volume.id
))
try:
volume.delete(DryRun=event['DryRun'])
except ClientError as e:
if e.response['Error']['Code'] == 'DryRunOperation':
pass
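# Editor's sketch: a minimal event payload this handler accepts (the values
# here are illustrative assumptions, not defaults from the source).
EXAMPLE_EVENT = {
    'Retention': 30,  # days; available volumes older than this are deleted
    'DryRun': True,
    'Filters': [{'Name': 'tag-key', 'Values': ['ops:retention']}],
}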
if __name__ == '__main__':
from terminaltables import AsciiTable
since = datetime.now(UTC()) - timedelta(3*365/12)
print("Since: {}".format(
since.isoformat()))
table_headers = [
[
'created',
'id',
'size',
'type',
'tags',
]
]
table_data = []
vols = get_abandoned_volumes(
since,
ec2=boto3.resource('ec2'))
for v in vols:
table_data.append([
v.meta.data['CreateTime'].isoformat(),
v.id,
v.size,
v.volume_type,
"" if v.tags is None else
"\n".join("{k}: {v}".format(
k=i['Key'],
v=i['Value']
) for i in v.tags),
])
table_data.sort(key=lambda x: x[0])
print(AsciiTable(table_headers + table_data).table)
| mit | -3,186,707,178,158,320,600 | 24.928105 | 89 | 0.575498 | false | 4.064549 | false | false | false |
johncosta/private-readthedocs.org | readthedocs/core/models.py | 1 | 1611 | from django.db import models
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch import receiver
from django.contrib.auth.models import User
STANDARD_EMAIL = "[email protected]"
class UserProfile (models.Model):
"""Additional information about a User.
"""
user = models.ForeignKey(User, unique=True, related_name='profile')
whitelisted = models.BooleanField()
homepage = models.CharField(max_length=100, blank=True)
allow_email = models.BooleanField(help_text='Show your email on VCS contributions.', default=True)
def get_absolute_url(self):
return ('profiles_profile_detail', (), {'username': self.user.username})
get_absolute_url = models.permalink(get_absolute_url)
def __unicode__(self):
return "%s's profile" % self.user.username
def get_contribution_details(self):
"""
Gets the line to put into commits to attribute the author.
Returns a tuple (name, email)
"""
if self.user.first_name and self.user.last_name:
name = '%s %s' % (self.user.first_name, self.user.last_name)
else:
name = self.user.username
if self.allow_email:
email = self.user.email
else:
email = STANDARD_EMAIL
return (name, email)
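    # Editor's sketch (hypothetical values): a user with a full name and
    # allow_email=True yields ('Jane Doe', '[email protected]'); with
    # allow_email=False the address falls back to STANDARD_EMAIL.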
@receiver(post_save, sender=User)
def create_profile(sender, **kwargs):
if kwargs['created'] is True:
try:
UserProfile.objects.create(user_id=kwargs['instance'].id, whitelisted=False)
except DatabaseError:
pass
| mit | -9,134,461,863,259,773,000 | 33.276596 | 102 | 0.656735 | false | 3.977778 | false | false | false |
FernanOrtega/DAT210x | Module2/assignment3.py | 1 | 1065 | import pandas as pd
# TODO: Load up the dataset
# Ensuring you set the appropriate header column names
#
df = pd.read_csv('Datasets/servo.data', names=['motor', 'screw', 'pgain', 'vgain', 'class'])
print df.head()
# TODO: Create a slice that contains all entries
# having a vgain equal to 5. Then print the
# length of (# of samples in) that slice:
#
df_vgain = df[df.vgain == 5]
print df_vgain.iloc[:,0].count()
# TODO: Create a slice that contains all entries
# having a motor equal to E and screw equal
# to E. Then print the length of (# of
# samples in) that slice:
#
# .. your code here ..
df_eq = df[(df.motor == 'E') & (df.screw == 'E')]
print df_eq.iloc[:,0].count()
# TODO: Create a slice that contains all entries
# having a pgain equal to 4. Use one of the
# various methods of finding the mean vgain
# value for the samples in that slice. Once
# you've found it, print it:
#
df_pgain = df[df.pgain == 4]
print df_pgain.vgain.mean(0)
# TODO: (Bonus) See what happens when you run
# the .dtypes method on your dataframe!
print df.dtypes
| mit | 6,675,119,542,563,966,000 | 21.659574 | 92 | 0.680751 | false | 3.025568 | false | false | false |
ITNano/WikiSubtitleReader | raw_to_ass.py | 1 | 3753 | # -*- coding: utf-8 -*-
#Python class to parse lyrics of the form
#Singer 1: I am so happy, hear me sing
#and write them to an .ass file (Advanced SubStation Alpha subtitle file).
#The string before the separator ':' is used to format the text by mapping it
#to a predefined format; the remainder is the actual text to sing.
import math
def time_to_seconds(time):
hmmss_list=time.split(':')
seconds=3600*float(hmmss_list[0])+60*float(hmmss_list[1])+float(hmmss_list[2])
return seconds
def seconds_to_time(seconds):
    #Seconds are given with two decimal places; hours use a single digit.
    #Minutes and hours are integers.
hours=math.floor(seconds/3600)
seconds=seconds-3600*hours
minutes=math.floor(seconds/60)
seconds=seconds-60*minutes
seconds=float("{0:05.2f}".format(seconds))
if seconds==60:
seconds=0;
minutes=minutes+1;
if minutes==60:
minutes=0
hours=hours+1
#Pads minutes with a leading zero, formats seconds to xx.xx
hmmss_string="{0:01.0f}".format(hours)+':'+"{0:02.0f}".format(minutes)+':'+"{0:05.2f}".format(seconds)
return hmmss_string
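# Editor's sketch: the two helpers above are inverses for the h:mm:ss.ss
# format used below (the timestamp here is an invented example).
def _roundtrip_example():
    assert time_to_seconds("0:01:30.50") == 90.5
    assert seconds_to_time(90.5) == "0:01:30.50"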
class Raw_to_ass_parser():
def parse_line_to_ass(self,line,delimiter,allowEmptyLines):
#example output:
#Dialogue: 0,0:00:26.00,0:00:27.00,CHARACTER,,0,0,0,,I am singing!
#Styledict maps a short form to a style used in the ASS file. Example:
#Styledict["a"]="ANNA"
#Note that keys are all cast to lowercase.
emptyLine = False
if len(line) == 0:
emptyLine = True
if allowEmptyLines:
line = "kommentar:"
else:
return ""
split_line=line.split(delimiter,1)
# Handle lines without explicitly written singer
if len(split_line)==1:
split_line=[self.empty_style,split_line[0]]
# Handle multi/none singer(s)
default_singer = r"OKÄND"
if "," in split_line[0] or "+" in split_line[0]:
default_singer = r"ALLA"
# Handle people singing at the same time
extra_stylepart = ""
if self.multi_line:
extra_stylepart = " NERE"
if split_line[1].strip().endswith(self.multi_line_keyword):
if self.multi_line:
print("WARNING: Found 3+ multiline!")
extra_stylepart = " UPPE"
split_line[1] = split_line[1].strip()[:-len(self.multi_line_keyword)]
self.multi_line = True;
else:
self.multi_line = False;
# Construct the actual data.
outline='Dialogue: 0,'+self.time_start+','+self.time_end+','
outline=outline+self.style_dictionary.get(split_line[0].lower(), default_singer)+extra_stylepart+',,0,0,0,,'+split_line[1].strip()
# Prepare for next line
if not emptyLine:
self.empty_style=split_line[0]
if len(outline) > 0 and not self.multi_line:
self.increment_time()
return outline
def increment_time(self):
float_start=time_to_seconds(self.time_start)
float_end=time_to_seconds(self.time_end)
self.time_start=seconds_to_time(float_start+self.time_step)
self.time_end=seconds_to_time(float_end+self.time_step)
def __init__(self,start_time,increment_time):
self.time_step=float(increment_time)
self.time_start=seconds_to_time(start_time)
self.time_end=seconds_to_time(time_to_seconds(self.time_start)+self.time_step)
self.style_dictionary={}
self.empty_style=""
self.multi_line = False;
self.multi_line_keyword = "[samtidigt]"
| apache-2.0 | 3,436,370,674,309,785,000 | 35.427184 | 138 | 0.598614 | false | 3.509822 | false | false | false |
ealmansi/incc-tp-final | src/gensim/tut2.py | 1 | 1637 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# https://radimrehurek.com/gensim/tut2.html
import logging
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
import os
from gensim import corpora, models, similarities
if (os.path.exists("/tmp/deerwester.dict")):
dictionary = corpora.Dictionary.load('/tmp/deerwester.dict')
corpus = corpora.MmCorpus('/tmp/deerwester.mm')
print("Used files generated from first tutorial")
else:
print("Please run first tutorial to generate data set")
tfidf = models.TfidfModel(corpus) # step 1 -- initialize a model
doc_bow = [(0, 1), (1, 1)]
print(tfidf[doc_bow]) # step 2 -- use the model to transform vectors
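# The printout is a sparse tf-idf vector; for the deerwester corpus it is
# roughly [(0, 0.7071), (1, 0.7071)].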
corpus_tfidf = tfidf[corpus]
for doc in corpus_tfidf:
print(doc)
lsi = models.LsiModel(corpus_tfidf, id2word=dictionary, num_topics=2) # initialize an LSI transformation
corpus_lsi = lsi[corpus_tfidf] # create a double wrapper over the original corpus: bow->tfidf->fold-in-lsi
lsi.print_topics(2)
for doc in corpus_lsi: # both bow->tfidf and tfidf->lsi transformations are actually executed here, on the fly
print(doc)
lsi.save('/tmp/model.lsi') # same for tfidf, lda, ...
lsi = models.LsiModel.load('/tmp/model.lsi')
tfidf_model = models.TfidfModel(corpus, normalize=True)
lsi_model = models.LsiModel(corpus_tfidf, id2word=dictionary, num_topics=300)
rp_model = models.RpModel(corpus_tfidf, num_topics=500)
lda_model = models.LdaModel(corpus, id2word=dictionary, num_topics=100)
hdp_model = models.HdpModel(corpus, id2word=dictionary)
print(tfidf_model)
print(lsi_model)
print(rp_model)
print(lda_model)
print(hdp_model)
| mit | 5,472,797,103,600,224,000 | 33.104167 | 110 | 0.735492 | false | 2.87193 | false | false | false |
milapour/palm | palm/test/test_blink_model.py | 1 | 3374 | import nose.tools
from palm.blink_factory import SingleDarkBlinkFactory,\
DoubleDarkBlinkFactory,\
ConnectedDarkBlinkFactory
from palm.blink_model import BlinkModel
from palm.blink_parameter_set import SingleDarkParameterSet,\
DoubleDarkParameterSet,\
ConnectedDarkParameterSet
from palm.util import n_choose_k
@nose.tools.istest
def SingleDarkModelHasCorrectNumberOfStatesAndRoutes():
parameter_set = SingleDarkParameterSet()
parameter_set.set_parameter('N', 3)
model_factory = SingleDarkBlinkFactory()
model = model_factory.create_model(parameter_set)
num_states = model.get_num_states()
N = parameter_set.get_parameter('N')
expected_num_states = n_choose_k(N+3, 3)
error_message = "Got model with %d states, " \
"expected model with %d states.\n %s" % \
(num_states, expected_num_states, str(model))
nose.tools.eq_(num_states, expected_num_states,
error_message)
num_routes = model.get_num_routes()
nose.tools.ok_(num_routes > 0, "Model doesn't have routes.")
# print model.state_collection
# print model.route_collection
@nose.tools.istest
def DoubleDarkModelHasCorrectNumberOfStatesAndRoutes():
parameter_set = DoubleDarkParameterSet()
parameter_set.set_parameter('N', 5)
parameter_set.set_parameter('log_kr_diff', -1.0)
model_factory = DoubleDarkBlinkFactory()
model = model_factory.create_model(parameter_set)
num_states = model.get_num_states()
N = parameter_set.get_parameter('N')
expected_num_states = n_choose_k(N+4, 4)
error_message = "Got model with %d states, " \
"expected model with %d states.\n %s" % \
(num_states, expected_num_states, str(model))
nose.tools.eq_(num_states, expected_num_states,
error_message)
num_routes = model.get_num_routes()
nose.tools.ok_(num_routes > 0, "Model doesn't have routes.")
@nose.tools.istest
def initial_vector_gives_probability_one_to_state_with_all_inactive():
parameter_set = SingleDarkParameterSet()
model_factory = SingleDarkBlinkFactory()
model = model_factory.create_model(parameter_set)
init_prob_vec = model.get_initial_probability_vector()
prob = init_prob_vec.get_state_probability(model.all_inactive_state_id)
nose.tools.eq_(prob, 1.0)
@nose.tools.istest
def ConnectedDarkModelHasCorrectNumberOfStatesAndRoutes():
parameter_set = ConnectedDarkParameterSet()
parameter_set.set_parameter('N', 3)
parameter_set.set_parameter('log_kr2', -1.0)
model_factory = ConnectedDarkBlinkFactory()
model = model_factory.create_model(parameter_set)
num_states = model.get_num_states()
N = parameter_set.get_parameter('N')
expected_num_states = n_choose_k(N+4, 4)
error_message = "Got model with %d states, " \
"expected model with %d states.\n %s" % \
(num_states, expected_num_states, str(model))
nose.tools.eq_(num_states, expected_num_states,
error_message)
num_routes = model.get_num_routes()
nose.tools.ok_(num_routes > 0, "Model doesn't have routes.")
print model.state_collection
print model.route_collection
| bsd-2-clause | 1,793,866,859,589,507,000 | 40.654321 | 75 | 0.65412 | false | 3.540399 | false | false | false |
tomzw11/Pydrone | route.py | 1 | 2000 | import matplotlib.pyplot as plt
import matplotlib.patches as patches
def route(root):
root_height = root[2]
coordinates = [\
[0.42*root_height+root[0],0.42*root_height+root[1],root_height/2],\
[-0.42*root_height+root[0],0.42*root_height+root[1],root_height/2],\
[-0.42*root_height+root[0],-0.15*root_height+root[1],root_height/2],\
[0.42*root_height+root[0],-0.15*root_height+root[1],root_height/2]]
return coordinates
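# Editor's sketch: with root = [0, 0, 16] the four points land at
# (+/-6.72, 6.72) and (+/-6.72, -2.4), each at half the root height (8).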
if __name__ == "__main__":
meter_to_feet = 3.28
root = [0,0,16*1]
print 'root',root,'\n'
level1 = route(root)
print 'level 1 \n'
print level1[0],'\n'
print level1[1],'\n'
print level1[2],'\n'
print level1[3],'\n'
print 'level 2 \n'
level2 = [[0]*3]*4
for x in xrange(4):
level2[x] = route(level1[x])
for y in xrange(4):
print 'level2 point[',x+1,y+1,']',level2[x][y],'\n'
fig, ax = plt.subplots()
ball, = plt.plot(6.72+1.52,6.72+1.52,'mo')
plt.plot(0,0,'bo')
plt.plot([level1[0][0],level1[1][0],level1[2][0],level1[3][0]],[level1[0][1],level1[1][1],level1[2][1],level1[3][1]],'ro')
rect_blue = patches.Rectangle((-13.44,-4.8),13.44*2,9.12*2,linewidth=1,edgecolor='b',facecolor='b',alpha = 0.1)
ax.add_patch(rect_blue)
rect_red = patches.Rectangle((0,4.23),13.44,9.12,linewidth=1,edgecolor='r',facecolor='r',alpha = 0.3)
ax.add_patch(rect_red)
plt.plot([level2[0][0][0],level2[0][1][0],level2[0][2][0],level2[0][3][0]],[level2[0][0][1],level2[0][1][1],level2[0][2][1],level2[0][3][1]],'go')
rect_green = patches.Rectangle((6.72,6.72+4.23/2),13.44/2,9.12/2,linewidth=1,edgecolor='g',facecolor='g',alpha = 0.5)
ax.add_patch(rect_green)
linear_s = [12,12]
plt.plot(12,12,'yo')
rect_yellow = patches.Rectangle((10,11),13.44/4,9.12/4,linewidth=1,edgecolor='y',facecolor='y',alpha = 0.5)
ax.add_patch(rect_yellow)
ax.legend([ball,rect_blue,rect_red,rect_green,rect_yellow],['Ball','Root View','Level 1 - 4 anchors','Level 2 - 16 anchors','Linear Search - 64 anchors'])
plt.axis([-13.44, 13.44, -4.8, 13.44])
plt.show()
| mit | 8,674,524,241,058,925,000 | 26.777778 | 155 | 0.6335 | false | 2.209945 | false | false | false |
MysterionRise/fantazy-predictor | enriching_data.py | 1 | 10896 | #!/usr/local/bin/python
# -*- coding: utf-8 -*-
import calendar
import os
import pandas as pd
# Scoring rules:
#
# for playing in a game: 2 points if 10 or more minutes were played; 1 point if fewer than 10 minutes
#
# for a win: 3 points (away); 2 points (at home)
#
# for a loss: minus 3 points (at home); minus 2 points (away)
#
# Points are accumulated as follows:
#
# points scored + assists + steals + rebounds +
# blocks + free throws made + two-pointers made + three-pointers made
#
# - free throw attempts - two-point attempts - three-point attempts -
# twice the number of turnovers - fouls (a worked sketch follows calc_fantasy below)
def convert_to_sec(time_str):
if pd.isnull(time_str):
return 0
try:
m, s = time_str.split(':')
return int(m) * 60 + int(s)
except Exception as inst:
print(time_str)
print(type(inst)) # the exception instance
print(inst.args) # arguments stored in .args
def get_sec(row):
time_str = row['minutes']
return convert_to_sec(time_str)
def getOrDefault(value):
if pd.isnull(value):
return 0
return int(value)
def extractYear(row):
return row['date'].year
def extractMonth(row):
return row['date'].month
def extractDay(row):
return row['date'].day
def concat1(row):
return row['opponent'] + str(row['year'])
def concat2(row):
return row['team'] + str(row['year'])
def concat3(row):
return row['name'] + str(row['year'])
def concat4(row):
return row['opponent'] + str(row['month']) + str(row['year'])
def concat5(row):
return row['team'] + str(row['month']) + str(row['year'])
def concat6(row):
return row['name'] + str(row['month']) + str(row['year'])
def getDayOfTheWeek(row):
day = calendar.day_name[row['date'].weekday()]
return day[:3]
def convert_age(row):
if pd.isnull(row['age']):
return 0
years, days = row['age'].split('-')
return int(years) + 1.0 * int(days) / 365
def split_result(x):
try:
sp = x.split('(')
return sp[0].strip(), sp[1][:-1]
except Exception as inst:
print(x)
print(type(inst)) # the exception instance
print(inst.args) # arguments stored in .args
# (FG + 0.5 * 3P) / FGA
def calc_efg(row):
try:
fg = row['fg']
fg3 = row['fg3']
fga = row['fga']
if fga == 0:
return 0.0
return (fg + 0.5 * fg3) / fga
except Exception as inst:
print(row)
print(type(inst)) # the exception instance
print(inst.args) # arguments stored in .args
def calc_fantasy(row):
if pd.isnull(row['minutes']):
return 0
fantasy_points = 0
if convert_to_sec(row['minutes']) >= 10 * 60:
fantasy_points += 2
else:
fantasy_points += 1
if 'W' in str(row['result']):
if row['location'] == '@':
fantasy_points += 3
else:
fantasy_points += 2
else:
if row['location'] == '@':
fantasy_points -= 2
else:
fantasy_points -= 3
fantasy_points += getOrDefault(row['pts'])
fantasy_points += getOrDefault(row['ast'])
fantasy_points += getOrDefault(row['stl'])
fantasy_points += getOrDefault(row['trb'])
fantasy_points += getOrDefault(row['blk'])
fantasy_points += getOrDefault(row['ft'])
fantasy_points += getOrDefault(row['fg'])
fantasy_points -= getOrDefault(row['fta'])
fantasy_points -= getOrDefault(row['fga'])
fantasy_points -= 2 * getOrDefault(row['tov'])
fantasy_points -= getOrDefault(row['pf'])
return fantasy_points
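# Editor's sketch: a worked example of the scoring above with an invented
# stat line (all values hypothetical); wrapped in a function so nothing
# executes on import.
def _demo_calc_fantasy():
    sample_row = {
        'minutes': '25:30', 'result': 'W (+7)', 'location': '',
        'pts': 20, 'ast': 5, 'stl': 2, 'trb': 7, 'blk': 1,
        'ft': 4, 'fta': 5, 'fg': 8, 'fga': 15, 'tov': 3, 'pf': 2,
    }
    # 2 (played >= 10 min) + 2 (home win) + (20+5+2+7+1+4+8)
    # - 5 (fta) - 15 (fga) - 6 (2*tov) - 2 (pf) = 23
    return calc_fantasy(sample_row)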
def enrich_player_df(df):
df['fantasy_points'] = df.apply(lambda row: calc_fantasy(row), axis=1)
df['year'] = df.apply(lambda row: extractYear(row), axis=1)
df['month'] = df.apply(lambda row: extractMonth(row), axis=1)
df['day'] = df.apply(lambda row: extractDay(row), axis=1)
df['opponent2'] = df.apply(lambda row: concat1(row), axis=1)
df['opponent3'] = df.apply(lambda row: concat4(row), axis=1)
df['team3'] = df.apply(lambda row: concat5(row), axis=1)
df['name3'] = df.apply(lambda row: concat6(row), axis=1)
df['name2'] = df.apply(lambda row: concat3(row), axis=1)
df['team2'] = df.apply(lambda row: concat2(row), axis=1)
df['age1'] = df.apply(lambda row: convert_age(row), axis=1)
df['seconds'] = df.apply(lambda row: get_sec(row), axis=1)
for i in range(1, 6):
df['mean_pts_' + str(i)] = df['pts'].rolling(i).mean().shift(1)
df['efg'] = df.apply(lambda row: calc_efg(row), axis=1)
df['mefg'] = df['efg'].expanding().mean().shift(1)
df['day_of_the_week'] = df.apply(lambda row: getDayOfTheWeek(row), axis=1)
df['mfp'] = df['fantasy_points'].expanding().mean().shift(1)
df['medfp'] = df['fantasy_points'].expanding().median().shift(1)
df['msec'] = df['seconds'].expanding().mean().shift(1)
df['mpts'] = df['pts'].expanding().mean().shift(1)
df['mast'] = df['ast'].expanding().mean().shift(1)
df['mtrb'] = df['trb'].expanding().mean().shift(1)
df['mstl'] = df['stl'].expanding().mean().shift(1)
df['mpf'] = df['pf'].expanding().mean().shift(1)
df['mtov'] = df['tov'].expanding().mean().shift(1)
df['mblk'] = df['blk'].expanding().mean().shift(1)
df['mfg'] = df['fg'].expanding().mean().shift(1)
df['mfg3'] = df['fg3'].expanding().mean().shift(1)
df['mft'] = df['ft'].expanding().mean().shift(1)
df['mfg3_pct'] = df['fg3_pct'].expanding().mean().shift(1)
df['mfg_pct'] = df['fg_pct'].expanding().mean().shift(1)
df['mft_pct'] = df['ft_pct'].expanding().mean().shift(1)
# number of games in last 5 days
df['rest_days'] = df['date'].diff().apply(lambda x: x.days)
for i in [1, 7, 10, 11, 12]:
df['mean_rest_days_' + str(i)] = df['rest_days'].rolling(i).mean().shift(1)
for i in [10, 21, 31, 38, 39]:
df['mean_fantasy_' + str(i)] = df['fantasy_points'].rolling(i).mean().shift(1)
for i in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]:
df['mean_sec_' + str(i)] = df['seconds'].rolling(i).mean().shift(1)
for i in [3, 4, 12, 16, 17, 28, 36]:
df['skew_fantasy_' + str(i)] = df['fantasy_points'].rolling(i).skew().shift(1)
return df
def enrich_player_df_for_upcoming_games(df):
df['fantasy_points'] = df.apply(lambda row: calc_fantasy(row), axis=1)
df['year'] = df.apply(lambda row: extractYear(row), axis=1)
df['month'] = df.apply(lambda row: extractMonth(row), axis=1)
df['day'] = df.apply(lambda row: extractDay(row), axis=1)
df['opponent2'] = df.apply(lambda row: concat1(row), axis=1)
df['opponent3'] = df.apply(lambda row: concat4(row), axis=1)
df['team3'] = df.apply(lambda row: concat5(row), axis=1)
df['name3'] = df.apply(lambda row: concat6(row), axis=1)
df['name2'] = df.apply(lambda row: concat3(row), axis=1)
df['team2'] = df.apply(lambda row: concat2(row), axis=1)
df['age1'] = df.apply(lambda row: convert_age(row), axis=1)
df['seconds'] = df.apply(lambda row: get_sec(row), axis=1)
for i in range(1, 6):
df['mean_pts_' + str(i)] = df['pts'].rolling(i).mean().shift(1)
df['efg'] = df.apply(lambda row: calc_efg(row), axis=1)
df['mefg'] = df['efg'].expanding().mean().shift(1)
df['day_of_the_week'] = df.apply(lambda row: getDayOfTheWeek(row), axis=1)
df['mfp'] = df['fantasy_points'].expanding().mean().shift(1)
df['medfp'] = df['fantasy_points'].expanding().median().shift(1)
df['msec'] = df['seconds'].expanding().mean().shift(1)
df['mpts'] = df['pts'].expanding().mean().shift(1)
df['mast'] = df['ast'].expanding().mean().shift(1)
df['mtrb'] = df['trb'].expanding().mean().shift(1)
df['mstl'] = df['stl'].expanding().mean().shift(1)
df['mpf'] = df['pf'].expanding().mean().shift(1)
df['mtov'] = df['tov'].expanding().mean().shift(1)
df['mblk'] = df['blk'].expanding().mean().shift(1)
df['mfg'] = df['fg'].expanding().mean().shift(1)
df['mfg3'] = df['fg3'].expanding().mean().shift(1)
df['mft'] = df['ft'].expanding().mean().shift(1)
df['mfg3_pct'] = df['fg3_pct'].expanding().mean().shift(1)
df['mfg_pct'] = df['fg_pct'].expanding().mean().shift(1)
df['mft_pct'] = df['ft_pct'].expanding().mean().shift(1)
    # rest days between consecutive games (rolling means computed below)
df['rest_days'] = df['date'].diff().apply(lambda x: x.days)
for i in [1, 7, 10, 11, 12]:
df['mean_rest_days_' + str(i)] = df['rest_days'].rolling(i).mean().shift(1)
for i in [10, 21, 31, 38, 39]:
df['mean_fantasy_' + str(i)] = df['fantasy_points'].rolling(i).mean().shift(1)
for i in [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]:
df['mean_sec_' + str(i)] = df['seconds'].rolling(i).mean().shift(1)
for i in [3, 4, 12, 16, 17, 28, 36]:
df['skew_fantasy_' + str(i)] = df['fantasy_points'].rolling(i).skew().shift(1)
return df
dateparse = lambda x: pd.datetime.strptime(x, '%Y-%m-%d')
def enrich_all_data():
for root, dirs, files in os.walk("nba"):
for file in files:
if file.endswith(".csv"):
try:
path = os.path.join(root, file)
if path.find('fantasy') == -1 and path.find('2018.csv') != -1:
f = open(path)
print(path)
lines = f.readlines()
if len(lines) > 1:
df = pd.read_csv(path,
parse_dates=['date'],
date_parser=dateparse)
if not df.empty:
df.fillna(df.mean(), inplace=True)
df = enrich_player_df(df)
join = os.path.join(root, "fantasy")
if not os.path.exists(join):
os.mkdir(join)
df.to_csv(os.path.join(root, "fantasy", file), index=False)
                except Exception as inst:
                    print(file)
                    if 'df' in locals():  # df may be unbound if read_csv failed
                        print(df.head())
                    print(type(inst))  # the exception instance
                    print(inst.args)   # arguments stored in .args
| mit | 7,115,765,170,995,633,000 | 34.2 | 131 | 0.562115 | false | 2.743461 | false | false | false |
Gixugif/CDRecording | Call_Detail_Record.py | 1 | 2248 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Title: Call_Detail_Record
# Description: Class for one CDR
# Date: 6/9/16
# Author: Jeffrey Zic
class Call_Detail_Record:
""" Call Detail Records contain metadata for phone calls."""
def __init__(self):
        # every field defaults to an empty string until populated
        self.bbx_cdr_id = ''
        self.network_addr = ''
        self.bbx_fax_inbound_id = ''
        self.billsec = ''
        self.original_callee_id_name = ''
        self.end_timestamp = ''
        self.direction = ''
        self.destination_name = ''
        self.transfer_source = ''
        self.original_callee_id_number = ''
        self.write_rate = ''
        self.transfer_to = ''
        self.write_codec = ''
        self.context = ''
        self.callee_bbx_phone_id = ''
        self.destination_number = ''
        self.caller_id_number = ''
        self.caller_bbx_phone_registration_id = ''
        self.hangup_cause = ''
        self.original_caller_id_number = ''
        self.gateway_name = ''
        self.record_file_name = ''
        self.callee_bbx_user_id = ''
        self.record_file_checksum = ''
        self.caller_bbx_phone_id = ''
        self.duration = ''
        self.callee_bbx_phone_registration_id = ''
        self.answer_timestamp = ''
        self.hangup_originator = ''
        self.transfer_history = ''
        self.call_type = ''
        self.source_table = ''
        self.bbx_queue_id = ''
        self.hold_events = ''
        self.start_timestamp = ''
        self.uuid = ''
        self.record_keep_days = ''
        self.bbx_fax_outbound_id = ''
        self.bleg_uuid = ''
        self.bbx_callflow_id = ''
        self.destination_list = ''
        self.caller_id_name = ''
        self.click_to_call_uuid = ''
        self.read_rate = ''
        self.original_caller_id_name = ''
        self.recording_retention = ''
        self.caller_bbx_user_id = ''
        self.destination_type = ''
        self.outbound_route = ''
        self.processed = ''
        self.accountcode = ''
        self.read_codec = ''
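
# Minimal usage sketch (field values below are made up for illustration):
if __name__ == '__main__':
    cdr = Call_Detail_Record()
    cdr.caller_id_number = '5551234'
    cdr.duration = '42'
    print cdr.caller_id_number, cdr.duration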
| gpl-3.0 | -1,865,762,430,259,183,400 | 33.060606 | 64 | 0.483986 | false | 3.5125 | false | false | false |
kannon92/psi4 | doc/sphinxman/source/psi4doc/ext/psidomain.py | 1 | 1221 | #
# @BEGIN LICENSE
#
# Psi4: an open-source quantum chemistry software package
#
# Copyright (c) 2007-2016 The Psi4 Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""Extension to format and index PSI variables."""
#Sphinx.add_object_type(psivar, rolename, indextemplate='', parse_node=None, ref_nodeclass=None, objname='', doc_field_types=[])
def setup(app):
app.add_object_type('psivar', 'psivar', indextemplate='single: %s')
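
# Example use from reStructuredText, assuming this extension is enabled in
# conf.py (the variable name below is illustrative):
#
#   .. psivar:: CURRENT ENERGY
#
# and, inline, :psivar:`CURRENT ENERGY`; both are indexed through the
# 'single: %s' template registered above.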
| gpl-2.0 | 5,442,838,222,191,783,000 | 33.885714 | 128 | 0.746929 | false | 3.756923 | false | false | false |
CaptainDesAstres/Simple-Blender-Render-Manager | usefullFunctions.py | 1 | 1409 | #!/usr/bin/python3.4
# -*-coding:Utf-8 -*
import time
def now(short = True):
'''return current date in short or long form (HH:MM:SS or DD.MM.AAAA-HH:MM:SS)'''
if short == True:
return time.strftime('%H:%M:%S')
else:
return time.strftime('%d.%m.%Y-%H:%M:%S')
def columnLimit(value, limit, begin = True, sep = '|'):
    '''make a fixed-width text column'''
    if type(value) is not str:
        value = str(value)
    if begin is True:
        begin = limit  # number of leading characters to display
    if len(value) > limit:
        return (value[0:begin-1] + '…'  # leading characters
                + value[len(value)-(limit-begin):]  # trailing characters
                + sep)  # column separator
    else:
        return value + (' ' * (limit-len(value))) + sep  # pad to the required width
def indexPrintList(l):
    '''Print a list, one indexed item per line'''
for i, v in enumerate(l):
print(str(i)+'- '+str(v))
class XML:
    '''a class containing useful methods for XML'''
    entities = {
        '\'': '&apos;',
        '"': '&quot;',
        '<': '&lt;',
        '>': '&gt;'
    }

    def encode(txt):
        '''replace special characters by their XML entity representation'''
        txt = txt.replace('&', '&amp;')
        for entity, code in XML.entities.items():
            txt = txt.replace(entity, code)
        return txt

    def decode(txt):
        '''replace XML entity representations by the original characters'''
        for entity, code in XML.entities.items():
            txt = txt.replace(code, entity)
        txt = txt.replace('&amp;', '&')
        return txt
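
# Small usage sketch (example values only):
if __name__ == '__main__':
    print(columnLimit('a rather long cell value', 12))
    encoded = XML.encode('a < b & c')
    print(encoded)              # a &lt; b &amp; c
    print(XML.decode(encoded))  # a < b & c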
| mit | -1,501,886,194,108,361,200 | 15.552941 | 82 | 0.595593 | false | 2.980932 | false | false | false |
slub/vk2-georeference | georeference/views/user/georeferencehistory.py | 1 | 3257 | # -*- coding: utf-8 -*-
'''
Copyright (c) 2015 Jacob Mendt
Created on 07.10.15
@author: mendt
'''
import traceback
from pyramid.view import view_config
from pyramid.httpexceptions import HTTPInternalServerError
from sqlalchemy import desc
from georeference import LOGGER
from georeference.settings import OAI_ID_PATTERN
from georeference.utils.exceptions import ParameterException
from georeference.models.vkdb.georeferenzierungsprozess import Georeferenzierungsprozess
from georeference.models.vkdb.map import Map
from georeference.models.vkdb.metadata import Metadata
GENERAL_ERROR_MESSAGE = 'Something went wrong while trying to process your requests. Please try again or contact the administrators of the Virtual Map Forum 2.0.'
@view_config(route_name='user-history', renderer='json')
def generateGeoreferenceHistory(request):
def getUserId(request):
""" Parse the process id from the request.
:type request: pyramid.request
:return: str|None """
if request.method == 'GET' and 'userid' in request.matchdict:
return request.matchdict['userid']
return None
LOGGER.info('Request - Get georeference profile page.')
dbsession = request.db
try:
userid = getUserId(request)
if userid is None:
raise ParameterException("Wrong or missing userid.")
LOGGER.debug('Query georeference profile information from database for user %s'%userid)
queryData = request.db.query(Georeferenzierungsprozess, Metadata, Map).join(Metadata, Georeferenzierungsprozess.mapid == Metadata.mapid)\
.join(Map, Georeferenzierungsprozess.mapid == Map.id)\
.filter(Georeferenzierungsprozess.nutzerid == userid)\
.order_by(desc(Georeferenzierungsprozess.id))
LOGGER.debug('Create response list')
georef_profile = []
points = 0
for record in queryData:
georef = record[0]
metadata = record[1]
mapObj = record[2]
#
# create response
#
responseRecord = {'georefid':georef.id, 'mapid':OAI_ID_PATTERN%georef.mapid,
'georefparams': georef.georefparams, 'time': str(metadata.timepublish), 'transformed': georef.processed,
'isvalide': georef.adminvalidation, 'title': metadata.title, 'key': mapObj.apsdateiname,
'georeftime':str(georef.timestamp),'type':georef.type,
'published':georef.processed, 'thumbnail': metadata.thumbsmid}
# add boundingbox if exists
if mapObj.boundingbox is not None:
responseRecord['boundingbox'] = mapObj.getExtentAsString(dbsession, 4326)
# calculate points
            if georef.adminvalidation != 'invalide':
points += 20
georef_profile.append(responseRecord)
LOGGER.debug('Response: %s'%georef_profile)
return {'georef_profile':georef_profile, 'points':points}
except Exception as e:
LOGGER.error('Error while trying to request georeference history information');
LOGGER.error(e)
LOGGER.error(traceback.format_exc())
        raise HTTPInternalServerError(GENERAL_ERROR_MESSAGE)
| gpl-3.0 | -4,433,242,273,326,549,500 | 38.253012 | 162 | 0.671784 | false | 3.872771 | false | false | false
mtpajula/ijonmap | core/project.py | 1 | 4348 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from .elements.point import Point
from .elements.line import Line
from .elements.polygon import Polygon
import os
class Project(object):
def __init__(self, messages):
self.messages = messages
self.points = []
self.lines = []
self.polygons = []
self.title = None
self.filepath = None
self.users = []
self.saved = True
self.draw = True
def get_title(self):
if self.title is not None:
return self.title
if self.filepath is not None:
filename, file_extension = os.path.splitext(self.filepath)
return filename + file_extension
return '...'
def is_empty(self):
if len(self.points) > 0:
return False
if len(self.lines) > 0:
return False
if len(self.polygons) > 0:
return False
return True
def get(self, element_type):
if element_type == 'point':
return self.points
elif element_type == 'line':
return self.lines
elif element_type == 'polygon':
return self.polygons
else:
return False
def get_id(self, element_type, id):
for element in self.get(element_type):
if element.id == id:
return element
return False
def new_point(self):
return Point()
def new_line(self):
return Line()
def new_polygon(self):
return Polygon()
def new(self, element_type):
if element_type == 'point':
return self.new_point()
elif element_type == 'line':
return self.new_line()
elif element_type == 'polygon':
return self.new_polygon()
else:
return False
def save(self, element, show_ok_message = True):
if show_ok_message:
m = self.messages.add("save " + element.type, "Project")
if element.is_valid() is not True:
self.messages.set_message_status(m, False, element.type + " is not valid")
return False
self.get(element.type).append(element)
if show_ok_message:
self.messages.set_message_status(m, True)
self.saved = False
return True
def edit(self, element):
m = self.messages.add("edit " + element.type, "Project")
self.messages.set_message_status(m, True)
self.saved = False
return True
def delete(self, element):
m = self.messages.add("delete " + element.type, "Project")
elements = self.get(element.type)
if element in elements:
elements.remove(element)
self.messages.set_message_status(m, True)
self.saved = False
return True
def is_in_range(self, elements, num):
try:
elements[num]
return True
        except IndexError:
return False
def get_dictionary(self):
data = {
'title' : self.title
}
d = {
'data' : data,
'points' : [],
'lines' : [],
'polygons' : []
}
for point in self.points:
d['points'].append(point.get_dictionary())
for line in self.lines:
d['lines'].append(line.get_dictionary())
for polygon in self.polygons:
d['polygons'].append(polygon.get_dictionary())
return d
def set_dictionary(self, d):
if 'data' in d:
if 'title' in d['data']:
self.title = d['data']['title']
for data in d['points']:
p = self.new_point()
p.set_dictionary(data)
self.get('point').append(p)
for data in d['lines']:
l = self.new_line()
l.set_dictionary(data)
self.get('line').append(l)
for data in d['polygons']:
pl = self.new_polygon()
pl.set_dictionary(data)
self.get('polygon').append(pl)
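
# Usage sketch (the `messages` helper is an application service used for
# save/edit/delete notifications; shown here only as a placeholder):
#
#   project = Project(messages)
#   point = project.new('point')
#   project.save(point)
#   print(project.get_title(), project.is_empty())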
| gpl-2.0 | 1,836,806,213,838,873,000 | 26.518987 | 86 | 0.49172 | false | 4.405268 | false | false | false |
pyfa-org/eos | eos/util/repr.py | 1 | 1866 | # ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
def make_repr_str(instance, spec=None):
"""Prepare string for printing info about passed object.
Args:
instance: Object which we should get info from.
spec (optional): Iterable which defines which fields we should include
in info string. Each iterable element can be single attribute name,
or tuple/list with two elements, where first defines reference name
for info string, and second defines actual attribute name.
Returns:
String, which includes object's class name and requested additional
fields.
"""
arg_list = []
for field in spec or ():
if isinstance(field, str):
repr_name, attr_name = field, field
else:
repr_name, attr_name = field
attr_val = getattr(instance, attr_name, 'N/A')
arg_list.append('{}={}'.format(repr_name, attr_val))
return '<{}({})>'.format(type(instance).__name__, ', '.join(arg_list))
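
# Usage sketch (the Point class below is illustrative, not part of Eos):
if __name__ == '__main__':

    class Point(object):
        def __init__(self, x, y):
            self.x = x
            self.y = y

        def __repr__(self):
            return make_repr_str(self, spec=('x', ('y_coord', 'y')))

    print(repr(Point(1, 2)))  # <Point(x=1, y_coord=2)>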
| lgpl-3.0 | 6,969,660,614,217,755,000 | 41.409091 | 80 | 0.620579 | false | 4.464115 | false | false | false |
Ophrys-Project/Ophrys | ophrys/utils/models.py | 1 | 4817 | from django.conf.urls import patterns, url, include
from django.core.urlresolvers import reverse, NoReverseMatch
from django.db import models
from .views import ListView, CreateView, DetailView, UpdateView, DeleteView
class GetAbsoluteUrlMixin:
"""
Mixin to add the methods get_absolute_url() and get_absolute_url_name()
to a model class. These methods look for an url name in the nested
namespace. The top level namespace is the name of the application
(including the name of the project), e. g. `yourproject.yourapp`. The
low level namespace is the name of the current model (class), e. g.
`YourModel`. The url name can be something like `list`, `create`,
`detail`, `update` or `delete`. So this mixin tries to reverse e. g.
`yourproject.yourapp:YourModel:detail`. The named urls except `list`
and `create` have to accept either a pk argument or a slug argument.
"""
def get_absolute_url(self, url_name='detail'):
"""
Returns the url concerning the given url name. The url must accept
a pk argument or a slug argument if its name is not `list` or
`create`.
"""
if url_name == 'list' or url_name == 'create':
return reverse(self.get_absolute_url_name(url_name))
try:
return reverse(self.get_absolute_url_name(url_name), kwargs={'pk': str(self.pk)})
except NoReverseMatch:
pass
# TODO: Raise an specific error message if self.slug does not exist or
# reverse does not find an url.
return reverse(self.get_absolute_url_name(url_name), kwargs={'slug': str(self.slug)})
def get_absolute_url_name(self, url_name='detail'):
"""
Returns the full url name (including namespace patterns) of the
given url name.
"""
project_app_name = type(self).__module__.split('.models')[0]
class_name = type(self).__name__
return '%s:%s:%s' % (project_app_name, class_name, url_name)
class AutoModelMixin(GetAbsoluteUrlMixin):
"""
    Mixin for models to add automatically designed urls and views.
Add this mixin to your model and include YourModel().urls in the
urlpatterns of your application::
url(r'^example/', include(YourModel().urls))
The urls and classes for a list view (`/example/`), a create view
(`/example/create/`), a detail view (`/example/<pk>/`), an update view
(`/example/<pk>/update/`) and a delete view (`/example/<pk>/delete/`)
will be setup. You only have to write the corresponding templates with
Django's default template names (`yourapp/yourmodel_list.html`,
`yourapp/yourmodel_form.html`, `yourapp/yourmodel_detail.html`,
`yourapp/yourmodel_confirm_delete.html`).
The GetAbsoluteUrlMixin is used, so you have to set the inclusion of the
urls into a specific top level namespace concerning to the name of the
application (including the name of the project)::
url(r'^example_app/', include(yourproject.yourapp.urls), namespace='yourproject.yourapp')
"""
@property
def urls(self):
"""
Attribute of mixed models. Include this in the urlpatterns of
your application::
url(r'^example/', include(YourModel().urls))
"""
return (self.get_urlpatterns(), None, type(self).__name__)
def get_urlpatterns(self):
"""
Method to get the urlpatterns object. Override this method to
customize the urls.
"""
return patterns(
'',
url(r'^$', self.get_view_class('List').as_view(), name='list'),
url(r'^create/$', self.get_view_class('Create').as_view(), name='create'),
url(r'^(?P<pk>\d+)/$', self.get_view_class('Detail').as_view(), name='detail'),
url(r'^(?P<pk>\d+)/update/$', self.get_view_class('Update').as_view(), name='update'),
url(r'^(?P<pk>\d+)/delete/$', self.get_view_class('Delete').as_view(), name='delete'))
def get_view_class(self, view_name):
"""
Method to construct the view classes. Override this method to
customize them.
"""
view_class_definitions = {'model': type(self)}
if view_name == 'List':
view_class = ListView
elif view_name == 'Create':
view_class = CreateView
elif view_name == 'Detail':
view_class = DetailView
elif view_name == 'Update':
view_class = UpdateView
elif view_name == 'Delete':
view_class = DeleteView
view_class_definitions['success_url_name'] = self.get_absolute_url_name('list')
else:
raise ValueError('The view name "%s" is unknown.' % view_name)
return type(view_name, (view_class,), view_class_definitions)
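
# Usage sketch (model, app, and url names are illustrative):
#
#   class Task(AutoModelMixin, models.Model):
#       name = models.CharField(max_length=100)
#
#   # in yourproject/yourapp/urls.py:
#   urlpatterns = patterns('',
#       url(r'^tasks/', include(Task().urls)),
#   )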
| mit | 7,332,932,344,475,288,000 | 42.396396 | 98 | 0.625493 | false | 3.99751 | false | false | false |
zekearneodo/ephys-tools | zk/pca_filter.py | 1 | 4586 | __author__ = 'chris'
import logging
import tables
from scipy import signal
import numpy as np
def PCA_filter(self, rec_h5_obj, probe):
"""
Filtering based on
doi:10.1016/S0165-0270(01)00516-7
"""
data = self.run_group.data
D_all_clean = rec_h5_obj.create_carray(self.run_group, '_data_PCA_filt',
shape=data.shape, atom=data.atom)
logging.info('Starting PCA filtering. Loading data matrix...')
D_all_raw = data.read()
# TODO: this read should be done by shank instead of entirely at once to save memory.
# -- filter and threshold parameters --
t_scalar = 5. # stdeviations away from noise to call a spike.
pre_spike_samples=10 # num samples before threshold-crossing to replace with spike-free version
post_spike_samples= 10 # num samples after ^^^
rate = data._v_attrs['sampling_rate_Hz']
low = 500.
high = 9895.675
_b, _a = signal.butter(3, (low/(rate/2.), high/(rate/2.)), 'pass')
sh_cnt = 0
# --- first stage cleaning.
for shank in probe.values():
sh_cnt += 1
logging.info('PCA filtering {0}'.format(sh_cnt))
channels = shank['channels']
# print channels
D = D_all_raw[:, channels]
D_clean = np.zeros(D.shape, dtype=D.dtype)
for i in xrange(len(channels)):
D_i = D[:,i]
D_i_clean = D[:, i].astype(np.float64)
noti = []
for ii in xrange(len(channels)):
if ii != i:
noti.append(ii)
D_noti = D[:, noti]
u, _, _ = np.linalg.svd(D_noti, full_matrices=False)
for i_pc in xrange(3): # first 3 pcs
pc = u[:, i_pc]
b = np.dot(D_i, pc) / np.dot(pc, pc)
pc *= b
D_i_clean -= pc
D_clean[:, i] = D_i_clean.astype(D.dtype)
# --- find spikes, replace spike times with D_noise (spike free noise representation D_noise)
D_noise = D - D_clean
D_filt_clean_1 = signal.filtfilt(_b,_a, D_clean, axis=0) #filtered representation of cleaned data to find spikes
D_nospikes = D.copy()
for i in xrange(len(channels)):
sig = D_filt_clean_1[:,i]
median = np.median(np.abs(sig))
std = median / .6745
threshold = t_scalar * std
sig_L = sig < -threshold
edges = np.convolve([1, -1], sig_L, mode='same')
t_crossings = np.where(edges == 1)[0]
for cross in t_crossings:
if cross == 0:
continue
elif cross < pre_spike_samples:
st = 0
end = cross+post_spike_samples
elif cross + post_spike_samples > len(sig) - 1:
st = cross-pre_spike_samples
end = len(sig)-1
else:
st = cross-pre_spike_samples
end = cross+post_spike_samples
D_nospikes[st:end, i] = D_noise[st:end,i]
# -- 2nd stage cleaning.
for i in xrange(len(channels)):
# just reuse D_clean's memory space here, as it is not being used by the algorithm any more.
D_i_clean = D[:, i].astype(np.float64, copy=True) # Copying from original data matrix.
D_i_nospikes = D_nospikes[:, i]
noti = []
for ii in xrange(len(channels)):
if ii != i:
noti.append(ii)
D_noti = D_nospikes[:, noti]
u, _, _ = np.linalg.svd(D_noti, full_matrices=False)
for i_pc in xrange(3): # first 3 pcs
pc = u[:, i_pc]
b = np.dot(D_i_nospikes, pc) / np.dot(pc, pc)
pc *= b
D_i_clean -= pc
D_clean[:, i] = D_i_clean.astype(D.dtype)
# put everything back into the a super D.
for i, ch in enumerate(channels):
# the channel order is the same as the row in D.
D_all_clean[:, ch] = D_clean[:, i]
D_all_clean.flush()
assert isinstance(data, tables.EArray)
logging.info('Renaming plfiltered data to "neural_PL_filtered"')
data.rename('neural_PL_filtered')
rec_h5_obj.flush()
logging.info('Renaming PCA filtered data to "data"')
D_all_clean.rename('data')
rec_h5_obj.flush()
logging.info('PCA filtering complete!')
    return D_all_clean
| gpl-2.0 | -8,351,679,364,114,622,000 | 38.60177 | 121 | 0.511557 | false | 3.582813 | false | false | false
mfalesni/python-kwargify | test_kwargify.py | 1 | 5569 | # -*- coding: utf-8 -*-
import pytest
from kwargify import kwargify
class TestFunctionWithNoArgs(object):
@pytest.fixture(scope="class")
def function(self):
@kwargify
def f():
return True
return f
def test_no_args_given(self, function):
function()
@pytest.mark.xfail
@pytest.mark.parametrize("n", range(1, 4))
def test_args_given(self, function, n):
function(*range(n + 1))
def test_kwargs_passed(self, function):
function(foo="bar")
class TestFunctionWithOnlyArgs(object):
@pytest.fixture(scope="class")
def function(self):
@kwargify
def f(a, b):
return True
return f
@pytest.mark.xfail
def test_no_args_given(self, function):
function()
@pytest.mark.xfail
def test_args_given_not_enough(self, function):
function(1)
def test_args_given_enough(self, function):
function(1, 2)
@pytest.mark.xfail
def test_only_kwargs_passed_wrong(self, function):
function(foo="bar")
@pytest.mark.xfail
def test_only_kwargs_passed_not_enough(self, function):
function(a="bar")
def test_only_kwargs_passed(self, function):
function(a=1, b=2)
def test_both_passed(self, function):
function(1, b=2)
class TestFunctionWithDefaultValues(object):
@pytest.fixture(scope="class")
def function(self):
@kwargify
def f(a, b=None):
return locals()
return f
def test_pass_only_required(self, function):
assert function(1)["b"] is None
def test_override_default_with_arg(self, function):
assert function(1, 2)["b"] == 2
def test_override_default_with_kwarg(self, function):
assert function(1, b=2)["b"] == 2
class TestKwargifyMethod(object):
class _TestClass(object):
def noargs(self):
return locals()
def onlyargs(self, a, b):
return locals()
def withdefault(self, a, b=None):
return locals()
@pytest.fixture(scope="class")
def o(self):
return self._TestClass()
# No args test
def test_no_args_given(self, o):
kwargify(o.noargs)()
@pytest.mark.xfail
@pytest.mark.parametrize("n", range(1, 4))
def test_args_given(self, o, n):
kwargify(o.noargs)(*range(n + 1))
def test_kwargs_passed(self, o):
kwargify(o.noargs)(foo="bar")
# Only args
@pytest.mark.xfail
def test_no_args_given_fails(self, o):
kwargify(o.onlyargs)()
@pytest.mark.xfail
def test_args_given_not_enough(self, o):
kwargify(o.onlyargs)(1)
def test_args_given_enough(self, o):
kwargify(o.onlyargs)(1, 2)
@pytest.mark.xfail
def test_only_kwargs_passed_wrong(self, o):
kwargify(o.onlyargs)(foo="bar")
@pytest.mark.xfail
def test_only_kwargs_passed_not_enough(self, o):
kwargify(o.onlyargs)(a="bar")
def test_only_kwargs_passed(self, o):
kwargify(o.onlyargs)(a=1, b=2)
def test_both_passed(self, o):
kwargify(o.onlyargs)(1, b=2)
# Default values
def test_pass_only_required(self, o):
assert kwargify(o.withdefault)(1)["b"] is None
def test_override_default_with_arg(self, o):
assert kwargify(o.withdefault)(1, 2)["b"] == 2
def test_override_default_with_kwarg(self, o):
assert kwargify(o.withdefault)(1, b=2)["b"] == 2
def test_wrapped_method():
# method wrapping should work the same as function wrapping,
# so this only does a minimum of sanity checks
class Foo(object):
@kwargify
def bar(self, x, y, z):
return x, y, z
f = Foo()
args = 1, 2, 3
# method fails correctly with incorrect args, just like a function does
with pytest.raises(TypeError):
f.bar(**dict(zip(('x', 'y'), args)))
# This should not explode (self is handled correctly)
ret = f.bar(**dict(zip(('x', 'y', 'z'), args)))
# Values should be returned in the same way that they were given
assert ret == args
def test_wrapped():
# double check that the function wrapper does its job
def f():
"""doctring!"""
pass
f.custom_attr = True
wrapped_f = kwargify(f)
# __wrapped__ should be set
assert wrapped_f.__wrapped__ is f
# dunder attrs should be copied over
assert wrapped_f.__doc__ == f.__doc__
# any public attrs on the wrapped func should be available
assert wrapped_f.custom_attr
def test_wrap_method():
"""Tst whether wrapping already existing method works."""
class A(object):
def a(self):
return True
def b(self, a, b):
return locals()
def c(self, a, b=None):
return locals()
a = A()
k_a = kwargify(a.a)
k_b = kwargify(a.b)
k_c = kwargify(a.c)
# Plain function
assert k_a()
# Without nonrequired parameters
with pytest.raises(TypeError):
k_b()
result = k_b(1, 2)
assert result["a"] == 1
assert result["b"] == 2
# With nonrequired params
with pytest.raises(TypeError):
k_c()
result_1 = k_c(1, 2)
result_2 = k_c(1)
assert result_1["a"] == result_2["a"] == 1
assert result_1["b"] == 2
assert result_2["b"] is None
def test_wrap_class_constructor():
class A(object):
def __init__(self, a, b=None):
self.a = a
self.b = b
cons = kwargify(A)
a = cons(a=1)
assert a.a == 1
assert a.b is None
| lgpl-3.0 | -1,896,215,505,816,903,200 | 23.318777 | 75 | 0.587718 | false | 3.459006 | true | false | false |
bmazin/SDR | Projects/ChannelizerSim/legacy/bin_width_1st_stage.py | 1 | 1524 |
import matplotlib.pyplot as plt
import scipy.signal
import numpy as np
import math
import random
from matplotlib.backends.backend_pdf import PdfPages
samples = 51200
L = samples/512
fs = 512e6
dt = 1/fs
time = [i*dt for i in range(samples)]
def pfb_fir(x):
N = len(x)
T = 4
L = 512
bin_width_scale = 2.5
dx = T*math.pi/L/T
X = np.array([n*dx-T*math.pi/2 for n in range(T*L)])
coeff = np.sinc(bin_width_scale*X/math.pi)*np.hanning(T*L)
y = np.array([0+0j]*(N-T*L))
for n in range((T-1)*L, N):
m = n%L
coeff_sub = coeff[L*T-m::-L]
y[n-T*L] = (x[n-(T-1)*L:n+L:L]*coeff_sub).sum()
return y
R = 100/5
#freqs = [i*1e5 + 6.0e6 for i in range(R)]
freqs = [i*5e4 + 6.0e6 for i in range(R*8)]
bin = []
bin_pfb = []
for f in freqs:
print f
signal = np.array([complex(math.cos(2*math.pi*f*t), math.sin(2*math.pi*f*t)) for t in time])
y = pfb_fir(signal)
bin_pfb.append(np.fft.fft(y[0:512])[10])
bin = np.array(bin)
bin_pfb = np.array(bin_pfb)
freqs = np.array(freqs)/1e6
b = scipy.signal.firwin(20, cutoff=0.125, window="hanning")
w,h = scipy.signal.freqz(b,1, 4*R, whole=1)
h = np.array(h[2*R:4*R].tolist()+h[0:2*R].tolist())
#h = np.array(h[20:40].tolist()+h[0:20].tolist())
fig = plt.figure()
ax0 = fig.add_subplot(111)
#ax0.plot(freqs, abs(fir9), '.', freqs, abs(fir10), '.', freqs, abs(fir11), '.')
ax0.plot(freqs, 10*np.log10(abs(bin_pfb)/512), 'k-')
ax0.set_xlabel('Frequency (MHz)')
ax0.set_ylabel('Gain (dB)')
ax0.set_ylim((-50,0))
plt.show()
#ax0.axvline(x = 10, linewidth=1, color='k')
| gpl-2.0 | 6,725,326,808,693,590,000 | 21.411765 | 93 | 0.625328 | false | 2.122563 | false | false | false |
xpostudio4/red-de-emprendimiento | app/institutions/views.py | 1 | 4872 | from django.contrib.auth import (login as django_login, authenticate,
logout as django_logout)
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import SetPasswordForm
from django.http import JsonResponse, HttpResponseRedirect
from django.shortcuts import render, get_object_or_404
from django.views.decorators.http import require_POST
from .models import Event, Organization, UserProfile
from .forms import (CustomUserCreationForm,
DashboardUserCreationForm,
EventForm, OrganizationForm,
UserProfileLoginForm)
@login_required
def approve_organization(request, organization_id):
"""This function responde to an ajax request asking to approve an
organization"""
if request.user.is_admin:
organization = Organization.objects.get(id=organization_id)
organization.is_active = True
organization.save()
return JsonResponse({'is_approved': True})
return JsonResponse({'is_approved': False,
'reason': 'Solo los administradores pueden aprobar'})
@require_POST
@login_required
def create_event(request):
"""This view creates a new event from a registered organization,
it returns Json"""
form = EventForm(request.POST or None)
if form.is_valid():
event = form.save(commit=False)
event.organization = request.user.organization
event.save()
form.save_m2m()
return HttpResponseRedirect('/dashboard/')
@require_POST
@login_required
def dashboard_usercreation(request):
"""
This view helps to create a new user of the
person belonging to the organization.
"""
user_form = DashboardUserCreationForm(request.POST or None)
if user_form.is_valid():
new_user = user_form.save(commit=False)
new_user.organization = request.user.organization
new_user.save()
return HttpResponseRedirect('/dashboard/')
@require_POST
@login_required
def dashboard_userdeletion(request, user_id):
"""
    This view deletes a user associated with an organization, after
    validating that the target user belongs to the same organization
    as the requester.
"""
user_to_delete = UserProfile.objects.get(pk=user_id)
if user_to_delete.organization == request.user.organization:
user_to_delete.delete()
return JsonResponse({'is_deleted': True})
return JsonResponse({'is_deleted': False})
@require_POST
@login_required
def delete_event(request, event_id):
"""
This view deletes the event after receiving a POST request.
"""
event = get_object_or_404(Event, id=event_id)
if request.user.organization == event.organization:
event.delete()
return JsonResponse({"is_deleted": True})
return JsonResponse({"is_deleted": False})
@require_POST
@login_required
def password_change(request):
"""This view process the password change of the user, returns Json"""
password_form = SetPasswordForm(request.user, request.POST or None)
#if form is valid
if password_form.is_valid():
#process the form by saving
password_form.save()
return JsonResponse({'is_changed': True})
else:
#else return the error as ajax
print password_form.errors
return JsonResponse({'is_changed': False,
'reasons': str(password_form.errors)})
@require_POST
def signin(request):
"""
Log in view
"""
form = UserProfileLoginForm(data=request.POST or None)
if request.method == 'POST':
if form.is_valid():
user = authenticate(email=request.POST['username'],
password=request.POST['password'])
if user is not None and user.is_active:
django_login(request, user)
return JsonResponse({'is_loggedin': True})
return JsonResponse({'is_loggedin': False,
'reason': "La contraseña es incorrecta"})
def signup(request):
"""
User registration view.
"""
if request.user.is_authenticated():
return HttpResponseRedirect('/')
user_form = CustomUserCreationForm(request.POST or None)
organization_form = OrganizationForm(request.POST or None)
if request.method == 'POST':
if user_form.is_valid() and organization_form.is_valid():
organization = organization_form.save()
user = user_form.save(commit=False)
user.is_admin = False
user.organization = organization
user.save()
return HttpResponseRedirect('/')
return render(request,
'accounts/signup.html',
{'user_form': user_form,
'organization_form': organization_form},
)
                  )
| mit | 6,066,139,324,412,251,000 | 34.304348 | 78 | 0.650246 | false | 4.385239 | false | false | false
noironetworks/group-based-policy | gbpservice/nfp/core/cfg.py | 1 | 1671 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg as oslo_config
CONF = oslo_config.CONF
NFP_OPTS = [
oslo_config.IntOpt(
'workers',
default=1,
help='Number of event worker process to be created.'
),
oslo_config.ListOpt(
'nfp_modules_path',
default='gbpservice.nfp.core.test',
        help='Path for NFP modules. '
             'All modules from this path are autoloaded by the framework.'
),
oslo_config.StrOpt(
'backend',
default='rpc',
        help='Backend support for communicating with configurator.'
)
]
EXTRA_OPTS = [
oslo_config.StrOpt(
'logger_class',
default='gbpservice.nfp.core.log.WrappedLogger',
help='logger class path to handle logging seperately.'
),
]
def init(module, args, **kwargs):
"""Initialize the configuration. """
oslo_config.CONF.register_opts(EXTRA_OPTS)
oslo_config.CONF.register_opts(NFP_OPTS, module)
oslo_config.CONF(args=args, project='nfp',
version='%%(prog)s %s' % ('version'),
**kwargs)
return oslo_config.CONF
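
# Usage sketch (the module/group name and argv below are illustrative):
#
#   import sys
#   conf = init('nfp_module', sys.argv[1:])
#   print(conf.nfp_module.workers, conf.nfp_module.backend)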
| apache-2.0 | 1,557,896,110,659,903,700 | 29.944444 | 78 | 0.645721 | false | 3.868056 | true | false | false |
nhoffman/opiates | opiate/utils.py | 1 | 2361 | from collections import Iterable
import os
from os import path
import shutil
import logging
from __init__ import __version__
log = logging.getLogger(__name__)
def flatten(seq):
"""
Poached from http://stackoverflow.com/questions/2158395/flatten-an-irregular-list-of-lists-in-python
Don't flatten strings or dict-like objects.
"""
for el in seq:
if isinstance(el, Iterable) and not (isinstance(el, basestring) or hasattr(el, 'get')):
for sub in flatten(el):
yield sub
else:
yield el
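
# Example: nested sequences are flattened, while strings and dict-like
# objects are yielded whole (illustrative values):
#
#   >>> list(flatten([1, [2, 'abc', [{'k': 3}, 4]]]))
#   [1, 2, 'abc', {'k': 3}, 4]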
def get_outfile(args, label = None, ext = None, include_version = True):
"""
Return a file-like object open for writing. `args` is expected to
have attributes 'infile' (None or a string specifying a file
path), 'outfile' (None or a file-like object open for writing),
and 'outdir' (None or a string defining a dir-path). If
    `args.outfile` is None, the name of the outfile is derived from
the basename of `args.infile` and is written either in the same
directory or in `args.outdir` if provided.
"""
version = __version__ if include_version else None
if args.outfile is None:
dirname, basename = path.split(args.infile)
parts = filter(lambda x: x,
[path.splitext(basename)[0], version, label, ext])
outname = path.join(args.outdir or dirname,
'.'.join(parts))
if path.abspath(outname) == path.abspath(args.infile):
raise OSError('Input and output file names are identical')
outfile = open(outname, 'w')
else:
outfile = args.outfile
if not (hasattr(outfile, 'write') and not outfile.closed and 'w' in outfile.mode):
raise OSError('`args.outfile` must be a file-like object open for writing')
log.debug(outfile)
return outfile
def mkdir(dirpath, clobber = False):
"""
Create a (potentially existing) directory without errors. Raise
OSError if directory can't be created. If clobber is True, remove
dirpath if it exists.
"""
if clobber:
shutil.rmtree(dirpath, ignore_errors = True)
try:
os.mkdir(dirpath)
except OSError, msg:
pass
if not path.exists(dirpath):
raise OSError('Failed to create %s' % dirpath)
return dirpath
| gpl-3.0 | -4,849,164,687,510,369,000 | 31.342466 | 104 | 0.633206 | false | 4.091854 | false | false | false |
Seekatar/pcg | Games/FineControl.py | 1 | 2743 | import base
import datetime
import random
from FixedRandomGame import FixedRandomGame as __base
# use __base, otherwise when searching for games, FixedRandomGame shows up multiple times
class FineControl(__base):
"""
Touch four plates in patterns as fast as you can.
Level 1: tight, clockwise 5 times
Level 2: tight, anti clockwise 5 times
Level 3: tight, repeat 4 each: clockwise, anticlockwise, diagonal1, diagonal2
Level 4: wide, clockwise 5 times
Level 5: wide, anti clockwise 5 times
Level 6: wide, repeat 4 each: clockwise, anticlockwise, diagonal1, diagonal2
"""
def GameInfo():
"""
return tuple of (name,desc,levels,author,date,version)
"""
return ("FineControl",
"Tight patterns of plates",
6, #levels
"Jim Wallace",
datetime.date(2015,6,19),
'0.1')
GameInfo = staticmethod(GameInfo)
# patterns
_clockwise = (1,2,5,4)
_anticlockwise = _clockwise[::-1] #reverse
_diagonal1 = (1,5)
_diagonal2 = (2,4)
_wclockwise = (1,3,9,7)
_wanticlockwise = _wclockwise[::-1] #reverse
_wdiagonal1 = (1,9)
_wdiagonal2 = (3,7)
def __init__(self):
super(FineControl,self).__init__()
self._timeout_sec = 10
self._interval_sec = 0
self._pattern = None
self._pattern_index = -1
self.LOOP_CNT = 0
def initialize(self,hardware,user,level):
"""
Initialize
"""
super(FineControl,self).initialize(hardware,user,level)
if self.level == 1:
self._pattern = FineControl._clockwise*5
elif self.level == 2:
self._pattern = FineControl._anticlockwise*5
elif self.level == 3:
repeat = 4
self._pattern = FineControl._clockwise*repeat+FineControl._anticlockwise*repeat+FineControl._diagonal1*repeat+FineControl._diagonal2*repeat
elif self.level == 4:
self._pattern = FineControl._wclockwise*5
elif self.level == 5:
self._pattern = FineControl._wanticlockwise*5
else:
repeat = 4
self._pattern = FineControl._wclockwise*repeat+FineControl._wanticlockwise*repeat+FineControl._wdiagonal1*repeat+FineControl._wdiagonal2*repeat
# index for next plate
self._pattern_index = -1
self.LOOP_CNT = len(self._pattern)
def get_next_plate(self):
"""
override to change number of plates, etc.
"""
self._pattern_index += 1
return self._pattern[self._pattern_index]
| mit | 47,598,030,569,357,740 | 32.048193 | 155 | 0.574918 | false | 3.804438 | false | false | false |
DIRACGrid/COMDIRAC | Interfaces/scripts/dgetenv.py | 1 | 1529 | #! /usr/bin/env python
"""
print DCommands session environment variables
"""
import DIRAC
from COMDIRAC.Interfaces import critical
from COMDIRAC.Interfaces import DSession
if __name__ == "__main__":
from COMDIRAC.Interfaces import ConfigCache
from DIRAC.Core.Base import Script
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
'Usage:',
' %s [[section.]option]' % Script.scriptName,
'Arguments:',
' section: display all options in section',
'++ OR ++',
' section.option: display section specific option',] )
)
configCache = ConfigCache()
Script.parseCommandLine( ignoreErrors = True )
configCache.cacheConfig()
args = Script.getPositionalArgs()
session = DSession( )
if not args:
retVal = session.listEnv( )
if not retVal[ "OK" ]:
print "Error:", retVal[ "Message" ]
DIRAC.exit( -1 )
for o, v in retVal[ "Value" ]:
print o + "=" + v
DIRAC.exit( 0 )
arg = args[ 0 ]
section = None
option = None
if "." in arg:
section, option = arg.split( "." )
else:
option = arg
ret = None
if section:
ret = session.get( section, option )
else:
ret = session.getEnv( option )
if not ret[ "OK" ]:
print critical( ret[ "Message" ] )
print ret[ "Value" ]
| gpl-3.0 | 3,005,090,108,459,228,000 | 22.890625 | 97 | 0.517332 | false | 4.189041 | false | false | false |
RPGOne/Skynet | pytorch-master/torch/nn/modules/upsampling.py | 1 | 3469 | from numbers import Integral
from .module import Module
from .. import functional as F
from .utils import _pair
class _UpsamplingBase(Module):
def __init__(self, size=None, scale_factor=None):
super(_UpsamplingBase, self).__init__()
if size is None and scale_factor is None:
raise ValueError('either size or scale_factor should be defined')
if scale_factor is not None and not isinstance(scale_factor, Integral):
raise ValueError('scale_factor must be of integer type')
self.size = _pair(size)
self.scale_factor = scale_factor
def __repr__(self):
if self.scale_factor is not None:
info = 'scale_factor=' + str(self.scale_factor)
else:
info = 'size=' + str(self.size)
return self.__class__.__name__ + '(' + info + ')'
class UpsamplingNearest2d(_UpsamplingBase):
"""
Applies a 2D nearest neighbor upsampling to an input signal composed of several input
channels.
To specify the scale, it takes either the :attr:`size` or the :attr:`scale_factor`
    as its constructor argument.
When `size` is given, it is the output size of the image (h, w).
Args:
size (tuple, optional): a tuple of ints (H_out, W_out) output sizes
scale_factor (int, optional): the multiplier for the image height / width
Shape:
- Input: :math:`(N, C, H_{in}, W_{in})`
- Output: :math:`(N, C, H_{out}, W_{out})` where
:math:`H_{out} = floor(H_{in} * scale\_factor)`
:math:`W_{out} = floor(W_{in} * scale\_factor)`
Examples::
>>> inp
Variable containing:
(0 ,0 ,.,.) =
1 2
3 4
[torch.FloatTensor of size 1x1x2x2]
>>> m = nn.UpsamplingNearest2d(scale_factor=2)
>>> m(inp)
Variable containing:
(0 ,0 ,.,.) =
1 1 2 2
1 1 2 2
3 3 4 4
3 3 4 4
[torch.FloatTensor of size 1x1x4x4]
"""
def forward(self, input):
return F.upsample_nearest(input, self.size, self.scale_factor)
class UpsamplingBilinear2d(_UpsamplingBase):
"""
Applies a 2D bilinear upsampling to an input signal composed of several input
channels.
To specify the scale, it takes either the :attr:`size` or the :attr:`scale_factor`
    as its constructor argument.
When `size` is given, it is the output size of the image (h, w).
Args:
size (tuple, optional): a tuple of ints (H_out, W_out) output sizes
scale_factor (int, optional): the multiplier for the image height / width
Shape:
- Input: :math:`(N, C, H_{in}, W_{in})`
- Output: :math:`(N, C, H_{out}, W_{out})` where
:math:`H_{out} = floor(H_{in} * scale\_factor)`
:math:`W_{out} = floor(W_{in} * scale\_factor)`
Examples::
>>> inp
Variable containing:
(0 ,0 ,.,.) =
1 2
3 4
[torch.FloatTensor of size 1x1x2x2]
>>> m = nn.UpsamplingBilinear2d(scale_factor=2)
>>> m(inp)
Variable containing:
(0 ,0 ,.,.) =
1.0000 1.3333 1.6667 2.0000
1.6667 2.0000 2.3333 2.6667
2.3333 2.6667 3.0000 3.3333
3.0000 3.3333 3.6667 4.0000
[torch.FloatTensor of size 1x1x4x4]
"""
def forward(self, input):
return F.upsample_bilinear(input, self.size, self.scale_factor)
| bsd-3-clause | -1,060,963,812,535,801,200 | 29.429825 | 89 | 0.566734 | false | 3.417734 | false | false | false |
iliavolyova/evo-clustering | src/stats.py | 1 | 5952 | from __future__ import division
import os
from functools import partial
import log as logger
import core
import gui_graphs
from PyQt4.QtGui import *
defaultParams = {
'Dataset' : 'Iris',
'Number of generations' : 100,
'Population size': 20,
'Max clusters' : 5,
'Fitness method': 'db',
'q' : 2,
't' : 2,
'Distance measure': 'Minkowski_2',
'Feature significance': True
}
class Stats():
def __init__(self, window):
self.window = window
self.plots = {}
self.setup_ui()
def setup_ui(self):
self.setup_table()
self.populate_combo()
def populate_combo(self):
self.resfolder = os.path.join('..', 'res')
self.run_groups = []
for dirname, dirnames, filenames in os.walk(self.resfolder):
for subdirname in dirnames:
self.run_groups.append(subdirname)
for r in self.run_groups:
self.window.datasetComboBox.addItem(r)
self.window.datasetComboBox.activated.connect(self.run_group_changed)
def run_group_changed(self, rg_index):
run_paths = []
self.runs = []
if rg_index != 0:
basepath = os.path.join(self.resfolder, self.run_groups[rg_index-1])
for dirname, dirnames, filenames in os.walk(basepath):
for f in filenames:
run_paths.append(os.path.join(basepath, f))
else:
self.table.clearContents()
self.clearLabels()
return
log = logger.Log()
for path in run_paths:
run = {}
log.load(path)
run['params'] = log.head_as_array
run['colormaps'] = log.colormaps
run['measures'] = log.measures
dirs, filename = os.path.split(path)
run['dataset'] = filename.split('_')[2]
run['name'] = filename
self.runs.append(run)
params = self.get_params(self.runs[0])
        params['Feature significance'] = False if params['Distance measure'] != 'Minkowski_2' else params['Feature significance']
self.window.label_dataset.setText(params['Dataset'])
opt_config = core.Config(params)
self.window.label_classes.setText(str(opt_config.dataset.params['Classes']))
distribution = []
for k, v in opt_config.dataset.params['Clusters'].iteritems():
distribution.append(v)
self.window.label_distribution.setText(str(distribution))
self.populate_table()
def populate_table(self):
self.table.clearContents()
self.table.setRowCount(len(self.runs)+1)
cls_sum=0
dist_sum=[]
dist_cnt=[]
runs_by_name = sorted(self.runs, key=lambda run: run['name'])
for row, run in enumerate(runs_by_name):
colormap = run['colormaps'][-1]
l_counts = [colormap.count(x) for x in set(colormap)]
l_counts.sort(reverse=True)
for index, val in enumerate(l_counts):
if index >= len(dist_sum):
dist_sum.append(val)
dist_cnt.append(1)
else:
dist_sum[index] += val
dist_cnt[index] += 1
cls_sum += len(l_counts)
params = self.get_params(run)
conf = core.Config(params)
for col in range(6):
item = QTableWidgetItem('')
if col == 0:
item = QTableWidgetItem(run['name'][14:])
elif col == 1:
item = QTableWidgetItem(str(len(l_counts)))
elif col == 2:
item = QTableWidgetItem(str(l_counts))
elif col == 3:
item = QTableWidgetItem('%.4f' % (1 / conf.dataset.getOptimalFitness(conf)))
elif col == 4:
item = QTableWidgetItem('%.4f' % (1 / run['measures'][-1][5]))
elif col == 5:
btn = QPushButton(self.table)
btn.setText('Show')
btn.clicked.connect(partial(self.show_details, row - 1))
self.table.setCellWidget(row+1, col, btn)
if col != 5:
self.table.setItem(row+1, col, item)
avg_clsnum = '%.3f' % (cls_sum / len(self.runs))
avg_dist = []
for index, val in enumerate(dist_sum):
avg_dist.append(dist_sum[index] / dist_cnt[index])
avg_dist_str = ["%.1f" % t for t in avg_dist]
for index, val in enumerate(['Average', avg_clsnum, '[' + ", ".join(avg_dist_str) + ']']):
item = QTableWidgetItem(val)
self.table.setItem(0, index, item)
def show_details(self, row):
self.plots[row] = gui_graphs.DetailsPlot(self.runs[row])
def get_params(self, run):
defaultParams['Dataset'] = run['dataset']
defaultParams['Number of generations'] = int(run['params'][2])
defaultParams['Population size'] = int(run['params'][1])
defaultParams['Max clusters'] = int(run['params'][0])
defaultParams['Fitness method'] = run['params'][3]
defaultParams['Distance measure'] = run['params'][4]
defaultParams['q'] = int(run['params'][5])
defaultParams['t'] = int(run['params'][6])
return defaultParams
def setup_table(self):
self.table = self.window.table_results
self.table.setColumnWidth(0, 235)
self.table.setColumnWidth(1, 50)
self.table.setColumnWidth(2, 180)
self.table.setColumnWidth(3, 65)
self.table.setColumnWidth(4, 65)
self.table.setColumnWidth(5, 60)
def clearLabels(self):
self.window.label_classes.setText('')
self.window.label_dataset.setText('')
        self.window.label_distribution.setText('')
| mit | 8,746,288,657,055,795,000 | 34.434524 | 132 | 0.541835 | false | 3.890196 | false | false | false