repo_name (stringlengths 5-100) | path (stringlengths 4-299) | copies (stringclasses, 990 values) | size (stringlengths 4-7) | content (stringlengths 666-1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64 3.17-100) | line_max (int64 7-1k) | alpha_frac (float64 0.25-0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
ojake/pytest | _pytest/assertion/oldinterpret.py | 41 | 18298 |
import traceback
import types
import py
import sys, inspect
from compiler import parse, ast, pycodegen
from _pytest.assertion.util import format_explanation, BuiltinAssertionError
passthroughex = py.builtin._sysex
class Failure:
def __init__(self, node):
self.exc, self.value, self.tb = sys.exc_info()
self.node = node
class View(object):
"""View base class.
If C is a subclass of View, then C(x) creates a proxy object around
the object x. The actual class of the proxy is not C in general,
    but a *subclass* of C determined by the rules below. To avoid confusion,
    we call the class of the proxy the view class (a subclass of C, and so
    of View) and the class of x the object class.
Attributes and methods not found in the proxy are automatically read on x.
Other operations like setting attributes are performed on the proxy, as
determined by its view class. The object x is available from the proxy
as its __obj__ attribute.
The view class selection is determined by the __view__ tuples and the
optional __viewkey__ method. By default, the selected view class is the
most specific subclass of C whose __view__ mentions the class of x.
If no such subclass is found, the search proceeds with the parent
    object classes. For example, C(True) will first look for a subclass
    of C with __view__ = (..., bool, ...) and, only if it doesn't find any,
    look for one with __view__ = (..., int, ...), and then (..., object, ...).
    If everything fails, the class C itself is used as the default.
Alternatively, the view class selection can be driven by another aspect
of the object x, instead of the class of x, by overriding __viewkey__.
See last example at the end of this module.
"""
_viewcache = {}
__view__ = ()
def __new__(rootclass, obj, *args, **kwds):
self = object.__new__(rootclass)
self.__obj__ = obj
self.__rootclass__ = rootclass
key = self.__viewkey__()
try:
self.__class__ = self._viewcache[key]
except KeyError:
self.__class__ = self._selectsubclass(key)
return self
def __getattr__(self, attr):
# attributes not found in the normal hierarchy rooted on View
# are looked up in the object's real class
return getattr(object.__getattribute__(self, '__obj__'), attr)
def __viewkey__(self):
return self.__obj__.__class__
def __matchkey__(self, key, subclasses):
if inspect.isclass(key):
keys = inspect.getmro(key)
else:
keys = [key]
for key in keys:
result = [C for C in subclasses if key in C.__view__]
if result:
return result
return []
def _selectsubclass(self, key):
subclasses = list(enumsubclasses(self.__rootclass__))
for C in subclasses:
if not isinstance(C.__view__, tuple):
C.__view__ = (C.__view__,)
choices = self.__matchkey__(key, subclasses)
if not choices:
return self.__rootclass__
elif len(choices) == 1:
return choices[0]
else:
# combine the multiple choices
return type('?', tuple(choices), {})
def __repr__(self):
return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
def enumsubclasses(cls):
for subcls in cls.__subclasses__():
for subsubclass in enumsubclasses(subcls):
yield subsubclass
yield cls
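# Illustrative sketch (class and function names here are made up and not
# used elsewhere in this module): how the __view__ tuples drive proxy class
# selection, as described in the View docstring above.  Note that View
# caches the selection per object class, so run this at most once.
def _example_view_selection():
    class NumberView(View):
        __view__ = (int,)            # proxies plain ints
    class BoolView(NumberView):
        __view__ = (bool,)           # more specific: proxies bools
    assert isinstance(View(3), NumberView)    # matched via __view__ = (int,)
    assert isinstance(View(True), BoolView)   # bool wins over int in the mro
    assert View(3).__obj__ == 3               # wrapped object stays reachable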
class Interpretable(View):
"""A parse tree node with a few extra methods."""
explanation = None
def is_builtin(self, frame):
return False
def eval(self, frame):
# fall-back for unknown expression nodes
try:
expr = ast.Expression(self.__obj__)
expr.filename = '<eval>'
self.__obj__.filename = '<eval>'
co = pycodegen.ExpressionCodeGenerator(expr).getCode()
result = frame.eval(co)
except passthroughex:
raise
except:
raise Failure(self)
self.result = result
self.explanation = self.explanation or frame.repr(self.result)
def run(self, frame):
# fall-back for unknown statement nodes
try:
expr = ast.Module(None, ast.Stmt([self.__obj__]))
expr.filename = '<run>'
co = pycodegen.ModuleCodeGenerator(expr).getCode()
frame.exec_(co)
except passthroughex:
raise
except:
raise Failure(self)
def nice_explanation(self):
return format_explanation(self.explanation)
class Name(Interpretable):
__view__ = ast.Name
def is_local(self, frame):
source = '%r in locals() is not globals()' % self.name
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def is_global(self, frame):
source = '%r in globals()' % self.name
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def is_builtin(self, frame):
source = '%r not in locals() and %r not in globals()' % (
self.name, self.name)
try:
return frame.is_true(frame.eval(source))
except passthroughex:
raise
except:
return False
def eval(self, frame):
super(Name, self).eval(frame)
if not self.is_local(frame):
self.explanation = self.name
class Compare(Interpretable):
__view__ = ast.Compare
def eval(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
for operation, expr2 in self.ops:
if hasattr(self, 'result'):
# shortcutting in chained expressions
if not frame.is_true(self.result):
break
expr2 = Interpretable(expr2)
expr2.eval(frame)
self.explanation = "%s %s %s" % (
expr.explanation, operation, expr2.explanation)
source = "__exprinfo_left %s __exprinfo_right" % operation
try:
self.result = frame.eval(source,
__exprinfo_left=expr.result,
__exprinfo_right=expr2.result)
except passthroughex:
raise
except:
raise Failure(self)
expr = expr2
class And(Interpretable):
__view__ = ast.And
def eval(self, frame):
explanations = []
for expr in self.nodes:
expr = Interpretable(expr)
expr.eval(frame)
explanations.append(expr.explanation)
self.result = expr.result
if not frame.is_true(expr.result):
break
self.explanation = '(' + ' and '.join(explanations) + ')'
class Or(Interpretable):
__view__ = ast.Or
def eval(self, frame):
explanations = []
for expr in self.nodes:
expr = Interpretable(expr)
expr.eval(frame)
explanations.append(expr.explanation)
self.result = expr.result
if frame.is_true(expr.result):
break
self.explanation = '(' + ' or '.join(explanations) + ')'
# == Unary operations ==
keepalive = []
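# the loops below rebind the same class name on every iteration; keepalive
# holds strong references so the earlier generated classes are not garbage
# collected (__subclasses__ only keeps weak references)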
for astclass, astpattern in {
ast.Not : 'not __exprinfo_expr',
ast.Invert : '(~__exprinfo_expr)',
}.items():
class UnaryArith(Interpretable):
__view__ = astclass
def eval(self, frame, astpattern=astpattern):
expr = Interpretable(self.expr)
expr.eval(frame)
self.explanation = astpattern.replace('__exprinfo_expr',
expr.explanation)
try:
self.result = frame.eval(astpattern,
__exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
keepalive.append(UnaryArith)
# == Binary operations ==
for astclass, astpattern in {
ast.Add : '(__exprinfo_left + __exprinfo_right)',
ast.Sub : '(__exprinfo_left - __exprinfo_right)',
ast.Mul : '(__exprinfo_left * __exprinfo_right)',
ast.Div : '(__exprinfo_left / __exprinfo_right)',
ast.Mod : '(__exprinfo_left % __exprinfo_right)',
ast.Power : '(__exprinfo_left ** __exprinfo_right)',
}.items():
class BinaryArith(Interpretable):
__view__ = astclass
def eval(self, frame, astpattern=astpattern):
left = Interpretable(self.left)
left.eval(frame)
right = Interpretable(self.right)
right.eval(frame)
self.explanation = (astpattern
.replace('__exprinfo_left', left .explanation)
.replace('__exprinfo_right', right.explanation))
try:
self.result = frame.eval(astpattern,
__exprinfo_left=left.result,
__exprinfo_right=right.result)
except passthroughex:
raise
except:
raise Failure(self)
keepalive.append(BinaryArith)
class CallFunc(Interpretable):
__view__ = ast.CallFunc
def is_bool(self, frame):
source = 'isinstance(__exprinfo_value, bool)'
try:
return frame.is_true(frame.eval(source,
__exprinfo_value=self.result))
except passthroughex:
raise
except:
return False
def eval(self, frame):
node = Interpretable(self.node)
node.eval(frame)
explanations = []
vars = {'__exprinfo_fn': node.result}
source = '__exprinfo_fn('
for a in self.args:
if isinstance(a, ast.Keyword):
keyword = a.name
a = a.expr
else:
keyword = None
a = Interpretable(a)
a.eval(frame)
argname = '__exprinfo_%d' % len(vars)
vars[argname] = a.result
if keyword is None:
source += argname + ','
explanations.append(a.explanation)
else:
source += '%s=%s,' % (keyword, argname)
explanations.append('%s=%s' % (keyword, a.explanation))
if self.star_args:
star_args = Interpretable(self.star_args)
star_args.eval(frame)
argname = '__exprinfo_star'
vars[argname] = star_args.result
source += '*' + argname + ','
explanations.append('*' + star_args.explanation)
if self.dstar_args:
dstar_args = Interpretable(self.dstar_args)
dstar_args.eval(frame)
argname = '__exprinfo_kwds'
vars[argname] = dstar_args.result
source += '**' + argname + ','
explanations.append('**' + dstar_args.explanation)
self.explanation = "%s(%s)" % (
node.explanation, ', '.join(explanations))
if source.endswith(','):
source = source[:-1]
source += ')'
try:
self.result = frame.eval(source, **vars)
except passthroughex:
raise
except:
raise Failure(self)
if not node.is_builtin(frame) or not self.is_bool(frame):
r = frame.repr(self.result)
self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
class Getattr(Interpretable):
__view__ = ast.Getattr
def eval(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
source = '__exprinfo_expr.%s' % self.attrname
try:
try:
self.result = frame.eval(source, __exprinfo_expr=expr.result)
except AttributeError:
# Maybe the attribute name needs to be mangled?
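                # Python mangles names like "__attr" (no trailing "__")
                # inside class bodies to "_ClassName__attr".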
if (not self.attrname.startswith("__") or
self.attrname.endswith("__")):
raise
source = "getattr(__exprinfo_expr.__class__, '__name__', '')"
class_name = frame.eval(source, __exprinfo_expr=expr.result)
mangled_attr = "_" + class_name + self.attrname
source = "__exprinfo_expr.%s" % (mangled_attr,)
self.result = frame.eval(source, __exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
self.explanation = '%s.%s' % (expr.explanation, self.attrname)
# if the attribute comes from the instance, its value is interesting
source = ('hasattr(__exprinfo_expr, "__dict__") and '
'%r in __exprinfo_expr.__dict__' % self.attrname)
try:
from_instance = frame.is_true(
frame.eval(source, __exprinfo_expr=expr.result))
except passthroughex:
raise
except:
from_instance = True
if from_instance:
r = frame.repr(self.result)
self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
# == Re-interpretation of full statements ==
class Assert(Interpretable):
__view__ = ast.Assert
def run(self, frame):
test = Interpretable(self.test)
test.eval(frame)
# print the result as 'assert <explanation>'
self.result = test.result
self.explanation = 'assert ' + test.explanation
if not frame.is_true(test.result):
try:
raise BuiltinAssertionError
except passthroughex:
raise
except:
raise Failure(self)
class Assign(Interpretable):
__view__ = ast.Assign
def run(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
self.result = expr.result
self.explanation = '... = ' + expr.explanation
# fall-back-run the rest of the assignment
ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
mod = ast.Module(None, ast.Stmt([ass]))
mod.filename = '<run>'
co = pycodegen.ModuleCodeGenerator(mod).getCode()
try:
frame.exec_(co, __exprinfo_expr=expr.result)
except passthroughex:
raise
except:
raise Failure(self)
class Discard(Interpretable):
__view__ = ast.Discard
def run(self, frame):
expr = Interpretable(self.expr)
expr.eval(frame)
self.result = expr.result
self.explanation = expr.explanation
class Stmt(Interpretable):
__view__ = ast.Stmt
def run(self, frame):
for stmt in self.nodes:
stmt = Interpretable(stmt)
stmt.run(frame)
def report_failure(e):
explanation = e.node.nice_explanation()
if explanation:
explanation = ", in: " + explanation
else:
explanation = ""
sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
def check(s, frame=None):
if frame is None:
frame = sys._getframe(1)
frame = py.code.Frame(frame)
expr = parse(s, 'eval')
assert isinstance(expr, ast.Expression)
node = Interpretable(expr.node)
try:
node.eval(frame)
except passthroughex:
raise
except Failure:
e = sys.exc_info()[1]
report_failure(e)
else:
if not frame.is_true(node.result):
sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
###########################################################
# API / Entry points
###########################################################
def interpret(source, frame, should_fail=False):
module = Interpretable(parse(source, 'exec').node)
#print "got module", module
if isinstance(frame, types.FrameType):
frame = py.code.Frame(frame)
try:
module.run(frame)
except Failure:
e = sys.exc_info()[1]
return getfailure(e)
except passthroughex:
raise
except:
traceback.print_exc()
if should_fail:
return ("(assertion failed, but when it was re-run for "
"printing intermediate values, it did not fail. Suggestions: "
"compute assert expression before the assert or use --assert=plain)")
else:
return None
def getmsg(excinfo):
if isinstance(excinfo, tuple):
excinfo = py.code.ExceptionInfo(excinfo)
#frame, line = gettbline(tb)
#frame = py.code.Frame(frame)
#return interpret(line, frame)
tb = excinfo.traceback[-1]
source = str(tb.statement).strip()
x = interpret(source, tb.frame, should_fail=True)
if not isinstance(x, str):
raise TypeError("interpret returned non-string %r" % (x,))
return x
def getfailure(e):
explanation = e.node.nice_explanation()
if str(e.value):
lines = explanation.split('\n')
lines[0] += " << %s" % (e.value,)
explanation = '\n'.join(lines)
text = "%s: %s" % (e.exc.__name__, explanation)
if text.startswith('AssertionError: assert '):
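        # drop the leading "AssertionError: " (16 characters), keep "assert ..."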
text = text[16:]
return text
def run(s, frame=None):
if frame is None:
frame = sys._getframe(1)
frame = py.code.Frame(frame)
module = Interpretable(parse(s, 'exec').node)
try:
module.run(frame)
except Failure:
e = sys.exc_info()[1]
report_failure(e)
if __name__ == '__main__':
# example:
def f():
return 5
def g():
return 3
def h(x):
return 'never'
check("f() * g() == 5")
check("not f()")
check("not (f() and g() or 0)")
check("f() == g()")
i = 4
check("i == f()")
check("len(f()) == 0")
check("isinstance(2+3+4, float)")
run("x = i")
check("x == 5")
run("assert not f(), 'oops'")
run("a, b, c = 1, 2")
run("a, b, c = f()")
check("max([f(),g()]) == 4")
check("'hello'[g()] == 'h'")
run("'guk%d' % h(f())")
| mit | -4,955,701,541,419,846,000 | 31.328622 | 85 | 0.542628 | false |
mapzen/vector-datasource | integration-test/484-include-state-pois.py | 2 | 1216 |
# -*- encoding: utf-8 -*-
from shapely.wkt import loads as wkt_loads
import dsl
from . import FixtureTest
class IncludeStatePois(FixtureTest):
def test_proposed_stations(self):
# Antioch Station
self.generate_fixtures(dsl.way(3353451464, wkt_loads('POINT (-121.785229642994 37.99688077243528)'), {u'name': u'Antioch Station (in construction)', u'source': u'openstreetmap.org', u'state': u'proposed', u'train': u'yes', u'public_transport': u'station', u'operator': u'BART', u'railway': u'station'})) # noqa
self.assert_has_feature(
16, 10597, 25279, 'pois',
{'id': 3353451464, 'state': 'proposed'})
# Pittsburg Center
self.generate_fixtures(dsl.way(3354463416, wkt_loads('POINT (-121.88916373322 38.01684868163071)'), {u'toilets': u'yes', u'name': u'BART - Pittsburg Center Station (In Construction)', u'wheelchair': u'yes', u'source': u'openstreetmap.org', u'state': u'proposed', u'train': u'yes', u'public_transport': u'station', u'operator': u'BART', u'railway': u'station', u'toilets:wheelchair': u'yes'})) # noqa
self.assert_has_feature(
16, 10578, 25275, 'pois',
{'id': 3354463416, 'state': 'proposed'})
| mit | 57,475,647,718,731,416 | 56.904762 | 408 | 0.648849 | false |
p4datasystems/CarnotKEdist | dist/Lib/socket.py | 13 | 2462 |
# dispatches to _socket for actual implementation
from _socket import (
socket, SocketType, error, herror, gaierror, timeout, has_ipv6,
create_connection,
getdefaulttimeout,
setdefaulttimeout,
getfqdn,
gethostbyaddr,
gethostbyname,
gethostbyname_ex,
gethostname,
getprotobyname,
getservbyname,
getservbyport,
AF_UNSPEC,
AF_INET,
AF_INET6,
AI_PASSIVE,
AI_CANONNAME,
AI_NUMERICHOST,
AI_V4MAPPED,
AI_ALL,
AI_ADDRCONFIG,
AI_NUMERICSERV,
EAI_NONAME,
EAI_SERVICE,
EAI_ADDRFAMILY,
NI_NUMERICHOST,
NI_NUMERICSERV,
NI_NOFQDN,
NI_NAMEREQD,
NI_DGRAM,
NI_MAXSERV,
NI_IDN,
NI_IDN_ALLOW_UNASSIGNED,
NI_IDN_USE_STD3_ASCII_RULES,
NI_MAXHOST,
SHUT_RD,
SHUT_WR,
SHUT_RDWR,
SOCK_DGRAM,
SOCK_STREAM,
SOCK_RAW,
SOCK_RDM,
SOCK_SEQPACKET,
SOL_SOCKET,
SOL_TCP,
# not supported, but here for apparent completeness
IPPROTO_AH,
IPPROTO_DSTOPTS,
IPPROTO_ESP,
IPPROTO_FRAGMENT,
IPPROTO_GGP,
IPPROTO_HOPOPTS,
IPPROTO_ICMP,
IPPROTO_ICMPV6,
IPPROTO_IDP,
IPPROTO_IGMP,
IPPROTO_IP, # supported
# not supported
IPPROTO_IPV4,
IPPROTO_IPV6,
IPPROTO_MAX,
IPPROTO_ND,
IPPROTO_NONE,
IPPROTO_PUP,
IPPROTO_RAW,
IPPROTO_ROUTING,
IPPROTO_TCP, # supported
IPPROTO_UDP, # supported
# supported
SO_BROADCAST,
SO_KEEPALIVE,
SO_LINGER,
SO_RCVBUF,
SO_REUSEADDR,
SO_SNDBUF,
SO_TIMEOUT,
TCP_NODELAY,
# pseudo options
SO_ACCEPTCONN,
SO_ERROR,
SO_TYPE,
# unsupported, will return errno.ENOPROTOOPT if actually used
SO_OOBINLINE,
SO_DEBUG,
SO_DONTROUTE,
SO_RCVLOWAT,
SO_RCVTIMEO,
SO_REUSEPORT,
SO_SNDLOWAT,
SO_SNDTIMEO,
SO_USELOOPBACK,
INADDR_ANY,
INADDR_BROADCAST,
IN6ADDR_ANY_INIT,
_GLOBAL_DEFAULT_TIMEOUT,
is_ipv4_address, is_ipv6_address, is_ip_address,
getaddrinfo,
getnameinfo,
htons,
htonl,
ntohs,
ntohl,
inet_aton,
inet_ntoa,
inet_pton,
inet_ntop,
_fileobject,
_get_jsockaddr
)
def supports(feature):
# FIXME this seems to be Jython internals specific, and for
# testing only; consider removing since it really no longer
# matters
if feature == "idna":
return True
raise KeyError("Unknown feature", feature)
| apache-2.0 | 5,281,908,101,320,620,000 | 16.338028 | 67 | 0.619821 | false |
javivi001/horus | src/horus/engine/uvc/mac/__init__.py | 3 | 5157 |
'''
----------------------------------------------
- Horus Project
- Jesus Arroyo Torrens <[email protected]>
- May 2015 Mundo Readers S.L.
----------------------------------------------
(*)~----------------------------------------------------------------------------------
Pupil - eye tracking platform
Copyright (C) 2012-2014 Pupil Labs
Distributed under the terms of the CC BY-NC-SA License.
License details are in the file license.txt, distributed as part of this software.
----------------------------------------------------------------------------------~(*)
'''
"""
OOP-style interface for the uvcc ctypes binding.
Three classes:
Camera_List holds Cam instances,
Cam contains info about attached cameras,
Control is the actual control, with methods for getting and setting values.
"""
import sys
from raw import *
#logging
import logging
logger = logging.getLogger(__name__)
class Control(object):
"""docstring for uvcc_Control"""
def __init__(self,name,i,handle):
self.handle = handle
self.name = name
        self.atb_name = name[9:].capitalize() #prettify the name
self.order = i
self.value = None
self.assess_type()
def assess_type(self):
"""
find out if a control is active
find out if the range is bool or int
"""
self.value = None
self.min = None
self.max = None
self.step = None
self.default = None
self.menu = None #some day in the future we will extract the control menu entries here.
self.info = self.get_info()
"""
D0 1 = Supports GET value requests Capability
D1 1 = Supports SET value requests Capability
D2 1 = Disabled due to automatic mode (under device control) State
D3 1 = Autoupdate Control Capability
D4 1 = Asynchronous Control Capability
D5 1 = Disabled due to incompatibility with Commit state. State
"""
if self.info > 0 : # Control supported
self.value = self.get_val_from_device()
self.min = self.get_(UVC_GET_MIN)
self.max = self.get_(UVC_GET_MAX)
self.step = self.get_(UVC_GET_RES)
self.default = self.get_(UVC_GET_DEF)
if ((self.max,self.min) == (None,None)) or ((self.max,self.min) == (1,0)) :
self.type = "bool"
# elif (self.max,self.min) == (None,None):
# ###I guess this should be a menu
# self.type = "int"
# self.flags = "active"
# self.min = 0
# self.max = 20
# self.step = 1
else:
self.type = "int"
if self.info >> 3 & 1: # Disabled due to automatic mode (under device control)
self.flags = "inactive"
else:
self.flags = "active"
else:
self.type = "unknown type"
self.flags = "control not supported"
self.value = None
def get_val_from_device(self):
return uvccGetVal(self.name,self.handle)
def get_val(self):
return self.value
def set_val(self,val):
self.value = val
return uvccSetVal(val,self.name,self.handle)
def get_info(self):
return uvccRequestInfo(self.name,self.handle)
def get_(self,request):
return uvccSendRequest(self.name,request,self.handle)
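# Sketch (not part of the uvcc API; name is made up): decode the `info`
# bitmask returned by uvccRequestInfo, using the D0..D5 bit meanings
# documented in Control.assess_type above.
def _describe_info_bits(info):
    names = ('GET supported',             # D0
             'SET supported',             # D1
             'disabled by auto mode',     # D2
             'autoupdate',                # D3
             'asynchronous',              # D4
             'disabled by commit state')  # D5
    return [name for bit, name in enumerate(names) if info >> bit & 1]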
class Controls(dict):
"""docstring for Controls"""
def __init__(self,uId):
uvccInit()
self.handle = uvccGetCamWithQTUniqueID(uId)
assert self.handle is not None, "UVCC could not open camera based on uId %s" %uId
# list of all controls implemented by uvcc,
# the names evaluate to ints using a dict lookup in raw.py
controls_str = uvcc_controls[:-1] #the last one is not a real control
for i,c in enumerate(controls_str):
self[c] = Control(c,i,self.handle)
def update_from_device(self):
for c in self.itervalues():
if c.flags == "active":
c.value = c.get_val_from_device()
def load_defaults(self):
for c in self.itervalues():
if c.flags == "active" and c.default is not None:
c.set_val(c.default)
def __del__(self):
#uvccReleaseCam(self.handle)
uvccExit()
class Cam():
"""a simple class that only contains info about a camera"""
def __init__(self,name,uId,src_id):
self.src_id = src_id
self.uId = uId
self.name = name
class Camera_List(list):
"""docstring for uvcc_control"""
def __init__(self):
if getattr(sys, 'frozen', False):
#explicit import needed when frozen
import QTKit
from QTKit import QTCaptureDevice,QTMediaTypeVideo
qt_cameras = QTCaptureDevice.inputDevicesWithMediaType_(QTMediaTypeVideo)
for src_id,q in enumerate(qt_cameras):
uId = q.uniqueID()
name = q.localizedDisplayName().encode('utf-8')
            self.append(Cam(name,uId,src_id))
| gpl-2.0 | 4,613,428,689,483,882,000 | 32.277419 | 98 | 0.550902 | false |
sdonapar/CompStats | check_env.py | 7 | 1130 |
"""This file contains code used in "Think Stats",
by Allen B. Downey, available from greenteapress.com
Copyright 2013 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function, division
import math
import numpy
from matplotlib import pyplot
import thinkplot
import thinkstats2
def RenderPdf(mu, sigma, n=101):
"""Makes xs and ys for a normal PDF with (mu, sigma).
n: number of places to evaluate the PDF
"""
xs = numpy.linspace(mu-4*sigma, mu+4*sigma, n)
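    # +/- 4 sigma covers essentially all of the probability mass (~99.99%)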
ys = [thinkstats2.EvalNormalPdf(x, mu, sigma) for x in xs]
return xs, ys
def main():
xs, ys = RenderPdf(100, 15)
n = 34
pyplot.fill_between(xs[-n:], ys[-n:], y2=0.0001, color='blue', alpha=0.2)
s = 'Congratulations!\nIf you got this far,\nyou must be here.'
d = dict(shrink=0.05)
pyplot.annotate(s, [127, 0.002], xytext=[80, 0.005], arrowprops=d)
thinkplot.Plot(xs, ys)
thinkplot.Show(title='Distribution of IQ',
xlabel='IQ',
ylabel='PDF',
legend=False)
if __name__ == "__main__":
main()
| mit | -6,667,441,445,842,350,000 | 23.565217 | 77 | 0.627434 | false |
umass-forensics/Yapr-forensics | yapr/YaffsClasses/YaffsObject.py | 1 | 7663 |
import math
from . import YaffsChunk
class YaffsObject:
def __init__(self, obj_id):
#tuple of type tag, chunk
self.chunk_pairs = []
self.object_id = obj_id
self.versions = []
self.is_deleted = False
self.hasNoHeader = False
self.name = None
self.object_type = None
#[(tag_cls,chunk_cls)...] tuple lists keyed by chunkId
        #This allows us to have an ordered list of chunks, by id, such
#that the most recent chunk is first in the list.
self.chunkDict = {}
def splitByDeletions(self):
"""
This method will split the object based on the deletion headers.
We need to do this because object ids are
reassigned after the old object has been deleted.
"""
#TODO: I need to take another look at this method.
splitObjects = []
isFirstIteration = True
obj = None
#iterate through all chunkPairs
for tag, chunk in self.chunk_pairs:
#if the the tag is a deleted header, then we know this
# is the start of a new object. Also do this even if
# the object does not properly start with a header
isNewObject = (tag.isHeaderTag and tag.isDeleted)
if isNewObject or isFirstIteration:
obj = YaffsObject(self.object_id)
splitObjects.append(obj)
isFirstIteration = False
obj.chunk_pairs.append((tag, chunk))
if len(splitObjects) > 1 or len(splitObjects) == 0:
pass
return splitObjects
def sanity_check(self):
"""
This method is a simple sanity check that will return false if any
of the file versions contain a chunk with a block sequence number
greater than that of the header chunk. This should not happen as the header
is always the last part of a file to be written.
"""
for version in self.versions:
header_oob, header_chunk = version[0]
for chunk_id in version:
if header_oob.block_seq < version[chunk_id][0].block_seq:
return False
return True
def split_by_version(self):
"""
This method will group the chunk pairs based on object headers.
Each grouping is called a 'version', and should contain chunks that were
written after the version's header such that the version doesn't already have
a chunk with that id and that id does not fall beyond the boundary set by the
num_bytes field.
"""
        #TODO: It won't handle shrink headers yet.
#TODO: Doesn't handle issues that arise from missing chunks
#In the event of an unclean shutdown while the object was open,
#the first chunk pair (i.e. the last written),
#won't be a header chunk as expected. If this happens, the current logic
#always starts with an empty version and assigns a header to it later.
#This could also happen due to
#garbage collection
self.versions = [{}]
for tag, chunk in self.chunk_pairs:
if tag.isHeaderTag:
#check if the first version is missing its header
if len(self.versions) == 1 and 0 not in self.versions[0]:
self.versions[0][0] = (tag, chunk)
#create a new version for this header
else:
chunks = {0: (tag, chunk)}
self.versions.append(chunks)
#if this is not a header, add it to every known version that
# doesn't have a chunk with this id
#unless this chunk is beyond the end of the file
else:
for version in self.versions:
#If the version doesn't have a header,
#go ahead and add the chunk
if 0 not in version:
version[tag.chunk_id] = (tag, chunk)
continue
#The oob tag contains the file size, we shouldn't include
# any chunks beyond that file size.
filesize = version[0][0].num_bytes
num_chunks = int(math.ceil(filesize * 1.0 / chunk.length))
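                    # e.g. a 5000-byte file with 2048-byte chunks gives
                    # num_chunks == 3, i.e. data chunk ids 1..3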
if not(tag.chunk_id in version) and tag.chunk_id <= num_chunks:
version[tag.chunk_id] = (tag, chunk)
def reconstruct(self):
"""
This method should be called after all chunks for the object have been located.
It will order all previous chunks by chunk id
"""
for tag, chunk in self.chunk_pairs:
#It might be useful for tracking which pairs belong to which objects
tag.object_cls = self
if tag.isHeaderTag:
if 0 not in self.chunkDict:
self.chunkDict[0] = []
chunk = YaffsChunk.YaffsHeader(chunk)
self.chunkDict[0].append((tag, chunk))
else:
if tag.chunk_id not in self.chunkDict:
self.chunkDict[tag.chunk_id] = []
self.chunkDict[tag.chunk_id].append((tag, chunk))
if 0 not in self.chunkDict:
#print 'Object has no header tag!'
self.hasNoHeader = True
else:
tag, chunk = self.chunkDict[0][0]
self.is_deleted = tag.isDeleted
self.object_type = chunk.obj_type
num_chunks = math.ceil(float(tag.num_bytes) / chunk.length)
for chunk_id in range(int(num_chunks) + 1):
if chunk_id in self.chunkDict:
self.chunkDict[chunk_id][0][0].is_most_recent = True
#Set the object name
if not tag.isDeleted:
self.name = chunk.name
else:
names = [x[1].name for x in self.chunkDict[0]
if x[1].name not in ['deleted', 'unlinked']]
if len(names) > 0:
self.name = names[0]
def writeVersion(self, versionNum=0, name=None):
header, hChunk = self.versions[versionNum][0]
hChunk = YaffsChunk.YaffsHeader(hChunk)
numChunks = math.ceil(float(hChunk.fsize) / hChunk.length)
remaining = hChunk.fsize
if name is None:
name = hChunk.name
with open(name, "wb") as f:
for index in range(int(numChunks)):
chunk_id = index + 1
#Versions may be missing certain chunks. This
#happens due to block erasure.
if chunk_id not in self.versions[versionNum]:
#Make a best effort and grab the most recent
#version of that chunk
if chunk_id in self.chunkDict:
cTag, cChunk = self.chunkDict[chunk_id][0]
bytes = cChunk.get_bytes()
#otherwise, just write zeroes
else:
                        bytes = '\x00' * hChunk.length  # zero-fill; 0x00 * n is just the integer 0
else:
cTag, cChunk = self.versions[versionNum][chunk_id]
bytes = cChunk.get_bytes()
if remaining >= len(bytes):
f.write(bytes)
remaining -= len(bytes)
else:
f.write(bytes[0:remaining])
remaining = 0
        pass
| gpl-2.0 | 8,031,917,286,478,674,000 | 36.568627 | 87 | 0.533212 | false |
avgoor/python-cudet | cudet/fuel_client.py | 3 | 2842 |
# -*- coding: utf-8 -*-
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Working with fuel client as a library
"""
import logging
try:
from fuelclient.client import Client as FuelClient
except ImportError:
try:
from fuelclient.client import APIClient as FuelClient
except ImportError:
FuelClient = None
if FuelClient is not None:
from fuelclient import fuelclient_settings
# LP bug 1592445
try:
from fuelclient.client import logger
logger.handlers = []
except:
pass
from cudet import utils
logger = logging.getLogger(__name__)
def get_client(config):
"""Returns initialized Fuel client
:param config: The ``cudet.CudetConfig`` instance
:returns: Fuel client instance
"""
client = None
if FuelClient is not None:
with utils.environ_settings(http_proxy=config.fuel_http_proxy,
HTTP_PROXY=config.fuel_http_proxy):
try:
try:
# try to instantiate fuel client with new init signature
client = FuelClient(host=config.fuel_ip,
port=config.fuel_port,
http_proxy=config.fuel_http_proxy,
os_username=config.fuel_user,
os_password=config.fuel_pass,
os_tenant_name=config.fuel_tenant)
except TypeError:
# instantiate fuel client using old init signature
fuel_settings = fuelclient_settings.get_settings()
fuel_config = fuel_settings.config
fuel_config['OS_USERNAME'] = config.fuel_user
fuel_config['OS_PASSWORD'] = config.fuel_pass
fuel_config['OS_TENANT_NAME'] = config.fuel_tenant
fuel_config['HTTP_PROXY'] = config.fuel_http_proxy
client = FuelClient()
except Exception as e:
logger.info('Failed to initialize fuelclient instance:%s' % e,
exc_info=True)
else:
logger.info('Fuelclient can not be imported')
return client
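# Example usage (sketch): `config` is assumed to be a populated
# cudet.CudetConfig instance as described in the docstring above.
#
#     client = get_client(config)
#     if client is not None:
#         clusters = client.get_request('clusters/')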
| gpl-2.0 | -870,327,387,352,534,800 | 32.833333 | 78 | 0.582688 | false |
ricard33/cloud-mailing | cloud_mailing/common/settings.py | 1 | 4809 |
# Copyright 2015-2019 Cedric RICARD
#
# This file is part of CloudMailing.
#
# CloudMailing is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# CloudMailing is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with CloudMailing. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
from .config_file import ConfigFile
__author__ = 'ricard'
RUNNING_UNITTEST = sys.argv[0].endswith('trial') or 'pytest' in sys.argv[0] or os.environ.get('RUNNING_UNITTEST', False) == "True"
# PROJECT_ROOT = os.path.normpath(os.getcwd())
PROJECT_ROOT = os.path.normpath(os.path.join(os.path.dirname(__file__), '..', '..'))
if RUNNING_UNITTEST:
PROJECT_ROOT = os.path.join(PROJECT_ROOT, 'UT')
if not os.path.exists(PROJECT_ROOT):
os.makedirs(PROJECT_ROOT)
CONFIG_PATH = os.path.join(PROJECT_ROOT, "config")
CONFIG_FILE = os.path.join(CONFIG_PATH, "cloud-mailing.ini")
LOG_PATH = os.path.join(PROJECT_ROOT, "log")
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static')
os.makedirs(CONFIG_PATH, exist_ok=True)
os.makedirs(LOG_PATH, exist_ok=True)
config = ConfigFile()
if os.path.exists(CONFIG_FILE):
config.read(CONFIG_FILE)
else:
sys.stderr.write("Config file '%s' not found!\n" % CONFIG_FILE)
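# A minimal config/cloud-mailing.ini might look like this (illustrative
# values; sections and keys match the config.get() calls in this module):
#
#   [ID]
#   SERIAL = CM-00001
#
#   [MASTER_DATABASE]
#   NAME = cm_master
#   URI = mongodb://localhost:27017
#
#   [MAILING]
#   master_ip = localhost
#   master_port = 33620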
DEBUG = config.getboolean('DEBUG', 'DEBUG', False)
SSL_CERTIFICATE_PATH = os.path.join(PROJECT_ROOT, 'ssl')
SSL_CERTIFICATE_NAME = 'cm'
MASTER_DATABASE = config.get('MASTER_DATABASE', 'NAME', "cm_master")
MASTER_DATABASE_URI = config.get('MASTER_DATABASE', 'URI', "mongodb://localhost:27017")
SATELLITE_DATABASE = config.get('SATELLITE_DATABASE', 'NAME', "cm_satellite")
SATELLITE_DATABASE_URI = config.get('SATELLITE_DATABASE', 'URI', "mongodb://localhost:27017")
TEST_DATABASE = "cm_test"
SERIAL = config.get('ID', 'SERIAL', '<NO_SERIAL_NUMBER>')
## Master
MASTER_IP = config.get('MAILING', 'master_ip', 'localhost')
MASTER_PORT = config.getint('MAILING', 'master_port', 33620)
## Satellite specific
TEST_TARGET_IP = config.get('MAILING', 'test_target_ip', "") # used for mailing tests. IP of an internal and fake SMTP server.
TEST_TARGET_PORT = config.getint('MAILING', 'test_target_port', 33625) # used for mailing tests. Port number of an internal and fake SMTP server.
TEST_FAKE_DNS = config.getboolean('MAILING', 'test_faked_dns', False) # used for mailing tests. DNS always returns local ip.
USE_LOCAL_DNS_CACHE = config.getboolean('MAILING', 'use_local_dns_cache', False) # mainly used for mailing tests. DNS always returns determined ips for some domains.
LOCAL_DNS_CACHE_FILE = config.get('MAILING', 'local_dns_cache_filename', os.path.join(PROJECT_ROOT, 'local_dns_cache.ini')) # mainly used for mailing tests. DNS always returns determined ips for some domains.
MAIL_TEMP = config.get('MAILING', 'MAIL_TEMP', os.path.join(PROJECT_ROOT, 'temp'))
CUSTOMIZED_CONTENT_FOLDER = config.get('MAILING', 'CUSTOMIZED_CONTENT_FOLDER', os.path.join(PROJECT_ROOT, 'cust_ml'))
# Create missing folders
for dir_name in (CUSTOMIZED_CONTENT_FOLDER, MAIL_TEMP):
try:
os.makedirs(dir_name)
except:
pass
## End satellite specific
# Logging configuration
if 'master_app' in sys.argv:
log_name = 'master'
elif 'satellite_app' in sys.argv:
log_name = 'satellite'
else:
log_name = 'cloud_mailing'
DEFAULT_LOG_FORMAT='%(name)-12s: %(asctime)s %(levelname)-8s [%(threadName)s] %(message)s'
import warnings
warnings.simplefilter('ignore', UserWarning)
# API settings
PAGE_SIZE = 100
# SMTPD
SMTPD_AUTH_URL = config.get('SMTPD', 'user_authentication_url', 'https://localhost/api/auth/')
SMTPD_AUTH_USERNAME_FIELD = config.get('SMTPD', 'user_authentication_username_field', 'username')
SMTPD_AUTH_PASSWORD_FIELD = config.get('SMTPD', 'user_authentication_password_field', 'password')
SMTPD_VALIDATE_FROM_URL = config.get('SMTPD', 'validate_from_url', 'https://localhost/api/validate_from/')
SMTPD_VALIDATE_FROM_FIELD = config.get('SMTPD', 'validate_from_field', 'mail_from')
SMTPD_VALIDATE_TO_URL = config.get('SMTPD', 'validate_to_url', 'https://localhost/api/validate_to/')
SMTPD_VALIDATE_TO_FIELD = config.get('SMTPD', 'validate_to_field', 'rcpt_to')
SMTPD_MESSAGE_URL = config.get('SMTPD', 'message_url', 'https://localhost/api/send_mailing/')
SMTPD_RECIPIENTS_FIELD = config.get('SMTPD', 'recipients_field', 'recipients')
SMTPD_MESSAGE_FIELD = config.get('SMTPD', 'message_field', 'message')
| agpl-3.0 | 8,328,160,698,335,105,000 | 42.324324 | 209 | 0.722396 | false |
enthought/pyside | tests/QtCore/qthread_test.py | 6 | 1878 |
#!/usr/bin/python
'''Test cases for QThread'''
import unittest
from PySide.QtCore import QThread, QCoreApplication, QObject, SIGNAL, QMutex, QTimer
from PySide.QtCore import QEventLoop
from helper import UsesQCoreApplication
mutex = QMutex()
class Dummy(QThread):
'''Dummy thread'''
def __init__(self, *args):
super(Dummy, self).__init__(*args)
self.called = False
def run(self):
#Start-quit sequence
self.qobj = QObject()
mutex.lock()
self.called = True
mutex.unlock()
class QThreadSimpleCase(UsesQCoreApplication):
def setUp(self):
UsesQCoreApplication.setUp(self)
self.called = False
def tearDown(self):
UsesQCoreApplication.tearDown(self)
def testThread(self):
#Basic QThread test
obj = Dummy()
obj.start()
obj.wait()
self.assert_(obj.called)
def cb(self, *args):
self.called = True
#self.exit_app_cb()
def abort_application(self):
self._thread.terminate()
self.app.quit()
def testSignalFinished(self):
#QThread.finished() (signal)
obj = Dummy()
QObject.connect(obj, SIGNAL('finished()'), self.cb)
mutex.lock()
obj.start()
mutex.unlock()
self._thread = obj
QTimer.singleShot(1000, self.abort_application)
self.app.exec_()
self.assert_(self.called)
def testSignalStarted(self):
#QThread.started() (signal)
obj = Dummy()
QObject.connect(obj, SIGNAL('started()'), self.cb)
obj.start()
self._thread = obj
QTimer.singleShot(1000, self.abort_application)
self.app.exec_()
self.assertEqual(obj.qobj.thread(), obj) # test QObject.thread() method
self.assert_(self.called)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | 8,739,463,915,080,134,000 | 23.076923 | 84 | 0.597444 | false |
whuaegeanse/mapnik | scons/scons-local-2.3.6/SCons/compat/_scons_collections.py | 4 | 1848 |
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__doc__ = """
collections compatibility module for older (pre-2.4) Python versions
This does not not NOT (repeat, *NOT*) provide complete collections
functionality. It only wraps the portions of collections functionality
used by SCons, in an interface that looks enough like collections for
our purposes.
"""
__revision__ = "src/engine/SCons/compat/_scons_collections.py rel_2.3.5:3347:d31d5a4e74b6 2015/07/31 14:36:10 bdbaddog"
# Use exec to hide old names from fixers.
exec("""if True:
from UserDict import UserDict
from UserList import UserList
from UserString import UserString""")
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| lgpl-2.1 | -247,165,558,219,018,700 | 40.066667 | 119 | 0.753788 | false |
MOA-2011/e2openplugin-OpenWebif | plugin/controllers/views/web/moviedelete.py | 1 | 5203 |
#!/usr/bin/env python
##################################################
## DEPENDENCIES
import sys
import os
import os.path
try:
import builtins as builtin
except ImportError:
import __builtin__ as builtin
from os.path import getmtime, exists
import time
import types
from Cheetah.Version import MinCompatibleVersion as RequiredCheetahVersion
from Cheetah.Version import MinCompatibleVersionTuple as RequiredCheetahVersionTuple
from Cheetah.Template import Template
from Cheetah.DummyTransaction import *
from Cheetah.NameMapper import NotFound, valueForName, valueFromSearchList, valueFromFrameOrSearchList
from Cheetah.CacheRegion import CacheRegion
import Cheetah.Filters as Filters
import Cheetah.ErrorCatchers as ErrorCatchers
##################################################
## MODULE CONSTANTS
VFFSL=valueFromFrameOrSearchList
VFSL=valueFromSearchList
VFN=valueForName
currentTime=time.time
__CHEETAH_version__ = '2.4.4'
__CHEETAH_versionTuple__ = (2, 4, 4, 'development', 0)
__CHEETAH_genTime__ = 1453357629.140598
__CHEETAH_genTimestamp__ = 'Thu Jan 21 15:27:09 2016'
__CHEETAH_src__ = '/home/babel/Build/Test/OpenPLi5/openpli5.0/build/tmp/work/tmnanoseplus-oe-linux/enigma2-plugin-extensions-openwebif/1+gitAUTOINC+186ea358f6-r0/git/plugin/controllers/views/web/moviedelete.tmpl'
__CHEETAH_srcLastModified__ = 'Thu Jan 21 15:27:08 2016'
__CHEETAH_docstring__ = 'Autogenerated by Cheetah: The Python-Powered Template Engine'
if __CHEETAH_versionTuple__ < RequiredCheetahVersionTuple:
raise AssertionError(
'This template was compiled with Cheetah version'
' %s. Templates compiled before version %s must be recompiled.'%(
__CHEETAH_version__, RequiredCheetahVersion))
##################################################
## CLASSES
class moviedelete(Template):
##################################################
## CHEETAH GENERATED METHODS
def __init__(self, *args, **KWs):
super(moviedelete, self).__init__(*args, **KWs)
if not self._CHEETAH__instanceInitialized:
cheetahKWArgs = {}
allowedKWs = 'searchList namespaces filter filtersLib errorCatcher'.split()
for k,v in KWs.items():
if k in allowedKWs: cheetahKWArgs[k] = v
self._initCheetahInstance(**cheetahKWArgs)
def respond(self, trans=None):
## CHEETAH: main method generated for this template
if (not trans and not self._CHEETAH__isBuffering and not callable(self.transaction)):
trans = self.transaction # is None unless self.awake() was called
if not trans:
trans = DummyTransaction()
_dummyTrans = True
else: _dummyTrans = False
write = trans.response().write
SL = self._CHEETAH__searchList
_filter = self._CHEETAH__currentFilter
########################################
## START - generated method body
_orig_filter_91587011 = _filter
filterName = u'WebSafe'
if self._CHEETAH__filters.has_key("WebSafe"):
_filter = self._CHEETAH__currentFilter = self._CHEETAH__filters[filterName]
else:
_filter = self._CHEETAH__currentFilter = \
self._CHEETAH__filters[filterName] = getattr(self._CHEETAH__filtersLib, filterName)(self).filter
write(u'''<?xml version="1.0" encoding="UTF-8"?>
<e2simplexmlresult>
\t<e2state>''')
_v = VFFSL(SL,"result",True) # u'$result' on line 4, col 11
if _v is not None: write(_filter(_v, rawExpr=u'$result')) # from line 4, col 11.
write(u'''</e2state>
\t<e2statetext>''')
_v = VFFSL(SL,"message",True) # u'$message' on line 5, col 15
if _v is not None: write(_filter(_v, rawExpr=u'$message')) # from line 5, col 15.
write(u'''</e2statetext>\t
</e2simplexmlresult>
''')
_filter = self._CHEETAH__currentFilter = _orig_filter_91587011
########################################
## END - generated method body
return _dummyTrans and trans.response().getvalue() or ""
##################################################
## CHEETAH GENERATED ATTRIBUTES
_CHEETAH__instanceInitialized = False
_CHEETAH_version = __CHEETAH_version__
_CHEETAH_versionTuple = __CHEETAH_versionTuple__
_CHEETAH_genTime = __CHEETAH_genTime__
_CHEETAH_genTimestamp = __CHEETAH_genTimestamp__
_CHEETAH_src = __CHEETAH_src__
_CHEETAH_srcLastModified = __CHEETAH_srcLastModified__
_mainCheetahMethod_for_moviedelete= 'respond'
## END CLASS DEFINITION
if not hasattr(moviedelete, '_initCheetahAttributes'):
templateAPIClass = getattr(moviedelete, '_CHEETAH_templateClass', Template)
templateAPIClass._addCheetahPlumbingCodeToClass(moviedelete)
# CHEETAH was developed by Tavis Rudd and Mike Orr
# with code, advice and input from many other volunteers.
# For more information visit http://www.CheetahTemplate.org/
##################################################
## if run from command line:
if __name__ == '__main__':
from Cheetah.TemplateCmdLineIface import CmdLineIface
CmdLineIface(templateObj=moviedelete()).run()
| gpl-2.0 | -7,490,806,011,654,685,000 | 34.155405 | 212 | 0.63252 | false |
AndresVillan/pyafipws.web2py-app | controllers/email.py | 12 | 2505 |
# -*- coding: utf-8 -*-
# module for sending e-mails
import os
COMPROBANTES_PATH = os.path.join(request.env.web2py_path,'applications',request.application,'private', 'comprobantes')
@auth.requires(auth.has_membership('administrador') or auth.has_membership('emisor'))
def enviar_comprobante():
comprobante = db.comprobante[int(request.args[0])]
if not comprobante: raise HTTP("Comprobante inexistente")
try:
variables = db(db.variables).select().first()
        # message variables
empresa = str(variables.empresa)
punto_vta = str(comprobante.punto_vta)
cliente = str(comprobante.nombre_cliente)
tipocbte = str(comprobante.tipocbte.ds)
cbte_nro = str(comprobante.cbte_nro)
fecha_vto = str(comprobante.fecha_venc_pago)
fecha_cbte = str(comprobante.fecha_cbte)
url_descarga = variables.url + "salida/invoice/comprobante/" + str(comprobante.id)
mail.settings.server = variables.mail_server
mail.settings.sender = variables.mail_sender
mail.settings.login = variables.mail_login
except (AttributeError, KeyError, ValueError, TypeError), e:
        raise HTTP(500, "No se configuraron las variables generales o de envío. %s" % str(e))
mensaje = None
attachment = None
texto = variables.aviso_de_cbte_texto.replace("{{=empresa}}", empresa).replace("{{=cliente}}", cliente).replace("{{=tipocbte}}", tipocbte).replace("{{=cbte_nro}}", cbte_nro).replace("{{=fecha_cbte}}", fecha_cbte).replace("{{=fecha_vto}}", fecha_vto ).replace("{{=punto_vta}}", punto_vta).replace("{{=url_descarga}}", url_descarga)
asunto = variables.aviso_de_cbte_asunto.replace("{{=empresa}}", empresa).replace("{{=cliente}}", cliente).replace("{{=tipocbte}}", tipocbte).replace("{{=cbte_nro}}", cbte_nro).replace("{{=fecha_cbte}}", fecha_cbte).replace("{{=fecha_vto}}", fecha_vto ).replace("{{=punto_vta}}", punto_vta)
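    # e.g. a template "{{=empresa}}: comprobante nro {{=cbte_nro}}" renders
    # as "ACME: comprobante nro 12" (placeholders must match these names)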
nombre_cbte = "%s.pdf" % str(comprobante.id)
    # if the invoice file was generated by the system, attach it
if nombre_cbte in os.listdir(COMPROBANTES_PATH):
attachment = os.path.join(COMPROBANTES_PATH, nombre_cbte)
mail.send(str(comprobante.email), asunto, texto, \
attachments = Mail.Attachment(attachment))
else:
mail.send(str(comprobante.email), asunto, texto)
mensaje = "Se envió el comprobante a: " + str(comprobante.email)
return dict(mensaje = mensaje, asunto = asunto, texto = texto)
| agpl-3.0 | 699,383,110,120,476,000 | 48.019608 | 334 | 0.6612 | false |
mfherbst/spack | var/spack/repos/builtin/packages/bib2xhtml/package.py | 5 | 2210 |
##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
from glob import glob
class Bib2xhtml(Package):
"""bib2xhtml is a program that converts BibTeX files into HTML."""
homepage = "http://www.spinellis.gr/sw/textproc/bib2xhtml/"
url = 'http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz'
version('3.0-15-gf506', 'a26ba02fe0053bbbf2277bdf0acf8645')
def install(self, spec, prefix):
# Add the bst include files to the install directory
bst_include = join_path(prefix.share, 'bib2xhtml')
mkdirp(bst_include)
for bstfile in glob('html-*bst'):
install(bstfile, bst_include)
# Install the script and point it at the user's favorite perl
# and the bst include directory.
mkdirp(prefix.bin)
install('bib2xhtml', prefix.bin)
filter_file(r'#!/usr/bin/perl',
'#!/usr/bin/env BSTINPUTS=%s perl' % bst_include,
join_path(prefix.bin, 'bib2xhtml'))
| lgpl-2.1 | -148,352,556,650,403,000 | 44.102041 | 88 | 0.656561 | false |
spaceone/pyjs | examples/libtest/BuiltinTest.py | 6 | 39967 |
from UnitTest import UnitTest, PY27_BEHAVIOUR
try:
builtin_value = builtin.value
except:
builtin_value = None
if False:
import builtin
import builtin
from imports.cls import CLS
from imports.cls1 import CLS as CLS1
def other(**kwargs):
return kwargs
def foo(_some, _long, _list, _of, _arguments):
_additional = 5
return other(**locals())
class ColourThing(object):
def rgb():
def fset(self, rgb):
self.r, self.g, self.b = rgb
def fget(self):
return (self.r, self.g, self.b)
return property(**locals())
class C(object):
@property
def x(self):
return [1,2,3]
class Foo:
pass
class LocalsTest:
def __init__(self):
pass
def testargs(self, test1, test2):
return locals()
def testkwargs(self, test1=None, test2=None):
return locals()
class BuiltinTest(UnitTest):
def testMinMax(self):
self.assertEqual(max(1,2,3,4), 4)
self.assertEqual(min(1,2,3,4), 1)
self.assertEqual(max([1,2,3,4]), 4)
self.assertEqual(min([1,2,3,4]), 1)
self.assertTrue(max([5,3,4],[6,1,2]) == [6,1,2] , "max([5,3,4],[6,1,2])")
self.assertTrue(min([5,3,4],[6,1,2]) == [5,3,4] , "min([5,3,4],[6,1,2])")
def testIterProperty(self):
o = C()
tst = []
for y in iter(o.x):
tst.append(y)
self.assertTrue(tst, [1,2,3])
tst = []
try:
for y in o.x:
tst.append(y)
self.assertTrue(tst, [1,2,3])
except:
self.fail("#490 - no function iter.__iter__ not a function")
def testInt(self):
self.assertEqual(int("5"), 5)
self.assertEqual(int("09"), 9)
self.assertEqual(6, 6)
self.assertEqual(int("0"), 0)
self.assertEqual(int(0), 0)
self.assertEqual(int("-1"), -1)
self.assertEqual(int("- 2"), -2)
self.assertEqual(int(" - 3"), -3)
try:
int('')
self.fail("No int() argument error raised: int('')")
except ValueError, e:
self.assertEqual(e[0], "invalid literal for int() with base 10: ''")
try:
int(' ')
self.fail("No int() argument error raised: int(' ')")
except ValueError, e:
self.assertEqual(e[0], "invalid literal for int() with base 10: ''")
try:
int('not int')
self.fail("No int() argument error raised: int('not-int')")
except ValueError, e:
self.assertEqual(e[0], "invalid literal for int() with base 10: 'not int'")
try:
int(1, 10)
self.fail("No int() argument error raised: int(1, 10)")
except TypeError, e:
self.assertEqual(e[0], "int() can't convert non-string with explicit base")
try:
int('10px')
self.fail("No int() argument error raised: int('10px')")
except ValueError, e:
self.assertEqual(e[0], "invalid literal for int() with base 10: '10px'")
def testFloat(self):
self.assertEqual(float("5.1"), 5.1)
self.assertEqual(float("09"), 9)
self.assertEqual(6.1, 6.1)
self.assertEqual(float("0"), 0)
self.assertEqual(float(0), 0)
try:
float('not float')
self.fail("No float('not float') argument error raised")
except ValueError, e:
self.assertIn(e[0], [
"invalid literal for float(): not float", # <= 2.6
"could not convert string to float: not float", # 2.7
])
try:
float('')
self.fail("No float('') argument error raised")
except ValueError, e:
self.assertIn(e[0], [
"empty string for float()", # <= 2.6
"could not convert string to float: ", # 2.7
])
try:
float(' ')
self.fail("No float(' ') argument error raised")
except ValueError, e:
self.assertIn(e[0], [
"empty string for float()", # <= 2.6
"could not convert string to float: ", # 2.7
])
self.assertTrue(isinstance(1.0, float), "1.0 should be instance of float")
def testOrdChr(self):
for i in range(256):
self.assertEqual(ord(chr(i)), i)
def testMod(self):
self.assertEqual(12 % 5, 2)
self.assertEqual(-0.4 % 1, 0.6, "Modulo error 1 for negative base, bug #473")
self.assertEqual(-0.3 % 1.0, 0.7)
self.assertEqual(-1 % 2, 1)
self.assertEqual(-1 % -2, -1)
self.assertEqual(-1 % 3L, 2L)
self.assertEqual(-2 % -3L, -2L)
self.assertEqual(-1L % 4, 3L)
self.assertEqual(-3L % -4, -3L)
self.assertEqual(-1L % 5L, 4L)
self.assertEqual(-4L % -5L, -4L)
self.assertEqual(-1.0 % 6, 5.0)
self.assertEqual(-5.0 % -6, -5.0)
def testPower(self):
self.assertEqual(3 ** 4, 81)
def testPowerfunc(self):
self.assertEqual(pow(10, 3), 1000)
self.assertEqual(pow(10, 3, 7), 6)
def testHex(self):
self.assertEqual(hex(23), '0x17')
try:
h = hex(23.2)
self.fail("No hex() argument error raised")
except TypeError, why:
self.assertEqual(why.args[0], "hex() argument can't be converted to hex")
def testOct(self):
self.assertEqual(oct(23), '027')
try:
o = oct(23.2)
self.fail("No oct() argument error raised")
except TypeError, why:
self.assertEqual(str(why), "oct() argument can't be converted to oct")
def testRound(self):
self.assertEqual(round(13.12345), 13.0)
self.assertEqual(round(13.12345, 3), 13.123)
self.assertEqual(round(-13.12345), -13.0)
self.assertEqual(round(-13.12345, 3), -13.123)
self.assertEqual(round(13.62345), 14.0)
self.assertEqual(round(13.62345, 3), 13.623)
self.assertEqual(round(-13.62345), -14.0)
self.assertEqual(round(-13.62345, 3), -13.623)
def testDivmod(self):
test_set = [(14, 3, 4, 2),
(14.1, 3, 4.0, 2.1),
(14.1, 3.1, 4.0, 1.7),
]
for x, y, p, q in test_set:
d = divmod(x,y)
self.assertEqual(d[0], p)
self.assertEqual(abs(d[1] - q) < 0.00001, True)
def testFloorDiv(self):
self.assertEqual(1, 4//3)
self.assertEqual(1, 5//3)
self.assertEqual(2, 6//3)
def testAll(self):
self.assertEqual(all([True, 1, 'a']), True)
self.assertEqual(all([True, 1, None, 'a']), False)
self.assertEqual(all([True, 1, '', 'a']), False)
self.assertEqual(all([True, 1, False, 'a']), False)
def testAny(self):
self.assertEqual(any([True, 1, 'a']), True)
self.assertEqual(any([True, 1, None, 'a']), True)
self.assertEqual(any([True, 1, '', 'a']), True)
self.assertEqual(any([True, 1, False, 'a']), True)
self.assertEqual(any([False, '', None]), False)
def testRepr(self):
l1 = [1,2,3]
l2 = ["a", "b", "c"]
t1 = (4,5,6,7)
t2 = ("aa", "bb")
d1 = {'a': 1, "b": "B"}
d2 = {1: l1, 2: l2, 3: t1, 4: t2, 5:d1}
i1 = 10000
f1 = 1.5
self.assertEqual(repr(l1), '[1, 2, 3]')
self.assertEqual(l1.__repr__(), '[1, 2, 3]')
self.assertEqual(repr(l2), "['a', 'b', 'c']")
self.assertEqual(repr(t1), '(4, 5, 6, 7)')
self.assertEqual(repr(t2), "('aa', 'bb')")
self.assertEqual(repr(d1), "{'a': 1, 'b': 'B'}")
self.assertEqual(repr(d2), "{1: [1, 2, 3], 2: ['a', 'b', 'c'], 3: (4, 5, 6, 7), 4: ('aa', 'bb'), 5: {'a': 1, 'b': 'B'}}")
self.assertEqual(d2.__repr__(), "{1: [1, 2, 3], 2: ['a', 'b', 'c'], 3: (4, 5, 6, 7), 4: ('aa', 'bb'), 5: {'a': 1, 'b': 'B'}}")
self.assertEqual(repr(i1), '10000')
self.assertEqual(i1.__repr__(), '10000')
self.assertEqual(repr(f1), '1.5')
self.assertEqual(f1.__repr__(), '1.5', 'float.__repr__() returns type instead of value, bug #487')
self.assertEqual(`l1`, '[1, 2, 3]')
def testIsInstance(self):
s = 'hello'
self.assertTrue(isinstance(s, str), "s is a string")
self.assertFalse(isinstance(s, int), "s is a string not an integer")
s = 1
self.assertFalse(isinstance(s, str), "s is an integer not a string")
self.assertTrue(isinstance(s, int), "s is an integer")
self.assertFalse(isinstance('', list), "'' is not instance of list")
self.assertTrue(isinstance([], list), "[] is an instance of list")
class Cls(object):
pass
self.assertTrue(isinstance(Cls, type), "Bug #578 classes are instances of type")
def testImport(self):
self.assertEqual(builtin_value, None, "The builtin is loaded before import!")
try:
self.assertEqual(builtin.value, builtin.get_value())
except:
self.fail("Import failed for builtin")
from imports import overrideme
cls1 = CLS1()
self.assertTrue(CLS is CLS1, "CLS is CLS1")
self.assertTrue(isinstance(cls1, CLS), "isinstance(cls1, CLS)")
self.assertEqual(overrideme, "not overridden")
import imports.override
self.assertEqual(overrideme, "not overridden")
from imports import overrideme
try:
self.assertTrue(overrideme.overridden is True, "overrideme.overridden is True")
except:
self.fail("Exception on 'overrideme.overridden is True'")
import imports
self.assertTrue(CLS is imports.loccls.CLS, "CLS is imports.loccls.CLS")
self.assertTrue(CLS is imports.upcls.CLS, "CLS is imports.upcls.CLS")
    def testImportFrom(self):  # distinct name so it does not shadow testImport above
self.fail("Bug #XXX - from X import .. not completely implemented, only considering modules")
return
from imports import __doc__ as imports_doc
self.assertEqual(imports.__doc__, imports_doc, "Module object must have __doc__ attribute")
from imports import __name__ as imports_name
self.assertEqual(imports.__name__, imports_name)
# from ... import * tests, issue #615
self.assertEqual(imports.all_masked, False, "from ... import * should respect __all__, #615")
self.assertEqual(imports.all_override, True, "Should override globals, #615")
self.assertEqual(imports.all_import1, 1)
self.assertEqual(imports.all_import2, 3)
self.assertEqual(imports.all_import3, 3)
# import builtins module
import __builtin__
self.assertEqual(__builtin__.dict, dict, "__builtin__.dict != dict")
from __builtin__ import dict as dict_bltin
self.assertEqual(dict_bltin, dict, "__builtin__.dict != dict")
def testBitOperations(self):
self.assertEqual(1 << 2 - 1, 2, "shift error 1")
self.assertEqual((1 << 2) - 1, 3, "shift error 2")
self.assertEqual(1 & 3 + 1, 0, "and error 1")
self.assertEqual((1 & 3) + 1, 2, "and error 2")
self.assertEqual(4 >> 2, 1, "right shift error 1")
self.assertEqual(-4 >> 2, -1, "right shift error 2 - bug #341")
def testLocals(self):
v1 = 1
v2 = 2
local_vars = locals()
self.assertEqual(len(local_vars), 3)
self.assertEqual(local_vars['v1'], 1)
def fn1():
a = 1
def fn2():
b = 1
c = locals()
return c
return fn2()
local_vars = fn1()
self.assertEqual(local_vars, {'b': 1})
def fn2():
lx = 3
def g():
li = lx
return locals()
return g()
self.assertEqual(fn2(), {'li':3, 'lx':3}, "locals() bugs: #589")
def fn3():
lx = 3
def g():
def lh():
li = lx
return locals()
return locals(), lh()
return g()
outer_locals, inner_locals = fn3()
self.assertEqual(inner_locals, {'li':3, 'lx':3}, "locals() bugs: #589")
keys = outer_locals.keys()
keys.sort()
self.assertEqual(keys, ['lh', 'lx'], "locals() bugs: #589")
def fn4(x):
class X:
x = 12
def fn4(self):
return x
locals()
return X
self.assertEqual(fn4(1).x, 12)
args = {'test1': 5, 'test2': 'hello'}
la = LocalsTest()
argsreturn = la.testkwargs(**args)
args['self'] = la
self.assertEqual(args, argsreturn)
del args['self']
argsreturn = la.testargs(**args)
args['self'] = la
self.assertEqual(args, argsreturn)
t = ColourThing()
t.rgb = 1
self.assertEqual(t.rgb, 1)
args = foo(0, 1, 2, 3, 4)
self.assertEqual(args, {'_some': 0, '_additional': 5,
'_of': 3, '_list': 2,
'_long': 1, '_arguments': 4})
def testIfExp(self):
var = 1 if True else 0
self.assertEqual(var, 1)
var = 1 if False else 0
self.assertEqual(var, 0)
var = 1 if [] else 0
self.assertEqual(var, 0)
var = 1 if not [] else 0
self.assertEqual(var, 1)
def testRange(self):
r = range(3)
self.assertEqual(r, [0, 1, 2])
r = range(2, 5)
self.assertEqual(r, [2, 3, 4])
r = range(2, 15, 3)
self.assertEqual(r, [2, 5, 8, 11, 14])
r = range(15, 2, -3)
self.assertEqual(r, [15, 12, 9, 6, 3])
r = range(15, 2, 3)
self.assertEqual(r, [])
r = range(-6, -2, -1)
self.assertEqual(r, [])
r = range(2, 1, 2)
self.assertEqual(r, [])
r = range(0, 2, 2)
self.assertEqual(r, [0])
def testXRange(self):
r = [i for i in xrange(3)]
self.assertEqual(r, [0, 1, 2])
r = [i for i in xrange(2, 5)]
self.assertEqual(r, [2, 3, 4])
r = [i for i in xrange(2, 15, 3)]
self.assertEqual(r, [2, 5, 8, 11, 14])
r = [i for i in xrange(15, 2, -3)]
self.assertEqual(r, [15, 12, 9, 6, 3])
r = [i for i in xrange(15, 2, 3)]
self.assertEqual(r, [])
r = [i for i in xrange(-6, -2, -1)]
self.assertEqual(r, [])
self.assertEqual(str(xrange(3)), "xrange(3)")
self.assertEqual(str(xrange(3,4)), "xrange(3, 4)")
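        # CPython normalizes the repr's stop value to start + len*step, so
        # the next two reprs differ from the arguments passed in.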
self.assertEqual(str(xrange(3,4,5)), "xrange(3, 8, 5)")
self.assertEqual(str(xrange(14,3,-5)), "xrange(14, -1, -5)")
def testForLoop(self):
n1 = 0
n2 = 0
for i in range(10):
n1 += i
for i in xrange(4):
n2 += i
self.assertEqual(n1, 45)
        self.assertEqual(n2, 6)
self.assertEqual(i, 3)
try:
for i in xrange(4):
raise StopIteration
self.fail("Failed to raise StopIteration")
except StopIteration:
self.assertTrue(True)
self.assertEqual(i, 0)
e = 0
i = -1
for i in range(1):
pass
else:
e = 1
self.assertEqual(i, 0)
self.assertEqual(e, 1)
e = 0
i = -1
for i in range(0):
pass
else:
e = 1
self.assertEqual(i, -1)
self.assertEqual(e, 1, "bug #316 for X in Y:... else ...")
e = 0
i = -1
for i in range(1):
e = 1
break
else:
e = 2
self.assertEqual(i, 0)
self.assertEqual(e, 1)
class X(object):
pass
x = X()
x.a = 1
for x.a in [3,4,5]:
pass
self.assertEqual(x.a, 5)
d = {}
for d['zz'] in [1,2,3]:
pass
self.assertEqual(d, {'zz': 3})
l = [1]
for l[0] in [1,2,3]:
pass
self.assertEqual(l, [3])
l = [1,3,4]
for l[1:2] in [[5,6,7]]:
pass
self.assertEqual(l, [1, 5, 6, 7, 4])
x = ((1, 2), 3, (4, 5))
for (a, b), c, (d, e) in [x]*5:
pass
self.assertEqual([a, b, c, d, e], [1,2,3,4,5])
for [[a, b], c, [d, e]] in [x]*5:
pass
self.assertEqual([a, b, c, d, e], [1,2,3,4,5])
def testIter(self):
class i:
def __init__(self):
self.idx = 0
def __iter__(self):
return self
def next(self):
self.idx += 1
if self.idx == 5:
raise StopIteration
return self.idx
res = []
try:
for j in i():
res.append(j)
if len(res) > 5:
self.fail("too many items in user-defined iterator")
break
except:
self.fail("error in user-defined iterator (caught here so tests can proceed)")
self.assertEqual(res, range(1,5))
def testSorted(self):
lst1 = range(10)
lst2 = range(10)
lst2.reverse()
self.assertTrue(lst1 == sorted(lst2), "lst1 == sorted(lst2)")
        self.assertTrue(lst1 == sorted(xrange(10)), "lst1 == sorted(xrange(10))")
self.assertTrue(lst2 == sorted(xrange(10), reverse=True), "lst2 == sorted(xrange(10), reverse=True)")
def testReversed(self):
lst1 = range(10)
lst2 = range(10)
lst2.reverse()
tpl1 = tuple(lst1)
self.assertTrue(lst1 == list(reversed(lst2)), "lst1 == reversed(lst2)")
        self.assertTrue(lst2 == list(reversed(xrange(10))), "lst2 == reversed(xrange(10))")
        self.assertTrue(lst2 == list(reversed(tpl1)), "lst2 == reversed(tpl1)")
dict1 = {'a': 'A', 'b': 'B'}
self.assertRaises(TypeError, reversed, dict1)
def testType(self):
try:
self.assertTrue(type(object) is type)
except NotImplementedError, why:
self.fail("Bug #229" + str(why))
self.assertTrue(type([]) is type([]))
self.assertTrue(type([]) is list)
try:
self.assertTrue(type([]) == list)
except:
self.fail("Bug #515")
self.assertTrue(type("") is str, "str")
self.assertTrue(type(True) is bool, "bool")
self.assertTrue(type(1) is int, "int")
self.assertTrue(type(1L) is long, "long")
self.assertTrue(type(1.1) is float, "float 1.1")
self.assertTrue(type(1.0) is float, "float 1.0 issue #524")
    def testIterBuiltin(self):  # distinct name so it does not shadow testIter above
class G(object):
def __getitem__(self, i):
if 0 <= i <= 4:
return i
raise IndexError("index out of range")
def fn():
for i in [0,1,2,3,4]:
yield i
lst = [0,1,2,3,4]
self.assertEqual(lst, list(iter(lst)), "iter(lst)")
g = G()
self.assertEqual(lst, list(iter(g)), "iter(g)")
self.assertEqual(lst, list(iter(fn().next, 5)), "iter(fn().next, 5)")
self.assertEqual([0,1], list(iter(fn().next, 2)), "iter(fn().next, 2)")
def testReduce(self):
v = reduce(lambda x, y: x+y, [1, 2, 3, 4, 5])
self.assertEqual(v, 15)
def testZip(self):
lst1 = [0,1,2,3]
lst2 = [10,11,12]
dict1 = {'a': 'A', 'b': 'B'}
v = zip(lst1)
self.assertEqual(v, [(0,), (1,), (2,), (3,)])
v = zip(lst1, lst2)
self.assertEqual(v, [(0, 10), (1, 11), (2, 12)])
v = zip(dict1)
self.assertEqual(v, [('a',), ('b',)])
v = zip(lst1, dict1, lst2)
self.assertEqual(v, [(0, 'a', 10), (1, 'b', 11)])
def testSum(self):
self.assertEqual(6, sum([0,1,2,3]))
self.assertEqual(5, sum([0,1,2,3], -1))
self.assertRaises(TypeError, sum, [0,1,2,3], "a")
def testSlice(self):
# repr()
self.assertEqual(repr(slice(1, 2, 3)), "slice(1, 2, 3)", "slice() is mis-used, issue #582")
# cmp, partial
s1 = slice(1, 2, 3)
s2 = slice(1, 2, 3)
s3 = slice(1, 2, 4)
self.assertEqual(s1, s2)
self.assertNotEqual(s1, s3, "slice() is mis-used, issue #582")
# members
s = slice(1)
self.assertEqual(s.start, None)
self.assertEqual(s.stop, 1)
self.assertEqual(s.step, None)
s = slice(1, 2)
self.assertEqual(s.start, 1)
self.assertEqual(s.stop, 2)
self.assertEqual(s.step, None)
s = slice(1, 2, 3)
self.assertEqual(s.start, 1)
self.assertEqual(s.stop, 2)
self.assertEqual(s.step, 3)
class AnyClass:
pass
obj = AnyClass()
s = slice(obj)
self.assertTrue(s.stop is obj)
# indices
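        # slice.indices(length) clamps start/stop into range and fills in
        # defaults; any slice is then reproducible with plain integers, e.g.
        # range(*slice(None, None, -2).indices(10)) == [9, 7, 5, 3, 1]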
self.assertEqual(slice(None ).indices(10), (0, 10, 1))
self.assertEqual(slice(None, None, 2).indices(10), (0, 10, 2))
self.assertEqual(slice(1, None, 2).indices(10), (1, 10, 2))
self.assertEqual(slice(None, None, -1).indices(10), (9, -1, -1))
self.assertEqual(slice(None, None, -2).indices(10), (9, -1, -2))
self.assertEqual(slice(3, None, -2).indices(10), (3, -1, -2))
self.assertEqual(slice(None, -9).indices(10), (0, 1, 1))
self.assertEqual(slice(None, -10).indices(10), (0, 0, 1))
self.assertEqual(slice(None, -11).indices(10), (0, 0, 1))
self.assertEqual(slice(None, -10, -1).indices(10), (9, 0, -1))
self.assertEqual(slice(None, -11, -1).indices(10), (9, -1, -1))
self.assertEqual(slice(None, -12, -1).indices(10), (9, -1, -1))
self.assertEqual(slice(None, 9).indices(10), (0, 9, 1))
self.assertEqual(slice(None, 10).indices(10), (0, 10, 1))
self.assertEqual(slice(None, 11).indices(10), (0, 10, 1))
self.assertEqual(slice(None, 8, -1).indices(10), (9, 8, -1))
self.assertEqual(slice(None, 9, -1).indices(10), (9, 9, -1))
self.assertEqual(slice(None, 10, -1).indices(10), (9, 9, -1))
self.assertEqual(
slice(-100, 100 ).indices(10),
slice(None).indices(10)
)
self.assertEqual(
slice(100, -100, -1).indices(10),
slice(None, None, -1).indices(10)
)
self.assertEqual(slice(-100L, 100L, 2L).indices(10), (0, 10, 2))
### begin from CPython 2.7 Lib/test/test_str.py
def test_format(self):
self.assertEqual(''.format(), '')
self.assertEqual('a'.format(), 'a')
self.assertEqual('ab'.format(), 'ab')
self.assertEqual('a{{'.format(), 'a{')
self.assertEqual('a}}'.format(), 'a}')
self.assertEqual('{{b'.format(), '{b')
self.assertEqual('}}b'.format(), '}b')
self.assertEqual('a{{b'.format(), 'a{b')
# examples from the PEP:
import datetime
self.assertEqual("My name is {0}".format('Fred'), "My name is Fred")
self.assertEqual("My name is {0[name]}".format(dict(name='Fred')),
"My name is Fred")
self.assertEqual("My name is {0} :-{{}}".format('Fred'),
"My name is Fred :-{}")
d = datetime.date(2007, 8, 18)
self.assertEqual("The year is {0.year}".format(d),
"The year is 2007")
# classes we'll use for testing
class C:
def __init__(self, x=100):
self._x = x
def __format__(self, spec):
return spec
class D:
def __init__(self, x):
self.x = x
def __format__(self, spec):
return str(self.x)
# class with __str__, but no __format__
class E:
def __init__(self, x):
self.x = x
def __str__(self):
return 'E(' + self.x + ')'
# class with __repr__, but no __format__ or __str__
class F:
def __init__(self, x):
self.x = x
def __repr__(self):
return 'F(' + self.x + ')'
# class with __format__ that forwards to string, for some format_spec's
class G:
def __init__(self, x):
self.x = x
def __str__(self):
return "string is " + self.x
def __format__(self, format_spec):
if format_spec == 'd':
return 'G(' + self.x + ')'
return object.__format__(self, format_spec)
class Galt:
def __init__(self, x):
self.x = x
def __str__(self):
return "string is " + self.x
def __format__(self, format_spec):
if format_spec == 'd':
return 'G(' + self.x + ')'
return format(str(self), format_spec)
# class that returns a bad type from __format__
class H:
def __format__(self, format_spec):
return 1.0
class I(datetime.date):
def __format__(self, format_spec):
return self.strftime(format_spec)
class J(int):
def __format__(self, format_spec):
return int.__format__(self * 2, format_spec)
self.assertEqual(''.format(), '')
self.assertEqual('abc'.format(), 'abc')
self.assertEqual('{0}'.format('abc'), 'abc')
self.assertEqual('{0:}'.format('abc'), 'abc')
self.assertEqual('X{0}'.format('abc'), 'Xabc')
self.assertEqual('{0}X'.format('abc'), 'abcX')
self.assertEqual('X{0}Y'.format('abc'), 'XabcY')
self.assertEqual('{1}'.format(1, 'abc'), 'abc')
self.assertEqual('X{1}'.format(1, 'abc'), 'Xabc')
self.assertEqual('{1}X'.format(1, 'abc'), 'abcX')
self.assertEqual('X{1}Y'.format(1, 'abc'), 'XabcY')
self.assertEqual('{0}'.format(-15), '-15')
self.assertEqual('{0}{1}'.format(-15, 'abc'), '-15abc')
self.assertEqual('{0}X{1}'.format(-15, 'abc'), '-15Xabc')
self.assertEqual('{{'.format(), '{')
self.assertEqual('}}'.format(), '}')
self.assertEqual('{{}}'.format(), '{}')
self.assertEqual('{{x}}'.format(), '{x}')
self.assertEqual('{{{0}}}'.format(123), '{123}')
self.assertEqual('{{{{0}}}}'.format(), '{{0}}')
self.assertEqual('}}{{'.format(), '}{')
self.assertEqual('}}x{{'.format(), '}x{')
# weird field names
self.assertEqual("{0[foo-bar]}".format({'foo-bar':'baz'}), 'baz')
self.assertEqual("{0[foo bar]}".format({'foo bar':'baz'}), 'baz')
self.assertEqual("{0[ ]}".format({' ':3}), '3')
self.assertEqual('{foo._x}'.format(foo=C(20)), '20')
self.assertEqual('{1}{0}'.format(D(10), D(20)), '2010')
self.assertEqual('{0._x.x}'.format(C(D('abc'))), 'abc')
self.assertEqual('{0[0]}'.format(['abc', 'def']), 'abc')
self.assertEqual('{0[1]}'.format(['abc', 'def']), 'def')
self.assertEqual('{0[1][0]}'.format(['abc', ['def']]), 'def')
self.assertEqual('{0[1][0].x}'.format(['abc', [D('def')]]), 'def')
# strings
self.assertEqual('{0:.3s}'.format('abc'), 'abc')
self.assertEqual('{0:.3s}'.format('ab'), 'ab')
self.assertEqual('{0:.3s}'.format('abcdef'), 'abc')
self.assertEqual('{0:.0s}'.format('abcdef'), '')
self.assertEqual('{0:3.3s}'.format('abc'), 'abc')
self.assertEqual('{0:2.3s}'.format('abc'), 'abc')
self.assertEqual('{0:2.2s}'.format('abc'), 'ab')
self.assertEqual('{0:3.2s}'.format('abc'), 'ab ')
self.assertEqual('{0:x<0s}'.format('result'), 'result')
self.assertEqual('{0:x<5s}'.format('result'), 'result')
self.assertEqual('{0:x<6s}'.format('result'), 'result')
self.assertEqual('{0:x<7s}'.format('result'), 'resultx')
self.assertEqual('{0:x<8s}'.format('result'), 'resultxx')
self.assertEqual('{0: <7s}'.format('result'), 'result ')
self.assertEqual('{0:<7s}'.format('result'), 'result ')
self.assertEqual('{0:>7s}'.format('result'), ' result')
self.assertEqual('{0:>8s}'.format('result'), ' result')
self.assertEqual('{0:^8s}'.format('result'), ' result ')
self.assertEqual('{0:^9s}'.format('result'), ' result ')
self.assertEqual('{0:^10s}'.format('result'), ' result ')
self.assertEqual('{0:10000}'.format('a'), 'a' + ' ' * 9999)
self.assertEqual('{0:10000}'.format(''), ' ' * 10000)
self.assertEqual('{0:10000000}'.format(''), ' ' * 10000000)
# format specifiers for user defined type
self.assertEqual('{0:abc}'.format(C()), 'abc')
# !r and !s coercions
self.assertEqual('{0!s}'.format('Hello'), 'Hello')
self.assertEqual('{0!s:}'.format('Hello'), 'Hello')
self.assertEqual('{0!s:15}'.format('Hello'), 'Hello ')
self.assertEqual('{0!s:15s}'.format('Hello'), 'Hello ')
self.assertEqual('{0!r}'.format('Hello'), "'Hello'")
self.assertEqual('{0!r:}'.format('Hello'), "'Hello'")
self.assertEqual('{0!r}'.format(F('Hello')), 'F(Hello)')
# test fallback to object.__format__
self.assertEqual('{0}'.format({}), '{}')
self.assertEqual('{0}'.format([]), '[]')
self.assertEqual('{0}'.format([1]), '[1]')
self.assertEqual('{0}'.format(E('data')), 'E(data)')
self.assertEqual('{0:d}'.format(G('data')), 'G(data)')
self.assertEqual('{0!s}'.format(G('dat1')), 'string is dat1')
self.assertEqual('{0:^10}'.format(E('dat2')), ' E(dat2) ')
self.assertEqual('{0:^10s}'.format(E('dat3')), ' E(dat3) ')
self.assertEqual('{0:>15s}'.format(Galt('dat4')), ' string is dat4')
        # If Issue #674 is fixed, the following should no longer raise an
        # exception (currently classified as a known issue); Galt can then
        # be replaced by G and Galt removed.
try:
self.assertEqual('{0:>15s}'.format(G('dat5')), ' string is dat5')
except:
self.fail("object.__format__ missing#674")
self.assertEqual("{0:date: %Y-%m-%d}".format(
I(year=2007, month=8, day=27)),
"date: 2007-08-27",
"Issue #673. datetime.date should have __format___")
# test deriving from a builtin type and overriding __format__
self.assertEqual("{0}".format(J(10)), "20",
'Issue #670 derived from int/float/str not instance of object')
# string format specifiers
self.assertEqual('{0:}'.format('a'), 'a')
# computed format specifiers
self.assertEqual("{0:.{1}}".format('hello world', 5), 'hello')
self.assertEqual("{0:.{1}s}".format('hello world', 5), 'hello')
self.assertEqual("{0:.{precision}s}".format('hello world', precision=5), 'hello')
self.assertEqual("{0:{width}.{precision}s}".format('hello world', width=10, precision=5), 'hello ')
self.assertEqual("{0:{width}.{precision}s}".format('hello world', width='10', precision='5'), 'hello ')
# test various errors
self.format_raises(ValueError, '{')
self.format_raises(ValueError, '}')
self.format_raises(ValueError, 'a{')
self.format_raises(ValueError, 'a}')
self.format_raises(ValueError, '{a')
self.format_raises(ValueError, '}a')
self.format_raises(IndexError, '{0}')
self.format_raises(IndexError, '{1}', 'abc')
self.format_raises(KeyError, '{x}')
self.format_raises(ValueError, "}{")
self.format_raises(ValueError, "{")
self.format_raises(ValueError, "}")
self.format_raises(ValueError, "abc{0:{}")
self.format_raises(ValueError, "{0")
self.format_raises(IndexError, "{0.}")
self.format_raises(ValueError, "{0.}", 0)
self.format_raises(IndexError, "{0[}")
self.format_raises(ValueError, "{0[}", [])
self.format_raises(KeyError, "{0]}")
self.format_raises(ValueError, "{0.[]}", 0)
self.format_raises(ValueError, "{0..foo}", 0)
self.format_raises(ValueError, "{0[0}", 0)
self.format_raises(ValueError, "{0[0:foo}", 0)
self.format_raises(KeyError, "{c]}")
self.format_raises(ValueError, "{{ {{{0}}", 0)
self.format_raises(ValueError, "{0}}", 0)
self.format_raises(KeyError, "{foo}", bar=3)
self.format_raises(ValueError, "{0!x}", 3)
self.format_raises(ValueError, "{0!}", 0)
self.format_raises(ValueError, "{0!rs}", 0)
self.format_raises(ValueError, "{!}")
self.format_raises(IndexError, "{:}")
self.format_raises(IndexError, "{:s}")
self.format_raises(IndexError, "{}")
# issue 6089
self.format_raises(ValueError, "{0[0]x}", [None])
self.format_raises(ValueError, "{0[0](10)}", [None])
# can't have a replacement on the field name portion
# this is Issue 671: string & list indices must be integers, not str
self.format_raises(TypeError, '{0[{1}]}', 'abcdefg', 4)
# exceed maximum recursion depth
self.format_raises(ValueError, "{0:{1:{2}}}", 'abc', 's', '')
self.format_raises(ValueError, "{0:{1:{2:{3:{4:{5:{6}}}}}}}",
0, 1, 2, 3, 4, 5, 6, 7)
# string format spec errors
self.format_raises(ValueError, "{0:-s}", '')
self.assertRaises(ValueError, format, "", "-")
self.format_raises(ValueError, "{0:=s}", '')
def test_format_auto_numbering(self):
class C:
def __init__(self, x=100):
self._x = x
def __format__(self, spec):
return spec
self.assertEqual('{}'.format(10), '10')
self.assertEqual('{:5}'.format('s'), 's ')
self.assertEqual('{!r}'.format('s'), "'s'")
self.assertEqual('{._x}'.format(C(10)), '10')
self.assertEqual('{[1]}'.format([1, 2]), '2')
self.assertEqual('{[a]}'.format({'a':4, 'b':2}), '4')
self.assertEqual('a{}b{}c'.format(0, 1), 'a0b1c')
self.assertEqual('a{:{}}b'.format('x', '^10'), 'a x b')
self.assertEqual('a{:{}x}b'.format(20, '#'), 'a0x14b')
# can't mix and match numbering and auto-numbering
self.format_raises(ValueError, '{}{1}', 1, 2)
self.format_raises(ValueError, '{1}{}', 1, 2)
self.format_raises(ValueError, '{:{1}}', 1, 2)
self.format_raises(ValueError, '{0:{}}', 1, 2)
# can mix and match auto-numbering and named
self.assertEqual('{f}{}'.format(4, f='test'), 'test4')
self.assertEqual('{}{f}'.format(4, f='test'), '4test')
self.assertEqual('{:{f}}{g}{}'.format(1, 3, g='g', f=2), ' 1g3')
self.assertEqual('{f:{}}{}{g}'.format(2, 4, f=1, g='g'), ' 14g')
# this function is needed as the following raises a TypeError
# self.assertRaises(ValueError, '{'.format)
def format_raises(self, e, *args, **kw):
self.startTest()
try:
args[0].format(*args[1:], **kw)
except e:
return True
else:
if hasattr(e, '__name__'):
excName = e.__name__
else:
excName = str(e)
self.fail("%s not raised" % excName)
return False
### end from CPython 2.7 Lib/test/test_str.py
### from Lib/Test/test_float.py
def test_format_float(self):
# these should be rewritten to use both format(x, spec) and
# x.__format__(spec)
self.assertEqual(format(0.0, 'f'), '0.000000')
# the default is 'g', except for empty format spec
#self.assertEqual(format(0.0, ''), '0.0')
#self.assertEqual(format(0.01, ''), '0.01')
#self.assertEqual(format(0.01, 'g'), '0.01')
# empty presentation type should format in the same way as str
# (issue 5920)
x = 100/7.
self.assertEqual(format(x, ''), str(x))
#self.assertEqual(format(x, '-'), str(x))
#self.assertEqual(format(x, '>'), str(x))
#self.assertEqual(format(x, '2'), str(x))
self.assertEqual(format(1.0, 'f'), '1.000000')
self.assertEqual(format(-1.0, 'f'), '-1.000000')
self.assertEqual(format( 1.0, ' f'), ' 1.000000')
self.assertEqual(format(-1.0, ' f'), '-1.000000')
self.assertEqual(format( 1.0, '+f'), '+1.000000')
self.assertEqual(format(-1.0, '+f'), '-1.000000')
# % formatting
self.assertEqual(format(-1.0, '%'), '-100.000000%')
# conversion to string should fail
self.format_raises(ValueError, "{:s}", 3.0)
# other format specifiers shouldn't work on floats,
# in particular int specifiers
for format_spec in ([chr(x) for x in range(ord('a'), ord('z')+1)] +
[chr(x) for x in range(ord('A'), ord('Z')+1)]):
if not format_spec in 'eEfFgGn%':
                # Issue #524, no distinction between float and integer
issue524_solved = False
try:
format(1.0, 'd')
except ValueError:
issue524_solved = True
if not issue524_solved and format_spec in 'bcdoxX':
continue
self.assertRaises(ValueError, format, 0.0, format_spec)
self.assertRaises(ValueError, format, 1.0, format_spec)
self.assertRaises(ValueError, format, -1.0, format_spec)
self.assertRaises(ValueError, format, 1e100, format_spec)
self.assertRaises(ValueError, format, -1e100, format_spec)
self.assertRaises(ValueError, format, 1e-100, format_spec)
self.assertRaises(ValueError, format, -1e-100, format_spec)
# CPython issue 3382: 'f' and 'F' with inf's and nan's
# Issue #675 NAN and INF should be implemented
try:
INF = float('inf')
NAN = float('nan')
except ValueError:
pass
else:
self.assertEqual('{0:f}'.format(INF), 'inf')
self.assertEqual('{0:F}'.format(INF), 'INF')
self.assertEqual('{0:f}'.format(-INF), '-inf')
self.assertEqual('{0:F}'.format(-INF), '-INF')
self.assertEqual('{0:f}'.format(NAN), 'nan')
self.assertEqual('{0:F}'.format(NAN), 'NAN')
def test_issue5864(self):
self.assertEqual(format(123.456, '.4'), '123.5')
self.assertEqual(format(1234.56, '.4'), '1.235e+03')
self.assertEqual(format(12345.6, '.4'), '1.235e+04')
### end from Lib/Test/test_float.py
### from pypy test_newformat.py
def test_sign(self):
self.assertEquals(format(-6), "-6")
self.assertEquals(format(-6, "-"), "-6")
self.assertEquals(format(-6, "+"), "-6")
self.assertEquals(format(-6, " "), "-6")
self.assertEquals(format(6, " "), " 6")
self.assertEquals(format(6, "-"), "6")
self.assertEquals(format(6, "+"), "+6")
def test_thousands_separator(self):
self.assertEquals(format(123, ","), "123")
self.assertEquals(format(12345, ","), "12,345")
self.assertEquals(format(123456789, ","), "123,456,789")
self.assertEquals(format(12345, "7,"), " 12,345")
self.assertEquals(format(12345, "<7,"), "12,345 ")
self.assertEquals(format(1234, "0=10,"), "00,001,234")
self.assertEquals(format(1234, "010,"), "00,001,234")
### end from pypy test_newformat.py
| apache-2.0 | -290,746,517,224,576,060 | 35.835945 | 134 | 0.510721 | false |
deved69/pelican-1 | pelican/tests/test_cache.py | 12 | 7304 |
from __future__ import unicode_literals
import os
from codecs import open
try:
from unittest.mock import MagicMock
except ImportError:
try:
from mock import MagicMock
except ImportError:
MagicMock = False
from shutil import rmtree
from tempfile import mkdtemp
from pelican.generators import ArticlesGenerator, PagesGenerator
from pelican.tests.support import unittest, get_settings
CUR_DIR = os.path.dirname(__file__)
CONTENT_DIR = os.path.join(CUR_DIR, 'content')
class TestCache(unittest.TestCase):
def setUp(self):
self.temp_cache = mkdtemp(prefix='pelican_cache.')
def tearDown(self):
rmtree(self.temp_cache)
def _get_cache_enabled_settings(self):
settings = get_settings(filenames={})
settings['CACHE_CONTENT'] = True
settings['LOAD_CONTENT_CACHE'] = True
settings['CACHE_PATH'] = self.temp_cache
return settings
@unittest.skipUnless(MagicMock, 'Needs Mock module')
def test_article_object_caching(self):
"""Test Article objects caching at the generator level"""
settings = self._get_cache_enabled_settings()
settings['CONTENT_CACHING_LAYER'] = 'generator'
settings['DEFAULT_DATE'] = (1970, 1, 1)
settings['READERS'] = {'asc': None}
generator = ArticlesGenerator(
context=settings.copy(), settings=settings,
path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
generator.generate_context()
self.assertTrue(hasattr(generator, '_cache'))
generator = ArticlesGenerator(
context=settings.copy(), settings=settings,
path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
generator.readers.read_file = MagicMock()
generator.generate_context()
"""
3 Files don't get cached because they were not valid
- article_with_comments.html
- article_with_null_attributes.html
- 2012-11-30_md_w_filename_meta#foo-bar.md
"""
self.assertEqual(generator.readers.read_file.call_count, 3)
@unittest.skipUnless(MagicMock, 'Needs Mock module')
def test_article_reader_content_caching(self):
"""Test raw article content caching at the reader level"""
settings = self._get_cache_enabled_settings()
settings['READERS'] = {'asc': None}
generator = ArticlesGenerator(
context=settings.copy(), settings=settings,
path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
generator.generate_context()
self.assertTrue(hasattr(generator.readers, '_cache'))
generator = ArticlesGenerator(
context=settings.copy(), settings=settings,
path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
readers = generator.readers.readers
for reader in readers.values():
reader.read = MagicMock()
generator.generate_context()
for reader in readers.values():
self.assertEqual(reader.read.call_count, 0)
@unittest.skipUnless(MagicMock, 'Needs Mock module')
def test_article_ignore_cache(self):
"""Test that all the articles are read again when not loading cache
used in --ignore-cache or autoreload mode"""
settings = self._get_cache_enabled_settings()
settings['READERS'] = {'asc': None}
generator = ArticlesGenerator(
context=settings.copy(), settings=settings,
path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
generator.readers.read_file = MagicMock()
generator.generate_context()
self.assertTrue(hasattr(generator, '_cache_open'))
orig_call_count = generator.readers.read_file.call_count
settings['LOAD_CONTENT_CACHE'] = False
generator = ArticlesGenerator(
context=settings.copy(), settings=settings,
path=CONTENT_DIR, theme=settings['THEME'], output_path=None)
generator.readers.read_file = MagicMock()
generator.generate_context()
self.assertEqual(generator.readers.read_file.call_count, orig_call_count)
@unittest.skipUnless(MagicMock, 'Needs Mock module')
def test_page_object_caching(self):
"""Test Page objects caching at the generator level"""
settings = self._get_cache_enabled_settings()
settings['CONTENT_CACHING_LAYER'] = 'generator'
settings['PAGE_PATHS'] = ['TestPages']
settings['READERS'] = {'asc': None}
generator = PagesGenerator(
context=settings.copy(), settings=settings,
path=CUR_DIR, theme=settings['THEME'], output_path=None)
generator.generate_context()
self.assertTrue(hasattr(generator, '_cache'))
generator = PagesGenerator(
context=settings.copy(), settings=settings,
path=CUR_DIR, theme=settings['THEME'], output_path=None)
generator.readers.read_file = MagicMock()
generator.generate_context()
"""
1 File doesn't get cached because it was not valid
- bad_page.rst
"""
self.assertEqual(generator.readers.read_file.call_count, 1)
@unittest.skipUnless(MagicMock, 'Needs Mock module')
def test_page_reader_content_caching(self):
"""Test raw page content caching at the reader level"""
settings = self._get_cache_enabled_settings()
settings['PAGE_PATHS'] = ['TestPages']
settings['READERS'] = {'asc': None}
generator = PagesGenerator(
context=settings.copy(), settings=settings,
path=CUR_DIR, theme=settings['THEME'], output_path=None)
generator.generate_context()
self.assertTrue(hasattr(generator.readers, '_cache'))
generator = PagesGenerator(
context=settings.copy(), settings=settings,
path=CUR_DIR, theme=settings['THEME'], output_path=None)
readers = generator.readers.readers
for reader in readers.values():
reader.read = MagicMock()
generator.generate_context()
for reader in readers.values():
self.assertEqual(reader.read.call_count, 0)
@unittest.skipUnless(MagicMock, 'Needs Mock module')
def test_page_ignore_cache(self):
"""Test that all the pages are read again when not loading cache
used in --ignore_cache or autoreload mode"""
settings = self._get_cache_enabled_settings()
settings['PAGE_PATHS'] = ['TestPages']
settings['READERS'] = {'asc': None}
generator = PagesGenerator(
context=settings.copy(), settings=settings,
path=CUR_DIR, theme=settings['THEME'], output_path=None)
generator.readers.read_file = MagicMock()
generator.generate_context()
self.assertTrue(hasattr(generator, '_cache_open'))
orig_call_count = generator.readers.read_file.call_count
settings['LOAD_CONTENT_CACHE'] = False
generator = PagesGenerator(
context=settings.copy(), settings=settings,
path=CUR_DIR, theme=settings['THEME'], output_path=None)
generator.readers.read_file = MagicMock()
generator.generate_context()
self.assertEqual(generator.readers.read_file.call_count, orig_call_count)
| agpl-3.0 | -920,149,951,727,094,500 | 38.481081 | 81 | 0.644578 | false |
rallylee/gem5 | tests/quick/se/00.hello/test.py | 10 | 1808 |
# Copyright (c) 2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
root.system.cpu[0].workload = Process(cmd = 'hello',
executable = binpath('hello'))
if root.system.cpu[0].checker != NULL:
root.system.cpu[0].checker.workload = root.system.cpu[0].workload
| bsd-3-clause | 4,237,835,582,691,025,000 | 55.5 | 72 | 0.769358 | false |
wpic/flask-appointment-calendar | fbone/production_config.py | 1 | 1211 |
import os
from .config import DefaultConfig
class ProductionConfig(DefaultConfig):
DEBUG = False
SECRET_KEY = 'secret key'
INSTANCE_FOLDER_PATH = os.path.join('/tmp', 'instance')
# Flask-Sqlalchemy: http://packages.python.org/Flask-SQLAlchemy/config.html
SQLALCHEMY_ECHO = True
# SQLITE for prototyping.
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + \
INSTANCE_FOLDER_PATH + '/db.sqlite'
# MYSQL for production.
# SQLALCHEMY_DATABASE_URI =
# 'mysql://username:password@server/db?charset=utf8'
# Flask-babel: http://pythonhosted.org/Flask-Babel/
ACCEPT_LANGUAGES = ['en']
BABEL_DEFAULT_LOCALE = 'en'
# Flask-cache: http://pythonhosted.org/Flask-Cache/
CACHE_TYPE = 'simple'
CACHE_DEFAULT_TIMEOUT = 60
# Flask-mail: http://pythonhosted.org/flask-mail/
MAIL_DEBUG = DEBUG
MAIL_SERVER = 'smtp.sendgrid.net'
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USE_SSL = False
# Should put MAIL_USERNAME and MAIL_PASSWORD in production under instance
# folder.
MAIL_USERNAME = 'username'
MAIL_PASSWORD = 'password'
MAIL_DEFAULT_SENDER = '[email protected]'
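    # How an application factory might consume this class (a sketch; the
    # dotted import path and factory wiring are assumptions, not part of
    # this file):
    #
    #   from flask import Flask
    #   app = Flask(__name__)
    #   app.config.from_object('fbone.production_config.ProductionConfig')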
| bsd-3-clause | 405,773,673,421,662,850 | 30.051282 | 79 | 0.659785 | false |
listamilton/supermilton.repository | plugin.video.sbt-thenoite/UniversalAnalytics/HTTPLog.py | 19 | 3860 |
#!/usr/bin/python
###############################################################################
# Formatting filter for urllib2's HTTPHandler(debuglevel=1) output
# Copyright (c) 2013, Analytics Pros
#
# This project is free software, distributed under the BSD license.
# Analytics Pros offers consulting and integration services if your firm needs
# assistance in strategy, implementation, or auditing existing work.
###############################################################################
import sys, re, os
from cStringIO import StringIO
class BufferTranslator(object):
""" Provides a buffer-compatible interface for filtering buffer content.
"""
parsers = []
def __init__(self, output):
self.output = output
self.encoding = getattr(output, 'encoding', None)
def write(self, content):
content = self.translate(content)
self.output.write(content)
@staticmethod
def stripslashes(content):
return content.decode('string_escape')
@staticmethod
def addslashes(content):
return content.encode('string_escape')
def translate(self, line):
for pattern, method in self.parsers:
match = pattern.match(line)
if match:
return method(match)
return line
class LineBufferTranslator(BufferTranslator):
""" Line buffer implementation supports translation of line-format input
even when input is not already line-buffered. Caches input until newlines
occur, and then dispatches translated input to output buffer.
"""
def __init__(self, *a, **kw):
self._linepending = []
super(LineBufferTranslator, self).__init__(*a, **kw)
def write(self, _input):
lines = _input.splitlines(True)
for i in range(0, len(lines)):
last = i
if lines[i].endswith('\n'):
prefix = len(self._linepending) and ''.join(self._linepending) or ''
self.output.write(self.translate(prefix + lines[i]))
del self._linepending[0:]
last = -1
if last >= 0:
self._linepending.append(lines[ last ])
def __del__(self):
if len(self._linepending):
self.output.write(self.translate(''.join(self._linepending)))
class HTTPTranslator(LineBufferTranslator):
""" Translates output from |urllib2| HTTPHandler(debuglevel = 1) into
    HTTP-compatible, readable text structures for human analysis.
"""
RE_LINE_PARSER = re.compile(r'^(?:([a-z]+):)\s*(\'?)([^\r\n]*)\2(?:[\r\n]*)$')
RE_LINE_BREAK = re.compile(r'(\r?\n|(?:\\r)?\\n)')
RE_HTTP_METHOD = re.compile(r'^(POST|GET|HEAD|DELETE|PUT|TRACE|OPTIONS)')
RE_PARAMETER_SPACER = re.compile(r'&([a-z0-9]+)=')
@classmethod
def spacer(cls, line):
return cls.RE_PARAMETER_SPACER.sub(r' &\1= ', line)
def translate(self, line):
parsed = self.RE_LINE_PARSER.match(line)
if parsed:
value = parsed.group(3)
stage = parsed.group(1)
if stage == 'send': # query string is rendered here
return '\n# HTTP Request:\n' + self.stripslashes(value)
elif stage == 'reply':
return '\n\n# HTTP Response:\n' + self.stripslashes(value)
elif stage == 'header':
return value + '\n'
else:
return value
return line
def consume(outbuffer = None): # Capture standard output
sys.stdout = HTTPTranslator(outbuffer or sys.stdout)
return sys.stdout
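# Typical use, as a sketch (urllib2 writes its debuglevel=1 dump to standard
# output, which consume() wraps; the URL is illustrative only):
#
#   import urllib2
#   consume(sys.stdout)
#   opener = urllib2.build_opener(urllib2.HTTPHandler(debuglevel = 1))
#   opener.open('http://example.com/collect')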
if __name__ == '__main__':
consume(sys.stdout).write(sys.stdin.read())
print '\n'
# vim: set nowrap tabstop=4 shiftwidth=4 softtabstop=0 expandtab textwidth=0 filetype=python foldmethod=indent foldcolumn=4
| gpl-2.0 | -2,030,346,191,084,365,300 | 30.900826 | 123 | 0.579534 | false |
inirudebwoy/pulsar | pulsar/utils/websocket.py | 5 | 12387 |
# -*- coding: utf-8 -*-
'''WebSocket_ Protocol is implemented via the :class:`Frame` and
:class:`FrameParser` classes.
To obtain a frame parser one should use the :func:`frame_parser` function.
frame parser
~~~~~~~~~~~~~~~~~~~
.. autofunction:: frame_parser
Frame
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: Frame
:members:
:member-order: bysource
Frame Parser
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: FrameParser
:members:
:member-order: bysource
parse_close
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. autofunction:: parse_close
.. _WebSocket: http://tools.ietf.org/html/rfc6455'''
import os
from struct import pack, unpack
from array import array
from .pep import to_bytes
from .exceptions import ProtocolError
try:
from .lib import FrameParser as CFrameParser
except ImportError:  # pragma nocover
CFrameParser = None
CLOSE_CODES = {
1000: "OK",
1001: "going away",
1002: "protocol error",
1003: "unsupported type",
# 1004: - (reserved)
# 1005: no status code (internal)
# 1006: connection closed abnormally (internal)
1007: "invalid data",
1008: "policy violation",
1009: "message too big",
1010: "extension required",
1011: "unexpected error",
# 1015: TLS failure (internal)
}
DEFAULT_VERSION = 13
SUPPORTED_VERSIONS = (DEFAULT_VERSION,)
WS_EXTENSIONS = {}
WS_PROTOCOLS = {}
def get_version(version):
try:
version = int(version or DEFAULT_VERSION)
except Exception:
pass
if version not in SUPPORTED_VERSIONS:
raise ProtocolError('Version %s not supported.' % version)
return version
class Extension(object):
def receive(self, data):
return data
def send(self, data):
return data
def frame_parser(version=None, kind=0, extensions=None, protocols=None,
pyparser=False):
'''Create a new :class:`FrameParser` instance.
:param version: protocol version, the default is 13
:param kind: the kind of parser, and integer between 0 and 3 (check the
:class:`FrameParser` documentation for details)
:param extensions: not used at the moment
:param protocols: not used at the moment
:param pyparser: if ``True`` (default ``False``) uses the python frame
parser implementation rather than the much faster cython
implementation.
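
    A minimal round trip, as a sketch (``pyparser=True`` avoids any
    dependence on the optional C extension; the client-side parser masks
    frames, the server-side parser unmasks them)::

        client = frame_parser(kind=1, pyparser=True)
        server = frame_parser(kind=0, pyparser=True)
        frame = server.decode(client.encode('hello'))
        assert frame.is_message and frame.body == 'hello'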
'''
version = get_version(version)
Parser = FrameParser if pyparser else CFrameParser
# extensions, protocols
return Parser(version, kind, ProtocolError, close_codes=CLOSE_CODES)
def websocket_mask(data, masking_key):
mask_size = len(masking_key)
key = array('B', masking_key)
data = array('B', data)
for i in range(len(data)):
data[i] ^= key[i % mask_size]
return data.tobytes()
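# Masking is a plain XOR with the 4-byte key, so applying it twice with the
# same key restores the original payload, e.g.:
#
#   masked = websocket_mask(b'payload', b'abcd')
#   assert websocket_mask(masked, b'abcd') == b'payload'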
class Frame:
_body = None
_masking_key = None
def __init__(self, opcode, final, payload_length):
self._opcode = opcode
self._final = final
self._payload_length = payload_length
@property
def final(self):
return self._final
@property
def opcode(self):
return self._opcode
@property
def body(self):
return self._body
@property
def masking_key(self):
return self._masking_key
@property
def is_message(self):
return self._opcode == 1
@property
def is_bytes(self):
return self._opcode == 2
@property
def is_close(self):
return self._opcode == 8
@property
def is_ping(self):
return self._opcode == 9
@property
def is_pong(self):
return self._opcode == 10
class FrameParser(object):
'''Decoder and encoder for the websocket protocol.
.. attribute:: version
Optional protocol version (Default 13).
.. attribute:: kind
* 0 for parsing client's frames and sending server frames (to be used
in the server)
* 1 for parsing server frames and sending client frames (to be used
by the client)
    * 2 for unmasked data in both directions
    * 3 for masked data in both directions (both parsed and sent
      frames carry a masking key)
    '''
def __init__(self, version, kind, ProtocolError, extensions=None,
protocols=None, close_codes=None):
self.version = version
self.kind = kind
self.frame = None
self.buffer = bytearray()
self._opcodes = (0, 1, 2, 8, 9, 10)
self._encode_mask_length = 0
self._decode_mask_length = 0
if kind == 0:
self._decode_mask_length = 4
elif kind == 1:
self._encode_mask_length = 4
elif kind == 3:
self._decode_mask_length = 4
self._encode_mask_length = 4
self._max_payload = 1 << 63
self._extensions = extensions
self._protocols = protocols
self._close_codes = close_codes or CLOSE_CODES
@property
def max_payload(self):
return self._max_payload
@property
def decode_mask_length(self):
return self._decode_mask_length
@property
def encode_mask_length(self):
return self._encode_mask_length
@property
def extensions(self):
return self._extensions
@property
def protocols(self):
return self._protocols
def ping(self, body=None):
'''return a `ping` :class:`Frame`.'''
return self.encode(body, opcode=0x9)
def pong(self, body=None):
'''return a `pong` :class:`Frame`.'''
return self.encode(body, opcode=0xA)
def close(self, code=None):
'''return a `close` :class:`Frame`.
'''
code = code or 1000
body = pack('!H', code)
body += self._close_codes.get(code, '').encode('utf-8')
return self.encode(body, opcode=0x8)
def continuation(self, body=None, final=True):
'''return a `continuation` :class:`Frame`.'''
return self.encode(body, opcode=0, final=final)
def encode(self, message, final=True, masking_key=None,
opcode=None, rsv1=0, rsv2=0, rsv3=0):
'''Encode a ``message`` for writing into the wire.
To produce several frames for a given large message use
:meth:`multi_encode` method.
'''
fin = 1 if final else 0
opcode, masking_key, data = self._info(message, opcode, masking_key)
return self._encode(data, opcode, masking_key, fin,
rsv1, rsv2, rsv3)
def multi_encode(self, message, masking_key=None, opcode=None,
rsv1=0, rsv2=0, rsv3=0, max_payload=0):
'''Encode a ``message`` into several frames depending on size.
Returns a generator of bytes to be sent over the wire.
'''
max_payload = max(2, max_payload or self._max_payload)
opcode, masking_key, data = self._info(message, opcode, masking_key)
#
while data:
if len(data) >= max_payload:
chunk, data, fin = (data[:max_payload],
data[max_payload:], 0)
else:
chunk, data, fin = data, b'', 1
yield self._encode(chunk, opcode, masking_key, fin,
rsv1, rsv2, rsv3)
def decode(self, data=None):
frame = self.frame
mask_length = self._decode_mask_length
if data:
self.buffer.extend(data)
if frame is None:
if len(self.buffer) < 2:
return
chunk = self._chunk(2)
first_byte, second_byte = unpack("BB", chunk)
fin = (first_byte >> 7) & 1
# rsv1 = (first_byte >> 6) & 1
# rsv2 = (first_byte >> 5) & 1
# rsv3 = (first_byte >> 4) & 1
opcode = first_byte & 0xf
if fin not in (0, 1):
raise ProtocolError('FIN must be 0 or 1')
if bool(mask_length) != bool(second_byte & 0x80):
if mask_length:
raise ProtocolError('unmasked client frame.')
else:
raise ProtocolError('masked server frame.')
payload_length = second_byte & 0x7f
# All control frames MUST have a payload length of 125 bytes
# or less
if opcode > 7:
if payload_length > 125:
raise ProtocolError(
'WEBSOCKET control frame too large')
elif not fin:
raise ProtocolError(
'WEBSOCKET control frame fragmented')
self.frame = frame = Frame(opcode, bool(fin), payload_length)
if frame._masking_key is None:
if frame._payload_length == 0x7e: # 126
if len(self.buffer) < 2 + mask_length: # 2 + 4 for mask
return
chunk = self._chunk(2)
frame._payload_length = unpack("!H", chunk)[0]
elif frame._payload_length == 0x7f: # 127
if len(self.buffer) < 8 + mask_length: # 8 + 4 for mask
return
chunk = self._chunk(8)
frame._payload_length = unpack("!Q", chunk)[0]
elif len(self.buffer) < mask_length:
return
if mask_length:
frame._masking_key = self._chunk(mask_length)
else:
frame._masking_key = b''
if len(self.buffer) >= frame._payload_length:
self.frame = None
chunk = self._chunk(frame._payload_length)
if self._extensions:
for extension in self._extensions:
chunk = extension.receive(frame, self.buffer)
if frame._masking_key:
chunk = websocket_mask(chunk, frame._masking_key)
if frame.opcode == 1:
frame._body = chunk.decode("utf-8", "replace")
else:
frame._body = chunk
return frame
def _encode(self, data, opcode, masking_key, fin, rsv1, rsv2, rsv3):
buffer = bytearray()
length = len(data)
mask_bit = 128 if masking_key else 0
buffer.append(((fin << 7) | (rsv1 << 6) | (rsv2 << 5) |
(rsv3 << 4) | opcode))
if length < 126:
buffer.append(mask_bit | length)
elif length < 65536:
buffer.append(mask_bit | 126)
buffer.extend(pack('!H', length))
elif length < self._max_payload:
buffer.append(mask_bit | 127)
buffer.extend(pack('!Q', length))
else:
raise ProtocolError('WEBSOCKET frame too large')
if masking_key:
buffer.extend(masking_key)
buffer.extend(websocket_mask(data, masking_key))
else:
buffer.extend(data)
return bytes(buffer)
def _info(self, message, opcode, masking_key):
mask_length = self._encode_mask_length
if mask_length:
masking_key = to_bytes(masking_key or os.urandom(4))
assert len(masking_key) == mask_length, "bad masking key"
else:
masking_key = b''
if opcode is None:
opcode = 1 if isinstance(message, str) else 2
data = to_bytes(message or b'', 'utf-8')
if opcode not in self._opcodes:
raise ProtocolError('WEBSOCKET opcode a reserved value')
elif opcode > 7:
if len(data) > 125:
raise ProtocolError('WEBSOCKET control frame too large')
if opcode == 8:
# TODO CHECK CLOSE FRAME STATUS CODE
pass
return opcode, masking_key, data
def _chunk(self, length):
chunk = bytes(self.buffer[:length])
self.buffer = self.buffer[length:]
return chunk
def parse_close(data):
'''Parse the body of a close :class:`Frame`.
    Returns a tuple (``code``, ``reason``) if successful, otherwise
    raises :class:`.ProtocolError`.
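
    For example (``b'\\x03\\xe8'`` is the big-endian encoding of status
    code 1000)::

        parse_close(b'\\x03\\xe8OK')   # -> (1000, 'OK')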
'''
length = len(data)
if length == 0:
return 1005, ''
elif length == 1:
raise ProtocolError("Close frame too short")
else:
code, = unpack('!H', data[:2])
if not (code in CLOSE_CODES or 3000 <= code < 5000):
raise ProtocolError("Invalid status code for websocket")
reason = data[2:].decode('utf-8')
return code, reason
if CFrameParser is None: # pragma nocover
CFrameParser = FrameParser
| bsd-3-clause | 6,373,649,098,871,250,000 | 29.212195 | 77 | 0.559942 | false |
brandon-rhodes/numpy | numpy/core/defchararray.py | 14 | 67932 |
"""
This module contains a set of functions for vectorized string
operations and methods.
.. note::
The `chararray` class exists for backwards compatibility with
Numarray, it is not recommended for new development. Starting from numpy
1.4, if one needs arrays of strings, it is recommended to use arrays of
`dtype` `object_`, `string_` or `unicode_`, and use the free functions
in the `numpy.char` module for fast vectorized string operations.
Some methods will only be available if the corresponding string method is
available in your version of Python.
The preferred alias for `defchararray` is `numpy.char`.
"""
from __future__ import division, absolute_import, print_function
import sys
from .numerictypes import string_, unicode_, integer, object_, bool_, character
from .numeric import ndarray, compare_chararrays
from .numeric import array as narray
from numpy.core.multiarray import _vec_string
from numpy.compat import asbytes, long
import numpy
__all__ = ['chararray',
'equal', 'not_equal', 'greater_equal', 'less_equal', 'greater', 'less',
'str_len', 'add', 'multiply', 'mod', 'capitalize', 'center', 'count',
'decode', 'encode', 'endswith', 'expandtabs', 'find', 'format',
'index', 'isalnum', 'isalpha', 'isdigit', 'islower', 'isspace',
'istitle', 'isupper', 'join', 'ljust', 'lower', 'lstrip',
'partition', 'replace', 'rfind', 'rindex', 'rjust', 'rpartition',
'rsplit', 'rstrip', 'split', 'splitlines', 'startswith', 'strip',
'swapcase', 'title', 'translate', 'upper', 'zfill',
'isnumeric', 'isdecimal',
'array', 'asarray']
_globalvar = 0
if sys.version_info[0] >= 3:
_unicode = str
_bytes = bytes
else:
_unicode = unicode
_bytes = str
_len = len
def _use_unicode(*args):
"""
Helper function for determining the output type of some string
operations.
For an operation on two ndarrays, if at least one is unicode, the
result should be unicode.
"""
for x in args:
if (isinstance(x, _unicode)
or issubclass(numpy.asarray(x).dtype.type, unicode_)):
return unicode_
return string_
def _to_string_or_unicode_array(result):
"""
Helper function to cast a result back into a string or unicode array
if an object array must be used as an intermediary.
"""
return numpy.asarray(result.tolist())
def _clean_args(*args):
"""
Helper function for delegating arguments to Python string
functions.
Many of the Python string operations that have optional arguments
do not use 'None' to indicate a default value. In these cases,
we need to remove all `None` arguments, and those following them.
"""
newargs = []
for chk in args:
if chk is None:
break
newargs.append(chk)
return newargs
def _get_num_chars(a):
"""
Helper function that returns the number of characters per field in
a string or unicode array. This is to abstract out the fact that
for a unicode array this is itemsize / 4.
"""
if issubclass(a.dtype.type, unicode_):
return a.itemsize // 4
return a.itemsize
def equal(x1, x2):
"""
Return (x1 == x2) element-wise.
Unlike `numpy.equal`, this comparison is performed by first
stripping whitespace characters from the end of the string. This
behavior is provided for backward-compatibility with numarray.
Parameters
----------
x1, x2 : array_like of str or unicode
Input arrays of the same shape.
Returns
-------
out : ndarray or bool
Output array of bools, or a single bool if x1 and x2 are scalars.
See Also
--------
not_equal, greater_equal, less_equal, greater, less
"""
return compare_chararrays(x1, x2, '==', True)
def not_equal(x1, x2):
"""
Return (x1 != x2) element-wise.
Unlike `numpy.not_equal`, this comparison is performed by first
stripping whitespace characters from the end of the string. This
behavior is provided for backward-compatibility with numarray.
Parameters
----------
x1, x2 : array_like of str or unicode
Input arrays of the same shape.
Returns
-------
out : ndarray or bool
Output array of bools, or a single bool if x1 and x2 are scalars.
See Also
--------
equal, greater_equal, less_equal, greater, less
"""
return compare_chararrays(x1, x2, '!=', True)
def greater_equal(x1, x2):
"""
Return (x1 >= x2) element-wise.
Unlike `numpy.greater_equal`, this comparison is performed by
first stripping whitespace characters from the end of the string.
This behavior is provided for backward-compatibility with
numarray.
Parameters
----------
x1, x2 : array_like of str or unicode
Input arrays of the same shape.
Returns
-------
out : ndarray or bool
Output array of bools, or a single bool if x1 and x2 are scalars.
See Also
--------
equal, not_equal, less_equal, greater, less
"""
return compare_chararrays(x1, x2, '>=', True)
def less_equal(x1, x2):
"""
Return (x1 <= x2) element-wise.
Unlike `numpy.less_equal`, this comparison is performed by first
stripping whitespace characters from the end of the string. This
behavior is provided for backward-compatibility with numarray.
Parameters
----------
x1, x2 : array_like of str or unicode
Input arrays of the same shape.
Returns
-------
out : ndarray or bool
Output array of bools, or a single bool if x1 and x2 are scalars.
See Also
--------
equal, not_equal, greater_equal, greater, less
"""
return compare_chararrays(x1, x2, '<=', True)
def greater(x1, x2):
"""
Return (x1 > x2) element-wise.
Unlike `numpy.greater`, this comparison is performed by first
stripping whitespace characters from the end of the string. This
behavior is provided for backward-compatibility with numarray.
Parameters
----------
x1, x2 : array_like of str or unicode
Input arrays of the same shape.
Returns
-------
out : ndarray or bool
Output array of bools, or a single bool if x1 and x2 are scalars.
See Also
--------
equal, not_equal, greater_equal, less_equal, less
"""
return compare_chararrays(x1, x2, '>', True)
def less(x1, x2):
"""
Return (x1 < x2) element-wise.
    Unlike `numpy.less`, this comparison is performed by first
stripping whitespace characters from the end of the string. This
behavior is provided for backward-compatibility with numarray.
Parameters
----------
x1, x2 : array_like of str or unicode
Input arrays of the same shape.
Returns
-------
out : ndarray or bool
Output array of bools, or a single bool if x1 and x2 are scalars.
See Also
--------
equal, not_equal, greater_equal, less_equal, greater
"""
return compare_chararrays(x1, x2, '<', True)
def str_len(a):
"""
Return len(a) element-wise.
Parameters
----------
a : array_like of str or unicode
Returns
-------
out : ndarray
Output array of integers
See also
--------
__builtin__.len
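    Examples
    --------
    >>> np.char.str_len(['hello', 'hi'])
    array([5, 2])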
"""
return _vec_string(a, integer, '__len__')
def add(x1, x2):
"""
Return element-wise string concatenation for two arrays of str or unicode.
Arrays `x1` and `x2` must have the same shape.
Parameters
----------
x1 : array_like of str or unicode
Input array.
x2 : array_like of str or unicode
Input array.
Returns
-------
add : ndarray
        Output array of `string_` or `unicode_`, depending on input types,
of the same shape as `x1` and `x2`.
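    Examples
    --------
    Byte-string input gives a byte-string result:

    >>> np.char.add(['num', 'doc'], ['py', 'ument'])
    array(['numpy', 'document'],
        dtype='|S8')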
"""
arr1 = numpy.asarray(x1)
arr2 = numpy.asarray(x2)
out_size = _get_num_chars(arr1) + _get_num_chars(arr2)
dtype = _use_unicode(arr1, arr2)
return _vec_string(arr1, (dtype, out_size), '__add__', (arr2,))
def multiply(a, i):
"""
    Return (a * i), that is, each string repeated `i` times,
    element-wise.
Values in `i` of less than 0 are treated as 0 (which yields an
empty string).
Parameters
----------
a : array_like of str or unicode
i : array_like of ints
Returns
-------
out : ndarray
Output array of str or unicode, depending on input types
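    Examples
    --------
    >>> np.char.multiply(['ab'], [3])
    array(['ababab'],
        dtype='|S6')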
"""
a_arr = numpy.asarray(a)
i_arr = numpy.asarray(i)
if not issubclass(i_arr.dtype.type, integer):
raise ValueError("Can only multiply by integers")
out_size = _get_num_chars(a_arr) * max(long(i_arr.max()), 0)
return _vec_string(
a_arr, (a_arr.dtype.type, out_size), '__mul__', (i_arr,))
def mod(a, values):
"""
    Return (a % i), that is pre-Python 2.6 string formatting
    (interpolation), element-wise for a pair of array_likes of str
or unicode.
Parameters
----------
a : array_like of str or unicode
values : array_like of values
These values will be element-wise interpolated into the string.
Returns
-------
out : ndarray
Output array of str or unicode, depending on input types
See also
--------
str.__mod__
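    Examples
    --------
    >>> np.char.mod(['%.2f'], [3.14159])
    array(['3.14'],
        dtype='|S4')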
"""
return _to_string_or_unicode_array(
_vec_string(a, object_, '__mod__', (values,)))
def capitalize(a):
"""
Return a copy of `a` with only the first character of each element
capitalized.
Calls `str.capitalize` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like of str or unicode
Input array of strings to capitalize.
Returns
-------
out : ndarray
Output array of str or unicode, depending on input
types
See also
--------
str.capitalize
Examples
--------
>>> c = np.array(['a1b2','1b2a','b2a1','2a1b'],'S4'); c
array(['a1b2', '1b2a', 'b2a1', '2a1b'],
dtype='|S4')
>>> np.char.capitalize(c)
array(['A1b2', '1b2a', 'B2a1', '2a1b'],
dtype='|S4')
"""
a_arr = numpy.asarray(a)
return _vec_string(a_arr, a_arr.dtype, 'capitalize')
def center(a, width, fillchar=' '):
"""
Return a copy of `a` with its elements centered in a string of
length `width`.
Calls `str.center` element-wise.
Parameters
----------
a : array_like of str or unicode
width : int
The length of the resulting strings
fillchar : str or unicode, optional
The padding character to use (default is space).
Returns
-------
out : ndarray
Output array of str or unicode, depending on input
types
See also
--------
str.center
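    Examples
    --------
    >>> np.char.center(['a'], 5, fillchar='*')
    array(['**a**'],
        dtype='|S5')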
"""
a_arr = numpy.asarray(a)
width_arr = numpy.asarray(width)
size = long(numpy.max(width_arr.flat))
if numpy.issubdtype(a_arr.dtype, numpy.string_):
fillchar = asbytes(fillchar)
return _vec_string(
a_arr, (a_arr.dtype.type, size), 'center', (width_arr, fillchar))
def count(a, sub, start=0, end=None):
"""
Returns an array with the number of non-overlapping occurrences of
substring `sub` in the range [`start`, `end`].
Calls `str.count` element-wise.
Parameters
----------
a : array_like of str or unicode
sub : str or unicode
The substring to search for.
start, end : int, optional
Optional arguments `start` and `end` are interpreted as slice
notation to specify the range in which to count.
Returns
-------
out : ndarray
Output array of ints.
See also
--------
str.count
Examples
--------
>>> c = np.array(['aAaAaA', ' aA ', 'abBABba'])
>>> c
array(['aAaAaA', ' aA ', 'abBABba'],
dtype='|S7')
>>> np.char.count(c, 'A')
array([3, 1, 1])
>>> np.char.count(c, 'aA')
array([3, 1, 0])
>>> np.char.count(c, 'A', start=1, end=4)
array([2, 1, 1])
>>> np.char.count(c, 'A', start=1, end=3)
array([1, 0, 0])
"""
return _vec_string(a, integer, 'count', [sub, start] + _clean_args(end))
def decode(a, encoding=None, errors=None):
"""
Calls `str.decode` element-wise.
The set of available codecs comes from the Python standard library,
and may be extended at runtime. For more information, see the
:mod:`codecs` module.
Parameters
----------
a : array_like of str or unicode
encoding : str, optional
The name of an encoding
errors : str, optional
Specifies how to handle encoding errors
Returns
-------
out : ndarray
See also
--------
str.decode
Notes
-----
The type of the result will depend on the encoding specified.
Examples
--------
>>> c = np.array(['aAaAaA', ' aA ', 'abBABba'])
>>> c
array(['aAaAaA', ' aA ', 'abBABba'],
dtype='|S7')
>>> c = np.char.encode(c, encoding='cp037'); c
array(['\\x81\\xc1\\x81\\xc1\\x81\\xc1', '@@\\x81\\xc1@@',
'\\x81\\x82\\xc2\\xc1\\xc2\\x82\\x81'],
dtype='|S7')
>>> np.char.decode(c, encoding='cp037')
array([u'aAaAaA', u'  aA  ', u'abBABba'],
dtype='<U7')
"""
return _to_string_or_unicode_array(
_vec_string(a, object_, 'decode', _clean_args(encoding, errors)))
def encode(a, encoding=None, errors=None):
"""
Calls `str.encode` element-wise.
The set of available codecs comes from the Python standard library,
and may be extended at runtime. For more information, see the codecs
module.
Parameters
----------
a : array_like of str or unicode
encoding : str, optional
The name of an encoding
errors : str, optional
Specifies how to handle encoding errors
Returns
-------
out : ndarray
See also
--------
str.encode
Notes
-----
The type of the result will depend on the encoding specified.
"""
return _to_string_or_unicode_array(
_vec_string(a, object_, 'encode', _clean_args(encoding, errors)))
def endswith(a, suffix, start=0, end=None):
"""
Returns a boolean array which is `True` where the string element
in `a` ends with `suffix`, otherwise `False`.
Calls `str.endswith` element-wise.
Parameters
----------
a : array_like of str or unicode
suffix : str
start, end : int, optional
With optional `start`, test beginning at that position. With
optional `end`, stop comparing at that position.
Returns
-------
out : ndarray
Outputs an array of bools.
See also
--------
str.endswith
Examples
--------
>>> s = np.array(['foo', 'bar'])
>>> s[0] = 'foo'
>>> s[1] = 'bar'
>>> s
array(['foo', 'bar'],
dtype='|S3')
>>> np.char.endswith(s, 'ar')
array([False, True], dtype=bool)
>>> np.char.endswith(s, 'a', start=1, end=2)
array([False, True], dtype=bool)
"""
return _vec_string(
a, bool_, 'endswith', [suffix, start] + _clean_args(end))
def expandtabs(a, tabsize=8):
"""
Return a copy of each string element where all tab characters are
replaced by one or more spaces.
Calls `str.expandtabs` element-wise.
Return a copy of each string element where all tab characters are
replaced by one or more spaces, depending on the current column
and the given `tabsize`. The column number is reset to zero after
each newline occurring in the string. This doesn't understand other
non-printing characters or escape sequences.
Parameters
----------
a : array_like of str or unicode
Input array
tabsize : int, optional
Replace tabs with `tabsize` number of spaces. If not given defaults
to 8 spaces.
Returns
-------
out : ndarray
Output array of str or unicode, depending on input type
See also
--------
str.expandtabs
"""
return _to_string_or_unicode_array(
_vec_string(a, object_, 'expandtabs', (tabsize,)))
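# Usage sketch (hedged; 'a\tbc' expands to 'a' plus three spaces at
# tabsize=4):
#
# >>> np.char.expandtabs(np.array(['a\tbc']), tabsize=4)
# array(['a   bc'],
#     dtype='|S6')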
def find(a, sub, start=0, end=None):
"""
For each element, return the lowest index in the string where
substring `sub` is found.
Calls `str.find` element-wise.
For each element, return the lowest index in the string where
substring `sub` is found, such that `sub` is contained in the
range [`start`, `end`].
Parameters
----------
a : array_like of str or unicode
sub : str or unicode
start, end : int, optional
Optional arguments `start` and `end` are interpreted as in
slice notation.
Returns
-------
out : ndarray or int
Output array of ints. Returns -1 if `sub` is not found.
See also
--------
str.find
"""
return _vec_string(
a, integer, 'find', [sub, start] + _clean_args(end))
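# Usage sketch (hedged):
#
# >>> c = np.array(['aAaAaA', 'abBABba'])
# >>> np.char.find(c, 'bB')
# array([-1,  1])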
def index(a, sub, start=0, end=None):
"""
Like `find`, but raises `ValueError` when the substring is not found.
Calls `str.index` element-wise.
Parameters
----------
a : array_like of str or unicode
sub : str or unicode
start, end : int, optional
Returns
-------
out : ndarray
Output array of ints.
See also
--------
find, str.find
"""
return _vec_string(
a, integer, 'index', [sub, start] + _clean_args(end))
def isalnum(a):
"""
Returns true for each element if all characters in the string are
alphanumeric and there is at least one character, false otherwise.
Calls `str.isalnum` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like of str or unicode
Returns
-------
out : ndarray
Output array of bools
See also
--------
str.isalnum
"""
return _vec_string(a, bool_, 'isalnum')
def isalpha(a):
"""
Returns true for each element if all characters in the string are
alphabetic and there is at least one character, false otherwise.
Calls `str.isalpha` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like of str or unicode
Returns
-------
out : ndarray
Output array of bools
See also
--------
str.isalpha
"""
return _vec_string(a, bool_, 'isalpha')
def isdigit(a):
"""
Returns true for each element if all characters in the string are
digits and there is at least one character, false otherwise.
Calls `str.isdigit` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like of str or unicode
Returns
-------
out : ndarray
Output array of bools
See also
--------
str.isdigit
"""
return _vec_string(a, bool_, 'isdigit')
def islower(a):
"""
Returns true for each element if all cased characters in the
string are lowercase and there is at least one cased character,
false otherwise.
Calls `str.islower` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like of str or unicode
Returns
-------
out : ndarray
Output array of bools
See also
--------
str.islower
"""
return _vec_string(a, bool_, 'islower')
def isspace(a):
"""
Returns true for each element if there are only whitespace
characters in the string and there is at least one character,
false otherwise.
Calls `str.isspace` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like of str or unicode
Returns
-------
out : ndarray
Output array of bools
See also
--------
str.isspace
"""
return _vec_string(a, bool_, 'isspace')
def istitle(a):
"""
Returns true for each element if the element is a titlecased
string and there is at least one character, false otherwise.
Call `str.istitle` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like of str or unicode
Returns
-------
out : ndarray
Output array of bools
See also
--------
str.istitle
"""
return _vec_string(a, bool_, 'istitle')
def isupper(a):
"""
Returns true for each element if all cased characters in the
string are uppercase and there is at least one character, false
otherwise.
Call `str.isupper` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like of str or unicode
Returns
-------
out : ndarray
Output array of bools
See also
--------
str.isupper
"""
return _vec_string(a, bool_, 'isupper')
def join(sep, seq):
"""
Return a string which is the concatenation of the strings in the
sequence `seq`.
Calls `str.join` element-wise.
Parameters
----------
sep : array_like of str or unicode
seq : array_like of str or unicode
Returns
-------
out : ndarray
Output array of str or unicode, depending on input types
See also
--------
str.join
"""
return _to_string_or_unicode_array(
_vec_string(sep, object_, 'join', (seq,)))
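# Usage sketch (hedged; `sep` and `seq` broadcast against each other):
#
# >>> np.char.join('-', 'osd')
# array('o-s-d',
#     dtype='|S5')
# >>> np.char.join(['-', '.'], ['ghc', 'osd'])
# array(['g-h-c', 'o.s.d'],
#     dtype='|S5')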
def ljust(a, width, fillchar=' '):
"""
Return an array with the elements of `a` left-justified in a
string of length `width`.
Calls `str.ljust` element-wise.
Parameters
----------
a : array_like of str or unicode
width : int
The length of the resulting strings
fillchar : str or unicode, optional
The character to use for padding
Returns
-------
out : ndarray
Output array of str or unicode, depending on input type
See also
--------
str.ljust
"""
a_arr = numpy.asarray(a)
width_arr = numpy.asarray(width)
size = long(numpy.max(width_arr.flat))
if numpy.issubdtype(a_arr.dtype, numpy.string_):
fillchar = asbytes(fillchar)
return _vec_string(
a_arr, (a_arr.dtype.type, size), 'ljust', (width_arr, fillchar))
def lower(a):
"""
Return an array with the elements converted to lowercase.
Call `str.lower` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like, {str, unicode}
Input array.
Returns
-------
out : ndarray, {str, unicode}
Output array of str or unicode, depending on input type
See also
--------
str.lower
Examples
--------
>>> c = np.array(['A1B C', '1BCA', 'BCA1']); c
array(['A1B C', '1BCA', 'BCA1'],
dtype='|S5')
>>> np.char.lower(c)
array(['a1b c', '1bca', 'bca1'],
dtype='|S5')
"""
a_arr = numpy.asarray(a)
return _vec_string(a_arr, a_arr.dtype, 'lower')
def lstrip(a, chars=None):
"""
For each element in `a`, return a copy with the leading characters
removed.
Calls `str.lstrip` element-wise.
Parameters
----------
a : array-like, {str, unicode}
Input array.
chars : {str, unicode}, optional
The `chars` argument is a string specifying the set of
characters to be removed. If omitted or None, the `chars`
argument defaults to removing whitespace. The `chars` argument
is not a prefix; rather, all combinations of its values are
stripped.
Returns
-------
out : ndarray, {str, unicode}
Output array of str or unicode, depending on input type
See also
--------
str.lstrip
Examples
--------
>>> c = np.array(['aAaAaA', ' aA ', 'abBABba'])
>>> c
array(['aAaAaA', ' aA ', 'abBABba'],
dtype='|S7')
Note: the 'a' in c[1] is not stripped, because it is preceded by leading whitespace.
>>> np.char.lstrip(c, 'a')
array(['AaAaA', ' aA ', 'bBABba'],
dtype='|S7')
>>> np.char.lstrip(c, 'A') # leaves c unchanged
array(['aAaAaA', ' aA ', 'abBABba'],
dtype='|S7')
>>> (np.char.lstrip(c, ' ') == np.char.lstrip(c, '')).all()
... # XXX: is this a regression? this line now returns False
... # np.char.lstrip(c,'') does not modify c at all.
True
>>> (np.char.lstrip(c, ' ') == np.char.lstrip(c, None)).all()
True
"""
a_arr = numpy.asarray(a)
return _vec_string(a_arr, a_arr.dtype, 'lstrip', (chars,))
def partition(a, sep):
"""
Partition each element in `a` around `sep`.
Calls `str.partition` element-wise.
For each element in `a`, split the element as the first
occurrence of `sep`, and return 3 strings containing the part
before the separator, the separator itself, and the part after
the separator. If the separator is not found, return 3 strings
containing the string itself, followed by two empty strings.
Parameters
----------
a : array_like, {str, unicode}
Input array
sep : {str, unicode}
Separator to split each string element in `a`.
Returns
-------
out : ndarray, {str, unicode}
Output array of str or unicode, depending on input type.
The output array will have an extra dimension with 3
elements per input element.
See also
--------
str.partition
"""
return _to_string_or_unicode_array(
_vec_string(a, object_, 'partition', (sep,)))
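# Usage sketch (hedged; note the extra trailing dimension of size 3):
#
# >>> np.char.partition(np.array(['a-b', 'cd']), '-')
# array([['a', '-', 'b'],
#        ['cd', '', '']],
#     dtype='|S2')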
def replace(a, old, new, count=None):
"""
For each element in `a`, return a copy of the string with all
occurrences of substring `old` replaced by `new`.
Calls `str.replace` element-wise.
Parameters
----------
a : array-like of str or unicode
old, new : str or unicode
count : int, optional
If the optional argument `count` is given, only the first
`count` occurrences are replaced.
Returns
-------
out : ndarray
Output array of str or unicode, depending on input type
See also
--------
str.replace
"""
return _to_string_or_unicode_array(
_vec_string(
a, object_, 'replace', [old, new] + _clean_args(count)))
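# Usage sketch (hedged):
#
# >>> c = np.array(['aAaAaA'])
# >>> np.char.replace(c, 'A', 'x')
# array(['axaxax'],
#     dtype='|S6')
# >>> np.char.replace(c, 'A', 'x', count=1)
# array(['axaAaA'],
#     dtype='|S6')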
def rfind(a, sub, start=0, end=None):
"""
For each element in `a`, return the highest index in the string
where substring `sub` is found, such that `sub` is contained
within [`start`, `end`].
Calls `str.rfind` element-wise.
Parameters
----------
a : array-like of str or unicode
sub : str or unicode
start, end : int, optional
Optional arguments `start` and `end` are interpreted as in
slice notation.
Returns
-------
out : ndarray
Output array of ints. Return -1 on failure.
See also
--------
str.rfind
"""
return _vec_string(
a, integer, 'rfind', [sub, start] + _clean_args(end))
def rindex(a, sub, start=0, end=None):
"""
Like `rfind`, but raises `ValueError` when the substring `sub` is
not found.
Calls `str.rindex` element-wise.
Parameters
----------
a : array-like of str or unicode
sub : str or unicode
start, end : int, optional
Returns
-------
out : ndarray
Output array of ints.
See also
--------
rfind, str.rindex
"""
return _vec_string(
a, integer, 'rindex', [sub, start] + _clean_args(end))
def rjust(a, width, fillchar=' '):
"""
Return an array with the elements of `a` right-justified in a
string of length `width`.
Calls `str.rjust` element-wise.
Parameters
----------
a : array_like of str or unicode
width : int
The length of the resulting strings
fillchar : str or unicode, optional
The character to use for padding
Returns
-------
out : ndarray
Output array of str or unicode, depending on input type
See also
--------
str.rjust
"""
a_arr = numpy.asarray(a)
width_arr = numpy.asarray(width)
size = long(numpy.max(width_arr.flat))
if numpy.issubdtype(a_arr.dtype, numpy.string_):
fillchar = asbytes(fillchar)
return _vec_string(
a_arr, (a_arr.dtype.type, size), 'rjust', (width_arr, fillchar))
def rpartition(a, sep):
"""
Partition (split) each element around the right-most separator.
Calls `str.rpartition` element-wise.
For each element in `a`, split the element as the last
occurrence of `sep`, and return 3 strings containing the part
before the separator, the separator itself, and the part after
the separator. If the separator is not found, return 3 strings
containing the string itself, followed by two empty strings.
Parameters
----------
a : array_like of str or unicode
Input array
sep : str or unicode
Right-most separator to split each element in array.
Returns
-------
out : ndarray
Output array of string or unicode, depending on input
type. The output array will have an extra dimension with
3 elements per input element.
See also
--------
str.rpartition
"""
return _to_string_or_unicode_array(
_vec_string(a, object_, 'rpartition', (sep,)))
def rsplit(a, sep=None, maxsplit=None):
"""
For each element in `a`, return a list of the words in the
string, using `sep` as the delimiter string.
Calls `str.rsplit` element-wise.
Except for splitting from the right, `rsplit`
behaves like `split`.
Parameters
----------
a : array_like of str or unicode
sep : str or unicode, optional
If `sep` is not specified or `None`, any whitespace string
is a separator.
maxsplit : int, optional
If `maxsplit` is given, at most `maxsplit` splits are done,
the rightmost ones.
Returns
-------
out : ndarray
Array of list objects
See also
--------
str.rsplit, split
"""
# This will return an array of lists of different sizes, so we
# leave it as an object array
return _vec_string(
a, object_, 'rsplit', [sep] + _clean_args(maxsplit))
def rstrip(a, chars=None):
"""
For each element in `a`, return a copy with the trailing
characters removed.
Calls `str.rstrip` element-wise.
Parameters
----------
a : array-like of str or unicode
chars : str or unicode, optional
The `chars` argument is a string specifying the set of
characters to be removed. If omitted or None, the `chars`
argument defaults to removing whitespace. The `chars` argument
is not a suffix; rather, all combinations of its values are
stripped.
Returns
-------
out : ndarray
Output array of str or unicode, depending on input type
See also
--------
str.rstrip
Examples
--------
>>> c = np.array(['aAaAaA', 'abBABba'], dtype='S7'); c
array(['aAaAaA', 'abBABba'],
dtype='|S7')
>>> np.char.rstrip(c, 'a')
array(['aAaAaA', 'abBABb'],
dtype='|S7')
>>> np.char.rstrip(c, 'A')
array(['aAaAa', 'abBABba'],
dtype='|S7')
"""
a_arr = numpy.asarray(a)
return _vec_string(a_arr, a_arr.dtype, 'rstrip', (chars,))
def split(a, sep=None, maxsplit=None):
"""
For each element in `a`, return a list of the words in the
string, using `sep` as the delimiter string.
Calls `str.split` element-wise.
Parameters
----------
a : array_like of str or unicode
sep : str or unicode, optional
If `sep` is not specified or `None`, any whitespace string is a
separator.
maxsplit : int, optional
If `maxsplit` is given, at most `maxsplit` splits are done.
Returns
-------
out : ndarray
Array of list objects
See also
--------
str.split, rsplit
"""
# This will return an array of lists of different sizes, so we
# leave it as an object array
return _vec_string(
a, object_, 'split', [sep] + _clean_args(maxsplit))
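# Usage sketch (hedged; the result is an object array whose elements are
# Python lists, so lengths may differ per element):
#
# >>> np.char.split(np.array(['a b c', 'd e']))
# array([['a', 'b', 'c'], ['d', 'e']], dtype=object)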
def splitlines(a, keepends=None):
"""
For each element in `a`, return a list of the lines in the
element, breaking at line boundaries.
Calls `str.splitlines` element-wise.
Parameters
----------
a : array_like of str or unicode
keepends : bool, optional
Line breaks are not included in the resulting list unless
keepends is given and true.
Returns
-------
out : ndarray
Array of list objects
See also
--------
str.splitlines
"""
return _vec_string(
a, object_, 'splitlines', _clean_args(keepends))
def startswith(a, prefix, start=0, end=None):
"""
Returns a boolean array which is `True` where the string element
in `a` starts with `prefix`, otherwise `False`.
Calls `str.startswith` element-wise.
Parameters
----------
a : array_like of str or unicode
prefix : str
start, end : int, optional
With optional `start`, test beginning at that position. With
optional `end`, stop comparing at that position.
Returns
-------
out : ndarray
Array of booleans
See also
--------
str.startswith
"""
return _vec_string(
a, bool_, 'startswith', [prefix, start] + _clean_args(end))
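# Usage sketch (hedged):
#
# >>> s = np.array(['foo', 'bar'])
# >>> np.char.startswith(s, 'fo')
# array([ True, False], dtype=bool)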
def strip(a, chars=None):
"""
For each element in `a`, return a copy with the leading and
trailing characters removed.
Calls `str.strip` element-wise.
Parameters
----------
a : array-like of str or unicode
chars : str or unicode, optional
The `chars` argument is a string specifying the set of
characters to be removed. If omitted or None, the `chars`
argument defaults to removing whitespace. The `chars` argument
is not a prefix or suffix; rather, all combinations of its
values are stripped.
Returns
-------
out : ndarray
Output array of str or unicode, depending on input type
See also
--------
str.strip
Examples
--------
>>> c = np.array(['aAaAaA', ' aA ', 'abBABba'])
>>> c
array(['aAaAaA', ' aA ', 'abBABba'],
dtype='|S7')
>>> np.char.strip(c)
array(['aAaAaA', 'aA', 'abBABba'],
dtype='|S7')
>>> np.char.strip(c, 'a') # 'a' unstripped from c[1] because whitespace leads
array(['AaAaA', ' aA ', 'bBABb'],
dtype='|S7')
>>> np.char.strip(c, 'A') # 'A' unstripped from c[1] because (unprinted) ws trails
array(['aAaAa', ' aA ', 'abBABba'],
dtype='|S7')
"""
a_arr = numpy.asarray(a)
return _vec_string(a_arr, a_arr.dtype, 'strip', _clean_args(chars))
def swapcase(a):
"""
Return element-wise a copy of the string with
uppercase characters converted to lowercase and vice versa.
Calls `str.swapcase` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like, {str, unicode}
Input array.
Returns
-------
out : ndarray, {str, unicode}
Output array of str or unicode, depending on input type
See also
--------
str.swapcase
Examples
--------
>>> c=np.array(['a1B c','1b Ca','b Ca1','cA1b'],'S5'); c
array(['a1B c', '1b Ca', 'b Ca1', 'cA1b'],
dtype='|S5')
>>> np.char.swapcase(c)
array(['A1b C', '1B cA', 'B cA1', 'Ca1B'],
dtype='|S5')
"""
a_arr = numpy.asarray(a)
return _vec_string(a_arr, a_arr.dtype, 'swapcase')
def title(a):
"""
Return element-wise title cased version of string or unicode.
Title case words start with uppercase characters, all remaining cased
characters are lowercase.
Calls `str.title` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like, {str, unicode}
Input array.
Returns
-------
out : ndarray
Output array of str or unicode, depending on input type
See also
--------
str.title
Examples
--------
>>> c=np.array(['a1b c','1b ca','b ca1','ca1b'],'S5'); c
array(['a1b c', '1b ca', 'b ca1', 'ca1b'],
dtype='|S5')
>>> np.char.title(c)
array(['A1B C', '1B Ca', 'B Ca1', 'Ca1B'],
dtype='|S5')
"""
a_arr = numpy.asarray(a)
return _vec_string(a_arr, a_arr.dtype, 'title')
def translate(a, table, deletechars=None):
"""
For each element in `a`, return a copy of the string where all
characters occurring in the optional argument `deletechars` are
removed, and the remaining characters have been mapped through the
given translation table.
Calls `str.translate` element-wise.
Parameters
----------
a : array-like of str or unicode
table : str of length 256
deletechars : str
Returns
-------
out : ndarray
Output array of str or unicode, depending on input type
See also
--------
str.translate
"""
a_arr = numpy.asarray(a)
if issubclass(a_arr.dtype.type, unicode_):
return _vec_string(
a_arr, a_arr.dtype, 'translate', (table,))
else:
return _vec_string(
a_arr, a_arr.dtype, 'translate', [table] + _clean_args(deletechars))
def upper(a):
"""
Return an array with the elements converted to uppercase.
Calls `str.upper` element-wise.
For 8-bit strings, this method is locale-dependent.
Parameters
----------
a : array_like, {str, unicode}
Input array.
Returns
-------
out : ndarray, {str, unicode}
Output array of str or unicode, depending on input type
See also
--------
str.upper
Examples
--------
>>> c = np.array(['a1b c', '1bca', 'bca1']); c
array(['a1b c', '1bca', 'bca1'],
dtype='|S5')
>>> np.char.upper(c)
array(['A1B C', '1BCA', 'BCA1'],
dtype='|S5')
"""
a_arr = numpy.asarray(a)
return _vec_string(a_arr, a_arr.dtype, 'upper')
def zfill(a, width):
"""
Return the numeric string left-filled with zeros
Calls `str.zfill` element-wise.
Parameters
----------
a : array_like, {str, unicode}
Input array.
width : int
Width of string to left-fill elements in `a`.
Returns
-------
out : ndarray, {str, unicode}
Output array of str or unicode, depending on input type
See also
--------
str.zfill
"""
a_arr = numpy.asarray(a)
width_arr = numpy.asarray(width)
size = long(numpy.max(width_arr.flat))
return _vec_string(
a_arr, (a_arr.dtype.type, size), 'zfill', (width_arr,))
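# Usage sketch (hedged; a leading sign is preserved, as with `str.zfill`):
#
# >>> np.char.zfill(np.array(['5', '-5']), 3)
# array(['005', '-05'],
#     dtype='|S3')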
def isnumeric(a):
"""
For each element, return True if there are only numeric
characters in the element.
Calls `unicode.isnumeric` element-wise.
Numeric characters include digit characters, and all characters
that have the Unicode numeric value property, e.g. ``U+2155,
VULGAR FRACTION ONE FIFTH``.
Parameters
----------
a : array_like, unicode
Input array.
Returns
-------
out : ndarray, bool
Array of booleans of same shape as `a`.
See also
--------
unicode.isnumeric
"""
if _use_unicode(a) != unicode_:
raise TypeError("isnumeric is only available for Unicode strings and arrays")
return _vec_string(a, bool_, 'isnumeric')
def isdecimal(a):
"""
For each element, return True if there are only decimal
characters in the element.
Calls `unicode.isdecimal` element-wise.
Decimal characters include digit characters, and all characters
that can be used to form decimal-radix numbers,
e.g. ``U+0660, ARABIC-INDIC DIGIT ZERO``.
Parameters
----------
a : array_like, unicode
Input array.
Returns
-------
out : ndarray, bool
Array of booleans identical in shape to `a`.
See also
--------
unicode.isdecimal
"""
if _use_unicode(a) != unicode_:
raise TypeError("isnumeric is only available for Unicode strings and arrays")
return _vec_string(a, bool_, 'isdecimal')
class chararray(ndarray):
"""
chararray(shape, itemsize=1, unicode=False, buffer=None, offset=0,
strides=None, order=None)
Provides a convenient view on arrays of string and unicode values.
.. note::
The `chararray` class exists for backwards compatibility with
Numarray; it is not recommended for new development. Starting from numpy
1.4, if one needs arrays of strings, it is recommended to use arrays of
`dtype` `object_`, `string_` or `unicode_`, and use the free functions
in the `numpy.char` module for fast vectorized string operations.
Versus a regular Numpy array of type `str` or `unicode`, this
class adds the following functionality:
1) values automatically have whitespace removed from the end
when indexed
2) comparison operators automatically remove whitespace from the
end when comparing values
3) vectorized string operations are provided as methods
(e.g. `.endswith`) and infix operators (e.g. ``"+", "*", "%"``)
chararrays should be created using `numpy.char.array` or
`numpy.char.asarray`, rather than this constructor directly.
This constructor creates the array, using `buffer` (with `offset`
and `strides`) if it is not ``None``. If `buffer` is ``None``, then
constructs a new array with `strides` in "C order", unless both
``len(shape) >= 2`` and ``order='Fortran'``, in which case `strides`
is in "Fortran order".
Methods
-------
astype
argsort
copy
count
decode
dump
dumps
encode
endswith
expandtabs
fill
find
flatten
getfield
index
isalnum
isalpha
isdecimal
isdigit
islower
isnumeric
isspace
istitle
isupper
item
join
ljust
lower
lstrip
nonzero
put
ravel
repeat
replace
reshape
resize
rfind
rindex
rjust
rsplit
rstrip
searchsorted
setfield
setflags
sort
split
splitlines
squeeze
startswith
strip
swapaxes
swapcase
take
title
tofile
tolist
tostring
translate
transpose
upper
view
zfill
Parameters
----------
shape : tuple
Shape of the array.
itemsize : int, optional
Length of each array element, in number of characters. Default is 1.
unicode : bool, optional
Are the array elements of type unicode (True) or string (False).
Default is False.
buffer : int, optional
Memory address of the start of the array data. Default is None,
in which case a new array is created.
offset : int, optional
Byte offset of the array data in `buffer`.
Default is 0. Needs to be >=0.
strides : array_like of ints, optional
Strides for the array (see `ndarray.strides` for full description).
Default is None.
order : {'C', 'F'}, optional
The order in which the array data is stored in memory: 'C' ->
"row major" order (the default), 'F' -> "column major"
(Fortran) order.
Examples
--------
>>> charar = np.chararray((3, 3))
>>> charar[:] = 'a'
>>> charar
chararray([['a', 'a', 'a'],
['a', 'a', 'a'],
['a', 'a', 'a']],
dtype='|S1')
>>> charar = np.chararray(charar.shape, itemsize=5)
>>> charar[:] = 'abc'
>>> charar
chararray([['abc', 'abc', 'abc'],
['abc', 'abc', 'abc'],
['abc', 'abc', 'abc']],
dtype='|S5')
"""
def __new__(subtype, shape, itemsize=1, unicode=False, buffer=None,
offset=0, strides=None, order='C'):
global _globalvar
if unicode:
dtype = unicode_
else:
dtype = string_
# force itemsize to be a Python long, since using Numpy integer
# types results in itemsize.itemsize being used as the size of
# strings in the new array.
itemsize = long(itemsize)
if sys.version_info[0] >= 3 and isinstance(buffer, _unicode):
# On Py3, unicode objects do not have the buffer interface
filler = buffer
buffer = None
else:
filler = None
_globalvar = 1
if buffer is None:
self = ndarray.__new__(subtype, shape, (dtype, itemsize),
order=order)
else:
self = ndarray.__new__(subtype, shape, (dtype, itemsize),
buffer=buffer,
offset=offset, strides=strides,
order=order)
if filler is not None:
self[...] = filler
_globalvar = 0
return self
def __array_finalize__(self, obj):
# The 'b' dtype char is a special case because it is used when
# reconstructing the array.
if not _globalvar and self.dtype.char not in 'SUbc':
raise ValueError("Can only create a chararray from string data.")
def __getitem__(self, obj):
val = ndarray.__getitem__(self, obj)
if issubclass(val.dtype.type, character) and not _len(val) == 0:
temp = val.rstrip()
if _len(temp) == 0:
val = ''
else:
val = temp
return val
# IMPLEMENTATION NOTE: Most of the methods of this class are
# direct delegations to the free functions in this module.
# However, those that return an array of strings should instead
# return a chararray, so some extra wrapping is required.
def __eq__(self, other):
"""
Return (self == other) element-wise.
See also
--------
equal
"""
return equal(self, other)
def __ne__(self, other):
"""
Return (self != other) element-wise.
See also
--------
not_equal
"""
return not_equal(self, other)
def __ge__(self, other):
"""
Return (self >= other) element-wise.
See also
--------
greater_equal
"""
return greater_equal(self, other)
def __le__(self, other):
"""
Return (self <= other) element-wise.
See also
--------
less_equal
"""
return less_equal(self, other)
def __gt__(self, other):
"""
Return (self > other) element-wise.
See also
--------
greater
"""
return greater(self, other)
def __lt__(self, other):
"""
Return (self < other) element-wise.
See also
--------
less
"""
return less(self, other)
def __add__(self, other):
"""
Return (self + other), that is string concatenation,
element-wise for a pair of array_likes of str or unicode.
See also
--------
add
"""
return asarray(add(self, other))
def __radd__(self, other):
"""
Return (other + self), that is string concatenation,
element-wise for a pair of array_likes of `string_` or `unicode_`.
See also
--------
add
"""
return asarray(add(numpy.asarray(other), self))
def __mul__(self, i):
"""
Return (self * i), that is string multiple concatenation,
element-wise.
See also
--------
multiply
"""
return asarray(multiply(self, i))
def __rmul__(self, i):
"""
Return (i * self), that is string multiple concatenation,
element-wise.
See also
--------
multiply
"""
return asarray(multiply(self, i))
def __mod__(self, i):
"""
Return (self % i), that is pre-Python 2.6 string formatting
(interpolation), element-wise for a pair of array_likes of `string_`
or `unicode_`.
See also
--------
mod
"""
return asarray(mod(self, i))
def __rmod__(self, other):
return NotImplemented
def argsort(self, axis=-1, kind='quicksort', order=None):
"""
Return the indices that sort the array lexicographically.
For full documentation see `numpy.argsort`, for which this method is
in fact merely a "thin wrapper."
Examples
--------
>>> c = np.array(['a1b c', '1b ca', 'b ca1', 'Ca1b'], 'S5')
>>> c = c.view(np.chararray); c
chararray(['a1b c', '1b ca', 'b ca1', 'Ca1b'],
dtype='|S5')
>>> c[c.argsort()]
chararray(['1b ca', 'Ca1b', 'a1b c', 'b ca1'],
dtype='|S5')
"""
return self.__array__().argsort(axis, kind, order)
argsort.__doc__ = ndarray.argsort.__doc__
def capitalize(self):
"""
Return a copy of `self` with only the first character of each element
capitalized.
See also
--------
char.capitalize
"""
return asarray(capitalize(self))
def center(self, width, fillchar=' '):
"""
Return a copy of `self` with its elements centered in a
string of length `width`.
See also
--------
center
"""
return asarray(center(self, width, fillchar))
def count(self, sub, start=0, end=None):
"""
Returns an array with the number of non-overlapping occurrences of
substring `sub` in the range [`start`, `end`].
See also
--------
char.count
"""
return count(self, sub, start, end)
def decode(self, encoding=None, errors=None):
"""
Calls `str.decode` element-wise.
See also
--------
char.decode
"""
return decode(self, encoding, errors)
def encode(self, encoding=None, errors=None):
"""
Calls `str.encode` element-wise.
See also
--------
char.encode
"""
return encode(self, encoding, errors)
def endswith(self, suffix, start=0, end=None):
"""
Returns a boolean array which is `True` where the string element
in `self` ends with `suffix`, otherwise `False`.
See also
--------
char.endswith
"""
return endswith(self, suffix, start, end)
def expandtabs(self, tabsize=8):
"""
Return a copy of each string element where all tab characters are
replaced by one or more spaces.
See also
--------
char.expandtabs
"""
return asarray(expandtabs(self, tabsize))
def find(self, sub, start=0, end=None):
"""
For each element, return the lowest index in the string where
substring `sub` is found.
See also
--------
char.find
"""
return find(self, sub, start, end)
def index(self, sub, start=0, end=None):
"""
Like `find`, but raises `ValueError` when the substring is not found.
See also
--------
char.index
"""
return index(self, sub, start, end)
def isalnum(self):
"""
Returns true for each element if all characters in the string
are alphanumeric and there is at least one character, false
otherwise.
See also
--------
char.isalnum
"""
return isalnum(self)
def isalpha(self):
"""
Returns true for each element if all characters in the string
are alphabetic and there is at least one character, false
otherwise.
See also
--------
char.isalpha
"""
return isalpha(self)
def isdigit(self):
"""
Returns true for each element if all characters in the string are
digits and there is at least one character, false otherwise.
See also
--------
char.isdigit
"""
return isdigit(self)
def islower(self):
"""
Returns true for each element if all cased characters in the
string are lowercase and there is at least one cased character,
false otherwise.
See also
--------
char.islower
"""
return islower(self)
def isspace(self):
"""
Returns true for each element if there are only whitespace
characters in the string and there is at least one character,
false otherwise.
See also
--------
char.isspace
"""
return isspace(self)
def istitle(self):
"""
Returns true for each element if the element is a titlecased
string and there is at least one character, false otherwise.
See also
--------
char.istitle
"""
return istitle(self)
def isupper(self):
"""
Returns true for each element if all cased characters in the
string are uppercase and there is at least one character, false
otherwise.
See also
--------
char.isupper
"""
return isupper(self)
def join(self, seq):
"""
Return a string which is the concatenation of the strings in the
sequence `seq`.
See also
--------
char.join
"""
return join(self, seq)
def ljust(self, width, fillchar=' '):
"""
Return an array with the elements of `self` left-justified in a
string of length `width`.
See also
--------
char.ljust
"""
return asarray(ljust(self, width, fillchar))
def lower(self):
"""
Return an array with the elements of `self` converted to
lowercase.
See also
--------
char.lower
"""
return asarray(lower(self))
def lstrip(self, chars=None):
"""
For each element in `self`, return a copy with the leading characters
removed.
See also
--------
char.lstrip
"""
return asarray(lstrip(self, chars))
def partition(self, sep):
"""
Partition each element in `self` around `sep`.
See also
--------
partition
"""
return asarray(partition(self, sep))
def replace(self, old, new, count=None):
"""
For each element in `self`, return a copy of the string with all
occurrences of substring `old` replaced by `new`.
See also
--------
char.replace
"""
return asarray(replace(self, old, new, count))
def rfind(self, sub, start=0, end=None):
"""
For each element in `self`, return the highest index in the string
where substring `sub` is found, such that `sub` is contained
within [`start`, `end`].
See also
--------
char.rfind
"""
return rfind(self, sub, start, end)
def rindex(self, sub, start=0, end=None):
"""
Like `rfind`, but raises `ValueError` when the substring `sub` is
not found.
See also
--------
char.rindex
"""
return rindex(self, sub, start, end)
def rjust(self, width, fillchar=' '):
"""
Return an array with the elements of `self`
right-justified in a string of length `width`.
See also
--------
char.rjust
"""
return asarray(rjust(self, width, fillchar))
def rpartition(self, sep):
"""
Partition each element in `self` around `sep`.
See also
--------
rpartition
"""
return asarray(rpartition(self, sep))
def rsplit(self, sep=None, maxsplit=None):
"""
For each element in `self`, return a list of the words in
the string, using `sep` as the delimiter string.
See also
--------
char.rsplit
"""
return rsplit(self, sep, maxsplit)
def rstrip(self, chars=None):
"""
For each element in `self`, return a copy with the trailing
characters removed.
See also
--------
char.rstrip
"""
return asarray(rstrip(self, chars))
def split(self, sep=None, maxsplit=None):
"""
For each element in `self`, return a list of the words in the
string, using `sep` as the delimiter string.
See also
--------
char.split
"""
return split(self, sep, maxsplit)
def splitlines(self, keepends=None):
"""
For each element in `self`, return a list of the lines in the
element, breaking at line boundaries.
See also
--------
char.splitlines
"""
return splitlines(self, keepends)
def startswith(self, prefix, start=0, end=None):
"""
Returns a boolean array which is `True` where the string element
in `self` starts with `prefix`, otherwise `False`.
See also
--------
char.startswith
"""
return startswith(self, prefix, start, end)
def strip(self, chars=None):
"""
For each element in `self`, return a copy with the leading and
trailing characters removed.
See also
--------
char.strip
"""
return asarray(strip(self, chars))
def swapcase(self):
"""
For each element in `self`, return a copy of the string with
uppercase characters converted to lowercase and vice versa.
See also
--------
char.swapcase
"""
return asarray(swapcase(self))
def title(self):
"""
For each element in `self`, return a titlecased version of the
string: words start with uppercase characters, all remaining cased
characters are lowercase.
See also
--------
char.title
"""
return asarray(title(self))
def translate(self, table, deletechars=None):
"""
For each element in `self`, return a copy of the string where
all characters occurring in the optional argument
`deletechars` are removed, and the remaining characters have
been mapped through the given translation table.
See also
--------
char.translate
"""
return asarray(translate(self, table, deletechars))
def upper(self):
"""
Return an array with the elements of `self` converted to
uppercase.
See also
--------
char.upper
"""
return asarray(upper(self))
def zfill(self, width):
"""
Return the numeric string left-filled with zeros in a string of
length `width`.
See also
--------
char.zfill
"""
return asarray(zfill(self, width))
def isnumeric(self):
"""
For each element in `self`, return True if there are only
numeric characters in the element.
See also
--------
char.isnumeric
"""
return isnumeric(self)
def isdecimal(self):
"""
For each element in `self`, return True if there are only
decimal characters in the element.
See also
--------
char.isdecimal
"""
return isdecimal(self)
def array(obj, itemsize=None, copy=True, unicode=None, order=None):
"""
Create a `chararray`.
.. note::
This class is provided for numarray backward-compatibility.
New code (not concerned with numarray compatibility) should use
arrays of type `string_` or `unicode_` and use the free functions
in :mod:`numpy.char <numpy.core.defchararray>` for fast
vectorized string operations instead.
Versus a regular Numpy array of type `str` or `unicode`, this
class adds the following functionality:
1) values automatically have whitespace removed from the end
when indexed
2) comparison operators automatically remove whitespace from the
end when comparing values
3) vectorized string operations are provided as methods
(e.g. `str.endswith`) and infix operators (e.g. ``+, *, %``)
Parameters
----------
obj : array of str or unicode-like
itemsize : int, optional
`itemsize` is the number of characters per scalar in the
resulting array. If `itemsize` is None, and `obj` is an
object array or a Python list, the `itemsize` will be
automatically determined. If `itemsize` is provided and `obj`
is of type str or unicode, then the `obj` string will be
chunked into `itemsize` pieces.
copy : bool, optional
If true (default), then the object is copied. Otherwise, a copy
will only be made if __array__ returns a copy, if obj is a
nested sequence, or if a copy is needed to satisfy any of the other
requirements (`itemsize`, unicode, `order`, etc.).
unicode : bool, optional
When true, the resulting `chararray` can contain Unicode
characters, when false only 8-bit characters. If unicode is
`None` and `obj` is one of the following:
- a `chararray`,
- an ndarray of type `str` or `unicode`
- a Python str or unicode object,
then the unicode setting of the output array will be
automatically determined.
order : {'C', 'F', 'A'}, optional
Specify the order of the array. If order is 'C' (default), then the
array will be in C-contiguous order (last-index varies the
fastest). If order is 'F', then the returned array
will be in Fortran-contiguous order (first-index varies the
fastest). If order is 'A', then the returned array may
be in any order (either C-, Fortran-contiguous, or even
discontiguous).
"""
if isinstance(obj, (_bytes, _unicode)):
if unicode is None:
if isinstance(obj, _unicode):
unicode = True
else:
unicode = False
if itemsize is None:
itemsize = _len(obj)
shape = _len(obj) // itemsize
if unicode:
if sys.maxunicode == 0xffff:
# On a narrow Python build, the buffer for Unicode
# strings is UCS2, which doesn't match the buffer for
# Numpy Unicode types, which is ALWAYS UCS4.
# Therefore, we need to convert the buffer. On Python
# 2.6 and later, we can use the utf_32 codec. Earlier
# versions don't have that codec, so we convert to a
# numerical array that matches the input buffer, and
# then use Numpy to convert it to UCS4. All of this
# should happen in native endianness.
if sys.hexversion >= 0x2060000:
obj = obj.encode('utf_32')
else:
if isinstance(obj, str):
ascii = numpy.frombuffer(obj, 'u1')
ucs4 = numpy.array(ascii, 'u4')
obj = ucs4.data
else:
ucs2 = numpy.frombuffer(obj, 'u2')
ucs4 = numpy.array(ucs2, 'u4')
obj = ucs4.data
else:
obj = _unicode(obj)
else:
# Let the default Unicode -> string encoding (if any) take
# precedence.
obj = _bytes(obj)
return chararray(shape, itemsize=itemsize, unicode=unicode,
buffer=obj, order=order)
if isinstance(obj, (list, tuple)):
obj = numpy.asarray(obj)
if isinstance(obj, ndarray) and issubclass(obj.dtype.type, character):
# If we just have a vanilla chararray, create a chararray
# view around it.
if not isinstance(obj, chararray):
obj = obj.view(chararray)
if itemsize is None:
itemsize = obj.itemsize
# itemsize is in 8-bit chars, so for Unicode, we need
# to divide by the size of a single Unicode character,
# which for Numpy is always 4
if issubclass(obj.dtype.type, unicode_):
itemsize //= 4
if unicode is None:
if issubclass(obj.dtype.type, unicode_):
unicode = True
else:
unicode = False
if unicode:
dtype = unicode_
else:
dtype = string_
if order is not None:
obj = numpy.asarray(obj, order=order)
if (copy
or (itemsize != obj.itemsize)
or (not unicode and isinstance(obj, unicode_))
or (unicode and isinstance(obj, string_))):
obj = obj.astype((dtype, long(itemsize)))
return obj
if isinstance(obj, ndarray) and issubclass(obj.dtype.type, object):
if itemsize is None:
# Since no itemsize was specified, convert the input array to
# a list so the ndarray constructor will automatically
# determine the itemsize for us.
obj = obj.tolist()
# Fall through to the default case
if unicode:
dtype = unicode_
else:
dtype = string_
if itemsize is None:
val = narray(obj, dtype=dtype, order=order, subok=True)
else:
val = narray(obj, dtype=(dtype, itemsize), order=order, subok=True)
return val.view(chararray)
def asarray(obj, itemsize=None, unicode=None, order=None):
"""
Convert the input to a `chararray`, copying the data only if
necessary.
Versus a regular Numpy array of type `str` or `unicode`, this
class adds the following functionality:
1) values automatically have whitespace removed from the end
when indexed
2) comparison operators automatically remove whitespace from the
end when comparing values
3) vectorized string operations are provided as methods
(e.g. `str.endswith`) and infix operators (e.g. +, *, %)
Parameters
----------
obj : array of str or unicode-like
itemsize : int, optional
`itemsize` is the number of characters per scalar in the
resulting array. If `itemsize` is None, and `obj` is an
object array or a Python list, the `itemsize` will be
automatically determined. If `itemsize` is provided and `obj`
is of type str or unicode, then the `obj` string will be
chunked into `itemsize` pieces.
unicode : bool, optional
When true, the resulting `chararray` can contain Unicode
characters, when false only 8-bit characters. If unicode is
`None` and `obj` is one of the following:
- a `chararray`,
- an ndarray of type `str` or `unicode`
- a Python str or unicode object,
then the unicode setting of the output array will be
automatically determined.
order : {'C', 'F'}, optional
Specify the order of the array. If order is 'C' (default), then the
array will be in C-contiguous order (last-index varies the
fastest). If order is 'F', then the returned array
will be in Fortran-contiguous order (first-index varies the
fastest).
"""
return array(obj, itemsize, copy=False,
unicode=unicode, order=order)
| bsd-3-clause | -4,129,378,087,275,058,700 | 24.281727 | 86 | 0.573471 | false |
sectalks/sectalks.github.io | ctf101/02-cryptanalysis/brute-force-caesar-cipher.py | 1 | 1040 | """SecTalks CTF101 - 02 - Cryptoanalysis
Brute-force Caesar cipher
"""
cipher = 'FVBJHUIYLHRJVKLHUKFVBHYLWYVBKVMPA'
#cipher = 'FRPGNYXFZRRGHCFNERNOBHGCNEGVPVCNGVATVAVGFRPHEVGLQVFPHFFVBAFYRNEAVATSEBZBGUREFNAQVZCEBIVATCEBOYRZFBYIVATFXVYYF'
LETTERS = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
for key in range(len(LETTERS)):
plaintext = ''
for symbol in cipher: # Loop through each character of cipher
if symbol in LETTERS:
position = LETTERS.index(symbol) # Find symbol in alphabet
position = position - key # Shift back by key positions in the alphabet
# If position is negative, wrap around by adding 26 (the alphabet length)
if position < 0:
position = position + len(LETTERS)
# Decryption: append the shifted letter to the plaintext
plaintext = plaintext + LETTERS[position]
else:
# Symbol not in the alphabet; append it to the plaintext unchanged
plaintext = plaintext + symbol
print('Key #{0}: {1}'.format(key, plaintext))
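# Expected output sketch: for the first sample cipher above, the line for
# key 7 should read YOUCANBREAKCODEANDYOUAREPROUDOFIT; every other key
# yields gibberish, which is how the correct shift is spotted.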
| mit | -5,942,980,410,473,219,000 | 36.142857 | 121 | 0.675 | false |
cigalsace/signalement | lib/externals/openlayers-2.11/tools/mergejs.py | 4 | 8079 | #!/usr/bin/env python
#
# Merge multiple JavaScript source code files into one.
#
# Usage:
# This script requires source files to have dependencies specified in them.
#
# Dependencies are specified with a comment of the form:
#
# // @requires <file path>
#
# e.g.
#
# // @requires Geo/DataSource.js
#
# This script should be executed like so:
#
# mergejs.py <output.js> <directory> [...]
#
# e.g.
#
# mergejs.py openlayers.js Geo/ CrossBrowser/
#
# This example will cause the script to walk the `Geo` and
# `CrossBrowser` directories--and subdirectories thereof--and import
# all `*.js` files encountered. The dependency declarations will be extracted
# and then the source code from imported files will be output to
# a file named `openlayers.js` in an order which fulfils the dependencies
# specified.
#
#
# Note: This is a very rough initial version of this code.
#
# -- Copyright 2005-2011 OpenLayers contributors / OpenLayers project --
#
# TODO: Allow files to be excluded. e.g. `Crossbrowser/DebugMode.js`?
# TODO: Report error when dependency can not be found rather than KeyError.
import re
import os
import sys
SUFFIX_JAVASCRIPT = ".js"
RE_REQUIRE = "@requires?:? (.*)\n" # TODO: Ensure in comment?
class MissingImport(Exception):
"""Exception raised when a listed import is not found in the lib."""
class SourceFile:
"""
Represents a Javascript source code file.
"""
def __init__(self, filepath, source):
"""
"""
self.filepath = filepath
self.source = source
self.requiredBy = []
def _getRequirements(self):
"""
Extracts the dependencies specified in the source code and returns
a list of them.
"""
# TODO: Cache?
return re.findall(RE_REQUIRE, self.source)
requires = property(fget=_getRequirements, doc="")
def usage(filename):
"""
Displays a usage message.
"""
print "%s [-c <config file>] <output.js> <directory> [...]" % filename
class Config:
"""
Represents a parsed configuration file.
A configuration file should be of the following form:
[first]
3rd/prototype.js
core/application.js
core/params.js
# A comment
[last]
core/api.js # Another comment
[exclude]
3rd/logger.js
exclude/this/dir
All headings are required.
The files listed in the `first` section will be forced to load
*before* all other files (in the order listed). The files in `last`
section will be forced to load *after* all the other files (in the
order listed).
The files list in the `exclude` section will not be imported.
Any text appearing after a # symbol indicates a comment.
"""
def __init__(self, filename):
"""
Parses the content of the named file and stores the values.
"""
lines = [re.sub("#.*?$", "", line).strip() # Assumes end-of-line character is present
for line in open(filename)
if line.strip() and not line.strip().startswith("#")] # Skip blank lines and comments
self.forceFirst = lines[lines.index("[first]") + 1:lines.index("[last]")]
self.forceLast = lines[lines.index("[last]") + 1:lines.index("[include]")]
self.include = lines[lines.index("[include]") + 1:lines.index("[exclude]")]
self.exclude = lines[lines.index("[exclude]") + 1:]
def undesired(filepath, excludes):
# exclude file if listed
exclude = filepath in excludes
if not exclude:
# check if directory is listed
for excludepath in excludes:
if not excludepath.endswith("/"):
excludepath += "/"
if filepath.startswith(excludepath):
exclude = True
break
return exclude
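# Behaviour sketch (hedged): an exact listing or any listed directory
# prefix excludes a file, e.g.
#
# >>> undesired("3rd/logger.js", ["3rd/logger.js"])
# True
# >>> undesired("exclude/this/dir/file.js", ["exclude/this/dir"])
# True
# >>> undesired("core/api.js", ["3rd/logger.js"])
# False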
def run (sourceDirectory, outputFilename = None, configFile = None):
cfg = None
if configFile:
cfg = Config(configFile)
allFiles = []
## Find all the Javascript source files
for root, dirs, files in os.walk(sourceDirectory):
for filename in files:
if filename.endswith(SUFFIX_JAVASCRIPT) and not filename.startswith("."):
filepath = os.path.join(root, filename)[len(sourceDirectory)+1:]
filepath = filepath.replace("\\", "/")
if cfg and cfg.include:
if filepath in cfg.include or filepath in cfg.forceFirst:
allFiles.append(filepath)
elif (not cfg) or (not undesired(filepath, cfg.exclude)):
allFiles.append(filepath)
## Header inserted at the start of each file in the output
HEADER = "/* " + "=" * 70 + "\n %s\n" + " " + "=" * 70 + " */\n\n"
files = {}
## Import file source code
## TODO: Do import when we walk the directories above?
for filepath in allFiles:
print "Importing: %s" % filepath
fullpath = os.path.join(sourceDirectory, filepath).strip()
content = open(fullpath, "U").read() # TODO: Ensure end of line @ EOF?
files[filepath] = SourceFile(filepath, content) # TODO: Chop path?
print
from toposort import toposort
complete = False
resolution_pass = 1
while not complete:
complete = True
## Resolve the dependencies
print "Resolution pass %s... " % resolution_pass
resolution_pass += 1
for filepath, info in files.items():
for path in info.requires:
if not files.has_key(path):
complete = False
fullpath = os.path.join(sourceDirectory, path).strip()
if os.path.exists(fullpath):
print "Importing: %s" % path
content = open(fullpath, "U").read() # TODO: Ensure end of line @ EOF?
files[path] = SourceFile(path, content) # TODO: Chop path?
else:
raise MissingImport("File '%s' not found (required by '%s')." % (path, filepath))
# create dictionary of dependencies
dependencies = {}
for filepath, info in files.items():
dependencies[filepath] = info.requires
print "Sorting..."
order = toposort(dependencies) #[x for x in toposort(dependencies)]
## Move forced first and last files to the required position
if cfg:
print "Re-ordering files..."
order = cfg.forceFirst + [item
for item in order
if ((item not in cfg.forceFirst) and
(item not in cfg.forceLast))] + cfg.forceLast
print
## Output the files in the determined order
result = []
for fp in order:
f = files[fp]
print "Exporting: ", f.filepath
result.append(HEADER % f.filepath)
source = f.source
result.append(source)
if not source.endswith("\n"):
result.append("\n")
print "\nTotal files merged: %d " % len(files)
if outputFilename:
print "\nGenerating: %s" % (outputFilename)
open(outputFilename, "w").write("".join(result))
return "".join(result)
if __name__ == "__main__":
import getopt
options, args = getopt.getopt(sys.argv[1:], "-c:")
try:
outputFilename = args[0]
except IndexError:
usage(sys.argv[0])
raise SystemExit
else:
sourceDirectory = args[1]
if not sourceDirectory:
usage(sys.argv[0])
raise SystemExit
configFile = None
if options and options[0][0] == "-c":
configFile = options[0][1]
print "Parsing configuration file: %s" % filename
run( sourceDirectory, outputFilename, configFile )
| gpl-3.0 | 2,651,481,233,804,135,000 | 29.19305 | 105 | 0.574205 | false |
zofuthan/zulip | zproject/local_settings_template.py | 4 | 9015 | # Settings for Zulip Voyager
### MANDATORY SETTINGS
#
# These settings MUST be set in production. In a development environment,
# sensible default values will be used.
# The user-accessible Zulip hostname for this installation, e.g.
# zulip.example.com
EXTERNAL_HOST = 'zulip.example.com'
# The email address for the person or team who maintain the Zulip
# Voyager installation. Will also get support emails. (e.g. [email protected])
ZULIP_ADMINISTRATOR = '[email protected]'
# The domain for your organization, e.g. example.com
ADMIN_DOMAIN = 'example.com'
# Enable at least one of the following authentication backends.
AUTHENTICATION_BACKENDS = (
# 'zproject.backends.EmailAuthBackend', # Email and password
# 'zproject.backends.ZulipRemoteUserBackend', # Local SSO
# 'zproject.backends.GoogleMobileOauth2Backend', # Google Apps, setup below
)
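# For example, a password-only deployment would uncomment just the first
# backend (sketch; adjust to your authentication setup):
#
# AUTHENTICATION_BACKENDS = (
#     'zproject.backends.EmailAuthBackend',
# )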
# Google Oauth requires a bit of configuration; you will need to go to
# do the following:
#
# (1) Visit https://console.developers.google.com, setup an
# Oauth2 client ID that allows redirects to
# e.g. https://zulip.example.com/accounts/login/google/done/.
#
# (2) Then click into the APIs and Auth section (in the sidebar on the
# left side of the page), APIs, then under "Social APIs" click on
# "Google+ API" and click the button to enable the API.
#
# (3) put your client secret as "google_oauth2_client_secret" in
# zulip-secrets.conf, and your client ID right here:
# GOOGLE_OAUTH2_CLIENT_ID=<your client ID from Google>
# If you are using the ZulipRemoteUserBackend authentication backend,
# set this to your domain (e.g. if REMOTE_USER is "username" and the
# corresponding email address is "[email protected]", set
# SSO_APPEND_DOMAIN = "example.com")
SSO_APPEND_DOMAIN = None
# Configure the outgoing SMTP server below. For outgoing email
# via a GMail SMTP server, EMAIL_USE_TLS must be True and the
# outgoing port must be 587. The EMAIL_HOST is prepopulated
# for GMail servers, change it for other hosts, or leave it unset
# or empty to skip sending email.
#
# A common problem you may encounter when trying to get this working
# is many hosting providers block outgoing SMTP traffic.
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = ''
# If you're using password auth, you will need to put the password in
# /etc/zulip/zulip-secrets.conf as email_password.
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# The email From address to be used for automatically generated emails
DEFAULT_FROM_EMAIL = "Zulip <[email protected]>"
# The noreply address to be used as Reply-To for certain generated emails.
# Messages sent to this address should not be delivered anywhere.
NOREPLY_EMAIL_ADDRESS = "[email protected]"
# A list of strings representing the host/domain names that this
# Django site can serve. You should reset it to be a list of
# domains/IP addresses for your site. This is a security measure to
# prevent an attacker from poisoning caches and triggering password
# reset emails with links to malicious hosts by submitting requests
# with a fake HTTP Host header.
ALLOWED_HOSTS = ['*']
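# For a locked-down deployment you would typically list the exact hosts
# instead (sketch; substitute your own hostname):
#
# ALLOWED_HOSTS = ['zulip.example.com', 'localhost']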
### OPTIONAL SETTINGS
# Controls whether session cookies expire when the browser closes
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
# Session cookie expiry in seconds after the last page load
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2 # 2 weeks
# Controls whether or not there is a feedback button in the UI.
ENABLE_FEEDBACK = False
# By default, the feedback button will submit feedback to the Zulip
# developers. If you set FEEDBACK_EMAIL to be an email address
# (e.g. ZULIP_ADMINISTRATOR), feedback sent by your users will instead
# be sent to that email address.
FEEDBACK_EMAIL = ZULIP_ADMINISTRATOR
# Controls whether or not error reports are sent to Zulip. Error
# reports are used to improve the quality of the product and do not
# include message contents; please contact Zulip support with any
# questions.
ERROR_REPORTING = True
# Controls whether or not Zulip will provide inline image preview when
# a link to an image is referenced in a message.
INLINE_IMAGE_PREVIEW = True
# By default, files uploaded by users and user avatars are stored
# directly on the Zulip server. If file storage in Amazon S3 is
# desired, you can configure that by setting s3_key and s3_secret_key
# in /etc/zulip/zulip-secrets.conf to be the S3 access and secret keys
# that you want to use, and setting the S3_AUTH_UPLOADS_BUCKET and
# S3_AVATAR_BUCKET to be the S3 buckets you've created to store file
# uploads and user avatars, respectively.
LOCAL_UPLOADS_DIR = "/home/zulip/uploads"
# Controls whether name changes are completely disabled for this installation
# This is useful in settings where you're syncing names from an integrated LDAP/Active Directory
NAME_CHANGES_DISABLED = False
# Controls whether users who have not uploaded an avatar will receive an avatar
# from gravatar.com.
ENABLE_GRAVATAR = True
# To override the default avatar image if ENABLE_GRAVATAR is False, place your
# custom default avatar image at /home/zulip/local-static/default-avatar.png
# and uncomment the following line.
#DEFAULT_AVATAR_URI = '/local-static/default-avatar.png'
### TWITTER INTEGRATION
# Zulip supports showing inline Tweet previews when a tweet is linked
# to in a message. To support this, Zulip must have access to the
# Twitter API via OAuth. To obtain the various access tokens needed
# below, you must register a new application under your Twitter
# account by doing the following:
#
# 1. Log in to http://dev.twitter.com.
# 2. In the menu under your username, click My Applications. From this page, create a new application.
# 3. Click on the application you created and click "create my access token".
# 4. Fill in the values for twitter_consumer_key, twitter_consumer_secret, twitter_access_token_key,
# and twitter_access_token_secret in /etc/zulip/zulip-secrets.conf.
### EMAIL GATEWAY INTEGRATION
# The email gateway provides, for each stream, an email address that
# you can send email to in order to have the email's content be posted
# to that stream. Emails received at the per-stream email address
# will be converted into a Zulip message.
# There are two ways to make use of local email mirroring:
# 1. Local delivery: An MTA runs locally and passes mail directly to Zulip
# 2. Polling: Checks an IMAP inbox every minute for new messages.
# A Puppet manifest for local delivery via Postfix is available in
# puppet/zulip/manifests/postfix_localmail.pp. To use the manifest, add it to
# puppet_classes in /etc/zulip/zulip.conf. This manifest assumes you'll receive
# mail addressed to the hostname of your Zulip server.
#
# Users of other mail servers will need to configure it to pass mail to the
# email mirror; see `python manage.py email-mirror --help` for details.
# The email address pattern to use for auto-generated stream emails
# The %s will be replaced with a unique token, and the resulting email
# must be delivered to the EMAIL_GATEWAY_IMAP_FOLDER of the
# EMAIL_GATEWAY_LOGIN account below, or piped in to the email-mirror management
# command as indicated above.
#
# Example: zulip+%[email protected]
EMAIL_GATEWAY_PATTERN = ""
# The following options are relevant if you're using mail polling.
#
# A sample cron job for mail polling is available at puppet/zulip/files/cron.d/email-mirror
#
# The Zulip username of the bot that the email pattern should post as.
# Example: [email protected]
EMAIL_GATEWAY_BOT = ""
# Configuration of the email mirror mailbox
# The IMAP login and password
EMAIL_GATEWAY_LOGIN = ""
EMAIL_GATEWAY_PASSWORD = ""
# The IMAP server & port to connect to
EMAIL_GATEWAY_IMAP_SERVER = ""
EMAIL_GATEWAY_IMAP_PORT = 993
# The IMAP folder name to check for emails. All emails sent to EMAIL_GATEWAY_PATTERN above
# must be delivered to this folder
EMAIL_GATEWAY_IMAP_FOLDER = "INBOX"
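# Illustrative polling configuration (all values below are hypothetical
# placeholders, not defaults):
# EMAIL_GATEWAY_PATTERN = "zulip+%[email protected]"
# EMAIL_GATEWAY_BOT = "[email protected]"
# EMAIL_GATEWAY_LOGIN = "[email protected]"
# EMAIL_GATEWAY_IMAP_SERVER = "imap.example.com"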
### LDAP integration configuration
# Zulip supports retrieving information about users via LDAP, and optionally
# using LDAP as an authentication mechanism.
import ldap
from django_auth_ldap.config import LDAPSearch, GroupOfNamesType
# URI of your LDAP server. If set, LDAP is used to prepopulate a user's name in
# Zulip. Example: "ldaps://ldap.example.com"
AUTH_LDAP_SERVER_URI = ""
# This DN and password will be used to bind to your server. If unset, anonymous
# binds are performed.
AUTH_LDAP_BIND_DN = ""
AUTH_LDAP_BIND_PASSWORD = ""
# Specify the search base and the property to filter on that corresponds to the
# username.
AUTH_LDAP_USER_SEARCH = LDAPSearch("ou=users,dc=example,dc=com",
ldap.SCOPE_SUBTREE, "(uid=%(user)s)")
# If the value of a user's "uid" (or similar) property is not their email
# address, specify the domain to append here.
LDAP_APPEND_DOMAIN = ADMIN_DOMAIN
# This map defines how to populate attributes of a Zulip user from LDAP.
AUTH_LDAP_USER_ATTR_MAP = {
# Populate the Django user's name from the LDAP directory.
"full_name": "cn",
}
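# Illustrative example (hypothetical values) of a filled-in LDAP setup:
# AUTH_LDAP_SERVER_URI = "ldaps://ldap.example.com"
# AUTH_LDAP_BIND_DN = "cn=readonly,dc=example,dc=com"
# AUTH_LDAP_USER_SEARCH = LDAPSearch("ou=users,dc=example,dc=com",
#                                    ldap.SCOPE_SUBTREE, "(uid=%(user)s)")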
CAMO_URI = ''
| apache-2.0 | -2,120,378,649,518,153,500 | 40.353211 | 102 | 0.75563 | false |
satishgoda/programmingusingpython | sandbox/inheritance/areaspaces.py | 1 | 1540 |
class Space(object):
def __init__(self):
self.tag = self.__class__.__name__
def __repr__(self):
return "{0}: {1}".format(self.__class__.__bases__[0].__name__, self.tag)
class View3D(Space):
def draw(self, area):
print("Drawing view3d {0}".format(area.dimensions))
class Console(Space):
def draw(self, area):
print("Drawing console {0}".format(area.dimensions))
class Collection(object):
def __init__(self):
self.items = {}
self.active = None
def new(self, item):
self._create_and_set(item)
def set(self, item):
if item in self.items:
self._set_active(item)
else:
self.new(item)
def _create(self, item):
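        # Look up the class named by `item` (e.g. 'View3D') in the module
        # namespace via eval() and instantiate it.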
self.items[item] = eval(item)()
def _set_active(self, item):
self.active = self.items[item]
def _create_and_set(self, item):
self._create(item)
self._set_active(item)
def __repr__(self):
return ' '.join(self.items.keys()) + '\n ' + str(self.active)
class Area(object):
dimensions = (400, 300)
def __init__(self, space='View3D'):
self.spaces = Collection()
self.spaces.new(space)
def draw(self):
self.spaces.active.draw(self)
def set_space(self, space):
self.spaces.set(space)
if __name__ == '__main__':
a = Area()
print(a.spaces)
a.draw()
a.set_space('Console')
print(a.spaces)
a.draw()
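    # Expected output of this demo (a sketch; dict key order may vary on
    # Python 2, and Space.__repr__ prints "Space: <tag>"):
    #   View3D
    #    Space: View3D
    #   Drawing view3d (400, 300)
    #   Console View3D
    #    Space: Console
    #   Drawing console (400, 300)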
| gpl-2.0 | -8,398,200,256,312,645,000 | 20.388889 | 80 | 0.522078 | false |
GeoscienceAustralia/eo-datasets | tests/integration/__init__.py | 2 | 3856 | # coding=utf-8
"""
Module
"""
from __future__ import absolute_import
import binascii
import hashlib
import rasterio
import tempfile
from pathlib import Path
from rasterio import DatasetReader
from typing import Dict, Tuple
import numpy
allow_anything = object()
def assert_image(
image: Path,
overviews=allow_anything,
nodata=allow_anything,
unique_pixel_counts: Dict = allow_anything,
bands=1,
shape: Tuple[int, int] = None,
):
__tracebackhide__ = True
with rasterio.open(image) as d:
d: DatasetReader
assert d.count == bands, f"Expected {bands} band{'s' if bands > 1 else ''}"
if overviews is not allow_anything:
assert (
d.overviews(1) == overviews
), f"Unexpected overview: {d.overviews(1)!r} != {overviews!r}"
if nodata is not allow_anything:
assert d.nodata == nodata, f"Unexpected nodata: {d.nodata!r} != {nodata!r}"
if unique_pixel_counts is not allow_anything:
array = d.read(1)
value_counts = dict(zip(*numpy.unique(array, return_counts=True)))
assert (
value_counts == unique_pixel_counts
), f"Unexpected pixel counts: {value_counts!r} != {unique_pixel_counts!r}"
if shape:
assert shape == d.shape, f"Unexpected shape: {shape!r} != {d.shape!r}"
def load_checksum_filenames(output_metadata_path):
return [
line.split("\t")[-1][:-1] for line in output_metadata_path.open("r").readlines()
]
def on_same_filesystem(path1, path2):
return path1.stat().st_dev == path2.stat().st_dev
def hardlink_arg(path1, path2):
return "--hard-link" if on_same_filesystem(path1, path2) else "--no-hard-link"
def directory_size(directory):
"""
Total size of files in the given directory.
    :type directory: Path
:rtype: int
"""
return sum(p.stat().st_size for p in directory.rglob("*") if p.is_file())
class FakeAncilFile(object):
def __init__(self, base_folder, type_, filename, folder_offset=()):
"""
:type base_folder: pathlib.Path
:type type_: str
:type filename: str
:type folder_offset: tuple[str]
:return:
"""
self.base_folder = base_folder
self.type_ = type_
self.filename = filename
self.folder_offset = folder_offset
def create(self):
"""Create our dummy ancillary file"""
self.containing_folder.mkdir(parents=True)
with self.file_path.open("wb") as f:
# Write the file path into it so that it has a unique checksum.
f.write(str(self.file_path).encode("utf8"))
@property
def checksum(self):
m = hashlib.sha1()
m.update(str(self.file_path).encode("utf8"))
return binascii.hexlify(m.digest()).decode("ascii")
@property
def containing_folder(self):
return self.base_folder.joinpath(self.type_, *self.folder_offset)
@property
def file_path(self):
return self.containing_folder.joinpath(self.filename)
def prepare_work_order(ancil_files, work_order_template_path):
"""
:type ancil_files: tuple[FakeAncilFile]
:type work_order_template_path: pathlib.Path
:rtype: pathlib.Path
"""
# Create the dummy Ancil files.
for ancil in ancil_files:
ancil.create()
work_dir = Path(tempfile.mkdtemp())
# Write a work order with ancillary locations replaced.
output_work_order = work_dir.joinpath("work_order.xml")
with work_order_template_path.open("rb") as wo:
wo_text = (
wo.read()
.decode("utf-8")
.format(**{a.type_ + "_path": a.file_path for a in ancil_files})
)
with output_work_order.open("w") as out_wo:
out_wo.write(wo_text)
return output_work_order
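# Illustrative use of the helpers above (paths and the ancillary type name
# are hypothetical):
#   ancil = FakeAncilFile(Path('/tmp/ancillary'), 'ephemeris', 'eph.dat')
#   work_order = prepare_work_order((ancil,), Path('work_order_template.xml'))
# prepare_work_order() creates the fake files and writes a work order with
# "{ephemeris_path}"-style placeholders substituted.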
| apache-2.0 | 6,989,911,014,639,718,000 | 28.212121 | 88 | 0.612552 | false |
tmerrick1/spack | var/spack/repos/builtin/packages/py-isort/package.py | 5 | 1589 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyIsort(PythonPackage):
"""A Python utility / library to sort Python imports."""
homepage = "https://github.com/timothycrosley/isort"
url = "https://pypi.io/packages/source/i/isort/isort-4.2.15.tar.gz"
version('4.2.15', '34915a2ce60e6fe3dbcbf5982deef9b4')
depends_on('py-setuptools', type='build')
| lgpl-2.1 | -4,541,510,042,882,006,500 | 43.138889 | 78 | 0.674638 | false |
SpaceGroupUCL/qgisSpaceSyntaxToolkit | esstoolkit/external/pyqtgraph/flowchart/library/Data.py | 3 | 16128 | # -*- coding: utf-8 -*-
from ..Node import Node
from ...Qt import QtGui, QtCore
import numpy as np
import sys
from .common import *
from ...SRTTransform import SRTTransform
from ...Point import Point
from ...widgets.TreeWidget import TreeWidget
from ...graphicsItems.LinearRegionItem import LinearRegionItem
from . import functions
class ColumnSelectNode(Node):
"""Select named columns from a record array or MetaArray."""
nodeName = "ColumnSelect"
def __init__(self, name):
Node.__init__(self, name, terminals={'In': {'io': 'in'}})
self.columns = set()
self.columnList = QtGui.QListWidget()
self.axis = 0
self.columnList.itemChanged.connect(self.itemChanged)
def process(self, In, display=True):
if display:
self.updateList(In)
out = {}
if hasattr(In, 'implements') and In.implements('MetaArray'):
for c in self.columns:
out[c] = In[self.axis:c]
elif isinstance(In, np.ndarray) and In.dtype.fields is not None:
for c in self.columns:
out[c] = In[c]
else:
self.In.setValueAcceptable(False)
raise Exception("Input must be MetaArray or ndarray with named fields")
return out
def ctrlWidget(self):
return self.columnList
def updateList(self, data):
if hasattr(data, 'implements') and data.implements('MetaArray'):
cols = data.listColumns()
for ax in cols: ## find first axis with columns
if len(cols[ax]) > 0:
self.axis = ax
cols = set(cols[ax])
break
else:
cols = list(data.dtype.fields.keys())
rem = set()
for c in self.columns:
if c not in cols:
self.removeTerminal(c)
rem.add(c)
self.columns -= rem
self.columnList.blockSignals(True)
self.columnList.clear()
for c in cols:
item = QtGui.QListWidgetItem(c)
item.setFlags(QtCore.Qt.ItemIsEnabled|QtCore.Qt.ItemIsUserCheckable)
if c in self.columns:
item.setCheckState(QtCore.Qt.Checked)
else:
item.setCheckState(QtCore.Qt.Unchecked)
self.columnList.addItem(item)
self.columnList.blockSignals(False)
def itemChanged(self, item):
col = str(item.text())
if item.checkState() == QtCore.Qt.Checked:
if col not in self.columns:
self.columns.add(col)
self.addOutput(col)
else:
if col in self.columns:
self.columns.remove(col)
self.removeTerminal(col)
self.update()
def saveState(self):
state = Node.saveState(self)
state['columns'] = list(self.columns)
return state
def restoreState(self, state):
Node.restoreState(self, state)
self.columns = set(state.get('columns', []))
for c in self.columns:
self.addOutput(c)
class RegionSelectNode(CtrlNode):
"""Returns a slice from a 1-D array. Connect the 'widget' output to a plot to display a region-selection widget."""
nodeName = "RegionSelect"
uiTemplate = [
('start', 'spin', {'value': 0, 'step': 0.1}),
('stop', 'spin', {'value': 0.1, 'step': 0.1}),
('display', 'check', {'value': True}),
('movable', 'check', {'value': True}),
]
def __init__(self, name):
self.items = {}
CtrlNode.__init__(self, name, terminals={
'data': {'io': 'in'},
'selected': {'io': 'out'},
'region': {'io': 'out'},
'widget': {'io': 'out', 'multi': True}
})
self.ctrls['display'].toggled.connect(self.displayToggled)
self.ctrls['movable'].toggled.connect(self.movableToggled)
def displayToggled(self, b):
for item in self.items.values():
item.setVisible(b)
def movableToggled(self, b):
for item in self.items.values():
item.setMovable(b)
def process(self, data=None, display=True):
#print "process.."
s = self.stateGroup.state()
region = [s['start'], s['stop']]
if display:
conn = self['widget'].connections()
for c in conn:
plot = c.node().getPlot()
if plot is None:
continue
if c in self.items:
item = self.items[c]
item.setRegion(region)
#print " set rgn:", c, region
#item.setXVals(events)
else:
item = LinearRegionItem(values=region)
self.items[c] = item
#item.connect(item, QtCore.SIGNAL('regionChanged'), self.rgnChanged)
item.sigRegionChanged.connect(self.rgnChanged)
item.setVisible(s['display'])
item.setMovable(s['movable'])
#print " new rgn:", c, region
#self.items[c].setYRange([0., 0.2], relative=True)
if self['selected'].isConnected():
if data is None:
sliced = None
elif (hasattr(data, 'implements') and data.implements('MetaArray')):
sliced = data[0:s['start']:s['stop']]
else:
mask = (data['time'] >= s['start']) * (data['time'] < s['stop'])
sliced = data[mask]
else:
sliced = None
return {'selected': sliced, 'widget': self.items, 'region': region}
def rgnChanged(self, item):
region = item.getRegion()
self.stateGroup.setState({'start': region[0], 'stop': region[1]})
self.update()
class EvalNode(Node):
"""Return the output of a string evaluated/executed by the python interpreter.
    The string may be either an expression or a Python script, and inputs are accessed as the name of the terminal.
For expressions, a single value may be evaluated for a single output, or a dict for multiple outputs.
For a script, the text will be executed as the body of a function."""
nodeName = 'PythonEval'
def __init__(self, name):
Node.__init__(self, name,
terminals = {
'input': {'io': 'in', 'renamable': True, 'multiable': True},
'output': {'io': 'out', 'renamable': True, 'multiable': True},
},
allowAddInput=True, allowAddOutput=True)
self.ui = QtGui.QWidget()
self.layout = QtGui.QGridLayout()
self.text = QtGui.QTextEdit()
self.text.setTabStopWidth(30)
self.text.setPlainText("# Access inputs as args['input_name']\nreturn {'output': None} ## one key per output terminal")
self.layout.addWidget(self.text, 1, 0, 1, 2)
self.ui.setLayout(self.layout)
self.text.focusOutEvent = self.focusOutEvent
self.lastText = None
def ctrlWidget(self):
return self.ui
def setCode(self, code):
# unindent code; this allows nicer inline code specification when
# calling this method.
ind = []
lines = code.split('\n')
for line in lines:
stripped = line.lstrip()
if len(stripped) > 0:
ind.append(len(line) - len(stripped))
if len(ind) > 0:
ind = min(ind)
code = '\n'.join([line[ind:] for line in lines])
self.text.clear()
self.text.insertPlainText(code)
def code(self):
return self.text.toPlainText()
def focusOutEvent(self, ev):
text = str(self.text.toPlainText())
if text != self.lastText:
self.lastText = text
self.update()
return QtGui.QTextEdit.focusOutEvent(self.text, ev)
def process(self, display=True, **args):
l = locals()
l.update(args)
## try eval first, then exec
try:
text = str(self.text.toPlainText()).replace('\n', ' ')
output = eval(text, globals(), l)
except SyntaxError:
fn = "def fn(**args):\n"
run = "\noutput=fn(**args)\n"
text = fn + "\n".join([" "+l for l in str(self.text.toPlainText()).split('\n')]) + run
if sys.version_info.major == 2:
exec(text)
elif sys.version_info.major == 3:
ldict = locals()
exec(text, globals(), ldict)
output = ldict['output']
except:
print("Error processing node: %s" % self.name())
raise
return output
def saveState(self):
state = Node.saveState(self)
state['text'] = str(self.text.toPlainText())
#state['terminals'] = self.saveTerminals()
return state
def restoreState(self, state):
Node.restoreState(self, state)
self.setCode(state['text'])
self.restoreTerminals(state['terminals'])
self.update()
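# A minimal usage sketch for EvalNode (assumes a pyqtgraph Flowchart
# instance `fc`; the node type name 'PythonEval' matches nodeName above):
#   node = fc.createNode('PythonEval')
#   node.setCode("return {'output': args['input'] * 2}")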
class ColumnJoinNode(Node):
"""Concatenates record arrays and/or adds new columns"""
nodeName = 'ColumnJoin'
def __init__(self, name):
Node.__init__(self, name, terminals = {
'output': {'io': 'out'},
})
#self.items = []
self.ui = QtGui.QWidget()
self.layout = QtGui.QGridLayout()
self.ui.setLayout(self.layout)
self.tree = TreeWidget()
self.addInBtn = QtGui.QPushButton('+ Input')
self.remInBtn = QtGui.QPushButton('- Input')
self.layout.addWidget(self.tree, 0, 0, 1, 2)
self.layout.addWidget(self.addInBtn, 1, 0)
self.layout.addWidget(self.remInBtn, 1, 1)
self.addInBtn.clicked.connect(self.addInput)
self.remInBtn.clicked.connect(self.remInput)
self.tree.sigItemMoved.connect(self.update)
def ctrlWidget(self):
return self.ui
def addInput(self):
#print "ColumnJoinNode.addInput called."
term = Node.addInput(self, 'input', renamable=True, removable=True, multiable=True)
#print "Node.addInput returned. term:", term
item = QtGui.QTreeWidgetItem([term.name()])
item.term = term
term.joinItem = item
#self.items.append((term, item))
self.tree.addTopLevelItem(item)
def remInput(self):
sel = self.tree.currentItem()
term = sel.term
term.joinItem = None
sel.term = None
self.tree.removeTopLevelItem(sel)
self.removeTerminal(term)
self.update()
def process(self, display=True, **args):
order = self.order()
vals = []
for name in order:
if name not in args:
continue
val = args[name]
if isinstance(val, np.ndarray) and len(val.dtype) > 0:
vals.append(val)
else:
vals.append((name, None, val))
return {'output': functions.concatenateColumns(vals)}
def order(self):
return [str(self.tree.topLevelItem(i).text(0)) for i in range(self.tree.topLevelItemCount())]
def saveState(self):
state = Node.saveState(self)
state['order'] = self.order()
return state
def restoreState(self, state):
Node.restoreState(self, state)
inputs = self.inputs()
## Node.restoreState should have created all of the terminals we need
## However: to maintain support for some older flowchart files, we need
## to manually add any terminals that were not taken care of.
for name in [n for n in state['order'] if n not in inputs]:
Node.addInput(self, name, renamable=True, removable=True, multiable=True)
inputs = self.inputs()
order = [name for name in state['order'] if name in inputs]
for name in inputs:
if name not in order:
order.append(name)
self.tree.clear()
for name in order:
term = self[name]
item = QtGui.QTreeWidgetItem([name])
item.term = term
term.joinItem = item
#self.items.append((term, item))
self.tree.addTopLevelItem(item)
def terminalRenamed(self, term, oldName):
Node.terminalRenamed(self, term, oldName)
item = term.joinItem
item.setText(0, term.name())
self.update()
class Mean(CtrlNode):
"""Calculate the mean of an array across an axis.
"""
nodeName = 'Mean'
uiTemplate = [
('axis', 'intSpin', {'value': 0, 'min': -1, 'max': 1000000}),
]
def processData(self, data):
s = self.stateGroup.state()
ax = None if s['axis'] == -1 else s['axis']
return data.mean(axis=ax)
class Max(CtrlNode):
"""Calculate the maximum of an array across an axis.
"""
nodeName = 'Max'
uiTemplate = [
('axis', 'intSpin', {'value': 0, 'min': -1, 'max': 1000000}),
]
def processData(self, data):
s = self.stateGroup.state()
ax = None if s['axis'] == -1 else s['axis']
return data.max(axis=ax)
class Min(CtrlNode):
"""Calculate the minimum of an array across an axis.
"""
nodeName = 'Min'
uiTemplate = [
('axis', 'intSpin', {'value': 0, 'min': -1, 'max': 1000000}),
]
def processData(self, data):
s = self.stateGroup.state()
ax = None if s['axis'] == -1 else s['axis']
return data.min(axis=ax)
class Stdev(CtrlNode):
"""Calculate the standard deviation of an array across an axis.
"""
nodeName = 'Stdev'
uiTemplate = [
        ('axis', 'intSpin', {'value': 0, 'min': -1, 'max': 1000000}),
]
def processData(self, data):
s = self.stateGroup.state()
ax = None if s['axis'] == -1 else s['axis']
return data.std(axis=ax)
class Index(CtrlNode):
"""Select an index from an array axis.
"""
nodeName = 'Index'
uiTemplate = [
('axis', 'intSpin', {'value': 0, 'min': 0, 'max': 1000000}),
('index', 'intSpin', {'value': 0, 'min': 0, 'max': 1000000}),
]
def processData(self, data):
s = self.stateGroup.state()
ax = s['axis']
ind = s['index']
if ax == 0:
# allow support for non-ndarray sequence types
return data[ind]
else:
return data.take(ind, axis=ax)
class Slice(CtrlNode):
"""Select a slice from an array axis.
"""
nodeName = 'Slice'
uiTemplate = [
('axis', 'intSpin', {'value': 0, 'min': 0, 'max': 1e6}),
('start', 'intSpin', {'value': 0, 'min': -1e6, 'max': 1e6}),
('stop', 'intSpin', {'value': -1, 'min': -1e6, 'max': 1e6}),
('step', 'intSpin', {'value': 1, 'min': -1e6, 'max': 1e6}),
]
def processData(self, data):
s = self.stateGroup.state()
ax = s['axis']
start = s['start']
stop = s['stop']
step = s['step']
if ax == 0:
# allow support for non-ndarray sequence types
return data[start:stop:step]
else:
sl = [slice(None) for i in range(data.ndim)]
sl[ax] = slice(start, stop, step)
return data[sl]
class AsType(CtrlNode):
"""Convert an array to a different dtype.
"""
nodeName = 'AsType'
uiTemplate = [
('dtype', 'combo', {'values': ['float', 'int', 'float32', 'float64', 'float128', 'int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64'], 'index': 0}),
]
def processData(self, data):
s = self.stateGroup.state()
return data.astype(s['dtype'])
| gpl-3.0 | -5,559,612,180,351,713,000 | 32.530146 | 177 | 0.534288 | false |
drix00/pysemeels | tests/hitachi/eels_su/test_elv_file.py | 1 | 4864 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. py:currentmodule:: pysemeels.hitachi.eels_su.test_elv_file
.. moduleauthor:: Hendrix Demers <[email protected]>
Tests for the module :py:mod:`pysemeels.hitachi.eels_su.elv_file`.
"""
###############################################################################
# Copyright 2017 Hendrix Demers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
# Standard library modules.
import unittest
import sys
# Third party modules.
import pytest
# Local modules.
# Project modules.
from pysemeels import get_current_module_path
from pysemeels.hitachi.eels_su.elv_file import ElvFile
from tests import is_bad_file
# Globals and constants variables.
class TestElvFile(unittest.TestCase):
    """
    TestCase class for the module `pysemeels.hitachi.eels_su.elv_file`.
    """
def setUp(self):
"""
Setup method.
"""
unittest.TestCase.setUp(self)
self.elv_file_path = get_current_module_path(__file__, "../../../test_data/hitachi/eels_su/30kV_7eV.elv")
if is_bad_file(self.elv_file_path):
pytest.skip("File not found: {}".format(self.elv_file_path))
def tearDown(self):
"""
Teardown method.
"""
unittest.TestCase.tearDown(self)
def testSkeleton(self):
"""
First test to check if the testcase is working with the testing framework.
"""
# self.fail("Test if the testcase is working.")
self.assert_(True)
@pytest.mark.skipif(sys.platform != "win32", reason="only run on windows")
def test_read_file(self):
"""
        Test reading an ELV file and verify the parsed header values and data arrays.
"""
with open(self.elv_file_path, 'r') as elv_text_file:
elv_file = ElvFile()
elv_file.read(elv_text_file)
self.assertEqual("01/Mar/2017", elv_file.date)
self.assertEqual("10:59", elv_file.time)
self.assertEqual("", elv_file.comment)
self.assertEqual(500, elv_file.dose)
self.assertEqual(0.0, elv_file.le)
self.assertEqual(98.7, elv_file.raw)
self.assertEqual(7.0, elv_file.energy_width)
self.assertEqual(586, elv_file.dual_det_position)
self.assertEqual(133, elv_file.dual_det_post)
self.assertEqual(608, elv_file.dual_det_center)
self.assertEqual(13575, elv_file.q1)
self.assertEqual(3850, elv_file.q1s)
self.assertEqual(0, elv_file.q2)
self.assertEqual(0, elv_file.q2s)
self.assertEqual(2700, elv_file.q3)
self.assertEqual(2900, elv_file.h1)
self.assertEqual(6150, elv_file.h1s)
self.assertEqual(-600, elv_file.h2)
self.assertEqual(350, elv_file.h2s)
self.assertEqual(0, elv_file.h4)
self.assertEqual(0, elv_file.elv_x)
self.assertEqual(0, elv_file.elv_y)
self.assertEqual(259, elv_file.spectrum_alignment_x)
self.assertEqual(0, elv_file.spectrum_alignment_y)
self.assertEqual(-1500, elv_file.det_spec_alignment_x)
self.assertEqual(470, elv_file.det_spec_alignment_y)
self.assertEqual(-1500, elv_file.det_map_alignment_x)
self.assertEqual(1500, elv_file.det_map_alignment_y)
self.assertEqual(37443, elv_file.mag)
self.assertEqual(-32.00, elv_file.energies_eV[0])
self.assertEqual(2282, elv_file.raw_counts[0])
self.assertEqual(21.84, elv_file.energies_eV[-1])
self.assertEqual(0, elv_file.raw_counts[-1])
self.assertEqual(1024, len(elv_file.energies_eV))
self.assertEqual(1024, len(elv_file.raw_counts))
self.assertEqual(0.918375, elv_file.gain_corrections[0])
self.assertEqual(0.000000, elv_file.gain_corrections[-1])
self.assertEqual(1024, len(elv_file.gain_corrections))
self.assertEqual(2313, elv_file.dark_currents[0])
self.assertEqual(0, elv_file.dark_currents[-1])
self.assertEqual(1024, len(elv_file.dark_currents))
# self.fail("Test if the testcase is working.")
| apache-2.0 | 4,892,551,114,688,287,000 | 35.571429 | 113 | 0.61801 | false |
fkolacek/FIT-VUT | bp-revok/python/lib/python2.7/test/test_class.py | 3 | 17777 | "Test the functionality of Python classes implementing operators."
import unittest
from test import test_support
testmeths = [
# Binary operations
"add",
"radd",
"sub",
"rsub",
"mul",
"rmul",
"div",
"rdiv",
"mod",
"rmod",
"divmod",
"rdivmod",
"pow",
"rpow",
"rshift",
"rrshift",
"lshift",
"rlshift",
"and",
"rand",
"or",
"ror",
"xor",
"rxor",
# List/dict operations
"contains",
"getitem",
"getslice",
"setitem",
"setslice",
"delitem",
"delslice",
# Unary operations
"neg",
"pos",
"abs",
# generic operations
"init",
]
# These need to return something other than None
# "coerce",
# "hash",
# "str",
# "repr",
# "int",
# "long",
# "float",
# "oct",
# "hex",
# These are separate because they can influence the test of other methods.
# "getattr",
# "setattr",
# "delattr",
callLst = []
def trackCall(f):
def track(*args, **kwargs):
callLst.append((f.__name__, args))
return f(*args, **kwargs)
return track
class AllTests:
trackCall = trackCall
@trackCall
def __coerce__(self, *args):
return (self,) + args
@trackCall
def __hash__(self, *args):
return hash(id(self))
@trackCall
def __str__(self, *args):
return "AllTests"
@trackCall
def __repr__(self, *args):
return "AllTests"
@trackCall
def __int__(self, *args):
return 1
@trackCall
def __float__(self, *args):
return 1.0
@trackCall
def __long__(self, *args):
return 1L
@trackCall
def __oct__(self, *args):
return '01'
@trackCall
def __hex__(self, *args):
return '0x1'
@trackCall
def __cmp__(self, *args):
return 0
# Synthesize all the other AllTests methods from the names in testmeths.
method_template = """\
@trackCall
def __%(method)s__(self, *args):
pass
"""
for method in testmeths:
exec method_template % locals() in AllTests.__dict__
del method, method_template
class ClassTests(unittest.TestCase):
def setUp(self):
callLst[:] = []
def assertCallStack(self, expected_calls):
actualCallList = callLst[:] # need to copy because the comparison below will add
# additional calls to callLst
if expected_calls != actualCallList:
self.fail("Expected call list:\n %s\ndoes not match actual call list\n %s" %
(expected_calls, actualCallList))
def testInit(self):
foo = AllTests()
self.assertCallStack([("__init__", (foo,))])
def testBinaryOps(self):
testme = AllTests()
# Binary operations
callLst[:] = []
testme + 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__add__", (testme, 1))])
callLst[:] = []
1 + testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__radd__", (testme, 1))])
callLst[:] = []
testme - 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__sub__", (testme, 1))])
callLst[:] = []
1 - testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__rsub__", (testme, 1))])
callLst[:] = []
testme * 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__mul__", (testme, 1))])
callLst[:] = []
1 * testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__rmul__", (testme, 1))])
if 1/2 == 0:
callLst[:] = []
testme / 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__div__", (testme, 1))])
callLst[:] = []
1 / testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__rdiv__", (testme, 1))])
callLst[:] = []
testme % 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__mod__", (testme, 1))])
callLst[:] = []
1 % testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__rmod__", (testme, 1))])
callLst[:] = []
divmod(testme,1)
self.assertCallStack([("__coerce__", (testme, 1)), ("__divmod__", (testme, 1))])
callLst[:] = []
divmod(1, testme)
self.assertCallStack([("__coerce__", (testme, 1)), ("__rdivmod__", (testme, 1))])
callLst[:] = []
testme ** 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__pow__", (testme, 1))])
callLst[:] = []
1 ** testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__rpow__", (testme, 1))])
callLst[:] = []
testme >> 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__rshift__", (testme, 1))])
callLst[:] = []
1 >> testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__rrshift__", (testme, 1))])
callLst[:] = []
testme << 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__lshift__", (testme, 1))])
callLst[:] = []
1 << testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__rlshift__", (testme, 1))])
callLst[:] = []
testme & 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__and__", (testme, 1))])
callLst[:] = []
1 & testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__rand__", (testme, 1))])
callLst[:] = []
testme | 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__or__", (testme, 1))])
callLst[:] = []
1 | testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__ror__", (testme, 1))])
callLst[:] = []
testme ^ 1
self.assertCallStack([("__coerce__", (testme, 1)), ("__xor__", (testme, 1))])
callLst[:] = []
1 ^ testme
self.assertCallStack([("__coerce__", (testme, 1)), ("__rxor__", (testme, 1))])
def testListAndDictOps(self):
testme = AllTests()
# List/dict operations
class Empty: pass
try:
1 in Empty()
self.fail('failed, should have raised TypeError')
except TypeError:
pass
callLst[:] = []
1 in testme
self.assertCallStack([('__contains__', (testme, 1))])
callLst[:] = []
testme[1]
self.assertCallStack([('__getitem__', (testme, 1))])
callLst[:] = []
testme[1] = 1
self.assertCallStack([('__setitem__', (testme, 1, 1))])
callLst[:] = []
del testme[1]
self.assertCallStack([('__delitem__', (testme, 1))])
callLst[:] = []
testme[:42]
self.assertCallStack([('__getslice__', (testme, 0, 42))])
callLst[:] = []
testme[:42] = "The Answer"
self.assertCallStack([('__setslice__', (testme, 0, 42, "The Answer"))])
callLst[:] = []
del testme[:42]
self.assertCallStack([('__delslice__', (testme, 0, 42))])
callLst[:] = []
testme[2:1024:10]
self.assertCallStack([('__getitem__', (testme, slice(2, 1024, 10)))])
callLst[:] = []
testme[2:1024:10] = "A lot"
self.assertCallStack([('__setitem__', (testme, slice(2, 1024, 10),
"A lot"))])
callLst[:] = []
del testme[2:1024:10]
self.assertCallStack([('__delitem__', (testme, slice(2, 1024, 10)))])
callLst[:] = []
testme[:42, ..., :24:, 24, 100]
self.assertCallStack([('__getitem__', (testme, (slice(None, 42, None),
Ellipsis,
slice(None, 24, None),
24, 100)))])
callLst[:] = []
testme[:42, ..., :24:, 24, 100] = "Strange"
self.assertCallStack([('__setitem__', (testme, (slice(None, 42, None),
Ellipsis,
slice(None, 24, None),
24, 100), "Strange"))])
callLst[:] = []
del testme[:42, ..., :24:, 24, 100]
self.assertCallStack([('__delitem__', (testme, (slice(None, 42, None),
Ellipsis,
slice(None, 24, None),
24, 100)))])
# Now remove the slice hooks to see if converting normal slices to
# slice object works.
getslice = AllTests.__getslice__
del AllTests.__getslice__
setslice = AllTests.__setslice__
del AllTests.__setslice__
delslice = AllTests.__delslice__
del AllTests.__delslice__
# XXX when using new-style classes the slice testme[:42] produces
# slice(None, 42, None) instead of slice(0, 42, None). py3k will have
# to change this test.
callLst[:] = []
testme[:42]
self.assertCallStack([('__getitem__', (testme, slice(0, 42, None)))])
callLst[:] = []
testme[:42] = "The Answer"
self.assertCallStack([('__setitem__', (testme, slice(0, 42, None),
"The Answer"))])
callLst[:] = []
del testme[:42]
self.assertCallStack([('__delitem__', (testme, slice(0, 42, None)))])
# Restore the slice methods, or the tests will fail with regrtest -R.
AllTests.__getslice__ = getslice
AllTests.__setslice__ = setslice
AllTests.__delslice__ = delslice
def testUnaryOps(self):
testme = AllTests()
callLst[:] = []
-testme
self.assertCallStack([('__neg__', (testme,))])
callLst[:] = []
+testme
self.assertCallStack([('__pos__', (testme,))])
callLst[:] = []
abs(testme)
self.assertCallStack([('__abs__', (testme,))])
callLst[:] = []
int(testme)
self.assertCallStack([('__int__', (testme,))])
callLst[:] = []
long(testme)
self.assertCallStack([('__long__', (testme,))])
callLst[:] = []
float(testme)
self.assertCallStack([('__float__', (testme,))])
callLst[:] = []
oct(testme)
self.assertCallStack([('__oct__', (testme,))])
callLst[:] = []
hex(testme)
self.assertCallStack([('__hex__', (testme,))])
def testMisc(self):
testme = AllTests()
callLst[:] = []
hash(testme)
self.assertCallStack([('__hash__', (testme,))])
callLst[:] = []
repr(testme)
self.assertCallStack([('__repr__', (testme,))])
callLst[:] = []
str(testme)
self.assertCallStack([('__str__', (testme,))])
callLst[:] = []
testme == 1
self.assertCallStack([("__coerce__", (testme, 1)), ('__cmp__', (testme, 1))])
callLst[:] = []
testme < 1
self.assertCallStack([("__coerce__", (testme, 1)), ('__cmp__', (testme, 1))])
callLst[:] = []
testme > 1
self.assertCallStack([("__coerce__", (testme, 1)), ('__cmp__', (testme, 1))])
callLst[:] = []
eval('testme <> 1') # XXX kill this in py3k
self.assertCallStack([("__coerce__", (testme, 1)), ('__cmp__', (testme, 1))])
callLst[:] = []
testme != 1
self.assertCallStack([("__coerce__", (testme, 1)), ('__cmp__', (testme, 1))])
callLst[:] = []
1 == testme
self.assertCallStack([("__coerce__", (testme, 1)), ('__cmp__', (1, testme))])
callLst[:] = []
1 < testme
self.assertCallStack([("__coerce__", (testme, 1)), ('__cmp__', (1, testme))])
callLst[:] = []
1 > testme
self.assertCallStack([("__coerce__", (testme, 1)), ('__cmp__', (1, testme))])
callLst[:] = []
eval('1 <> testme')
self.assertCallStack([("__coerce__", (testme, 1)), ('__cmp__', (1, testme))])
callLst[:] = []
1 != testme
self.assertCallStack([("__coerce__", (testme, 1)), ('__cmp__', (1, testme))])
def testGetSetAndDel(self):
# Interfering tests
class ExtraTests(AllTests):
@trackCall
def __getattr__(self, *args):
return "SomeVal"
@trackCall
def __setattr__(self, *args):
pass
@trackCall
def __delattr__(self, *args):
pass
testme = ExtraTests()
callLst[:] = []
testme.spam
self.assertCallStack([('__getattr__', (testme, "spam"))])
callLst[:] = []
testme.eggs = "spam, spam, spam and ham"
self.assertCallStack([('__setattr__', (testme, "eggs",
"spam, spam, spam and ham"))])
callLst[:] = []
del testme.cardinal
self.assertCallStack([('__delattr__', (testme, "cardinal"))])
def testDel(self):
x = []
class DelTest:
def __del__(self):
x.append("crab people, crab people")
testme = DelTest()
del testme
import gc
gc.collect()
self.assertEquals(["crab people, crab people"], x)
def testBadTypeReturned(self):
        # return values of some methods are type-checked
class BadTypeClass:
def __int__(self):
return None
__float__ = __int__
__long__ = __int__
__str__ = __int__
__repr__ = __int__
__oct__ = __int__
__hex__ = __int__
for f in [int, float, long, str, repr, oct, hex]:
self.assertRaises(TypeError, f, BadTypeClass())
def testMixIntsAndLongs(self):
# mixing up ints and longs is okay
class IntLongMixClass:
@trackCall
def __int__(self):
return 42L
@trackCall
def __long__(self):
return 64
mixIntAndLong = IntLongMixClass()
callLst[:] = []
as_int = int(mixIntAndLong)
self.assertEquals(type(as_int), long)
self.assertEquals(as_int, 42L)
self.assertCallStack([('__int__', (mixIntAndLong,))])
callLst[:] = []
as_long = long(mixIntAndLong)
self.assertEquals(type(as_long), int)
self.assertEquals(as_long, 64)
self.assertCallStack([('__long__', (mixIntAndLong,))])
def testHashStuff(self):
# Test correct errors from hash() on objects with comparisons but
# no __hash__
class C0:
pass
hash(C0()) # This should work; the next two should raise TypeError
class C1:
def __cmp__(self, other): return 0
self.assertRaises(TypeError, hash, C1())
class C2:
def __eq__(self, other): return 1
self.assertRaises(TypeError, hash, C2())
def testSFBug532646(self):
# Test for SF bug 532646
class A:
pass
A.__call__ = A()
a = A()
try:
a() # This should not segfault
except RuntimeError:
pass
else:
self.fail("Failed to raise RuntimeError")
def testForExceptionsRaisedInInstanceGetattr2(self):
# Tests for exceptions raised in instance_getattr2().
def booh(self):
raise AttributeError("booh")
class A:
a = property(booh)
try:
A().a # Raised AttributeError: A instance has no attribute 'a'
except AttributeError, x:
if str(x) != "booh":
self.fail("attribute error for A().a got masked: %s" % x)
class E:
__eq__ = property(booh)
E() == E() # In debug mode, caused a C-level assert() to fail
class I:
__init__ = property(booh)
try:
# In debug mode, printed XXX undetected error and
# raises AttributeError
I()
except AttributeError, x:
pass
else:
self.fail("attribute error for I.__init__ got masked")
def testHashComparisonOfMethods(self):
# Test comparison and hash of methods
class A:
def __init__(self, x):
self.x = x
def f(self):
pass
def g(self):
pass
def __eq__(self, other):
return self.x == other.x
def __hash__(self):
return self.x
class B(A):
pass
a1 = A(1)
a2 = A(2)
self.assertEquals(a1.f, a1.f)
self.assertNotEquals(a1.f, a2.f)
self.assertNotEquals(a1.f, a1.g)
self.assertEquals(a1.f, A(1).f)
self.assertEquals(hash(a1.f), hash(a1.f))
self.assertEquals(hash(a1.f), hash(A(1).f))
self.assertNotEquals(A.f, a1.f)
self.assertNotEquals(A.f, A.g)
self.assertEquals(B.f, A.f)
self.assertEquals(hash(B.f), hash(A.f))
# the following triggers a SystemError in 2.4
a = A(hash(A.f.im_func)^(-1))
hash(a.f)
def test_main():
with test_support.check_py3k_warnings(
(".+__(get|set|del)slice__ has been removed", DeprecationWarning),
("classic int division", DeprecationWarning),
("<> not supported", DeprecationWarning)):
test_support.run_unittest(ClassTests)
if __name__=='__main__':
test_main()
| apache-2.0 | 1,378,322,549,443,257,000 | 27.397764 | 90 | 0.472408 | false |
ckaestne/CIDE | CIDE_Language_Python-test/testfiles/UserString.py | 8 | 7995 | #!/usr/bin/env python
## vim:ts=4:et:nowrap
"""A user-defined wrapper around string objects
Note: string objects have grown methods in Python 1.6
This module requires Python 1.6 or later.
"""
from types import StringType, UnicodeType
import sys
__all__ = ["UserString","MutableString"]
class UserString:
def __init__(self, seq):
if isinstance(seq, StringType) or isinstance(seq, UnicodeType):
self.data = seq
elif isinstance(seq, UserString):
self.data = seq.data[:]
else:
self.data = str(seq)
def __str__(self): return str(self.data)
def __repr__(self): return repr(self.data)
def __int__(self): return int(self.data)
def __long__(self): return long(self.data)
def __float__(self): return float(self.data)
def __complex__(self): return complex(self.data)
def __hash__(self): return hash(self.data)
def __cmp__(self, string):
if isinstance(string, UserString):
return cmp(self.data, string.data)
else:
return cmp(self.data, string)
def __contains__(self, char):
return char in self.data
def __len__(self): return len(self.data)
def __getitem__(self, index): return self.__class__(self.data[index])
def __getslice__(self, start, end):
start = max(start, 0); end = max(end, 0)
return self.__class__(self.data[start:end])
def __add__(self, other):
if isinstance(other, UserString):
return self.__class__(self.data + other.data)
elif isinstance(other, StringType) or isinstance(other, UnicodeType):
return self.__class__(self.data + other)
else:
return self.__class__(self.data + str(other))
def __radd__(self, other):
if isinstance(other, StringType) or isinstance(other, UnicodeType):
return self.__class__(other + self.data)
else:
return self.__class__(str(other) + self.data)
def __iadd__(self, other):
if isinstance(other, UserString):
self.data += other.data
elif isinstance(other, StringType) or isinstance(other, UnicodeType):
self.data += other
else:
self.data += str(other)
return self
def __mul__(self, n):
return self.__class__(self.data*n)
__rmul__ = __mul__
def __imul__(self, n):
self.data *= n
return self
# the following methods are defined in alphabetical order:
def capitalize(self): return self.__class__(self.data.capitalize())
def center(self, width): return self.__class__(self.data.center(width))
def count(self, sub, start=0, end=sys.maxint):
return self.data.count(sub, start, end)
def decode(self, encoding=None, errors=None): # XXX improve this?
if encoding:
if errors:
return self.__class__(self.data.decode(encoding, errors))
else:
return self.__class__(self.data.decode(encoding))
else:
return self.__class__(self.data.decode())
def encode(self, encoding=None, errors=None): # XXX improve this?
if encoding:
if errors:
return self.__class__(self.data.encode(encoding, errors))
else:
return self.__class__(self.data.encode(encoding))
else:
return self.__class__(self.data.encode())
def endswith(self, suffix, start=0, end=sys.maxint):
return self.data.endswith(suffix, start, end)
def expandtabs(self, tabsize=8):
return self.__class__(self.data.expandtabs(tabsize))
def find(self, sub, start=0, end=sys.maxint):
return self.data.find(sub, start, end)
def index(self, sub, start=0, end=sys.maxint):
return self.data.index(sub, start, end)
def isalpha(self): return self.data.isalpha()
def isalnum(self): return self.data.isalnum()
def isdecimal(self): return self.data.isdecimal()
def isdigit(self): return self.data.isdigit()
def islower(self): return self.data.islower()
def isnumeric(self): return self.data.isnumeric()
def isspace(self): return self.data.isspace()
def istitle(self): return self.data.istitle()
def isupper(self): return self.data.isupper()
def join(self, seq): return self.data.join(seq)
def ljust(self, width): return self.__class__(self.data.ljust(width))
def lower(self): return self.__class__(self.data.lower())
def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
def replace(self, old, new, maxsplit=-1):
return self.__class__(self.data.replace(old, new, maxsplit))
def rfind(self, sub, start=0, end=sys.maxint):
return self.data.rfind(sub, start, end)
def rindex(self, sub, start=0, end=sys.maxint):
return self.data.rindex(sub, start, end)
def rjust(self, width): return self.__class__(self.data.rjust(width))
def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars))
def split(self, sep=None, maxsplit=-1):
return self.data.split(sep, maxsplit)
def splitlines(self, keepends=0): return self.data.splitlines(keepends)
def startswith(self, prefix, start=0, end=sys.maxint):
return self.data.startswith(prefix, start, end)
def strip(self, chars=None): return self.__class__(self.data.strip(chars))
def swapcase(self): return self.__class__(self.data.swapcase())
def title(self): return self.__class__(self.data.title())
def translate(self, *args):
return self.__class__(self.data.translate(*args))
def upper(self): return self.__class__(self.data.upper())
def zfill(self, width): return self.__class__(self.data.zfill(width))
class MutableString(UserString):
"""mutable string objects
Python strings are immutable objects. This has the advantage, that
strings may be used as dictionary keys. If this property isn't needed
and you insist on changing string values in place instead, you may cheat
and use MutableString.
    But the purpose of this class is an educational one: to prevent
    people from inventing their own mutable string class derived
    from UserString and then forgetting to remove (override) the
    __hash__ method inherited from UserString. This would lead to
    errors that would be very hard to track down.
A faster and better solution is to rewrite your program using lists."""
def __init__(self, string=""):
self.data = string
def __hash__(self):
raise TypeError, "unhashable type (it is mutable)"
def __setitem__(self, index, sub):
if index < 0 or index >= len(self.data): raise IndexError
self.data = self.data[:index] + sub + self.data[index+1:]
def __delitem__(self, index):
if index < 0 or index >= len(self.data): raise IndexError
self.data = self.data[:index] + self.data[index+1:]
def __setslice__(self, start, end, sub):
start = max(start, 0); end = max(end, 0)
if isinstance(sub, UserString):
self.data = self.data[:start]+sub.data+self.data[end:]
elif isinstance(sub, StringType) or isinstance(sub, UnicodeType):
self.data = self.data[:start]+sub+self.data[end:]
else:
self.data = self.data[:start]+str(sub)+self.data[end:]
def __delslice__(self, start, end):
start = max(start, 0); end = max(end, 0)
self.data = self.data[:start] + self.data[end:]
def immutable(self):
return UserString(self.data)
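# Illustrative sketch of the mutability MutableString permits:
#   s = MutableString("spam")
#   s[0] = "S"          # in-place edit; a plain str would raise TypeError
#   s.immutable()       # -> hashable UserString("Spam")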
if __name__ == "__main__":
# execute the regression test to stdout, if called as a script:
import os
called_in_dir, called_as = os.path.split(sys.argv[0])
called_in_dir = os.path.abspath(called_in_dir)
called_as, py = os.path.splitext(called_as)
sys.path.append(os.path.join(called_in_dir, 'test'))
if '-q' in sys.argv:
import test_support
test_support.verbose = 0
__import__('test_' + called_as.lower())
| gpl-3.0 | -1,615,208,924,488,896,000 | 42.928571 | 80 | 0.627017 | false |
ctxis/canape | CANAPE.Scripting/Lib/urlparse.py | 76 | 14379 | """Parse (absolute and relative) URLs.
urlparse module is based upon the following RFC specifications.
RFC 3986 (STD66): "Uniform Resource Identifiers" by T. Berners-Lee, R. Fielding
and L. Masinter, January 2005.
RFC 2732 : "Format for Literal IPv6 Addresses in URL's by R.Hinden, B.Carpenter
and L.Masinter, December 1999.
RFC 2396: "Uniform Resource Identifiers (URI)": Generic Syntax by T.
Berners-Lee, R. Fielding, and L. Masinter, August 1998.
RFC 2368: "The mailto URL scheme", by P.Hoffman , L Masinter, J. Zwinski, July 1998.
RFC 1808: "Relative Uniform Resource Locators", by R. Fielding, UC Irvine, June
1995.
RFC 1738: "Uniform Resource Locators (URL)" by T. Berners-Lee, L. Masinter, M.
McCahill, December 1994
RFC 3986 is considered the current standard and any future changes to
urlparse module should conform with it. The urlparse module is
currently not entirely compliant with this RFC due to de facto
scenarios for parsing, and for backward compatibility purposes, some
parsing quirks from older RFCs are retained. The testcases in
test_urlparse.py provides a good indicator of parsing behavior.
"""
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
"urlsplit", "urlunsplit", "parse_qs", "parse_qsl"]
# A classification of schemes ('' means apply by default)
uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
'wais', 'file', 'https', 'shttp', 'mms',
'prospero', 'rtsp', 'rtspu', '', 'sftp']
uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet',
'imap', 'wais', 'file', 'mms', 'https', 'shttp',
'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
'svn', 'svn+ssh', 'sftp','nfs','git', 'git+ssh']
non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
'mms', '', 'sftp']
uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
'gopher', 'rtsp', 'rtspu', 'sip', 'sips', '']
uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
'nntp', 'wais', 'https', 'shttp', 'snews',
'file', 'prospero', '']
# Characters valid in scheme names
scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789'
'+-.')
MAX_CACHE_SIZE = 20
_parse_cache = {}
def clear_cache():
"""Clear the parse cache."""
_parse_cache.clear()
class ResultMixin(object):
"""Shared methods for the parsed result objects."""
@property
def username(self):
netloc = self.netloc
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
userinfo = userinfo.split(":", 1)[0]
return userinfo
return None
@property
def password(self):
netloc = self.netloc
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
return userinfo.split(":", 1)[1]
return None
@property
def hostname(self):
netloc = self.netloc.split('@')[-1]
if '[' in netloc and ']' in netloc:
return netloc.split(']')[0][1:].lower()
elif ':' in netloc:
return netloc.split(':')[0].lower()
elif netloc == '':
return None
else:
return netloc.lower()
@property
def port(self):
netloc = self.netloc.split('@')[-1].split(']')[-1]
if ':' in netloc:
port = netloc.split(':')[1]
return int(port, 10)
else:
return None
from collections import namedtuple
class SplitResult(namedtuple('SplitResult', 'scheme netloc path query fragment'), ResultMixin):
__slots__ = ()
def geturl(self):
return urlunsplit(self)
class ParseResult(namedtuple('ParseResult', 'scheme netloc path params query fragment'), ResultMixin):
__slots__ = ()
def geturl(self):
return urlunparse(self)
def urlparse(url, scheme='', allow_fragments=True):
"""Parse a URL into 6 components:
<scheme>://<netloc>/<path>;<params>?<query>#<fragment>
Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
Note that we don't break the components up in smaller bits
(e.g. netloc is a single string) and we don't expand % escapes."""
tuple = urlsplit(url, scheme, allow_fragments)
scheme, netloc, url, query, fragment = tuple
if scheme in uses_params and ';' in url:
url, params = _splitparams(url)
else:
params = ''
return ParseResult(scheme, netloc, url, params, query, fragment)
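# For example (illustrative URL):
#   urlparse('http://user:[email protected]:8080/path;p?q=1#frag') returns
#   ParseResult(scheme='http', netloc='user:[email protected]:8080',
#               path='/path', params='p', query='q=1', fragment='frag'),
#   and the ResultMixin properties give username='user', password='pass',
#   hostname='host.example.com', port=8080.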
def _splitparams(url):
if '/' in url:
i = url.find(';', url.rfind('/'))
if i < 0:
return url, ''
else:
i = url.find(';')
return url[:i], url[i+1:]
def _splitnetloc(url, start=0):
delim = len(url) # position of end of domain part of url, default is end
for c in '/?#': # look for delimiters; the order is NOT important
wdelim = url.find(c, start) # find first of this delim
if wdelim >= 0: # if found
delim = min(delim, wdelim) # use earliest delim position
return url[start:delim], url[delim:] # return (domain, rest)
def urlsplit(url, scheme='', allow_fragments=True):
"""Parse a URL into 5 components:
<scheme>://<netloc>/<path>?<query>#<fragment>
Return a 5-tuple: (scheme, netloc, path, query, fragment).
Note that we don't break the components up in smaller bits
(e.g. netloc is a single string) and we don't expand % escapes."""
allow_fragments = bool(allow_fragments)
key = url, scheme, allow_fragments, type(url), type(scheme)
cached = _parse_cache.get(key, None)
if cached:
return cached
if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
clear_cache()
netloc = query = fragment = ''
i = url.find(':')
if i > 0:
if url[:i] == 'http': # optimize the common case
scheme = url[:i].lower()
url = url[i+1:]
if url[:2] == '//':
netloc, url = _splitnetloc(url, 2)
if (('[' in netloc and ']' not in netloc) or
(']' in netloc and '[' not in netloc)):
raise ValueError("Invalid IPv6 URL")
if allow_fragments and '#' in url:
url, fragment = url.split('#', 1)
if '?' in url:
url, query = url.split('?', 1)
v = SplitResult(scheme, netloc, url, query, fragment)
_parse_cache[key] = v
return v
for c in url[:i]:
if c not in scheme_chars:
break
else:
try:
# make sure "url" is not actually a port number (in which case
# "scheme" is really part of the path
_testportnum = int(url[i+1:])
except ValueError:
scheme, url = url[:i].lower(), url[i+1:]
if url[:2] == '//':
netloc, url = _splitnetloc(url, 2)
if (('[' in netloc and ']' not in netloc) or
(']' in netloc and '[' not in netloc)):
raise ValueError("Invalid IPv6 URL")
if allow_fragments and scheme in uses_fragment and '#' in url:
url, fragment = url.split('#', 1)
if scheme in uses_query and '?' in url:
url, query = url.split('?', 1)
v = SplitResult(scheme, netloc, url, query, fragment)
_parse_cache[key] = v
return v
def urlunparse(data):
"""Put a parsed URL back together again. This may result in a
slightly different, but equivalent URL, if the URL that was parsed
originally had redundant delimiters, e.g. a ? with an empty query
(the draft states that these are equivalent)."""
scheme, netloc, url, params, query, fragment = data
if params:
url = "%s;%s" % (url, params)
return urlunsplit((scheme, netloc, url, query, fragment))
def urlunsplit(data):
"""Combine the elements of a tuple as returned by urlsplit() into a
complete URL as a string. The data argument can be any five-item iterable.
This may result in a slightly different, but equivalent URL, if the URL that
was parsed originally had unnecessary delimiters (for example, a ? with an
empty query; the RFC states that these are equivalent)."""
scheme, netloc, url, query, fragment = data
if netloc or (scheme and scheme in uses_netloc and url[:2] != '//'):
if url and url[:1] != '/': url = '/' + url
url = '//' + (netloc or '') + url
if scheme:
url = scheme + ':' + url
if query:
url = url + '?' + query
if fragment:
url = url + '#' + fragment
return url
def urljoin(base, url, allow_fragments=True):
"""Join a base URL and a possibly relative URL to form an absolute
interpretation of the latter."""
if not base:
return url
if not url:
return base
bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
urlparse(base, '', allow_fragments)
scheme, netloc, path, params, query, fragment = \
urlparse(url, bscheme, allow_fragments)
if scheme != bscheme or scheme not in uses_relative:
return url
if scheme in uses_netloc:
if netloc:
return urlunparse((scheme, netloc, path,
params, query, fragment))
netloc = bnetloc
if path[:1] == '/':
return urlunparse((scheme, netloc, path,
params, query, fragment))
if not path and not params:
path = bpath
params = bparams
if not query:
query = bquery
return urlunparse((scheme, netloc, path,
params, query, fragment))
segments = bpath.split('/')[:-1] + path.split('/')
# XXX The stuff below is bogus in various ways...
if segments[-1] == '.':
segments[-1] = ''
while '.' in segments:
segments.remove('.')
while 1:
i = 1
n = len(segments) - 1
while i < n:
if (segments[i] == '..'
and segments[i-1] not in ('', '..')):
del segments[i-1:i+1]
break
i = i+1
else:
break
if segments == ['', '..']:
segments[-1] = ''
elif len(segments) >= 2 and segments[-1] == '..':
segments[-2:] = ['']
return urlunparse((scheme, netloc, '/'.join(segments),
params, query, fragment))
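# For example (illustrative URLs):
#   urljoin('http://a/b/c', 'd')      -> 'http://a/b/d'
#   urljoin('http://a/b/c', '/d')     -> 'http://a/d'
#   urljoin('http://a/b/c', '../d')   -> 'http://a/d'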
def urldefrag(url):
"""Removes any existing fragment from URL.
Returns a tuple of the defragmented URL and the fragment. If
the URL contained no fragments, the second element is the
empty string.
"""
if '#' in url:
s, n, p, a, q, frag = urlparse(url)
defrag = urlunparse((s, n, p, a, q, ''))
return defrag, frag
else:
return url, ''
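# Illustrative usage (added for clarity; not part of the original module):
def _demo_urldefrag():
    assert urldefrag('http://example.com/page#sec') == ('http://example.com/page', 'sec')
    # Without a fragment the URL comes back unchanged, paired with ''
    assert urldefrag('http://example.com/page') == ('http://example.com/page', '')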
# unquote method for parse_qs and parse_qsl
# Cannot use directly from urllib as it would create a circular reference
# because urllib uses urlparse methods (urljoin). If you update this function,
# update it also in urllib. This code duplication does not exist in Python3.
_hexdig = '0123456789ABCDEFabcdef'
_hextochr = dict((a+b, chr(int(a+b,16)))
for a in _hexdig for b in _hexdig)
def unquote(s):
"""unquote('abc%20def') -> 'abc def'."""
res = s.split('%')
# fastpath
if len(res) == 1:
return s
s = res[0]
for item in res[1:]:
try:
s += _hextochr[item[:2]] + item[2:]
except KeyError:
s += '%' + item
except UnicodeDecodeError:
s += unichr(int(item[:2], 16)) + item[2:]
return s
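# Illustrative behaviour of the fallback branches above (added for clarity;
# not part of the original module):
def _demo_unquote():
    assert unquote('abc%20def') == 'abc def'
    # A malformed escape hits the KeyError branch and is passed through as-is
    assert unquote('100%!') == '100%!'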
def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
"""Parse a query given as a string argument.
Arguments:
qs: percent-encoded query string to be parsed
keep_blank_values: flag indicating whether blank values in
percent-encoded queries should be treated as blank strings.
A true value indicates that blanks should be retained as
blank strings. The default false value indicates that
blank values are to be ignored and treated as if they were
not included.
strict_parsing: flag indicating what to do with parsing errors.
If false (the default), errors are silently ignored.
If true, errors raise a ValueError exception.
"""
dict = {}
for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
if name in dict:
dict[name].append(value)
else:
dict[name] = [value]
return dict
def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
"""Parse a query given as a string argument.
Arguments:
qs: percent-encoded query string to be parsed
keep_blank_values: flag indicating whether blank values in
percent-encoded queries should be treated as blank strings. A
true value indicates that blanks should be retained as blank
strings. The default false value indicates that blank values
are to be ignored and treated as if they were not included.
strict_parsing: flag indicating what to do with parsing errors. If
false (the default), errors are silently ignored. If true,
errors raise a ValueError exception.
    Returns a list of (name, value) pairs.
"""
pairs = [s2 for s1 in qs.split('&') for s2 in s1.split(';')]
r = []
for name_value in pairs:
if not name_value and not strict_parsing:
continue
nv = name_value.split('=', 1)
if len(nv) != 2:
if strict_parsing:
raise ValueError, "bad query field: %r" % (name_value,)
# Handle case of a control-name with no equal sign
if keep_blank_values:
nv.append('')
else:
continue
if len(nv[1]) or keep_blank_values:
name = unquote(nv[0].replace('+', ' '))
value = unquote(nv[1].replace('+', ' '))
r.append((name, value))
return r
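# Illustrative usage of the two parsers above (added for clarity; not part
# of the original module):
def _demo_parse_qs():
    assert parse_qsl('a=1&a=2;b=3') == [('a', '1'), ('a', '2'), ('b', '3')]
    assert parse_qs('a=1&a=2;b=3') == {'a': ['1', '2'], 'b': ['3']}
    # Blank values are dropped unless keep_blank_values is true
    assert parse_qsl('a=&b=1') == [('b', '1')]
    assert parse_qsl('a=&b=1', keep_blank_values=1) == [('a', ''), ('b', '1')]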
| gpl-3.0 | 2,681,928,501,818,154,000 | 35.219144 | 102 | 0.572432 | false |
gsobczyk/hamster | waflib/Tools/xlcxx.py | 56 | 1468 | #!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2006-2018 (ita)
# Ralf Habacker, 2006 (rh)
# Yinon Ehrlich, 2009
# Michael Kuhn, 2009
from waflib.Tools import ccroot, ar
from waflib.Configure import conf
@conf
def find_xlcxx(conf):
"""
Detects the Aix C++ compiler
"""
cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX')
conf.get_xlc_version(cxx)
conf.env.CXX_NAME = 'xlc++'
@conf
def xlcxx_common_flags(conf):
"""
Flags required for executing the Aix C++ compiler
"""
v = conf.env
v.CXX_SRC_F = []
v.CXX_TGT_F = ['-c', '-o']
if not v.LINK_CXX:
v.LINK_CXX = v.CXX
v.CXXLNK_SRC_F = []
v.CXXLNK_TGT_F = ['-o']
v.CPPPATH_ST = '-I%s'
v.DEFINES_ST = '-D%s'
v.LIB_ST = '-l%s' # template for adding libs
v.LIBPATH_ST = '-L%s' # template for adding libpaths
v.STLIB_ST = '-l%s'
v.STLIBPATH_ST = '-L%s'
v.RPATH_ST = '-Wl,-rpath,%s'
v.SONAME_ST = []
v.SHLIB_MARKER = []
v.STLIB_MARKER = []
v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
v.cxxprogram_PATTERN = '%s'
v.CXXFLAGS_cxxshlib = ['-fPIC']
v.LINKFLAGS_cxxshlib = ['-G', '-Wl,-brtl,-bexpfull']
v.cxxshlib_PATTERN = 'lib%s.so'
v.LINKFLAGS_cxxstlib = []
v.cxxstlib_PATTERN = 'lib%s.a'
def configure(conf):
conf.find_xlcxx()
conf.find_ar()
conf.xlcxx_common_flags()
conf.cxx_load_tools()
conf.cxx_add_flags()
conf.link_add_flags()
| gpl-3.0 | -7,471,754,562,801,903,000 | 21.584615 | 62 | 0.568801 | false |
kaplun/ops | modules/miscutil/lib/upgrades/invenio_2013_09_02_new_bibARXIVPDF.py | 18 | 1424 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import warnings
from invenio.dbquery import run_sql
from invenio.textutils import wait_for_user
depends_on = ['invenio_release_1_1_0']
def info():
return "New pdfchecker (bibARXIVPDF) table"
def do_upgrade():
""" Implement your upgrades here """
run_sql("""CREATE TABLE IF NOT EXISTS bibARXIVPDF (
id_bibrec mediumint(8) unsigned NOT NULL,
status ENUM('ok', 'missing') NOT NULL,
date_harvested datetime NOT NULL,
version tinyint(2) NOT NULL,
PRIMARY KEY (id_bibrec),
KEY status (status)
) ENGINE=MyISAM""")
def estimate():
""" Estimate running time of upgrade in seconds (optional). """
return 1
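# Illustrative row shape enabled by the upgrade above (added for clarity;
# an assumption based on the CREATE TABLE, not part of the original recipe):
#   INSERT INTO bibARXIVPDF (id_bibrec, status, date_harvested, version)
#   VALUES (123, 'ok', NOW(), 1);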
| gpl-2.0 | -1,647,460,054,138,615,300 | 32.904762 | 75 | 0.71559 | false |
dagwieers/ansible | lib/ansible/modules/network/meraki/meraki_content_filtering.py | 7 | 7671 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2019, Kevin Breit (@kbreit) <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: meraki_content_filtering
short_description: Edit Meraki MX content filtering policies
version_added: "2.8"
description:
- Allows for setting policy on content filtering.
options:
auth_key:
description:
      - Authentication key provided by the dashboard. Required if environment variable MERAKI_KEY is not set.
type: str
net_name:
description:
- Name of a network.
aliases: [ network ]
type: str
net_id:
description:
- ID number of a network.
type: str
org_name:
description:
- Name of organization associated to a network.
type: str
org_id:
description:
- ID of organization associated to a network.
type: str
state:
description:
- States that a policy should be created or modified.
choices: [present]
default: present
type: str
allowed_urls:
description:
- List of URL patterns which should be allowed.
type: list
blocked_urls:
description:
- List of URL patterns which should be blocked.
type: list
blocked_categories:
description:
- List of content categories which should be blocked.
- Use the C(meraki_content_filtering_facts) module for a full list of categories.
type: list
category_list_size:
description:
      - Determines whether a network filters for all URLs in a category or only the list of top blocked sites.
choices: [ top sites, full list ]
type: str
author:
- Kevin Breit (@kbreit)
extends_documentation_fragment: meraki
'''
EXAMPLES = r'''
- name: Set single allowed URL pattern
meraki_content_filtering:
auth_key: abc123
org_name: YourOrg
net_name: YourMXNet
allowed_urls:
- "http://www.ansible.com/*"
- name: Set blocked URL category
meraki_content_filtering:
auth_key: abc123
org_name: YourOrg
net_name: YourMXNet
state: present
category_list_size: full list
blocked_categories:
- "Adult and Pornography"
- name: Remove match patterns and categories
meraki_content_filtering:
auth_key: abc123
org_name: YourOrg
net_name: YourMXNet
state: present
category_list_size: full list
allowed_urls: []
blocked_urls: []
'''
RETURN = r'''
data:
description: Information about the created or manipulated object.
returned: info
type: complex
contains:
id:
description: Identification string of network.
returned: success
type: str
sample: N_12345
'''
import os
from ansible.module_utils.basic import AnsibleModule, json, env_fallback
from ansible.module_utils.urls import fetch_url
from ansible.module_utils._text import to_native
from ansible.module_utils.network.meraki.meraki import MerakiModule, meraki_argument_spec
def get_category_dict(meraki, full_list, category):
for i in full_list['categories']:
if i['name'] == category:
return i['id']
meraki.fail_json(msg="{0} is not a valid content filtering category".format(category))
def main():
# define the available arguments/parameters that a user can pass to
# the module
argument_spec = meraki_argument_spec()
argument_spec.update(
net_id=dict(type='str'),
net_name=dict(type='str', aliases=['network']),
state=dict(type='str', default='present', choices=['present']),
allowed_urls=dict(type='list'),
blocked_urls=dict(type='list'),
blocked_categories=dict(type='list'),
category_list_size=dict(type='str', choices=['top sites', 'full list']),
)
# the AnsibleModule object will be our abstraction working with Ansible
# this includes instantiation, a couple of common attr would be the
# args/params passed to the execution, as well as if the module
# supports check mode
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True,
)
meraki = MerakiModule(module, function='content_filtering')
module.params['follow_redirects'] = 'all'
category_urls = {'content_filtering': '/networks/{net_id}/contentFiltering/categories'}
policy_urls = {'content_filtering': '/networks/{net_id}/contentFiltering'}
meraki.url_catalog['categories'] = category_urls
meraki.url_catalog['policy'] = policy_urls
if meraki.params['net_name'] and meraki.params['net_id']:
meraki.fail_json(msg='net_name and net_id are mutually exclusive')
# manipulate or modify the state as needed (this is going to be the
# part where your module will do what it needs to do)
org_id = meraki.params['org_id']
if not org_id:
org_id = meraki.get_org_id(meraki.params['org_name'])
    net_id = meraki.params['net_id']
    if net_id is None:
        nets = meraki.get_nets(org_id=org_id)
        net_id = meraki.get_net_id(org_id, meraki.params['net_name'], data=nets)
if module.params['state'] == 'present':
payload = dict()
        if meraki.params['allowed_urls'] is not None:  # an empty list resets the patterns
            payload['allowedUrlPatterns'] = meraki.params['allowed_urls']
        if meraki.params['blocked_urls'] is not None:  # an empty list resets the patterns
            payload['blockedUrlPatterns'] = meraki.params['blocked_urls']
        if meraki.params['blocked_categories'] is not None:
            if len(meraki.params['blocked_categories']) == 0:  # Corner case for resetting
payload['blockedUrlCategories'] = []
else:
category_path = meraki.construct_path('categories', net_id=net_id)
categories = meraki.request(category_path, method='GET')
payload['blockedUrlCategories'] = []
for category in meraki.params['blocked_categories']:
payload['blockedUrlCategories'].append(get_category_dict(meraki,
categories,
category))
if meraki.params['category_list_size']:
if meraki.params['category_list_size'].lower() == 'top sites':
payload['urlCategoryListSize'] = "topSites"
elif meraki.params['category_list_size'].lower() == 'full list':
payload['urlCategoryListSize'] = "fullList"
path = meraki.construct_path('policy', net_id=net_id)
current = meraki.request(path, method='GET')
proposed = current.copy()
proposed.update(payload)
if module.check_mode:
meraki.result['data'] = payload
meraki.exit_json(**meraki.result)
if meraki.is_update_required(current, payload):
response = meraki.request(path, method='PUT', payload=json.dumps(payload))
meraki.result['data'] = response
meraki.result['changed'] = True
    # in the event of a successful module execution, you will want to simply
    # call AnsibleModule.exit_json(), passing the key/value results
meraki.exit_json(**meraki.result)
if __name__ == '__main__':
main()
| gpl-3.0 | 361,729,657,431,473,340 | 33.554054 | 113 | 0.620388 | false |
spektom/incubator-airflow | airflow/api/common/experimental/get_task_instance.py | 5 | 1648 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Task Instance APIs."""
from datetime import datetime
from airflow.api.common.experimental import check_and_get_dag, check_and_get_dagrun
from airflow.exceptions import TaskInstanceNotFound
from airflow.models import TaskInstance
def get_task_instance(dag_id: str, task_id: str, execution_date: datetime) -> TaskInstance:
"""Return the task object identified by the given dag_id and task_id."""
dag = check_and_get_dag(dag_id, task_id)
dagrun = check_and_get_dagrun(dag=dag, execution_date=execution_date)
# Get task instance object and check that it exists
task_instance = dagrun.get_task_instance(task_id)
if not task_instance:
error_message = ('Task {} instance for date {} not found'
.format(task_id, execution_date))
raise TaskInstanceNotFound(error_message)
return task_instance
| apache-2.0 | 9,167,267,415,565,550,000 | 42.368421 | 91 | 0.740291 | false |
chugunovyar/factoryForBuild | env/lib/python2.7/site-packages/scipy/misc/tests/test_common.py | 23 | 5076 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.testing import (assert_array_equal, assert_almost_equal,
assert_array_almost_equal, assert_equal, assert_)
from scipy.misc import pade, logsumexp, face, ascent
def test_pade_trivial():
nump, denomp = pade([1.0], 0)
assert_array_equal(nump.c, [1.0])
assert_array_equal(denomp.c, [1.0])
def test_pade_4term_exp():
# First four Taylor coefficients of exp(x).
# Unlike poly1d, the first array element is the zero-order term.
an = [1.0, 1.0, 0.5, 1.0/6]
nump, denomp = pade(an, 0)
assert_array_almost_equal(nump.c, [1.0/6, 0.5, 1.0, 1.0])
assert_array_almost_equal(denomp.c, [1.0])
nump, denomp = pade(an, 1)
assert_array_almost_equal(nump.c, [1.0/6, 2.0/3, 1.0])
assert_array_almost_equal(denomp.c, [-1.0/3, 1.0])
nump, denomp = pade(an, 2)
assert_array_almost_equal(nump.c, [1.0/3, 1.0])
assert_array_almost_equal(denomp.c, [1.0/6, -2.0/3, 1.0])
nump, denomp = pade(an, 3)
assert_array_almost_equal(nump.c, [1.0])
assert_array_almost_equal(denomp.c, [-1.0/6, 0.5, -1.0, 1.0])
def test_logsumexp():
# Test whether logsumexp() function correctly handles large inputs.
a = np.arange(200)
desired = np.log(np.sum(np.exp(a)))
assert_almost_equal(logsumexp(a), desired)
# Now test with large numbers
b = [1000, 1000]
desired = 1000.0 + np.log(2.0)
assert_almost_equal(logsumexp(b), desired)
n = 1000
b = np.ones(n) * 10000
desired = 10000.0 + np.log(n)
assert_almost_equal(logsumexp(b), desired)
x = np.array([1e-40] * 1000000)
logx = np.log(x)
X = np.vstack([x, x])
logX = np.vstack([logx, logx])
assert_array_almost_equal(np.exp(logsumexp(logX)), X.sum())
assert_array_almost_equal(np.exp(logsumexp(logX, axis=0)), X.sum(axis=0))
assert_array_almost_equal(np.exp(logsumexp(logX, axis=1)), X.sum(axis=1))
# Handling special values properly
assert_equal(logsumexp(np.inf), np.inf)
assert_equal(logsumexp(-np.inf), -np.inf)
assert_equal(logsumexp(np.nan), np.nan)
assert_equal(logsumexp([-np.inf, -np.inf]), -np.inf)
# Handling an array with different magnitudes on the axes
assert_array_almost_equal(logsumexp([[1e10, 1e-10],
[-1e10, -np.inf]], axis=-1),
[1e10, -1e10])
# Test keeping dimensions
assert_array_almost_equal(logsumexp([[1e10, 1e-10],
[-1e10, -np.inf]],
axis=-1,
keepdims=True),
[[1e10], [-1e10]])
# Test multiple axes
assert_array_almost_equal(logsumexp([[1e10, 1e-10],
[-1e10, -np.inf]],
axis=(-1,-2)),
1e10)
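def _naive_logsumexp(a):
    # Reference sketch (added for illustration; not part of the original
    # suite): logsumexp relies on the stable identity
    #   log(sum(exp(a))) == max(a) + log(sum(exp(a - max(a))))
    # which is why the 1e10/10000 inputs above do not overflow.
    a = np.asarray(a)
    m = np.max(a)
    return m + np.log(np.sum(np.exp(a - m)))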
def test_logsumexp_b():
a = np.arange(200)
b = np.arange(200, 0, -1)
desired = np.log(np.sum(b*np.exp(a)))
assert_almost_equal(logsumexp(a, b=b), desired)
a = [1000, 1000]
b = [1.2, 1.2]
desired = 1000 + np.log(2 * 1.2)
assert_almost_equal(logsumexp(a, b=b), desired)
x = np.array([1e-40] * 100000)
b = np.linspace(1, 1000, 100000)
logx = np.log(x)
X = np.vstack((x, x))
logX = np.vstack((logx, logx))
B = np.vstack((b, b))
assert_array_almost_equal(np.exp(logsumexp(logX, b=B)), (B * X).sum())
assert_array_almost_equal(np.exp(logsumexp(logX, b=B, axis=0)),
(B * X).sum(axis=0))
assert_array_almost_equal(np.exp(logsumexp(logX, b=B, axis=1)),
(B * X).sum(axis=1))
def test_logsumexp_sign():
a = [1,1,1]
b = [1,-1,-1]
r, s = logsumexp(a, b=b, return_sign=True)
assert_almost_equal(r,1)
assert_equal(s,-1)
def test_logsumexp_sign_zero():
a = [1,1]
b = [1,-1]
r, s = logsumexp(a, b=b, return_sign=True)
assert_(not np.isfinite(r))
assert_(not np.isnan(r))
assert_(r < 0)
assert_equal(s,0)
def test_logsumexp_sign_shape():
a = np.ones((1,2,3,4))
b = np.ones_like(a)
r, s = logsumexp(a, axis=2, b=b, return_sign=True)
assert_equal(r.shape, s.shape)
assert_equal(r.shape, (1,2,4))
r, s = logsumexp(a, axis=(1,3), b=b, return_sign=True)
assert_equal(r.shape, s.shape)
assert_equal(r.shape, (1,3))
def test_logsumexp_shape():
a = np.ones((1, 2, 3, 4))
b = np.ones_like(a)
r = logsumexp(a, axis=2, b=b)
assert_equal(r.shape, (1, 2, 4))
r = logsumexp(a, axis=(1, 3), b=b)
assert_equal(r.shape, (1, 3))
def test_logsumexp_b_zero():
a = [1,10000]
b = [1,0]
assert_almost_equal(logsumexp(a, b=b), 1)
def test_logsumexp_b_shape():
a = np.zeros((4,1,2,1))
b = np.ones((3,1,5))
logsumexp(a, b=b)
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
| gpl-3.0 | -4,715,039,854,342,177,000 | 28.511628 | 77 | 0.55595 | false |
vsajip/django | tests/modeltests/prefetch_related/tests.py | 1 | 26898 | from __future__ import absolute_import, unicode_literals
from django.contrib.contenttypes.models import ContentType
from django.db import connection
from django.test import TestCase
from django.test.utils import override_settings
from django.utils import six
from .models import (Author, Book, Reader, Qualification, Teacher, Department,
TaggedItem, Bookmark, AuthorAddress, FavoriteAuthors, AuthorWithAge,
BookWithYear, BookReview, Person, House, Room, Employee, Comment)
class PrefetchRelatedTests(TestCase):
def setUp(self):
self.book1 = Book.objects.create(title="Poems")
self.book2 = Book.objects.create(title="Jane Eyre")
self.book3 = Book.objects.create(title="Wuthering Heights")
self.book4 = Book.objects.create(title="Sense and Sensibility")
self.author1 = Author.objects.create(name="Charlotte",
first_book=self.book1)
self.author2 = Author.objects.create(name="Anne",
first_book=self.book1)
self.author3 = Author.objects.create(name="Emily",
first_book=self.book1)
self.author4 = Author.objects.create(name="Jane",
first_book=self.book4)
self.book1.authors.add(self.author1, self.author2, self.author3)
self.book2.authors.add(self.author1)
self.book3.authors.add(self.author3)
self.book4.authors.add(self.author4)
self.reader1 = Reader.objects.create(name="Amy")
self.reader2 = Reader.objects.create(name="Belinda")
self.reader1.books_read.add(self.book1, self.book4)
self.reader2.books_read.add(self.book2, self.book4)
def test_m2m_forward(self):
with self.assertNumQueries(2):
lists = [list(b.authors.all()) for b in Book.objects.prefetch_related('authors')]
normal_lists = [list(b.authors.all()) for b in Book.objects.all()]
self.assertEqual(lists, normal_lists)
def test_m2m_reverse(self):
with self.assertNumQueries(2):
lists = [list(a.books.all()) for a in Author.objects.prefetch_related('books')]
normal_lists = [list(a.books.all()) for a in Author.objects.all()]
self.assertEqual(lists, normal_lists)
def test_foreignkey_forward(self):
with self.assertNumQueries(2):
books = [a.first_book for a in Author.objects.prefetch_related('first_book')]
normal_books = [a.first_book for a in Author.objects.all()]
self.assertEqual(books, normal_books)
def test_foreignkey_reverse(self):
with self.assertNumQueries(2):
lists = [list(b.first_time_authors.all())
for b in Book.objects.prefetch_related('first_time_authors')]
self.assertQuerysetEqual(self.book2.authors.all(), ["<Author: Charlotte>"])
def test_onetoone_reverse_no_match(self):
# Regression for #17439
with self.assertNumQueries(2):
book = Book.objects.prefetch_related('bookwithyear').all()[0]
with self.assertNumQueries(0):
with self.assertRaises(BookWithYear.DoesNotExist):
book.bookwithyear
def test_survives_clone(self):
with self.assertNumQueries(2):
lists = [list(b.first_time_authors.all())
for b in Book.objects.prefetch_related('first_time_authors').exclude(id=1000)]
def test_len(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related('first_time_authors')
length = len(qs)
lists = [list(b.first_time_authors.all())
for b in qs]
def test_bool(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related('first_time_authors')
x = bool(qs)
lists = [list(b.first_time_authors.all())
for b in qs]
def test_count(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related('first_time_authors')
[b.first_time_authors.count() for b in qs]
def test_exists(self):
with self.assertNumQueries(2):
qs = Book.objects.prefetch_related('first_time_authors')
[b.first_time_authors.exists() for b in qs]
def test_clear(self):
"""
Test that we can clear the behavior by calling prefetch_related()
"""
with self.assertNumQueries(5):
with_prefetch = Author.objects.prefetch_related('books')
without_prefetch = with_prefetch.prefetch_related(None)
lists = [list(a.books.all()) for a in without_prefetch]
def test_m2m_then_m2m(self):
"""
Test we can follow a m2m and another m2m
"""
with self.assertNumQueries(3):
qs = Author.objects.prefetch_related('books__read_by')
lists = [[[six.text_type(r) for r in b.read_by.all()]
for b in a.books.all()]
for a in qs]
self.assertEqual(lists,
[
[["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre
[["Amy"]], # Anne - Poems
[["Amy"], []], # Emily - Poems, Wuthering Heights
[["Amy", "Belinda"]], # Jane - Sense and Sense
])
def test_overriding_prefetch(self):
with self.assertNumQueries(3):
qs = Author.objects.prefetch_related('books', 'books__read_by')
lists = [[[six.text_type(r) for r in b.read_by.all()]
for b in a.books.all()]
for a in qs]
self.assertEqual(lists,
[
[["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre
[["Amy"]], # Anne - Poems
[["Amy"], []], # Emily - Poems, Wuthering Heights
[["Amy", "Belinda"]], # Jane - Sense and Sense
])
with self.assertNumQueries(3):
qs = Author.objects.prefetch_related('books__read_by', 'books')
lists = [[[six.text_type(r) for r in b.read_by.all()]
for b in a.books.all()]
for a in qs]
self.assertEqual(lists,
[
[["Amy"], ["Belinda"]], # Charlotte - Poems, Jane Eyre
[["Amy"]], # Anne - Poems
[["Amy"], []], # Emily - Poems, Wuthering Heights
[["Amy", "Belinda"]], # Jane - Sense and Sense
])
def test_get(self):
"""
Test that objects retrieved with .get() get the prefetch behavior.
"""
        # Need a double-underscore (nested) lookup to exercise traversal
with self.assertNumQueries(3):
author = Author.objects.prefetch_related('books__read_by').get(name="Charlotte")
lists = [[six.text_type(r) for r in b.read_by.all()]
for b in author.books.all()]
self.assertEqual(lists, [["Amy"], ["Belinda"]]) # Poems, Jane Eyre
def test_foreign_key_then_m2m(self):
"""
Test we can follow an m2m relation after a relation like ForeignKey
that doesn't have many objects
"""
with self.assertNumQueries(2):
qs = Author.objects.select_related('first_book').prefetch_related('first_book__read_by')
lists = [[six.text_type(r) for r in a.first_book.read_by.all()]
for a in qs]
self.assertEqual(lists, [["Amy"],
["Amy"],
["Amy"],
["Amy", "Belinda"]])
def test_attribute_error(self):
qs = Reader.objects.all().prefetch_related('books_read__xyz')
with self.assertRaises(AttributeError) as cm:
list(qs)
self.assertTrue('prefetch_related' in str(cm.exception))
def test_invalid_final_lookup(self):
qs = Book.objects.prefetch_related('authors__name')
with self.assertRaises(ValueError) as cm:
list(qs)
self.assertTrue('prefetch_related' in str(cm.exception))
self.assertTrue("name" in str(cm.exception))
class DefaultManagerTests(TestCase):
def setUp(self):
self.qual1 = Qualification.objects.create(name="BA")
self.qual2 = Qualification.objects.create(name="BSci")
self.qual3 = Qualification.objects.create(name="MA")
self.qual4 = Qualification.objects.create(name="PhD")
self.teacher1 = Teacher.objects.create(name="Mr Cleese")
self.teacher2 = Teacher.objects.create(name="Mr Idle")
self.teacher3 = Teacher.objects.create(name="Mr Chapman")
self.teacher1.qualifications.add(self.qual1, self.qual2, self.qual3, self.qual4)
self.teacher2.qualifications.add(self.qual1)
self.teacher3.qualifications.add(self.qual2)
self.dept1 = Department.objects.create(name="English")
self.dept2 = Department.objects.create(name="Physics")
self.dept1.teachers.add(self.teacher1, self.teacher2)
self.dept2.teachers.add(self.teacher1, self.teacher3)
def test_m2m_then_m2m(self):
with self.assertNumQueries(3):
# When we prefetch the teachers, and force the query, we don't want
# the default manager on teachers to immediately get all the related
# qualifications, since this will do one query per teacher.
qs = Department.objects.prefetch_related('teachers')
depts = "".join(["%s department: %s\n" %
(dept.name, ", ".join(six.text_type(t) for t in dept.teachers.all()))
for dept in qs])
self.assertEqual(depts,
"English department: Mr Cleese (BA, BSci, MA, PhD), Mr Idle (BA)\n"
"Physics department: Mr Cleese (BA, BSci, MA, PhD), Mr Chapman (BSci)\n")
class GenericRelationTests(TestCase):
def setUp(self):
book1 = Book.objects.create(title="Winnie the Pooh")
book2 = Book.objects.create(title="Do you like green eggs and spam?")
book3 = Book.objects.create(title="Three Men In A Boat")
reader1 = Reader.objects.create(name="me")
reader2 = Reader.objects.create(name="you")
reader3 = Reader.objects.create(name="someone")
book1.read_by.add(reader1, reader2)
book2.read_by.add(reader2)
book3.read_by.add(reader3)
self.book1, self.book2, self.book3 = book1, book2, book3
self.reader1, self.reader2, self.reader3 = reader1, reader2, reader3
def test_prefetch_GFK(self):
TaggedItem.objects.create(tag="awesome", content_object=self.book1)
TaggedItem.objects.create(tag="great", content_object=self.reader1)
TaggedItem.objects.create(tag="stupid", content_object=self.book2)
TaggedItem.objects.create(tag="amazing", content_object=self.reader3)
# 1 for TaggedItem table, 1 for Book table, 1 for Reader table
with self.assertNumQueries(3):
qs = TaggedItem.objects.prefetch_related('content_object')
list(qs)
def test_prefetch_GFK_nonint_pk(self):
Comment.objects.create(comment="awesome", content_object=self.book1)
# 1 for Comment table, 1 for Book table
with self.assertNumQueries(2):
qs = Comment.objects.prefetch_related('content_object')
[c.content_object for c in qs]
def test_traverse_GFK(self):
"""
Test that we can traverse a 'content_object' with prefetch_related() and
get to related objects on the other side (assuming it is suitably
filtered)
"""
TaggedItem.objects.create(tag="awesome", content_object=self.book1)
TaggedItem.objects.create(tag="awesome", content_object=self.book2)
TaggedItem.objects.create(tag="awesome", content_object=self.book3)
TaggedItem.objects.create(tag="awesome", content_object=self.reader1)
TaggedItem.objects.create(tag="awesome", content_object=self.reader2)
ct = ContentType.objects.get_for_model(Book)
# We get 3 queries - 1 for main query, 1 for content_objects since they
# all use the same table, and 1 for the 'read_by' relation.
with self.assertNumQueries(3):
# If we limit to books, we know that they will have 'read_by'
# attributes, so the following makes sense:
qs = TaggedItem.objects.filter(content_type=ct, tag='awesome').prefetch_related('content_object__read_by')
readers_of_awesome_books = set([r.name for tag in qs
for r in tag.content_object.read_by.all()])
self.assertEqual(readers_of_awesome_books, set(["me", "you", "someone"]))
def test_nullable_GFK(self):
TaggedItem.objects.create(tag="awesome", content_object=self.book1,
created_by=self.reader1)
TaggedItem.objects.create(tag="great", content_object=self.book2)
TaggedItem.objects.create(tag="rubbish", content_object=self.book3)
with self.assertNumQueries(2):
result = [t.created_by for t in TaggedItem.objects.prefetch_related('created_by')]
self.assertEqual(result,
[t.created_by for t in TaggedItem.objects.all()])
def test_generic_relation(self):
b = Bookmark.objects.create(url='http://www.djangoproject.com/')
t1 = TaggedItem.objects.create(content_object=b, tag='django')
t2 = TaggedItem.objects.create(content_object=b, tag='python')
with self.assertNumQueries(2):
tags = [t.tag for b in Bookmark.objects.prefetch_related('tags')
for t in b.tags.all()]
self.assertEqual(sorted(tags), ["django", "python"])
class MultiTableInheritanceTest(TestCase):
def setUp(self):
self.book1 = BookWithYear.objects.create(
title="Poems", published_year=2010)
self.book2 = BookWithYear.objects.create(
title="More poems", published_year=2011)
self.author1 = AuthorWithAge.objects.create(
name='Jane', first_book=self.book1, age=50)
self.author2 = AuthorWithAge.objects.create(
name='Tom', first_book=self.book1, age=49)
self.author3 = AuthorWithAge.objects.create(
name='Robert', first_book=self.book2, age=48)
self.authorAddress = AuthorAddress.objects.create(
author=self.author1, address='SomeStreet 1')
self.book2.aged_authors.add(self.author2, self.author3)
self.br1 = BookReview.objects.create(
book=self.book1, notes="review book1")
self.br2 = BookReview.objects.create(
book=self.book2, notes="review book2")
def test_foreignkey(self):
with self.assertNumQueries(2):
qs = AuthorWithAge.objects.prefetch_related('addresses')
addresses = [[six.text_type(address) for address in obj.addresses.all()]
for obj in qs]
self.assertEqual(addresses, [[six.text_type(self.authorAddress)], [], []])
def test_foreignkey_to_inherited(self):
with self.assertNumQueries(2):
qs = BookReview.objects.prefetch_related('book')
titles = [obj.book.title for obj in qs]
        self.assertEqual(titles, ["Poems", "More poems"])
def test_m2m_to_inheriting_model(self):
qs = AuthorWithAge.objects.prefetch_related('books_with_year')
with self.assertNumQueries(2):
lst = [[six.text_type(book) for book in author.books_with_year.all()]
for author in qs]
qs = AuthorWithAge.objects.all()
lst2 = [[six.text_type(book) for book in author.books_with_year.all()]
for author in qs]
self.assertEqual(lst, lst2)
qs = BookWithYear.objects.prefetch_related('aged_authors')
with self.assertNumQueries(2):
lst = [[six.text_type(author) for author in book.aged_authors.all()]
for book in qs]
qs = BookWithYear.objects.all()
lst2 = [[six.text_type(author) for author in book.aged_authors.all()]
for book in qs]
self.assertEqual(lst, lst2)
def test_parent_link_prefetch(self):
with self.assertNumQueries(2):
[a.author for a in AuthorWithAge.objects.prefetch_related('author')]
@override_settings(DEBUG=True)
def test_child_link_prefetch(self):
with self.assertNumQueries(2):
l = [a.authorwithage for a in Author.objects.prefetch_related('authorwithage')]
# Regression for #18090: the prefetching query must include an IN clause.
# Note that on Oracle the table name is upper case in the generated SQL,
# thus the .lower() call.
sql = connection.queries[-1]['sql'].lower()
# Some backends have SQL as text, others have it as bytes :-(
if isinstance(sql, six.text_type):
sql = sql.encode('utf-8')
self.assertIn(b'authorwithage', sql)
self.assertIn(b' in ', sql)
self.assertEqual(l, [a.authorwithage for a in Author.objects.all()])
class ForeignKeyToFieldTest(TestCase):
def setUp(self):
self.book = Book.objects.create(title="Poems")
self.author1 = Author.objects.create(name='Jane', first_book=self.book)
self.author2 = Author.objects.create(name='Tom', first_book=self.book)
self.author3 = Author.objects.create(name='Robert', first_book=self.book)
self.authorAddress = AuthorAddress.objects.create(
author=self.author1, address='SomeStreet 1'
)
FavoriteAuthors.objects.create(author=self.author1,
likes_author=self.author2)
FavoriteAuthors.objects.create(author=self.author2,
likes_author=self.author3)
FavoriteAuthors.objects.create(author=self.author3,
likes_author=self.author1)
def test_foreignkey(self):
with self.assertNumQueries(2):
qs = Author.objects.prefetch_related('addresses')
addresses = [[six.text_type(address) for address in obj.addresses.all()]
for obj in qs]
self.assertEqual(addresses, [[six.text_type(self.authorAddress)], [], []])
def test_m2m(self):
with self.assertNumQueries(3):
qs = Author.objects.all().prefetch_related('favorite_authors', 'favors_me')
favorites = [(
[six.text_type(i_like) for i_like in author.favorite_authors.all()],
[six.text_type(likes_me) for likes_me in author.favors_me.all()]
) for author in qs]
self.assertEqual(
favorites,
[
([six.text_type(self.author2)],[six.text_type(self.author3)]),
([six.text_type(self.author3)],[six.text_type(self.author1)]),
([six.text_type(self.author1)],[six.text_type(self.author2)])
]
)
class LookupOrderingTest(TestCase):
"""
Test cases that demonstrate that ordering of lookups is important, and
ensure it is preserved.
"""
def setUp(self):
self.person1 = Person.objects.create(name="Joe")
self.person2 = Person.objects.create(name="Mary")
self.house1 = House.objects.create(address="123 Main St")
self.house2 = House.objects.create(address="45 Side St")
self.house3 = House.objects.create(address="6 Downing St")
self.house4 = House.objects.create(address="7 Regents St")
self.room1_1 = Room.objects.create(name="Dining room", house=self.house1)
self.room1_2 = Room.objects.create(name="Lounge", house=self.house1)
self.room1_3 = Room.objects.create(name="Kitchen", house=self.house1)
self.room2_1 = Room.objects.create(name="Dining room", house=self.house2)
self.room2_2 = Room.objects.create(name="Lounge", house=self.house2)
self.room3_1 = Room.objects.create(name="Dining room", house=self.house3)
self.room3_2 = Room.objects.create(name="Lounge", house=self.house3)
self.room3_3 = Room.objects.create(name="Kitchen", house=self.house3)
self.room4_1 = Room.objects.create(name="Dining room", house=self.house4)
self.room4_2 = Room.objects.create(name="Lounge", house=self.house4)
self.person1.houses.add(self.house1, self.house2)
self.person2.houses.add(self.house3, self.house4)
def test_order(self):
with self.assertNumQueries(4):
# The following two queries must be done in the same order as written,
# otherwise 'primary_house' will cause non-prefetched lookups
qs = Person.objects.prefetch_related('houses__rooms',
'primary_house__occupants')
[list(p.primary_house.occupants.all()) for p in qs]
class NullableTest(TestCase):
def setUp(self):
boss = Employee.objects.create(name="Peter")
worker1 = Employee.objects.create(name="Joe", boss=boss)
worker2 = Employee.objects.create(name="Angela", boss=boss)
def test_traverse_nullable(self):
# Because we use select_related() for 'boss', it doesn't need to be
# prefetched, but we can still traverse it although it contains some nulls
with self.assertNumQueries(2):
qs = Employee.objects.select_related('boss').prefetch_related('boss__serfs')
co_serfs = [list(e.boss.serfs.all()) if e.boss is not None else []
for e in qs]
qs2 = Employee.objects.select_related('boss')
co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else []
for e in qs2]
self.assertEqual(co_serfs, co_serfs2)
def test_prefetch_nullable(self):
# One for main employee, one for boss, one for serfs
with self.assertNumQueries(3):
qs = Employee.objects.prefetch_related('boss__serfs')
co_serfs = [list(e.boss.serfs.all()) if e.boss is not None else []
for e in qs]
qs2 = Employee.objects.all()
co_serfs2 = [list(e.boss.serfs.all()) if e.boss is not None else []
for e in qs2]
self.assertEqual(co_serfs, co_serfs2)
def test_in_bulk(self):
"""
        in_bulk() correctly prefetches objects because it does not use
        .iterator() directly.
"""
boss1 = Employee.objects.create(name="Peter")
boss2 = Employee.objects.create(name="Jack")
with self.assertNumQueries(2):
# Check that prefetch is done and it does not cause any errors.
bulk = Employee.objects.prefetch_related('serfs').in_bulk([boss1.pk, boss2.pk])
for b in bulk.values():
list(b.serfs.all())
class MultiDbTests(TestCase):
multi_db = True
def test_using_is_honored_m2m(self):
B = Book.objects.using('other')
A = Author.objects.using('other')
book1 = B.create(title="Poems")
book2 = B.create(title="Jane Eyre")
book3 = B.create(title="Wuthering Heights")
book4 = B.create(title="Sense and Sensibility")
author1 = A.create(name="Charlotte", first_book=book1)
author2 = A.create(name="Anne", first_book=book1)
author3 = A.create(name="Emily", first_book=book1)
author4 = A.create(name="Jane", first_book=book4)
book1.authors.add(author1, author2, author3)
book2.authors.add(author1)
book3.authors.add(author3)
book4.authors.add(author4)
# Forward
qs1 = B.prefetch_related('authors')
with self.assertNumQueries(2, using='other'):
books = "".join(["%s (%s)\n" %
(book.title, ", ".join(a.name for a in book.authors.all()))
for book in qs1])
self.assertEqual(books,
"Poems (Charlotte, Anne, Emily)\n"
"Jane Eyre (Charlotte)\n"
"Wuthering Heights (Emily)\n"
"Sense and Sensibility (Jane)\n")
# Reverse
qs2 = A.prefetch_related('books')
with self.assertNumQueries(2, using='other'):
authors = "".join(["%s: %s\n" %
(author.name, ", ".join(b.title for b in author.books.all()))
for author in qs2])
self.assertEqual(authors,
"Charlotte: Poems, Jane Eyre\n"
"Anne: Poems\n"
"Emily: Poems, Wuthering Heights\n"
"Jane: Sense and Sensibility\n")
def test_using_is_honored_fkey(self):
B = Book.objects.using('other')
A = Author.objects.using('other')
book1 = B.create(title="Poems")
book2 = B.create(title="Sense and Sensibility")
author1 = A.create(name="Charlotte Bronte", first_book=book1)
author2 = A.create(name="Jane Austen", first_book=book2)
# Forward
with self.assertNumQueries(2, using='other'):
books = ", ".join(a.first_book.title for a in A.prefetch_related('first_book'))
self.assertEqual("Poems, Sense and Sensibility", books)
# Reverse
with self.assertNumQueries(2, using='other'):
books = "".join("%s (%s)\n" %
(b.title, ", ".join(a.name for a in b.first_time_authors.all()))
for b in B.prefetch_related('first_time_authors'))
self.assertEqual(books,
"Poems (Charlotte Bronte)\n"
"Sense and Sensibility (Jane Austen)\n")
def test_using_is_honored_inheritance(self):
B = BookWithYear.objects.using('other')
A = AuthorWithAge.objects.using('other')
book1 = B.create(title="Poems", published_year=2010)
book2 = B.create(title="More poems", published_year=2011)
author1 = A.create(name='Jane', first_book=book1, age=50)
author2 = A.create(name='Tom', first_book=book1, age=49)
# parent link
with self.assertNumQueries(2, using='other'):
authors = ", ".join(a.author.name for a in A.prefetch_related('author'))
self.assertEqual(authors, "Jane, Tom")
# child link
with self.assertNumQueries(2, using='other'):
ages = ", ".join(str(a.authorwithage.age) for a in A.prefetch_related('authorwithage'))
self.assertEqual(ages, "50, 49")
| bsd-3-clause | -5,650,380,728,051,720,000 | 42.807818 | 118 | 0.590193 | false |
Eric89GXL/numpy | numpy/core/tests/test_shape_base.py | 8 | 20018 | from __future__ import division, absolute_import, print_function
import warnings
import numpy as np
from numpy.core import (
array, arange, atleast_1d, atleast_2d, atleast_3d, block, vstack, hstack,
newaxis, concatenate, stack
)
from numpy.testing import (
assert_, assert_raises, assert_array_equal, assert_equal,
assert_raises_regex, assert_almost_equal
)
from numpy.compat import long
class TestAtleast1d(object):
def test_0D_array(self):
a = array(1)
b = array(2)
res = [atleast_1d(a), atleast_1d(b)]
desired = [array([1]), array([2])]
assert_array_equal(res, desired)
def test_1D_array(self):
a = array([1, 2])
b = array([2, 3])
res = [atleast_1d(a), atleast_1d(b)]
desired = [array([1, 2]), array([2, 3])]
assert_array_equal(res, desired)
def test_2D_array(self):
a = array([[1, 2], [1, 2]])
b = array([[2, 3], [2, 3]])
res = [atleast_1d(a), atleast_1d(b)]
desired = [a, b]
assert_array_equal(res, desired)
def test_3D_array(self):
a = array([[1, 2], [1, 2]])
b = array([[2, 3], [2, 3]])
a = array([a, a])
b = array([b, b])
res = [atleast_1d(a), atleast_1d(b)]
desired = [a, b]
assert_array_equal(res, desired)
def test_r1array(self):
""" Test to make sure equivalent Travis O's r1array function
"""
assert_(atleast_1d(3).shape == (1,))
assert_(atleast_1d(3j).shape == (1,))
assert_(atleast_1d(long(3)).shape == (1,))
assert_(atleast_1d(3.0).shape == (1,))
assert_(atleast_1d([[2, 3], [4, 5]]).shape == (2, 2))
class TestAtleast2d(object):
def test_0D_array(self):
a = array(1)
b = array(2)
res = [atleast_2d(a), atleast_2d(b)]
desired = [array([[1]]), array([[2]])]
assert_array_equal(res, desired)
def test_1D_array(self):
a = array([1, 2])
b = array([2, 3])
res = [atleast_2d(a), atleast_2d(b)]
desired = [array([[1, 2]]), array([[2, 3]])]
assert_array_equal(res, desired)
def test_2D_array(self):
a = array([[1, 2], [1, 2]])
b = array([[2, 3], [2, 3]])
res = [atleast_2d(a), atleast_2d(b)]
desired = [a, b]
assert_array_equal(res, desired)
def test_3D_array(self):
a = array([[1, 2], [1, 2]])
b = array([[2, 3], [2, 3]])
a = array([a, a])
b = array([b, b])
res = [atleast_2d(a), atleast_2d(b)]
desired = [a, b]
assert_array_equal(res, desired)
def test_r2array(self):
""" Test to make sure equivalent Travis O's r2array function
"""
assert_(atleast_2d(3).shape == (1, 1))
assert_(atleast_2d([3j, 1]).shape == (1, 2))
assert_(atleast_2d([[[3, 1], [4, 5]], [[3, 5], [1, 2]]]).shape == (2, 2, 2))
class TestAtleast3d(object):
def test_0D_array(self):
a = array(1)
b = array(2)
res = [atleast_3d(a), atleast_3d(b)]
desired = [array([[[1]]]), array([[[2]]])]
assert_array_equal(res, desired)
def test_1D_array(self):
a = array([1, 2])
b = array([2, 3])
res = [atleast_3d(a), atleast_3d(b)]
desired = [array([[[1], [2]]]), array([[[2], [3]]])]
assert_array_equal(res, desired)
def test_2D_array(self):
a = array([[1, 2], [1, 2]])
b = array([[2, 3], [2, 3]])
res = [atleast_3d(a), atleast_3d(b)]
desired = [a[:,:, newaxis], b[:,:, newaxis]]
assert_array_equal(res, desired)
def test_3D_array(self):
a = array([[1, 2], [1, 2]])
b = array([[2, 3], [2, 3]])
a = array([a, a])
b = array([b, b])
res = [atleast_3d(a), atleast_3d(b)]
desired = [a, b]
assert_array_equal(res, desired)
class TestHstack(object):
def test_non_iterable(self):
assert_raises(TypeError, hstack, 1)
def test_empty_input(self):
assert_raises(ValueError, hstack, ())
def test_0D_array(self):
a = array(1)
b = array(2)
res = hstack([a, b])
desired = array([1, 2])
assert_array_equal(res, desired)
def test_1D_array(self):
a = array([1])
b = array([2])
res = hstack([a, b])
desired = array([1, 2])
assert_array_equal(res, desired)
def test_2D_array(self):
a = array([[1], [2]])
b = array([[1], [2]])
res = hstack([a, b])
desired = array([[1, 1], [2, 2]])
assert_array_equal(res, desired)
class TestVstack(object):
def test_non_iterable(self):
assert_raises(TypeError, vstack, 1)
def test_empty_input(self):
assert_raises(ValueError, vstack, ())
def test_0D_array(self):
a = array(1)
b = array(2)
res = vstack([a, b])
desired = array([[1], [2]])
assert_array_equal(res, desired)
def test_1D_array(self):
a = array([1])
b = array([2])
res = vstack([a, b])
desired = array([[1], [2]])
assert_array_equal(res, desired)
def test_2D_array(self):
a = array([[1], [2]])
b = array([[1], [2]])
res = vstack([a, b])
desired = array([[1], [2], [1], [2]])
assert_array_equal(res, desired)
def test_2D_array2(self):
a = array([1, 2])
b = array([1, 2])
res = vstack([a, b])
desired = array([[1, 2], [1, 2]])
assert_array_equal(res, desired)
class TestConcatenate(object):
def test_exceptions(self):
# test axis must be in bounds
for ndim in [1, 2, 3]:
a = np.ones((1,)*ndim)
np.concatenate((a, a), axis=0) # OK
assert_raises(np.AxisError, np.concatenate, (a, a), axis=ndim)
assert_raises(np.AxisError, np.concatenate, (a, a), axis=-(ndim + 1))
# Scalars cannot be concatenated
assert_raises(ValueError, concatenate, (0,))
assert_raises(ValueError, concatenate, (np.array(0),))
# test shapes must match except for concatenation axis
a = np.ones((1, 2, 3))
b = np.ones((2, 2, 3))
axis = list(range(3))
for i in range(3):
np.concatenate((a, b), axis=axis[0]) # OK
assert_raises(ValueError, np.concatenate, (a, b), axis=axis[1])
assert_raises(ValueError, np.concatenate, (a, b), axis=axis[2])
a = np.moveaxis(a, -1, 0)
b = np.moveaxis(b, -1, 0)
axis.append(axis.pop(0))
# No arrays to concatenate raises ValueError
assert_raises(ValueError, concatenate, ())
def test_concatenate_axis_None(self):
a = np.arange(4, dtype=np.float64).reshape((2, 2))
b = list(range(3))
c = ['x']
r = np.concatenate((a, a), axis=None)
assert_equal(r.dtype, a.dtype)
assert_equal(r.ndim, 1)
r = np.concatenate((a, b), axis=None)
assert_equal(r.size, a.size + len(b))
assert_equal(r.dtype, a.dtype)
r = np.concatenate((a, b, c), axis=None)
d = array(['0.0', '1.0', '2.0', '3.0',
'0', '1', '2', 'x'])
assert_array_equal(r, d)
out = np.zeros(a.size + len(b))
r = np.concatenate((a, b), axis=None)
rout = np.concatenate((a, b), axis=None, out=out)
assert_(out is rout)
assert_equal(r, rout)
def test_large_concatenate_axis_None(self):
# When no axis is given, concatenate uses flattened versions.
# This also had a bug with many arrays (see gh-5979).
x = np.arange(1, 100)
r = np.concatenate(x, None)
assert_array_equal(x, r)
# This should probably be deprecated:
r = np.concatenate(x, 100) # axis is >= MAXDIMS
assert_array_equal(x, r)
def test_concatenate(self):
# Test concatenate function
# One sequence returns unmodified (but as array)
r4 = list(range(4))
assert_array_equal(concatenate((r4,)), r4)
# Any sequence
assert_array_equal(concatenate((tuple(r4),)), r4)
assert_array_equal(concatenate((array(r4),)), r4)
# 1D default concatenation
r3 = list(range(3))
assert_array_equal(concatenate((r4, r3)), r4 + r3)
# Mixed sequence types
assert_array_equal(concatenate((tuple(r4), r3)), r4 + r3)
assert_array_equal(concatenate((array(r4), r3)), r4 + r3)
# Explicit axis specification
assert_array_equal(concatenate((r4, r3), 0), r4 + r3)
# Including negative
assert_array_equal(concatenate((r4, r3), -1), r4 + r3)
# 2D
a23 = array([[10, 11, 12], [13, 14, 15]])
a13 = array([[0, 1, 2]])
res = array([[10, 11, 12], [13, 14, 15], [0, 1, 2]])
assert_array_equal(concatenate((a23, a13)), res)
assert_array_equal(concatenate((a23, a13), 0), res)
assert_array_equal(concatenate((a23.T, a13.T), 1), res.T)
assert_array_equal(concatenate((a23.T, a13.T), -1), res.T)
        # Arrays must match shape
assert_raises(ValueError, concatenate, (a23.T, a13.T), 0)
# 3D
res = arange(2 * 3 * 7).reshape((2, 3, 7))
a0 = res[..., :4]
a1 = res[..., 4:6]
a2 = res[..., 6:]
assert_array_equal(concatenate((a0, a1, a2), 2), res)
assert_array_equal(concatenate((a0, a1, a2), -1), res)
assert_array_equal(concatenate((a0.T, a1.T, a2.T), 0), res.T)
out = res.copy()
rout = concatenate((a0, a1, a2), 2, out=out)
assert_(out is rout)
assert_equal(res, rout)
def test_bad_out_shape(self):
a = array([1, 2])
b = array([3, 4])
assert_raises(ValueError, concatenate, (a, b), out=np.empty(5))
assert_raises(ValueError, concatenate, (a, b), out=np.empty((4,1)))
assert_raises(ValueError, concatenate, (a, b), out=np.empty((1,4)))
concatenate((a, b), out=np.empty(4))
def test_out_dtype(self):
out = np.empty(4, np.float32)
res = concatenate((array([1, 2]), array([3, 4])), out=out)
assert_(out is res)
out = np.empty(4, np.complex64)
res = concatenate((array([0.1, 0.2]), array([0.3, 0.4])), out=out)
assert_(out is res)
# invalid cast
out = np.empty(4, np.int32)
assert_raises(TypeError, concatenate,
(array([0.1, 0.2]), array([0.3, 0.4])), out=out)
def test_stack():
# non-iterable input
assert_raises(TypeError, stack, 1)
# 0d input
for input_ in [(1, 2, 3),
[np.int32(1), np.int32(2), np.int32(3)],
[np.array(1), np.array(2), np.array(3)]]:
assert_array_equal(stack(input_), [1, 2, 3])
# 1d input examples
a = np.array([1, 2, 3])
b = np.array([4, 5, 6])
r1 = array([[1, 2, 3], [4, 5, 6]])
assert_array_equal(np.stack((a, b)), r1)
assert_array_equal(np.stack((a, b), axis=1), r1.T)
# all input types
assert_array_equal(np.stack(list([a, b])), r1)
assert_array_equal(np.stack(array([a, b])), r1)
# all shapes for 1d input
arrays = [np.random.randn(3) for _ in range(10)]
axes = [0, 1, -1, -2]
expected_shapes = [(10, 3), (3, 10), (3, 10), (10, 3)]
for axis, expected_shape in zip(axes, expected_shapes):
assert_equal(np.stack(arrays, axis).shape, expected_shape)
assert_raises_regex(np.AxisError, 'out of bounds', stack, arrays, axis=2)
assert_raises_regex(np.AxisError, 'out of bounds', stack, arrays, axis=-3)
# all shapes for 2d input
arrays = [np.random.randn(3, 4) for _ in range(10)]
axes = [0, 1, 2, -1, -2, -3]
expected_shapes = [(10, 3, 4), (3, 10, 4), (3, 4, 10),
(3, 4, 10), (3, 10, 4), (10, 3, 4)]
for axis, expected_shape in zip(axes, expected_shapes):
assert_equal(np.stack(arrays, axis).shape, expected_shape)
# empty arrays
assert_(stack([[], [], []]).shape == (3, 0))
assert_(stack([[], [], []], axis=1).shape == (0, 3))
# edge cases
assert_raises_regex(ValueError, 'need at least one array', stack, [])
assert_raises_regex(ValueError, 'must have the same shape',
stack, [1, np.arange(3)])
assert_raises_regex(ValueError, 'must have the same shape',
stack, [np.arange(3), 1])
assert_raises_regex(ValueError, 'must have the same shape',
stack, [np.arange(3), 1], axis=1)
assert_raises_regex(ValueError, 'must have the same shape',
stack, [np.zeros((3, 3)), np.zeros(3)], axis=1)
assert_raises_regex(ValueError, 'must have the same shape',
stack, [np.arange(2), np.arange(3)])
class TestBlock(object):
def test_block_simple_row_wise(self):
a_2d = np.ones((2, 2))
b_2d = 2 * a_2d
desired = np.array([[1, 1, 2, 2],
[1, 1, 2, 2]])
result = block([a_2d, b_2d])
assert_equal(desired, result)
def test_block_simple_column_wise(self):
a_2d = np.ones((2, 2))
b_2d = 2 * a_2d
expected = np.array([[1, 1],
[1, 1],
[2, 2],
[2, 2]])
result = block([[a_2d], [b_2d]])
assert_equal(expected, result)
def test_block_with_1d_arrays_row_wise(self):
        # 1-D vectors are treated as row arrays
a = np.array([1, 2, 3])
b = np.array([2, 3, 4])
expected = np.array([1, 2, 3, 2, 3, 4])
result = block([a, b])
assert_equal(expected, result)
def test_block_with_1d_arrays_multiple_rows(self):
a = np.array([1, 2, 3])
b = np.array([2, 3, 4])
expected = np.array([[1, 2, 3, 2, 3, 4],
[1, 2, 3, 2, 3, 4]])
result = block([[a, b], [a, b]])
assert_equal(expected, result)
def test_block_with_1d_arrays_column_wise(self):
        # 1-D vectors are treated as row arrays
a_1d = np.array([1, 2, 3])
b_1d = np.array([2, 3, 4])
expected = np.array([[1, 2, 3],
[2, 3, 4]])
result = block([[a_1d], [b_1d]])
assert_equal(expected, result)
def test_block_mixed_1d_and_2d(self):
a_2d = np.ones((2, 2))
b_1d = np.array([2, 2])
result = block([[a_2d], [b_1d]])
expected = np.array([[1, 1],
[1, 1],
[2, 2]])
assert_equal(expected, result)
def test_block_complicated(self):
# a bit more complicated
one_2d = np.array([[1, 1, 1]])
two_2d = np.array([[2, 2, 2]])
three_2d = np.array([[3, 3, 3, 3, 3, 3]])
four_1d = np.array([4, 4, 4, 4, 4, 4])
five_0d = np.array(5)
six_1d = np.array([6, 6, 6, 6, 6])
zero_2d = np.zeros((2, 6))
expected = np.array([[1, 1, 1, 2, 2, 2],
[3, 3, 3, 3, 3, 3],
[4, 4, 4, 4, 4, 4],
[5, 6, 6, 6, 6, 6],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]])
result = block([[one_2d, two_2d],
[three_2d],
[four_1d],
[five_0d, six_1d],
[zero_2d]])
assert_equal(result, expected)
def test_nested(self):
one = np.array([1, 1, 1])
two = np.array([[2, 2, 2], [2, 2, 2], [2, 2, 2]])
three = np.array([3, 3, 3])
four = np.array([4, 4, 4])
five = np.array(5)
six = np.array([6, 6, 6, 6, 6])
zero = np.zeros((2, 6))
result = np.block([
[
np.block([
[one],
[three],
[four]
]),
two
],
[five, six],
[zero]
])
expected = np.array([[1, 1, 1, 2, 2, 2],
[3, 3, 3, 2, 2, 2],
[4, 4, 4, 2, 2, 2],
[5, 6, 6, 6, 6, 6],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]])
assert_equal(result, expected)
def test_3d(self):
a000 = np.ones((2, 2, 2), int) * 1
a100 = np.ones((3, 2, 2), int) * 2
a010 = np.ones((2, 3, 2), int) * 3
a001 = np.ones((2, 2, 3), int) * 4
a011 = np.ones((2, 3, 3), int) * 5
a101 = np.ones((3, 2, 3), int) * 6
a110 = np.ones((3, 3, 2), int) * 7
a111 = np.ones((3, 3, 3), int) * 8
result = np.block([
[
[a000, a001],
[a010, a011],
],
[
[a100, a101],
[a110, a111],
]
])
expected = array([[[1, 1, 4, 4, 4],
[1, 1, 4, 4, 4],
[3, 3, 5, 5, 5],
[3, 3, 5, 5, 5],
[3, 3, 5, 5, 5]],
[[1, 1, 4, 4, 4],
[1, 1, 4, 4, 4],
[3, 3, 5, 5, 5],
[3, 3, 5, 5, 5],
[3, 3, 5, 5, 5]],
[[2, 2, 6, 6, 6],
[2, 2, 6, 6, 6],
[7, 7, 8, 8, 8],
[7, 7, 8, 8, 8],
[7, 7, 8, 8, 8]],
[[2, 2, 6, 6, 6],
[2, 2, 6, 6, 6],
[7, 7, 8, 8, 8],
[7, 7, 8, 8, 8],
[7, 7, 8, 8, 8]],
[[2, 2, 6, 6, 6],
[2, 2, 6, 6, 6],
[7, 7, 8, 8, 8],
[7, 7, 8, 8, 8],
[7, 7, 8, 8, 8]]])
assert_array_equal(result, expected)
def test_block_with_mismatched_shape(self):
a = np.array([0, 0])
b = np.eye(2)
assert_raises(ValueError, np.block, [a, b])
assert_raises(ValueError, np.block, [b, a])
def test_no_lists(self):
assert_equal(np.block(1), np.array(1))
assert_equal(np.block(np.eye(3)), np.eye(3))
def test_invalid_nesting(self):
msg = 'depths are mismatched'
assert_raises_regex(ValueError, msg, np.block, [1, [2]])
assert_raises_regex(ValueError, msg, np.block, [1, []])
assert_raises_regex(ValueError, msg, np.block, [[1], 2])
assert_raises_regex(ValueError, msg, np.block, [[], 2])
assert_raises_regex(ValueError, msg, np.block, [
[[1], [2]],
[[3, 4]],
[5] # missing brackets
])
def test_empty_lists(self):
assert_raises_regex(ValueError, 'empty', np.block, [])
assert_raises_regex(ValueError, 'empty', np.block, [[]])
assert_raises_regex(ValueError, 'empty', np.block, [[1], []])
def test_tuple(self):
assert_raises_regex(TypeError, 'tuple', np.block, ([1, 2], [3, 4]))
assert_raises_regex(TypeError, 'tuple', np.block, [(1, 2), (3, 4)])
def test_different_ndims(self):
a = 1.
b = 2 * np.ones((1, 2))
c = 3 * np.ones((1, 1, 3))
result = np.block([a, b, c])
expected = np.array([[[1., 2., 2., 3., 3., 3.]]])
assert_equal(result, expected)
def test_different_ndims_depths(self):
a = 1.
b = 2 * np.ones((1, 2))
c = 3 * np.ones((1, 2, 3))
result = np.block([[a, b], [c]])
expected = np.array([[[1., 2., 2.],
[3., 3., 3.],
[3., 3., 3.]]])
assert_equal(result, expected)
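    def _demo_block_nesting(self):
        # Illustration (added; not part of the original suite): the innermost
        # lists concatenate along the last axis, each outer level along the
        # next axis out.
        a = np.ones((2, 2))
        assert_equal(np.block([[a, a], [a, a]]), np.ones((4, 4)))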
| bsd-3-clause | 2,054,650,482,986,785,300 | 33.395189 | 84 | 0.46688 | false |
exic/spade2 | spade/mtp/simba.py | 1 | 3010 |
from spade import MTP
from spade import AID
from spade import ACLParser
from spade import Envelope
import socket
import SocketServer
import xmpp
#try:
# import stack_thread as thread
#except:
# import thread
import thread
class SimbaRequestHandler(SocketServer.DatagramRequestHandler):
'''
Request handler for SIMBA messages
'''
def handle(self):
msg = str(self.request[0])
#print "SIMBA SS: New incoming message: " + msg
acl = self.server.parser.parse(msg)
envelope = Envelope.Envelope(_from=acl.getSender(), to=acl.getReceivers(), aclRepresentation="fipa.acl.rep.string.std")
self.server.dispatch(envelope, msg)
#print "SIMBA SS: Message dispatched"
class simba(MTP.MTP):
def receiveThread(self):
SocketServer.ThreadingUDPServer.allow_reuse_address = True
self.SS = SocketServer.ThreadingUDPServer(("", self.port), SimbaRequestHandler)
self.SS.dispatch = self.dispatch
self.SS.parser = ACLParser.ACLParser()
#print "SIMBA SS listening on port " + str(self.port)
self.SS.serve_forever()
def stop(self):
try:
del self.SS
        except Exception, e:
            # Only report the error if shutting down the server actually failed
            print "EXCEPTION IN SIMBA", str(e)
def setup(self):
'''
Secondary constructor
'''
#print ">>>SIMBA Transport ready for action"
#self.address = self.config.acc[self.name].address
#self.port = self.config.acc[self.name].port
self.port = 2001
# Launch receive thread
#print ">>>SIMBA: Going to start new thread"
tid = thread.start_new_thread(self.receiveThread, ())
#print ">>>SIMBA: Started new thread " + str(tid)
def send(self, envelope, payload, to=None):
'''
Send a message to a SIMBA agent
'''
#print ">>>SIMBA TRANSPORT: A MESSAGE TO SEND FOR ME"
payload = str(payload.getPayload()[0])
#print ">>>SIMBA: PAYLOAD = " + payload
try:
p = ACLParser.ACLxmlParser()
aclmsg = p.parse(payload)
except:
print ">>>SIMBA: COULD NOT BUILD ACL"
pass
if to == None:
to = envelope.getTo()
#print ">>>SIMBA TRANSPORT: TO = " + str(to)
for receiver in to:
#print ">>>SIMBA TRANSPORT: RECEIVER = " + str(receiver)
for ad in receiver.getAddresses():
ad = str(ad) # Type change
#print ">>>SIMBA TRANSPORT: ADDRESS = " + ad
# SIMBA URI = simba://address:port
if ad[0:8] == "simba://":
ad = ad[8:]
#print ">>>SIMBA TRANSPORT: ADDRESS FINAL = " + ad
# Check for the presence of a port
if ':' in ad:
                    ip, port = ad.split(':')
                    port = int(port)  # split() yields a string; socket.connect() needs an int port
else:
ip = ad
port = 2001
# Set up a SIMBA socket to send the message
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
try:
s.connect((ip, port))
# FORCE ACL WITH PARENTHESIS
s.send(str(aclmsg)) # ACL with parenthesis, oh no!
s.close()
#print ">>>SIMBA message succesfully sent"
except:
print "Could not send SIMBA message"
# Required
PROTOCOL = "simba"
INSTANCE = simba
#port = "2001"
| lgpl-2.1 | 8,704,821,167,077,461,000 | 25.637168 | 121 | 0.634884 | false |
Intel-tensorflow/tensorflow | tensorflow/python/client/pywrap_tf_session.py | 6 | 2973 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python module for Session ops, vars, and functions exported by pybind11."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=invalid-import-order,g-bad-import-order, wildcard-import, unused-import
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.client._pywrap_tf_session import *
from tensorflow.python.client._pywrap_tf_session import _TF_SetTarget
from tensorflow.python.client._pywrap_tf_session import _TF_SetConfig
from tensorflow.python.client._pywrap_tf_session import _TF_NewSessionOptions
# Convert versions to strings for Python2 and keep api_compatibility_test green.
# We can remove this hack once we remove Python2 presubmits. pybind11 can only
# return unicode for Python2 even with py::str.
# https://pybind11.readthedocs.io/en/stable/advanced/cast/strings.html#returning-c-strings-to-python
# pylint: disable=undefined-variable
__version__ = str(get_version())
__git_version__ = str(get_git_version())
__compiler_version__ = str(get_compiler_version())
__cxx11_abi_flag__ = get_cxx11_abi_flag()
__monolithic_build__ = get_monolithic_build()
# User getters to hold attributes rather than pybind11's m.attr due to
# b/145559202.
GRAPH_DEF_VERSION = get_graph_def_version()
GRAPH_DEF_VERSION_MIN_CONSUMER = get_graph_def_version_min_consumer()
GRAPH_DEF_VERSION_MIN_PRODUCER = get_graph_def_version_min_producer()
TENSOR_HANDLE_KEY = get_tensor_handle_key()
# pylint: enable=undefined-variable
# Disable pylint invalid name warnings for legacy functions.
# pylint: disable=invalid-name
def TF_NewSessionOptions(target=None, config=None):
# NOTE: target and config are validated in the session constructor.
opts = _TF_NewSessionOptions()
if target is not None:
_TF_SetTarget(opts, target)
if config is not None:
config_str = config.SerializeToString()
_TF_SetConfig(opts, config_str)
return opts
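# A minimal usage sketch (assumptions: a ConfigProto built from
# tensorflow.core.protobuf.config_pb2; the values below are illustrative):
#
#   from tensorflow.core.protobuf import config_pb2
#   config = config_pb2.ConfigProto(allow_soft_placement=True)
#   opts = TF_NewSessionOptions(target="", config=config)
#   ...  # hand opts to the session wrapper, then TF_DeleteSessionOptions(opts)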
# Disable pylint undefined-variable as the variable is exported in the shared
# object via pybind11.
# pylint: disable=undefined-variable
def TF_Reset(target, containers=None, config=None):
opts = TF_NewSessionOptions(target=target, config=config)
try:
TF_Reset_wrapper(opts, containers)
finally:
TF_DeleteSessionOptions(opts)
| apache-2.0 | 7,151,792,356,379,686,000 | 41.471429 | 100 | 0.745711 | false |
willingc/oh-mainline | vendor/packages/celery/celery/contrib/rdb.py | 18 | 4469 | # -*- coding: utf-8 -*-
"""
celery.contrib.rdb
==================
Remote debugger for Celery tasks running in multiprocessing pool workers.
Inspired by http://snippets.dzone.com/posts/show/7248
**Usage**
.. code-block:: python
from celery.contrib import rdb
from celery.decorators import task
@task
def add(x, y):
result = x + y
rdb.set_trace()
return result
**Environment Variables**
.. envvar:: CELERY_RDB_HOST
    Hostname to bind to. Default is '127.0.0.1', which means the socket
will only be accessible from the local host.
.. envvar:: CELERY_RDB_PORT
Base port to bind to. Default is 6899.
The debugger will try to find an available port starting from the
base port. The selected port will be logged by celeryd.
:copyright: (c) 2009 - 2011 by Ask Solem.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import errno
import os
import socket
import sys
from pdb import Pdb
default_port = 6899
CELERY_RDB_HOST = os.environ.get("CELERY_RDB_HOST") or "127.0.0.1"
CELERY_RDB_PORT = int(os.environ.get("CELERY_RDB_PORT") or default_port)
#: Holds the currently active debugger.
_current = [None]
_frame = getattr(sys, "_getframe")
class Rdb(Pdb):
me = "Remote Debugger"
_prev_outs = None
_sock = None
def __init__(self, host=CELERY_RDB_HOST, port=CELERY_RDB_PORT,
port_search_limit=100, port_skew=+0):
self.active = True
try:
from multiprocessing import current_process
_, port_skew = current_process().name.split('-')
except (ImportError, ValueError):
pass
port_skew = int(port_skew)
self._prev_handles = sys.stdin, sys.stdout
this_port = None
for i in xrange(port_search_limit):
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
this_port = port + port_skew + i
try:
self._sock.bind((host, this_port))
except socket.error, exc:
if exc.errno in [errno.EADDRINUSE, errno.EINVAL]:
continue
raise
else:
break
else:
raise Exception(
"%s: Could not find available port. Please set using "
"environment variable CELERY_RDB_PORT" % (self.me, ))
self._sock.listen(1)
me = "%s:%s" % (self.me, this_port)
context = self.context = {"me": me, "host": host, "port": this_port}
print("%(me)s: Please telnet %(host)s %(port)s."
" Type `exit` in session to continue." % context)
print("%(me)s: Waiting for client..." % context)
self._client, address = self._sock.accept()
context["remote_addr"] = ":".join(map(str, address))
print("%(me)s: In session with %(remote_addr)s" % context)
self._handle = sys.stdin = sys.stdout = self._client.makefile("rw")
Pdb.__init__(self, completekey="tab",
stdin=self._handle, stdout=self._handle)
def _close_session(self):
self.stdin, self.stdout = sys.stdin, sys.stdout = self._prev_handles
self._handle.close()
self._client.close()
self._sock.close()
self.active = False
print("%(me)s: Session %(remote_addr)s ended." % self.context)
def do_continue(self, arg):
self._close_session()
self.set_continue()
return 1
do_c = do_cont = do_continue
def do_quit(self, arg):
self._close_session()
self.set_quit()
return 1
do_q = do_exit = do_quit
def set_trace(self, frame=None):
if frame is None:
frame = _frame().f_back
try:
Pdb.set_trace(self, frame)
except socket.error, exc:
# connection reset by peer.
if exc.errno != errno.ECONNRESET:
raise
def set_quit(self):
# this raises a BdbQuit exception that we are unable to catch.
sys.settrace(None)
def debugger():
"""Returns the current debugger instance (if any),
or creates a new one."""
rdb = _current[0]
if rdb is None or not rdb.active:
rdb = _current[0] = Rdb()
return rdb
def set_trace(frame=None):
"""Set breakpoint at current location, or a specified frame"""
if frame is None:
frame = _frame().f_back
return debugger().set_trace(frame)
| agpl-3.0 | -5,303,194,896,927,360,000 | 27.647436 | 76 | 0.583576 | false |
VirgilSecurity/virgil-sdk-python | virgil_sdk/jwt/jwt_verifier.py | 1 | 2886 | # Copyright (C) 2016-2019 Virgil Security Inc.
#
# Lead Maintainer: Virgil Security Inc. <[email protected]>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# (1) Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# (3) Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from .jwt_header_content import JwtHeaderContent
class JwtVerifier(object):
"""The JwtVerifier provides verification for Jwt."""
def __init__(
self,
access_token_signer,
api_public_key,
api_public_key_id
):
self._access_token_signer = access_token_signer
self._api_public_key = api_public_key
self._api_public_key_id = api_public_key_id
def verify_token(self, jwt_token):
# type: (Jwt) -> bool
"""
        Verify the specified token.
Args:
jwt_token: An instance of Jwt to be verified.
Returns:
True if token is verified, otherwise False.
"""
if jwt_token._header_content.key_id != self._api_public_key_id or\
jwt_token._header_content.algorithm != self._access_token_signer.algorithm or\
jwt_token._header_content.access_token_type != JwtHeaderContent.ACCESS_TOKEN_TYPE or\
jwt_token._header_content.content_type != JwtHeaderContent.CONTENT_TYPE:
return False
return self._access_token_signer.verify_token_signature(
bytearray(jwt_token.signature_data),
bytearray(jwt_token.unsigned_data),
self._api_public_key
)
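# A minimal usage sketch (illustrative names; the signer and public key come
# from the Virgil crypto layer configured elsewhere):
#
#   verifier = JwtVerifier(access_token_signer, api_public_key, api_public_key_id)
#   if verifier.verify_token(jwt_token):
#       pass  # header fields and signature check out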
| bsd-3-clause | -1,359,941,264,242,722,600 | 40.228571 | 96 | 0.691615 | false |
dzan/xenOnArm | tools/libxl/gentest.py | 19 | 9405 | #!/usr/bin/python
import os
import sys
import re
import random
import idl
def randomize_char(c):
if random.random() < 0.5:
return str.lower(c)
else:
return str.upper(c)
def randomize_case(s):
r = [randomize_char(c) for c in s]
return "".join(r)
def randomize_enum(e):
return random.choice([v.name for v in e.values])
handcoded = ["libxl_bitmap", "libxl_key_value_list",
"libxl_cpuid_policy_list", "libxl_string_list"]
def gen_rand_init(ty, v, indent = " ", parent = None):
s = ""
if isinstance(ty, idl.Enumeration):
s += "%s = %s;\n" % (ty.pass_arg(v, parent is None), randomize_enum(ty))
elif isinstance(ty, idl.Array):
if parent is None:
raise Exception("Array type must have a parent")
s += "%s = rand()%%8;\n" % (parent + ty.lenvar.name)
s += "%s = calloc(%s, sizeof(*%s));\n" % \
(v, parent + ty.lenvar.name, v)
s += "{\n"
s += " int i;\n"
s += " for (i=0; i<%s; i++)\n" % (parent + ty.lenvar.name)
s += gen_rand_init(ty.elem_type, v+"[i]",
indent + " ", parent)
s += "}\n"
elif isinstance(ty, idl.KeyedUnion):
if parent is None:
raise Exception("KeyedUnion type must have a parent")
s += "switch (%s) {\n" % (parent + ty.keyvar.name)
for f in ty.fields:
(nparent,fexpr) = ty.member(v, f, parent is None)
s += "case %s:\n" % f.enumname
s += gen_rand_init(f.type, fexpr, indent + " ", nparent)
s += " break;\n"
s += "}\n"
elif isinstance(ty, idl.Struct) \
and (parent is None or ty.json_fn is None):
for f in [f for f in ty.fields if not f.const]:
(nparent,fexpr) = ty.member(v, f, parent is None)
s += gen_rand_init(f.type, fexpr, "", nparent)
elif hasattr(ty, "rand_init") and ty.rand_init is not None:
s += "%s(%s);\n" % (ty.rand_init,
ty.pass_arg(v, isref=parent is None,
passby=idl.PASS_BY_REFERENCE))
elif ty.typename in ["libxl_uuid", "libxl_mac", "libxl_hwcap"]:
s += "rand_bytes((uint8_t *)%s, sizeof(*%s));\n" % (v,v)
elif ty.typename in ["libxl_domid", "libxl_devid"] or isinstance(ty, idl.Number):
s += "%s = rand() %% (sizeof(%s)*8);\n" % \
(ty.pass_arg(v, parent is None),
ty.pass_arg(v, parent is None))
elif ty.typename in ["bool"]:
s += "%s = rand() %% 2;\n" % v
elif ty.typename in ["libxl_defbool"]:
s += "libxl_defbool_set(%s, !!rand() %% 1);\n" % v
elif ty.typename in ["char *"]:
s += "%s = rand_str();\n" % v
elif ty.private:
pass
elif ty.typename in handcoded:
raise Exception("Gen for handcoded %s" % ty.typename)
else:
raise Exception("Cannot randomly init %s" % ty.typename)
if s != "":
s = indent + s
return s.replace("\n", "\n%s" % indent).rstrip(indent)
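# A minimal sketch of the kind of C the generator above emits (hypothetical
# fields; real output depends on the parsed libxl IDL):
#
#   p->port = rand() % (sizeof(p->port)*8);
#   p->name = rand_str();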
if __name__ == '__main__':
if len(sys.argv) < 3:
print >>sys.stderr, "Usage: gentest.py <idl> <implementation>"
sys.exit(1)
random.seed(os.getenv('LIBXL_TESTIDL_SEED'))
(builtins,types) = idl.parse(sys.argv[1])
impl = sys.argv[2]
f = open(impl, "w")
f.write("""
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include "libxl.h"
#include "libxl_utils.h"
static char *rand_str(void)
{
int i, sz = rand() % 32;
char *s = malloc(sz+1);
for (i=0; i<sz; i++)
s[i] = 'a' + (rand() % 26);
s[i] = '\\0';
return s;
}
static void rand_bytes(uint8_t *p, size_t sz)
{
int i;
for (i=0; i<sz; i++)
p[i] = rand() % 256;
}
static void libxl_bitmap_rand_init(libxl_bitmap *bitmap)
{
int i;
bitmap->size = rand() % 16;
bitmap->map = calloc(bitmap->size, sizeof(*bitmap->map));
libxl_for_each_bit(i, *bitmap) {
if (rand() % 2)
libxl_bitmap_set(bitmap, i);
else
libxl_bitmap_reset(bitmap, i);
}
}
static void libxl_key_value_list_rand_init(libxl_key_value_list *pkvl)
{
int i, nr_kvp = rand() % 16;
libxl_key_value_list kvl = calloc(nr_kvp+1, 2*sizeof(char *));
for (i = 0; i<2*nr_kvp; i += 2) {
kvl[i] = rand_str();
if (rand() % 8)
kvl[i+1] = rand_str();
else
kvl[i+1] = NULL;
}
kvl[i] = NULL;
kvl[i+1] = NULL;
*pkvl = kvl;
}
static void libxl_cpuid_policy_list_rand_init(libxl_cpuid_policy_list *pp)
{
int i, nr_policies = rand() % 16;
struct {
const char *n;
int w;
} options[] = {
/* A random selection from libxl_cpuid_parse_config */
{"maxleaf", 32},
{"family", 8},
{"model", 8},
{"stepping", 4},
{"localapicid", 8},
{"proccount", 8},
{"clflush", 8},
{"brandid", 8},
{"f16c", 1},
{"avx", 1},
{"osxsave", 1},
{"xsave", 1},
{"aes", 1},
{"popcnt", 1},
{"movbe", 1},
{"x2apic", 1},
{"sse4.2", 1},
{"sse4.1", 1},
{"dca", 1},
{"pdcm", 1},
{"procpkg", 6},
};
const int nr_options = sizeof(options)/sizeof(options[0]);
char buf[64];
libxl_cpuid_policy_list p = NULL;
for (i = 0; i < nr_policies; i++) {
int opt = rand() % nr_options;
int val = rand() % (1<<options[opt].w);
snprintf(buf, 64, \"%s=%#x\", options[opt].n, val);
libxl_cpuid_parse_config(&p, buf);
}
*pp = p;
}
static void libxl_string_list_rand_init(libxl_string_list *p)
{
int i, nr = rand() % 16;
libxl_string_list l = calloc(nr+1, sizeof(char *));
for (i = 0; i<nr; i++) {
l[i] = rand_str();
}
l[i] = NULL;
*p = l;
}
""")
for ty in builtins + types:
if isinstance(ty, idl.Number): continue
if ty.typename not in handcoded:
f.write("static void %s_rand_init(%s);\n" % \
(ty.typename,
ty.make_arg("p", passby=idl.PASS_BY_REFERENCE)))
f.write("static void %s_rand_init(%s)\n" % \
(ty.typename,
ty.make_arg("p", passby=idl.PASS_BY_REFERENCE)))
f.write("{\n")
f.write(gen_rand_init(ty, "p"))
f.write("}\n")
f.write("\n")
ty.rand_init = "%s_rand_init" % ty.typename
f.write("""
int main(int argc, char **argv)
{
""")
for ty in types:
f.write(" %s %s_val;\n" % (ty.typename, ty.typename))
f.write("""
int rc;
char *s;
xentoollog_logger_stdiostream *logger;
libxl_ctx *ctx;
logger = xtl_createlogger_stdiostream(stderr, XTL_DETAIL, 0);
if (!logger) exit(1);
if (libxl_ctx_alloc(&ctx, LIBXL_VERSION, 0, (xentoollog_logger*)logger)) {
fprintf(stderr, "cannot init xl context\\n");
exit(1);
}
""")
f.write(" printf(\"Testing TYPE_to_json()\\n\");\n")
f.write(" printf(\"----------------------\\n\");\n")
f.write(" printf(\"\\n\");\n")
for ty in [t for t in types if t.json_fn is not None]:
arg = ty.typename + "_val"
f.write(" %s_rand_init(%s);\n" % (ty.typename, \
ty.pass_arg(arg, isref=False, passby=idl.PASS_BY_REFERENCE)))
f.write(" s = %s_to_json(ctx, %s);\n" % \
(ty.typename, ty.pass_arg(arg, isref=False)))
f.write(" printf(\"%%s: %%s\\n\", \"%s\", s);\n" % ty.typename)
f.write(" if (s == NULL) abort();\n")
f.write(" free(s);\n")
if ty.dispose_fn is not None:
f.write(" %s(&%s_val);\n" % (ty.dispose_fn, ty.typename))
f.write("\n")
f.write(" printf(\"Testing Enumerations\\n\");\n")
f.write(" printf(\"--------------------\\n\");\n")
f.write(" printf(\"\\n\");\n")
for ty in [t for t in types if isinstance(t,idl.Enumeration)]:
f.write(" printf(\"%s -- to string:\\n\");\n" % (ty.typename))
for v in ty.values:
f.write(" printf(\"\\t%s = %%d = \\\"%%s\\\"\\n\", " \
"%s, %s_to_string(%s));\n" % \
(v.valuename, v.name, ty.typename, v.name))
f.write("\n")
f.write(" printf(\"%s -- to JSON:\\n\");\n" % (ty.typename))
for v in ty.values:
f.write(" printf(\"\\t%s = %%d = %%s\", " \
"%s, %s_to_json(ctx, %s));\n" %\
(v.valuename, v.name, ty.typename, v.name))
f.write("\n")
f.write(" printf(\"%s -- from string:\\n\");\n" % (ty.typename))
for v in [v.valuename for v in ty.values] + ["AN INVALID VALUE"]:
n = randomize_case(v)
f.write(" %s_val = -1;\n" % (ty.typename))
f.write(" rc = %s_from_string(\"%s\", &%s_val);\n" %\
(ty.typename, n, ty.typename))
f.write(" printf(\"\\t%s = \\\"%%s\\\" = %%d (rc %%d)\\n\", " \
"\"%s\", %s_val, rc);\n" %\
(v, n, ty.typename))
f.write("\n")
f.write("""
libxl_ctx_free(ctx);
xtl_logger_destroy((xentoollog_logger*)logger);
return 0;
}
""")
| gpl-2.0 | -7,528,817,808,149,349,000 | 30.989796 | 85 | 0.474428 | false |
davidcox/freetype-py | setup.py | 2 | 1174 | #!/usr/bin/env python
# -----------------------------------------------------------------------------
# FreeType high-level python API - copyright 2011 Nicolas P. Rougier
# Distributed under the terms of the new BSD license.
# -----------------------------------------------------------------------------
from distutils.core import setup
setup( name = 'freetype-py',
version = '0.3.3',
description = 'Freetype python bindings',
author = 'Nicolas P. Rougier',
author_email='[email protected]',
url='http://code.google.com/p/freetype-py/',
packages=['freetype'],
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: X11 Applications',
'Environment :: MacOS X',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
],
)
| bsd-3-clause | -2,967,915,028,320,317,400 | 40.928571 | 79 | 0.498296 | false |
abretaud/tools-iuc | tools/circos/text-from-gff3.py | 17 | 1131 | #!/usr/bin/env python
import logging
import sys
from BCBio import GFF
logging.basicConfig(level=logging.INFO)
log = logging.getLogger()
if __name__ == "__main__":
attr = sys.argv[2]
for record in GFF.parse(sys.argv[1]):
if len(record.features) == 0:
continue
for feature in sorted(record.features, key=lambda x: x.location.start):
# chrom chromStart chromEnd
# name score strand
# thickStart thickEnd itemRgb
kv = {
"strand": 0 if not feature.location.strand else feature.location.strand,
"value": feature.qualifiers.get("score", [0])[0],
}
if attr not in feature.qualifiers:
continue
name = feature.qualifiers[attr][0]
line = [
record.id,
str(int(feature.location.start)),
str(int(feature.location.end)),
name,
",".join(["%s=%s" % x for x in sorted(kv.items())]),
]
sys.stdout.write("\t".join(line))
sys.stdout.write("\n")
| mit | 7,988,875,520,059,622,000 | 25.928571 | 88 | 0.519894 | false |
kromain/chromium-tools | clang_format.py | 3 | 2643 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Redirects to the version of clang-format checked into the Chrome tree.
clang-format binaries are pulled down from Google Cloud Storage whenever you
sync Chrome, to platform-specific locations. This script knows how to locate
those tools, assuming the script is invoked from inside a Chromium checkout."""
import gclient_utils
import os
import subprocess
import sys
class NotFoundError(Exception):
"""A file could not be found."""
def __init__(self, e):
Exception.__init__(self,
'Problem while looking for clang-format in Chromium source tree:\n'
' %s' % e)
def _FindChromiumSourceRoot():
"""Return the source root of the current chromium checkout, or die trying."""
# The use of .gn is somewhat incongruous here, but we need a file uniquely
# existing at src/. GN does the same thing at least.
source_root = gclient_utils.FindFileUpwards('.gn')
if not source_root:
raise NotFoundError(
'.gn file not found in any parent of the current path.')
return source_root
def FindClangFormatToolInChromiumTree():
"""Return a path to the clang-format executable, or die trying."""
tool_path = os.path.join(_FindChromiumSourceRoot(), 'third_party',
'clang_format', 'bin',
gclient_utils.GetMacWinOrLinux(),
'clang-format' + gclient_utils.GetExeSuffix())
if not os.path.exists(tool_path):
raise NotFoundError('File does not exist: %s' % tool_path)
return tool_path
def FindClangFormatScriptInChromiumTree(script_name):
"""Return a path to a clang-format helper script, or die trying."""
script_path = os.path.join(_FindChromiumSourceRoot(), 'third_party',
'clang_format', 'script', script_name)
if not os.path.exists(script_path):
raise NotFoundError('File does not exist: %s' % script_path)
return script_path
def main(args):
try:
tool = FindClangFormatToolInChromiumTree()
except NotFoundError, e:
print >> sys.stderr, e
sys.exit(1)
# Add some visibility to --help showing where the tool lives, since this
# redirection can be a little opaque.
help_syntax = ('-h', '--help', '-help', '-help-list', '--help-list')
if any(match in args for match in help_syntax):
print '\nDepot tools redirects you to the clang-format at:\n %s\n' % tool
return subprocess.call([tool] + sys.argv[1:])
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause | 5,868,807,343,760,346,000 | 34.716216 | 80 | 0.680288 | false |
Injabie3/Red-DiscordBot | cogs/lottery.py | 2 | 25131 | # Lottery was created by Redjumpman for Redbot
# This will create 2 data folders with 1 JSON file
import os
import asyncio
from discord.ext import commands
from .utils.dataIO import dataIO
from .utils import checks
from __main__ import send_cmd_help
from random import choice as randchoice
class Lottery:
"""Hosts lotteries on the server"""
def __init__(self, bot):
self.bot = bot
self.file_path = "data/JumperCogs/lottery/system.json"
self.system = dataIO.load_json(self.file_path)
self.funny = ["Rigging the system...",
"Removing tickets that didn't pay me off...",
"Adding fake tickets...", "Throwing out the bad names..",
"Switching out the winning ticket...",
"Picking from highest bribe...",
"Looking for a marked ticket...",
"Eeny, meeny, miny, moe...",
"I lost the tickets so...",
"Stop messaging me, I'm picking...",
"May the odds be ever in your favor...",
"I'm going to ban that guy who keeps spamming me, 'please!'... ",
"Winner winner, chicken dinner...",
"Can someone tell the guy who keeps yelling 'Bingo!' that he is playing the wrong game..."]
@commands.group(name="setlottery", pass_context=True)
async def setlottery(self, ctx):
"""Lottery Settings"""
if ctx.invoked_subcommand is None:
await send_cmd_help(ctx)
@setlottery.command(name="prize", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _prize_setlottery(self, ctx, amount: int):
"""Set's the prize amount for a lottery. Set to 0 to cancel."""
server = ctx.message.server
settings = self.check_server_settings(server)
if amount > 0:
settings["Lottery Prize"] = True
settings["Prize Amount"] = amount
dataIO.save_json(self.file_path, self.system)
await self.bot.say("A prize for the next lottery has been set for {} credits".format(amount))
elif amount == 0:
settings["Lottery Prize"] = False
settings["Prize Amount"] = amount
dataIO.save_json(self.file_path, self.system)
await self.bot.say("Prize for the next lottery drawing removed.")
else:
await self.bot.say("You can't use negative values.")
@setlottery.command(name="autofreeze", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _autofreeze_setlottery(self, ctx):
"""Turns on auto account freeze. Will freeze/unfreeze every 60 seconds."""
server = ctx.message.server
settings = self.check_server_settings(server)
if settings["Membership Freeze"]:
settings["Membership Freeze"] = False
dataIO.save_json(self.file_path, self.system)
await self.bot.say("Now turning off auto freeze. Please wait for the previous cycle to expire.")
else:
settings["Membership Freeze"] = True
dataIO.save_json(self.file_path, self.system)
await self.bot.say("Now turning on auto freeze. This will cycle through server accounts and freeze/unfreeze accounts that require the signup role.")
self.bot.loop.create_task(self.auto_freeze(ctx, settings))
@setlottery.command(name="fun", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _fun_setlottery(self, ctx):
"""Toggles fun text on and off"""
server = ctx.message.server
settings = self.check_server_settings(server)
if settings["Fun Text"]:
settings["Fun Text"] = False
dataIO.save_json(self.file_path, self.system)
await self.bot.say("Fun Text is now disabled.")
else:
settings["Fun Text"] = True
dataIO.save_json(self.file_path, self.system)
await self.bot.say("Fun Text is now enabled.")
@setlottery.command(name="role", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _role_setlottery(self, ctx, role: str):
"""Set the required role for membership sign-up. Default: None"""
server = ctx.message.server
settings = self.check_server_settings(server)
settings["Signup Role"] = role
dataIO.save_json(self.file_path, self.system)
await self.bot.say("Setting the required role to sign-up to **{}**.\nUnless set to **None**, users must be assigned this role to signup!".format(role))
@commands.group(name="lottery", pass_context=True)
async def lottery(self, ctx):
"""Lottery Group Command"""
if ctx.invoked_subcommand is None:
await send_cmd_help(ctx)
@lottery.command(name="version", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _version_lottery(self):
"""Shows the version of lottery cog you are running."""
version = self.system["Version"]
await self.bot.say("```Python\nYou are running Lottery Cog version {}.```".format(version))
@lottery.command(name="start", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _start_lottery(self, ctx, restriction=False, timer=0):
"""Starts a lottery. Can optionally restrict particpation and set a timer."""
user = ctx.message.author
server = ctx.message.server
settings = self.check_server_settings(server)
if not settings["Lottery Active"]:
settings["Lottery Count"] += 1
if restriction:
if not settings["Signup Role"]: # Checks if admin set a role to mention, otherwise default to lottery members
lottery_role = "lottery members"
else:
lottery_role = "@" + settings["Signup Role"]
settings["Lottery Member Requirement"] = True
else:
lottery_role = "everyone on the server"
settings["Lottery Active"] = True
dataIO.save_json(self.file_path, self.system)
if timer:
await self.bot.say("A lottery has been started by {}, for {}. It will end in {} seconds.".format(user.name, lottery_role, timer)) # TODO Change timer to time formatter function
await self.run_timer(timer, ctx.prefix, server, settings)
else:
await self.bot.say("A lottery has been started by {}, for {}.".format(user.name, lottery_role))
else:
await self.bot.say("I cannot start a new lottery until the current one has ended.")
@lottery.command(name="end", pass_context=True)
@checks.admin_or_permissions(manage_server=True)
async def _end_lottery(self, ctx):
"""Manually ends an on-going lottery"""
server = ctx.message.server
settings = self.check_server_settings(server)
if settings["Lottery Active"]:
if server.id in self.system["Lottery Players"]:
players = list(self.system["Lottery Players"][server.id].keys())
winner = randchoice(players)
mention = self.system["Lottery Players"][server.id][winner]["Mention"]
await self.display_lottery_winner(winner, mention, server, settings)
self.update_win_stats(winner, server.id)
self.lottery_clear(settings)
else:
await self.bot.say("There are no players playing in the lottery. Resetting lottery settings.")
self.lottery_clear(settings)
else:
await self.bot.say("There is no lottery for me to end.")
@lottery.command(name="play", pass_context=True)
async def _play_lottery(self, ctx):
"""Enters a user into an on-going lottery."""
server = ctx.message.server
user = ctx.message.author
settings = self.check_server_settings(server)
if settings["Lottery Active"]:
if await self.requirement_check(ctx, settings):
if server.id not in self.system["Lottery Players"]:
self.system["Lottery Players"][server.id] = {}
if user.id not in self.system["Lottery Players"][server.id]:
self.system["Lottery Players"][server.id][user.id] = {"Mention": user.mention}
players = len(self.system["Lottery Players"][server.id].keys())
dataIO.save_json(self.file_path, self.system)
self.update_play_stats(user.id, server.id)
await self.bot.say("{} you have been added to the lottery. Good luck.".format(user.mention))
await self.bot.say("There are now {} users participating in the lottery.".format(players))
else:
await self.bot.say("You have already entered into the lottery.")
else:
await self.bot.say("There is no on-going lottery.")
@lottery.command(name="signup", pass_context=True)
async def _signup_lottery(self, ctx):
"""Allows a user to sign-up to participate in lotteries"""
user = ctx.message.author
server = ctx.message.server
settings = self.check_server_settings(server)
role = settings["Signup Role"]
if role:
if self.role_check(ctx, role, user.id):
await self.member_creation(user, server.id, ctx.prefix)
else:
await self.bot.say("You do not have the {} role required to become a member".format(role))
else:
await self.member_creation(user, server.id, ctx.prefix)
@lottery.command(name="info", pass_context=True)
async def _info_lottery(self, ctx):
"""General information about this plugin"""
msg = """```
General Information about Lottery Plugin\n
=========================================\n
• When starting a lottery you can optionally set a timer and/or restrict to members only.\n
        • By default all users can sign up for lottery membership. To restrict sign-ups to a role type {}setlottery role.\n
• {}lottery stats will show your stats if you are signed-up.\n
• You can freeze accounts that no longer have the sign-up role periodically by turning on {}setlottery freeze.\n
• Autofreeze feature will need to be enabled again if you shutdown your bot.\n
        • Members who have a frozen account will no longer gain stats or participate in member only lotteries.\n
• If a member gets their role back after their account was frozen, they need to type {}lottery activate to unfreeze the account.\n
• Lotteries can be hosted on different servers with the same bot without conflicts.\n
• Powerballs have not yet been implemented, but the framework is complete. Ignore powerball stats.\n
• Anyone can join a lottery without restrictions.```""".format(ctx.prefix, ctx.prefix, ctx.prefix, ctx.prefix, ctx.prefix)
await self.bot.say(msg)
@lottery.command(name="stats", pass_context=True)
async def _stats_lottery(self, ctx):
"""Shows your membership stats"""
user = ctx.message.author
server = ctx.message.server
settings = self.check_server_settings(server)
role = settings["Signup Role"]
if server.id in self.system["Lottery Members"]:
if user.id in self.system["Lottery Members"][server.id]:
if not self.system["Lottery Members"][server.id][user.id]["Account Frozen"]:
member = self.system["Lottery Members"][server.id][user.id]
lotteries_played = member["Lotteries Played"]
lotteries_won = member["Lotteries Won"]
account_status = member["Account Frozen"]
msg = "```"
msg += "\n{}'s Lottery Stats on {}".format(user.name, server.name)
msg += "\n================================================="
msg += "\nLotteries Played: {}".format(lotteries_played)
msg += "\nLotteries Won: {}".format(lotteries_won)
if account_status:
msg += "\nAccount Status: Frozen"
else:
msg += "\nAccount Status: Active"
msg += "```"
await self.bot.say(msg)
else:
await self.bot.say("Your account is frozen. You require the {} role on this server to track stats.\n".format(role) +
"If you are given back this role, type {}lottery activate to restore your account.".format(ctx.prefix))
else:
await self.bot.say("You are not a lottery member. Only members can view/track stats.")
else:
await self.bot.say("There are no Lottery Members on this server.")
def add_credits(self, userid, amount, server):
bank = self.bot.get_cog('Economy').bank
mobj = server.get_member(userid)
bank.deposit_credits(mobj, amount)
msg = "```{} credits have ben deposited into your account.```".format(amount)
return msg
def update_play_stats(self, userid, serverid):
if serverid in self.system["Lottery Members"]:
if userid in self.system["Lottery Members"][serverid]:
self.system["Lottery Members"][serverid][userid]["Lotteries Played"] += 1
dataIO.save_json(self.file_path, self.system)
def update_win_stats(self, winner, server):
if server in self.system["Lottery Members"]:
if winner in self.system["Lottery Members"][server]:
self.system["Lottery Members"][server][winner]["Lotteries Won"] += 1
dataIO.save_json(self.file_path, self.system)
def lottery_clear(self, settings):
self.system["Lottery Players"] = {}
settings["Lottery Prize"] = 0
settings["Lottery Member Requirement"] = False
settings["Lottery Active"] = False
dataIO.save_json(self.file_path, self.system)
def role_check(self, ctx, role, userid):
if userid in [m.id for m in ctx.message.server.members if role.lower() in [str(r).lower() for r in m.roles]]:
return True
else:
return False
def check_server_settings(self, server):
if server.id not in self.system["Config"]:
self.system["Config"][server.id] = {server.name: {"Lottery Count": 0,
"Lottery Active": False,
"Fun Text": False,
"Lottery Winners": 1,
"Prize Amount": 0,
"Powerball Active": False,
"Powerball Reoccuring": True,
"Powerball Jackpot": 3000,
"Powerball Ticket Limit": 0,
"Powerball Ticket Cost": 0,
"Powerball Winning Ticket": None,
"Powerball Grace Period": 1,
"Powerball Day": "Sunday",
"Powerball Time": "1700",
"Powerball Combo Payouts": [2.0, 3.0, 10],
"Powerball Jackpot Type": "Preset",
"Powerball Jackpot Percentage": 0.31,
"Powerball Jackpot Multiplier": 2.0,
"Powerball Jackpot Preset": 500,
"Signup Role": None,
"Lottery Member Requirement": False,
"Membership Freeze": False,
}
}
dataIO.save_json(self.file_path, self.system)
print("Creating default lottery settings for Server: {}".format(server.name))
path = self.system["Config"][server.id][server.name]
return path
else:
path = self.system["Config"][server.id][server.name]
return path
async def member_check(self, userid, serverid):
if serverid in self.system["Lottery Members"]:
if userid in self.system["Lottery Members"][serverid]:
return True
else:
await self.bot.say("This requires a lottery membership.")
return False
else:
await self.bot.say("This requires a lottery membership, but there are no members on this server.")
return False
async def requirement_check(self, ctx, settings):
server = ctx.message.server
user = ctx.message.author
if settings["Lottery Member Requirement"]:
if server.id in self.system["Lottery Members"]:
if user.id in self.system["Lottery Members"][server.id]:
if self.system["Lottery Members"][server.id][user.id]["Account Frozen"]:
await self.bot.say("Your account is frozen. If you meet the role requirement use {}lottery activate to restore your account.".format(ctx.prefix))
return False
else:
return True
else:
await self.bot.say("You do not meet the role requirment to participate in this lottery.")
return False
else:
return False
else:
return True
async def run_timer(self, timer, prefix, server, settings):
half_time = timer / 2
quarter_time = half_time / 2
await asyncio.sleep(half_time)
if settings["Lottery Active"] is True:
await self.bot.say("{} seconds remaining for the lottery. Type {}lottery play to join.".format(half_time, prefix))
await asyncio.sleep(quarter_time)
if settings["Lottery Active"] is True:
await self.bot.say("{} seconds remaining for the lottery. Type {}lottery play to join.".format(quarter_time, prefix))
await asyncio.sleep(quarter_time)
if settings["Lottery Active"] is True:
await self.bot.say("The lottery is now ending...")
await asyncio.sleep(1)
await self.end_lottery_timer(server, settings)
async def end_lottery_timer(self, server, settings):
if settings["Lottery Active"]:
if server.id in self.system["Lottery Players"]:
players = self.system["Lottery Players"][server.id].keys()
winner = randchoice(list(players))
mention = "<@" + winner + ">"
self.update_win_stats(winner, server.id)
await self.display_lottery_winner(winner, mention, server, settings)
self.lottery_clear(settings)
else:
await self.bot.say("There are no players in the lottery.")
self.lottery_clear(settings)
else:
pass
async def display_lottery_winner(self, winner, mention, server, settings):
await self.bot.say("The winner is...")
await asyncio.sleep(2)
if settings["Fun Text"]:
fun_text = randchoice(self.funny)
await self.bot.say(fun_text)
await asyncio.sleep(2)
await self.bot.say("Congratulations {}. You won the lottery!".format(mention))
if settings["Prize Amount"] > 0:
prize = settings["Prize Amount"]
await self.deposit_prize(winner, prize, server)
settings["Prize Amount"] = 0
dataIO.save_json(self.file_path, self.system)
async def deposit_prize(self, winner, prize, server):
bank = self.bot.get_cog('Economy').bank
member_object = server.get_member(winner)
bank.deposit_credits(member_object, prize)
await self.bot.say("{} credits have been deposited into your account.".format(prize))
async def member_creation(self, user, serverid, prefix):
if serverid not in self.system["Lottery Members"]:
self.system["Lottery Members"][serverid] = {}
dataIO.save_json(self.file_path, self.system)
if user.id not in self.system["Lottery Members"][serverid]:
self.system["Lottery Members"][serverid][user.id] = {"Name": user.name,
"ID": user.id,
"Lotteries Played": 0,
"Lotteries Won": 0,
"Powerballs Played": 0,
"Powerballs Won": 0,
"Powerball Tickets": [],
"Powerball Count": 0,
"Account Frozen": False}
dataIO.save_json(self.file_path, self.system)
await self.bot.say("Lottery Account created for {}. You may now particpate in on-going lotteries.\nCheck your stats with {}lottery stats".format(user.name, prefix))
else:
await self.bot.say("You are already member.")
async def auto_freeze(self, ctx, settings):
server = ctx.message.server
while settings["Membership Freeze"]:
role = settings["Signup Role"]
print("Loop started for {}".format(server.name))
if server.id in self.system["Lottery Members"]:
users = list(self.system["Lottery Members"][server.id].keys())
for user in users:
if self.role_check(ctx, role, user):
if self.system["Lottery Members"][server.id][user]["Account Frozen"]:
self.system["Lottery Members"][server.id][user]["Account Frozen"] = False
else:
pass
else:
if self.system["Lottery Members"][server.id][user]["Account Frozen"]:
pass
else:
self.system["Lottery Members"][server.id][user]["Account Frozen"] = True
dataIO.save_json(self.file_path, self.system)
await asyncio.sleep(5)
def check_folders():
if not os.path.exists("data/JumperCogs"): # Checks for parent directory for all Jumper cogs
print("Creating JumperCogs default directory")
os.makedirs("data/JumperCogs")
if not os.path.exists("data/JumperCogs/lottery"):
print("Creating JumperCogs lottery folder")
os.makedirs("data/JumperCogs/lottery")
def check_files():
default = {"Config": {},
"Lottery Members": {},
"Lottery Players": {},
"Version": 2.001
}
f = "data/JumperCogs/lottery/system.json"
if not dataIO.is_valid_json(f):
print("Adding system.json to data/JumperCogs/lottery/")
dataIO.save_json(f, default)
else:
current = dataIO.load_json(f)
if current["Version"] != default["Version"]:
print("Updating Lottery Cog from version {} to version {}".format(current["Version"], default["Version"]))
current["Version"] = default["Version"]
dataIO.save_json(f, current)
def setup(bot):
check_folders()
check_files()
n = Lottery(bot)
bot.add_cog(n)
| gpl-3.0 | -2,850,040,692,678,702,000 | 51.42766 | 193 | 0.538409 | false |
TheParrotsAreComing/PAWS | TestingAssets/Fosters/foster_filter.py | 2 | 2061 | import time
import sys
import _mysql
import random
import string
import re
from selenium import webdriver
from selenium.webdriver.support.ui import Select
import selenium.webdriver.chrome.service as service
from selenium.webdriver.common.keys import Keys
service = service.Service('D:\ChromeDriver\chromedriver')
service.start()
capabilities = {'chrome.binary': 'C:\Program Files (x86)\Google\Chrome\Application\chrome'} # Chrome path is different for everyone
driver = webdriver.Remote(service.service_url, capabilities)
try:
# Check to see if it was added
db=_mysql.connect('localhost','root','root','paws_db')
rand_fname=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
rand_lname=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
rand_mail=''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(6))
db.query("INSERT INTO fosters (first_name,last_name,address,email,created,is_deleted) VALUES(\""+rand_fname+"\",\""+rand_lname+"\",\"55 Gato Way\",\""+rand_mail+"@mail.com\",NOW(),false);");
db.store_result()
db.query("SELECT id,first_name FROM fosters where last_name=\""+rand_lname+"\" AND email=\""+rand_mail+"@mail.com\"")
r=db.store_result()
k=r.fetch_row(1,1)
a_id = k[0].get('id')
driver.set_window_size(sys.argv[1], sys.argv[2]);
driver.get('http://localhost:8765');
driver.find_element_by_id('email').send_keys('[email protected]')
driver.find_element_by_id('password').send_keys('password')
driver.find_element_by_css_selector('input[type="submit"]').click()
driver.get('http://localhost:8765/fosters/index/');
driver.find_element_by_class_name('cat-filter').click()
input_name = driver.find_element_by_id('First-Name')
input_name.location_once_scrolled_into_view
input_name.send_keys(rand_fname)
input_name.send_keys(Keys.RETURN)
if rand_fname in driver.find_element_by_class_name('card-h1').text:
print("pass")
else:
print("fail")
except Exception as e:
print(e)
print("fail")
finally:
driver.quit()
| mit | 7,965,257,144,329,852,000 | 31.714286 | 191 | 0.724891 | false |
donny/mako-mori | external/boto/mturk/notification.py | 40 | 4178 | # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
Provides NotificationMessage and Event classes, with utility methods, for
implementations of the Mechanical Turk Notification API.
"""
import hmac
try:
from hashlib import sha1 as sha
except ImportError:
import sha
import base64
import re
class NotificationMessage:
NOTIFICATION_WSDL = "http://mechanicalturk.amazonaws.com/AWSMechanicalTurk/2006-05-05/AWSMechanicalTurkRequesterNotification.wsdl"
NOTIFICATION_VERSION = '2006-05-05'
SERVICE_NAME = "AWSMechanicalTurkRequesterNotification"
OPERATION_NAME = "Notify"
EVENT_PATTERN = r"Event\.(?P<n>\d+)\.(?P<param>\w+)"
EVENT_RE = re.compile(EVENT_PATTERN)
def __init__(self, d):
"""
Constructor; expects parameter d to be a dict of string parameters from a REST transport notification message
"""
self.signature = d['Signature'] # vH6ZbE0NhkF/hfNyxz2OgmzXYKs=
self.timestamp = d['Timestamp'] # 2006-05-23T23:22:30Z
self.version = d['Version'] # 2006-05-05
assert d['method'] == NotificationMessage.OPERATION_NAME, "Method should be '%s'" % NotificationMessage.OPERATION_NAME
# Build Events
self.events = []
events_dict = {}
if 'Event' in d:
# TurboGears surprised me by 'doing the right thing' and making { 'Event': { '1': { 'EventType': ... } } } etc.
events_dict = d['Event']
else:
for k in d:
v = d[k]
if k.startswith('Event.'):
ed = NotificationMessage.EVENT_RE.search(k).groupdict()
n = int(ed['n'])
param = str(ed['param'])
if n not in events_dict:
events_dict[n] = {}
events_dict[n][param] = v
for n in events_dict:
self.events.append(Event(events_dict[n]))
def verify(self, secret_key):
"""
Verifies the authenticity of a notification message.
TODO: This is doing a form of authentication and
this functionality should really be merged
with the pluggable authentication mechanism
at some point.
"""
verification_input = NotificationMessage.SERVICE_NAME
verification_input += NotificationMessage.OPERATION_NAME
verification_input += self.timestamp
h = hmac.new(key=secret_key, digestmod=sha)
h.update(verification_input)
signature_calc = base64.b64encode(h.digest())
return self.signature == signature_calc
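# A minimal sketch of the check performed by verify() above (hypothetical
# timestamp; the input is the service name, operation name and timestamp
# concatenated, HMAC-SHA1 signed and base64 encoded):
#
#   verification_input = ("AWSMechanicalTurkRequesterNotification" "Notify"
#                         "2006-05-23T23:22:30Z")
#   h = hmac.new(key=secret_key, digestmod=sha)
#   h.update(verification_input)
#   signature_calc = base64.b64encode(h.digest())  # compare to msg.signature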
class Event:
def __init__(self, d):
self.event_type = d['EventType']
self.event_time_str = d['EventTime']
self.hit_type = d['HITTypeId']
self.hit_id = d['HITId']
if 'AssignmentId' in d: # Not present in all event types
self.assignment_id = d['AssignmentId']
#TODO: build self.event_time datetime from string self.event_time_str
def __repr__(self):
return "<boto.mturk.notification.Event: %s for HIT # %s>" % (self.event_type, self.hit_id)
| mit | -8,757,323,337,122,954,000 | 39.563107 | 134 | 0.649354 | false |
d1hotpep/cacheable | cacheable/tests/PeeweeTTLTest.py | 1 | 1473 | #!/usr/bin/python
import os
import sys
from time import sleep
from time import time
import unittest
sys.path = [ os.path.abspath(os.path.join(os.path.dirname(__file__), '../..')) ] + sys.path
from cacheable import Cacheable
from cacheable.adapter import PeeweeAdapter
from peewee import SqliteDatabase
class TimeCacheable(Cacheable):
@staticmethod
def load_data(keys):
return { x : int(time()) for x in keys }
class TimeTTLCacheable(Cacheable):
TTL = 1
@staticmethod
def load_data(keys):
return { x : int(time()) for x in keys }
class PeeweeTTLTest(unittest.TestCase):
def setUp(self):
database = SqliteDatabase(':memory:')
self.adapter = PeeweeAdapter(database)
self.adapter.create_table()
Cacheable.init(self.adapter)
def test_basic(self):
ts = time()
res = TimeCacheable.get('abc')
self.assertTrue(abs(res - ts) <= 1)
res2 = TimeCacheable.get('abc')
self.assertEqual(res, res2)
sleep(1.5)
res3 = TimeCacheable.get('abc')
self.assertEqual(res, res3)
def test_expired(self):
ts = time()
res = TimeTTLCacheable.get('abc')
self.assertTrue(abs(res - ts) <= 1)
res2 = TimeTTLCacheable.get('abc')
self.assertEqual(res, res2)
sleep(1.5)
res3 = TimeTTLCacheable.get('abc')
self.assertNotEqual(res, res3)
if __name__ == '__main__':
unittest.main()
| mit | -7,904,872,723,253,051,000 | 21.661538 | 91 | 0.617787 | false |
MRod5/pyturb | miscelanea/dispositivos_aerorreactor.py | 1 | 7724 | # -*- coding: utf-8 -*-
"""
Created on Thu May 7 12:54:28 2020
@author: marco
"""
import numpy as np
# Iteration settings
iter_max = 100
tolerancia = 1e-6
def compresor_tempsalida_rend_adiab(eta, Tet, rel_compresion, gamma, verboso=False):
"""
Solución de la temperatura de remanso a la salida de un compresor (ventilador):
Entradas:
- eta: Rendimiento adiabático del compresor [-]
- Tet: Temperatura de remanso en la entrada [K]
- rel_compresion: Relación de compresión [-]
- Coeficiente de dilatación adiabático (gamma) [-]
- Valor discreto
- Función de la temperatura
- verboso: Si es verdadero: print con numero de iteracion, temperatura y residuo para cada iteracion
Si la gamma es función de la temperatura la solución se itera.
Salida:
- Temperatura de remanso a la salida [K]
- Número de iteraciones necesarias para converger
- Residuo de la temperatura (adimensional)
"""
if callable(gamma):
        # Iterate
sigue_iterando = True
Tst = Tet
niteraciones = 0
while sigue_iterando:
niteraciones += 1
Tst_ = Tst
Tmedia = (Tst + Tet)/2
gamma_ = gamma(Tmedia)
numerador = rel_compresion ** ((gamma_ - 1)/gamma_) - 1
Tst = (numerador / eta + 1) * Tet
residuo = np.abs((Tst - Tst_)/Tst_)
if verboso:
print(niteraciones, Tst, residuo)
if np.abs(residuo)<tolerancia:
sigue_iterando = False
return Tst, niteraciones, residuo
elif niteraciones==iter_max:
                print('WARNING: MAXIMUM NUMBER OF ITERATIONS REACHED')
sigue_iterando = False
return Tst, niteraciones, residuo
else:
numerador = rel_compresion ** ((gamma - 1)/gamma) - 1
Tst = (numerador / eta + 1) * Tet
return Tst
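# A minimal usage sketch for the function above (illustrative numbers;
# with a constant gamma only the temperature is returned):
#
#   T2t = compresor_tempsalida_rend_adiab(eta=0.85, Tet=288.15,
#                                         rel_compresion=10.0, gamma=1.4)
#   # With gamma given as a function of temperature the call iterates and
#   # returns a tuple instead: (Tst, niteraciones, residuo).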
def combustor_tempsalida_dosado(eta, Tet, f, L, cp, verboso=False):
"""
Solución de la temperatura de remanso a la salida de un combustor:
Entradas:
- eta: Rendimiento de la combustión [-]
- Tet: Temperatura de remanso en la entrada [K]
- f: dosado [-]
- L: Poder calorífico del combustible [J/kg]
- cp: Calor específico a presión constante del aire
- Valor discreto
- Función de la temperatura
- verboso: Si es verdadero: print con numero de iteracion, temperatura y residuo para cada iteracion
Si el cp es función de la temperatura la solución se itera.
Salida:
- Temperatura de remanso a la salida [K]
- Número de iteraciones necesarias para converger
- Residuo de la temperatura (adimensional)
"""
if callable(cp):
        # Iterate
sigue_iterando = True
Tst = Tet
niteraciones = 0
while sigue_iterando:
niteraciones += 1
Tst_ = Tst
cp_ = cp(Tst)
Tst = (eta*f*L + cp_*Tet)/((1+f)*cp_)
residuo = np.abs((Tst - Tst_)/Tst_)
if verboso:
print(niteraciones, Tst, residuo)
if np.abs(residuo)<tolerancia:
sigue_iterando = False
return Tst, niteraciones, residuo
elif niteraciones==iter_max:
                print('WARNING: MAXIMUM NUMBER OF ITERATIONS REACHED')
sigue_iterando = False
return Tst, niteraciones, residuo
else:
Tst = (eta*f*L + cp*Tet)/((1+f)*cp)
return Tst
def turbina_tempsalida_acoplamiento(eta_mec, Wconsumido, GTurbina, cp, Het, Tet=500, verboso=False):
"""
Solución de la temperatura de remanso a la salida de una turbina dada la
potencia que debe suministrar, dadas unas pérdidas mecánicas en el eje de
acoplamiento mecánico y dado un cp:
-(Wconsumido + Wperdido) = WTurbina
Entradas:
- eta_mec: Tanto por 1 de pérdidas en eje mecánico [-]
- Wconsumido: Potencia mecánica consumida [W]
- GTurbina: Gasto circulante por la turbina [kg/s]
- cp: Calor específico a presión constante del aire
- Valor discreto
- Función de la temperatura
- Het: entalpía total entrante en la turbina [W]
Tet: Estimación inicial de la temperatura en entrada de la turbina [K]
Por defecto se considera temperatura de 500K
- verboso: Si es verdadero: print con numero de iteracion, temperatura y residuo para cada iteracion
Por defecto se considera Falso.
Si el cp es función de la temperatura la solución se itera.
Salida:
- Temperatura de remanso a la salida [K]
- Número de iteraciones necesarias para converger
- Residuo de la temperatura (adimensional)
"""
Wconsumido = np.abs(Wconsumido)
    WTurbina = -Wconsumido/(1+eta_mec) # Mechanical power required from the turbine
    hst = (WTurbina + Het)/GTurbina # Specific total enthalpy at the turbine outlet
if callable(cp):
        # Iterate
sigue_iterando = True
Tst = Tet
niteraciones = 0
while sigue_iterando:
niteraciones += 1
Tst_ = Tst
Tturb = (Tst + Tet)/2
cp_ = cp(Tturb)
Tst = hst/cp_
residuo = np.abs((Tst - Tst_)/Tst_)
if verboso:
print(niteraciones, Tst, residuo)
if np.abs(residuo)<tolerancia:
sigue_iterando = False
return Tst, niteraciones, residuo
elif niteraciones==iter_max:
                print('WARNING: MAXIMUM NUMBER OF ITERATIONS REACHED')
sigue_iterando = False
return Tst, niteraciones, residuo
else:
Tst = hst/cp
return Tst
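# A minimal sketch of the power balance solved above (illustrative numbers;
# H4t stands for the total enthalpy flow entering the turbine, computed upstream):
#
#   # 10 MW consumed, 1% shaft losses, 30 kg/s, constant cp of 1004.5 J/kg/K
#   # T5t = turbina_tempsalida_acoplamiento(0.01, 10e6, 30.0, 1004.5, H4t)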
def turbina_presionsalida_rend_adiab(eta, pet, rel_temperaturas, gamma, Tturb=500, verboso=False):
"""
Presión de remanso a la salida de una turbina dada la relación de temperaturas
salida/entrada y la presión de entrada.
Entradas:
- eta: Rendimiento adiabático de turbina [-]
- pet: Presión de remanso a la entrada [Pa]
- Coeficiente de dilatación adiabático (gamma) [-]
- Tturb: Temperatura promedio de la turbina p.ej. (Tet+Tst)/2 [K]
Por defecto 500K
Salida:
- Temperatura de remanso a la salida [K]
"""
if callable(gamma):
gamma_=gamma(Tturb)
else:
gamma_=gamma
pst = pet*((rel_temperaturas - 1)/eta + 1)**(gamma_/(gamma_-1))
return pst
def tobera_temperaturaestatica_rend_adiab(eta, ps, pet, Tet, gamma):
if callable(gamma):
gamma_=gamma(Tet)
else:
gamma_=gamma
factor = (ps/pet)**((gamma_-1)/gamma_) - 1
factor = factor * eta + 1
Ts = Tet * factor
return Ts
def test(T4t, cp_air, G5, Wca, H4t):
sigue_iterando = True
ii = 0
T5t = 500
while sigue_iterando:
ii += 1
T5t_ = T5t
Tturb = (T4t+T5t)/2
T5t = (-Wca + H4t)/G5/cp_air(Tturb)
residuo = np.abs((T5t-T5t_)/T5t_)
print(ii, T5t, T5t_, residuo, cp_air(Tturb), Tturb)
if residuo<1e-6:
sigue_iterando=False
return T5t
| mit | 2,013,426,033,461,439,500 | 30.35102 | 108 | 0.555729 | false |
magsilva/scriptLattes | scriptLattes/grupo.py | 1 | 34361 | #!/usr/bin/python
# encoding: utf-8
# filename: grupo.py
#
# scriptLattes V8
# Copyright 2005-2013: Jesús P. Mena-Chalco and Roberto M. Cesar-Jr.
# http://scriptlattes.sourceforge.net/
#
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation (FSF); either version 2 of
# the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation (FSF) Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import fileinput
import sets
import operator
import os
from membro import *
from compiladorDeListas import *
from charts.grafoDeColaboracoes import *
from charts.graficoDeBarras import *
from charts.mapaDeGeolocalizacao import *
from geradorDePaginasWeb import *
from authorRank import *
from internacionalizacao.analisadorDePublicacoes import *
from qualis.qualis import *
from geradorDeXML import *
from scriptLattes.util import *
class Grupo:
compilador = None
listaDeParametros = []
listaDeMembros = []
listaDeRotulos = []
listaDeRotulosCores = []
listaDePublicacoesEinternacionalizacao = []
arquivoConfiguracao = None
itemsDesdeOAno = None
itemsAteOAno = None
diretorioCache = None
diretorioDoi = None
matrizArtigoEmPeriodico = None
matrizLivroPublicado = None
matrizCapituloDeLivroPublicado = None
matrizTextoEmJornalDeNoticia = None
matrizTrabalhoCompletoEmCongresso = None
matrizResumoExpandidoEmCongresso = None
matrizResumoEmCongresso = None
matrizArtigoAceito = None
matrizApresentacaoDeTrabalho = None
matrizOutroTipoDeProducaoBibliografica = None
matrizSoftwareComPatente = None
matrizSoftwareSemPatente = None
matrizProdutoTecnologico = None
matrizProcessoOuTecnica = None
matrizTrabalhoTecnico = None
matrizOutroTipoDeProducaoTecnica = None
matrizProducaoArtistica = None
matrizPatente = None
matrizProgramaComputador = None
matrizDesenhoIndustrial = None
matrizDeAdjacencia = None
matrizDeFrequencia = None
matrizDeFrequenciaNormalizada = None
vetorDeCoAutoria = None
grafosDeColaboracoes = None
mapaDeGeolocalizacao = None
geradorDeXml = None
vectorRank = None
nomes = None
rotulos = None
geolocalizacoes = None
qualis = None
def __init__(self, arquivo):
self.arquivoConfiguracao = arquivo
self.carregarParametrosPadrao()
# update the list of parameters
for linha in fileinput.input(self.arquivoConfiguracao):
linha = linha.replace("\r","")
linha = linha.replace("\n","")
linhaPart = linha.partition("#") # eliminamos os comentários
linhaDiv = linhaPart[0].split("=",1)
if len(linhaDiv)==2:
self.atualizarParametro(linhaDiv[0], linhaDiv[1])
# load the global period
ano1 = self.obterParametro('global-itens_desde_o_ano')
ano2 = self.obterParametro('global-itens_ate_o_ano')
if ano1.lower()=='hoje':
ano1 = str(datetime.datetime.now().year)
if ano2.lower()=='hoje':
ano2 = str(datetime.datetime.now().year)
if ano1=='':
ano1 = '0'
if ano2=='':
ano2 = '10000'
self.itemsDesdeOAno = int(ano1)
self.itemsAteOAno = int(ano2)
self.diretorioCache = self.obterParametro('global-diretorio_de_armazenamento_de_cvs')
if self.diretorioCache=='':
self.diretorioCache = os.path.expanduser(os.path.join("~", ".scriptLattes", "cacheCV"))
util.criarDiretorio(self.diretorioCache)
self.diretorioDoi = self.obterParametro('global-diretorio_de_armazenamento_de_doi')
if self.diretorioDoi == '':
self.diretorioDoi = os.path.expanduser(os.path.join("~", ".scriptLattes", "cacheDoi"))
util.criarDiretorio(self.diretorioDoi)
# load the list of members
entrada = buscarArquivo(self.obterParametro('global-arquivo_de_entrada'))
idSequencial = 0
for linha in fileinput.input(entrada):
linha = linha.replace("\r","")
linha = linha.replace("\n","")
linhaPart = linha.partition("#") # eliminamos os comentários
linhaDiv = linhaPart[0].split(",")
if not linhaDiv[0].strip()=='':
identificador = linhaDiv[0].strip() if len(linhaDiv)>0 else ''
nome = linhaDiv[1].strip() if len(linhaDiv)>1 else ''
periodo = linhaDiv[2].strip() if len(linhaDiv)>2 else ''
rotulo = linhaDiv[3].strip() if len(linhaDiv)>3 and not linhaDiv[3].strip()=='' else '[Sem rotulo]'
# rotulo = rotulo.capitalize()
# assign the initial values for each member
###if 'xml' in identificador.lower():
###### self.listaDeMembros.append(Membro(idSequencial, '', nome, periodo, rotulo, self.itemsDesdeOAno, self.itemsAteOAno, xml=identificador))
### self.listaDeMembros.append(Membro(idSequencial, identificador, nome, periodo, rotulo, self.itemsDesdeOAno, self.itemsAteOAno, diretorioCache))
###else:
self.listaDeMembros.append(Membro(idSequencial, identificador, nome, periodo, rotulo, self.itemsDesdeOAno, self.itemsAteOAno, self.diretorioCache))
self.listaDeRotulos.append(rotulo)
idSequencial+=1
self.listaDeRotulos = list(set(self.listaDeRotulos)) # unique list of labels
self.listaDeRotulos.sort()
self.listaDeRotulosCores = ['']*len(self.listaDeRotulos)
self.qualis = Qualis(self) # load Qualis from the files defined in the configuration file
def gerarXMLdeGrupo(self):
if self.obterParametro('global-salvar_informacoes_em_formato_xml'):
self.geradorDeXml = GeradorDeXML(self)
self.geradorDeXml.gerarXmlParaGrupo()
if self.geradorDeXml.listaErroXml:
print "\n\n[AVISO] Erro ao gerar XML para os lattes abaixo:"
for item in self.geradorDeXml.listaErroXml:
print "- [ID Lattes: " + item + "]"
def gerarCSVdeQualisdeGrupo(self):
prefix = self.obterParametro('global-prefixo')+'-' if not self.obterParametro('global-prefixo')=='' else ''
# Save the per-member list
s = ""
for membro in self.listaDeMembros:
s += self.imprimeCSVListaIndividual(membro.nomeCompleto, membro.listaArtigoEmPeriodico)
s += self.imprimeCSVListaIndividual(membro.nomeCompleto, membro.listaTrabalhoCompletoEmCongresso)
s += self.imprimeCSVListaIndividual(membro.nomeCompleto, membro.listaResumoExpandidoEmCongresso)
self.salvarArquivoGenerico(s, prefix+'publicacoesPorMembro.csv')
# Save the full list (the group's publications)
s = ""
s += self.imprimeCSVListaGrupal(self.compilador.listaCompletaArtigoEmPeriodico)
s += self.imprimeCSVListaGrupal(self.compilador.listaCompletaTrabalhoCompletoEmCongresso)
s += self.imprimeCSVListaGrupal(self.compilador.listaCompletaResumoExpandidoEmCongresso)
self.salvarArquivoGenerico(s, prefix+'publicacoesDoGrupo.csv')
def gerarCSVdeQualisdeGrupoOld(self):
if self.obterParametro('global-identificar_publicacoes_com_qualis'):
prefix = self.obterParametro('global-prefixo')+'-' if not self.obterParametro('global-prefixo')=='' else ''
# Save the per-member list
s = ""
for membro in self.listaDeMembros:
if (not self.obterParametro('global-arquivo_qualis_de_periodicos')==''):
s += self.imprimeCSVListaIndividual(membro.nomeCompleto, membro.listaArtigoEmPeriodico)
if (not self.obterParametro('global-arquivo_qualis_de_congressos')==''):
s += self.imprimeCSVListaIndividual(membro.nomeCompleto, membro.listaTrabalhoCompletoEmCongresso)
s += self.imprimeCSVListaIndividual(membro.nomeCompleto, membro.listaResumoExpandidoEmCongresso)
self.salvarArquivoGenerico(s, prefix+'qualisPorMembro.csv')
# Save the full list (the group's publications)
s = ""
if (not self.obterParametro('global-arquivo_qualis_de_periodicos')==''):
s += self.imprimeCSVListaGrupal(self.compilador.listaCompletaArtigoEmPeriodico)
if (not self.obterParametro('global-arquivo_qualis_de_congressos')==''):
s += self.imprimeCSVListaGrupal(self.compilador.listaCompletaTrabalhoCompletoEmCongresso)
s += self.imprimeCSVListaGrupal(self.compilador.listaCompletaResumoExpandidoEmCongresso)
self.salvarArquivoGenerico(s, prefix+'qualisGrupal.csv')
def gerarArquivosTemporarios(self):
print "\n[CRIANDO ARQUIVOS TEMPORARIOS: CSV, RIS, TXT, GDF]"
self.gerarRISdeMembros()
self.gerarCSVdeQualisdeGrupo()
self.gerarXMLdeGrupo()
# Save some data for later analysis (with other tools)
prefix = self.obterParametro('global-prefixo')+'-' if not self.obterParametro('global-prefixo')=='' else ''
# (1) matrices
self.salvarMatrizTXT(self.matrizDeAdjacencia, prefix+"matrizDeAdjacencia.txt")
self.salvarMatrizTXT(self.matrizDeFrequencia, prefix+"matrizDeFrequencia.txt")
self.salvarMatrizTXT(self.matrizDeFrequenciaNormalizada, prefix+"matrizDeFrequenciaNormalizada.txt")
#self.salvarMatrizXML(self.matrizDeAdjacencia, prefix+"matrizDeAdjacencia.xml")
# (2) lists of names, labels, ids
self.salvarListaTXT(self.nomes, prefix+"listaDeNomes.txt")
self.salvarListaTXT(self.rotulos, prefix+"listaDeRotulos.txt")
self.salvarListaTXT(self.ids, prefix+"listaDeIDs.txt")
# (3) authorRank measures
self.salvarListaTXT(self.vectorRank, prefix+"authorRank.txt")
# (4) unique list of collaborators (advisors, or any other kind of partners...)
rawColaboradores = list([])
for membro in self.listaDeMembros:
for idColaborador in membro.listaIDLattesColaboradoresUnica:
rawColaboradores.append(idColaborador)
rawColaboradores = list(set(rawColaboradores))
self.salvarListaTXT(rawColaboradores, prefix+"colaboradores.txt")
# (5) Geolocations
self.geolocalizacoes = list([])
for membro in self.listaDeMembros:
self.geolocalizacoes.append(str(membro.enderecoProfissionalLat)+","+str(membro.enderecoProfissionalLon))
self.salvarListaTXT(self.geolocalizacoes, prefix+"listaDeGeolocalizacoes.txt")
# (6) GDF file
self.gerarArquivoGDF(prefix+"rede.gdf")
def gerarArquivoGDF(self, nomeArquivo):
# Vertices
N = len(self.listaDeMembros)
string = "nodedef> name VARCHAR, idLattes VARCHAR, label VARCHAR, rotulo VARCHAR, lat DOUBLE, lon DOUBLE, collaborationRank DOUBLE, producaoBibliografica DOUBLE, artigoEmPeriodico DOUBLE, livro DOUBLE, capituloDeLivro DOUBLE, trabalhoEmCongresso DOUBLE, resumoExpandido DOUBLE, resumo DOUBLE, color VARCHAR"
i = 0
for membro in self.listaDeMembros:
nomeCompleto = unicodedata.normalize('NFKD', membro.nomeCompleto).encode('ASCII', 'ignore')
string += "\n"+str(i)+","+membro.idLattes+","+nomeCompleto+","+membro.rotulo+","+membro.enderecoProfissionalLat+","+membro.enderecoProfissionalLon+","
string += str(self.vectorRank[i])+","
string += str(len(membro.listaArtigoEmPeriodico)+len(membro.listaLivroPublicado)+len(membro.listaCapituloDeLivroPublicado)+len(membro.listaTrabalhoCompletoEmCongresso)+len(membro.listaResumoExpandidoEmCongresso)+len(membro.listaResumoEmCongresso))+","
string += str(len(membro.listaArtigoEmPeriodico))+","
string += str(len(membro.listaLivroPublicado))+","
string += str(len(membro.listaCapituloDeLivroPublicado))+","
string += str(len(membro.listaTrabalhoCompletoEmCongresso))+","
string += str(len(membro.listaResumoExpandidoEmCongresso))+","
string += str(len(membro.listaResumoEmCongresso))+","
string += "'"+self.HTMLColorToRGB(membro.rotuloCorBG)+"'"
i+=1
# Edges
matriz = self.matrizDeAdjacencia
string += "\nedgedef> node1 VARCHAR, node2 VARCHAR, weight DOUBLE"
for i in range(0,N):
for j in range(i+1,N):
if (i!=j) and (matriz[i,j]>0):
string +='\n'+str(i)+','+str(j)+','+str(matriz[i,j])
# write the GDF file
dir = self.obterParametro('global-diretorio_de_saida')
arquivo = open(dir+"/"+nomeArquivo, 'w')
arquivo.write(string) # .encode("utf8","ignore"))
arquivo.close()
def HTMLColorToRGB(self, colorstring):
colorstring = colorstring.strip()
if colorstring[0] == '#': colorstring = colorstring[1:]
r, g, b = colorstring[:2], colorstring[2:4], colorstring[4:]
r, g, b = [int(n, 16) for n in (r, g, b)]
#return (r, g, b)
return str(r)+","+str(g)+","+str(b)
def imprimeCSVListaIndividual(self, nomeCompleto, lista):
s = ""
for pub in lista:
s += pub.csv(nomeCompleto).encode('utf8')+"\n"
return s
def imprimeCSVListaGrupal(self, listaCompleta):
s = ""
keys = listaCompleta.keys()
keys.sort(reverse=True)
if len(keys)>0:
for ano in keys:
elementos = listaCompleta[ano]
elementos.sort(key = lambda x: x.chave.lower())
for index in range(0, len(elementos)):
pub = elementos[index]
s += pub.csv().encode('utf8')+"\n"
return s
def gerarRISdeMembros(self):
prefix = self.obterParametro('global-prefixo')+'-' if not self.obterParametro('global-prefixo')=='' else ''
s = ""
for membro in self.listaDeMembros:
s += membro.ris().encode('utf8')+"\n"
self.salvarArquivoGenerico(s, prefix+'membros.ris')
def salvarArquivoGenerico(self, conteudo, nomeArquivo):
dir = self.obterParametro('global-diretorio_de_saida')
arquivo = open(dir+"/"+nomeArquivo, 'w')
arquivo.write(conteudo)
arquivo.close()
def carregarDadosCVLattes(self):
indice = 1
cvMaxAge = self.obterParametro('global-tempo_expiracao_cache')
for membro in self.listaDeMembros:
print "\n[LENDO REGISTRO LATTES: " + str(indice) + "o. DA LISTA]"
indice += 1
membro.carregarDadosCVLattes(cvMaxAge)
membro.filtrarItemsPorPeriodo()
print membro
def gerarMapaDeGeolocalizacao(self):
if self.obterParametro('mapa-mostrar_mapa_de_geolocalizacao'):
self.mapaDeGeolocalizacao = MapaDeGeolocalizacao(self)
def gerarPaginasWeb(self):
paginasWeb = GeradorDePaginasWeb(self)
def compilarListasDeItems(self):
self.compilador = CompiladorDeListas(self) # compilamos todo e criamos 'listasCompletas'
# Co-authorship graphs
self.compilador.criarMatrizesDeColaboracao()
[self.matrizDeAdjacencia, self.matrizDeFrequencia] = self.compilador.uniaoDeMatrizesDeColaboracao()
self.vetorDeCoAutoria = self.matrizDeFrequencia.sum(axis=1) # row sums = number of items produced in co-authorship (partnership) with another group member
self.matrizDeFrequenciaNormalizada = self.matrizDeFrequencia.copy()
for i in range(0, self.numeroDeMembros()):
if not self.vetorDeCoAutoria[i]==0:
self.matrizDeFrequenciaNormalizada[i,:] /= float(self.vetorDeCoAutoria[i])
# AuthorRank
authorRank = AuthorRank(self.matrizDeFrequenciaNormalizada, 100)
self.vectorRank = authorRank.vectorRank
# lists of names, labels and IDs
self.nomes = list([])
self.rotulos = list([])
self.ids = list([])
for membro in self.listaDeMembros:
self.nomes.append(membro.nomeCompleto)
self.rotulos.append(membro.rotulo)
self.ids.append(membro.idLattes)
def identificarQualisEmPublicacoes(self):
if self.obterParametro('global-identificar_publicacoes_com_qualis'):
print "\n[IDENTIFICANDO QUALIS EM PUBLICAÇÕES]"
for membro in self.listaDeMembros:
self.qualis.analisarPublicacoes(membro, self) # Qualis - attach Qualis ratings to the members' publications
self.qualis.calcularTotaisDosQualis(self)
self.separarQualisPorAno()
def separarQualisPorAno(self):
for membro in self.listaDeMembros:
tabelas = self.qualis.qualisPorAno(membro)
membro.tabelaQualisDosAnos = tabelas[0]
membro.tabelaQualisDosTipos = tabelas[1]
def salvarListaTXT(self, lista, nomeArquivo):
dir = self.obterParametro('global-diretorio_de_saida')
arquivo = open(dir+"/"+nomeArquivo, 'w')
for i in range(0,len(lista)):
elemento = lista[i]
if type(elemento)==type(unicode()):
elemento = elemento.encode("utf8")
else:
elemento = str(elemento)
arquivo.write(elemento+'\n')
arquivo.close()
def salvarMatrizTXT(self, matriz, nomeArquivo):
dir = self.obterParametro('global-diretorio_de_saida')
arquivo = open(dir+"/"+nomeArquivo, 'w')
N = matriz.shape[0]
for i in range(0,N):
for j in range(0,N):
arquivo.write(str(matriz[i , j])+' ')
arquivo.write('\n')
arquivo.close()
def salvarMatrizXML(self, matriz, nomeArquivo):
dir = self.obterParametro('global-diretorio_de_saida')
arquivo = open(dir+"/"+nomeArquivo, 'w')
s ='<?xml version="1.0" encoding="UTF-8"?> \
\n<!-- An excerpt of an egocentric social network --> \
\n<graphml xmlns="http://graphml.graphdrawing.org/xmlns"> \
\n<graph edgedefault="undirected"> \
\n<!-- data schema --> \
\n<key id="name" for="node" attr.name="name" attr.type="string"/> \
\n<key id="nickname" for="node" attr.name="nickname" attr.type="string"/> \
\n<key id="gender" for="node" attr.name="gender" attr.type="string"/> \
\n<key id="image" for="node" attr.name="image" attr.type="string"/> \
\n<key id="link" for="node" attr.name="link" attr.type="string"/> \
\n<key id="amount" for="edge" attr.name="amount" attr.type="int"/> \
\n<key id="pubs" for="node" attr.name="pubs" attr.type="int"/>'
for i in range(0, self.numeroDeMembros()):
membro = self.listaDeMembros[i]
s+='\n<!-- nodes --> \
\n<node id="'+str(membro.idMembro)+'"> \
\n<data key="name">'+membro.nomeCompleto+'</data> \
\n<data key="nickname">'+membro.nomeEmCitacoesBibliograficas+'</data> \
\n<data key="gender">'+membro.sexo[0].upper()+'</data> \
\n<data key="image">'+membro.foto+'</data> \
\n<data key="link">'+membro.url+'</data> \
\n<data key="pubs">'+str(int(self.vetorDeCoAutoria[i]))+'</data> \
\n</node>'
N = matriz.shape[0]
for i in range(0,N):
for j in range(0,N):
if matriz[i,j]>0:
s+='\n<!-- edges --> \
\n<edge source="'+str(i)+'" target="'+str(j)+'"> \
\n<data key="amount">'+str(matriz[i,j])+'</data> \
\n</edge>'
s+='\n</graph>\
\n</graphml>'
arquivo.write(s.encode('utf8'))
arquivo.close()
def salvarVetorDeProducoes(self, vetor, nomeArquivo):
dir = self.obterParametro('global-diretorio_de_saida')
arquivo = open(dir+"/"+nomeArquivo, 'w')
string = ''
for i in range(0,len(vetor)):
(prefixo, pAnos, pQuantidades) = vetor[i]
string += "\n" + prefixo + ":"
for j in range(0,len(pAnos)):
string += str(pAnos[j]) + ',' + str(pQuantidades[j]) + ';'
arquivo.write(string)
arquivo.close()
def salvarListaInternalizacaoTXT(self, listaDoiValido, nomeArquivo):
dir = self.obterParametro('global-diretorio_de_saida')
arquivo = open(dir+"/"+nomeArquivo, 'w')
for i in range(0,len(listaDoiValido)):
elemento = listaDoiValido[i]
if type(elemento)==type(unicode()):
elemento = elemento.encode("utf8")
else:
elemento = str(elemento)
arquivo.write(elemento+'\n')
arquivo.close()
def gerarGraficosDeBarras(self):
print "\n[CRIANDO GRAFICOS DE BARRAS]"
gBarra = GraficoDeBarras(self.obterParametro('global-diretorio_de_saida'))
gBarra.criarGrafico(self.compilador.listaCompletaArtigoEmPeriodico, 'PB0', 'Numero de publicacoes')
gBarra.criarGrafico(self.compilador.listaCompletaLivroPublicado, 'PB1', 'Numero de publicacoes')
gBarra.criarGrafico(self.compilador.listaCompletaCapituloDeLivroPublicado, 'PB2', 'Numero de publicacoes')
gBarra.criarGrafico(self.compilador.listaCompletaTextoEmJornalDeNoticia, 'PB3', 'Numero de publicacoes')
gBarra.criarGrafico(self.compilador.listaCompletaTrabalhoCompletoEmCongresso, 'PB4', 'Numero de publicacoes')
gBarra.criarGrafico(self.compilador.listaCompletaResumoExpandidoEmCongresso, 'PB5', 'Numero de publicacoes')
gBarra.criarGrafico(self.compilador.listaCompletaResumoEmCongresso, 'PB6', 'Numero de publicacoes')
gBarra.criarGrafico(self.compilador.listaCompletaArtigoAceito, 'PB7', 'Numero de publicacoes')
gBarra.criarGrafico(self.compilador.listaCompletaApresentacaoDeTrabalho, 'PB8', 'Numero de publicacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOutroTipoDeProducaoBibliografica, 'PB9', 'Numero de publicacoes')
gBarra.criarGrafico(self.compilador.listaCompletaSoftwareComPatente, 'PT0', 'Numero de producoes tecnicas')
gBarra.criarGrafico(self.compilador.listaCompletaSoftwareSemPatente, 'PT1', 'Numero de producoes tecnicas')
gBarra.criarGrafico(self.compilador.listaCompletaProdutoTecnologico, 'PT2', u'Numero de producoes tecnicas')
gBarra.criarGrafico(self.compilador.listaCompletaProcessoOuTecnica, 'PT3', 'Numero de producoes tecnicas')
gBarra.criarGrafico(self.compilador.listaCompletaTrabalhoTecnico, 'PT4', 'Numero de producoes tecnicas')
gBarra.criarGrafico(self.compilador.listaCompletaOutroTipoDeProducaoTecnica, 'PT5', 'Numero de producoes tecnicas')
gBarra.criarGrafico(self.compilador.listaCompletaPatente, 'PR0', 'Numero de patentes')
gBarra.criarGrafico(self.compilador.listaCompletaProgramaComputador, 'PR1', 'Numero de programa de computador')
gBarra.criarGrafico(self.compilador.listaCompletaDesenhoIndustrial, 'PR2', 'Numero de desenho industrial')
gBarra.criarGrafico(self.compilador.listaCompletaProducaoArtistica, 'PA0', 'Numero de producoes artisticas')
gBarra.criarGrafico(self.compilador.listaCompletaOASupervisaoDePosDoutorado, 'OA0', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOATeseDeDoutorado, 'OA1', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOADissertacaoDeMestrado, 'OA2', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOAMonografiaDeEspecializacao, 'OA3', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOATCC, 'OA4', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOAIniciacaoCientifica, 'OA5', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOAOutroTipoDeOrientacao, 'OA6', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOCSupervisaoDePosDoutorado, 'OC0', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOCTeseDeDoutorado, 'OC1', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOCDissertacaoDeMestrado, 'OC2', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOCMonografiaDeEspecializacao, 'OC3', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOCTCC, 'OC4', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOCIniciacaoCientifica, 'OC5', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaOCOutroTipoDeOrientacao, 'OC6', 'Numero de orientacoes')
gBarra.criarGrafico(self.compilador.listaCompletaPremioOuTitulo, 'Pm', 'Numero de premios')
gBarra.criarGrafico(self.compilador.listaCompletaProjetoDePesquisa, 'Pj', 'Numero de projetos')
gBarra.criarGrafico(self.compilador.listaCompletaPB, 'PB', 'Numero de producoes bibliograficas')
gBarra.criarGrafico(self.compilador.listaCompletaPT, 'PT', 'Numero de producoes tecnicas')
gBarra.criarGrafico(self.compilador.listaCompletaPA, 'PA', 'Numero de producoes artisticas')
gBarra.criarGrafico(self.compilador.listaCompletaOA, 'OA', 'Numero de orientacoes em andamento')
gBarra.criarGrafico(self.compilador.listaCompletaOC, 'OC', 'Numero de orientacoes concluidas')
gBarra.criarGrafico(self.compilador.listaCompletaParticipacaoEmEvento, 'Ep', 'Numero de Eventos')
gBarra.criarGrafico(self.compilador.listaCompletaOrganizacaoDeEvento, 'Eo', 'Numero de Eventos')
prefix = self.obterParametro('global-prefixo')+'-' if not self.obterParametro('global-prefixo')=='' else ''
self.salvarVetorDeProducoes(gBarra.obterVetorDeProducoes(), prefix+'vetorDeProducoes.txt')
def gerarGrafosDeColaboracoes(self):
if self.obterParametro('grafo-mostrar_grafo_de_colaboracoes'):
self.grafosDeColaboracoes = GrafoDeColaboracoes(self, self.obterParametro('global-diretorio_de_saida'))
print "\n[ROTULOS]"
print "- "+str(self.listaDeRotulos)
print "- "+str(self.listaDeRotulosCores)
def gerarGraficoDeProporcoes(self):
if self.obterParametro('relatorio-incluir_grafico_de_proporcoes_bibliograficas'):
gProporcoes = GraficoDeProporcoes(self, self.obterParametro('global-diretorio_de_saida'))
def calcularInternacionalizacao(self):
if self.obterParametro('relatorio-incluir_internacionalizacao'):
print "\n[ANALISANDO INTERNACIONALIZACAO]"
self.analisadorDePublicacoes = AnalisadorDePublicacoes(self)
self.listaDePublicacoesEinternacionalizacao = self.analisadorDePublicacoes.analisarInternacionalizacaoNaCoautoria()
if self.analisadorDePublicacoes.listaDoiValido is not None:
prefix = self.obterParametro('global-prefixo')+'-' if not self.obterParametro('global-prefixo')=='' else ''
self.salvarListaInternalizacaoTXT( self.analisadorDePublicacoes.listaDoiValido,prefix+'internacionalizacao.txt')
def imprimirListasCompletas(self):
self.compilador.imprimirListasCompletas()
def imprimirMatrizesDeFrequencia(self):
self.compilador.imprimirMatrizesDeFrequencia()
print "\n[VETOR DE CO-AUTORIA]"
print self.vetorDeCoAutoria
print "\n[MATRIZ DE FREQUENCIA NORMALIZADA]"
print self.matrizDeFrequenciaNormalizada
def numeroDeMembros(self):
return len(self.listaDeMembros)
def ordenarListaDeMembros(self, chave):
self.listaDeMembros.sort(key=operator.attrgetter(chave)) # sort by the given attribute (typically the name)
def imprimirListaDeParametros(self):
for par in self.listaDeParametros:# .keys():
print "[PARAMETRO] ",par[0]," = ",par[1]
print
def imprimirListaDeMembros(self):
for membro in self.listaDeMembros:
print membro
print
def imprimirListaDeRotulos(self):
for rotulo in self.listaDeRotulos:
print "[ROTULO] ", rotulo
def atualizarParametro(self, parametro, valor):
parametro = parametro.strip().lower()
valor = valor.strip()
for i in range(0,len(self.listaDeParametros)):
if parametro==self.listaDeParametros[i][0]:
self.listaDeParametros[i][1] = valor
return
print "[AVISO IMPORTANTE] Nome de parametro desconhecido: "+parametro
def obterParametro(self, parametro):
for i in range(0,len(self.listaDeParametros)):
if parametro==self.listaDeParametros[i][0]:
if self.listaDeParametros[i][1].lower()=='sim':
return 1
if self.listaDeParametros[i][1].lower()=='nao' or self.listaDeParametros[i][1].lower()=='não':
return 0
return self.listaDeParametros[i][1]
def atribuirCoNoRotulo(self, indice, cor):
self.listaDeRotulosCores[indice] = cor
def carregarParametrosPadrao(self):
self.listaDeParametros.append(['global-nome_do_grupo', ''])
self.listaDeParametros.append(['global-arquivo_de_entrada', ''])
self.listaDeParametros.append(['global-diretorio_de_saida', ''])
self.listaDeParametros.append(['global-email_do_admin', ''])
self.listaDeParametros.append(['global-idioma', 'PT'])
self.listaDeParametros.append(['global-itens_desde_o_ano', ''])
self.listaDeParametros.append(['global-itens_ate_o_ano', '']) # today
self.listaDeParametros.append(['global-itens_por_pagina', '1000'])
self.listaDeParametros.append(['global-criar_paginas_jsp', 'nao'])
self.listaDeParametros.append(['global-google_analytics_key', ''])
self.listaDeParametros.append(['global-prefixo', ''])
self.listaDeParametros.append(['global-diretorio_de_armazenamento_de_cvs', ''])
self.listaDeParametros.append(['global-diretorio_de_armazenamento_de_doi', ''])
self.listaDeParametros.append(['global-tempo_expiracao_cache', ''])
self.listaDeParametros.append(['global-salvar_informacoes_em_formato_xml', 'nao'])
self.listaDeParametros.append(['global-identificar_publicacoes_com_qualis', 'nao'])
self.listaDeParametros.append(['global-extrair_qualis_online','sim'])
self.listaDeParametros.append(['global-arquivo_areas_qualis',''])
self.listaDeParametros.append(['global-arquivo_qualis_de_congressos', ''])
self.listaDeParametros.append(['global-arquivo_qualis_de_periodicos', ''])
self.listaDeParametros.append(['relatorio-salvar_publicacoes_em_formato_ris', 'nao'])
self.listaDeParametros.append(['relatorio-incluir_artigo_em_periodico', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_livro_publicado', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_capitulo_de_livro_publicado', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_texto_em_jornal_de_noticia', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_trabalho_completo_em_congresso', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_resumo_expandido_em_congresso', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_resumo_em_congresso', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_artigo_aceito_para_publicacao', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_apresentacao_de_trabalho', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_outro_tipo_de_producao_bibliografica', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_software_com_patente', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_software_sem_patente', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_produto_tecnologico', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_processo_ou_tecnica', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_trabalho_tecnico', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_outro_tipo_de_producao_tecnica', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_patente', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_programa_computador', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_desenho_industrial', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_producao_artistica', 'sim'])
self.listaDeParametros.append(['relatorio-mostrar_orientacoes', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_em_andamento_pos_doutorado', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_em_andamento_doutorado', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_em_andamento_mestrado', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_em_andamento_monografia_de_especializacao', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_em_andamento_tcc', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_em_andamento_iniciacao_cientifica', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_em_andamento_outro_tipo', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_concluida_pos_doutorado', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_concluida_doutorado', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_concluida_mestrado', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_concluida_monografia_de_especializacao', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_concluida_tcc', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_concluida_iniciacao_cientifica', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_orientacao_concluida_outro_tipo', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_projeto', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_premio', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_participacao_em_evento', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_organizacao_de_evento', 'sim'])
self.listaDeParametros.append(['relatorio-incluir_internacionalizacao', 'nao'])
self.listaDeParametros.append(['grafo-mostrar_grafo_de_colaboracoes', 'sim'])
self.listaDeParametros.append(['grafo-mostrar_todos_os_nos_do_grafo', 'sim'])
self.listaDeParametros.append(['grafo-considerar_rotulos_dos_membros_do_grupo', 'sim'])
self.listaDeParametros.append(['grafo-mostrar_aresta_proporcional_ao_numero_de_colaboracoes', 'sim'])
self.listaDeParametros.append(['grafo-incluir_artigo_em_periodico', 'sim'])
self.listaDeParametros.append(['grafo-incluir_livro_publicado', 'sim'])
self.listaDeParametros.append(['grafo-incluir_capitulo_de_livro_publicado', 'sim'])
self.listaDeParametros.append(['grafo-incluir_texto_em_jornal_de_noticia', 'sim'])
self.listaDeParametros.append(['grafo-incluir_trabalho_completo_em_congresso', 'sim'])
self.listaDeParametros.append(['grafo-incluir_resumo_expandido_em_congresso', 'sim'])
self.listaDeParametros.append(['grafo-incluir_resumo_em_congresso', 'sim'])
self.listaDeParametros.append(['grafo-incluir_artigo_aceito_para_publicacao', 'sim'])
self.listaDeParametros.append(['grafo-incluir_apresentacao_de_trabalho', 'sim'])
self.listaDeParametros.append(['grafo-incluir_outro_tipo_de_producao_bibliografica', 'sim'])
self.listaDeParametros.append(['grafo-incluir_software_com_patente', 'sim'])
self.listaDeParametros.append(['grafo-incluir_software_sem_patente', 'sim'])
self.listaDeParametros.append(['grafo-incluir_produto_tecnologico', 'sim'])
self.listaDeParametros.append(['grafo-incluir_processo_ou_tecnica', 'sim'])
self.listaDeParametros.append(['grafo-incluir_trabalho_tecnico', 'sim'])
self.listaDeParametros.append(['grafo-incluir_outro_tipo_de_producao_tecnica', 'sim'])
self.listaDeParametros.append(['grafo-incluir_patente', 'sim'])
self.listaDeParametros.append(['grafo-incluir_programa_computador', 'sim'])
self.listaDeParametros.append(['grafo-incluir_desenho_industrial', 'sim'])
self.listaDeParametros.append(['grafo-incluir_producao_artistica', 'sim'])
self.listaDeParametros.append(['grafo-incluir_grau_de_colaboracao', 'nao'])
self.listaDeParametros.append(['mapa-mostrar_mapa_de_geolocalizacao', 'sim'])
self.listaDeParametros.append(['mapa-incluir_membros_do_grupo', 'sim'])
self.listaDeParametros.append(['mapa-incluir_alunos_de_pos_doutorado', 'sim'])
self.listaDeParametros.append(['mapa-incluir_alunos_de_doutorado', 'sim'])
self.listaDeParametros.append(['mapa-incluir_alunos_de_mestrado', 'nao'])
| gpl-2.0 | -2,055,407,080,451,436,800 | 45.378378 | 309 | 0.743153 | false |
GREO/GNU-Radio | gnuradio-core/src/python/gnuradio/blks2impl/pfb_channelizer.py | 2 | 2164 | #!/usr/bin/env python
#
# Copyright 2009,2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
class pfb_channelizer_ccf(gr.hier_block2):
'''
Make a Polyphase Filter channelizer (complex in, complex out, floating-point taps).
This simplifies the interface by allowing a single input stream to connect to this block.
It will then output a stream for each channel.
'''
def __init__(self, numchans, taps, oversample_rate=1):
gr.hier_block2.__init__(self, "pfb_channelizer_ccf",
gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
gr.io_signature(numchans, numchans, gr.sizeof_gr_complex)) # Output signature
self._numchans = numchans
self._taps = taps
self._oversample_rate = oversample_rate
self.s2ss = gr.stream_to_streams(gr.sizeof_gr_complex, self._numchans)
self.pfb = gr.pfb_channelizer_ccf(self._numchans, self._taps,
self._oversample_rate)
self.v2s = gr.vector_to_streams(gr.sizeof_gr_complex, self._numchans)
self.connect(self, self.s2ss)
for i in xrange(self._numchans):
self.connect((self.s2ss,i), (self.pfb,i))
# Get independent streams from the filterbank and send them out
self.connect(self.pfb, self.v2s)
for i in xrange(self._numchans):
self.connect((self.v2s,i), (self,i))
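# A minimal flowgraph sketch (assuming the classic GNU Radio 3.x API this
# module targets; the rates, taps and channel count are illustrative only):
#
#   from gnuradio import gr, blks2
#
#   tb = gr.top_block()
#   src = gr.sig_source_c(320000, gr.GR_SIN_WAVE, 10000, 1.0)
#   taps = gr.firdes.low_pass(1.0, 320000, 20000, 5000)
#   channelizer = blks2.pfb_channelizer_ccf(4, taps)
#   tb.connect(src, channelizer)
#   for chan in range(4):
#       tb.connect((channelizer, chan), gr.null_sink(gr.sizeof_gr_complex))
#   tb.run()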
| gpl-3.0 | 3,062,746,194,167,319,000 | 35.677966 | 93 | 0.674214 | false |
IDragonfire/modular-client | src/games/_gameswidget.py | 1 | 16522 | #-------------------------------------------------------------------------------
# Copyright (c) 2012 Gael Honorez.
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the GNU Public License v3.0
# which accompanies this distribution, and is available at
# http://www.gnu.org/licenses/gpl.html
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#-------------------------------------------------------------------------------
from PyQt4 import QtCore, QtGui
import util
from games.gameitem import GameItem, GameItemDelegate
from games.moditem import ModItem, mod_invisible, mods
from games.hostgamewidget import HostgameWidget
from games._mapSelectWidget import mapSelectWidget
from games import logger
from fa import Faction
import random
import fa
import modvault
import notificatation_system as ns
RANKED_SEARCH_EXPANSION_TIME = 10000 #milliseconds before search radius expands
SEARCH_RADIUS_INCREMENT = 0.05
SEARCH_RADIUS_MAX = 0.25
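# With these values a ranked search widens its rating radius by 0.05 every
# 10 seconds and stops expanding once it reaches 0.25 (see expandSearchRanked).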
FormClass, BaseClass = util.loadUiType("games/games.ui")
class GamesWidget(FormClass, BaseClass):
def __init__(self, client, *args, **kwargs):
BaseClass.__init__(self, *args, **kwargs)
self.setupUi(self)
self.client = client
self.client.gamesTab.layout().addWidget(self)
#Dictionary containing our actual games.
self.games = {}
#Ranked search UI
self.rankedAeon.setIcon(util.icon("games/automatch/aeon.png"))
self.rankedCybran.setIcon(util.icon("games/automatch/cybran.png"))
self.rankedSeraphim.setIcon(util.icon("games/automatch/seraphim.png"))
self.rankedUEF.setIcon(util.icon("games/automatch/uef.png"))
self.rankedRandom.setIcon(util.icon("games/automatch/random.png"))
self.connectRankedToggles()
self.rankedTimer = QtCore.QTimer()
self.rankedTimer.timeout.connect(self.expandSearchRanked)
self.searchProgress.hide()
# Ranked search state variables
self.searching = False
self.radius = 0
self.race = None
self.ispassworded = False
self.canChooseMap = True
self.client.modInfo.connect(self.processModInfo)
self.client.gameInfo.connect(self.processGameInfo)
self.client.rankedGameAeon.connect(self.togglingAeon)
self.client.rankedGameCybran.connect(self.togglingCybran)
self.client.rankedGameSeraphim.connect(self.togglingSeraphim)
self.client.rankedGameUEF.connect(self.togglingUEF)
self.client.rankedGameRandom.connect(self.togglingRandom)
self.client.gameEnter.connect(self.stopSearchRanked)
self.client.viewingReplay.connect(self.stopSearchRanked)
self.gameList.setItemDelegate(GameItemDelegate(self))
self.gameList.itemDoubleClicked.connect(self.gameDoubleClicked)
self.modList.itemDoubleClicked.connect(self.hostGameClicked)
try:
self.mapSelectButton.clicked.connect(self.mapSelectClicked)
except:
QtGui.QMessageBox.warning(None, "Skin outdated.", "The theme you are using is outdated. Please remove it or the lobby will malfunction.")
#Load game name from settings (yay, it's persistent!)
self.loadGameName()
self.loadGameMap()
self.loadPassword()
self.options = []
def connectRankedToggles(self):
self.rankedAeon.toggled.connect(self.toggleAeon)
self.rankedCybran.toggled.connect(self.toggleCybran)
self.rankedSeraphim.toggled.connect(self.toggleSeraphim)
self.rankedUEF.toggled.connect(self.toggleUEF)
self.rankedRandom.toggled.connect(self.toggleRandom)
def disconnectRankedToggles(self):
self.rankedAeon.toggled.disconnect(self.toggleAeon)
self.rankedCybran.toggled.disconnect(self.toggleCybran)
self.rankedSeraphim.toggled.disconnect(self.toggleSeraphim)
self.rankedUEF.toggled.disconnect(self.toggleUEF)
self.rankedRandom.toggled.disconnect(self.toggleRandom)
def mapSelectClicked(self):
''' Open the map selection dialog. '''
mapSelection = mapSelectWidget(self)
mapSelection.exec_()
@QtCore.pyqtSlot(dict)
def processModInfo(self, message):
'''
Slot that interprets and propagates mod_info messages into the mod list
'''
item = ModItem(message)
if message["host"] :
self.modList.addItem(item)
else :
mod_invisible.append(message["name"])
if not message["name"] in mods :
mods[message["name"]] = item
self.client.replays.modList.addItem(message["name"])
@QtCore.pyqtSlot(dict)
def processGameInfo(self, message):
'''
Slot that interprets and propagates game_info messages into GameItems
'''
uid = message["uid"]
if uid not in self.games:
self.games[uid] = GameItem(uid)
self.gameList.addItem(self.games[uid])
self.games[uid].update(message, self.client)
if message['state'] == 'open' and message['access'] == 'public':
self.client.notificationSystem.on_event(ns.NotificationSystem.NEW_GAME, message)
else:
self.games[uid].update(message, self.client)
#Special case: removal of a game that has ended
if message['state'] == "closed":
if uid in self.games:
self.gameList.takeItem(self.gameList.row(self.games[uid]))
del self.games[uid]
return
def startSearchRanked(self, race):
if (fa.exe.running()):
QtGui.QMessageBox.information(None, "ForgedAlliance.exe", "FA is already running.")
self.stopSearchRanked()
return
if (not fa.exe.check("ladder1v1")):
self.stopSearchRanked()
logger.error("Can't play ranked without successfully updating Forged Alliance.")
return
if (self.searching):
logger.info("Switching Ranked Search to Race " + str(race))
self.race = race
self.client.send(dict(command="game_matchmaking", mod="ladder1v1", state="settings", faction = self.race))
else:
#Experimental UPnP Mapper - mappings are removed on app exit
if self.client.useUPnP:
fa.upnp.createPortMapping(self.client.localIP, self.client.gamePort, "UDP")
logger.info("Starting Ranked Search as " + str(race) + ", port: " + str(self.client.gamePort))
self.searching = True
self.race = race
self.radius = 0
self.searchProgress.setVisible(True)
self.labelAutomatch.setText("Searching...")
self.rankedTimer.start(RANKED_SEARCH_EXPANSION_TIME)
self.client.send(dict(command="game_matchmaking", mod="ladder1v1", state="start", gameport = self.client.gamePort, faction = self.race))
#self.client.writeToServer('SEARCH_LADDERGAME', 'START', self.client.gamePort)
@QtCore.pyqtSlot()
def expandSearchRanked(self):
self.radius += SEARCH_RADIUS_INCREMENT
if self.radius >= SEARCH_RADIUS_MAX:
self.radius = SEARCH_RADIUS_MAX;
logger.debug("Search Cap reached at " + str(self.radius))
self.rankedTimer.stop()
else:
logger.debug("Expanding search to " + str(self.radius))
self.client.send(dict(command="game_matchmaking", mod="ladder1v1", state="expand", rate=self.radius))
@QtCore.pyqtSlot()
def stopSearchRanked(self, *args):
if (self.searching):
logger.debug("Stopping Ranked Search")
self.rankedTimer.stop()
self.client.send(dict(command="game_matchmaking", mod="ladder1v1", state="stop"))
self.searching = False
self.searchProgress.setVisible(False)
self.labelAutomatch.setText("1 vs 1 Automatch")
self.disconnectRankedToggles()
self.rankedAeon.setChecked(False)
self.rankedCybran.setChecked(False)
self.rankedSeraphim.setChecked(False)
self.rankedUEF.setChecked(False)
self.connectRankedToggles()
@QtCore.pyqtSlot(bool)
def togglingAeon(self, state):
self.client.rankedAeon.setChecked(1)
self.toggleAeon(1)
@QtCore.pyqtSlot(bool)
def togglingCybran(self, state):
self.client.rankedCybran.setChecked(1)
self.toggleCybran(1)
@QtCore.pyqtSlot(bool)
def togglingSeraphim(self, state):
self.client.rankedSeraphim.setChecked(1)
self.toggleSeraphim(1)
@QtCore.pyqtSlot(bool)
def togglingRandom(self, state):
self.client.rankedRandom.setChecked(1)
self.toggleRandom(1)
@QtCore.pyqtSlot(bool)
def togglingUEF(self, state):
self.client.rankedUEF.setChecked(1)
self.toggleUEF(1)
@QtCore.pyqtSlot(bool)
def toggleUEF(self, state):
if (state):
self.startSearchRanked(Faction.UEF)
self.disconnectRankedToggles()
self.rankedAeon.setChecked(False)
self.rankedCybran.setChecked(False)
self.rankedSeraphim.setChecked(False)
self.connectRankedToggles()
else:
self.stopSearchRanked()
@QtCore.pyqtSlot(bool)
def toggleAeon(self, state):
if (state):
self.startSearchRanked(Faction.AEON)
self.disconnectRankedToggles()
self.rankedCybran.setChecked(False)
self.rankedSeraphim.setChecked(False)
self.rankedUEF.setChecked(False)
self.connectRankedToggles()
else:
self.stopSearchRanked()
@QtCore.pyqtSlot(bool)
def toggleCybran(self, state):
if (state):
self.startSearchRanked(Faction.CYBRAN)
self.disconnectRankedToggles()
self.rankedAeon.setChecked(False)
self.rankedSeraphim.setChecked(False)
self.rankedUEF.setChecked(False)
self.connectRankedToggles()
else:
self.stopSearchRanked()
@QtCore.pyqtSlot(bool)
def toggleSeraphim(self, state):
if (state):
self.startSearchRanked(Faction.SERAPHIM)
self.disconnectRankedToggles()
self.rankedAeon.setChecked(False)
self.rankedCybran.setChecked(False)
self.rankedUEF.setChecked(False)
self.connectRankedToggles()
else:
self.stopSearchRanked()
@QtCore.pyqtSlot(bool)
def toggleRandom(self, state):
if (state):
faction = random.randint(1,4)
if faction == 1 :
self.startSearchRanked(Faction.UEF)
elif faction == 2 :
self.startSearchRanked(Faction.CYBRAN)
elif faction == 3 :
self.startSearchRanked(Faction.AEON)
else :
self.startSearchRanked(Faction.SERAPHIM)
self.disconnectRankedToggles()
self.rankedAeon.setChecked(False)
self.rankedCybran.setChecked(False)
self.rankedSeraphim.setChecked(False)
self.connectRankedToggles()
else:
self.stopSearchRanked()
@QtCore.pyqtSlot(QtGui.QListWidgetItem)
def gameDoubleClicked(self, item):
'''
Slot that attempts to join a game.
'''
if not fa.exe.available():
return
self.stopSearchRanked() #Actually a workaround
passw = None
if fa.exe.check(item.mod, item.mapname, None, item.mods):
if item.access == "password" :
passw, ok = QtGui.QInputDialog.getText(self.client, "Passworded game" , "Enter password :", QtGui.QLineEdit.Normal, "")
if ok:
self.client.send(dict(command="game_join", password=passw, uid=item.uid, gameport=self.client.gamePort))
else :
self.client.send(dict(command="game_join", uid=item.uid, gameport=self.client.gamePort))
else:
pass #checkFA failed and notified the user what was wrong. We won't join now.
@QtCore.pyqtSlot(QtGui.QListWidgetItem)
def hostGameClicked(self, item):
'''
Slot that hosts a new game after showing the hosting dialog.
'''
if not fa.exe.available():
return
self.stopSearchRanked()
# A simple Hosting dialog.
if fa.exe.check(item.mod):
hostgamewidget = HostgameWidget(self, item)
if hostgamewidget.exec_() == 1 :
if self.gamename:
gameoptions = []
if len(self.options) != 0 :
oneChecked = False
for option in self.options :
if option.isChecked() :
oneChecked = True
gameoptions.append(option.isChecked())
if oneChecked == False :
QtGui.QMessageBox.warning(None, "No option checked !", "You have to check at least one option !")
return
modnames = [str(moditem.text()) for moditem in hostgamewidget.modList.selectedItems()]
mods = [hostgamewidget.mods[modstr] for modstr in modnames]
modvault.setActiveMods(mods, True) #should be removed later as it should be managed by the server.
# #Send a message to the server with our intent.
if self.ispassworded:
self.client.send(dict(command="game_host", access="password", password = self.gamepassword, mod=item.mod, title=self.gamename, mapname=self.gamemap, gameport=self.client.gamePort, options = gameoptions))
else :
self.client.send(dict(command="game_host", access="public", mod=item.mod, title=self.gamename, mapname=self.gamemap, gameport=self.client.gamePort, options = gameoptions))
#
def savePassword(self, password):
self.gamepassword = password
util.settings.beginGroup("fa.games")
util.settings.setValue("password", self.gamepassword)
util.settings.endGroup()
def loadPassword(self):
util.settings.beginGroup("fa.games")
self.gamepassword = util.settings.value("password", None)
util.settings.endGroup()
#Default Game Map ...
if not self.gamepassword:
self.gamepassword = "password"
def saveGameMap(self, name):
self.gamemap = name
util.settings.beginGroup("fa.games")
util.settings.setValue("gamemap", self.gamemap)
util.settings.endGroup()
def loadGameMap(self):
util.settings.beginGroup("fa.games")
self.gamemap = util.settings.value("gamemap", None)
util.settings.endGroup()
#Default Game Map ...
if not self.gamemap:
self.gamemap = "scmp_007"
def saveGameName(self, name):
self.gamename = name
util.settings.beginGroup("fa.games")
util.settings.setValue("gamename", self.gamename)
util.settings.endGroup()
def loadGameName(self):
util.settings.beginGroup("fa.games")
self.gamename = util.settings.value("gamename", None)
util.settings.endGroup()
#Default Game Name ...
if not self.gamename:
if (self.client.login):
self.gamename = self.client.login + "'s game"
else:
self.gamename = "nobody's game"
| gpl-3.0 | 7,961,750,450,102,302,000 | 33.995643 | 227 | 0.606585 | false |
bentley-historical-library/migration-tools | ead/agents/create_json_then_post-famname.py | 2 | 4772 | '''
first things first, import what we need'''
# requests takes all of the work out of python http making your integration with web services seamless, you'll need to install it
import requests
# csv implements classes to read and write tabular data in csv format
import csv
# json is a lightweight data interchange format inspired by javascript object literal syntax
import json
'''
preliminaries'''
# where is the csv that has been exported from openrefine?
famname_csv = 'agents-famname.csv'
# where is the csv that we'll record the original name and the new uri
famname_uris = 'famname-uris.csv'
# go ahead and create the headers of that csv
# open the csv in write mode
with open(famname_uris, 'wb') as famname_uris_csv_file:
# set up the writer
famname_uris_csv_file_writer = csv.writer(famname_uris_csv_file)
# write the headers
famname_uris_csv_file_writer.writerow(['ORIGINAL', 'uri'])
# preliminaries for using archivesspace api
# base url
base_url = 'http://localhost:8089'
# username default
username = 'admin'
# password default
password = 'admin'
'''
set up session in archivesspace using requests'''
# get authorization and return as json
authorization = requests.post(base_url + '/users/' + username + '/login?password=' + password).json()
# get the session token
session_token = authorization["session"]
# create the headers we'll need for posting
headers = {'X-ArchivesSpace-Session': session_token}
'''
go through csv, create a list of dictionaries for each entry'''
'''
for reference
"names": [
{
"lock_version": 0,
"family_name": "Eckard",
"prefix": "Prefix",
"dates": "Dates",
"qualifier": "Qualifer",
"sort_name": "Eckard, Prefix, Dates (Qualifer)",
"sort_name_auto_generate": true,
"created_by": "admin",
"last_modified_by": "admin",
"create_time": "2015-08-26T15:55:22Z",
"system_mtime": "2015-08-26T15:55:22Z",
"user_mtime": "2015-08-26T15:55:22Z",
"authorized": true,
"is_display_name": true,
"source": "lcnaf",
"jsonmodel_type": "name_family",
"use_dates": [
],
"authority_id": "Authority ID"
}
]'''
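# Column layout assumed for the OpenRefine export, inferred from the row
# indexing below: row[2] = authority id, row[3] = source, row[5] = original
# name, row[7] = family name, row[9] = qualifier.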
# open the famname csv in read mode
with open(famname_csv, 'r') as famname_csv_file:
# read it to get the data
famname_data = csv.reader(famname_csv_file)
# skip the first row
next(famname_data)
# go through each row
for row in famname_data:
# match up fields to row index
# original
original = row[5]
# family name
family_name = row[7]
# qualifier
qualifier = row[9]
# authority id
authority_id = row[2]
# source
source = row[3]
# set up list and dictionaries
row_dictionary = {}
# empty list for famname dictionaries
famname_list = []
# empty dictionary
famname_dictionary = {}
# add to dictionary
# if a family name exists
if family_name:
# add it to the dictionary
famname_dictionary["family_name"] = family_name
# if a qualifier exists
if qualifier:
# add it to the dictionary
famname_dictionary["qualifier"] = qualifier
# if authority id exists
if authority_id:
# add it to dictionary
famname_dictionary["authority_id"] = authority_id
# if source exists
if source:
# add it to dictionary
famname_dictionary["source"] = source
# add other required fields to dictionary
# auto generate sort name
famname_dictionary["sort_name_auto_generate"] = True
# add dictionary to list
famname_list.append(famname_dictionary)
# add list to dictionary
row_dictionary["names"] = famname_list
'''
create json out of this'''
# create json
famname_json = json.dumps(row_dictionary)
print famname_json
'''
post it to archivesspace'''
# post the famname
famnames = requests.post(base_url + '/agents/families', headers = headers, data = famname_json).json()
print famnames
'''
get uri and append it to new csv'''
# write row of csv
# open the csv in append mode
with open(famname_uris, 'ab') as famname_uris_csv_file:
# set up the writer
famname_uris_csv_file_writer = csv.writer(famname_uris_csv_file)
# write the headers
if "status" in famnames:
famname_uris_csv_file_writer.writerow([original, famnames["uri"]])
| cc0-1.0 | -424,870,795,608,711,040 | 27.410714 | 129 | 0.598491 | false |
jiaphuan/models | research/delf/delf/python/datum_io_test.py | 2 | 2695 | # Copyright 2017 The TensorFlow Authors All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for datum_io, the python interface of DatumProto."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import tensorflow as tf
from delf import datum_io
class DatumIoTest(tf.test.TestCase):
def Conversion2dTestWithType(self, dtype):
original_data = np.arange(9).reshape(3, 3).astype(dtype)
serialized = datum_io.SerializeToString(original_data)
retrieved_data = datum_io.ParseFromString(serialized)
self.assertTrue(np.array_equal(original_data, retrieved_data))
def Conversion3dTestWithType(self, dtype):
original_data = np.arange(24).reshape(2, 3, 4).astype(dtype)
serialized = datum_io.SerializeToString(original_data)
retrieved_data = datum_io.ParseFromString(serialized)
self.assertTrue(np.array_equal(original_data, retrieved_data))
def testConversion2dWithType(self):
self.Conversion2dTestWithType(np.int8)
self.Conversion2dTestWithType(np.int16)
self.Conversion2dTestWithType(np.int32)
self.Conversion2dTestWithType(np.int64)
self.Conversion2dTestWithType(np.float16)
self.Conversion2dTestWithType(np.float32)
self.Conversion2dTestWithType(np.float64)
def testConversion3dWithType(self):
self.Conversion3dTestWithType(np.int8)
self.Conversion3dTestWithType(np.int16)
self.Conversion3dTestWithType(np.int32)
self.Conversion3dTestWithType(np.int64)
self.Conversion3dTestWithType(np.float16)
self.Conversion3dTestWithType(np.float32)
self.Conversion3dTestWithType(np.float64)
def testWriteAndReadToFile(self):
data = np.array([[[-1.0, 125.0, -2.5], [14.5, 3.5, 0.0]],
[[20.0, 0.0, 30.0], [25.5, 36.0, 42.0]]])
tmpdir = tf.test.get_temp_dir()
filename = os.path.join(tmpdir, 'test.datum')
datum_io.WriteToFile(data, filename)
data_read = datum_io.ReadFromFile(filename)
self.assertAllEqual(data_read, data)
if __name__ == '__main__':
tf.test.main()
| apache-2.0 | 8,100,915,956,753,142,000 | 36.430556 | 80 | 0.716883 | false |
mozilla/olympia | src/olympia/scanners/admin.py | 2 | 31066 | from django.conf import settings
from django.contrib import admin, messages
from django.contrib.admin import SimpleListFilter
from django.core.exceptions import FieldDoesNotExist
from django.db.models import Count, Prefetch
from django.http import Http404
from django.shortcuts import redirect
from django.template.loader import render_to_string
from django.urls import re_path, reverse
from django.utils.html import format_html, format_html_join
from django.utils.http import urlencode, is_safe_url
from django.utils.translation import gettext, gettext_lazy as _
from urllib.parse import urljoin, urlparse
from olympia import amo
from olympia.access import acl
from olympia.addons.models import Addon
from olympia.constants.scanners import (
ABORTING,
COMPLETED,
CUSTOMS,
FALSE_POSITIVE,
INCONCLUSIVE,
MAD,
NEW,
RESULT_STATES,
RUNNING,
SCHEDULED,
TRUE_POSITIVE,
UNKNOWN,
YARA,
)
from .models import (
ImproperScannerQueryRuleStateError,
ScannerQueryResult,
ScannerQueryRule,
ScannerResult,
ScannerRule,
)
from .tasks import run_yara_query_rule
def _is_safe_url(url, request):
"""Override the Django `is_safe_url()` to pass a configured list of allowed
hosts and enforce HTTPS."""
allowed_hosts = (
settings.DOMAIN,
urlparse(settings.EXTERNAL_SITE_URL).netloc,
)
require_https = request.is_secure() if request else False
return is_safe_url(url, allowed_hosts=allowed_hosts, require_https=require_https)
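# A sketch of the intended use (hypothetical view code, not part of this
# module): only follow a user-supplied redirect target when it passes the
# check above.
#
#   next_url = request.GET.get('next')
#   if next_url and _is_safe_url(next_url, request):
#       return redirect(next_url)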
class PresenceFilter(SimpleListFilter):
def choices(self, cl):
for lookup, title in self.lookup_choices:
yield {
'selected': self.value() == lookup,
'query_string': cl.get_query_string({self.parameter_name: lookup}, []),
'display': title,
}
class MatchesFilter(PresenceFilter):
title = gettext('presence of matched rules')
parameter_name = 'has_matched_rules'
def lookups(self, request, model_admin):
return (('all', 'All'), (None, ' With matched rules only'))
def queryset(self, request, queryset):
if self.value() == 'all':
return queryset
return queryset.filter(has_matches=True)
class StateFilter(SimpleListFilter):
title = gettext('result state')
parameter_name = 'state'
def lookups(self, request, model_admin):
return (('all', 'All'), *RESULT_STATES.items())
def choices(self, cl):
for lookup, title in self.lookup_choices:
selected = (
lookup == UNKNOWN
if self.value() is None
else self.value() == str(lookup)
)
yield {
'selected': selected,
'query_string': cl.get_query_string({self.parameter_name: lookup}, []),
'display': title,
}
def queryset(self, request, queryset):
if self.value() == 'all':
return queryset
if self.value() is None:
return queryset.filter(state=UNKNOWN)
return queryset.filter(state=self.value())
class ScannerRuleListFilter(admin.RelatedOnlyFieldListFilter):
include_empty_choice = False
def field_choices(self, field, request, model_admin):
return [
(rule.pk, f'{rule.name} ({rule.get_scanner_display()})')
for rule in field.related_model.objects.only(
'pk', 'scanner', 'name'
).order_by('scanner', 'name')
]
class ExcludeMatchedRuleFilter(SimpleListFilter):
title = gettext('all but results matching only this rule')
parameter_name = 'exclude_rule'
def lookups(self, request, model_admin):
return [(None, 'No excluded rule')] + [
(rule.pk, f'{rule.name} ({rule.get_scanner_display()})')
for rule in ScannerRule.objects.only('pk', 'scanner', 'name').order_by(
'scanner', 'name'
)
]
def choices(self, cl):
for lookup, title in self.lookup_choices:
selected = (
lookup is None if self.value() is None else self.value() == str(lookup)
)
yield {
'selected': selected,
'query_string': cl.get_query_string({self.parameter_name: lookup}, []),
'display': title,
}
def queryset(self, request, queryset):
if self.value() is None:
return queryset
# We want to exclude results that *only* matched the given rule, so
# we know they'll have exactly one matched rule.
return queryset.annotate(num_matches=Count('matched_rules')).exclude(
matched_rules=self.value(), num_matches=1
)
class WithVersionFilter(PresenceFilter):
title = gettext('presence of a version')
parameter_name = 'has_version'
def lookups(self, request, model_admin):
return (('all', 'All'), (None, ' With version only'))
def queryset(self, request, queryset):
if self.value() == 'all':
return queryset
return queryset.exclude(version=None)
class VersionChannelFilter(admin.ChoicesFieldListFilter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title = gettext('version channel')
class AddonStatusFilter(admin.ChoicesFieldListFilter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title = gettext('add-on status')
class AddonVisibilityFilter(admin.BooleanFieldListFilter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title = gettext('add-on listing visibility')
def choices(self, changelist):
        # We're doing a lookup on disabled_by_user: if it's True then the
        # add-on listing is "invisible", and if it's False it's "visible".
for lookup, title in (
(None, _('All')),
('1', _('Invisible')),
('0', _('Visible')),
):
yield {
'selected': self.lookup_val == lookup and not self.lookup_val2,
'query_string': changelist.get_query_string(
{self.lookup_kwarg: lookup}, [self.lookup_kwarg2]
),
'display': title,
}
class FileStatusFilter(admin.ChoicesFieldListFilter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title = gettext('file status')
class FileIsSigned(admin.BooleanFieldListFilter):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.title = gettext('file signature')
class AbstractScannerResultAdminMixin(admin.ModelAdmin):
actions = None
view_on_site = False
list_display = (
'id',
'formatted_addon',
'guid',
'authors',
'scanner',
'formatted_score',
'formatted_matched_rules',
'formatted_created',
'result_actions',
)
list_select_related = ('version',)
raw_id_fields = ('version', 'upload')
fields = (
'id',
'upload',
'formatted_addon',
'authors',
'guid',
'scanner',
'formatted_score',
'created',
'state',
'formatted_matched_rules_with_files',
'formatted_results',
'result_actions',
)
ordering = ('-pk',)
class Media:
css = {'all': ('css/admin/scannerresult.css',)}
def get_queryset(self, request):
# We already set list_select_related() so we don't need to repeat that.
# We also need to fetch the add-ons though, and because we need their
# translations for the name (see formatted_addon() below) we can't use
# select_related(). We don't want to run the default transformer though
# so we prefetch them with just the translations.
return self.model.objects.prefetch_related(
Prefetch(
'version__addon',
# We use `unfiltered` because we want to fetch all the add-ons,
# including the deleted ones.
queryset=Addon.unfiltered.all().only_translations(),
),
'version__files',
'version__addon__authors',
'matched_rules',
)
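        # Addon.unfiltered (rather than Addon.objects) is what keeps results
        # pointing at deleted add-ons visible, and prefetching files, authors
        # and matched rules avoids one extra query per row in the changelist.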
def get_unfiltered_changelist_params(self):
"""Return query parameters dict used to link to the changelist with
no filtering applied.
Needed to link to results from a rule, because the changelist view
might filter out some results by default."""
return {
WithVersionFilter.parameter_name: 'all',
StateFilter.parameter_name: 'all',
}
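        # For example, urlencode() over this dict produces a query string
        # like 'has_version=all&state=all' (parameter order may vary).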
# Remove the "add" button
def has_add_permission(self, request):
return False
# Remove the "delete" button
def has_delete_permission(self, request, obj=None):
return False
# Read-only mode
def has_change_permission(self, request, obj=None):
return False
# Custom actions
def has_actions_permission(self, request):
return acl.action_allowed(request, amo.permissions.ADMIN_SCANNERS_RESULTS_EDIT)
def get_list_display(self, request):
fields = super().get_list_display(request)
return self._excludes_fields(request=request, fields=fields)
def get_fields(self, request, obj=None):
fields = super().get_fields(request, obj)
return self._excludes_fields(request=request, fields=fields)
def _excludes_fields(self, request, fields):
to_exclude = []
if not self.has_actions_permission(request):
to_exclude = ['result_actions']
try:
self.model._meta.get_field('upload')
except FieldDoesNotExist:
to_exclude.append('upload')
fields = list(filter(lambda x: x not in to_exclude, fields))
return fields
def formatted_addon(self, obj):
if obj.version:
return format_html(
'<a href="{}">{}</a>'
'<br>'
'<table>'
' <tr><td>Version:</td><td>{}</td></tr>'
' <tr><td>Channel:</td><td>{}</td></tr>'
'</table>',
# We use the add-on's ID to support deleted add-ons.
urljoin(
settings.EXTERNAL_SITE_URL,
reverse(
'reviewers.review',
args=[
(
'listed'
if obj.version.channel == amo.RELEASE_CHANNEL_LISTED
else 'unlisted'
),
obj.version.addon.id,
],
),
),
obj.version.addon.name,
obj.version.version,
obj.version.get_channel_display(),
)
return '-'
formatted_addon.short_description = 'Add-on'
def authors(self, obj):
if not obj.version:
return '-'
authors = obj.version.addon.authors.all()
contents = format_html_join(
'',
'<li><a href="{}">{}</a></li>',
(
(
urljoin(
settings.EXTERNAL_SITE_URL,
reverse('admin:users_userprofile_change', args=(author.pk,)),
),
author.email,
)
for author in authors
),
)
return format_html(
'<ul>{}</ul>'
'<br>'
'[<a href="{}?authors__in={}">Other add-ons by these authors</a>]',
contents,
urljoin(
settings.EXTERNAL_SITE_URL,
reverse('admin:addons_addon_changelist'),
),
','.join(str(author.pk) for author in authors),
)
def guid(self, obj):
if obj.version:
return obj.version.addon.guid
return '-'
guid.short_description = 'Add-on GUID'
guid.admin_order_field = 'version__addon__guid'
def channel(self, obj):
if obj.version:
return obj.version.get_channel_display()
return '-'
channel.short_description = 'Channel'
def formatted_created(self, obj):
if obj.version:
return obj.version.created.strftime('%Y-%m-%d %H:%M:%S')
return '-'
formatted_created.short_description = 'Created'
def formatted_results(self, obj):
return format_html('<pre>{}</pre>', obj.get_pretty_results())
formatted_results.short_description = 'Results'
def formatted_matched_rules(self, obj):
rule_model = self.model.matched_rules.rel.model
info = rule_model._meta.app_label, rule_model._meta.model_name
return format_html(
', '.join(
[
'<a href="{}">{} ({})</a>'.format(
reverse('admin:%s_%s_change' % info, args=[rule.pk]),
rule.name,
rule.get_scanner_display(),
)
for rule in obj.matched_rules.all()
]
)
)
formatted_matched_rules.short_description = 'Matched rules'
def formatted_matched_rules_with_files(
self, obj, template_name='formatted_matched_rules_with_files'
):
files_by_matched_rules = obj.get_files_by_matched_rules()
rule_model = self.model.matched_rules.rel.model
info = rule_model._meta.app_label, rule_model._meta.model_name
return render_to_string(
f'admin/scanners/scannerresult/{template_name}.html',
{
'rule_change_urlname': 'admin:%s_%s_change' % info,
'external_site_url': settings.EXTERNAL_SITE_URL,
'file_id': (obj.version.all_files[0].id if obj.version else None),
'matched_rules': [
{
'pk': rule.pk,
'name': rule.name,
'files': files_by_matched_rules[rule.name],
}
for rule in obj.matched_rules.all()
],
'addon_id': obj.version.addon.pk if obj.version else None,
'version_id': obj.version.pk if obj.version else None,
},
)
formatted_matched_rules_with_files.short_description = 'Matched rules'
def formatted_score(self, obj):
if obj.scanner not in [CUSTOMS, MAD]:
return '-'
if obj.score < 0:
return 'n/a'
return '{:0.0f}%'.format(obj.score * 100)
formatted_score.short_description = 'Score'
def safe_referer_redirect(self, request, default_url):
referer = request.META.get('HTTP_REFERER')
if referer and _is_safe_url(referer, request):
return redirect(referer)
return redirect(default_url)
def handle_true_positive(self, request, pk, *args, **kwargs):
can_use_actions = self.has_actions_permission(request)
if not can_use_actions or request.method != 'POST':
raise Http404
result = self.get_object(request, pk)
result.update(state=TRUE_POSITIVE)
messages.add_message(
request,
messages.INFO,
'Scanner result {} has been marked as true positive.'.format(pk),
)
return self.safe_referer_redirect(
request, default_url='admin:scanners_scannerresult_changelist'
)
def handle_inconclusive(self, request, pk, *args, **kwargs):
can_use_actions = self.has_actions_permission(request)
if not can_use_actions or request.method != 'POST':
raise Http404
result = self.get_object(request, pk)
result.update(state=INCONCLUSIVE)
messages.add_message(
request,
messages.INFO,
'Scanner result {} has been marked as inconclusive.'.format(pk),
)
return self.safe_referer_redirect(
request, default_url='admin:scanners_scannerresult_changelist'
)
def handle_false_positive(self, request, pk, *args, **kwargs):
can_use_actions = self.has_actions_permission(request)
if not can_use_actions or request.method != 'POST':
raise Http404
result = self.get_object(request, pk)
result.update(state=FALSE_POSITIVE)
messages.add_message(
request,
messages.INFO,
'Scanner result {} has been marked as false positive.'.format(pk),
)
title = 'False positive report for ScannerResult {}'.format(pk)
body = render_to_string(
'admin/false_positive_report.md', {'result': result, 'YARA': YARA}
)
labels = ','.join(
[
# Default label added to all issues
'false positive report'
]
+ ['rule: {}'.format(rule.name) for rule in result.matched_rules.all()]
)
return redirect(
'https://github.com/{}/issues/new?{}'.format(
result.get_git_repository(),
urlencode({'title': title, 'body': body, 'labels': labels}),
)
)
def handle_revert(self, request, pk, *args, **kwargs):
is_admin = acl.action_allowed(
request, amo.permissions.ADMIN_SCANNERS_RESULTS_EDIT
)
if not is_admin or request.method != 'POST':
raise Http404
result = self.get_object(request, pk)
result.update(state=UNKNOWN)
messages.add_message(
request,
messages.INFO,
'Scanner result {} report has been reverted.'.format(pk),
)
return self.safe_referer_redirect(
request, default_url='admin:scanners_scannerresult_changelist'
)
def get_urls(self):
urls = super().get_urls()
info = self.model._meta.app_label, self.model._meta.model_name
custom_urls = [
re_path(
r'^(?P<pk>.+)/report-false-positive/$',
self.admin_site.admin_view(self.handle_false_positive),
name='%s_%s_handlefalsepositive' % info,
),
re_path(
r'^(?P<pk>.+)/report-true-positive/$',
self.admin_site.admin_view(self.handle_true_positive),
name='%s_%s_handletruepositive' % info,
),
re_path(
r'^(?P<pk>.+)/report-inconclusive/$',
self.admin_site.admin_view(self.handle_inconclusive),
name='%s_%s_handleinconclusive' % info,
),
re_path(
r'^(?P<pk>.+)/revert-report/$',
self.admin_site.admin_view(self.handle_revert),
name='%s_%s_handlerevert' % info,
),
]
return custom_urls + urls
def result_actions(self, obj):
info = self.model._meta.app_label, self.model._meta.model_name
return render_to_string(
'admin/scannerresult_actions.html',
{
'handlefalsepositive_urlname': (
'admin:%s_%s_handlefalsepositive' % info
),
'handletruepositive_urlname': ('admin:%s_%s_handletruepositive' % info),
'handleinconclusive_urlname': ('admin:%s_%s_handleinconclusive' % info),
'handlerevert_urlname': 'admin:%s_%s_handlerevert' % info,
'obj': obj,
},
)
result_actions.short_description = 'Actions'
result_actions.allow_tags = True
class AbstractScannerRuleAdminMixin(admin.ModelAdmin):
view_on_site = False
list_display = ('name', 'scanner', 'action', 'is_active')
list_filter = ('scanner', 'action', 'is_active')
fields = (
'scanner',
'name',
'action',
'created',
'modified',
'matched_results_link',
'is_active',
'definition',
)
readonly_fields = ('created', 'modified', 'matched_results_link')
def formfield_for_choice_field(self, db_field, request, **kwargs):
if db_field.name == 'scanner':
kwargs['choices'] = (('', '---------'),)
for key, value in db_field.get_choices():
if key in [CUSTOMS, YARA]:
kwargs['choices'] += ((key, value),)
return super().formfield_for_choice_field(db_field, request, **kwargs)
class Media:
css = {'all': ('css/admin/scannerrule.css',)}
def get_fields(self, request, obj=None):
fields = super().get_fields(request, obj)
if not self.has_change_permission(request, obj):
# Remove the 'definition' field...
fields = list(filter(lambda x: x != 'definition', fields))
# ...and add its readonly (and pretty!) alter-ego.
fields.append('formatted_definition')
return fields
def matched_results_link(self, obj):
if not obj.pk or not obj.scanner:
return '-'
counts = obj.results.aggregate(
addons=Count('version__addon', distinct=True), total=Count('id')
)
ResultModel = obj.results.model
url = reverse(
'admin:{}_{}_changelist'.format(
ResultModel._meta.app_label, ResultModel._meta.model_name
)
)
params = {
'matched_rules__id__exact': str(obj.pk),
}
result_admin = admin.site._registry[ResultModel]
params.update(result_admin.get_unfiltered_changelist_params())
return format_html(
'<a href="{}?{}">{} ({} add-ons)</a>',
url,
urlencode(params),
counts['total'],
counts['addons'],
)
matched_results_link.short_description = 'Matched Results'
def formatted_definition(self, obj):
return format_html('<pre>{}</pre>', obj.definition)
formatted_definition.short_description = 'Definition'
@admin.register(ScannerResult)
class ScannerResultAdmin(AbstractScannerResultAdminMixin, admin.ModelAdmin):
list_filter = (
'scanner',
MatchesFilter,
StateFilter,
('matched_rules', ScannerRuleListFilter),
WithVersionFilter,
ExcludeMatchedRuleFilter,
)
@admin.register(ScannerQueryResult)
class ScannerQueryResultAdmin(AbstractScannerResultAdminMixin, admin.ModelAdmin):
raw_id_fields = ('version',)
list_display_links = None
list_display = (
'addon_name',
'guid',
'formatted_channel',
'version_number',
'formatted_created',
'is_file_signed',
'was_blocked',
'authors',
'formatted_matched_rules',
'matching_filenames',
'download',
)
list_filter = (
('matched_rules', ScannerRuleListFilter),
('version__channel', VersionChannelFilter),
('version__addon__status', AddonStatusFilter),
('version__addon__disabled_by_user', AddonVisibilityFilter),
        ('version__files__status', FileStatusFilter),
('version__files__is_signed', FileIsSigned),
('was_blocked', admin.BooleanFieldListFilter),
)
ordering = ('version__addon_id', 'version__channel', 'version__created')
class Media(AbstractScannerResultAdminMixin.Media):
js = ('js/admin/scannerqueryresult.js',)
def addon_name(self, obj):
# Custom, simpler implementation to go with add-on grouping: the
# version number and version channel are not included - they are
# displayed as separate columns.
if obj.version:
return obj.version.addon.name
return '-'
addon_name.short_description = 'Add-on'
def formatted_channel(self, obj):
if obj.version:
return format_html(
'<a href="{}">{}</a>',
# We use the add-on's ID to support deleted add-ons.
urljoin(
settings.EXTERNAL_SITE_URL,
reverse(
'reviewers.review',
args=[
(
'listed'
if obj.version.channel == amo.RELEASE_CHANNEL_LISTED
else 'unlisted'
),
obj.version.addon.id,
],
),
),
obj.version.get_channel_display(),
)
return '-'
def version_number(self, obj):
if obj.version:
return obj.version.version
return '-'
version_number.short_description = 'Version'
def is_file_signed(self, obj):
if obj.version and obj.version.current_file:
return obj.version.current_file.is_signed
return False
is_file_signed.short_description = 'Is Signed'
is_file_signed.boolean = True
def get_unfiltered_changelist_params(self):
return {}
def matching_filenames(self, obj):
return self.formatted_matched_rules_with_files(
obj, template_name='formatted_matching_files'
)
def download(self, obj):
if obj.version and obj.version.current_file:
return format_html(
'<a href="{}">{}</a>',
obj.version.current_file.get_absolute_url(attachment=True),
obj.version.current_file.pk,
)
return '-'
def has_actions_permission(self, request):
return acl.action_allowed(request, amo.permissions.ADMIN_SCANNERS_QUERY_EDIT)
@admin.register(ScannerRule)
class ScannerRuleAdmin(AbstractScannerRuleAdminMixin, admin.ModelAdmin):
pass
@admin.register(ScannerQueryRule)
class ScannerQueryRuleAdmin(AbstractScannerRuleAdminMixin, admin.ModelAdmin):
list_display = (
'name',
'scanner',
'run_on_disabled_addons',
'created',
'state_with_actions',
'completion_rate',
'matched_results_link',
)
list_filter = ('state',)
fields = (
'scanner',
'run_on_disabled_addons',
'state_with_actions',
'name',
'created',
'modified',
'completion_rate',
'matched_results_link',
'definition',
)
readonly_fields = (
'completion_rate',
'created',
'modified',
'matched_results_link',
'state_with_actions',
)
def change_view(self, request, *args, **kwargs):
kwargs['extra_context'] = kwargs.get('extra_context') or {}
kwargs['extra_context']['hide_action_buttons'] = not acl.action_allowed(
request, amo.permissions.ADMIN_SCANNERS_QUERY_EDIT
)
return super().change_view(request, *args, **kwargs)
def changelist_view(self, request, *args, **kwargs):
kwargs['extra_context'] = kwargs.get('extra_context') or {}
kwargs['extra_context']['hide_action_buttons'] = not acl.action_allowed(
request, amo.permissions.ADMIN_SCANNERS_QUERY_EDIT
)
return super().changelist_view(request, *args, **kwargs)
def has_change_permission(self, request, obj=None):
if obj and obj.state != NEW:
return False
return super().has_change_permission(request, obj=obj)
def handle_run(self, request, pk, *args, **kwargs):
is_admin = acl.action_allowed(
request, amo.permissions.ADMIN_SCANNERS_QUERY_EDIT
)
if not is_admin or request.method != 'POST':
raise Http404
rule = self.get_object(request, pk)
try:
# SCHEDULED is a transitional state that allows us to update the UI
# right away before redirecting. Once it starts being processed the
# task will switch it to RUNNING.
rule.change_state_to(SCHEDULED)
run_yara_query_rule.delay(rule.pk)
messages.add_message(
request,
messages.INFO,
'Scanner Query Rule {} has been successfully queued for '
'execution.'.format(rule.pk),
)
except ImproperScannerQueryRuleStateError:
messages.add_message(
request,
messages.ERROR,
'Scanner Query Rule {} could not be queued for execution '
                'because it was in "{}" state.'.format(
rule.pk, rule.get_state_display()
),
)
return redirect('admin:scanners_scannerqueryrule_changelist')
def handle_abort(self, request, pk, *args, **kwargs):
is_admin = acl.action_allowed(
request, amo.permissions.ADMIN_SCANNERS_QUERY_EDIT
)
if not is_admin or request.method != 'POST':
raise Http404
rule = self.get_object(request, pk)
try:
rule.change_state_to(ABORTING) # Tasks will take this into account
# FIXME: revoke existing tasks (would need to extract the
# GroupResult when executing the chord, store its id in the rule,
# then restore the GroupResult here to call revoke() on it)
messages.add_message(
request,
messages.INFO,
'Scanner Query Rule {} is being aborted.'.format(rule.pk),
)
except ImproperScannerQueryRuleStateError:
# We messed up somewhere.
messages.add_message(
request,
messages.ERROR,
'Scanner Query Rule {} could not be aborted because it was '
'in "{}" state'.format(rule.pk, rule.get_state_display()),
)
return redirect('admin:scanners_scannerqueryrule_changelist')
def get_urls(self):
urls = super().get_urls()
info = self.model._meta.app_label, self.model._meta.model_name
custom_urls = [
re_path(
r'^(?P<pk>.+)/abort/$',
self.admin_site.admin_view(self.handle_abort),
name='%s_%s_handle_abort' % info,
),
re_path(
r'^(?P<pk>.+)/run/$',
self.admin_site.admin_view(self.handle_run),
name='%s_%s_handle_run' % info,
),
]
return custom_urls + urls
def state_with_actions(self, obj):
return render_to_string(
'admin/scannerqueryrule_state_with_actions.html',
{
'obj': obj,
'COMPLETED': COMPLETED,
'NEW': NEW,
'RUNNING': RUNNING,
},
)
state_with_actions.short_description = 'State'
state_with_actions.allow_tags = True
| bsd-3-clause | 8,228,025,103,084,832,000 | 32.440258 | 88 | 0.553048 | false |
Ictp/indico | indico/MaKaC/webinterface/common/abstractDataWrapper.py | 2 | 12340 | # -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
from MaKaC.common.fossilize import fossilizes, Fossilizable
from MaKaC.fossils.abstracts import IAuthorFossil
from MaKaC.webinterface.general import normaliseListParam
from indico.core.config import Config
from MaKaC.review import AbstractFieldContent
BYTES_1MB = 1024 * 1024
class Author(Fossilizable):
fossilizes(IAuthorFossil)
def __init__(self, id, **data):
self._id = id
self.setValues(**data)
def setValues(self, **data):
self._title = data.get("title", "")
self._firstName = data.get("first_name", "")
self._familyName = data.get("family_name", "")
self._affiliation = data.get("affiliation", "")
self._email = data.get("email", "")
self._phone = data.get("phone", "")
self._speaker = data.get("isSpeaker", False)
def mapAuthor(self, author):
self._title = author.getTitle()
self._firstName = author.getFirstName()
self._familyName = author.getSurName()
self._affiliation = author.getAffiliation()
self._email = author.getEmail()
self._phone = author.getTelephone()
self._speaker = author.getAbstract().isSpeaker(author)
def getTitle(self):
return self._title
def getId(self):
return self._id
def setId(self, id):
self._id = id
def getFirstName(self):
return self._firstName
def getFamilyName(self):
return self._familyName
def getAffiliation(self):
return self._affiliation
def getEmail(self):
return self._email
def getPhone(self):
return self._phone
def isSpeaker(self):
return self._speaker
def __str__(self):
return "id:%s - nombre:%s" % (self._id, self._familyName)
class AbstractData(object):
def __init__(self, absMgr, params, headerSize, displayValues=False):
self._absMgr = absMgr
self._afm = absMgr.getAbstractFieldsMgr()
        if headerSize:
            self._headerSize = float(headerSize) / BYTES_1MB
        else:
            self._headerSize = 0
self._displayValues = displayValues
cparams = params.copy()
self._mapFromParams(cparams)
def _mapFromParams(self, params):
self.title = params.get("title", "").strip()
self._otherFields = {}
for f in self._afm.getFields():
fid = f.getId()
value = params.get("f_%s" % fid, "").strip()
self._otherFields[fid] = value
self.type = params.get("type", None)
self.tracks = normaliseListParam(params.get("tracks", []))
if self._displayValues:
# the call comes from modifying an existing abstract, we want to display the current content in the abstract form
self._prAuthorsListParam = params.get("prAuthors", [])
self._coAuthorsListParam = params.get("coAuthors", [])
self._setExistingAuthors()
else:
# the call comes from submitting a new abstract or modifying an existing abstract
from MaKaC.services.interface.rpc import json
self._prAuthorsListParam = json.decode(params.get("prAuthors", "[]"))
self._coAuthorsListParam = json.decode(params.get("coAuthors", "[]"))
self._setNewAuthors()
self.comments = params.get("comments", "")
self.origin = params.get("origin", "display")
self.files = normaliseListParam(params.get("file", []))
self.existingFiles = normaliseListParam(params.get("existingFile", []))
def setAbstractData(self, abstract):
conf = abstract.getConference()
cfaMgr = conf.getAbstractMgr()
afm = cfaMgr.getAbstractFieldsMgr()
abstract.setTitle(self.title)
for f in afm.getFields():
fieldId = f.getId()
abstract.setField(fieldId, self.getFieldValue(fieldId))
# add primary authors
for authData in self.getPrimaryAuthorList():
auth = abstract.newPrimaryAuthor(title=authData.getTitle(),
firstName=authData.getFirstName(),
surName=authData.getFamilyName(),
email=authData.getEmail(),
affiliation=authData.getAffiliation(),
address="",
telephone=authData.getPhone())
if authData.isSpeaker():
abstract.addSpeaker(auth)
# add co-authors
for authData in self.getCoAuthorList():
auth = abstract.newCoAuthor(title=authData.getTitle(),
firstName=authData.getFirstName(),
surName=authData.getFamilyName(),
email=authData.getEmail(),
affiliation=authData.getAffiliation(),
address="",
telephone=authData.getPhone())
if authData.isSpeaker():
abstract.addSpeaker(auth)
abstract.setContribType(self.type)
tracks = []
for trackId in self.tracks:
tracks.append(conf.getTrackById(trackId))
abstract.setTracks(tracks)
abstract.setComments(self.comments)
abstract.deleteFilesNotInList(self.existingFiles)
abstract.saveFiles(self.files)
def _setNewAuthors(self):
self._prAuthors = []
for author in self._prAuthorsListParam:
isSpeaker = author.get("isSpeaker", False)
values = {"title": author["title"],
"first_name": author["firstName"],
"family_name": author["familyName"],
"affiliation": author["affiliation"],
"email": author["email"],
"phone": author["phone"],
"isSpeaker": isSpeaker
}
authId = len(self._prAuthors)
self._prAuthors.append(Author(authId, **values))
self._coAuthors = []
for author in self._coAuthorsListParam:
isSpeaker = author.get("isSpeaker", False)
values = {"title": author["title"],
"first_name": author["firstName"],
"family_name": author["familyName"],
"affiliation": author["affiliation"],
"email": author["email"],
"phone": author["phone"],
"isSpeaker": isSpeaker
}
authId = len(self._prAuthors) + len(self._coAuthors)
self._coAuthors.append(Author(authId, **values))
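    # Example of the id numbering above: with two primary authors and one
    # co-author, the primary authors get ids 0 and 1, and the co-author
    # gets id 2.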
def _setExistingAuthors(self):
self._prAuthors = []
for author in self._prAuthorsListParam:
values = {"title": author.getTitle(),
"first_name": author.getFirstName(),
"family_name": author.getFamilyName(),
"affiliation": author.getAffiliation(),
"email": author.getEmail(),
"phone": author.getTelephone(),
"isSpeaker": author.isSpeaker()
}
authId = len(self._prAuthors)
self._prAuthors.append(Author(authId, **values))
self._coAuthors = []
for author in self._coAuthorsListParam:
values = {"title": author.getTitle(),
"first_name": author.getFirstName(),
"family_name": author.getFamilyName(),
"affiliation": author.getAffiliation(),
"email": author.getEmail(),
"phone": author.getTelephone(),
"isSpeaker": author.isSpeaker()
}
authId = len(self._prAuthors) + len(self._coAuthors)
self._coAuthors.append(Author(authId, **values))
def getFieldNames(self):
return ['f_%s' % id for id in self._otherFields.keys()]
def getFieldValue(self, id):
return self._otherFields.get(id, "")
def setFieldValue(self, id, value):
self._otherFields[id] = value
def getPrimaryAuthorList(self):
return self._prAuthors
def getCoAuthorList(self):
return self._coAuthors
def check(self):
errors = []
if self.title.strip() == "":
errors.append(_("Abstract title cannot be empty"))
for f in self._afm.getFields():
value = self._otherFields.get(f.getId(), "")
errors += f.check(value)
if not self.origin == "management":
if not self._prAuthorsListParam:
errors.append(_("No primary author has been specified. You must define at least one primary author"))
if not self._checkSpeaker() and self._absMgr.showSelectAsSpeaker() and self._absMgr.isSelectSpeakerMandatory():
errors.append(_("At least one presenter must be specified"))
if not self.tracks and self._absMgr.areTracksMandatory():
# check if there are tracks, otherwise the user cannot select at least one
if len(self._absMgr.getConference().getTrackList()) != 0:
errors.append(_("At least one track must be seleted"))
if self._hasExceededTotalSize():
errors.append(_("The maximum size allowed for the attachments (%sMB) has been exceeded.") % Config.getInstance().getMaxUploadFilesTotalSize())
return errors
def _checkSpeaker(self):
for author in self._prAuthors:
if author.isSpeaker():
return True
for author in self._coAuthors:
if author.isSpeaker():
return True
return False
def _hasExceededTotalSize(self):
maxSize = float(Config.getInstance().getMaxUploadFilesTotalSize())
return maxSize > 0 and self._headerSize > maxSize
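    # Worked example with hypothetical numbers: with a 3 MB limit and a
    # request header size of 4 * BYTES_1MB bytes, self._headerSize is 4.0,
    # so _hasExceededTotalSize() returns True.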
def toDict(self):
d = {"title": self.title,
"type": self.type,
"tracksSelectedList": self.tracks,
"prAuthors": self._prAuthors,
"coAuthors": self._coAuthors,
"comments": self.comments,
"attachments": self.files}
for f in self._afm.getFields():
id = f.getId()
d["f_"+id] = self._otherFields.get(id, "")
return d
class AbstractParam:
def __init__(self):
self._abstract = None
def _checkParams(self, params, conf, headerSize):
if params.has_key("abstractId"): # we are in modify
self._abstract = self._target = conf.getAbstractMgr().getAbstractById(params["abstractId"])
self._action = ""
if "cancel" in params:
self._action = "CANCEL"
return
typeId = params.get("type", "")
display_values = False
params["type"] = conf.getContribTypeById(typeId)
if ("prAuthors" not in params or "coAuthors" not in params) and self._abstract:
params.update({
"prAuthors": self._abstract.getPrimaryAuthorList(),
"coAuthors": self._abstract.getCoAuthorList(),
"file": self._abstract.getAttachments().values()
})
display_values = True
self._abstractData = AbstractData(conf.getAbstractMgr(), params, headerSize, displayValues=display_values)
if "validate" in params:
self._action = "VALIDATE"
| gpl-3.0 | 6,805,822,890,231,442,000 | 38.42492 | 154 | 0.568152 | false |
conorkcorbin/tractometry | tests.py | 1 | 1763 | import argparse
import numpy as np
import pandas as pd
import nibabel as nib
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from dipy.tracking.streamline import set_number_of_points
import utils
parser = argparse.ArgumentParser()
parser.add_argument('-testName', '--testName', required=True)
parser.add_argument('-bundle', '--bundle', default=None, required=False)
parser.add_argument('-csv', '--csv', default=None, required=False)
parser.add_argument('-roi1', '--roi1', default=None, required=False)
parser.add_argument('-roi2', '--roi2', default=None, required=False)
parser.add_argument('-out', '--out', default=None, required=False)
args = parser.parse_args()
if args.testName == 'sliceFibers':
tracks, hdr = nib.trackvis.read(args.bundle)
streamlines = [trk[0] for trk in tracks]
streamlines = [np.divide(s,hdr['voxel_size']) for s in streamlines]
roi1_img = nib.load(args.roi1)
roi1 = roi1_img.get_data()
roi1 = np.fliplr(roi1)
roi2_img = nib.load(args.roi2)
roi2 = roi2_img.get_data()
roi2 = np.fliplr(roi2)
sliced_streamlines = utils.sliceFibers(streamlines,roi1,roi2)
sliced_streamlines = [np.multiply(s,hdr['voxel_size']) for s in sliced_streamlines]
sliced_tracks = [(s,None,None) for s in sliced_streamlines]
nib.trackvis.write(args.out,sliced_tracks,hdr)
elif args.testName == 'tracks2CSV':
tracks, hdr = nib.trackvis.read(args.bundle)
streamlines = [trk[0] for trk in tracks]
# streamlines = [np.divide(s,hdr['voxel_size']) for s in streamlines]
utils.tracks2CSV(args.out,streamlines)
elif args.testName == 'csv2Tracks':
tracks, hdr = nib.trackvis.read(args.bundle)
streamlines = utils.csv2Tracks(args.csv)
tracks = [(s,None,None) for s in streamlines]
nib.trackvis.write(args.out,tracks,hdr)
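# Example invocations (illustrative only; the file names are placeholders):
#   python tests.py -testName sliceFibers -bundle bundle.trk \
#       -roi1 roi1.nii.gz -roi2 roi2.nii.gz -out sliced.trk
#   python tests.py -testName tracks2CSV -bundle bundle.trk -out tracks.csv
#   python tests.py -testName csv2Tracks -bundle bundle.trk -csv tracks.csv -out rebuilt.trk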
| mit | -5,613,614,758,966,588,000 | 35 | 84 | 0.734543 | false |
rdo-management/heat | heat/engine/resources/openstack/neutron/provider_net.py | 2 | 4623 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from heat.common import exception
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import constraints
from heat.engine import properties
from heat.engine.resources.openstack.neutron import net
from heat.engine import support
class ProviderNet(net.Net):
support_status = support.SupportStatus(version='2014.1')
PROPERTIES = (
NAME, PROVIDER_NETWORK_TYPE, PROVIDER_PHYSICAL_NETWORK,
PROVIDER_SEGMENTATION_ID, ADMIN_STATE_UP, SHARED,
) = (
'name', 'network_type', 'physical_network',
'segmentation_id', 'admin_state_up', 'shared',
)
ATTRIBUTES = (
STATUS, SUBNETS, SHOW,
) = (
'status', 'subnets', 'show',
)
properties_schema = {
NAME: net.Net.properties_schema[NAME],
PROVIDER_NETWORK_TYPE: properties.Schema(
properties.Schema.STRING,
_('A string specifying the provider network type for the '
'network.'),
update_allowed=True,
required=True,
constraints=[
constraints.AllowedValues(['vlan', 'flat']),
]
),
PROVIDER_PHYSICAL_NETWORK: properties.Schema(
properties.Schema.STRING,
_('A string specifying physical network mapping for the '
'network.'),
update_allowed=True,
required=True,
),
PROVIDER_SEGMENTATION_ID: properties.Schema(
properties.Schema.STRING,
_('A string specifying the segmentation id for the '
'network.'),
update_allowed=True
),
ADMIN_STATE_UP: net.Net.properties_schema[ADMIN_STATE_UP],
SHARED: properties.Schema(
properties.Schema.BOOLEAN,
_('Whether this network should be shared across all tenants.'),
default=True,
update_allowed=True
),
}
attributes_schema = {
STATUS: attributes.Schema(
_("The status of the network.")
),
SUBNETS: attributes.Schema(
_("Subnets of this network.")
),
SHOW: attributes.Schema(
_("All attributes.")
),
}
def validate(self):
'''
Validates to ensure that segmentation_id is not there for flat
network type.
'''
super(ProviderNet, self).validate()
if (self.properties[self.PROVIDER_SEGMENTATION_ID] and
self.properties[self.PROVIDER_NETWORK_TYPE] != 'vlan'):
msg = _('segmentation_id not allowed for flat network type.')
raise exception.StackValidationFailed(message=msg)
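        # For instance, network_type 'vlan' with segmentation_id '100' passes
        # validation, while network_type 'flat' with any segmentation_id
        # raises StackValidationFailed.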
@staticmethod
def add_provider_extension(props, key):
props['provider:' + key] = props.pop(key)
@staticmethod
def prepare_provider_properties(self, props):
self.add_provider_extension(props, self.PROVIDER_NETWORK_TYPE)
self.add_provider_extension(props, self.PROVIDER_PHYSICAL_NETWORK)
if self.PROVIDER_SEGMENTATION_ID in six.iterkeys(props):
self.add_provider_extension(props, self.PROVIDER_SEGMENTATION_ID)
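    # Example of the renaming above:
    #   {'network_type': 'vlan', 'physical_network': 'physnet1',
    #    'segmentation_id': '100'}
    # becomes
    #   {'provider:network_type': 'vlan',
    #    'provider:physical_network': 'physnet1',
    #    'provider:segmentation_id': '100'}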
def handle_create(self):
'''
Adds 'provider:' extension to the required properties during create.
'''
props = self.prepare_properties(
self.properties,
self.physical_resource_name())
self.prepare_provider_properties(self, props)
prov_net = self.neutron().create_network({'network': props})['network']
self.resource_id_set(prov_net['id'])
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
'''
Adds 'provider:' extension to the required properties during update.
'''
props = self.prepare_update_properties(json_snippet)
self.prepare_provider_properties(self, props)
self.neutron().update_network(self.resource_id, {'network': props})
def resource_mapping():
return {
'OS::Neutron::ProviderNet': ProviderNet,
}
| apache-2.0 | -4,678,954,023,445,764,000 | 31.787234 | 79 | 0.61843 | false |
writefaruq/lionface-app | django/contrib/gis/tests/geo3d/models.py | 12 | 1904 | from django.contrib.gis.db import models
class City3D(models.Model):
name = models.CharField(max_length=30)
point = models.PointField(dim=3)
objects = models.GeoManager()
def __unicode__(self):
return self.name
class Interstate2D(models.Model):
name = models.CharField(max_length=30)
line = models.LineStringField(srid=4269)
objects = models.GeoManager()
def __unicode__(self):
return self.name
class Interstate3D(models.Model):
name = models.CharField(max_length=30)
line = models.LineStringField(dim=3, srid=4269)
objects = models.GeoManager()
def __unicode__(self):
return self.name
class InterstateProj2D(models.Model):
name = models.CharField(max_length=30)
line = models.LineStringField(srid=32140)
objects = models.GeoManager()
def __unicode__(self):
return self.name
class InterstateProj3D(models.Model):
name = models.CharField(max_length=30)
line = models.LineStringField(dim=3, srid=32140)
objects = models.GeoManager()
def __unicode__(self):
return self.name
class Polygon2D(models.Model):
name = models.CharField(max_length=30)
poly = models.PolygonField(srid=32140)
objects = models.GeoManager()
def __unicode__(self):
return self.name
class Polygon3D(models.Model):
name = models.CharField(max_length=30)
poly = models.PolygonField(dim=3, srid=32140)
objects = models.GeoManager()
def __unicode__(self):
return self.name
class Point2D(models.Model):
point = models.PointField()
objects = models.GeoManager()
class Point3D(models.Model):
point = models.PointField(dim=3)
objects = models.GeoManager()
class MultiPoint3D(models.Model):
mpoint = models.MultiPointField(dim=3)
objects = models.GeoManager()
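# Illustrative usage of the 3D models above (a sketch, assuming a spatial
# backend that stores Z coordinates; the coordinates are made up):
#
# from django.contrib.gis.geos import Point
# City3D.objects.create(name='Houston', point=Point(-95.36, 29.76, 12.0))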
| bsd-3-clause | 1,716,627,517,714,469,600 | 25.594203 | 52 | 0.654412 | false |
Jai-Chaudhary/termite-data-server | web2py/scripts/cpdb.py | 8 | 25004 | import os
import sys
from collections import deque
import string
import argparse
import cStringIO
import operator
import cPickle as pickle
import math
import re
import cmd
try:
import pyreadline as readline
except ImportError:
import readline
try:
from gluon import DAL
except ImportError as err:
print('gluon path not found')
class refTable(object):
def __init__(self):
self.columns = None
self.rows = None
def getcolHeader(self, colHeader):
return "{0}".format(' | '.join([string.join(string.strip('**{0}**'.format(item)),
'') for item in colHeader]))
def wrapTable(
self, rows, hasHeader=False, headerChar='-', delim=' | ', justify='left',
separateRows=False, prefix='', postfix='', wrapfunc=lambda x: x):
def rowWrapper(row):
            '''---
            newRows holds each cell's wrapped lines,
            e.g. [['w'], ['x'], ['y'], ['z']]
            ---'''
newRows = [wrapfunc(item).split('\n') for item in row]
self.rows = newRows
            '''---
            rowList transposes newRows so each entry holds one
            physical line per column, e.g. [['w', 'x', 'y', 'z']]
            ---'''
rowList = [[substr or '' for substr in item]
for item in map(None, *newRows)]
return rowList
logicalRows = [rowWrapper(row) for row in rows]
columns = map(None, *reduce(operator.add, logicalRows))
self.columns = columns
        maxWidths = [max([len(str(item)) for item in column])
                     for column in columns]
        rowSeparator = headerChar * (len(prefix) + len(postfix) + sum(maxWidths) +
                                     len(delim) * (len(maxWidths) - 1))
        justify = {'center': str.center,
                   'right': str.rjust,
                   'left': str.ljust}[justify.lower()]
output = cStringIO.StringIO()
if separateRows:
print >> output, rowSeparator
for physicalRows in logicalRows:
for row in physicalRows:
print >> output,\
prefix + delim.join([
justify(str(item), width) for (
item, width) in zip(row, maxWidths)]
) + postfix
if separateRows or hasHeader:
print >> output, rowSeparator
hasHeader = False
return output.getvalue()
def wrap_onspace(self, text, width):
        return reduce(
            lambda line, word, width=width: '{0}{1}{2}'.format(
                line,
                ' \n'[(len(line[line.rfind('\n') + 1:]) +
                       len(word.split('\n', 1)[0]) >= width)],
                word),
            text.split(' '))
def wrap_onspace_strict(self, text, width):
wordRegex = re.compile(r'\S{' + str(width) + r',}')
        return self.wrap_onspace(
            wordRegex.sub(lambda m: self.wrap_always(m.group(), width), text),
            width)
def wrap_always(self, text, width):
        return '\n'.join(
            [text[width * i:width * (i + 1)]
             for i in xrange(int(math.ceil(1. * len(text) / width)))])
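    # Worked examples for the wrappers above:
    #   wrap_always('abcdefgh', 3)   -> 'abc\ndef\ngh'
    #   wrap_onspace('aa bb cc', 5)  -> 'aa bb\ncc'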
class tableHelper():
def __init__(self):
self.oTable = refTable()
def getAsRows(self, data):
return [row.strip().split(',') for row in data.splitlines()]
def getTable_noWrap(self, data, header=None):
rows = self.getAsRows(data)
if header is not None:
hRows = [header] + rows
else:
hRows = rows
table = self.oTable.wrapTable(hRows, hasHeader=True)
return table
def getTable_Wrap(self, data, wrapStyle, header=None, width=65):
wrapper = None
if len(wrapStyle) > 1:
rows = self.getAsRows(data)
if header is not None:
hRows = [header] + rows
else:
hRows = rows
for wrapper in (self.oTable.wrap_always,
self.oTable.wrap_onspace,
self.oTable.wrap_onspace_strict):
return self.oTable.wrapTable(hRows, hasHeader=True, separateRows=True, prefix='| ', postfix=' |', wrapfunc=lambda x:
wrapper(x, width))
else:
return self.getTable_noWrap(data, header)
def getAsErrorTable(self, err):
return self.getTable_Wrap(err, None)
class console:
def __init__(self, prompt, banner=None):
self.prompt = prompt
self.banner = banner
self.commands = {}
self.commandSort = []
self.db = None
for i in dir(self):
if "cmd_" == i[:4]:
cmd = i.split("cmd_")[1].lower()
self.commands[cmd] = getattr(self, i)
try:
                    self.commandSort.append(
                        (int(self.commands[cmd].__doc__.split("|")[0]), cmd))
except:
pass
self.commandSort.sort()
self.commandSort = [i[1] for i in self.commandSort]
self.var_DEBUG = False
self.var_tableStyle = ''
self.configvars = {}
for i in dir(self):
if "var_" == i[:4]:
var = i.split("var_")[1]
self.configvars[var] = i
def setBanner(self, banner):
self.banner = banner
def execCmd(self, db):
self.db = db
print self.banner
while True:
try:
command = raw_input(self.prompt)
try:
self.execCommand(command)
except:
self.execute(command)
except KeyboardInterrupt:
break
except EOFError:
break
except Exception, a:
self.printError(a)
print ("\r\n\r\nBye!...")
sys.exit(0)
def printError(self, err):
sys.stderr.write("Error: {0}\r\n".format(str(err),))
if self.var_DEBUG:
pass
def execute(self, cmd):
try:
if not '-table ' in cmd:
exec '{0}'.format(cmd)
else:
file = None
table = None
fields = []
items = string.split(cmd, ' ')
invalidParams = []
table = self.getTable(items[1])
allowedParams = ['fields', 'file']
for i in items:
if '=' in i and not string.split(i, '=')[0] in allowedParams:
try:
invalidParams.append(i)
except Exception, err:
raise Exception('invalid parameter\n{0}'.format(i))
else:
if 'file=' in i:
file = os.path.abspath(string.strip(string.split(
i, '=')[1]))
if 'fields=' in i:
for field in string.split(string.split(i, '=')[1], ','):
if field in self.db[table].fields:
fields.append(string.strip(field))
if len(invalidParams) > 0:
                    print('the following parameter(s) are not valid\n{0}'.format(
                        string.join(invalidParams, ',')))
else:
try:
self.cmd_table(table, file, fields)
except Exception, err:
print('could not generate table for table {0}\n{1}'
.format(table, err))
except Exception, err:
print('sorry, can not do that!\n{0}'.format(err))
def getTable(self, tbl):
        for mTbl in self.db.tables:
if tbl in mTbl:
if mTbl.startswith(tbl):
return mTbl
def execCommand(self, cmd):
words = cmd.split(" ")
words = [i for i in words if i]
if not words:
return
cmd, parameters = words[0].lower(), words[1:]
if not cmd in self.commands:
raise Exception(
"Command {0} not found. Try 'help'\r\n".format(cmd))
self.commands[cmd](*parameters)
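        # For example, execCommand('set tableStyle "wrap_always"') dispatches
        # to cmd_set('tableStyle', '"wrap_always"'); an unknown command raises
        # here, and execCmd() falls back to execute() for raw Python input.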
'''---
DEFAULT COMMANDS (begins with cmd_)
---'''
def cmd_clear(self, numlines=100):
"""-5|clear|clear the screen"""
if os.name == "posix":
'''---
Unix/Linux/MacOS/BSD/etc
---'''
os.system('clear')
elif os.name in ("nt", "dos", "ce"):
'''---
Windows
---'''
os.system('CLS')
else:
'''---
Fallback for other operating systems.
---'''
print '\n' * numlines
def cmd_table(self, tbl, file=None, fields=[]):
"""-4|-table [TABLENAME] optional[file=None] [fields=None]|\
the default tableStyle is no_wrap - use the 'set x y' command to change the style\n\
style choices:
\twrap_always
\twrap_onspace
\twrap_onspace_strict
\tno_wrap (value '')\n
\t the 2nd optional param is a path to a file where the table will be written
\t the 3rd optional param is a list of fields you want displayed\n"""
table = None
        for mTbl in self.db.tables:
if tbl in mTbl:
if mTbl.startswith(tbl):
table = mTbl
break
oTable = tableHelper()
'''---
tablestyle:
wrap_always
wrap_onspace
wrap_onspace_strict
or set set to "" for no wrapping
---'''
tableStyle = self.var_tableStyle
filedNotFound = []
table_fields = None
if len(fields) == 0:
table_fields = self.db[table].fields
else:
table_fields = fields
for field in fields:
if not field in self.db[table].fields:
filedNotFound.append(field)
if len(filedNotFound) == 0:
rows = self.db(self.db[table].id > 0).select()
rows_data = []
for row in rows:
rowdata = []
for f in table_fields:
rowdata.append('{0}'.format(row[f]))
rows_data.append(string.join(rowdata, ','))
data = string.join(rows_data, '\n')
dataTable = oTable.getTable_Wrap(data, tableStyle, table_fields)
print('TABLE {0}\n{1}'.format(table, dataTable))
if file is not None:
try:
tail, head = os.path.split(file)
try:
os.makedirs(tail)
                except:
                    pass  # do nothing, the folders already exist
oFile = open(file, 'w')
oFile.write('TABLE: {0}\n{1}'.format(table, dataTable))
oFile.close()
print('{0} has been created and populated with all available data from table {1}\n'.format(file, table))
except Exception, err:
print("EXCEPTION: could not create table {0}\n{1}".format(
table, err))
else:
print('the following fields are not valid [{0}]'.format(
string.join(filedNotFound, ',')))
def cmd_help(self, *args):
        '''-3|help|Shows help'''
alldata = []
lengths = []
for i in self.commandSort:
alldata.append(
self.commands[i].__doc__.split("|")[1:])
for i in alldata:
if len(i) > len(lengths):
                for j in range(len(i) - len(lengths)):
                    lengths.append(0)
j = 0
while j < len(i):
if len(i[j]) > lengths[j]:
lengths[j] = len(i[j])
j += 1
print ("-" * (lengths[0] + lengths[1] + 4))
for i in alldata:
print (("%-" + str(lengths[0]) + "s - %-" + str(
lengths[1]) + "s") % (i[0], i[1]))
if len(i) > 2:
for j in i[2:]: print (("%" + str(lengths[
0] + 9) + "s* %s") % (" ", j))
print
def cmd_vars(self, *args):
'''-2|vars|Show variables'''
print ("variables\r\n" + "-" * 79)
for i, j in self.configvars.items():
value = self.parfmt(repr(getattr(self, j)), 52)
print ("| %20s | %52s |" % (i, value[0]))
for k in value[1:]: print ("| %20s | %52s |" % ("", k))
if len(value) > 1:
print("| %20s | %52s |" % ("", ""))
print ("-" * 79)
def parfmt(self, txt, width):
res = []
pos = 0
while True:
a = txt[pos:pos + width]
if not a:
break
res.append(a)
pos += width
return res
def cmd_set(self, *args):
'''-1|set [variable_name] [value]|Set configuration variable value|Values are an expressions (100 | string.lower('ABC') | etc.'''
value = " ".join(args[1:])
if args[0] not in self.configvars:
setattr(self, "var_{0}".format(args[0]), eval(value))
setattr(self, "var_{0}".format(args[0]), eval(value))
def cmd_clearscreen(self, numlines=50):
'''---Clear the console.
---'''
if os.name == "posix":
'''---
Unix/Linux/MacOS/BSD/etc
---'''
os.system('clear')
elif os.name in ("nt", "dos", "ce"):
'''---
Windows
---'''
os.system('CLS')
else:
'''---
Fallback for other operating systems.
---'''
print '\n' * numlines
class dalShell(console):
def __init__(self):
pass
def shell(self, db):
console.__init__(self, prompt=">>> ", banner='dal interactive shell')
self.execCmd(db)
class setCopyDB():
def __init__(self):
'''---
non source or target specific vars
---'''
self.strModel = None
self.dalPath = None
self.db = None
'''---
source vars
---'''
self.sourceModel = None
self.sourceFolder = None
self.sourceConnectionString = None
self.sourcedbType = None
self.sourcedbName = None
'''---
target vars
---'''
self.targetdbType = None
self.targetdbName = None
self.targetModel = None
self.targetFolder = None
self.targetConnectionString = None
self.truncate = False
def _getDal(self):
mDal = None
if self.dalPath is not None:
global DAL
sys.path.append(self.dalPath)
mDal = __import__(
'dal', globals={}, locals={}, fromlist=['DAL'], level=0)
DAL = mDal.DAL
return mDal
def instDB(self, storageFolder, storageConnectionString, autoImport):
self.db = DAL(storageConnectionString, folder=os.path.abspath(
storageFolder), auto_import=autoImport)
return self.db
def delete_DB_tables(self, storageFolder, storageType):
        print 'delete_DB_tables\n\t{0}\n\t{1}'.format(
            storageFolder, storageType)
dataFiles = [storageType, "sql.log"]
try:
for f in os.listdir(storageFolder):
if ".table" in f:
fTable = "{0}/{1}".format(storageFolder, f)
os.remove(fTable)
print('deleted {0}'.format(fTable))
for dFile in dataFiles:
os.remove("{0}/{1}".format(storageFolder, dFile))
print('deleted {0}'.format(
"{0}/{1}".format(storageFolder, dFile)))
except Exception, errObj:
print(str(errObj))
def truncatetables(self, tables=[]):
if len(tables) != 0:
try:
print 'table value: {0}'.format(tables)
for tbl in self.db.tables:
for mTbl in tables:
if mTbl.startswith(tbl):
self.db[mTbl].truncate()
except Exception, err:
print('EXCEPTION: {0}'.format(err))
else:
try:
for tbl in self.db.tables:
self.db[tbl].truncate()
except Exception, err:
print('EXCEPTION: {0}'.format(err))
def copyDB(self):
other_db = DAL("{0}://{1}".format(
self.targetdbType, self.targetdbName), folder=self.targetFolder)
print 'creating tables...'
for table in self.db:
other_db.define_table(
table._tablename, *[field for field in table])
'''
should there be an option to truncAte target DB?
if yes, then change args to allow for choice
and set self.trancate to the art value
if self.truncate==True:
other_db[table._tablename].truncate()
'''
print 'exporting data...'
self.db.export_to_csv_file(open('tmp.sql', 'wb'))
print 'importing data...'
other_db.import_from_csv_file(open('tmp.sql', 'rb'))
other_db.commit()
print 'done!'
print 'Attention: do not run this program again or you end up with duplicate records'
def createfolderPath(self, folder):
try:
if folder is not None:
os.makedirs(folder)
except Exception, err:
pass
if __name__ == '__main__':
oCopy = setCopyDB()
db = None
targetDB = None
dbfolder = None
clean = False
model = None
truncate = False
parser = argparse.ArgumentParser(description='\
samplecmd line:\n\
-f ./blueLite/db_storage -i -y sqlite://storage.sqlite -Y sqlite://storage2.sqlite -d ./blueLite/pyUtils/sql/blueSQL -t True',
epilog='')
reqGroup = parser.add_argument_group('Required arguments')
reqGroup.add_argument('-f', '--sourceFolder', required=True, help="path to the 'source' folder of the 'source' DB")
reqGroup.add_argument('-F', '--targetFolder', required=False, help="path to the 'target' folder of the 'target' DB")
reqGroup.add_argument('-y', '--sourceConnectionString', required=True, help="source db connection string ()\n\
------------------------------------------------\n\
\
sqlite://storage.db\n\
mysql://username:password@localhost/test\n\
postgres://username:password@localhost/test\n\
mssql://username:password@localhost/test\n\
firebird://username:password@localhost/test\n\
oracle://username/password@test\n\
db2://username:password@test\n\
ingres://username:password@localhost/test\n\
informix://username:password@test\n\
\
------------------------------------------------")
reqGroup.add_argument('-Y', '--targetConnectionString', required=True,
help="target db type (sqlite,mySql,etc.)")
autoImpGroup = parser.add_argument_group('optional args (auto_import)')
autoImpGroup.add_argument('-a', '--autoimport', required=False, help='set to True to bypass loading of the model')
"""
*** removing -m/-M options for now --> i need a
better regex to match db.define('bla')...with optional db.commit()
modelGroup=parser.add_argument_group('optional args (create model)')
modelGroup.add_argument('-m','--sourcemodel'\
,required=False\
,help='to create a model from an existing model, point to the source model')
modelGroup.add_argument('-M','--targetmodel'\
,required=False\
,help='to create a model from an existing model, point to the target model')
"""
miscGroup = parser.add_argument_group('optional args/tasks')
miscGroup.add_argument('-i', '--interactive', required=False, action='store_true', help='run in interactive mode')
miscGroup.add_argument(
'-d', '--dal', required=False, help='path to dal.py')
miscGroup.add_argument('-t', '--truncate', choices=['True', 'False'], help='delete the records but *not* the table of the SOURCE DB')
    miscGroup.add_argument('-b', '--tables', required=False, help='optional list (comma delimited) of SOURCE tables to truncate, defaults to all')
miscGroup.add_argument('-c', '--clean', required=False, help='delete the DB,tables and the log file, WARNING: this is unrecoverable')
args = parser.parse_args()
db = None
mDal = None
try:
oCopy.sourceFolder = args.sourceFolder
oCopy.targetFolder = args.sourceFolder
sourceItems = string.split(args.sourceConnectionString, '://')
oCopy.sourcedbType = sourceItems[0]
oCopy.sourcedbName = sourceItems[1]
targetItems = string.split(args.targetConnectionString, '://')
oCopy.targetdbType = targetItems[0]
oCopy.targetdbName = targetItems[1]
except Exception, err:
print('EXCEPTION: {0}'.format(err))
if args.dal:
try:
autoImport = True
if args.autoimport:
autoImport = args.autoimport
#sif not DAL in globals:
#if not sys.path.__contains__():
oCopy.dalPath = args.dal
mDal = oCopy._getDal()
db = oCopy.instDB(args.sourceFolder, args.sourceConnectionString,
autoImport)
except Exception, err:
print('EXCEPTION: could not set DAL\n{0}'.format(err))
    if args.truncate:
        try:
            if args.tables:
                tables = string.split(string.strip(args.tables), ',')
                oCopy.truncatetables(tables)
            else:
                oCopy.truncatetables([])
        except Exception, err:
            print('EXCEPTION: could not truncate tables\n{0}'.format(err))
try:
if args.clean:
            oCopy.delete_DB_tables(oCopy.targetFolder, oCopy.targetdbType)
except Exception, err:
print('EXCEPTION: could not clean db\n{0}'.format(err))
"""
*** goes with -m/-M options... removed for now
if args.sourcemodel:
try:
oCopy.sourceModel=args.sourcemodel
oCopy.targetModel=args.sourcemodel
oCopy.createModel()
except Exception, err:
print('EXCEPTION: could not create model\n\
source model: {0}\n\
target model: {1}\n\
{2}'.format(args.sourcemodel,args.targetmodel,err))
"""
if args.sourceFolder:
try:
oCopy.sourceFolder = os.path.abspath(args.sourceFolder)
oCopy.createfolderPath(oCopy.sourceFolder)
except Exception, err:
print('EXCEPTION: could not create folder path\n{0}'.format(err))
else:
oCopy.dbStorageFolder = os.path.abspath(os.getcwd())
if args.targetFolder:
try:
oCopy.targetFolder = os.path.abspath(args.targetFolder)
oCopy.createfolderPath(oCopy.targetFolder)
except Exception, err:
print('EXCEPTION: could not create folder path\n{0}'.format(err))
if not args.interactive:
try:
oCopy.copyDB()
except Exception, err:
print('EXCEPTION: could not make a copy of the database\n{0}'.format(err))
else:
s = dalShell()
s.shell(db)
| bsd-3-clause | -3,011,977,756,521,602,000 | 35.028818 | 157 | 0.480683 | false |
caesar2164/edx-platform | common/djangoapps/student/views.py | 1 | 114210 | """
Student Views
"""
import datetime
import logging
import uuid
import json
import warnings
from collections import defaultdict
from urlparse import urljoin, urlsplit, parse_qs, urlunsplit
from django.views.generic import TemplateView
from pytz import UTC
from requests import HTTPError
from ipware.ip import get_ip
import edx_oauth2_provider
from django.conf import settings
from django.contrib.auth import logout, authenticate, login
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.contrib.auth.views import password_reset_confirm
from django.contrib import messages
from django.core.context_processors import csrf
from django.core import mail
from django.core.urlresolvers import reverse, NoReverseMatch, reverse_lazy
from django.core.validators import validate_email, ValidationError
from django.db import IntegrityError, transaction
from django.http import HttpResponse, HttpResponseBadRequest, HttpResponseForbidden, HttpResponseServerError, Http404
from django.shortcuts import redirect
from django.utils.encoding import force_bytes, force_text
from django.utils.translation import ungettext
from django.utils.http import base36_to_int, urlsafe_base64_encode, urlencode
from django.utils.translation import ugettext as _, get_language
from django.views.decorators.csrf import csrf_exempt, ensure_csrf_cookie
from django.views.decorators.http import require_POST, require_GET
from django.db.models.signals import post_save
from django.dispatch import receiver, Signal
from django.template.response import TemplateResponse
from provider.oauth2.models import Client
from ratelimitbackend.exceptions import RateLimitException
from social.apps.django_app import utils as social_utils
from social.backends import oauth as social_oauth
from social.exceptions import AuthException, AuthAlreadyAssociated
from edxmako.shortcuts import render_to_response, render_to_string
from util.enterprise_helpers import data_sharing_consent_requirement_at_login
from course_modes.models import CourseMode
from shoppingcart.api import order_history
from student.models import (
Registration, UserProfile,
PendingEmailChange, CourseEnrollment, CourseEnrollmentAttribute, unique_id_for_user,
CourseEnrollmentAllowed, UserStanding, LoginFailures,
create_comments_service_user, PasswordHistory, UserSignupSource,
DashboardConfiguration, LinkedInAddToProfileConfiguration, ManualEnrollmentAudit, ALLOWEDTOENROLL_TO_ENROLLED,
LogoutViewConfiguration, RegistrationCookieConfiguration)
from student.forms import AccountCreationForm, PasswordResetFormNoActive, get_registration_extension_form
from student.tasks import send_activation_email
from lms.djangoapps.commerce.utils import EcommerceService # pylint: disable=import-error
from lms.djangoapps.verify_student.models import SoftwareSecurePhotoVerification # pylint: disable=import-error
from bulk_email.models import Optout, BulkEmailFlag # pylint: disable=import-error
from certificates.models import CertificateStatuses, certificate_status_for_student
from certificates.api import ( # pylint: disable=import-error
get_certificate_url,
has_html_certificates_enabled,
)
from lms.djangoapps.grades.new.course_grade import CourseGradeFactory
from xmodule.modulestore.django import modulestore
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import CourseLocator
from xmodule.modulestore.exceptions import ItemNotFoundError
from collections import namedtuple
from courseware.courses import (  # pylint: disable=import-error
    get_course_about_section, get_courses, sort_by_announcement, sort_by_start_date
)
from courseware.access import has_access
from courseware.models import CoursePreference
from django_comment_common.models import Role
from openedx.core.djangoapps.external_auth.models import ExternalAuthMap
import openedx.core.djangoapps.external_auth.views
from openedx.core.djangoapps.external_auth.login_and_register import (
login as external_auth_login,
register as external_auth_register
)
import track.views
import dogstats_wrapper as dog_stats_api
from util.date_utils import get_default_time_display
from util.db import outer_atomic
from util.json_request import JsonResponse
from util.bad_request_rate_limiter import BadRequestRateLimiter
from util.keyword_substitution import substitute_keywords_with_data
from util.milestones_helpers import (
get_pre_requisite_courses_not_completed,
)
from util.password_policy_validators import validate_password_strength
import third_party_auth
from third_party_auth import pipeline, provider
from student.helpers import (
check_verify_status_by_course,
auth_pipeline_urls, get_next_url_for_login_page,
DISABLE_UNENROLL_CERT_STATES,
destroy_oauth_tokens
)
from student.cookies import set_logged_in_cookies, delete_logged_in_cookies, set_user_info_cookie
from student.models import anonymous_id_for_user, UserAttribute, EnrollStatusChange
from shoppingcart.models import DonationConfiguration, CourseRegistrationCode
from openedx.core.djangoapps.embargo import api as embargo_api
import analytics
from eventtracking import tracker
# Note that this lives in LMS, so this dependency should be refactored.
from notification_prefs.views import enable_notifications
from openedx.core.djangoapps.credit.email_utils import get_credit_provider_display_names, make_providers_strings
from openedx.core.djangoapps.lang_pref import LANGUAGE_KEY
from openedx.core.djangoapps.programs import utils as programs_utils
from openedx.core.djangoapps.programs.models import ProgramsApiConfig
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from openedx.core.djangoapps.theming import helpers as theming_helpers
from openedx.core.djangoapps.user_api.preferences import api as preferences_api
from openedx.core.djangoapps.catalog.utils import get_programs_data
from openedx.stanford.common.djangoapps.student.views import notify_enrollment_by_email
log = logging.getLogger("edx.student")
AUDIT_LOG = logging.getLogger("audit")
ReverifyInfo = namedtuple('ReverifyInfo', 'course_id course_name course_number date status display') # pylint: disable=invalid-name
SETTING_CHANGE_INITIATED = 'edx.user.settings.change_initiated'
# Used as the name of the user attribute for tracking affiliate registrations
REGISTRATION_AFFILIATE_ID = 'registration_affiliate_id'
REGISTRATION_UTM_PARAMETERS = {
'utm_source': 'registration_utm_source',
'utm_medium': 'registration_utm_medium',
'utm_campaign': 'registration_utm_campaign',
'utm_term': 'registration_utm_term',
'utm_content': 'registration_utm_content',
}
REGISTRATION_UTM_CREATED_AT = 'registration_utm_created_at'
# used to announce a registration
REGISTER_USER = Signal(providing_args=["user", "profile"])
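# The constant below is the configured lockout period (in seconds) padded by
# five minutes and converted to whole minutes, so the wait time reported in
# the lockout message errs on the long side.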
LOGIN_LOCKOUT_PERIOD_PLUS_FIVE_MINUTES = int((5 * 60 + settings.MAX_FAILED_LOGIN_ATTEMPTS_LOCKOUT_PERIOD_SECS) / 60)
# Disable this warning because it doesn't make sense to completely refactor tests to appease Pylint
# pylint: disable=logging-format-interpolation
def csrf_token(context):
"""A csrf token that can be included in a form."""
token = context.get('csrf_token', '')
if token == 'NOTPROVIDED':
return ''
return (u'<div style="display:none"><input type="hidden"'
' name="csrfmiddlewaretoken" value="%s" /></div>' % (token))
# NOTE: This view is not linked to directly--it is called from
# branding/views.py:index(), which is cached for anonymous users.
# This means that it should always return the same thing for anon
# users. (in particular, no switching based on query params allowed)
def index(request, extra_context=None, user=AnonymousUser()):
"""
Render the edX main page.
extra_context is used to allow immediate display of certain modal windows, eg signup,
as used by external_auth.
"""
if extra_context is None:
extra_context = {}
programs_list = []
courses = get_courses(user)
if configuration_helpers.get_value(
"ENABLE_COURSE_SORTING_BY_START_DATE",
settings.FEATURES["ENABLE_COURSE_SORTING_BY_START_DATE"],
):
courses = sort_by_start_date(courses)
else:
courses = sort_by_announcement(courses)
context = {'courses': courses}
context['homepage_overlay_html'] = configuration_helpers.get_value('homepage_overlay_html')
# This appears to be an unused context parameter, at least for the master templates...
context['show_partners'] = configuration_helpers.get_value('show_partners', True)
# TO DISPLAY A YOUTUBE WELCOME VIDEO
# 1) Change False to True
context['show_homepage_promo_video'] = configuration_helpers.get_value('show_homepage_promo_video', False)
# 2) Add your video's YouTube ID (11 chars, eg "123456789xX"), or specify via site configuration
# Note: This value should be moved into a configuration setting and plumbed-through to the
# context via the site configuration workflow, versus living here
youtube_video_id = configuration_helpers.get_value('homepage_promo_video_youtube_id', "your-youtube-id")
context['homepage_promo_video_youtube_id'] = youtube_video_id
# allow for theme override of the courses list
context['courses_list'] = theming_helpers.get_template_path('courses_list.html')
# Insert additional context for use in the template
context.update(extra_context)
# Getting all the programs from course-catalog service. The programs_list is being added to the context but it's
# not being used currently in lms/templates/index.html. To use this list, you need to create a custom theme that
# overrides index.html. The modifications to index.html to display the programs will be done after the support
# for edx-pattern-library is added.
if configuration_helpers.get_value("DISPLAY_PROGRAMS_ON_MARKETING_PAGES",
settings.FEATURES.get("DISPLAY_PROGRAMS_ON_MARKETING_PAGES")):
programs_list = get_programs_data(user)
context["programs_list"] = programs_list
return render_to_response('index.html', context)
def process_survey_link(survey_link, user):
"""
If {UNIQUE_ID} appears in the link, replace it with a unique id for the user.
Currently, this is sha1(user.username). Otherwise, return survey_link.
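    Example (link and hash value hypothetical):
        process_survey_link("http://example.com/survey?uid={UNIQUE_ID}", user)
        # -> "http://example.com/survey?uid=<sha1 of user.username>"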
"""
return survey_link.format(UNIQUE_ID=unique_id_for_user(user))
def cert_info(user, course_overview, course_mode):
"""
Get the certificate info needed to render the dashboard section for the given
student and course.
Arguments:
user (User): A user.
course_overview (CourseOverview): A course.
course_mode (str): The enrollment mode (honor, verified, audit, etc.)
Returns:
dict: Empty dict if certificates are disabled or hidden, or a dictionary with keys:
'status': one of 'generating', 'ready', 'notpassing', 'processing', 'restricted'
'show_download_url': bool
'download_url': url, only present if show_download_url is True
'show_disabled_download_button': bool -- true if state is 'generating'
'show_survey_button': bool
'survey_url': url, only if show_survey_button is True
'grade': if status is not 'processing'
'can_unenroll': if status allows for unenrollment
"""
if not course_overview.may_certify():
return {}
return _cert_info(
user,
course_overview,
certificate_status_for_student(user, course_overview.id),
course_mode
)
def reverification_info(statuses):
"""
    Returns reverification-related information for *all* of the user's enrollments whose
reverification status is in statuses.
Args:
statuses (list): a list of reverification statuses we want information for
example: ["must_reverify", "denied"]
Returns:
        dictionary of lists: dictionary with one key per status, each mapping
            to a (possibly empty) list of reverification information, e.g.
            reverifications["must_reverify"] = [some information]
"""
reverifications = defaultdict(list)
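    # defaultdict(list) so that looking up any status key yields a list,
    # even when no reverification information was recorded for it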
# Sort the data by the reverification_end_date
for status in statuses:
if reverifications[status]:
reverifications[status].sort(key=lambda x: x.date)
return reverifications
def get_course_enrollments(user, org_to_include, orgs_to_exclude):
"""
Given a user, return a filtered set of his or her course enrollments.
Arguments:
user (User): the user in question.
org_to_include (str): If not None, ONLY courses of this org will be returned.
orgs_to_exclude (list[str]): If org_to_include is not None, this
argument is ignored. Else, courses of this org will be excluded.
Returns:
generator[CourseEnrollment]: a sequence of enrollments to be displayed
on the user's dashboard.
"""
for enrollment in CourseEnrollment.enrollments_for_user(user):
# If the course is missing or broken, log an error and skip it.
course_overview = enrollment.course_overview
if not course_overview:
log.error(
"User %s enrolled in broken or non-existent course %s",
user.username,
enrollment.course_id
)
continue
# Filter out anything that is not attributed to the current ORG.
if org_to_include and course_overview.location.org != org_to_include:
continue
        # Conversely, filter out any enrollments in courses attributed to an excluded org.
elif course_overview.location.org in orgs_to_exclude:
continue
# Else, include the enrollment.
else:
yield enrollment
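# Example usage of get_course_enrollments (org names hypothetical):
#   list(get_course_enrollments(user, "OrgX", [])) yields only "OrgX" enrollments;
#   list(get_course_enrollments(user, None, ["OrgX"])) yields all other valid enrollments.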
def _cert_info(user, course_overview, cert_status, course_mode): # pylint: disable=unused-argument
"""
Implements the logic for cert_info -- split out for testing.
Arguments:
user (User): A user.
        course_overview (CourseOverview): A course.
        cert_status (dict): Certificate status information for the user in this
            course, as returned by certificate_status_for_student.
        course_mode (str): The enrollment mode (honor, verified, audit, etc.)
"""
# simplify the status for the template using this lookup table
template_state = {
CertificateStatuses.generating: 'generating',
CertificateStatuses.downloadable: 'ready',
CertificateStatuses.notpassing: 'notpassing',
CertificateStatuses.restricted: 'restricted',
CertificateStatuses.auditing: 'auditing',
CertificateStatuses.audit_passing: 'auditing',
CertificateStatuses.audit_notpassing: 'auditing',
CertificateStatuses.unverified: 'unverified',
}
default_status = 'processing'
default_info = {
'status': default_status,
'show_disabled_download_button': False,
'show_download_url': False,
'show_survey_button': False,
'can_unenroll': True,
}
if cert_status is None:
return default_info
is_hidden_status = cert_status['status'] in ('unavailable', 'processing', 'generating', 'notpassing', 'auditing')
if course_overview.certificates_display_behavior == 'early_no_info' and is_hidden_status:
return {}
status = template_state.get(cert_status['status'], default_status)
status_dict = {
'status': status,
'show_download_url': status == 'ready',
'show_disabled_download_button': status == 'generating',
'mode': cert_status.get('mode', None),
'linked_in_url': None,
'can_unenroll': status not in DISABLE_UNENROLL_CERT_STATES,
}
if (status in ('generating', 'ready', 'notpassing', 'restricted', 'auditing', 'unverified') and
course_overview.end_of_course_survey_url is not None):
status_dict.update({
'show_survey_button': True,
'survey_url': process_survey_link(course_overview.end_of_course_survey_url, user)})
else:
status_dict['show_survey_button'] = False
if status == 'ready':
        # Show the certificate web view button if the certificate is in the
        # 'ready' state and the feature flags are enabled.
if has_html_certificates_enabled(course_overview.id, course_overview):
if course_overview.has_any_active_web_certificate:
status_dict.update({
'show_cert_web_view': True,
'cert_web_view_url': get_certificate_url(course_id=course_overview.id, uuid=cert_status['uuid'])
})
else:
# don't show download certificate button if we don't have an active certificate for course
status_dict['show_download_url'] = False
elif 'download_url' not in cert_status:
log.warning(
u"User %s has a downloadable cert for %s, but no download url",
user.username,
course_overview.id
)
return default_info
else:
status_dict['download_url'] = cert_status['download_url']
# If enabled, show the LinkedIn "add to profile" button
# Clicking this button sends the user to LinkedIn where they
# can add the certificate information to their profile.
linkedin_config = LinkedInAddToProfileConfiguration.current()
# posting certificates to LinkedIn is not currently
# supported in White Labels
if linkedin_config.enabled and not theming_helpers.is_request_in_themed_site():
status_dict['linked_in_url'] = linkedin_config.add_to_profile_url(
course_overview.id,
course_overview.display_name,
cert_status.get('mode'),
cert_status['download_url']
)
if status in {'generating', 'ready', 'notpassing', 'restricted', 'auditing', 'unverified'}:
persisted_grade = CourseGradeFactory().get_persisted(user, course_overview)
if persisted_grade is not None:
status_dict['grade'] = unicode(persisted_grade.percent)
elif 'grade' in cert_status:
status_dict['grade'] = cert_status['grade']
else:
# Note: as of 11/20/2012, we know there are students in this state-- cs169.1x,
# who need to be regraded (we weren't tracking 'notpassing' at first).
# We can add a log.warning here once we think it shouldn't happen.
return default_info
return status_dict
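# A typical (hypothetical, values illustrative) return value of _cert_info for
# a downloadable certificate:
#   {'status': 'ready', 'show_download_url': True, 'download_url': u'...',
#    'show_disabled_download_button': False, 'show_survey_button': False,
#    'mode': u'verified', 'linked_in_url': None, 'can_unenroll': False,
#    'grade': u'0.95'}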
@ensure_csrf_cookie
def signin_user(request):
"""Deprecated. To be replaced by :class:`student_account.views.login_and_registration_form`."""
external_auth_response = external_auth_login(request)
if external_auth_response is not None:
return external_auth_response
if UserProfile.has_registered(request.user):
return redirect(reverse('dashboard'))
# Determine the URL to redirect to following login:
redirect_to = get_next_url_for_login_page(request)
if request.user.is_authenticated():
return redirect(redirect_to)
third_party_auth_error = None
for msg in messages.get_messages(request):
        if msg.extra_tags and msg.extra_tags.split()[0] == "social-auth":
# msg may or may not be translated. Try translating [again] in case we are able to:
third_party_auth_error = _(unicode(msg)) # pylint: disable=translation-of-non-string
break
context = {
'login_redirect_url': redirect_to, # This gets added to the query string of the "Sign In" button in the header
# Bool injected into JS to submit form if we're inside a running third-
# party auth pipeline; distinct from the actual instance of the running
# pipeline, if any.
'pipeline_running': 'true' if pipeline.running(request) else 'false',
'pipeline_url': auth_pipeline_urls(pipeline.AUTH_ENTRY_LOGIN, redirect_url=redirect_to),
'platform_name': configuration_helpers.get_value(
'platform_name',
settings.PLATFORM_NAME
),
'third_party_auth_error': third_party_auth_error
}
return render_to_response('login.html', context)
@ensure_csrf_cookie
def register_user(request, extra_context=None):
"""Deprecated. To be replaced by :class:`student_account.views.login_and_registration_form`."""
# Determine the URL to redirect to following login:
redirect_to = get_next_url_for_login_page(request)
if UserProfile.has_registered(request.user):
return redirect(redirect_to)
external_auth_response = external_auth_register(request)
if external_auth_response is not None:
return external_auth_response
context = {
'login_redirect_url': redirect_to, # This gets added to the query string of the "Sign In" button in the header
'email': '',
'name': '',
'running_pipeline': None,
'pipeline_urls': auth_pipeline_urls(pipeline.AUTH_ENTRY_REGISTER, redirect_url=redirect_to),
'platform_name': configuration_helpers.get_value(
'platform_name',
settings.PLATFORM_NAME
),
'selected_provider': '',
'username': '',
}
if extra_context is not None:
context.update(extra_context)
if context.get("extauth_domain", '').startswith(
openedx.core.djangoapps.external_auth.views.SHIBBOLETH_DOMAIN_PREFIX
):
return render_to_response('register-shib.html', context)
# If third-party auth is enabled, prepopulate the form with data from the
# selected provider.
if third_party_auth.is_enabled() and pipeline.running(request):
running_pipeline = pipeline.get(request)
current_provider = provider.Registry.get_from_pipeline(running_pipeline)
if current_provider is not None:
overrides = current_provider.get_register_form_data(running_pipeline.get('kwargs'))
overrides['running_pipeline'] = running_pipeline
overrides['selected_provider'] = current_provider.name
context.update(overrides)
return render_to_response('register.html', context)
def complete_course_mode_info(course_id, enrollment, modes=None):
"""
    Compute additional information from the given course modes and the user's
    current enrollment.
    Returns:
    - whether to show the course upsell information
    - the number of days remaining in which an upsell is still possible
"""
if modes is None:
modes = CourseMode.modes_for_course_dict(course_id)
mode_info = {'show_upsell': False, 'days_for_upsell': None}
# we want to know if the user is already enrolled as verified or credit and
# if verified is an option.
if CourseMode.VERIFIED in modes and enrollment.mode in CourseMode.UPSELL_TO_VERIFIED_MODES:
mode_info['show_upsell'] = True
mode_info['verified_sku'] = modes['verified'].sku
mode_info['verified_bulk_sku'] = modes['verified'].bulk_sku
# if there is an expiration date, find out how long from now it is
if modes['verified'].expiration_datetime:
today = datetime.datetime.now(UTC).date()
mode_info['days_for_upsell'] = (modes['verified'].expiration_datetime.date() - today).days
return mode_info
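# Example return value (hypothetical course with an expiring verified mode):
#   {'show_upsell': True, 'days_for_upsell': 12,
#    'verified_sku': u'SKU123', 'verified_bulk_sku': u'SKU123-BULK'}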
def is_course_blocked(request, redeemed_registration_codes, course_key):
"""Checking either registration is blocked or not ."""
blocked = False
for redeemed_registration in redeemed_registration_codes:
        # Registration codes may be generated via the bulk purchase scenario;
        # we only need to check invoice-generated registration codes and
        # whether their invoice is still valid.
if redeemed_registration.invoice_item:
if not redeemed_registration.invoice_item.invoice.is_valid:
blocked = True
# disabling email notifications for unpaid registration courses
Optout.objects.get_or_create(user=request.user, course_id=course_key)
log.info(
u"User %s (%s) opted out of receiving emails from course %s",
request.user.username,
request.user.email,
course_key,
)
track.views.server_track(
request,
"change-email1-settings",
{"receive_emails": "no", "course": course_key.to_deprecated_string()},
page='dashboard',
)
break
return blocked
@login_required
@ensure_csrf_cookie
def dashboard(request):
"""
Provides the LMS dashboard view
TODO: This is lms specific and does not belong in common code.
Arguments:
request: The request object.
Returns:
The dashboard response.
"""
user = request.user
if not UserProfile.has_registered(user):
logout(request)
return redirect(reverse('dashboard'))
platform_name = configuration_helpers.get_value("platform_name", settings.PLATFORM_NAME)
enable_verified_certificates = configuration_helpers.get_value(
'ENABLE_VERIFIED_CERTIFICATES',
settings.FEATURES.get('ENABLE_VERIFIED_CERTIFICATES')
)
display_course_modes_on_dashboard = configuration_helpers.get_value(
'DISPLAY_COURSE_MODES_ON_DASHBOARD',
settings.FEATURES.get('DISPLAY_COURSE_MODES_ON_DASHBOARD', True)
)
# we want to filter and only show enrollments for courses within
# the 'ORG' defined in configuration.
course_org_filter = configuration_helpers.get_value('course_org_filter')
    # Filter out any courses in an org that has been declared in a site
    # configuration.
org_filter_out_set = configuration_helpers.get_all_orgs()
# remove our current org from the "filter out" list, if applicable
if course_org_filter:
org_filter_out_set.remove(course_org_filter)
# Build our (course, enrollment) list for the user, but ignore any courses that no
# longer exist (because the course IDs have changed). Still, we don't delete those
# enrollments, because it could have been a data push snafu.
course_enrollments = list(get_course_enrollments(user, course_org_filter, org_filter_out_set))
# sort the enrollment pairs by the enrollment date
course_enrollments.sort(key=lambda x: x.created, reverse=True)
# Retrieve the course modes for each course
enrolled_course_ids = [enrollment.course_id for enrollment in course_enrollments]
__, unexpired_course_modes = CourseMode.all_and_unexpired_modes_for_courses(enrolled_course_ids)
course_modes_by_course = {
course_id: {
mode.slug: mode
for mode in modes
}
for course_id, modes in unexpired_course_modes.iteritems()
}
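    # course_modes_by_course now maps each course id to a dict of
    # {mode_slug: CourseMode} covering that course's unexpired modes.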
# Check to see if the student has recently enrolled in a course.
# If so, display a notification message confirming the enrollment.
enrollment_message = _create_recent_enrollment_message(
course_enrollments, course_modes_by_course
)
course_optouts = Optout.objects.filter(user=user).values_list('course_id', flat=True)
message = ""
if not user.is_active:
message = render_to_string(
'registration/activate_account_notice.html',
{'email': user.email, 'platform_name': platform_name}
)
# Global staff can see what courses errored on their dashboard
staff_access = False
errored_courses = {}
if has_access(user, 'staff', 'global'):
# Show any courses that errored on load
staff_access = True
errored_courses = modulestore().get_errored_courses()
show_courseware_links_for = frozenset(
enrollment.course_id for enrollment in course_enrollments
if has_access(request.user, 'load', enrollment.course_overview)
and has_access(request.user, 'view_courseware_with_prerequisites', enrollment.course_overview)
)
# Find programs associated with courses being displayed. This information
# is passed in the template context to allow rendering of program-related
# information on the dashboard.
meter = programs_utils.ProgramProgressMeter(user, enrollments=course_enrollments)
programs_by_run = meter.engaged_programs(by_run=True)
# Construct a dictionary of course mode information
# used to render the course list. We re-use the course modes dict
# we loaded earlier to avoid hitting the database.
course_mode_info = {
enrollment.course_id: complete_course_mode_info(
enrollment.course_id, enrollment,
modes=course_modes_by_course[enrollment.course_id]
)
for enrollment in course_enrollments
}
# Determine the per-course verification status
# This is a dictionary in which the keys are course locators
# and the values are one of:
#
# VERIFY_STATUS_NEED_TO_VERIFY
# VERIFY_STATUS_SUBMITTED
# VERIFY_STATUS_APPROVED
# VERIFY_STATUS_MISSED_DEADLINE
#
# Each of which correspond to a particular message to display
# next to the course on the dashboard.
#
# If a course is not included in this dictionary,
# there is no verification messaging to display.
verify_status_by_course = check_verify_status_by_course(user, course_enrollments)
cert_statuses = {
enrollment.course_id: cert_info(request.user, enrollment.course_overview, enrollment.mode)
for enrollment in course_enrollments
}
    # only show email settings for courses where the bulk email feature is enabled
show_email_settings_for = frozenset(
enrollment.course_id for enrollment in course_enrollments if (
BulkEmailFlag.feature_enabled(enrollment.course_id)
)
)
# Verification Attempts
# Used to generate the "you must reverify for course x" banner
verification_status, verification_msg = SoftwareSecurePhotoVerification.user_status(user)
# Gets data for midcourse reverifications, if any are necessary or have failed
statuses = ["approved", "denied", "pending", "must_reverify"]
reverifications = reverification_info(statuses)
show_refund_option_for = frozenset(
enrollment.course_id for enrollment in course_enrollments
if enrollment.refundable()
)
block_courses = frozenset(
enrollment.course_id for enrollment in course_enrollments
if is_course_blocked(
request,
CourseRegistrationCode.objects.filter(
course_id=enrollment.course_id,
registrationcoderedemption__redeemed_by=request.user
),
enrollment.course_id
)
)
enrolled_courses_either_paid = frozenset(
enrollment.course_id for enrollment in course_enrollments
if enrollment.is_paid_course()
)
# If there are *any* denied reverifications that have not been toggled off,
# we'll display the banner
denied_banner = any(item.display for item in reverifications["denied"])
# Populate the Order History for the side-bar.
order_history_list = order_history(user, course_org_filter=course_org_filter, org_filter_out_set=org_filter_out_set)
# get list of courses having pre-requisites yet to be completed
courses_having_prerequisites = frozenset(
enrollment.course_id for enrollment in course_enrollments
if enrollment.course_overview.pre_requisite_courses
)
courses_requirements_not_met = get_pre_requisite_courses_not_completed(user, courses_having_prerequisites)
if 'notlive' in request.GET:
redirect_message = _("The course you are looking for does not start until {date}.").format(
date=request.GET['notlive']
)
elif 'course_closed' in request.GET:
redirect_message = _("The course you are looking for is closed for enrollment as of {date}.").format(
date=request.GET['course_closed']
)
else:
redirect_message = ''
context = {
'enrollment_message': enrollment_message,
'redirect_message': redirect_message,
'course_enrollments': course_enrollments,
'course_optouts': course_optouts,
'message': message,
'staff_access': staff_access,
'errored_courses': errored_courses,
'show_courseware_links_for': show_courseware_links_for,
'all_course_modes': course_mode_info,
'cert_statuses': cert_statuses,
'credit_statuses': _credit_statuses(user, course_enrollments),
'show_email_settings_for': show_email_settings_for,
'reverifications': reverifications,
'verification_status': verification_status,
'verification_status_by_course': verify_status_by_course,
'verification_msg': verification_msg,
'show_refund_option_for': show_refund_option_for,
'block_courses': block_courses,
'denied_banner': denied_banner,
'billing_email': settings.PAYMENT_SUPPORT_EMAIL,
'user': user,
'logout_url': reverse('logout'),
'platform_name': platform_name,
'enrolled_courses_either_paid': enrolled_courses_either_paid,
'provider_states': [],
'order_history_list': order_history_list,
'courses_requirements_not_met': courses_requirements_not_met,
'nav_hidden': True,
'programs_by_run': programs_by_run,
'show_program_listing': ProgramsApiConfig.current().show_program_listing,
'disable_courseware_js': True,
'display_course_modes_on_dashboard': enable_verified_certificates and display_course_modes_on_dashboard,
}
ecommerce_service = EcommerceService()
if ecommerce_service.is_enabled(request.user):
context.update({
'use_ecommerce_payment_flow': True,
'ecommerce_payment_page': ecommerce_service.payment_page_url(),
})
response = render_to_response('dashboard.html', context)
set_user_info_cookie(response, request)
return response
def _create_and_login_nonregistered_user(request):
new_student = UserProfile.create_nonregistered_user()
new_student.backend = settings.AUTHENTICATION_BACKENDS[0]
login(request, new_student)
    request.session.set_expiry(604800)  # 604800 seconds = 7 days; long sessions reduce the number of nonregistered users created
@require_POST
def setup_sneakpeek(request, course_id):
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
if not CoursePreference.course_allows_nonregistered_access(course_key):
return HttpResponseForbidden("Cannot access the course")
if not request.user.is_authenticated():
# if there's no user, create a nonregistered user
_create_and_login_nonregistered_user(request)
elif UserProfile.has_registered(request.user):
# registered users can't sneakpeek, so log them out and create a new nonregistered user
logout(request)
_create_and_login_nonregistered_user(request)
# fall-through case is a sneakpeek user that's already logged in
can_enroll, error_msg = _check_can_enroll_in_course(request.user,
course_key,
access_type='within_enrollment_period')
if not can_enroll:
log.error(error_msg)
return HttpResponseBadRequest(error_msg)
CourseEnrollment.enroll(request.user, course_key)
return HttpResponse("OK. Allowed sneakpeek")
def _create_recent_enrollment_message(course_enrollments, course_modes): # pylint: disable=invalid-name
"""
Builds a recent course enrollment message.
Constructs a new message template based on any recent course enrollments
for the student.
Args:
course_enrollments (list[CourseEnrollment]): a list of course enrollments.
course_modes (dict): Mapping of course ID's to course mode dictionaries.
Returns:
A string representing the HTML message output from the message template.
None if there are no recently enrolled courses.
"""
recently_enrolled_courses = _get_recently_enrolled_courses(course_enrollments)
if recently_enrolled_courses:
enroll_messages = [
{
"course_id": enrollment.course_overview.id,
"course_name": enrollment.course_overview.display_name,
"allow_donation": _allow_donation(course_modes, enrollment.course_overview.id, enrollment)
}
for enrollment in recently_enrolled_courses
]
platform_name = configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME)
return render_to_string(
'enrollment/course_enrollment_message.html',
{'course_enrollment_messages': enroll_messages, 'platform_name': platform_name}
)
def _get_recently_enrolled_courses(course_enrollments):
"""
Given a list of enrollments, filter out all but recent enrollments.
Args:
course_enrollments (list[CourseEnrollment]): A list of course enrollments.
Returns:
list[CourseEnrollment]: A list of recent course enrollments.
"""
seconds = DashboardConfiguration.current().recent_enrollment_time_delta
time_delta = (datetime.datetime.now(UTC) - datetime.timedelta(seconds=seconds))
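    # Enrollments created after this cutoff count as "recent".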
return [
enrollment for enrollment in course_enrollments
# If the enrollment has no created date, we are explicitly excluding the course
# from the list of recent enrollments.
if enrollment.is_active and enrollment.created > time_delta
]
def _allow_donation(course_modes, course_id, enrollment):
"""Determines if the dashboard will request donations for the given course.
Check if donations are configured for the platform, and if the current course is accepting donations.
Args:
course_modes (dict): Mapping of course ID's to course mode dictionaries.
course_id (str): The unique identifier for the course.
enrollment(CourseEnrollment): The enrollment object in which the user is enrolled
Returns:
True if the course is allowing donations.
"""
if course_id not in course_modes:
flat_unexpired_modes = {
unicode(course_id): [mode for mode in modes]
for course_id, modes in course_modes.iteritems()
}
flat_all_modes = {
unicode(course_id): [mode.slug for mode in modes]
for course_id, modes in CourseMode.all_modes_for_courses([course_id]).iteritems()
}
log.error(
            u'Cannot find `%s` in course modes `%s`. All modes: `%s`',
course_id,
flat_unexpired_modes,
flat_all_modes
)
donations_enabled = DonationConfiguration.current().enabled
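    # Request a donation only when donations are enabled platform-wide, the
    # user's enrollment mode is offered for this course, and that mode is
    # free (min_price == 0).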
return (
donations_enabled and
enrollment.mode in course_modes[course_id] and
course_modes[course_id][enrollment.mode].min_price == 0
)
def _update_email_opt_in(request, org):
"""Helper function used to hit the profile API if email opt-in is enabled."""
email_opt_in = request.POST.get('email_opt_in')
if email_opt_in is not None:
email_opt_in_boolean = email_opt_in == 'true'
preferences_api.update_email_opt_in(request.user, org, email_opt_in_boolean)
def _credit_statuses(user, course_enrollments):
"""
Retrieve the status for credit courses.
    A credit course is a course for which a user can purchase
college credit. The current flow is:
1. User becomes eligible for credit (submits verifications, passes the course, etc.)
2. User purchases credit from a particular credit provider.
3. User requests credit from the provider, usually creating an account on the provider's site.
4. The credit provider notifies us whether the user's request for credit has been accepted or rejected.
The dashboard is responsible for communicating the user's state in this flow.
Arguments:
user (User): The currently logged-in user.
course_enrollments (list[CourseEnrollment]): List of enrollments for the
user.
Returns: dict
The returned dictionary has keys that are `CourseKey`s and values that
are dictionaries with:
* eligible (bool): True if the user is eligible for credit in this course.
* deadline (datetime): The deadline for purchasing and requesting credit for this course.
* purchased (bool): Whether the user has purchased credit for this course.
* provider_name (string): The display name of the credit provider.
* provider_status_url (string): A URL the user can visit to check on their credit request status.
* request_status (string): Either "pending", "approved", or "rejected"
* error (bool): If true, an unexpected error occurred when retrieving the credit status,
so the user should contact the support team.
Example:
>>> _credit_statuses(user, course_enrollments)
{
CourseKey.from_string("edX/DemoX/Demo_Course"): {
"course_key": "edX/DemoX/Demo_Course",
"eligible": True,
"deadline": 2015-11-23 00:00:00 UTC,
"purchased": True,
"provider_name": "Hogwarts",
"provider_status_url": "http://example.com/status",
"request_status": "pending",
"error": False
}
}
"""
from openedx.core.djangoapps.credit import api as credit_api
    # If the credit eligibility feature is disabled, there is nothing to report.
if not settings.FEATURES.get("ENABLE_CREDIT_ELIGIBILITY"):
return {}
request_status_by_course = {
request["course_key"]: request["status"]
for request in credit_api.get_credit_requests_for_user(user.username)
}
credit_enrollments = {
enrollment.course_id: enrollment
for enrollment in course_enrollments
if enrollment.mode == "credit"
}
# When a user purchases credit in a course, the user's enrollment
# mode is set to "credit" and an enrollment attribute is set
# with the ID of the credit provider. We retrieve *all* such attributes
# here to minimize the number of database queries.
purchased_credit_providers = {
attribute.enrollment.course_id: attribute.value
for attribute in CourseEnrollmentAttribute.objects.filter(
namespace="credit",
name="provider_id",
enrollment__in=credit_enrollments.values()
).select_related("enrollment")
}
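    # purchased_credit_providers now maps each credit-mode course id to the
    # provider_id recorded when the user purchased credit.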
provider_info_by_id = {
provider["id"]: provider
for provider in credit_api.get_credit_providers()
}
statuses = {}
for eligibility in credit_api.get_eligibilities_for_user(user.username):
course_key = CourseKey.from_string(unicode(eligibility["course_key"]))
providers_names = get_credit_provider_display_names(course_key)
status = {
"course_key": unicode(course_key),
"eligible": True,
"deadline": eligibility["deadline"],
"purchased": course_key in credit_enrollments,
"provider_name": make_providers_strings(providers_names),
"provider_status_url": None,
"provider_id": None,
"request_status": request_status_by_course.get(course_key),
"error": False,
}
# If the user has purchased credit, then include information about the credit
# provider from which the user purchased credit.
        # We retrieve the provider's ID from an "enrollment attribute" set on the user's
# enrollment when the user's order for credit is fulfilled by the E-Commerce service.
if status["purchased"]:
provider_id = purchased_credit_providers.get(course_key)
if provider_id is None:
status["error"] = True
log.error(
u"Could not find credit provider associated with credit enrollment "
u"for user %s in course %s. The user will not be able to see his or her "
u"credit request status on the student dashboard. This attribute should "
u"have been set when the user purchased credit in the course.",
user.id, course_key
)
else:
provider_info = provider_info_by_id.get(provider_id, {})
status["provider_name"] = provider_info.get("display_name")
status["provider_status_url"] = provider_info.get("status_url")
status["provider_id"] = provider_id
statuses[course_key] = status
return statuses
@transaction.non_atomic_requests
@require_POST
@outer_atomic(read_committed=True)
def change_enrollment(request, check_access=True):
"""
Modify the enrollment status for the logged-in user.
TODO: This is lms specific and does not belong in common code.
The request parameter must be a POST request (other methods return 405)
that specifies course_id and enrollment_action parameters. If course_id or
enrollment_action is not specified, if course_id is not valid, if
enrollment_action is something other than "enroll" or "unenroll", if
enrollment_action is "enroll" and enrollment is closed for the course, or
if enrollment_action is "unenroll" and the user is not enrolled in the
course, a 400 error will be returned. If the user is not logged in, 403
will be returned; it is important that only this case return 403 so the
front end can redirect the user to a registration or login page when this
happens. This function should only be called from an AJAX request, so
the error messages in the responses should never actually be user-visible.
Args:
request (`Request`): The Django request object
Keyword Args:
check_access (boolean): If True, we check that an accessible course actually
exists for the given course_key before we enroll the student.
            The default is True, which is what any standard enrollment flow
            wants; pass False only for legacy code or code with non-standard
            flows (e.g. beta tester invitations).
Returns:
Response
"""
# Get the user
user = request.user
# Ensure the user is authenticated
if not UserProfile.has_registered(user):
return HttpResponseForbidden()
# Ensure we received a course_id
action = request.POST.get("enrollment_action")
if 'course_id' not in request.POST:
return HttpResponseBadRequest(_("Course id not specified"))
try:
course_id = SlashSeparatedCourseKey.from_deprecated_string(request.POST.get("course_id"))
except InvalidKeyError:
log.warning(
u"User %s tried to %s with invalid course id: %s",
user.username,
action,
request.POST.get("course_id"),
)
return HttpResponseBadRequest(_("Invalid course id"))
if action == "enroll":
# Make sure the course exists
        # We don't do this check on unenroll, so that users can still unenroll
        # from a course whose id has become invalid.
if not modulestore().has_course(course_id):
log.warning(
u"User %s tried to enroll in non-existent course %s",
user.username,
course_id
)
return HttpResponseBadRequest(_("Course id is invalid"))
can_enroll, error_msg = _check_can_enroll_in_course(user, course_id)
if not can_enroll:
return HttpResponseBadRequest(error_msg)
# Record the user's email opt-in preference
if settings.FEATURES.get('ENABLE_MKTG_EMAIL_OPT_IN'):
_update_email_opt_in(request, course_id.org)
available_modes = CourseMode.modes_for_course_dict(course_id)
# Check whether the user is blocked from enrolling in this course
# This can occur if the user's IP is on a global blacklist
# or if the user is enrolling in a country in which the course
# is not available.
redirect_url = embargo_api.redirect_if_blocked(
course_id, user=user, ip_address=get_ip(request),
url=request.path
)
if redirect_url:
return HttpResponse(redirect_url)
# Check that auto enrollment is allowed for this course
# (= the course is NOT behind a paywall)
if CourseMode.can_auto_enroll(course_id):
# Enroll the user using the default mode (audit)
# We're assuming that users of the course enrollment table
# will NOT try to look up the course enrollment model
# by its slug. If they do, it's possible (based on the state of the database)
# for no such model to exist, even though we've set the enrollment type
# to "audit".
try:
enroll_mode = CourseMode.auto_enroll_mode(course_id, available_modes)
if enroll_mode:
CourseEnrollment.enroll(user, course_id, check_access=check_access, mode=enroll_mode)
except Exception: # pylint: disable=broad-except
return HttpResponseBadRequest(_("Could not enroll"))
try:
course = modulestore().get_course(course_id)
except ItemNotFoundError:
log.warning("User {0} tried to enroll in non-existent course {1}"
.format(user.username, course_id))
return HttpResponseBadRequest(_("Course id is invalid"))
enrollment_email_result = json.loads(notify_enrollment_by_email(course, user, request).content)
if ('is_success' in enrollment_email_result and not enrollment_email_result['is_success']):
return HttpResponseBadRequest(_(enrollment_email_result['error']))
# If we have more than one course mode or professional ed is enabled,
# then send the user to the choose your track page.
# (In the case of no-id-professional/professional ed, this will redirect to a page that
# funnels users directly into the verification / payment flow)
if CourseMode.has_verified_mode(available_modes) or CourseMode.has_professional_mode(available_modes):
return HttpResponse(
reverse("course_modes_choose", kwargs={'course_id': unicode(course_id)})
)
# Otherwise, there is only one mode available (the default)
return HttpResponse()
elif action == "unenroll":
enrollment = CourseEnrollment.get_enrollment(user, course_id)
if not enrollment:
return HttpResponseBadRequest(_("You are not enrolled in this course"))
certificate_info = cert_info(user, enrollment.course_overview, enrollment.mode)
if certificate_info.get('status') in DISABLE_UNENROLL_CERT_STATES:
return HttpResponseBadRequest(_("Your certificate prevents you from unenrolling from this course"))
CourseEnrollment.unenroll(user, course_id)
return HttpResponse()
else:
return HttpResponseBadRequest(_("Enrollment action is invalid"))
def _check_can_enroll_in_course(user, course_key, access_type="enroll"):
"""
Refactored check for user being able to enroll in course
Returns (bool, error_message), where error message is only applicable if bool == False
"""
try:
course = modulestore().get_course(course_key)
except ItemNotFoundError:
log.warning("User {0} tried to enroll in non-existent course {1}"
.format(user.username, course_key))
return False, _("Course id is invalid")
if not has_access(user, access_type, course):
return False, _("Enrollment is closed")
return True, ""
# Need different levels of logging
@ensure_csrf_cookie
def login_user(request, error=""): # pylint: disable=too-many-statements,unused-argument
"""AJAX request to log in the user."""
backend_name = None
email = None
password = None
redirect_url = None
response = None
running_pipeline = None
third_party_auth_requested = third_party_auth.is_enabled() and pipeline.running(request)
third_party_auth_successful = False
trumped_by_first_party_auth = bool(request.POST.get('email')) or bool(request.POST.get('password'))
user = None
platform_name = configuration_helpers.get_value("platform_name", settings.PLATFORM_NAME)
if third_party_auth_requested and not trumped_by_first_party_auth:
# The user has already authenticated via third-party auth and has not
# asked to do first party auth by supplying a username or password. We
# now want to put them through the same logging and cookie calculation
# logic as with first-party auth.
running_pipeline = pipeline.get(request)
username = running_pipeline['kwargs'].get('username')
backend_name = running_pipeline['backend']
third_party_uid = running_pipeline['kwargs']['uid']
requested_provider = provider.Registry.get_from_pipeline(running_pipeline)
try:
user = pipeline.get_authenticated_user(requested_provider, username, third_party_uid)
third_party_auth_successful = True
except User.DoesNotExist:
AUDIT_LOG.warning(
u"Login failed - user with username {username} has no social auth "
"with backend_name {backend_name}".format(
username=username, backend_name=backend_name)
)
message = _(
"You've successfully logged into your {provider_name} account, "
"but this account isn't linked with an {platform_name} account yet."
).format(
platform_name=platform_name,
provider_name=requested_provider.name,
)
message += "<br/><br/>"
message += _(
"Use your {platform_name} username and password to log into {platform_name} below, "
"and then link your {platform_name} account with {provider_name} from your dashboard."
).format(
platform_name=platform_name,
provider_name=requested_provider.name,
)
message += "<br/><br/>"
message += _(
"If you don't have an {platform_name} account yet, "
"click <strong>Register</strong> at the top of the page."
).format(
platform_name=platform_name
)
return HttpResponse(message, content_type="text/plain", status=403)
else:
if 'email' not in request.POST or 'password' not in request.POST:
return JsonResponse({
"success": False,
# TODO: User error message
"value": _('There was an error receiving your login information. Please email us.'),
}) # TODO: this should be status code 400
email = request.POST['email']
password = request.POST['password']
try:
user = User.objects.get(email=email)
except User.DoesNotExist:
if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
AUDIT_LOG.warning(u"Login failed - Unknown user email")
else:
AUDIT_LOG.warning(u"Login failed - Unknown user email: {0}".format(email))
# check if the user has a linked shibboleth account, if so, redirect the user to shib-login
# This behavior is pretty much like what gmail does for shibboleth. Try entering some @stanford.edu
# address into the Gmail login.
if settings.FEATURES.get('AUTH_USE_SHIB') and user:
try:
eamap = ExternalAuthMap.objects.get(user=user)
if eamap.external_domain.startswith(openedx.core.djangoapps.external_auth.views.SHIBBOLETH_DOMAIN_PREFIX):
return JsonResponse({
"success": False,
"redirect": reverse('shib-login'),
}) # TODO: this should be status code 301 # pylint: disable=fixme
except ExternalAuthMap.DoesNotExist:
# This is actually the common case, logging in user without external linked login
AUDIT_LOG.info(u"User %s w/o external auth attempting login", user)
# see if account has been locked out due to excessive login failures
user_found_by_email_lookup = user
if user_found_by_email_lookup and LoginFailures.is_feature_enabled():
if LoginFailures.is_user_locked_out(user_found_by_email_lookup):
lockout_message = ungettext(
"This account has been temporarily locked due to excessive login failures. "
"Try again in {minutes} minute. For security reasons, "
"resetting the password will NOT lift the lockout. Please wait for {minutes} minute.",
"This account has been temporarily locked due to excessive login failures. "
"Try again in {minutes} minutes. For security reasons, "
"resetting the password will NOT lift the lockout. Please wait for {minutes} minutes.",
LOGIN_LOCKOUT_PERIOD_PLUS_FIVE_MINUTES
).format(
minutes=LOGIN_LOCKOUT_PERIOD_PLUS_FIVE_MINUTES,
)
return JsonResponse({
"success": False,
"value": lockout_message,
}) # TODO: this should be status code 429 # pylint: disable=fixme
# see if the user must reset his/her password due to any policy settings
if user_found_by_email_lookup and PasswordHistory.should_user_reset_password_now(user_found_by_email_lookup):
return JsonResponse({
"success": False,
"value": _('Your password has expired due to password policy on this account. You must '
'reset your password before you can log in again. Please click the '
'"Forgot Password" link on this page to reset your password before logging in again.'),
}) # TODO: this should be status code 403 # pylint: disable=fixme
# if the user doesn't exist, we want to set the username to an invalid
# username so that authentication is guaranteed to fail and we can take
# advantage of the ratelimited backend
username = user.username if user else ""
if not third_party_auth_successful:
try:
user = authenticate(username=username, password=password, request=request)
# this occurs when there are too many attempts from the same IP address
except RateLimitException:
return JsonResponse({
"success": False,
"value": _('Too many failed login attempts. Try again later.'),
}) # TODO: this should be status code 429 # pylint: disable=fixme
if user is None:
# tick the failed login counters if the user exists in the database
if user_found_by_email_lookup and LoginFailures.is_feature_enabled():
LoginFailures.increment_lockout_counter(user_found_by_email_lookup)
# if we didn't find this username earlier, the account for this email
# doesn't exist, and doesn't have a corresponding password
if username != "":
if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
loggable_id = user_found_by_email_lookup.id if user_found_by_email_lookup else "<unknown>"
AUDIT_LOG.warning(u"Login failed - password for user.id: {0} is invalid".format(loggable_id))
else:
AUDIT_LOG.warning(u"Login failed - password for {0} is invalid".format(email))
return JsonResponse({
"success": False,
"value": _('Email or password is incorrect.'),
}) # TODO: this should be status code 400 # pylint: disable=fixme
# successful login, clear failed login attempts counters, if applicable
if LoginFailures.is_feature_enabled():
LoginFailures.clear_lockout_counter(user)
# Track the user's sign in
if hasattr(settings, 'LMS_SEGMENT_KEY') and settings.LMS_SEGMENT_KEY:
tracking_context = tracker.get_tracker().resolve_context()
analytics.identify(
user.id,
{
'email': email,
'username': username
},
{
# Disable MailChimp because we don't want to update the user's email
# and username in MailChimp on every page load. We only need to capture
# this data on registration/activation.
'MailChimp': False
}
)
analytics.track(
user.id,
"edx.bi.user.account.authenticated",
{
'category': "conversion",
'label': request.POST.get('course_id'),
'provider': None
},
context={
'ip': tracking_context.get('ip'),
'Google Analytics': {
'clientId': tracking_context.get('client_id')
}
}
)
if user is not None and user.is_active:
try:
# We do not log here, because we have a handler registered
# to perform logging on successful logins.
login(request, user)
if request.POST.get('remember') == 'true':
                request.session.set_expiry(604800)  # 604800 seconds = 7 days
                log.debug("Setting user session expiry to one week")
else:
request.session.set_expiry(0)
except Exception as exc: # pylint: disable=broad-except
AUDIT_LOG.critical("Login failed - Could not create session. Is memcached running?")
log.critical("Login failed - Could not create session. Is memcached running?")
log.exception(exc)
raise
redirect_url = None # The AJAX method calling should know the default destination upon success
if third_party_auth_successful:
redirect_url = pipeline.get_complete_url(backend_name)
response = JsonResponse({
"success": True,
"redirect_url": redirect_url,
})
# Ensure that the external marketing site can
# detect that the user is logged in.
return set_logged_in_cookies(request, response, user)
if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
AUDIT_LOG.warning(u"Login failed - Account not active for user.id: {0}, resending activation".format(user.id))
else:
AUDIT_LOG.warning(u"Login failed - Account not active for user {0}, resending activation".format(username))
reactivation_email_for_user(user)
not_activated_msg = _("Before you sign in, you need to activate your account. We have sent you an "
"email message with instructions for activating your account.")
return JsonResponse({
"success": False,
"value": not_activated_msg,
}) # TODO: this should be status code 400 # pylint: disable=fixme
@csrf_exempt
@require_POST
@social_utils.strategy("social:complete")
def login_oauth_token(request, backend):
"""
Authenticate the client using an OAuth access token by using the token to
retrieve information from a third party and matching that information to an
existing user.
"""
warnings.warn("Please use AccessTokenExchangeView instead.", DeprecationWarning)
backend = request.backend
if isinstance(backend, social_oauth.BaseOAuth1) or isinstance(backend, social_oauth.BaseOAuth2):
if "access_token" in request.POST:
# Tell third party auth pipeline that this is an API call
request.session[pipeline.AUTH_ENTRY_KEY] = pipeline.AUTH_ENTRY_LOGIN_API
user = None
try:
user = backend.do_auth(request.POST["access_token"])
except (HTTPError, AuthException):
pass
# do_auth can return a non-User object if it fails
if user and isinstance(user, User):
login(request, user)
return JsonResponse(status=204)
else:
# Ensure user does not re-enter the pipeline
request.social_strategy.clean_partial_pipeline()
return JsonResponse({"error": "invalid_token"}, status=401)
else:
return JsonResponse({"error": "invalid_request"}, status=400)
raise Http404
@require_GET
@login_required
@ensure_csrf_cookie
def manage_user_standing(request):
"""
Renders the view used to manage user standing. Also displays a table
of user accounts that have been disabled and who disabled them.
"""
if not request.user.is_staff:
raise Http404
all_disabled_accounts = UserStanding.objects.filter(
account_status=UserStanding.ACCOUNT_DISABLED
)
all_disabled_users = [standing.user for standing in all_disabled_accounts]
headers = ['username', 'account_changed_by']
rows = []
for user in all_disabled_users:
row = [user.username, user.standing.changed_by]
rows.append(row)
context = {'headers': headers, 'rows': rows}
return render_to_response("manage_user_standing.html", context)
@require_POST
@login_required
@ensure_csrf_cookie
def disable_account_ajax(request):
"""
Ajax call to change user standing. Endpoint of the form
in manage_user_standing.html
"""
if not request.user.is_staff:
raise Http404
username = request.POST.get('username')
context = {}
if username is None or username.strip() == '':
context['message'] = _('Please enter a username')
return JsonResponse(context, status=400)
account_action = request.POST.get('account_action')
if account_action is None:
context['message'] = _('Please choose an option')
return JsonResponse(context, status=400)
username = username.strip()
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
context['message'] = _("User with username {} does not exist").format(username)
return JsonResponse(context, status=400)
else:
user_account, _success = UserStanding.objects.get_or_create(
user=user, defaults={'changed_by': request.user},
)
if account_action == 'disable':
user_account.account_status = UserStanding.ACCOUNT_DISABLED
context['message'] = _("Successfully disabled {}'s account").format(username)
log.info(u"%s disabled %s's account", request.user, username)
elif account_action == 'reenable':
user_account.account_status = UserStanding.ACCOUNT_ENABLED
context['message'] = _("Successfully reenabled {}'s account").format(username)
log.info(u"%s reenabled %s's account", request.user, username)
else:
context['message'] = _("Unexpected account status")
return JsonResponse(context, status=400)
user_account.changed_by = request.user
user_account.standing_last_changed_at = datetime.datetime.now(UTC)
user_account.save()
return JsonResponse(context)
@login_required
@ensure_csrf_cookie
def change_setting(request):
"""JSON call to change a profile setting: Right now, location"""
# TODO (vshnayder): location is no longer used
u_prof = UserProfile.objects.get(user=request.user) # request.user.profile_cache
if 'location' in request.POST:
u_prof.location = request.POST['location']
u_prof.save()
return JsonResponse({
"success": True,
"location": u_prof.location,
})
class AccountValidationError(Exception):
def __init__(self, message, field):
super(AccountValidationError, self).__init__(message)
self.field = field
@receiver(post_save, sender=User)
def user_signup_handler(sender, **kwargs): # pylint: disable=unused-argument
"""
    Post-save handler that records the user's signup source (site)
    when the user is created.
"""
if 'created' in kwargs and kwargs['created']:
site = configuration_helpers.get_value('SITE_NAME')
if site:
user_signup_source = UserSignupSource(user=kwargs['instance'], site=site)
user_signup_source.save()
log.info(u'user {} originated from a white labeled "Microsite"'.format(kwargs['instance'].id))
def _do_create_account(form, custom_form=None):
"""
Given cleaned post variables, create the User and UserProfile objects, as well as the
registration for this user.
Returns a tuple (User, UserProfile, Registration).
Note: this function is also used for creating test users.
"""
errors = {}
errors.update(form.errors)
if custom_form:
errors.update(custom_form.errors)
if errors:
raise ValidationError(errors)
user = User(
username=form.cleaned_data["username"],
email=form.cleaned_data["email"],
is_active=False
)
user.set_password(form.cleaned_data["password"])
registration = Registration()
# TODO: Rearrange so that if part of the process fails, the whole process fails.
# Right now, we can have e.g. no registration e-mail sent out and a zombie account
try:
with transaction.atomic():
user.save()
if custom_form:
custom_model = custom_form.save(commit=False)
custom_model.user = user
custom_model.save()
except IntegrityError:
# Figure out the cause of the integrity error
        if User.objects.filter(username=user.username).exists():
raise AccountValidationError(
_("An account with the Public Username '{username}' already exists.").format(username=user.username),
field="username"
)
        elif User.objects.filter(email=user.email).exists():
raise AccountValidationError(
_("An account with the Email '{email}' already exists.").format(email=user.email),
field="email"
)
else:
raise
# add this account creation to password history
# NOTE, this will be a NOP unless the feature has been turned on in configuration
password_history_entry = PasswordHistory()
password_history_entry.create(user)
registration.register(user)
profile_fields = [
"name", "level_of_education", "gender", "mailing_address", "city", "country", "goals",
"year_of_birth"
]
profile = UserProfile(
user=user,
**{key: form.cleaned_data.get(key) for key in profile_fields}
)
extended_profile = form.cleaned_extended_profile
if extended_profile:
profile.meta = json.dumps(extended_profile)
try:
profile.save()
except Exception: # pylint: disable=broad-except
log.exception("UserProfile creation failed for user {id}.".format(id=user.id))
raise
return (user, profile, registration)
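# Minimal usage sketch for _do_create_account (this mirrors what auto_auth
# below does when creating test users; the field values are illustrative):
#   form = AccountCreationForm(
#       data={'username': 'test', 'email': '[email protected]', 'password': 'pass', 'name': 'Test'},
#       tos_required=False,
#   )
#   user, profile, registration = _do_create_account(form)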
def create_account_with_params(request, params):
"""
Given a request and a dict of parameters (which may or may not have come
from the request), create an account for the requesting user, including
creating a comments service user object and sending an activation email.
This also takes external/third-party auth into account, updates that as
necessary, and authenticates the user for the request's session.
    Returns the newly created user object.
Raises AccountValidationError if an account with the username or email
specified by params already exists, or ValidationError if any of the given
parameters is invalid for any other reason.
Issues with this code:
* It is not transactional. If there is a failure part-way, an incomplete
account will be created and left in the database.
    * Third-party auth passwords are not verified. There is a comment that
      they are unused, but it would be helpful to have a sanity check that
      they are reasonable.
    * It is over 300 lines long (!) and includes disparate functionality, from
registration e-mails to all sorts of other things. It should be broken
up into semantically meaningful functions.
* The user-facing text is rather unfriendly (e.g. "Username must be a
minimum of two characters long" rather than "Please use a username of
at least two characters").
"""
# Copy params so we can modify it; we can't just do dict(params) because if
# params is request.POST, that results in a dict containing lists of values
params = dict(params.items())
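    # Illustration of the QueryDict pitfall mentioned above (values are
    # hypothetical): for a QueryDict built from "a=1&a=2", dict(qd) yields
    # {'a': ['1', '2']}, while dict(qd.items()) yields {'a': '2'}.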
    # Allow defining a custom set of required/optional/hidden fields via configuration
extra_fields = configuration_helpers.get_value(
'REGISTRATION_EXTRA_FIELDS',
getattr(settings, 'REGISTRATION_EXTRA_FIELDS', {})
)
# Boolean of whether a 3rd party auth provider and credentials were provided in
# the API so the newly created account can link with the 3rd party account.
#
# Note: this is orthogonal to the 3rd party authentication pipeline that occurs
# when the account is created via the browser and redirect URLs.
should_link_with_social_auth = third_party_auth.is_enabled() and 'provider' in params
if should_link_with_social_auth or (third_party_auth.is_enabled() and pipeline.running(request)):
params["password"] = pipeline.make_random_password()
# Add a form requirement for data sharing consent if the EnterpriseCustomer
# for the request requires it at login
extra_fields['data_sharing_consent'] = data_sharing_consent_requirement_at_login(request)
    # If doing signup for an external authorization, get the email, password, and name
    # from the eamap instead of the form, since the user could have tampered with those;
    # the exception is when the external auth did not originally supply a valid email or name.
# TODO: We do not check whether these values meet all necessary criteria, such as email length
do_external_auth = 'ExternalAuthMap' in request.session
if do_external_auth:
eamap = request.session['ExternalAuthMap']
try:
validate_email(eamap.external_email)
params["email"] = eamap.external_email
except ValidationError:
pass
if eamap.external_name.strip() != '':
params["name"] = eamap.external_name
params["password"] = eamap.internal_password
log.debug(u'In create_account with external_auth: user = %s, email=%s', params["name"], params["email"])
extended_profile_fields = configuration_helpers.get_value('extended_profile_fields', [])
enforce_password_policy = (
settings.FEATURES.get("ENFORCE_PASSWORD_POLICY", False) and
not do_external_auth
)
# Can't have terms of service for certain SHIB users, like at Stanford
registration_fields = getattr(settings, 'REGISTRATION_EXTRA_FIELDS', {})
tos_required = (
registration_fields.get('terms_of_service') != 'hidden' or
registration_fields.get('honor_code') != 'hidden'
) and (
not settings.FEATURES.get("AUTH_USE_SHIB") or
not settings.FEATURES.get("SHIB_DISABLE_TOS") or
not do_external_auth or
not eamap.external_domain.startswith(openedx.core.djangoapps.external_auth.views.SHIBBOLETH_DOMAIN_PREFIX)
)
if not tos_required:
extra_fields.pop('terms_of_service', None)
form = AccountCreationForm(
data=params,
extra_fields=extra_fields,
extended_profile_fields=extended_profile_fields,
enforce_username_neq_password=True,
enforce_password_policy=enforce_password_policy,
tos_required=tos_required,
)
custom_form = get_registration_extension_form(data=params)
# Perform operations within a transaction that are critical to account creation
with transaction.atomic():
# first, create the account
(user, profile, registration) = _do_create_account(form, custom_form)
# next, link the account with social auth, if provided via the API.
# (If the user is using the normal register page, the social auth pipeline does the linking, not this code)
if should_link_with_social_auth:
backend_name = params['provider']
request.social_strategy = social_utils.load_strategy(request)
redirect_uri = reverse('social:complete', args=(backend_name, ))
request.backend = social_utils.load_backend(request.social_strategy, backend_name, redirect_uri)
social_access_token = params.get('access_token')
if not social_access_token:
raise ValidationError({
'access_token': [
_("An access_token is required when passing value ({}) for provider.").format(
params['provider']
)
]
})
request.session[pipeline.AUTH_ENTRY_KEY] = pipeline.AUTH_ENTRY_REGISTER_API
pipeline_user = None
error_message = ""
try:
pipeline_user = request.backend.do_auth(social_access_token, user=user)
except AuthAlreadyAssociated:
error_message = _("The provided access_token is already associated with another user.")
except (HTTPError, AuthException):
error_message = _("The provided access_token is not valid.")
if not pipeline_user or not isinstance(pipeline_user, User):
# Ensure user does not re-enter the pipeline
request.social_strategy.clean_partial_pipeline()
raise ValidationError({'access_token': [error_message]})
# Perform operations that are non-critical parts of account creation
preferences_api.set_user_preference(user, LANGUAGE_KEY, get_language())
if settings.FEATURES.get('ENABLE_DISCUSSION_EMAIL_DIGEST'):
try:
enable_notifications(user)
except Exception: # pylint: disable=broad-except
log.exception("Enable discussion notifications failed for user {id}.".format(id=user.id))
dog_stats_api.increment("common.student.account_created")
# If the user is registering via 3rd party auth, track which provider they use
third_party_provider = None
running_pipeline = None
if third_party_auth.is_enabled() and pipeline.running(request):
running_pipeline = pipeline.get(request)
third_party_provider = provider.Registry.get_from_pipeline(running_pipeline)
# Store received data sharing consent field values in the pipeline for use
# by any downstream pipeline elements which require them.
running_pipeline['kwargs']['data_sharing_consent'] = form.cleaned_data.get('data_sharing_consent', None)
# Track the user's registration
if hasattr(settings, 'LMS_SEGMENT_KEY') and settings.LMS_SEGMENT_KEY:
tracking_context = tracker.get_tracker().resolve_context()
identity_args = [
user.id, # pylint: disable=no-member
{
'email': user.email,
'username': user.username,
'name': profile.name,
                # Mailchimp requires the age & yearOfBirth to be integers; we send a sane integer default if falsy.
'age': profile.age or -1,
'yearOfBirth': profile.year_of_birth or datetime.datetime.now(UTC).year,
'education': profile.level_of_education_display,
'address': profile.mailing_address,
'gender': profile.gender_display,
'country': unicode(profile.country),
}
]
if hasattr(settings, 'MAILCHIMP_NEW_USER_LIST_ID'):
identity_args.append({
"MailChimp": {
"listId": settings.MAILCHIMP_NEW_USER_LIST_ID
}
})
analytics.identify(*identity_args)
analytics.track(
user.id,
"edx.bi.user.account.registered",
{
'category': 'conversion',
'label': params.get('course_id'),
'provider': third_party_provider.name if third_party_provider else None
},
context={
'ip': tracking_context.get('ip'),
'Google Analytics': {
'clientId': tracking_context.get('client_id')
}
}
)
# Announce registration
REGISTER_USER.send(sender=None, user=user, profile=profile)
create_comments_service_user(user)
# Don't send email if we are:
#
# 1. Doing load testing.
# 2. Random user generation for other forms of testing.
# 3. External auth bypassing activation.
# 4. Have the platform configured to not require e-mail activation.
# 5. Registering a new user using a trusted third party provider (with skip_email_verification=True)
#
# Note that this feature is only tested as a flag set one way or
    # the other for *new* systems. We need to be careful about
# changing settings on a running system to make sure no users are
# left in an inconsistent state (or doing a migration if they are).
send_email = (
not settings.FEATURES.get('SKIP_EMAIL_VALIDATION', None) and
not settings.FEATURES.get('AUTOMATIC_AUTH_FOR_TESTING') and
not (do_external_auth and settings.FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH')) and
not (
third_party_provider and third_party_provider.skip_email_verification and
user.email == running_pipeline['kwargs'].get('details', {}).get('email')
)
)
if send_email:
dest_addr = user.email
context = {
'name': profile.name,
'key': registration.activation_key,
}
# composes activation email
subject = render_to_string('emails/activation_email_subject.txt', context)
# Email subject *must not* contain newlines
subject = ''.join(subject.splitlines())
message = render_to_string('emails/activation_email.txt', context)
from_address = configuration_helpers.get_value(
'email_from_address',
settings.DEFAULT_FROM_EMAIL
)
if settings.FEATURES.get('REROUTE_ACTIVATION_EMAIL'):
dest_addr = settings.FEATURES['REROUTE_ACTIVATION_EMAIL']
message = ("Activation for %s (%s): %s\n" % (user, user.email, profile.name) +
'-' * 80 + '\n\n' + message)
send_activation_email.delay(subject, message, from_address, dest_addr)
else:
registration.activate()
_enroll_user_in_pending_courses(user) # Enroll student in any pending courses
# Immediately after a user creates an account, we log them in. They are only
# logged in until they close the browser. They can't log in again until they click
# the activation link from the email.
new_user = authenticate(username=user.username, password=params['password'])
login(request, new_user)
request.session.set_expiry(0)
try:
record_registration_attributions(request, new_user)
# Don't prevent a user from registering due to attribution errors.
except Exception: # pylint: disable=broad-except
log.exception('Error while attributing cookies to user registration.')
# TODO: there is no error checking here to see that the user actually logged in successfully,
# and is not yet an active user.
if new_user is not None:
AUDIT_LOG.info(u"Login success on new account creation - {0}".format(new_user.username))
if do_external_auth:
eamap.user = new_user
eamap.dtsignup = datetime.datetime.now(UTC)
eamap.save()
AUDIT_LOG.info(u"User registered with external_auth %s", new_user.username)
AUDIT_LOG.info(u'Updated ExternalAuthMap for %s to be %s', new_user.username, eamap)
if settings.FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH'):
log.info('bypassing activation email')
new_user.is_active = True
new_user.save()
AUDIT_LOG.info(u"Login activated on extauth account - {0} ({1})".format(new_user.username, new_user.email))
return new_user
def _enroll_user_in_pending_courses(student):
"""
    Enroll the student in any pending courses they may have.
"""
ceas = CourseEnrollmentAllowed.objects.filter(email=student.email)
for cea in ceas:
if cea.auto_enroll:
enrollment = CourseEnrollment.enroll(student, cea.course_id)
manual_enrollment_audit = ManualEnrollmentAudit.get_manual_enrollment_by_email(student.email)
if manual_enrollment_audit is not None:
# get the enrolled by user and reason from the ManualEnrollmentAudit table.
# then create a new ManualEnrollmentAudit table entry for the same email
# different transition state.
ManualEnrollmentAudit.create_manual_enrollment_audit(
manual_enrollment_audit.enrolled_by, student.email, ALLOWEDTOENROLL_TO_ENROLLED,
manual_enrollment_audit.reason, enrollment
)
def record_affiliate_registration_attribution(request, user):
"""
Attribute this user's registration to the referring affiliate, if
applicable.
"""
affiliate_id = request.COOKIES.get(settings.AFFILIATE_COOKIE_NAME)
if user and affiliate_id:
UserAttribute.set_user_attribute(user, REGISTRATION_AFFILIATE_ID, affiliate_id)
def record_utm_registration_attribution(request, user):
"""
Attribute this user's registration to the latest UTM referrer, if
applicable.
"""
utm_cookie_name = RegistrationCookieConfiguration.current().utm_cookie_name
utm_cookie = request.COOKIES.get(utm_cookie_name)
if user and utm_cookie:
utm = json.loads(utm_cookie)
for utm_parameter_name in REGISTRATION_UTM_PARAMETERS:
utm_parameter = utm.get(utm_parameter_name)
if utm_parameter:
UserAttribute.set_user_attribute(
user,
REGISTRATION_UTM_PARAMETERS.get(utm_parameter_name),
utm_parameter
)
created_at_unixtime = utm.get('created_at')
if created_at_unixtime:
            # We divide by 1000 here because the JavaScript timestamp generated is in milliseconds, not seconds.
# PYTHON: time.time() => 1475590280.823698
# JS: new Date().getTime() => 1475590280823
created_at_datetime = datetime.datetime.fromtimestamp(int(created_at_unixtime) / float(1000), tz=UTC)
UserAttribute.set_user_attribute(
user,
REGISTRATION_UTM_CREATED_AT,
created_at_datetime
)
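# Illustrative UTM cookie payload (actual keys come from the configurable
# REGISTRATION_UTM_PARAMETERS mapping; the values below are made up):
#   {"utm_source": "newsletter", "utm_medium": "email", "created_at": 1475590280823}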
def record_registration_attributions(request, user):
"""
Attribute this user's registration based on referrer cookies.
"""
record_affiliate_registration_attribution(request, user)
record_utm_registration_attribution(request, user)
@csrf_exempt
def create_account(request, post_override=None):
"""
JSON call to create new edX account.
Used by form in signup_modal.html, which is included into navigation.html
"""
warnings.warn("Please use RegistrationView instead.", DeprecationWarning)
try:
user = create_account_with_params(request, post_override or request.POST)
except AccountValidationError as exc:
return JsonResponse({'success': False, 'value': exc.message, 'field': exc.field}, status=400)
except ValidationError as exc:
field, error_list = next(exc.message_dict.iteritems())
return JsonResponse(
{
"success": False,
"field": field,
"value": error_list[0],
},
status=400
)
    redirect_url = None  # The calling AJAX method should know the default destination upon success
# Resume the third-party-auth pipeline if necessary.
if third_party_auth.is_enabled() and pipeline.running(request):
running_pipeline = pipeline.get(request)
redirect_url = pipeline.get_complete_url(running_pipeline['backend'])
response = JsonResponse({
'success': True,
'redirect_url': redirect_url,
})
set_logged_in_cookies(request, response, user)
return response
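# Example JSON payload returned to the AJAX caller on success (illustrative;
# redirect_url is only populated when a third-party-auth pipeline is running):
#   {"success": true, "redirect_url": null}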
def auto_auth(request):
"""
Create or configure a user account, then log in as that user.
Enabled only when
settings.FEATURES['AUTOMATIC_AUTH_FOR_TESTING'] is true.
Accepts the following querystring parameters:
* `username`, `email`, and `password` for the user account
* `full_name` for the user profile (the user's full name; defaults to the username)
* `staff`: Set to "true" to make the user global staff.
* `course_id`: Enroll the student in the course with `course_id`
* `roles`: Comma-separated list of roles to grant the student in the course with `course_id`
    * `no_login`: Define this to create the user but not log in
    * `redirect`: Set to "true" to redirect to the `redirect_to` value if set, or to the
      course home page if course_id is defined; otherwise redirects to the dashboard
    * `redirect_to`: will redirect to this URL
    * `is_active`: create or update the account with the active status provided as 'is_active'
If username, email, or password are not provided, use
randomly generated credentials.
"""
# Generate a unique name to use if none provided
unique_name = uuid.uuid4().hex[0:30]
# Use the params from the request, otherwise use these defaults
username = request.GET.get('username', unique_name)
password = request.GET.get('password', unique_name)
email = request.GET.get('email', unique_name + "@example.com")
full_name = request.GET.get('full_name', username)
is_staff = request.GET.get('staff', None)
is_superuser = request.GET.get('superuser', None)
course_id = request.GET.get('course_id', None)
redirect_to = request.GET.get('redirect_to', None)
active_status = request.GET.get('is_active')
# mode has to be one of 'honor'/'professional'/'verified'/'audit'/'no-id-professional'/'credit'
enrollment_mode = request.GET.get('enrollment_mode', 'honor')
active_status = (not active_status or active_status == 'true')
course_key = None
if course_id:
course_key = CourseLocator.from_string(course_id)
role_names = [v.strip() for v in request.GET.get('roles', '').split(',') if v.strip()]
redirect_when_done = request.GET.get('redirect', '').lower() == 'true' or redirect_to
login_when_done = 'no_login' not in request.GET
form = AccountCreationForm(
data={
'username': username,
'email': email,
'password': password,
'name': full_name,
},
tos_required=False
)
# Attempt to create the account.
    # If successful, this will return a tuple containing
    # the new user, profile, and registration objects.
try:
user, profile, reg = _do_create_account(form)
except (AccountValidationError, ValidationError):
# Attempt to retrieve the existing user.
user = User.objects.get(username=username)
user.email = email
user.set_password(password)
user.is_active = active_status
user.save()
profile = UserProfile.objects.get(user=user)
reg = Registration.objects.get(user=user)
# Set the user's global staff bit
if is_staff is not None:
user.is_staff = (is_staff == "true")
user.save()
if is_superuser is not None:
user.is_superuser = (is_superuser == "true")
user.save()
if active_status:
reg.activate()
reg.save()
# ensure parental consent threshold is met
year = datetime.date.today().year
age_limit = settings.PARENTAL_CONSENT_AGE_LIMIT
profile.year_of_birth = (year - age_limit) - 1
profile.save()
# Enroll the user in a course
if course_key is not None:
CourseEnrollment.enroll(user, course_key, mode=enrollment_mode)
# Apply the roles
for role_name in role_names:
role = Role.objects.get(name=role_name, course_id=course_key)
user.roles.add(role)
# Log in as the user
if login_when_done:
user = authenticate(username=username, password=password)
login(request, user)
create_comments_service_user(user)
# Provide the user with a valid CSRF token
# then return a 200 response unless redirect is true
if redirect_when_done:
# Redirect to specific page if specified
if redirect_to:
redirect_url = redirect_to
# Redirect to course info page if course_id is known
elif course_id:
try:
# redirect to course info page in LMS
redirect_url = reverse(
'info',
kwargs={'course_id': course_id}
)
except NoReverseMatch:
# redirect to course outline page in Studio
redirect_url = reverse(
'course_handler',
kwargs={'course_key_string': course_id}
)
else:
try:
# redirect to dashboard for LMS
redirect_url = reverse('dashboard')
except NoReverseMatch:
# redirect to home for Studio
redirect_url = reverse('home')
return redirect(redirect_url)
elif request.META.get('HTTP_ACCEPT') == 'application/json':
response = JsonResponse({
'created_status': u"Logged in" if login_when_done else "Created",
'username': username,
'email': email,
'password': password,
'user_id': user.id, # pylint: disable=no-member
'anonymous_id': anonymous_id_for_user(user, None),
})
else:
success_msg = u"{} user {} ({}) with password {} and user_id {}".format(
u"Logged in" if login_when_done else "Created",
username, email, password, user.id # pylint: disable=no-member
)
response = HttpResponse(success_msg)
response.set_cookie('csrftoken', csrf(request)['csrf_token'])
return response
@ensure_csrf_cookie
def activate_account(request, key):
"""When link in activation e-mail is clicked"""
regs = Registration.objects.filter(activation_key=key)
if len(regs) == 1:
user_logged_in = request.user.is_authenticated()
already_active = True
if not regs[0].user.is_active:
regs[0].activate()
already_active = False
            # Enroll the student in any pending courses they may have if the auto_enroll flag is set
_enroll_user_in_pending_courses(regs[0].user)
resp = render_to_response(
"registration/activation_complete.html",
{
'user_logged_in': user_logged_in,
'already_active': already_active
}
)
return resp
if len(regs) == 0:
return render_to_response(
"registration/activation_invalid.html",
{'csrf': csrf(request)['csrf_token']}
)
return HttpResponseServerError(_("Unknown error. Please e-mail us to let us know how it happened."))
@csrf_exempt
@require_POST
def password_reset(request):
""" Attempts to send a password reset e-mail. """
# Add some rate limiting here by re-using the RateLimitMixin as a helper class
limiter = BadRequestRateLimiter()
if limiter.is_rate_limit_exceeded(request):
AUDIT_LOG.warning("Rate limit exceeded in password_reset")
return HttpResponseForbidden()
form = PasswordResetFormNoActive(request.POST)
if form.is_valid():
form.save(use_https=request.is_secure(),
from_email=configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL),
request=request)
        # When password change is complete, an "edx.user.settings.changed" event will be emitted.
# But because changing the password is multi-step, we also emit an event here so that we can
# track where the request was initiated.
tracker.emit(
SETTING_CHANGE_INITIATED,
{
"setting": "password",
"old": None,
"new": None,
"user_id": request.user.id,
}
)
destroy_oauth_tokens(request.user)
else:
# bad user? tick the rate limiter counter
AUDIT_LOG.info("Bad password_reset user passed in.")
limiter.tick_bad_request_counter(request)
return JsonResponse({
'success': True,
'value': render_to_string('registration/password_reset_done.html', {}),
})
def uidb36_to_uidb64(uidb36):
"""
Needed to support old password reset URLs that use base36-encoded user IDs
https://github.com/django/django/commit/1184d077893ff1bc947e45b00a4d565f3df81776#diff-c571286052438b2e3190f8db8331a92bR231
Args:
uidb36: base36-encoded user ID
    Returns: the base64-encoded user ID, or a dummy, invalid ID if decoding fails
"""
try:
uidb64 = force_text(urlsafe_base64_encode(force_bytes(base36_to_int(uidb36))))
except ValueError:
uidb64 = '1' # dummy invalid ID (incorrect padding for base64)
return uidb64
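# Example sketch (hypothetical input): base36 "abc" decodes to the integer
# 13368, so uidb36_to_uidb64('abc') returns the urlsafe base64 encoding of
# b'13368', i.e. 'MTMzNjg' (Django strips the trailing '=' padding).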
def validate_password(user, password):
"""
Tie in password policy enforcement as an optional level of
security protection
Args:
user: the user object whose password we're checking.
password: the user's proposed new password.
Returns:
        is_password_valid: a boolean indicating whether the new password
            passes the validation.
err_msg: an error message if there's a violation of one of the password
checks. Otherwise, `None`.
"""
err_msg = None
if settings.FEATURES.get('ENFORCE_PASSWORD_POLICY', False):
try:
validate_password_strength(password)
except ValidationError as err:
err_msg = _('Password: ') + '; '.join(err.messages)
# also, check the password reuse policy
if not PasswordHistory.is_allowable_password_reuse(user, password):
if user.is_staff:
num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STAFF_PASSWORDS_BEFORE_REUSE']
else:
num_distinct = settings.ADVANCED_SECURITY_CONFIG['MIN_DIFFERENT_STUDENT_PASSWORDS_BEFORE_REUSE']
            # Because of how ngettext works, splitting the following into shorter lines would be ugly.
# pylint: disable=line-too-long
err_msg = ungettext(
"You are re-using a password that you have used recently. You must have {num} distinct password before reusing a previous password.",
"You are re-using a password that you have used recently. You must have {num} distinct passwords before reusing a previous password.",
num_distinct
).format(num=num_distinct)
        # also, check to see if passwords are getting reset too frequently
if PasswordHistory.is_password_reset_too_soon(user):
num_days = settings.ADVANCED_SECURITY_CONFIG['MIN_TIME_IN_DAYS_BETWEEN_ALLOWED_RESETS']
            # Because of how ngettext works, splitting the following into shorter lines would be ugly.
# pylint: disable=line-too-long
err_msg = ungettext(
"You are resetting passwords too frequently. Due to security policies, {num} day must elapse between password resets.",
"You are resetting passwords too frequently. Due to security policies, {num} days must elapse between password resets.",
num_days
).format(num=num_days)
is_password_valid = err_msg is None
return is_password_valid, err_msg
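# Usage sketch (assumes ENFORCE_PASSWORD_POLICY and the advanced security
# settings are configured; the password value is made up):
#   is_password_valid, err_msg = validate_password(user, 'new-password')
#   if not is_password_valid:
#       ...  # surface err_msg to the user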
def password_reset_confirm_wrapper(request, uidb36=None, token=None):
"""
A wrapper around django.contrib.auth.views.password_reset_confirm.
Needed because we want to set the user as active at this step.
We also optionally do some additional password policy checks.
"""
# convert old-style base36-encoded user id to base64
uidb64 = uidb36_to_uidb64(uidb36)
platform_name = {
"platform_name": configuration_helpers.get_value('platform_name', settings.PLATFORM_NAME)
}
try:
uid_int = base36_to_int(uidb36)
user = User.objects.get(id=uid_int)
except (ValueError, User.DoesNotExist):
# if there's any error getting a user, just let django's
# password_reset_confirm function handle it.
return password_reset_confirm(
request, uidb64=uidb64, token=token, extra_context=platform_name
)
if request.method == 'POST':
password = request.POST['new_password1']
is_password_valid, password_err_msg = validate_password(user, password)
if not is_password_valid:
# We have a password reset attempt which violates some security
# policy. Use the existing Django template to communicate that
# back to the user.
context = {
'validlink': True,
'form': None,
'title': _('Password reset unsuccessful'),
'err_msg': password_err_msg,
}
context.update(platform_name)
return TemplateResponse(
request, 'registration/password_reset_confirm.html', context
)
# remember what the old password hash is before we call down
old_password_hash = user.password
response = password_reset_confirm(
request, uidb64=uidb64, token=token, extra_context=platform_name
)
# If password reset was unsuccessful a template response is returned (status_code 200).
# Check if form is invalid then show an error to the user.
# Note if password reset was successful we get response redirect (status_code 302).
if response.status_code == 200 and not response.context_data['form'].is_valid():
response.context_data['err_msg'] = _('Error in resetting your password. Please try again.')
return response
# get the updated user
updated_user = User.objects.get(id=uid_int)
        # If the password hash changed, record it in the PasswordHistory
if updated_user.password != old_password_hash:
entry = PasswordHistory()
entry.create(updated_user)
updated_user.backend = 'django.contrib.auth.backends.ModelBackend'
login(request, updated_user)
else:
response = password_reset_confirm(
request, uidb64=uidb64, token=token, extra_context=platform_name
)
response_was_successful = response.context_data.get('validlink')
if response_was_successful and not user.is_active:
user.is_active = True
user.save()
return response
def reactivation_email_for_user(user):
try:
reg = Registration.objects.get(user=user)
except Registration.DoesNotExist:
return JsonResponse({
"success": False,
"error": _('No inactive user with this e-mail exists'),
}) # TODO: this should be status code 400 # pylint: disable=fixme
context = {
'name': user.profile.name,
'key': reg.activation_key,
}
subject = render_to_string('emails/activation_email_subject.txt', context)
subject = ''.join(subject.splitlines())
message = render_to_string('emails/activation_email.txt', context)
from_address = configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
try:
user.email_user(subject, message, from_address)
except Exception: # pylint: disable=broad-except
log.error(
u'Unable to send reactivation email from "%s" to "%s"',
from_address,
user.email,
exc_info=True
)
return JsonResponse({
"success": False,
"error": _('Unable to send reactivation email')
}) # TODO: this should be status code 500 # pylint: disable=fixme
return JsonResponse({"success": True})
def validate_new_email(user, new_email):
"""
    Given a new email for a user, does some basic verification of the new address. If any issues
    are encountered during verification, a ValueError will be raised.
"""
try:
validate_email(new_email)
except ValidationError:
raise ValueError(_('Valid e-mail address required.'))
if new_email == user.email:
raise ValueError(_('Old email is the same as the new email.'))
if User.objects.filter(email=new_email).count() != 0:
raise ValueError(_('An account with this e-mail already exists.'))
def do_email_change_request(user, new_email, activation_key=None):
"""
Given a new email for a user, does some basic verification of the new address and sends an activation message
to the new address. If any issues are encountered with verification or sending the message, a ValueError will
    be raised.
"""
pec_list = PendingEmailChange.objects.filter(user=user)
if len(pec_list) == 0:
pec = PendingEmailChange()
pec.user = user
else:
pec = pec_list[0]
    # if activation_key is not passed as an argument, generate a random key
if not activation_key:
activation_key = uuid.uuid4().hex
pec.new_email = new_email
pec.activation_key = activation_key
pec.save()
context = {
'key': pec.activation_key,
'old_email': user.email,
'new_email': pec.new_email
}
subject = render_to_string('emails/email_change_subject.txt', context)
subject = ''.join(subject.splitlines())
message = render_to_string('emails/email_change.txt', context)
from_address = configuration_helpers.get_value(
'email_from_address',
settings.DEFAULT_FROM_EMAIL
)
try:
mail.send_mail(subject, message, from_address, [pec.new_email])
except Exception: # pylint: disable=broad-except
log.error(u'Unable to send email activation link to user from "%s"', from_address, exc_info=True)
raise ValueError(_('Unable to send email activation link. Please try again later.'))
    # When the email address change is complete, an "edx.user.settings.changed" event will be emitted.
# But because changing the email address is multi-step, we also emit an event here so that we can
# track where the request was initiated.
tracker.emit(
SETTING_CHANGE_INITIATED,
{
"setting": "email",
"old": context['old_email'],
"new": context['new_email'],
"user_id": user.id,
}
)
@ensure_csrf_cookie
def confirm_email_change(request, key): # pylint: disable=unused-argument
"""
    User requested a new e-mail. This is called when the activation
    link is clicked. We confirm with the old e-mail, and update the
    account to use the new e-mail address.
with transaction.atomic():
try:
pec = PendingEmailChange.objects.get(activation_key=key)
except PendingEmailChange.DoesNotExist:
response = render_to_response("invalid_email_key.html", {})
transaction.set_rollback(True)
return response
user = pec.user
address_context = {
'old_email': user.email,
'new_email': pec.new_email
}
if len(User.objects.filter(email=pec.new_email)) != 0:
response = render_to_response("email_exists.html", {})
transaction.set_rollback(True)
return response
subject = render_to_string('emails/email_change_subject.txt', address_context)
subject = ''.join(subject.splitlines())
message = render_to_string('emails/confirm_email_change.txt', address_context)
u_prof = UserProfile.objects.get(user=user)
meta = u_prof.get_meta()
if 'old_emails' not in meta:
meta['old_emails'] = []
meta['old_emails'].append([user.email, datetime.datetime.now(UTC).isoformat()])
u_prof.set_meta(meta)
u_prof.save()
# Send it to the old email...
try:
user.email_user(
subject,
message,
configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
)
except Exception: # pylint: disable=broad-except
log.warning('Unable to send confirmation email to old address', exc_info=True)
response = render_to_response("email_change_failed.html", {'email': user.email})
transaction.set_rollback(True)
return response
user.email = pec.new_email
user.save()
pec.delete()
# And send it to the new email...
try:
user.email_user(
subject,
message,
configuration_helpers.get_value('email_from_address', settings.DEFAULT_FROM_EMAIL)
)
except Exception: # pylint: disable=broad-except
log.warning('Unable to send confirmation email to new address', exc_info=True)
response = render_to_response("email_change_failed.html", {'email': pec.new_email})
transaction.set_rollback(True)
return response
response = render_to_response("email_change_successful.html", address_context)
return response
@require_POST
@login_required
@ensure_csrf_cookie
def change_email_settings(request):
"""Modify logged-in user's setting for receiving emails from a course."""
user = request.user
course_id = request.POST.get("course_id")
course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
receive_emails = request.POST.get("receive_emails")
if receive_emails:
optout_object = Optout.objects.filter(user=user, course_id=course_key)
if optout_object:
optout_object.delete()
log.info(
u"User %s (%s) opted in to receive emails from course %s",
user.username,
user.email,
course_id,
)
track.views.server_track(
request,
"change-email-settings",
{"receive_emails": "yes", "course": course_id},
page='dashboard',
)
else:
Optout.objects.get_or_create(user=user, course_id=course_key)
log.info(
u"User %s (%s) opted out of receiving emails from course %s",
user.username,
user.email,
course_id,
)
track.views.server_track(
request,
"change-email-settings",
{"receive_emails": "no", "course": course_id},
page='dashboard',
)
return JsonResponse({"success": True})
class LogoutView(TemplateView):
"""
Logs out user and redirects.
The template should load iframes to log the user out of OpenID Connect services.
See http://openid.net/specs/openid-connect-logout-1_0.html.
"""
oauth_client_ids = []
template_name = 'logout.html'
# Keep track of the page to which the user should ultimately be redirected.
target = reverse_lazy('cas-logout') if settings.FEATURES.get('AUTH_USE_CAS') else '/'
def dispatch(self, request, *args, **kwargs): # pylint: disable=missing-docstring
# We do not log here, because we have a handler registered to perform logging on successful logouts.
request.is_from_logout = True
# Get the list of authorized clients before we clear the session.
self.oauth_client_ids = request.session.get(edx_oauth2_provider.constants.AUTHORIZED_CLIENTS_SESSION_KEY, [])
logout(request)
# If we don't need to deal with OIDC logouts, just redirect the user.
if LogoutViewConfiguration.current().enabled and self.oauth_client_ids:
response = super(LogoutView, self).dispatch(request, *args, **kwargs)
else:
response = redirect(self.target)
# Clear the cookie used by the edx.org marketing site
delete_logged_in_cookies(response)
return response
def _build_logout_url(self, url):
"""
Builds a logout URL with the `no_redirect` query string parameter.
Args:
url (str): IDA logout URL
Returns:
str
"""
scheme, netloc, path, query_string, fragment = urlsplit(url)
query_params = parse_qs(query_string)
query_params['no_redirect'] = 1
new_query_string = urlencode(query_params, doseq=True)
return urlunsplit((scheme, netloc, path, new_query_string, fragment))
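    # Example sketch: _build_logout_url('https://example.com/logout?foo=bar')
    # returns 'https://example.com/logout?foo=bar&no_redirect=1' (parameter
    # order may vary because parse_qs returns an unordered dict).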
def get_context_data(self, **kwargs):
context = super(LogoutView, self).get_context_data(**kwargs)
# Create a list of URIs that must be called to log the user out of all of the IDAs.
uris = Client.objects.filter(client_id__in=self.oauth_client_ids,
logout_uri__isnull=False).values_list('logout_uri', flat=True)
referrer = self.request.META.get('HTTP_REFERER', '').strip('/')
logout_uris = []
for uri in uris:
if not referrer or (referrer and not uri.startswith(referrer)):
logout_uris.append(self._build_logout_url(uri))
context.update({
'target': self.target,
'logout_uris': logout_uris,
})
return context
| agpl-3.0 | 1,533,828,457,470,076,200 | 40.500727 | 146 | 0.651475 | false |
rossjones/ScraperWikiX | web/frontend/specs/user_upgrade_spec.py | 1 | 1051 | from nose.tools import assert_equals, raises
from frontend.models import UserProfile
from django.contrib.auth.models import User
def setup():
global user
    username, password = 'tester', 'pass'
user = User.objects.create_user(username, '%[email protected]' % username, password)
def ensure_can_upgrade_account():
profile = user.get_profile()
profile.change_plan('individual')
assert_equals(profile.plan, 'individual')
profile.change_plan('business')
assert_equals(profile.plan, 'business')
profile.change_plan('corporate')
assert_equals(profile.plan, 'corporate')
profile.change_plan('free')
assert_equals(profile.plan, 'free')
def ensure_account_upgraded():
profile = user.get_profile()
profile.change_plan('corporate')
db_user = User.objects.filter(username=user.username)[0]
profile = db_user.get_profile()
assert_equals(profile.plan, 'corporate')
@raises(ValueError)
def it_should_not_allow_an_invalid_plan():
profile = user.get_profile()
profile.change_plan('h0h0h0')
| agpl-3.0 | 7,040,054,505,104,011,000 | 30.848485 | 84 | 0.707897 | false |
spacy-io/spaCy | spacy/tests/lang/da/test_exceptions.py | 2 | 1824 | import pytest
@pytest.mark.parametrize("text", ["ca.", "m.a.o.", "Jan.", "Dec.", "kr.", "jf."])
def test_da_tokenizer_handles_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
@pytest.mark.parametrize("text", ["Jul.", "jul.", "Tor.", "Tors."])
def test_da_tokenizer_handles_ambiguous_abbr(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 2
@pytest.mark.parametrize("text", ["1.", "10.", "31."])
def test_da_tokenizer_handles_dates(da_tokenizer, text):
tokens = da_tokenizer(text)
assert len(tokens) == 1
def test_da_tokenizer_handles_exc_in_text(da_tokenizer):
text = "Det er bl.a. ikke meningen"
tokens = da_tokenizer(text)
assert len(tokens) == 5
assert tokens[2].text == "bl.a."
def test_da_tokenizer_handles_custom_base_exc(da_tokenizer):
text = "Her er noget du kan kigge i."
tokens = da_tokenizer(text)
assert len(tokens) == 8
assert tokens[6].text == "i"
assert tokens[7].text == "."
@pytest.mark.parametrize(
"text,n_tokens",
[
("Godt og/eller skidt", 3),
("Kør 4 km/t på vejen", 5),
("Det blæser 12 m/s.", 5),
("Det blæser 12 m/sek. på havnen", 6),
("Windows 8/Windows 10", 5),
("Billeten virker til bus/tog/metro", 8),
("26/02/2019", 1),
("Kristiansen c/o Madsen", 3),
("Sprogteknologi a/s", 2),
("De boede i A/B Bellevue", 5),
# note: skipping due to weirdness in UD_Danish-DDT
# ("Rotorhastigheden er 3400 o/m.", 5),
("Jeg købte billet t/r.", 5),
("Murerarbejdsmand m/k søges", 3),
("Netværket kører over TCP/IP", 4),
],
)
def test_da_tokenizer_slash(da_tokenizer, text, n_tokens):
tokens = da_tokenizer(text)
assert len(tokens) == n_tokens
| mit | -3,821,526,467,111,707,600 | 29.762712 | 81 | 0.592837 | false |
xAKLx/pox | pox/info/recoco_spy.py | 46 | 2718 | # Copyright 2012 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This is an extremely primitive start at some debugging.
At the moment, it is really just for recoco (maybe it belongs in there?).
"""
from pox.core import core
log = core.getLogger()
import time
import traceback
import pox.lib.recoco
_frames = []
def _tf (frame, event, arg):
if _frames is None: return _tf
#print " " * len(_frames) + event
if event == 'call':
_frames.append(frame)
return _tf
elif event == 'line':
return _tf
elif event == 'exception':
#_frames.pop()
return _tf
elif event == 'return':
_frames.pop()
elif event == 'c_call':
print "c_call"
_frames.append((frame,arg))
elif event == 'c_exception':
_frames.pop()
elif event == 'c_return':
_frames.pop()
def _trace_thread_proc ():
last = None
last_time = None
warned = None
while True:
try:
time.sleep(1)
c = len(_frames)
if c == 0: continue
f = _frames[-1]
stopAt = None
count = 0
sf = f
while sf is not None:
if sf.f_code == pox.lib.recoco.Scheduler.cycle.im_func.func_code:
stopAt = sf
break
count += 1
sf = sf.f_back
#if stopAt == None: continue
f = "\n".join([s.strip() for s in
traceback.format_stack(f,count)])
#f = " / ".join([s.strip() for s in
# traceback.format_stack(f,1)[0].strip().split("\n")])
#f = "\n".join([s.strip() for s in
# traceback.format_stack(f)])
if f != last:
if warned:
log.warning("Running again")
warned = None
last = f
last_time = time.time()
elif f != warned:
if time.time() - last_time > 3:
if stopAt is not None:
warned = f
log.warning("Stuck at:\n" + f)
#from pox.core import core
#core.f = f
except:
traceback.print_exc()
pass
def launch ():
def f ():
import sys
sys.settrace(_tf)
core.callLater(f)
import threading
_trace_thread = threading.Thread(target=_trace_thread_proc)
_trace_thread.daemon = True
_trace_thread.start()
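# Usage sketch: enable this component from the POX launcher alongside the
# components you want to watch, e.g. (the other component name is illustrative):
#   ./pox.py info.recoco_spy forwarding.l2_learning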
| apache-2.0 | -3,226,294,816,008,024,600 | 24.166667 | 75 | 0.592347 | false |
guiniol/py3status | py3status/storage.py | 2 | 4614 | from __future__ import with_statement
import os
from collections import Iterable, Mapping
from pickle import dump, load
from tempfile import NamedTemporaryFile
from time import time
class Storage:
data = {}
initialized = False
def init(self, py3_wrapper, is_python_2):
self.is_python_2 = is_python_2
self.py3_wrapper = py3_wrapper
self.config = py3_wrapper.config
py3_config = self.config.get('py3_config', {})
# check for legacy storage cache
legacy_storage_path = self.get_legacy_storage_path()
# cutting edge storage cache
storage_config = py3_config.get('py3status', {}).get('storage')
if storage_config:
storage_file = os.path.expandvars(
os.path.expanduser(storage_config)
)
if '/' in storage_file:
storage_dir = None
else:
storage_dir = os.environ.get('XDG_CACHE_HOME')
else:
storage_dir = os.environ.get('XDG_CACHE_HOME')
storage_file = 'py3status_cache.data'
if not storage_dir:
storage_dir = os.path.expanduser('~/.cache')
self.storage_path = os.path.join(storage_dir, storage_file)
# move legacy storage cache to new desired / default location
if legacy_storage_path:
self.py3_wrapper.log(
'moving legacy storage_path {} to {}'.format(
legacy_storage_path, self.storage_path
)
)
os.rename(legacy_storage_path, self.storage_path)
try:
with open(self.storage_path, 'rb') as f:
try:
# python3
self.data = load(f, encoding='bytes')
except TypeError:
# python2
self.data = load(f)
except IOError:
pass
self.py3_wrapper.log('storage_path: {}'.format(self.storage_path))
if self.data:
self.py3_wrapper.log('storage_data: {}'.format(self.data))
self.initialized = True
def get_legacy_storage_path(self):
"""
Detect and return existing legacy storage path.
"""
config_dir = os.path.dirname(
self.py3_wrapper.config.get('i3status_config_path', '/tmp')
)
storage_path = os.path.join(config_dir, 'py3status.data')
if os.path.exists(storage_path):
return storage_path
else:
return None
def save(self):
"""
Save our data to disk. We want to always have a valid file.
"""
with NamedTemporaryFile(
dir=os.path.dirname(self.storage_path), delete=False
) as f:
# we use protocol=2 for python 2/3 compatibility
dump(self.data, f, protocol=2)
f.flush()
os.fsync(f.fileno())
tmppath = f.name
os.rename(tmppath, self.storage_path)
def fix(self, item):
"""
        Make sure all strings are unicode for python 2/3 compatibility
"""
if not self.is_python_2:
return item
if isinstance(item, str):
return item.decode('utf-8')
if isinstance(item, unicode): # noqa <-- python3 has no unicode
return item
if isinstance(item, Mapping):
return dict(map(self.fix, item.items()))
elif isinstance(item, Iterable):
return type(item)(map(self.fix, item))
return item
def storage_set(self, module_name, key, value):
if key.startswith('_'):
raise ValueError('cannot set keys starting with an underscore "_"')
key = self.fix(key)
value = self.fix(value)
if self.data.get(module_name, {}).get(key) == value:
return
if module_name not in self.data:
self.data[module_name] = {}
self.data[module_name][key] = value
ts = time()
if '_ctime' not in self.data[module_name]:
self.data[module_name]['_ctime'] = ts
self.data[module_name]['_mtime'] = ts
self.save()
def storage_get(self, module_name, key):
key = self.fix(key)
return self.data.get(module_name, {}).get(key, None)
def storage_del(self, module_name, key=None):
key = self.fix(key)
if module_name in self.data and key in self.data[module_name]:
del self.data[module_name][key]
self.save()
def storage_keys(self, module_name):
return self.data.get(module_name, {}).keys()
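# Minimal usage sketch (assumes an initialized py3_wrapper object; the module
# name and key are made up):
#   storage = Storage()
#   storage.init(py3_wrapper, is_python_2=False)
#   storage.storage_set('clock', 'format', '%H:%M')
#   storage.storage_get('clock', 'format')  # -> '%H:%M'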
| bsd-3-clause | -575,864,646,525,704,060 | 31.723404 | 79 | 0.554183 | false |
shashank971/edx-platform | common/djangoapps/enrollment/views.py | 38 | 27557 | """
The Enrollment API Views should be simple, lean HTTP endpoints for API access. This should
consist primarily of authentication, request validation, and serialization.
"""
import logging
from django.core.exceptions import ObjectDoesNotExist
from django.utils.decorators import method_decorator
from opaque_keys import InvalidKeyError
from course_modes.models import CourseMode
from lms.djangoapps.commerce.utils import audit_log
from openedx.core.djangoapps.user_api.preferences.api import update_email_opt_in
from openedx.core.lib.api.permissions import ApiKeyHeaderPermission, ApiKeyHeaderPermissionIsAuthenticated
from rest_framework import status
from rest_framework.response import Response
from rest_framework.throttling import UserRateThrottle
from rest_framework.views import APIView
from opaque_keys.edx.keys import CourseKey
from embargo import api as embargo_api
from cors_csrf.authentication import SessionAuthenticationCrossDomainCsrf
from cors_csrf.decorators import ensure_csrf_cookie_cross_domain
from openedx.core.lib.api.authentication import (
SessionAuthenticationAllowInactiveUser,
OAuth2AuthenticationAllowInactiveUser,
)
from util.disable_rate_limit import can_disable_rate_limit
from enrollment import api
from enrollment.errors import (
CourseNotFoundError, CourseEnrollmentError,
CourseModeNotFoundError, CourseEnrollmentExistsError
)
from student.models import User
log = logging.getLogger(__name__)
REQUIRED_ATTRIBUTES = {
"credit": ["credit:provider_id"],
}
class EnrollmentCrossDomainSessionAuth(SessionAuthenticationAllowInactiveUser, SessionAuthenticationCrossDomainCsrf):
"""Session authentication that allows inactive users and cross-domain requests. """
pass
class ApiKeyPermissionMixIn(object):
"""
This mixin is used to provide a convenience function for doing individual permission checks
for the presence of API keys.
"""
def has_api_key_permissions(self, request):
"""
Checks to see if the request was made by a server with an API key.
Args:
request (Request): the request being made into the view
Return:
True if the request has been made with a valid API key
False otherwise
"""
return ApiKeyHeaderPermission().has_permission(request, self)
class EnrollmentUserThrottle(UserRateThrottle, ApiKeyPermissionMixIn):
"""Limit the number of requests users can make to the enrollment API."""
rate = '40/minute'
def allow_request(self, request, view):
return self.has_api_key_permissions(request) or super(EnrollmentUserThrottle, self).allow_request(request, view)
@can_disable_rate_limit
class EnrollmentView(APIView, ApiKeyPermissionMixIn):
"""
**Use Case**
Get the user's enrollment status for a course.
**Example Request**
GET /api/enrollment/v1/enrollment/{username},{course_id}
**Response Values**
If the request for information about the user is successful, an HTTP 200 "OK" response
is returned.
The HTTP 200 response has the following values.
* course_details: A collection that includes the following
values.
* course_end: The date and time when the course closes. If
null, the course never ends.
* course_id: The unique identifier for the course.
* course_modes: An array of data about the enrollment modes
supported for the course. If the request uses the parameter
include_expired=1, the array also includes expired
enrollment modes.
Each enrollment mode collection includes the following
values.
* currency: The currency of the listed prices.
* description: A description of this mode.
* expiration_datetime: The date and time after which
users cannot enroll in the course in this mode.
* min_price: The minimum price for which a user can
enroll in this mode.
* name: The full name of the enrollment mode.
* slug: The short name for the enrollment mode.
* suggested_prices: A list of suggested prices for
this enrollment mode.
* course_end: The date and time at which the course closes. If
null, the course never ends.
* course_start: The date and time when the course opens. If
null, the course opens immediately when it is created.
* enrollment_end: The date and time after which users cannot
enroll for the course. If null, the enrollment period never
ends.
* enrollment_start: The date and time when users can begin
enrolling in the course. If null, enrollment opens
immediately when the course is created.
* invite_only: A value indicating whether students must be
invited to enroll in the course. Possible values are true or
false.
* created: The date the user account was created.
* is_active: Whether the enrollment is currently active.
* mode: The enrollment mode of the user in this course.
* user: The ID of the user.
"""
authentication_classes = OAuth2AuthenticationAllowInactiveUser, SessionAuthenticationAllowInactiveUser
permission_classes = ApiKeyHeaderPermissionIsAuthenticated,
throttle_classes = EnrollmentUserThrottle,
# Since the course about page on the marketing site uses this API to auto-enroll users,
# we need to support cross-domain CSRF.
@method_decorator(ensure_csrf_cookie_cross_domain)
def get(self, request, course_id=None, username=None):
"""Create, read, or update enrollment information for a user.
HTTP Endpoint for all CRUD operations for a user course enrollment. Allows creation, reading, and
updates of the current enrollment for a particular course.
Args:
request (Request): To get current course enrollment information, a GET request will return
information for the current user and the specified course.
course_id (str): URI element specifying the course location. Enrollment information will be
returned, created, or updated for this particular course.
username (str): The username associated with this enrollment request.
Return:
A JSON serialized representation of the course enrollment.
"""
username = username or request.user.username
# TODO Implement proper permissions
if request.user.username != username and not self.has_api_key_permissions(request) \
and not request.user.is_superuser:
# Return a 404 instead of a 403 (Unauthorized). If one user is looking up
# other users, do not let them deduce the existence of an enrollment.
return Response(status=status.HTTP_404_NOT_FOUND)
try:
return Response(api.get_enrollment(username, course_id))
except CourseEnrollmentError:
return Response(
status=status.HTTP_400_BAD_REQUEST,
data={
"message": (
u"An error occurred while retrieving enrollments for user "
u"'{username}' in course '{course_id}'"
).format(username=username, course_id=course_id)
}
)
@can_disable_rate_limit
class EnrollmentCourseDetailView(APIView):
"""
**Use Case**
Get enrollment details for a course.
Response values include the course schedule and enrollment modes
supported by the course. Use the parameter include_expired=1 to
include expired enrollment modes in the response.
**Note:** Getting enrollment details for a course does not require
authentication.
**Example Requests**
GET /api/enrollment/v1/course/{course_id}
GET /api/enrollment/v1/course/{course_id}?include_expired=1
**Response Values**
If the request is successful, an HTTP 200 "OK" response is
returned along with a collection of course enrollments for the
user or for the newly created enrollment.
Each course enrollment contains the following values.
* course_end: The date and time when the course closes. If
null, the course never ends.
* course_id: The unique identifier for the course.
* course_modes: An array of data about the enrollment modes
supported for the course. If the request uses the parameter
include_expired=1, the array also includes expired
enrollment modes.
Each enrollment mode collection includes the following
values.
* currency: The currency of the listed prices.
* description: A description of this mode.
* expiration_datetime: The date and time after which
users cannot enroll in the course in this mode.
* min_price: The minimum price for which a user can
enroll in this mode.
* name: The full name of the enrollment mode.
* slug: The short name for the enrollment mode.
* suggested_prices: A list of suggested prices for
this enrollment mode.
* course_start: The date and time when the course opens. If
null, the course opens immediately when it is created.
* enrollment_end: The date and time after which users cannot
enroll for the course. If null, the enrollment period never
ends.
* enrollment_start: The date and time when users can begin
enrolling in the course. If null, enrollment opens
immediately when the course is created.
* invite_only: A value indicating whether students must be
invited to enroll in the course. Possible values are true or
false.
"""
authentication_classes = []
permission_classes = []
throttle_classes = EnrollmentUserThrottle,
def get(self, request, course_id=None):
"""Read enrollment information for a particular course.
HTTP Endpoint for retrieving course level enrollment information.
Args:
request (Request): To get current course enrollment information, a GET request will return
information for the specified course.
course_id (str): URI element specifying the course location. Enrollment information will be
returned.
Return:
A JSON serialized representation of the course enrollment details.
"""
try:
return Response(api.get_course_enrollment_details(course_id, bool(request.GET.get('include_expired', ''))))
except CourseNotFoundError:
return Response(
status=status.HTTP_400_BAD_REQUEST,
data={
"message": (
u"No course found for course ID '{course_id}'"
).format(course_id=course_id)
}
)
@can_disable_rate_limit
class EnrollmentListView(APIView, ApiKeyPermissionMixIn):
"""
**Use Cases**
* Get a list of all course enrollments for the currently signed in user.
* Enroll the currently signed in user in a course.
Currently a user can use this command only to enroll the user in
honor mode. If honor mode is not supported for the course, the
request fails and returns the available modes.
This command can use a server-to-server call to enroll a user in
other modes, such as "verified", "professional", or "credit". If
the mode is not supported for the course, the request will fail
and return the available modes.
You can include other parameters as enrollment attributes for a
specific course mode. For example, for credit mode, you can
include the following parameters to specify the credit provider
attribute.
* namespace: credit
* name: provider_id
* value: institution_name
**Example Requests**
GET /api/enrollment/v1/enrollment
POST /api/enrollment/v1/enrollment {
"mode": "credit",
"course_details":{"course_id": "edX/DemoX/Demo_Course"},
"enrollment_attributes":[{"namespace": "credit","name": "provider_id","value": "hogwarts",},]
}
**POST Parameters**
A POST request can include the following parameters.
* user: Optional. The username of the currently logged in user.
You cannot use the command to enroll a different user.
* mode: Optional. The course mode for the enrollment. Individual
users cannot upgrade their enrollment mode from 'honor'. Only
server-to-server requests can enroll with other modes.
* is_active: Optional. A Boolean value indicating whether the
enrollment is active. Only server-to-server requests are
allowed to deactivate an enrollment.
* course details: A collection that includes the following
information.
* course_id: The unique identifier for the course.
* email_opt_in: Optional. A Boolean value that indicates whether
the user wants to receive email from the organization that runs
this course.
* enrollment_attributes: A dictionary that contains the following
values.
* namespace: Namespace of the attribute
* name: Name of the attribute
* value: Value of the attribute
**GET Response Values**
If an unspecified error occurs when the user tries to obtain a
learner's enrollments, the request returns an HTTP 400 "Bad
Request" response.
If the user does not have permission to view enrollment data for
the requested learner, the request returns an HTTP 404 "Not Found"
response.
**POST Response Values**
If the user does not specify a course ID, the specified course
does not exist, or the is_active status is invalid, the request
returns an HTTP 400 "Bad Request" response.
If a user who is not an admin tries to upgrade a learner's course
mode, the request returns an HTTP 403 "Forbidden" response.
If the specified user does not exist, the request returns an HTTP
406 "Not Acceptable" response.
**GET and POST Response Values**
If the request is successful, an HTTP 200 "OK" response is
returned along with a collection of course enrollments for the
user or for the newly created enrollment.
Each course enrollment contains the following values.
* course_details: A collection that includes the following
values.
* course_end: The date and time when the course closes. If
null, the course never ends.
* course_id: The unique identifier for the course.
* course_modes: An array of data about the enrollment modes
supported for the course. If the request uses the parameter
include_expired=1, the array also includes expired
enrollment modes.
Each enrollment mode collection includes the following
values.
* currency: The currency of the listed prices.
* description: A description of this mode.
* expiration_datetime: The date and time after which users
cannot enroll in the course in this mode.
* min_price: The minimum price for which a user can enroll in
this mode.
* name: The full name of the enrollment mode.
* slug: The short name for the enrollment mode.
* suggested_prices: A list of suggested prices for this
enrollment mode.
* course_start: The date and time when the course opens. If
null, the course opens immediately when it is created.
* enrollment_end: The date and time after which users cannot
enroll for the course. If null, the enrollment period never
ends.
* enrollment_start: The date and time when users can begin
enrolling in the course. If null, enrollment opens
immediately when the course is created.
* invite_only: A value indicating whether students must be
invited to enroll in the course. Possible values are true or
false.
* created: The date the user account was created.
* is_active: Whether the enrollment is currently active.
* mode: The enrollment mode of the user in this course.
* user: The username of the user.
"""
authentication_classes = OAuth2AuthenticationAllowInactiveUser, EnrollmentCrossDomainSessionAuth
permission_classes = ApiKeyHeaderPermissionIsAuthenticated,
throttle_classes = EnrollmentUserThrottle,
# Since the course about page on the marketing site
# uses this API to auto-enroll users, we need to support
# cross-domain CSRF.
@method_decorator(ensure_csrf_cookie_cross_domain)
def get(self, request):
"""Gets a list of all course enrollments for the currently logged in user."""
username = request.GET.get('user', request.user.username)
if request.user.username != username and not self.has_api_key_permissions(request):
# Return a 404 instead of a 403 (Forbidden). If one user is looking up
# other users, do not let them deduce the existence of an enrollment.
return Response(status=status.HTTP_404_NOT_FOUND)
try:
return Response(api.get_enrollments(username))
except CourseEnrollmentError:
return Response(
status=status.HTTP_400_BAD_REQUEST,
data={
"message": (
u"An error occurred while retrieving enrollments for user '{username}'"
).format(username=username)
}
)
def post(self, request):
"""Enrolls the currently logged-in user in a course.
Server-to-server calls may deactivate or modify the mode of existing enrollments. All other requests
go through `add_enrollment()`, which allows creation of new and reactivation of old enrollments.
"""
# Get the User, Course ID, and Mode from the request.
username = request.DATA.get('user', request.user.username)
course_id = request.DATA.get('course_details', {}).get('course_id')
if not course_id:
return Response(
status=status.HTTP_400_BAD_REQUEST,
data={"message": u"Course ID must be specified to create a new enrollment."}
)
try:
course_id = CourseKey.from_string(course_id)
except InvalidKeyError:
return Response(
status=status.HTTP_400_BAD_REQUEST,
data={
"message": u"No course '{course_id}' found for enrollment".format(course_id=course_id)
}
)
mode = request.DATA.get('mode', CourseMode.HONOR)
has_api_key_permissions = self.has_api_key_permissions(request)
# Check that the user specified is either the same user, or this is a server-to-server request.
if not username:
username = request.user.username
if username != request.user.username and not has_api_key_permissions:
# Return a 404 instead of a 403 (Forbidden). If one user is looking up
# other users, do not let them deduce the existence of an enrollment.
return Response(status=status.HTTP_404_NOT_FOUND)
if mode != CourseMode.HONOR and not has_api_key_permissions:
return Response(
status=status.HTTP_403_FORBIDDEN,
data={
"message": u"User does not have permission to create enrollment with mode [{mode}].".format(
mode=mode
)
}
)
try:
# Lookup the user, instead of using request.user, since request.user may not match the username POSTed.
user = User.objects.get(username=username)
except ObjectDoesNotExist:
return Response(
status=status.HTTP_406_NOT_ACCEPTABLE,
data={
'message': u'The user {} does not exist.'.format(username)
}
)
embargo_response = embargo_api.get_embargo_response(request, course_id, user)
if embargo_response:
return embargo_response
try:
is_active = request.DATA.get('is_active')
# Reject requested activation statuses that are neither None nor a Boolean
if is_active is not None and not isinstance(is_active, bool):
return Response(
status=status.HTTP_400_BAD_REQUEST,
data={
'message': (u"'{value}' is an invalid enrollment activation status.").format(value=is_active)
}
)
enrollment_attributes = request.DATA.get('enrollment_attributes')
enrollment = api.get_enrollment(username, unicode(course_id))
mode_changed = enrollment and mode is not None and enrollment['mode'] != mode
active_changed = enrollment and is_active is not None and enrollment['is_active'] != is_active
missing_attrs = []
if enrollment_attributes:
actual_attrs = [
u"{namespace}:{name}".format(**attr)
for attr in enrollment_attributes
]
missing_attrs = set(REQUIRED_ATTRIBUTES.get(mode, [])) - set(actual_attrs)
if has_api_key_permissions and (mode_changed or active_changed):
if mode_changed and active_changed and not is_active:
# if the requester wanted to deactivate but specified the wrong mode, fail
# the request (on the assumption that the requester had outdated information
# about the currently active enrollment).
msg = u"Enrollment mode mismatch: active mode={}, requested mode={}. Won't deactivate.".format(
enrollment["mode"], mode
)
log.warning(msg)
return Response(status=status.HTTP_400_BAD_REQUEST, data={"message": msg})
if len(missing_attrs) > 0:
msg = u"Missing enrollment attributes: requested mode={} required attributes={}".format(
mode, REQUIRED_ATTRIBUTES.get(mode)
)
log.warning(msg)
return Response(status=status.HTTP_400_BAD_REQUEST, data={"message": msg})
response = api.update_enrollment(
username,
unicode(course_id),
mode=mode,
is_active=is_active,
enrollment_attributes=enrollment_attributes
)
else:
# Will reactivate inactive enrollments.
response = api.add_enrollment(username, unicode(course_id), mode=mode, is_active=is_active)
email_opt_in = request.DATA.get('email_opt_in', None)
if email_opt_in is not None:
org = course_id.org
update_email_opt_in(request.user, org, email_opt_in)
return Response(response)
except CourseModeNotFoundError as error:
return Response(
status=status.HTTP_400_BAD_REQUEST,
data={
"message": (
u"The course mode '{mode}' is not available for course '{course_id}'."
).format(mode=mode, course_id=course_id),
"course_details": error.data
})
except CourseNotFoundError:
return Response(
status=status.HTTP_400_BAD_REQUEST,
data={
"message": u"No course '{course_id}' found for enrollment".format(course_id=course_id)
}
)
except CourseEnrollmentExistsError as error:
return Response(data=error.enrollment)
except CourseEnrollmentError:
return Response(
status=status.HTTP_400_BAD_REQUEST,
data={
"message": (
u"An error occurred while creating the new course enrollment for user "
u"'{username}' in course '{course_id}'"
).format(username=username, course_id=course_id)
}
)
finally:
# Assumes that the ecommerce service uses an API key to authenticate.
if has_api_key_permissions:
current_enrollment = api.get_enrollment(username, unicode(course_id))
audit_log(
'enrollment_change_requested',
course_id=unicode(course_id),
requested_mode=mode,
actual_mode=current_enrollment['mode'] if current_enrollment else None,
requested_activation=is_active,
actual_activation=current_enrollment['is_active'] if current_enrollment else None,
user_id=user.id
)
| agpl-3.0 | -3,390,536,105,199,150,000 | 42.39685 | 120 | 0.598868 | false |
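# ---------------------------------------------------------------------------
# Editor's sketch -- NOT part of the edX source above. A hypothetical client
# call against the POST endpoint documented in EnrollmentListView; the host
# name, session cookie, and CSRF token are placeholder assumptions, and the
# payload mirrors the example request in the class docstring.
import requests

payload = {
    "mode": "credit",
    "course_details": {"course_id": "edX/DemoX/Demo_Course"},
    "enrollment_attributes": [
        {"namespace": "credit", "name": "provider_id", "value": "hogwarts"}
    ],
}
# The view enforces cross-domain CSRF, so a session-authenticated client
# must supply the token along with its session cookie.
response = requests.post(
    "https://lms.example.com/api/enrollment/v1/enrollment",  # placeholder host
    json=payload,
    headers={"X-CSRFToken": "<csrf-token>"},
    cookies={"sessionid": "<session-id>", "csrftoken": "<csrf-token>"},
)
print(response.status_code, response.json())
# ---------------------------------------------------------------------------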
openqt/algorithms | projecteuler/pe165-intersections.py | 1 | 1824 | #!/usr/bin/env python
# coding=utf-8
"""165. Intersections
https://projecteuler.net/problem=165
A segment is uniquely defined by its two endpoints.
By considering two line segments in plane geometry there are three
possibilities:
the segments have zero points, one point, or infinitely many points in common.
Moreover when two segments have exactly one point in common it might be the
case that that common point is an endpoint of either one of the segments or of
both. If a common point of two segments is not an endpoint of either of the
segments it is an interior point of both segments.
We will call a common point T of two segments L1 and L2 a true intersection
point of L1 and L2 if T is the only common point of L1 and L2 and T is an
interior point of both segments.
Consider the three segments L1, L2, and L3:
L1: (27, 44) to (12, 32)
L2: (46, 53) to (17, 62)
L3: (46, 70) to (22, 40)
It can be verified that line segments L2 and L3 have a true intersection
point. We note that since one of the end points of L3, (22,40), lies on L1,
this is not considered to be a true point of intersection. L1 and L2 have no
common point. So among the three line segments, we find one true intersection
point.
Now let us do the same for 5000 line segments. To this end, we generate 20000
numbers using the so-called "Blum Blum Shub" pseudo-random number generator.
s_0 = 290797
s_(n+1) = s_n × s_n (modulo 50515093)
t_n = s_n (modulo 500)
To create each line segment, we use four consecutive numbers t_n. That is, the
first line segment is given by:
(t_1, t_2) to (t_3, t_4)
The first four numbers computed according to the above generator should be:
27, 144, 12 and 232. The first segment would thus be (27,144) to (12,232).
How many distinct true intersection points are found among the 5000 line
segments?
"""
| gpl-3.0 | 2,148,431,976,965,857,000 | 34.745098 | 78 | 0.747669 | false |
jgeewax/gcloud-python | monitoring/google/cloud/monitoring/resource.py | 7 | 6625 | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Monitored Resource Descriptors for the
`Google Stackdriver Monitoring API (V3)`_.
.. _Google Stackdriver Monitoring API (V3):
https://cloud.google.com/monitoring/api/ref_v3/rest/v3/\
projects.monitoredResourceDescriptors
"""
import collections
from google.cloud.monitoring.label import LabelDescriptor
class ResourceDescriptor(object):
"""Specification of a monitored resource type and its schema.
:type name: str
:param name:
The "resource name" of the monitored resource descriptor:
``"projects/<project_id>/monitoredResourceDescriptors/<type>"``
:type type_: str
:param type_:
The monitored resource type. For example: ``"gce_instance"``
:type display_name: str
:param display_name:
A concise name that might be displayed in user interfaces.
:type description: str
:param description:
A detailed description that might be used in documentation.
:type labels:
list of :class:`~google.cloud.monitoring.label.LabelDescriptor`
:param labels:
A sequence of label descriptors specifying the labels used
to identify a specific instance of this monitored resource.
"""
def __init__(self, name, type_, display_name, description, labels):
self.name = name
self.type = type_
self.display_name = display_name
self.description = description
self.labels = labels
@classmethod
def _fetch(cls, client, resource_type):
"""Look up a monitored resource descriptor by type.
:type client: :class:`google.cloud.monitoring.client.Client`
:param client: The client to use.
:type resource_type: str
:param resource_type: The resource type name.
:rtype: :class:`ResourceDescriptor`
:returns: The resource descriptor instance.
:raises: :class:`google.cloud.exceptions.NotFound` if the resource
descriptor is not found.
"""
path = ('/projects/{project}/monitoredResourceDescriptors/{type}'
.format(project=client.project,
type=resource_type))
info = client._connection.api_request(method='GET', path=path)
return cls._from_dict(info)
@classmethod
def _list(cls, client, filter_string=None):
"""List all monitored resource descriptors for the project.
:type client: :class:`google.cloud.monitoring.client.Client`
:param client: The client to use.
:type filter_string: str
:param filter_string:
(Optional) An optional filter expression describing the resource
descriptors to be returned. See the `filter documentation`_.
:rtype: list of :class:`ResourceDescriptor`
:returns: A list of resource descriptor instances.
.. _filter documentation:
https://cloud.google.com/monitoring/api/v3/filters
"""
path = '/projects/{project}/monitoredResourceDescriptors/'.format(
project=client.project)
descriptors = []
page_token = None
while True:
params = {}
if filter_string is not None:
params['filter'] = filter_string
if page_token is not None:
params['pageToken'] = page_token
response = client._connection.api_request(
method='GET', path=path, query_params=params)
for info in response.get('resourceDescriptors', ()):
descriptors.append(cls._from_dict(info))
page_token = response.get('nextPageToken')
if not page_token:
break
return descriptors
@classmethod
def _from_dict(cls, info):
"""Construct a resource descriptor from the parsed JSON representation.
:type info: dict
:param info:
A ``dict`` parsed from the JSON wire-format representation.
:rtype: :class:`ResourceDescriptor`
:returns: A resource descriptor.
"""
return cls(
name=info['name'],
type_=info['type'],
display_name=info.get('displayName', ''),
description=info.get('description', ''),
labels=tuple(LabelDescriptor._from_dict(label)
for label in info.get('labels', ())),
)
def __repr__(self):
return (
'<ResourceDescriptor:\n'
' name={name!r},\n'
' type={type!r},\n'
' labels={labels!r},\n'
' display_name={display_name!r},\n'
' description={description!r}>'
).format(**self.__dict__)
class Resource(collections.namedtuple('Resource', 'type labels')):
"""A monitored resource identified by specifying values for all labels.
The preferred way to construct a resource object is using the
:meth:`~google.cloud.monitoring.client.Client.resource` factory method
of the :class:`~google.cloud.monitoring.client.Client` class.
:type type: str
:param type: The resource type name.
:type labels: dict
:param labels: A mapping from label names to values for all labels
enumerated in the associated :class:`ResourceDescriptor`.
"""
__slots__ = ()
@classmethod
def _from_dict(cls, info):
"""Construct a resource object from the parsed JSON representation.
:type info: dict
:param info:
A ``dict`` parsed from the JSON wire-format representation.
:rtype: :class:`Resource`
:returns: A resource object.
"""
return cls(
type=info['type'],
labels=info.get('labels', {}),
)
def _to_dict(self):
"""Build a dictionary ready to be serialized to the JSON format.
:rtype: dict
:returns: A dict representation of the object that can be written to
the API.
"""
return {
'type': self.type,
'labels': self.labels,
}
| apache-2.0 | 1,638,923,971,437,702,100 | 31.960199 | 79 | 0.61766 | false |
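# ---------------------------------------------------------------------------
# Editor's sketch -- NOT part of the original module above. Round-tripping a
# Resource through the JSON-style dict representation used by the API, given
# the Resource class defined above. The label values are made-up examples.
info = {
    'type': 'gce_instance',
    'labels': {'instance_id': '1234567890', 'zone': 'us-central1-a'},
}
resource = Resource._from_dict(info)
assert resource.type == 'gce_instance'
assert resource.labels['zone'] == 'us-central1-a'
assert resource._to_dict() == info
# ---------------------------------------------------------------------------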
HEPData/hepdata-converter-ws-client | hepdata_converter_ws_client/testsuite/__init__.py | 1 | 3956 | # -*- encoding: utf-8 -*-
__author__ = 'Michał Szostak'
import os
from random import randint
import shutil
import tempfile
import time
import unittest
import yaml
# We try to load using the CSafeLoader for speed improvements.
try:
from yaml import CSafeLoader as Loader
except ImportError: #pragma: no cover
from yaml import SafeLoader as Loader #pragma: no cover
def _parse_path_arguments(sample_file_name):
_sample_file_name = list(sample_file_name)
sample_file_name = []
for path_element in _sample_file_name:
sample_file_name += path_element.split('/')
return sample_file_name
def construct_testdata_path(path_elements):
return os.path.join(os.path.dirname(os.path.abspath(__file__)), 'testdata', *path_elements)
class TMPDirMixin(object):
def tearDown(self):
shutil.rmtree(self.current_tmp)
def setUp(self):
self.current_tmp = os.path.join(tempfile.gettempdir(), str(int(time.time()))) + str(randint(0, 10000))
try:
os.mkdir(self.current_tmp)
finally:
pass
class insert_data_as_binary_file(object):
def __init__(self, *sample_file_name):
self.sample_file_name = _parse_path_arguments(sample_file_name)
def __call__(self, function):
def _inner(*args, **kwargs):
args = list(args)
with open(construct_testdata_path(self.sample_file_name), 'rb') as f:
args.append(f)
function(*args, **kwargs)
return _inner
class insert_data_as_extracted_dir(object):
def __init__(self, *sample_file_name):
self.sample_file_name = _parse_path_arguments(sample_file_name)
self.temp_path = tempfile.gettempdir()
def __call__(self, function):
def _inner(*args, **kwargs):
args = list(args)
with tempfile.TemporaryDirectory() as temp_dir:
shutil.unpack_archive(construct_testdata_path(self.sample_file_name), temp_dir)
# Assume zips consist of a single directory
unpacked_dir = os.path.join(temp_dir, os.listdir(temp_dir)[0])
args.append(unpacked_dir)
function(*args, **kwargs)
return _inner
class insert_path(object):
def __init__(self, *sample_file_name):
self.sample_file_name = _parse_path_arguments(sample_file_name)
def __call__(self, function):
def _inner(*args, **kwargs):
args = list(args)
args.append(construct_testdata_path(self.sample_file_name))
function(*args, **kwargs)
return _inner
class ExtendedTestCase(unittest.TestCase):
def assertMultiLineAlmostEqual(self, first, second, msg=None):
if hasattr(first, 'readlines'):
lines = first.readlines()
elif isinstance(first, str):
lines = first.split('\n')
if hasattr(second, 'readlines'):
orig_lines = second.readlines()
elif isinstance(second, str):
orig_lines = second.split('\n')
self.assertEqual(len(lines), len(orig_lines))
for i in range(len(lines)):
self.assertEqual(lines[i].strip(), orig_lines[i].strip())
def assertDirsEqual(self, first_dir, second_dir,
file_content_parser=lambda x: list(yaml.load_all(x, Loader=Loader)),
exclude=[], msg=None):
self.assertEqual(list(os.walk(first_dir))[1:], list(os.walk(second_dir))[1:], msg)
dirs = list(os.walk(first_dir))
for file in dirs[0][2]:
if file not in exclude:
with open(os.path.join(first_dir, file)) as f1, open(os.path.join(second_dir, file)) as f2:
# separated into 2 variables to ease debugging if the need arises
d1 = file_content_parser(f1.read())
d2 = file_content_parser(f2.read())
self.assertEqual(d1, d2)
| gpl-2.0 | 5,510,302,980,980,426,000 | 32.803419 | 110 | 0.603793 | false |
hyphaltip/cndtools | util/randSeq.py | 1 | 1185 | #!/usr/bin/env python
# Copyright (c) 2006
# Colin Dewey (University of Wisconsin-Madison)
# [email protected]
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import random
import FASTA
import sys
if len(sys.argv) != 3:
print >> sys.stderr, "usage: %s title length" % sys.argv[0]
sys.exit(1)
title = sys.argv[1]
length = int(sys.argv[2])
DNA = "ACGT"
seq = [None] * length
for i in xrange(length):
seq[i] = random.choice(DNA)
rec = FASTA.Record()
rec.title = title
rec.sequence = ''.join(seq)
print rec
| gpl-2.0 | 8,426,622,635,996,024,000 | 27.214286 | 75 | 0.723207 | false |
napjon/krisk | krisk/tests/test_title_legend_toolbox.py | 1 | 3970 | def test_title(bar_simple):
# Blank Title
assert bar_simple.option['title'] == {'text': ''}
# Title with set position
c_t = bar_simple.set_title('Hellow', x_pos='auto', y_pos='-5%')
assert bar_simple.option['title'] == {'left': 'auto',
'text': 'Hellow',
'top': '5%'}
def test_legend(bar_simple):
# Blank Legend
assert bar_simple.option['legend'] == {'align': 'auto',
'bottom': 'auto',
'data': [],
'left': 'auto',
'orient': 'horizontal'}
# Legend with orientation and position
c_l = bar_simple.set_legend(orient='vertical', x_pos='-5%', y_pos='auto')
assert bar_simple.option['legend'] == {'align': 'auto',
'bottom': 'auto',
'data': [],
'left': 'auto',
'orient': 'vertical',
'right': '5%'}
def test_toolbox(bar_simple):
# Default Toolbox
c_d = bar_simple.option['toolbox']
assert c_d == {'align': 'auto',
'bottom': 'auto',
'feature': {'dataZoom': {'show': False, 'title': 'Zoom'},
'restore': {'show': False, 'title': 'Reset'}},
'left': 'auto',
'orient': 'horizontal'}
# Default Toolbox with Non-Orientation and Position
c_dop = (bar_simple.set_toolbox(
align='right', orient='vertical', x_pos='-5%', y_pos='-5%')
.option['toolbox'])
assert c_dop == {'align': 'right',
'feature': {'dataZoom': {'show': False, 'title': 'Zoom'},
'restore': {'show': False, 'title': 'Reset'}},
'orient': 'vertical',
'right': '5%',
'top': '5%'}
# Restore, Save, and Zoom
c_rsz = (bar_simple.set_toolbox(
restore=True, save_format='png', data_zoom=True).option['toolbox'])
assert c_rsz == {'align': 'auto',
'bottom': 'auto',
'feature': {'dataZoom': {'show': True,
'title': 'Zoom'},
'restore': {'show': True,
'title': 'Reset'},
'saveAsImage': {'show': True,
'title': 'Download as Image',
'type': 'png'}},
'left': 'auto',
'orient': 'horizontal'}
# Data View and Magic Type
c_vzm = (bar_simple.set_toolbox(
data_view=False, data_zoom=True, magic_type=['line', 'bar'])
.option['toolbox'])
assert c_vzm == {'align': 'auto',
'bottom': 'auto',
'feature': {'dataView': {'lang': ['Table View', 'Back',
'Modify'],
'readOnly': False,
'show': True,
'title': 'Table View'},
'dataZoom': {'show': True,
'title': 'Zoom'},
'magicType': {'show': True,
'title': 'Chart Options',
'type': ['line', 'bar']},
'restore': {'show': False,
'title': 'Reset'}},
'left': 'auto',
'orient': 'horizontal'}
| bsd-3-clause | -1,553,210,094,076,432,100 | 46.261905 | 78 | 0.348363 | false |
wdv4758h/ZipPy | edu.uci.python.benchmark/src/benchmarks/sympy/sympy/printing/theanocode.py | 21 | 8160 | from __future__ import print_function, division
import inspect
from sympy.utilities import default_sort_key
from sympy.external import import_module
from sympy.printing.printer import Printer
import sympy
from functools import partial
theano = import_module('theano')
if theano:
ts = theano.scalar
tt = theano.tensor
from theano import sandbox
from theano.sandbox import linalg as tlinalg
mapping = {
sympy.Add: tt.add,
sympy.Mul: tt.mul,
sympy.Abs: tt.abs_,
sympy.sign: tt.sgn,
sympy.ceiling: tt.ceil,
sympy.floor: tt.floor,
sympy.log: tt.log,
sympy.exp: tt.exp,
sympy.sqrt: tt.sqrt,
sympy.cos: tt.cos,
sympy.acos: tt.arccos,
sympy.sin: tt.sin,
sympy.asin: tt.arcsin,
sympy.tan: tt.tan,
sympy.atan: tt.arctan,
sympy.atan2: tt.arctan2,
sympy.cosh: tt.cosh,
sympy.acosh: tt.arccosh,
sympy.sinh: tt.sinh,
sympy.asinh: tt.arcsinh,
sympy.tanh: tt.tanh,
sympy.atanh: tt.arctanh,
sympy.re: tt.real,
sympy.im: tt.imag,
sympy.arg: tt.angle,
sympy.erf: tt.erf,
sympy.gamma: tt.gamma,
sympy.loggamma: tt.gammaln,
sympy.Pow: tt.pow,
sympy.Eq: tt.eq,
sympy.StrictGreaterThan: tt.gt,
sympy.StrictLessThan: tt.lt,
sympy.LessThan: tt.le,
sympy.GreaterThan: tt.ge,
sympy.Max: tt.maximum, # SymPy accepts >2 inputs, Theano only 2
sympy.Min: tt.minimum, # SymPy accepts >2 inputs, Theano only 2
# Matrices
sympy.MatAdd: tt.Elemwise(ts.add),
sympy.HadamardProduct: tt.Elemwise(ts.mul),
sympy.Trace: tlinalg.trace,
sympy.Determinant : tlinalg.det,
sympy.Inverse: tlinalg.matrix_inverse,
sympy.Transpose: tt.DimShuffle((False, False), [1, 0]),
}
class TheanoPrinter(Printer):
""" Code printer for Theano computations """
printmethod = "_theano"
def __init__(self, *args, **kwargs):
self.cache = kwargs.pop('cache', dict())
super(TheanoPrinter, self).__init__(*args, **kwargs)
def _print_Symbol(self, s, dtypes={}, broadcastables={}):
dtype = dtypes.get(s, 'floatX')
broadcastable = broadcastables.get(s, ())
key = (s.name, dtype, broadcastable, type(s))
if key in self.cache:
return self.cache[key]
else:
value = tt.tensor(name=s.name, dtype=dtype, broadcastable=broadcastable)
self.cache[key] = value
return value
def _print_AppliedUndef(self, s, dtypes={}, broadcastables={}):
dtype = dtypes.get(s, 'floatX')
broadcastable = broadcastables.get(s, ())
name = str(type(s)) + '_' + str(s.args[0])
key = (name, dtype, broadcastable, type(s), s.args)
if key in self.cache:
return self.cache[key]
else:
value = tt.tensor(name=name, dtype=dtype, broadcastable=broadcastable)
self.cache[key] = value
return value
def _print_Basic(self, expr, **kwargs):
op = mapping[type(expr)]
children = [self._print(arg, **kwargs) for arg in expr.args]
return op(*children)
def _print_Number(self, n, **kwargs):
return eval(str(n))
def _print_MatrixSymbol(self, X, dtypes={}, **kwargs):
dtype = dtypes.get(X, 'floatX')
# shape = [self._print(d, dtypes) for d in X.shape]
key = (X.name, dtype, type(X))
if key in self.cache:
return self.cache[key]
else:
value = tt.Tensor(dtype, (False, False))(X.name)
self.cache[key] = value
return value
def _print_DenseMatrix(self, X, **kwargs):
try:
tt.stacklists
except AttributeError:
raise NotImplementedError(
"Matrix translation not yet supported in this version of Theano")
else:
return tt.stacklists([[self._print(arg, **kwargs) for arg in L]
for L in X.tolist()])
_print_ImmutableMatrix = _print_DenseMatrix
def _print_MatMul(self, expr, **kwargs):
children = [self._print(arg, **kwargs) for arg in expr.args]
result = children[0]
for child in children[1:]:
result = tt.dot(result, child)
return result
def _print_MatrixSlice(self, expr, **kwargs):
parent = self._print(expr.parent, **kwargs)
rowslice = self._print(slice(*expr.rowslice), **kwargs)
colslice = self._print(slice(*expr.colslice), **kwargs)
return parent[rowslice, colslice]
def _print_BlockMatrix(self, expr, **kwargs):
nrows, ncols = expr.blocks.shape
blocks = [[self._print(expr.blocks[r, c], **kwargs)
for c in range(ncols)]
for r in range(nrows)]
return tt.join(0, *[tt.join(1, *row) for row in blocks])
def _print_slice(self, expr, **kwargs):
return slice(*[self._print(i, **kwargs)
if isinstance(i, sympy.Basic) else i
for i in (expr.start, expr.stop, expr.step)])
def _print_Pi(self, expr, **kwargs):
return 3.141592653589793
def _print_Piecewise(self, expr, **kwargs):
import numpy as np
e, cond = expr.args[0].args
if len(expr.args) == 1:
return tt.switch(self._print(cond, **kwargs),
self._print(e, **kwargs),
np.nan)
return tt.switch(self._print(cond, **kwargs),
self._print(e, **kwargs),
self._print(sympy.Piecewise(*expr.args[1:]), **kwargs))
def _print_Rational(self, expr, **kwargs):
return tt.true_div(self._print(expr.p, **kwargs),
self._print(expr.q, **kwargs))
def _print_Integer(self, expr, **kwargs):
return expr.p
def _print_factorial(self, expr, **kwargs):
return self._print(sympy.gamma(expr.args[0] + 1), **kwargs)
def _print_Derivative(self, deriv, **kwargs):
rv = self._print(deriv.expr, **kwargs)
for var in deriv.variables:
var = self._print(var, **kwargs)
rv = tt.Rop(rv, var, tt.ones_like(var))
return rv
def emptyPrinter(self, expr):
return expr
def doprint(self, expr, **kwargs):
"""Returns printer's representation for expr (as a string)"""
return self._print(expr, **kwargs)
global_cache = {}
def theano_code(expr, cache=global_cache, **kwargs):
return TheanoPrinter(cache=cache, settings={}).doprint(expr, **kwargs)
def dim_handling(inputs, dim=None, dims={}, broadcastables={}, keys=(),
**kwargs):
""" Handle various input types for dimensions in tensor_wrap
See Also:
tensor_wrap
theano_function
"""
if dim:
dims = dict(zip(inputs, [dim]*len(inputs)))
if dims:
maxdim = max(dims.values())
broadcastables = dict((i, (False,)*dims[i] + (True,)*(maxdim-dims[i]))
for i in inputs)
return broadcastables
def theano_function(inputs, outputs, dtypes={}, cache=None, **kwargs):
""" Create Theano function from SymPy expressions """
cache = {} if cache is None else cache
broadcastables = dim_handling(inputs, **kwargs)
# Remove keyword arguments corresponding to dim_handling
dim_names = inspect.getargspec(dim_handling)[0]
theano_kwargs = dict((k, v) for k, v in kwargs.items()
if k not in dim_names)
code = partial(theano_code, cache=cache, dtypes=dtypes,
broadcastables=broadcastables)
tinputs = list(map(code, inputs))
toutputs = list(map(code, outputs))
toutputs = toutputs[0] if len(toutputs) == 1 else toutputs
return theano.function(tinputs, toutputs, **theano_kwargs)
| bsd-3-clause | 1,578,785,401,999,373,300 | 34.633188 | 84 | 0.563358 | false |
max-arnold/uwsgi-backports | uwsgistatus.py | 7 | 2418 | import uwsgi
import time
import sys
import os
def application(env, start_response):
print env
start_response('200 OK', [('Content-Type', 'text/html')])
yield '<h1>uWSGI %s status</h1>' % uwsgi.version
yield 'masterpid: <b>' + str(uwsgi.masterpid()) + '</b><br/>'
yield 'started on: <b>' + time.ctime(uwsgi.started_on) + '</b><br/>'
yield 'buffer size: <b>' + str(uwsgi.buffer_size) + '</b><br/>'
yield 'total_requests: <b>' + str(uwsgi.total_requests()) + '</b><br/>'
yield 'log size: <b>' + str(uwsgi.logsize()) + '</b><br/>'
yield 'workers: <b>' + str(uwsgi.numproc) + '</b><br/>'
yield "cwd: <b>%s</b><br/>" % os.getcwd()
try:
yield "mode: <b>%s</b><br/>" % uwsgi.mode
except:
pass
try:
yield "pidfile: <b>%s</b><br/>" % uwsgi.pidfile
except:
pass
yield "<h2>Hooks</h2>"
for h in range(0,255):
if uwsgi.has_hook(h):
yield "%d<br/>" % h
yield '<h2>dynamic options</h2>'
yield '<b>logging</b>: ' + str(uwsgi.get_option(0)) + '<br/>'
yield '<b>max_requests</b>: ' + str(uwsgi.getoption(1)) + '<br/>'
yield '<b>socket_timeout</b>: ' + str(uwsgi.getoption(2)) + '<br/>'
yield '<b>memory_debug</b>: ' + str(uwsgi.getoption(3)) + '<br/>'
yield '<b>master_interval</b>: ' + str(uwsgi.getoption(4)) + '<br/>'
yield '<b>harakiri</b>: ' + str(uwsgi.getoption(5)) + '<br/>'
yield '<b>cgi_mode</b>: ' + str(uwsgi.get_option(6)) + '<br/>'
yield '<b>threads</b>: ' + str(uwsgi.get_option(7)) + '<br/>'
yield '<b>process_reaper</b>: ' + str(uwsgi.get_option(8)) + '<br/>'
yield '<table border="1">'
yield '<th>worker id</th><th>pid</th><th>in request</th><th>requests</th><th>running time</th><th>address space</th><th>rss</th>'
workers = uwsgi.workers();
yield '<h2>workers</h2>'
for w in workers:
#print w
#print w['running_time']
if w is not None:
yield '<tr><td>'+ str(w['id']) +'</td><td>' + str(w['pid']) + '</td><td>' + str(w['pid']) + '</td><td>' + str(w['requests']) + '</td><td>' + str(w['running_time']) + '</td><td>' + str(w['vsz']) + '</td><td>' + str(w['rss']) + '</td></tr>'
print w
yield '</table>'
yield "<h2>PYTHONPATH</h2>"
yield "<ul>"
for p in sys.path:
yield "<li>%s</li>" % p
yield "</ul>"
yield "<i>%s</i>" % str(os.uname())
| gpl-2.0 | 6,243,679,049,501,226,000 | 29.607595 | 250 | 0.516543 | false |
Lana-B/Pheno4T | madanalysis/layout/histogram.py | 1 | 4207 | ################################################################################
#
# Copyright (C) 2012-2013 Eric Conte, Benjamin Fuks
# The MadAnalysis development team, email: <[email protected]>
#
# This file is part of MadAnalysis 5.
# Official website: <https://launchpad.net/madanalysis5>
#
# MadAnalysis 5 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MadAnalysis 5 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MadAnalysis 5. If not, see <http://www.gnu.org/licenses/>
#
################################################################################
from madanalysis.layout.histogram_core import HistogramCore
import logging
class Histogram:
stamp = 0
def __init__(self):
self.Reset()
def Print(self):
# General info
logging.info(self.name + ' ' + str(self.nbins) + ' ' + \
str(self.xmin) + ' ' + str(self.xmax))
# Data
self.positive.Print()
self.negative.Print()
self.summary.Print()
def FinalizeReading(self,main,dataset):
import numpy
# Statistics
self.summary.nevents = self.positive.nevents + self.negative.nevents
self.summary.nentries = self.positive.nentries + self.negative.nentries
# sumw
self.summary.sumw = self.positive.sumw - self.negative.sumw
if self.summary.sumw<0:
self.summary.sumw=0
# sumw2
self.summary.sumw2 = self.positive.sumw2 - self.negative.sumw2
if self.summary.sumw2<0:
self.summary.sumw2=0
# sumwx
self.summary.sumwx = self.positive.sumwx - self.negative.sumwx
# no correction on it
# sumw2x
self.summary.sumw2x = self.positive.sumw2x - self.negative.sumw2x
# no correction on it
# underflow
self.summary.underflow = self.positive.underflow - self.negative.underflow
if self.summary.underflow<0:
self.summary.underflow=0
# overflow
self.summary.overflow = self.positive.overflow - self.negative.overflow
if self.summary.overflow<0:
self.summary.overflow=0
# Data
data = []
for i in range(0,len(self.positive.array)):
data.append(self.positive.array[i]-self.negative.array[i])
if data[-1]<0:
self.warnings.append(\
'dataset='+dataset.name+\
' -> bin '+str(i)+\
' has a negative content : '+\
str(data[-1])+'. This value is set to zero')
data[-1]=0
self.summary.array = numpy.array(data)
# Integral
self.positive.ComputeIntegral()
self.negative.ComputeIntegral()
self.summary.ComputeIntegral()
def CreateHistogram(self):
# New stamp
Histogram.stamp+=1
# Creating a new histo
from ROOT import TH1F
self.myhisto = TH1F(\
self.name+"_"+str(Histogram.stamp),\
self.name+"_"+str(Histogram.stamp),\
self.nbins,\
self.xmin,\
self.xmax)
# Filling bins
for bin in range(0,self.nbins):
self.myhisto.SetBinContent(bin+1, self.summary.array[bin])
def Reset(self):
# General info
self.name = ""
self.nbins = 100
self.xmin = 0.
self.xmax = 100.
self.scale = 0.
# Data
self.positive = HistogramCore()
self.negative = HistogramCore()
self.summary = HistogramCore()
# ROOT histo
self.myhisto = 0
# warnings
self.warnings = []
| gpl-3.0 | -4,322,020,847,721,325,000 | 28.626761 | 82 | 0.559306 | false |
temnoregg/django-helpdesk | helpdesk/south_migrations/0011_populate_usersettings.py | 5 | 5022 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.auth import get_user_model
from helpdesk.settings import DEFAULT_USER_SETTINGS
def pickle_settings(data):
"""Pickling as defined at migration's creation time"""
import cPickle
from helpdesk.lib import b64encode
return b64encode(cPickle.dumps(data))
# https://docs.djangoproject.com/en/1.7/topics/migrations/#data-migrations
def populate_usersettings(orm):
"""Create a UserSettings entry for each existing user.
This will only happen once (at install time, or at upgrade)
when the UserSettings model doesn't already exist."""
_User = get_user_model()
# Import historical version of models
User = orm[_User._meta.app_label+'.'+_User._meta.model_name]
UserSettings = orm["helpdesk"+'.'+"UserSettings"]
settings_pickled = pickle_settings(DEFAULT_USER_SETTINGS)
for u in User.objects.all():
try:
UserSettings.objects.get(user=u)
except UserSettings.DoesNotExist:
UserSettings.objects.create(user=u, settings_pickled=settings_pickled)
class Migration(DataMigration):
def forwards(self, orm):
populate_usersettings(orm)
def backwards(self, orm):
pass
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'helpdesk.usersettings': {
'Meta': {'object_name': 'UserSettings'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'settings_pickled': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
}
}
complete_apps = ['helpdesk']
symmetrical = True
| bsd-3-clause | 3,055,535,137,414,424,600 | 54.8 | 195 | 0.5908 | false |
anushreejangid/csmpe-main | csmpe/core_plugins/csm_install_operations/ios_xr/add.py | 1 | 4725 | # =============================================================================
#
# Copyright (c) 2016, Cisco Systems
# All rights reserved.
#
# # Author: Klaudiusz Staniek
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
# THE POSSIBILITY OF SUCH DAMAGE.
# =============================================================================
from csmpe.plugins import CSMPlugin
from install import install_add_remove
from csmpe.core_plugins.csm_get_inventory.ios_xr.plugin import get_package, get_inventory
class Plugin(CSMPlugin):
"""This plugin adds packages from repository to the device."""
name = "Install Add Plugin"
platforms = {'ASR9K', 'CRS'}
phases = {'Add'}
os = {'XR'}
def install_add(self, server_repository_url, s_packages, has_tar=False):
if server_repository_url.startswith("scp"):
# scp username:[email protected]:/home_directory destination_on_host
scp_username_and_password, sep, server_and_directory_and_destination = server_repository_url.partition('@')
# scp_username_and_password = 'scp username:password', sep = '@',
# server_ip_and_directory = 'x.x.x.x:/home_directory destination_on_host'
if not scp_username_and_password or not sep or not server_and_directory_and_destination:
self.ctx.error("Check if the SCP server repository is configured correctly on CSM Server.")
scp_username, sep, scp_password = scp_username_and_password.partition(':')
if not scp_username or not sep or not scp_password:
self.ctx.error("Check if the SCP server repository is configured correctly on CSM Server.")
server_and_directory, sep, destination_on_host = server_and_directory_and_destination.partition(' ')
if not server_and_directory or not sep or not destination_on_host:
self.ctx.error("Check if the SCP server repository is configured correctly on CSM Server.")
# scp username:@x.x.x.x:/home_directory
url = scp_username + '@' + server_and_directory
for package in s_packages.split():
cmd = "{}/{} {}".format(url, package, destination_on_host)
cmd = "admin install add source {} {} async".format(destination_on_host, s_packages)
else:
cmd = "admin install add source {} {} async".format(server_repository_url, s_packages)
install_add_remove(self.ctx, cmd, has_tar=has_tar)
def run(self):
server_repository_url = self.ctx.server_repository_url
if server_repository_url is None:
self.ctx.error("No repository provided")
return
packages = self.ctx.software_packages
if packages is None:
self.ctx.error("No package list provided")
return
has_tar = False
s_packages = " ".join([package for package in packages
if '-vm' not in package and ('pie' in package or 'tar' in package)])
if 'tar' in s_packages:
has_tar = True
if not s_packages:
self.ctx.error("None of the selected package(s) has an acceptable file extension.")
self.ctx.info("Add Package(s) Pending")
self.ctx.post_status("Add Package(s) Pending")
self.install_add(server_repository_url, s_packages, has_tar=has_tar)
self.ctx.info("Package(s) Added Successfully")
# Refresh package and inventory information
get_package(self.ctx)
get_inventory(self.ctx)
| bsd-2-clause | -5,918,361,077,678,879,000 | 46.25 | 119 | 0.65545 | false |
cmdunkers/DeeperMind | PythonEnv/lib/python2.7/site-packages/theano/sandbox/cuda/tests/test_blocksparse.py | 2 | 2710 | import numpy
from nose.plugins.skip import SkipTest
import theano
from theano import tensor
import theano.tests.unittest_tools as utt
import theano.tensor.nnet.tests.test_blocksparse
import theano.sandbox.cuda as cuda_ndarray
from theano.sandbox.cuda.blocksparse import (GpuSparseBlockOuter,
gpu_sparse_block_gemv,
gpu_sparse_block_outer)
from theano.sandbox.cuda.var import float32_shared_constructor
if not cuda_ndarray.cuda_available:
raise SkipTest('Optional package cuda disabled')
if theano.config.mode == 'FAST_COMPILE':
mode_with_gpu = theano.compile.mode.get_mode('FAST_RUN').including('gpu')
else:
mode_with_gpu = theano.compile.mode.get_default_mode().including('gpu')
class BlockSparse_Gemv_and_Outer(
theano.tensor.nnet.tests.test_blocksparse.BlockSparse_Gemv_and_Outer):
def setUp(self):
utt.seed_rng()
self.mode = mode_with_gpu.excluding('constant_folding')
self.gemv_op = gpu_sparse_block_gemv
self.outer_op = gpu_sparse_block_outer
# This test is temporarily disabled since we disabled the output_merge
# and alpha_merge optimizations for blocksparse due to brokeness.
# Re-enable when those are re-added.
def Xtest_blocksparse_grad_merge(self):
b = tensor.fmatrix()
h = tensor.ftensor3()
iIdx = tensor.lmatrix()
oIdx = tensor.lmatrix()
W_val, h_val, iIdx_val, b_val, oIdx_val = self.gemv_data()
W = float32_shared_constructor(W_val)
o = gpu_sparse_block_gemv(b.take(oIdx, axis=0), W, h, iIdx, oIdx)
gW = theano.grad(o.sum(), W)
lr = numpy.asarray(0.05, dtype='float32')
upd = W - lr * gW
f1 = theano.function([h, iIdx, b, oIdx], updates=[(W, upd)],
mode=mode_with_gpu)
# Make sure the lr update was merged.
assert isinstance(f1.maker.fgraph.outputs[0].owner.op,
GpuSparseBlockOuter)
# Exclude the merge optimizations.
mode = mode_with_gpu.excluding('local_merge_blocksparse_alpha')
mode = mode.excluding('local_merge_blocksparse_output')
f2 = theano.function([h, iIdx, b, oIdx], updates=[(W, upd)], mode=mode)
# Make sure the lr update is not merged.
assert not isinstance(f2.maker.fgraph.outputs[0].owner.op,
GpuSparseBlockOuter)
f2(h_val, iIdx_val, b_val, oIdx_val)
W_ref = W.get_value()
# reset the var
W.set_value(W_val)
f1(h_val, iIdx_val, b_val, oIdx_val)
W_opt = W.get_value()
utt.assert_allclose(W_ref, W_opt)
| bsd-3-clause | -5,929,907,997,292,509,000 | 34.657895 | 79 | 0.62214 | false |
bigsassy/sms-playground | kidmuseum.py | 1 | 14316 | import sys
import time
import json
from datetime import datetime
try:
from urllib2 import Request, urlopen, HTTPError
except:
from urllib.request import Request, urlopen, HTTPError
def handle_server_down(exctype, value, traceback):
if exctype == HTTPError and 'HTTP Error 502' in str(value):
print("Uh oh. Looks like the SMS Playground server is down :(")
print("You won't be able to run your program until it's brought back online.")
print("Email [email protected] to request for Eric to turn the server back on.")
sys.exit(1)
else:
sys.__excepthook__(exctype, value, traceback)
sys.excepthook = handle_server_down
start_conversation_url = "http://sms-playground.com/conversation/start"
send_message_url = "http://sms-playground.com/conversation/{}/message/send"
get_response_message_url = "http://sms-playground.com/conversation/{}/message/response/{}"
add_to_picture_url = "http://sms-playground.com/conversation/{}/picture/{}/{}"
get_transformed_picture_url = "http://sms-playground.com/conversation/{}/picture/{}/"
class TxtConversation(object):
"""
A TxtConversation manages a text conversation between a person txting you and your program.
It comes with a bunch of useful methods that help you communicate with the person texting
your program, in particular sending messages and getting information from the user of your
program.
It also handles registering your program with the Texting Playground. This allows people to
pick your program to chat with by sending a txt with the name of your program.
Here's a simple example of what a program could look like:
from kidmuseum import TxtConversation
conversation = TxtConversation("I <3 compliments")
converstaion.send_message("Hi! You love compliments? Well I got tons of 'em!")
name = converstaion.get_string("First, what's your name?")
conversation.send_message("Hey, " + name + " is an awesome name!")
conversation.send_message("I bet you're super smart too.")
conversation.send_message("To be honest, you're the coolest person I've talked today BY FAR :D")
converstaion.send_message("Gotta go, ttyl!")
Now, let's pretend the phone number for the SMS Playground was 240-555-0033. Here's what the
conversation would look like if someone texted I <3 compliments to that number.
Person: I <3 compliments
Program: Hi! You love compliments? Well I got tons of 'em!
Program: First, what's your name?
Person: Sarah
Program: Hey, Sarah is an awesome name!
Program: I bet you're super smart too.
Program: To be honest, you're the coolest person I've talked today BY FAR :D
Program: Gotta go, ttyl!
"""
def __init__(self, keyword, timeout=None):
"""
This is the code that get's called when you create the conversation.
Example:
conversation = TxtConversation("I <3 compliments")
:param keyword: What someone would text to start this conversation (e.g. "I <3 compliments")
"""
start_time = datetime.utcnow()
while (True):
# Ask the server to start a conversation with someone
# who texts the keyword to the Texting Playground's phone number
request = Request(start_conversation_url, json.dumps({
'keyword': keyword,
'messages_must_be_older_than': str(start_time),
}).encode('utf-8'), {'Content-Type': 'application/json'})
response_data = json.loads(urlopen(request).read().decode('utf8'))
# If nobody has texted our keyword to the Texting Playgroud yet,
# wait a bit and check again. If it's been a really long time,
# stop waiting and stop the program.
if 'wait_for_seconds' in response_data:
time.sleep(response_data['wait_for_seconds'])
if timeout and (datetime.utcnow() - start_time).seconds >= timeout:
raise Exception("Too much time passed while waiting for text with {}.".format(keyword))
continue
# return the special conversation code used to communicated with
# the user who started the conversation
self.conversation_code = response_data['conversation_code']
break
def send_message(self, message):
"""
Send a message to the user's phone.
Example:
converstaion.send_message("Hi! You love compliments? Well I got tons of 'em!")
:param message: The message sent to the user's phone.
"""
self._send_message(message)
def send_picture(self, picture_or_url, message=""):
"""
Send a message to the user's phone.
Examples:
converstaion.send_picture("http://dreamatico.com/data_images/kitten/kitten-2.jpg", "It's a kitten!")
converstaion.send_picture("http://dreamatico.com/data_images/kitten/kitten-7.jpg")
picture = conversation.get_picture("Gimme your best selfie")
picture.add_glasses("kanye_shades")
conversation.send_picture(picture, "You with Kanye Shades")
:param picture_or_url: Either a Picture object or a url to an image.
:param message: Optional message to send along with the picture
"""
url = picture_or_url
if type(picture_or_url) is Picture:
url = picture_or_url._get_url()
self._send_message(message, picture_url=url)
def get_string(self, prompt_message):
"""
Asks the user to reply with a message and returns it as a string.
Examples:
name = conversation.get_string("What's your name?")
conversation.send_message("Hi, " + name)
:param prompt_message: The message to send to the user, prompting for a response.
:return: The message sent by the user in reply as a string.
"""
self.send_message(prompt_message)
return self._get_response_message("string")
def get_integer(self, prompt_message):
"""
Asks the user to reply with a message and returns it as an integer.
Examples:
age = conversation.get_integer("What's your age?")
age_after_ten_years = age + 10
conversation.send_message("In 10 years you'll be " + age_after_ten_years)
:param prompt_message: The message to send to the user, prompting for a response.
:return: The message sent by the user in reply as an integer.
"""
self.send_message(prompt_message)
return self._get_response_message("int")
def get_floating_point(self, prompt_message):
"""
Asks the user to reply with a message and returns it as a float.
Examples:
price = conversation.get_floating_point("How much was the bill?")
tip = price * 1.20 # tip 20%
conversation.send_message("You should tip " + tip)
:param prompt_message: The message to send to the user, prompting for a response.
:return: The message sent by the user in reply as a float.
"""
self.send_message(prompt_message)
return self._get_response_message("float")
def get_picture(self, prompt_message):
"""
Asks the user to reply with a picture and returns it as a Picture object.
Examples:
picture = conversation.get_picture("Gimme your best selfie")
picture.add_glasses("kanye_shades")
conversation.send_picture(picture, "You with Kanye Shades")
:param prompt_message: The message to send to the user, prompting for a response.
:return: The picture sent by the user in reply as a Picture object.
"""
self.send_message(prompt_message)
picture_code = self._get_response_message("picture")
return Picture(self.conversation_code, picture_code)
def _send_message(self, message, picture_url=None):
"""
Handles asking the SMS Playground server to send a message to the user
in this conversation. This is used by the `send_message` and `send_picture`
methods.
:param message: The message getting sent to the user
:param picture_url: Optionally, a url to the image getting sent to the user.
"""
request = Request(send_message_url.format(self.conversation_code), json.dumps({
'message': message,
'picture_url': picture_url,
}).encode('utf-8'), {'Content-Type': 'application/json'})
response = urlopen(request)
# If the server told us something was wrong with our request, stop the program
if response.getcode() != 200:
raise Exception("Failed to send message: {}".format(response.read()))
# Wait a little bit between sending messages so they don't reach the phone out of order
time.sleep(1)
def _get_response_message(self, response_type):
"""
Handles asking the SMS Playground server for the user's response to our previous message
sent in the conversation. This is used by the `get_string`, `get_integer`, `get_float` and
`get_picture` methods.
:param message: The message getting sent to the user
:param picture_url: Optionally, a url to the image getting sent to the user.
"""
timeout_seconds = 120
start_time = datetime.utcnow()
while (True):
# Ask the server for the message the user sent to respond
# to our last message sent to them
url = get_response_message_url.format(self.conversation_code, response_type)
request = Request(url, json.dumps({
'messages_must_be_older_than': str(start_time),
}).encode('utf-8'), {'Content-Type': 'application/json'})
response_data = json.loads(urlopen(request).read().decode('utf8'))
# If the user hasn't responded yet, wait a bit and check again.
# If it's been a really long time, stop waiting and stop the program.
if 'wait_for_seconds' in response_data:
time.sleep(response_data['wait_for_seconds'])
if (datetime.utcnow() - start_time).seconds >= timeout_seconds:
raise Exception("Too much time passed while waiting for a response")
continue
# return the special conversation code used to communicated with
# the user who started the conversation
if response_type == "picture":
return response_data['picture_code']
else:
return response_data['message']
class Picture(object):
"""
A Picture represents a picture send by the user in a text conversation. It provides method's
for manipulating the picture, for example adding sunglasses if there's a face in the picture.
Picture objects can be sent back to the user's phone by using the `send_picture` method
in a TxtConversation object.
Here's a simple example of what a program could look like:
from kidmuseum import TxtConversation
conversation = TxtConversation("Hipster")
selfie_picture = conversation.get_picture("Hi! Send me a selfie, and I'll send you back a hipster!")
selfie_picture.add_moustache("moustache1")
selfie_picture.add_glasses("glasses1")
conversation.send_picture(selfie_picture)
    Now, let's pretend the phone number for the SMS Playground was 240-555-0033. Here's what the
    conversation would look like if someone texted Hipster to that number.
Person: Hipster
Program: Hi! Send me a selfie, and I'll send you back a hipster!
Person: <takes selfie with camera and sends it>
Program: <replies with selfie, only with hipster sunglasses and moustache pasted on>
"""
def __init__(self, conversation_code, picture_code):
self.conversation_code = conversation_code
self.picture_code = picture_code
def add_moustache(self, moustache_name):
"""
        Adds a moustache to the image if there's a face in it. Valid moustaches are "moustache1", "moustache2",
        "moustache3", "moustache4", "moustache5", "moustache6", and "moustache7".
        :param moustache_name: The name of the moustache. See the list of valid moustaches above.
"""
        # Tell the server to add the moustache to the user's picture
request = Request(add_to_picture_url.format(self.conversation_code, self.picture_code, "moustache"), json.dumps({
'moustache_name': moustache_name,
}).encode('utf-8'), {'Content-Type': 'application/json'})
try:
urlopen(request)
except HTTPError as error:
# If the server told us something was wrong with our request, stop the program
raise Exception("Failed to add a moustsache: {}".format(error.read()))
def add_glasses(self, glasses_name):
"""
        Adds glasses to the image if there's a face in it. Valid glasses are "glasses1", "glasses2",
        "glasses3", "glasses4", and "glasses5".
        :param glasses_name: The name of the glasses. See the list of valid glasses above.
"""
        # Tell the server to add the glasses to the user's picture
request = Request(add_to_picture_url.format(self.conversation_code, self.picture_code, "glasses"), json.dumps({
'glasses_name': glasses_name,
}).encode('utf-8'), {'Content-Type': 'application/json'})
try:
urlopen(request)
except HTTPError as error:
# If the server told us something was wrong with our request, stop the program
raise Exception("Failed to add glasses: {}".format(error.read()))
def _get_url(self):
"""
Asks the server for the URL for the picture with all the modifications defined (glasses, moustache, etc).
:return: The URL for the modified picture.
"""
request = Request(get_transformed_picture_url.format(self.conversation_code, self.picture_code))
response_data = json.loads(urlopen(request).read().decode('utf8'))
return response_data['url']
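# Illustrative follow-up, not part of the original library: once _get_url
# returns the transformed picture's URL, a caller could fetch the bytes with
# the same urlopen already imported above. The file name here is hypothetical.
#
#     picture_bytes = urlopen(my_picture._get_url()).read()
#     with open("hipster_selfie.jpg", "wb") as image_file:
#         image_file.write(picture_bytes)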
| mit | -1,191,694,240,274,205,700 | 42.250755 | 121 | 0.643266 | false |
dtroyer/cliff | cliff/tests/test_formatters_yaml.py | 1 | 3058 | #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
import yaml
from cliff.formatters import yaml_format
from cliff.tests import base
from cliff.tests import test_columns
import mock
class TestYAMLFormatter(base.TestBase):
def test_format_one(self):
sf = yaml_format.YAMLFormatter()
c = ('a', 'b', 'c', 'd')
d = ('A', 'B', 'C', '"escape me"')
expected = {
'a': 'A',
'b': 'B',
'c': 'C',
'd': '"escape me"'
}
output = six.StringIO()
args = mock.Mock()
sf.emit_one(c, d, output, args)
actual = yaml.safe_load(output.getvalue())
self.assertEqual(expected, actual)
def test_formattablecolumn_one(self):
sf = yaml_format.YAMLFormatter()
c = ('a', 'b', 'c', 'd')
d = ('A', 'B', 'C', test_columns.FauxColumn(['the', 'value']))
expected = {
'a': 'A',
'b': 'B',
'c': 'C',
'd': ['the', 'value'],
}
args = mock.Mock()
sf.add_argument_group(args)
args.noindent = True
output = six.StringIO()
sf.emit_one(c, d, output, args)
value = output.getvalue()
print(len(value.splitlines()))
actual = yaml.safe_load(output.getvalue())
self.assertEqual(expected, actual)
def test_list(self):
sf = yaml_format.YAMLFormatter()
c = ('a', 'b', 'c')
d = (
('A1', 'B1', 'C1'),
('A2', 'B2', 'C2'),
('A3', 'B3', 'C3')
)
expected = [
{'a': 'A1', 'b': 'B1', 'c': 'C1'},
{'a': 'A2', 'b': 'B2', 'c': 'C2'},
{'a': 'A3', 'b': 'B3', 'c': 'C3'}
]
output = six.StringIO()
args = mock.Mock()
sf.add_argument_group(args)
sf.emit_list(c, d, output, args)
actual = yaml.safe_load(output.getvalue())
self.assertEqual(expected, actual)
def test_formattablecolumn_list(self):
sf = yaml_format.YAMLFormatter()
c = ('a', 'b', 'c')
d = (
('A1', 'B1', test_columns.FauxColumn(['the', 'value'])),
)
expected = [
{'a': 'A1', 'b': 'B1', 'c': ['the', 'value']},
]
args = mock.Mock()
sf.add_argument_group(args)
args.noindent = True
output = six.StringIO()
sf.emit_list(c, d, output, args)
actual = yaml.safe_load(output.getvalue())
self.assertEqual(expected, actual)
| apache-2.0 | -9,176,197,613,891,788,000 | 29.58 | 76 | 0.519294 | false |
tarasane/h2o-3 | py2/testdir_hosts/test_parse_manyfile_hack.py | 21 | 1857 | import unittest, sys
sys.path.extend(['.','..','../..','py'])
import h2o2 as h2o
import h2o_cmd, h2o_import as h2i
from h2o_test import find_file, dump_json, verboseprint
print "Do a hack to import files individually, then parse, to avoid Frames.json on unused files"
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
h2o.init()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_parse_manyfile_hack(self):
for trial in range(2):
importFolderPath = "/home/0xdiag/datasets/manyfiles-nflx-gz"
importList = []
maxi = 50
# 4-9 don't exist?
for i in range(10, 10+maxi+1):
csvFilename = "file_%s.dat.gz" % i
csvPathname = importFolderPath + "/" + csvFilename
importResult = h2o.n0.import_files(path=csvPathname)
# just 1!
import_key = importResult['keys'][0]
assert len(importResult['keys'])==1
assert len(importResult['files'])==1
assert len(importResult['fails'])==0
assert len(importResult['dels'])==0
importList.append(import_key)
timeoutSecs = 800
parseResult = h2o.n0.parse(key=importList, timeoutSecs=timeoutSecs)
numRows, numCols, parse_key = h2o_cmd.infoFromParse(parseResult)
inspectResult = h2o_cmd.runInspect(key=parse_key)
missingList, labelList, numRows, numCols = h2o_cmd.infoFromInspect(inspectResult)
assert numRows == (maxi * 100000)
assert numCols == 542
# FIX! add summary. Do I need to do each col separately?
if __name__ == '__main__':
h2o.unit_main()
| apache-2.0 | -3,269,395,069,711,012,000 | 32.763636 | 96 | 0.578891 | false |
gooddata/openstack-nova | nova/tests/unit/api/openstack/compute/test_migrations.py | 1 | 16823 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from oslo_utils.fixture import uuidsentinel as uuids
import six
from webob import exc
from nova.api.openstack.compute import migrations as migrations_v21
from nova import context
from nova import exception
from nova import objects
from nova.objects import base
from nova import test
from nova.tests.unit.api.openstack import fakes
fake_migrations = [
# in-progress live migration
{
'id': 1,
'source_node': 'node1',
'dest_node': 'node2',
'source_compute': 'compute1',
'dest_compute': 'compute2',
'dest_host': '1.2.3.4',
'status': 'running',
'instance_uuid': uuids.instance1,
'old_instance_type_id': 1,
'new_instance_type_id': 2,
'migration_type': 'live-migration',
'hidden': False,
'memory_total': 123456,
'memory_processed': 12345,
'memory_remaining': 111111,
'disk_total': 234567,
'disk_processed': 23456,
'disk_remaining': 211111,
'created_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'deleted_at': None,
'deleted': False,
'uuid': uuids.migration1,
},
# non in-progress live migration
{
'id': 2,
'source_node': 'node1',
'dest_node': 'node2',
'source_compute': 'compute1',
'dest_compute': 'compute2',
'dest_host': '1.2.3.4',
'status': 'error',
'instance_uuid': uuids.instance1,
'old_instance_type_id': 1,
'new_instance_type_id': 2,
'migration_type': 'live-migration',
'hidden': False,
'memory_total': 123456,
'memory_processed': 12345,
'memory_remaining': 111111,
'disk_total': 234567,
'disk_processed': 23456,
'disk_remaining': 211111,
'created_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'deleted_at': None,
'deleted': False,
'uuid': uuids.migration2,
},
# in-progress resize
{
'id': 4,
'source_node': 'node10',
'dest_node': 'node20',
'source_compute': 'compute10',
'dest_compute': 'compute20',
'dest_host': '5.6.7.8',
'status': 'migrating',
'instance_uuid': uuids.instance2,
'old_instance_type_id': 5,
'new_instance_type_id': 6,
'migration_type': 'resize',
'hidden': False,
'memory_total': 456789,
'memory_processed': 56789,
'memory_remaining': 45000,
'disk_total': 96789,
'disk_processed': 6789,
'disk_remaining': 96000,
'created_at': datetime.datetime(2013, 10, 22, 13, 42, 2),
'updated_at': datetime.datetime(2013, 10, 22, 13, 42, 2),
'deleted_at': None,
'deleted': False,
'uuid': uuids.migration3,
},
# non in-progress resize
{
'id': 5,
'source_node': 'node10',
'dest_node': 'node20',
'source_compute': 'compute10',
'dest_compute': 'compute20',
'dest_host': '5.6.7.8',
'status': 'error',
'instance_uuid': uuids.instance2,
'old_instance_type_id': 5,
'new_instance_type_id': 6,
'migration_type': 'resize',
'hidden': False,
'memory_total': 456789,
'memory_processed': 56789,
'memory_remaining': 45000,
'disk_total': 96789,
'disk_processed': 6789,
'disk_remaining': 96000,
'created_at': datetime.datetime(2013, 10, 22, 13, 42, 2),
'updated_at': datetime.datetime(2013, 10, 22, 13, 42, 2),
'deleted_at': None,
'deleted': False,
'uuid': uuids.migration4,
}
]
migrations_obj = base.obj_make_list(
'fake-context',
objects.MigrationList(),
objects.Migration,
fake_migrations)
class FakeRequest(object):
environ = {"nova.context": context.RequestContext('fake_user', 'fake',
is_admin=True)}
GET = {}
class MigrationsTestCaseV21(test.NoDBTestCase):
migrations = migrations_v21
def _migrations_output(self):
return self.controller._output(self.req, migrations_obj)
def setUp(self):
"""Run before each test."""
super(MigrationsTestCaseV21, self).setUp()
self.controller = self.migrations.MigrationsController()
self.req = fakes.HTTPRequest.blank('', use_admin_context=True)
self.context = self.req.environ['nova.context']
def test_index(self):
migrations_in_progress = {'migrations': self._migrations_output()}
for mig in migrations_in_progress['migrations']:
self.assertIn('id', mig)
self.assertNotIn('deleted', mig)
self.assertNotIn('deleted_at', mig)
self.assertNotIn('links', mig)
filters = {'host': 'host1', 'status': 'migrating',
'instance_uuid': uuids.instance1,
'source_compute': 'host1', 'hidden': '0',
'migration_type': 'resize'}
# python-novaclient actually supports sending this even though it's
# not used in the DB API layer and is totally useless. This lets us,
# however, test that additionalProperties=True allows it.
unknown_filter = {'cell_name': 'ChildCell'}
self.req.GET.update(filters)
self.req.GET.update(unknown_filter)
with mock.patch.object(self.controller.compute_api,
'get_migrations',
return_value=migrations_obj) as (
mock_get_migrations
):
response = self.controller.index(self.req)
self.assertEqual(migrations_in_progress, response)
# Only with the filters, and the unknown filter is stripped
mock_get_migrations.assert_called_once_with(self.context, filters)
def test_index_query_allow_negative_int_as_string(self):
migrations = {'migrations': self._migrations_output()}
filters = ['host', 'status', 'cell_name', 'instance_uuid',
'source_compute', 'hidden', 'migration_type']
with mock.patch.object(self.controller.compute_api,
'get_migrations',
return_value=migrations_obj):
for fl in filters:
req = fakes.HTTPRequest.blank('/os-migrations',
use_admin_context=True,
query_string='%s=-1' % fl)
response = self.controller.index(req)
self.assertEqual(migrations, response)
def test_index_query_duplicate_query_parameters(self):
migrations = {'migrations': self._migrations_output()}
params = {'host': 'host1', 'status': 'migrating',
'cell_name': 'ChildCell', 'instance_uuid': uuids.instance1,
'source_compute': 'host1', 'hidden': '0',
'migration_type': 'resize'}
with mock.patch.object(self.controller.compute_api,
'get_migrations',
return_value=migrations_obj):
for k, v in params.items():
req = fakes.HTTPRequest.blank(
'/os-migrations', use_admin_context=True,
query_string='%s=%s&%s=%s' % (k, v, k, v))
response = self.controller.index(req)
self.assertEqual(migrations, response)
class MigrationsTestCaseV223(MigrationsTestCaseV21):
wsgi_api_version = '2.23'
def setUp(self):
"""Run before each test."""
super(MigrationsTestCaseV223, self).setUp()
self.req = fakes.HTTPRequest.blank(
'', version=self.wsgi_api_version, use_admin_context=True)
def test_index(self):
migrations = {'migrations': self.controller._output(
self.req, migrations_obj, True)}
for i, mig in enumerate(migrations['migrations']):
# first item is in-progress live migration
if i == 0:
self.assertIn('links', mig)
else:
self.assertNotIn('links', mig)
self.assertIn('migration_type', mig)
self.assertIn('id', mig)
self.assertNotIn('deleted', mig)
self.assertNotIn('deleted_at', mig)
with mock.patch.object(self.controller.compute_api,
'get_migrations') as m_get:
m_get.return_value = migrations_obj
response = self.controller.index(self.req)
self.assertEqual(migrations, response)
self.assertIn('links', response['migrations'][0])
self.assertIn('migration_type', response['migrations'][0])
class MigrationsTestCaseV259(MigrationsTestCaseV223):
wsgi_api_version = '2.59'
def test_index(self):
migrations = {'migrations': self.controller._output(
self.req, migrations_obj, True, True)}
for i, mig in enumerate(migrations['migrations']):
# first item is in-progress live migration
if i == 0:
self.assertIn('links', mig)
else:
self.assertNotIn('links', mig)
self.assertIn('migration_type', mig)
self.assertIn('id', mig)
self.assertIn('uuid', mig)
self.assertNotIn('deleted', mig)
self.assertNotIn('deleted_at', mig)
with mock.patch.object(self.controller.compute_api,
'get_migrations_sorted') as m_get:
m_get.return_value = migrations_obj
response = self.controller.index(self.req)
self.assertEqual(migrations, response)
self.assertIn('links', response['migrations'][0])
self.assertIn('migration_type', response['migrations'][0])
@mock.patch('nova.compute.api.API.get_migrations_sorted')
def test_index_with_invalid_marker(self, mock_migrations_get):
"""Tests detail paging with an invalid marker (not found)."""
mock_migrations_get.side_effect = exception.MarkerNotFound(
marker=uuids.invalid_marker)
req = fakes.HTTPRequest.blank(
'/os-migrations?marker=%s' % uuids.invalid_marker,
version=self.wsgi_api_version, use_admin_context=True)
e = self.assertRaises(exc.HTTPBadRequest,
self.controller.index, req)
self.assertEqual(
"Marker %s could not be found." % uuids.invalid_marker,
six.text_type(e))
def test_index_with_invalid_limit(self):
"""Tests detail paging with an invalid limit."""
req = fakes.HTTPRequest.blank(
'/os-migrations?limit=x', version=self.wsgi_api_version,
use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.index, req)
req = fakes.HTTPRequest.blank(
'/os-migrations?limit=-1', version=self.wsgi_api_version,
use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_with_invalid_changes_since(self):
"""Tests detail paging with an invalid changes-since value."""
req = fakes.HTTPRequest.blank(
'/os-migrations?changes-since=wrong_time',
version=self.wsgi_api_version, use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_with_unknown_query_param(self):
"""Tests detail paging with an unknown query parameter."""
req = fakes.HTTPRequest.blank(
'/os-migrations?foo=bar',
version=self.wsgi_api_version, use_admin_context=True)
ex = self.assertRaises(exception.ValidationError,
self.controller.index, req)
self.assertIn('Additional properties are not allowed',
six.text_type(ex))
@mock.patch('nova.compute.api.API.get_migrations',
return_value=objects.MigrationList())
def test_index_with_changes_since_old_microversion(self, get_migrations):
"""Tests that the changes-since query parameter is ignored before
microversion 2.59.
"""
# Also use a valid filter (instance_uuid) to make sure only
# changes-since is removed.
req = fakes.HTTPRequest.blank(
'/os-migrations?changes-since=2018-01-10T16:59:24.138939&'
'instance_uuid=%s' % uuids.instance_uuid,
version='2.58', use_admin_context=True)
result = self.controller.index(req)
self.assertEqual({'migrations': []}, result)
get_migrations.assert_called_once_with(
req.environ['nova.context'],
{'instance_uuid': uuids.instance_uuid})
class MigrationTestCaseV266(MigrationsTestCaseV259):
wsgi_api_version = '2.66'
def test_index_with_invalid_changes_before(self):
"""Tests detail paging with an invalid changes-before value."""
req = fakes.HTTPRequest.blank(
'/os-migrations?changes-before=wrong_time',
version=self.wsgi_api_version, use_admin_context=True)
self.assertRaises(exception.ValidationError,
self.controller.index, req)
def test_index_with_changes_before_old_microversion_failed(self):
"""Tests that the changes-before query parameter is an error before
microversion 2.66.
"""
# Also use a valid filter (instance_uuid) to make sure
# changes-before is an additional property.
req = fakes.HTTPRequest.blank(
'/os-migrations?changes-before=2018-01-10T16:59:24.138939&'
'instance_uuid=%s' % uuids.instance_uuid,
version='2.65', use_admin_context=True)
ex = self.assertRaises(exception.ValidationError,
self.controller.index, req)
self.assertIn('Additional properties are not allowed',
six.text_type(ex))
@mock.patch('nova.compute.api.API.get_migrations',
return_value=objects.MigrationList())
def test_index_with_changes_before_old_microversion(self, get_migrations):
"""Tests that the changes-before query parameter is ignored before
microversion 2.59.
"""
# Also use a valid filter (instance_uuid) to make sure only
# changes-before is removed.
req = fakes.HTTPRequest.blank(
'/os-migrations?changes-before=2018-01-10T16:59:24.138939&'
'instance_uuid=%s' % uuids.instance_uuid,
version='2.58', use_admin_context=True)
result = self.controller.index(req)
self.assertEqual({'migrations': []}, result)
get_migrations.assert_called_once_with(
req.environ['nova.context'],
{'instance_uuid': uuids.instance_uuid})
class MigrationsPolicyEnforcement(test.NoDBTestCase):
def setUp(self):
super(MigrationsPolicyEnforcement, self).setUp()
self.controller = migrations_v21.MigrationsController()
self.req = fakes.HTTPRequest.blank('')
def test_list_policy_failed(self):
rule_name = "os_compute_api:os-migrations:index"
self.policy.set_rules({rule_name: "project_id:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.index, self.req)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
class MigrationsPolicyEnforcementV223(MigrationsPolicyEnforcement):
wsgi_api_version = '2.23'
def setUp(self):
super(MigrationsPolicyEnforcementV223, self).setUp()
self.req = fakes.HTTPRequest.blank('', version=self.wsgi_api_version)
class MigrationsPolicyEnforcementV259(MigrationsPolicyEnforcementV223):
wsgi_api_version = '2.59'
| apache-2.0 | -8,953,287,839,083,142,000 | 38.49061 | 78 | 0.588599 | false |
sofiane87/lasagne-GAN | dcgan/dcgan_celeba2.py | 1 | 9831 | from __future__ import print_function
from keras.datasets import mnist
from keras.layers import Input, Dense, Reshape, Flatten, Dropout
from keras.layers import BatchNormalization, Activation, ZeroPadding2D, Cropping2D
from keras.layers.advanced_activations import LeakyReLU
from keras.layers.convolutional import UpSampling2D, Conv2D, Conv2DTranspose as Deconv
from keras.models import Sequential, Model
from keras.optimizers import Adam, SGD, RMSprop
import matplotlib.pyplot as plt
import sys
import os
import numpy as np
class DCGAN():
def __init__(self):
self.img_rows = 64
self.img_cols = 64
self.channels = 3
self.save_img_folder = 'dcgan/images/'
optimizer = Adam(0.0002)
optimizer_dis = SGD(0.0002)
self.latent_dim = 100
# Build and compile the discriminator
self.discriminator = self.build_discriminator()
self.discriminator.compile(loss='binary_crossentropy',
optimizer=optimizer_dis,
metrics=['accuracy'])
# Build and compile the generator
self.generator = self.build_generator()
self.generator.compile(loss='binary_crossentropy', optimizer=optimizer)
# The generator takes noise as input and generated imgs
z = Input(shape=(100,))
img = self.generator(z)
# For the combined model we will only train the generator
self.discriminator.trainable = False
# The valid takes generated images as input and determines validity
valid = self.discriminator(img)
# The combined model (stacked generator and discriminator) takes
# noise as input => generates images => determines validity
self.combined = Model(z, valid)
self.combined.compile(loss='binary_crossentropy', optimizer=optimizer)
def build_generator(self):
noise_shape = (100,)
model = Sequential()
model.add(Dense(1024 * 4 * 4, input_shape=noise_shape))
model.add(Reshape((4, 4, 1024)))
model.add(Deconv(512,kernel_size=4,strides=2,padding="valid"))
model.add(Cropping2D(1))
model.add(BatchNormalization())
model.add(Activation("relu"))
model.add(Deconv(256,kernel_size=4,strides=2,padding="valid"))
model.add(Cropping2D(1))
model.add(BatchNormalization())
model.add(Activation("relu"))
model.add(Deconv(128,kernel_size=4,strides=2,padding="valid"))
model.add(Cropping2D(1))
model.add(BatchNormalization())
model.add(Activation("relu"))
model.add(Deconv(self.channels,kernel_size=4,strides=2,padding="valid"))
model.add(Cropping2D(1))
model.add(Activation("tanh"))
# # print(model.shape)
# model = LeakyReLU(alpha=0.2)(model)
# model = BatchNormalization()(model)
# model = Cropping2D(1)(model)
# model.add(BatchNormalization(momentum=0.8))
# model.add(UpSampling2D())
# model.add(Conv2D(256, kernel_size=3, padding="same"))
# model.add(Activation("relu"))
# model.add(BatchNormalization(momentum=0.8))
# model.add(UpSampling2D())
# model.add(Conv2D(128, kernel_size=3, padding="same"))
# model.add(Activation("relu"))
# model.add(BatchNormalization(momentum=0.8))
# model.add(UpSampling2D())
# model.add(Conv2D(64, kernel_size=3, padding="same"))
# model.add(Activation("relu"))
# model.add(BatchNormalization(momentum=0.8))
# model.add(UpSampling2D())
# model.add(Conv2D(32, kernel_size=3, padding="same"))
# model.add(Activation("relu"))
# model.add(BatchNormalization(momentum=0.8))
# model.add(Conv2D(3, kernel_size=3, padding="same"))
# model.add(Activation("tanh"))
model.summary()
noise = Input(shape=noise_shape)
img = model(noise)
return Model(noise, img)
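    # Quick shape sanity check (a sketch, assuming the model above built
    # cleanly): 100-d noise should map to 64x64x3 images in [-1, 1] because
    # of the final tanh. Kept as a comment so importing this module stays
    # side-effect free.
    #
    #     gan = DCGAN()
    #     noise = np.random.normal(0, 1, (2, 100))
    #     fake = gan.generator.predict(noise)
    #     assert fake.shape == (2, 64, 64, 3)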
def build_discriminator(self):
img_shape = (self.img_rows, self.img_cols, self.channels)
model = Sequential()
model.add(ZeroPadding2D(2,input_shape=img_shape))
model.add(Conv2D(128, kernel_size=5, strides=2, padding="valid"))
model.add(BatchNormalization(momentum=0.8))
model.add(LeakyReLU(alpha=0.2))
model.add(ZeroPadding2D(2))
model.add(Conv2D(256, kernel_size=5, strides=2, padding="valid"))
model.add(BatchNormalization(momentum=0.8))
model.add(LeakyReLU(alpha=0.2))
model.add(ZeroPadding2D(2))
model.add(Conv2D(512, kernel_size=5, strides=2, padding="valid"))
model.add(BatchNormalization(momentum=0.8))
model.add(LeakyReLU(alpha=0.2))
# model.add(ZeroPadding2D(2))
# model.add(Conv2D(1024, kernel_size=5, strides=2, padding="valid"))
# model.add(BatchNormalization(momentum=0.8))
# model.add(LeakyReLU(alpha=0.2))
model.add(Flatten())
model.add(Dense(1, activation='sigmoid'))
model.summary()
img = Input(shape=img_shape)
validity = model(img)
return Model(img, validity)
def train(self, epochs, batch_size=128, save_interval=50):
# Load the dataset
X_train = self.load_data()
half_batch = int(batch_size / 2)
for epoch in range(epochs):
# ---------------------
# Train Discriminator
# ---------------------
# Select a random half batch of images
idx = np.random.randint(0, X_train.shape[0], half_batch)
imgs = X_train[idx]
# Sample noise and generate a half batch of new images
noise = np.random.normal(0, 1, (half_batch, 100))
            gen_imgs = self.generator.predict(noise)
# Train the discriminator (real classified as ones and generated as zeros)
d_loss_real = self.discriminator.train_on_batch(imgs, np.ones((half_batch, 1)))
d_loss_fake = self.discriminator.train_on_batch(gen_imgs, np.zeros((half_batch, 1)))
d_loss = 0.5 * np.add(d_loss_real, d_loss_fake)
# ---------------------
# Train Generator
# ---------------------
noise = np.random.normal(0, 1, (batch_size, 100))
# Train the generator (wants discriminator to mistake images as real)
g_loss = self.combined.train_on_batch(noise, np.ones((batch_size, 1)))
# Plot the progress
print ("%d [D loss: %f, acc.: %.2f%%] [G loss: %f]" % (epoch, d_loss[0], 100*d_loss[1], g_loss))
# If at save interval => save generated image samples
if epoch % save_interval == 0:
idx = np.random.randint(0, X_train.shape[0], batch_size)
imgs = X_train[idx]
self.save_imgs(epoch,imgs)
def save_imgs(self, epoch,imgs):
if not(os.path.exists(self.save_img_folder)):
os.makedirs(self.save_img_folder)
r, c = 5, 5
z = np.random.normal(size=(25, self.latent_dim))
gen_imgs = self.generator.predict(z)
gen_imgs = gen_imgs * 0.5 + 0.5
# z_imgs = self.encoder.predict(imgs)
# gen_enc_imgs = self.generator.predict(z_imgs)
# gen_enc_imgs = 0.5 * gen_enc_imgs + 0.5
fig, axs = plt.subplots(r, c)
cnt = 0
for i in range(r):
for j in range(c):
self.plot(axs[i,j],gen_imgs[cnt, :,:,:].squeeze())
cnt += 1
print('----- Saving generated -----')
if isinstance(epoch, str):
fig.savefig(self.save_img_folder + "mnist_{}.png".format(epoch))
else:
fig.savefig(self.save_img_folder + "mnist_%d.png" % epoch)
plt.close()
# fig, axs = plt.subplots(r, c)
# cnt = 0
# for i in range(r):
# for j in range(c):
# self.plot(axs[i,j],gen_enc_imgs[cnt, :,:,:].squeeze())
# cnt += 1
# print('----- Saving encoded -----')
# if isinstance(epoch, str):
# fig.savefig(self.save_img_folder + "mnist_{}_enc.png".format(epoch))
# else :
# fig.savefig(self.save_img_folder + "mnist_%d_enc.png" % epoch)
# plt.close()
fig, axs = plt.subplots(r, c)
cnt = 0
imgs = imgs * 0.5 + 0.5
for i in range(r):
for j in range(c):
self.plot(axs[i,j],imgs[cnt, :,:,:].squeeze())
cnt += 1
print('----- Saving real -----')
if isinstance(epoch, str):
fig.savefig(self.save_img_folder + "mnist_{}_real.png".format(epoch))
else :
fig.savefig(self.save_img_folder + "mnist_%d_real.png" % epoch)
plt.close()
def load_data(self):
        self.dataPath = r'D:\Code\data\sceleba.npy'  # raw string keeps the backslashes literal
print('----- Loading CelebA -------')
X_train = np.load(self.dataPath)
X_train = X_train.transpose([0,2,3,1])
X_train = (X_train.astype(np.float32) - 0.5) / 0.5
print('CelebA shape:', X_train.shape, X_train.min(), X_train.max())
print('------- CelebA loaded -------')
return X_train
def plot(self, fig, img):
        if self.channels == 1:
            # self.cmap is never defined on this class; fall back to 'gray'
            fig.imshow(img, cmap=getattr(self, 'cmap', 'gray'))
            fig.axis('off')
else:
fig.imshow(img)
fig.axis('off')
if __name__ == '__main__':
dcgan = DCGAN()
dcgan.train(epochs=50001, batch_size=64, save_interval=100)
| mit | 7,477,022,988,520,904,000 | 33.254355 | 108 | 0.556912 | false |
GabrielNicolasAvellaneda/chemlab | chemlab/mviewer/QIPythonWidget.py | 6 | 3102 | import atexit
from PyQt4.QtCore import QTimer
from IPython.kernel.zmq.kernelapp import IPKernelApp
from IPython.lib.kernel import find_connection_file
from IPython.qt.inprocess import QtInProcessKernelManager
from IPython.qt.console.rich_ipython_widget import RichIPythonWidget
from IPython.config.application import catch_config_error
from IPython.lib import guisupport
class QIPythonWidget(RichIPythonWidget):
def __init__(self):
super(QIPythonWidget, self).__init__()
def initialize(self):
kernel_manager = QtInProcessKernelManager()
kernel_manager.start_kernel()
kernel = kernel_manager.kernel
kernel.gui = 'qt4'
kernel_client = kernel_manager.client()
kernel_client.start_channels()
app = guisupport.get_app_qt4()
def stop():
kernel_client.stop_channels()
kernel_manager.shutdown_kernel()
app.exit()
self.kernel = kernel
self.kernel_manager = kernel_manager
self.kernel_client = kernel_client
self.exit_requested.connect(stop)
def get_user_namespace(self):
return self.kernel.shell.user_ns
def run_cell(self, *args, **kwargs):
return self.kernel.shell.run_cell(*args, **kwargs)
def ex(self, *args, **kwargs):
return self.kernel.shell.ex(*args, **kwargs)
def run_line_magic(self, *args, **kwargs):
return self.kernel.shell.run_line_magic(*args, **kwargs)
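    # Minimal embedding sketch (an assumption, not taken from chemlab itself):
    # the widget needs a running Qt event loop, e.g. via PyQt4.
    #
    #     from PyQt4 import QtGui
    #     app = QtGui.QApplication([])
    #     console = QIPythonWidget()
    #     console.initialize()
    #     console.show()
    #     console.ex("x = 41 + 1")   # execute code in the embedded kernel
    #     app.exec_()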
# class KernelApp(IPKernelApp):
# @catch_config_error
# def initialize(self, argv=[]):
# super(QIPythonWidget.KernelApp, self).initialize(argv)
# self.kernel.eventloop = self.loop_qt4_nonblocking
# self.kernel.start()
# self.start()
# def loop_qt4_nonblocking(self, kernel):
# kernel.timer = QTimer()
# kernel.timer.timeout.connect(kernel.do_one_iteration)
# kernel.timer.start(1000*kernel._poll_interval)
# def get_connection_file(self):
# return self.connection_file
# def get_user_namespace(self):
# return self.kernel.shell.user_ns
# def __init__(self, parent=None, colors='linux', instance_args=[]):
# super(QIPythonWidget, self).__init__()
# self.app = self.KernelApp.instance(argv=instance_args)
# def initialize(self, colors='linux'):
# self.app.initialize()
# self.set_default_style(colors=colors)
# self.connect_kernel(self.app.get_connection_file())
# def connect_kernel(self, conn, heartbeat=False):
# km = QtKernelManager(connection_file=find_connection_file(conn))
# km.load_connection_file()
# km.kernel.start_channels(hb=heartbeat)
# self.kernel_manager = km
# atexit.register(self.kernel_manager.cleanup_connection_file)
# def get_user_namespace(self):
# return self.app.get_user_namespace()
# def run_cell(self, *args, **kwargs):
# return self.app.shell.run_cell(*args, **kwargs) | gpl-3.0 | -6,670,682,393,399,999,000 | 32.365591 | 74 | 0.628627 | false |
sbuss/voteswap | lib/django/contrib/gis/db/backends/base/models.py | 10 | 7113 | import re
from django.contrib.gis import gdal
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class SpatialRefSysMixin(object):
"""
The SpatialRefSysMixin is a class used by the database-dependent
SpatialRefSys objects to reduce redundant code.
"""
# For pulling out the spheroid from the spatial reference string. This
# regular expression is used only if the user does not have GDAL installed.
# TODO: Flattening not used in all ellipsoids, could also be a minor axis,
# or 'b' parameter.
spheroid_regex = re.compile(r'.+SPHEROID\[\"(?P<name>.+)\",(?P<major>\d+(\.\d+)?),(?P<flattening>\d{3}\.\d+),')
# For pulling out the units on platforms w/o GDAL installed.
# TODO: Figure out how to pull out angular units of projected coordinate system and
# fix for LOCAL_CS types. GDAL should be highly recommended for performing
# distance queries.
units_regex = re.compile(
r'.+UNIT ?\["(?P<unit_name>[\w \'\(\)]+)", ?(?P<unit>[\d\.]+)'
r'(,AUTHORITY\["(?P<unit_auth_name>[\w \'\(\)]+)",'
r'"(?P<unit_auth_val>\d+)"\])?\]([\w ]+)?(,'
r'AUTHORITY\["(?P<auth_name>[\w \'\(\)]+)","(?P<auth_val>\d+)"\])?\]$'
)
@property
def srs(self):
"""
Returns a GDAL SpatialReference object, if GDAL is installed.
"""
if gdal.HAS_GDAL:
# TODO: Is caching really necessary here? Is complexity worth it?
if hasattr(self, '_srs'):
# Returning a clone of the cached SpatialReference object.
return self._srs.clone()
else:
# Attempting to cache a SpatialReference object.
# Trying to get from WKT first.
try:
self._srs = gdal.SpatialReference(self.wkt)
return self.srs
except Exception as e:
msg = e
try:
self._srs = gdal.SpatialReference(self.proj4text)
return self.srs
except Exception as e:
msg = e
raise Exception('Could not get OSR SpatialReference from WKT: %s\nError:\n%s' % (self.wkt, msg))
else:
raise Exception('GDAL is not installed.')
@property
def ellipsoid(self):
"""
        Returns a tuple of the ellipsoid parameters: with GDAL this is
        (semimajor axis, semiminor axis, inverse flattening); the regex
        fallback returns (semimajor axis, inverse flattening) only.
"""
if gdal.HAS_GDAL:
return self.srs.ellipsoid
else:
m = self.spheroid_regex.match(self.wkt)
if m:
return (float(m.group('major')), float(m.group('flattening')))
else:
return None
@property
def name(self):
"Returns the projection name."
return self.srs.name
@property
def spheroid(self):
"Returns the spheroid name for this spatial reference."
return self.srs['spheroid']
@property
def datum(self):
"Returns the datum for this spatial reference."
return self.srs['datum']
@property
def projected(self):
"Is this Spatial Reference projected?"
if gdal.HAS_GDAL:
return self.srs.projected
else:
return self.wkt.startswith('PROJCS')
@property
def local(self):
"Is this Spatial Reference local?"
if gdal.HAS_GDAL:
return self.srs.local
else:
return self.wkt.startswith('LOCAL_CS')
@property
def geographic(self):
"Is this Spatial Reference geographic?"
if gdal.HAS_GDAL:
return self.srs.geographic
else:
return self.wkt.startswith('GEOGCS')
@property
def linear_name(self):
"Returns the linear units name."
if gdal.HAS_GDAL:
return self.srs.linear_name
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def linear_units(self):
"Returns the linear units."
if gdal.HAS_GDAL:
return self.srs.linear_units
elif self.geographic:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def angular_name(self):
"Returns the name of the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_name
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit_name')
@property
def angular_units(self):
"Returns the angular units."
if gdal.HAS_GDAL:
return self.srs.angular_units
elif self.projected:
return None
else:
m = self.units_regex.match(self.wkt)
return m.group('unit')
@property
def units(self):
"Returns a tuple of the units and the name."
if self.projected or self.local:
return (self.linear_units, self.linear_name)
elif self.geographic:
return (self.angular_units, self.angular_name)
else:
return (None, None)
@classmethod
def get_units(cls, wkt):
"""
Class method used by GeometryField on initialization to
retrieve the units on the given WKT, without having to use
any of the database fields.
"""
if gdal.HAS_GDAL:
return gdal.SpatialReference(wkt).units
else:
m = cls.units_regex.match(wkt)
return m.group('unit'), m.group('unit_name')
@classmethod
def get_spheroid(cls, wkt, string=True):
"""
Class method used by GeometryField on initialization to
retrieve the `SPHEROID[..]` parameters from the given WKT.
"""
if gdal.HAS_GDAL:
srs = gdal.SpatialReference(wkt)
sphere_params = srs.ellipsoid
sphere_name = srs['spheroid']
else:
m = cls.spheroid_regex.match(wkt)
if m:
sphere_params = (float(m.group('major')), float(m.group('flattening')))
sphere_name = m.group('name')
else:
return None
if not string:
return sphere_name, sphere_params
else:
# `string` parameter used to place in format acceptable by PostGIS
if len(sphere_params) == 3:
radius, flattening = sphere_params[0], sphere_params[2]
else:
radius, flattening = sphere_params
return 'SPHEROID["%s",%s,%s]' % (sphere_name, radius, flattening)
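    # Illustrative fallback behaviour (hypothetical WGS 84 WKT, assuming GDAL
    # is unavailable so the regex branch runs). Note the pattern requires a
    # parameter after the flattening, e.g. an AUTHORITY clause:
    #
    #     wkt = ('GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",'
    #            '6378137,298.257223563,AUTHORITY["EPSG","7030"]]]]')
    #     SpatialRefSysMixin.get_spheroid(wkt)
    #     # -> 'SPHEROID["WGS 84",6378137.0,298.257223563]'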
def __str__(self):
"""
Returns the string representation. If GDAL is installed,
it will be 'pretty' OGC WKT.
"""
try:
return six.text_type(self.srs)
except Exception:
return six.text_type(self.wkt)
| mit | -4,119,974,290,265,707,000 | 31.62844 | 115 | 0.557149 | false |
bccp/nbodykit | nbodykit/io/tests/test_fits.py | 2 | 2943 | from runtests.mpi import MPITest
from nbodykit.io.fits import FITSFile
import os
import numpy
import tempfile
import pickle
import contextlib
import pytest
try: import fitsio
except ImportError: fitsio = None
@contextlib.contextmanager
def temporary_data(data='table'):
"""
Write some temporary FITS data to disk
"""
try:
# generate data
if data == 'table':
dset = numpy.empty(1024, dtype=[('Position', ('f8', 3)), ('Mass', 'f8')])
dset['Position'] = numpy.random.random(size=(1024, 3))
dset['Mass'] = numpy.random.random(size=1024)
else:
dset = numpy.random.random(size=(1024, 3))
# write to file
tmpdir = tempfile.gettempdir()
tmpfile = os.path.join(tmpdir, 'nbkit_tmp_data.fits')
fitsio.write(tmpfile, dset, extname='Catalog')
yield (dset, tmpfile)
except:
raise
finally:
os.unlink(tmpfile)
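# Usage sketch for the fixture above (requires fitsio; illustrative only):
#
#     with temporary_data() as (data, tmpfile):
#         f = FITSFile(tmpfile)
#         assert f.size == len(data)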
@MPITest([1])
@pytest.mark.skipif(fitsio is None, reason="fitsio is not installed")
def test_data(comm):
with temporary_data() as (data, tmpfile):
# read
f = FITSFile(tmpfile)
# check columns
cols = ['Mass', 'Position']
assert(all(col in cols for col in f.columns))
# make sure data is the same
for col in cols:
numpy.testing.assert_almost_equal(data[col], f[col][:])
# try a slice
data2 = f.read(cols, 0, 512, 1)
for col in cols:
numpy.testing.assert_almost_equal(data[col][:512], data2[col])
# check size
numpy.testing.assert_equal(f.size, 1024)
@MPITest([1])
@pytest.mark.skipif(fitsio is None, reason="fitsio is not installed")
def test_string_ext(comm):
with temporary_data() as (data, tmpfile):
# read
f = FITSFile(tmpfile, ext='Catalog')
assert(all(col in ['Mass', 'Position'] for col in f.columns))
assert(f.size == 1024)
# read
f = FITSFile(tmpfile, ext=1)
assert(all(col in ['Mass', 'Position'] for col in f.columns))
assert(f.size == 1024)
# wrong ext
with pytest.raises(ValueError):
f = FITSFile(tmpfile, ext='WrongName')
@MPITest([1])
@pytest.mark.skipif(fitsio is None, reason="fitsio is not installed")
def test_wrong_ext(comm):
with temporary_data() as (data, tmpfile):
# no binary table data at 0
with pytest.raises(ValueError):
f = FITSFile(tmpfile, ext=0)
# invalid ext number
with pytest.raises(ValueError):
f = FITSFile(tmpfile, ext=2)
@MPITest([1])
@pytest.mark.skipif(fitsio is None, "fitsio is not installed")
def test_no_tabular_data(comm):
with temporary_data(data='image') as (data, tmpfile):
# no binary table data
with pytest.raises(ValueError):
f = FITSFile(tmpfile)
with pytest.raises(ValueError):
f = FITSFile(tmpfile, ext=1)
| gpl-3.0 | -2,176,975,253,572,595,500 | 25.513514 | 85 | 0.599728 | false |
Yaco-Sistemas/yith-library-server | yithlibraryserver/contributions/models.py | 1 | 1714 | # Yith Library Server is a password storage server.
# Copyright (C) 2013 Lorenzo Gil Sanchez <[email protected]>
#
# This file is part of Yith Library Server.
#
# Yith Library Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Yith Library Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Yith Library Server. If not, see <http://www.gnu.org/licenses/>.
def include_sticker(amount):
return amount > 1
def create_donation(request, data):
amount = int(data['amount'])
donation = {
'amount': amount,
'firstname': data['firstname'],
'lastname': data['lastname'],
'city': data['city'],
'country': data['country'],
'state': data['state'],
'street': data['street'],
'zip': data['zip'],
'email': data['email'],
'creation': request.datetime_service.utcnow(),
}
if include_sticker(amount):
donation['send_sticker'] = not ('no-sticker' in data)
else:
donation['send_sticker'] = False
if request.user is not None:
donation['user'] = request.user['_id']
else:
donation['user'] = None
_id = request.db.donations.insert(donation, safe=True)
donation['_id'] = _id
return donation
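# Illustrative input for create_donation (hypothetical values; every key
# below is read by the function, and request must provide the
# datetime_service, db and user attributes used above):
#
#     data = {
#         'amount': '25', 'firstname': 'Jane', 'lastname': 'Doe',
#         'city': 'Springfield', 'country': 'US', 'state': 'IL',
#         'street': '1 Main St', 'zip': '62701', 'email': '[email protected]',
#     }
#     donation = create_donation(request, data)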
| agpl-3.0 | -968,914,231,171,617,800 | 33.28 | 78 | 0.661027 | false |
MrLeeh/pdftools | setup.py | 1 | 1912 | #!/usr/bin/env python
"""
Setupfile for pdftools.
:author: Stefan Lehmann <[email protected]>
:license: MIT, see license file or https://opensource.org/licenses/MIT
:created on 2018-04-14 20:40:27
:last modified by: Stefan Lehmann
:last modified time: 2018-04-14 21:03:58
"""
import ast
import io
import re
import os
from setuptools import setup, find_packages
def read(*names, **kwargs):
try:
with io.open(
os.path.join(os.path.dirname(__file__), *names),
encoding=kwargs.get("encoding", "utf8")
) as fp:
return fp.read()
except IOError:
return ''
def extract_version():
"""Extract the version from the package."""
# Regular expression for the version
_version_re = re.compile(r"__version__\s+=\s+(.*)")
with open("pdftools/__init__.py", "r") as f:
content = f.read()
version_match = _version_re.search(content)
version = str(ast.literal_eval(version_match.group(1)))
return version
setup(
name="pdftools",
version=extract_version(),
packages=find_packages(),
entry_points={"console_scripts": ["pdftools=pdftools._cli:main"]},
url="https://github.com/stlehmann/pdftools",
license="MIT",
author="Stefan Lehmann",
author_email="[email protected]",
description="A collection of convenience scripts for PDF manipulation, based on the PyPdf2 package",
long_description=read("README.md"),
long_description_content_type='text/markdown',
install_requires=["PyPdf2"],
maintainer="Stefan Lehmann",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3 :: Only",
"Topic :: Office/Business",
],
zip_safe=True,
)
| mit | 9,132,928,565,743,975,000 | 27.537313 | 104 | 0.635983 | false |
gantta/grovepi | sbsunit/boards/rasp_pi_legacy.py | 2 | 4169 | '''
Copyright 2014-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with the License. A copy of the License is located at
http://aws.amazon.com/apache2.0/
or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
Note: Other license terms may apply to certain, identified software files contained within or
distributed with the accompanying software if such terms are included in the directory containing
the accompanying software. Such other license terms will then apply in lieu of the terms of the
software license above.
'''
import RPi.GPIO as GPIO
from board import Board
from devices.legacy_flow_sensor import LegacyFlowSensor
from devices.legacy_sound_sensor import LegacySoundSensor
from devices.legacy_led import LegacyLED
from sbs.tools import Tools
from sbs.tools import RGB
from tornado import gen
from random import randint
import time
CYCLE_MESSAGES = 60
# The RaspPi board includes the GrovePi library and reads values from devices connected to a Raspberry Pi.
class LegacyRaspPi(Board):
# Initialize the board and all of the devices attached to the board.
lastMessage = { 'time':time.time(), 'message':0 }
def __init__(self, pins, thresholds):
super(LegacyRaspPi,self).__init__(pins, thresholds, "RPi")
GPIO.cleanup()
GPIO.setmode(GPIO.BOARD)
self.flow_sensor = LegacyFlowSensor(self.pins['sensors']['flow-sensor'])
self.sound_sensor = LegacySoundSensor(self.pins['sensors']['sound-sensor'], self.thresholds['sound'])
self.leds = {}
self.buttons = {}
# for button in pins['buttons']:
# if pins['buttons'][button]>0:
# self.buttons[button] = GroveButton(pins['buttons'][button], button)
for led in pins['led']:
if (pins['led'][led]>0):
self.leds[led] = LegacyLED(pins['led'][led], led)
self.print_to_screen('Simple Beer \n Service 4.0', RGB['orange'])
self.print_to_screen('IP Address: \n '+Tools.get_ip_address(), [0,128,64])
time.sleep(10)
#TODO: Make this section more dynamic, so any sensor can be automatically loaded.
def read_dht(self):
return "0"
def read_ultrasonic_ranger(self):
        return 0
def read_flow_sensor(self):
if self.flow_sensor.is_flowing():
self.print_to_screen("Beer is \n flowing!!", RGB['green'])
self.flow_sensor.read()
return self.flow_sensor.get_flow_count()
def read_sound_sensor(self):
if self.sound_sensor.is_noisy():
self.leds["red"].on()
else:
self.leds["red"].off()
return self.sound_sensor.read()
def print_to_screen(self, message, rgb):
        Tools.log(message)
def turn_on_led(self, led):
self.leds[led].on()
def turn_off_led(self, led):
self.leds[led].off()
@gen.coroutine
def reset_wifi(self):
Tools.log("Reset Wifi")
#if self.buttons['reset-wifi'].is_down():
# self.print_to_screen("button pressed", [40,40,40])
def blink(self,led):
self.leds[led].blink()
# The clear function is run when the application halts.
def clear(self):
for led in self.leds:
self.leds[led].off()
def setHelloSBSScreen(self):
        current_time = time.time()
if (current_time-self.lastMessage['time']>CYCLE_MESSAGES):
self.lastMessage['message'] += 1
if (self.lastMessage['message']==5):
self.lastMessage['message']=0
self.print_to_screen(self.sbs_messages(self.lastMessage['message']),RGB['orange'])
self.lastMessage['time'] = current_time
else:
self.print_to_screen(self.sbs_messages(self.lastMessage['message']),RGB['orange'])
def reset(self):
self.flow_sensor.reset_flow_count()
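# Illustrative wiring for this board (hypothetical pin numbers and threshold,
# not taken from the original project). Entries set to 0 are skipped by the
# LED loop in __init__:
#
#     pins = {'sensors': {'flow-sensor': 7, 'sound-sensor': 11},
#             'buttons': {'reset-wifi': 0},
#             'led': {'red': 13, 'green': 0}}
#     thresholds = {'sound': 400}
#     board = LegacyRaspPi(pins, thresholds)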
| apache-2.0 | -2,160,852,300,227,973,600 | 36.9 | 266 | 0.655313 | false |
WangWenjun559/Weiss | classifier/feature.py | 2 | 5801 | """
This script extracts features from the query.
=============================================
Usage:
This file cannot run standalone, the functions will be used in other scripts,
such as "train.py" and "classify.py"
TODO([email protected]):
- Consider encoding issue
Author: Wenjun Wang
Date: June 28, 2015
"""
import nltk
import hashlib
import numpy as np
def stopword(stpfile):
"""Reads stopwords from a file and return a set of stopwords
"""
stopwords = set()
for line in open(stpfile):
stopwords.add(line.strip())
return stopwords
def parse_options(options=''):
"""parse feature options, i.e. which types of features need to extract
Arg:
options: a string of feature options in the format like: '-uni -pos2'
Return:
feature_arg: a dictionary of feature options, key: feature name, value: True/False
"""
argv = options.split()
feature_arg = {}
feature_arg['unigram'] = False
feature_arg['POS'] = False
feature_arg['POSbigram'] = False
feature_arg['stem'] = False
feature_arg['stopword_removal'] = False
for i in xrange(0,len(argv)):
if argv[i].lower()[:4] == '-uni':
feature_arg['unigram'] = True
if argv[i].lower()[:6] == '-pos2':
feature_arg['POSbigram'] = True
feature_arg['POS'] = True
if argv[i].lower()[:6] == '-stprm':
feature_arg['stopword_removal'] = True
if argv[i].lower() == '-stem':
feature_arg['stem'] = True
return feature_arg
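# Quick illustration of the option-string format:
#
#     parse_options('-uni -pos2')
#     # -> {'unigram': True, 'POS': True, 'POSbigram': True,
#     #     'stem': False, 'stopword_removal': False}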
def feature_generator(query, stopwords, feature_arg):
"""Generate a feature set from the query
Args:
query: the query need to extract features from
stopwords: a set of stopwords
feature_arg: returned by parse_options,
contains info of which types of features need to be extract
Return:
features: a set of features
"""
features = set()
token_list = nltk.word_tokenize(query.lower())
if feature_arg['POS'] == True:
token_list = nltk.pos_tag(token_list)
if feature_arg['stopword_removal'] == True:
token_list = _stopword_removal(token_list, stopwords)
if feature_arg['stem'] == True:
token_list = _stemming(token_list)
if feature_arg['unigram'] == True:
_ngram(1, token_list, features)
if feature_arg['POSbigram'] == True:
_POSngram(2, token_list, features)
return features
def _ngram(n, token_list, features):
"""Extract ngram features
Currently, only implements unigram
This function is called by feature_generator
Args:
n: n=1 unigram, n=2 bigram, n=3 trigram
token_list: a list of tokens of a query
features: feature set need to update
"""
if n == 1:
for t in token_list:
if isinstance(t,tuple):
features |= set([t[0]])
elif isinstance(t,str):
features |= set([t])
def _POSngram(n, tag_list, features):
"""Extract POSngram features
Currently, only implements POSbigram
This function is called by feature_generator
Args:
n: n=1 POSunigram, n=2 POSbigram, n=3 POStrigram
tag_list: a list of (token, POStag) tuples of the query
features: feature set need to update
"""
features |= set(['START_'+tag_list[0][1]])
if n == 2:
for i in xrange(0,len(tag_list)-1):
features |= set([tag_list[i][1]+'_'+tag_list[i+1][1]])
features |= set([tag_list[-1][1]+'_END'])
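# Illustrative POS-bigram extraction (hypothetical tags):
#
#     features = set()
#     _POSngram(2, [('good', 'JJ'), ('movies', 'NNS')], features)
#     # features now holds {'START_JJ', 'JJ_NNS', 'NNS_END'}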
def _stemming(token_list):
"""Stem all words in the list
Arg:
token_list: a list of tokens of a query
OR a list of (token, POStag) tuples of the query
Return:
stemmed_tokens: a list of stemmed tokens of a query
OR a list of (stemmed_token, POStag) tuples of the query
"""
snowball = nltk.SnowballStemmer("english")
if isinstance(token_list[0],str):
stemmed_tokens = [snowball.stem(t) for t in token_list]
elif isinstance(token_list[0],tuple):
stemmed_tokens = [(snowball.stem(t[0]),t[1]) for t in token_list]
return stemmed_tokens
def _stopword_removal(token_list, stopwords):
"""Remove all stopwords in a sentence
Arg:
token_list: a list of tokens of a query
OR a list of (token, POStag) tuples of the query
Return:
clean_tokens: stopwords-removed version of original token_list
"""
clean_tokens = []
while len(token_list) > 0:
if isinstance(token_list[0],str):
target = token_list[0].lower()
elif isinstance(token_list[0],tuple):
target = token_list[0][0].lower()
if target in stopwords:
token_list.pop(0)
else:
clean_tokens.append(token_list.pop(0))
return clean_tokens
def hashit(text, dictionary_size=1000):
'''
Takes a sentence, tokenizes it, stems each word, and hashes each word
based on the dictionary size specified.
'''
stemmer = nltk.SnowballStemmer("english", ignore_stopwords=True)
tokenizer = nltk.tokenize.RegexpTokenizer(r'\w+')
tokens = tokenizer.tokenize(unicode(text, errors='ignore'))
x_i = [0] * dictionary_size
for token in tokens:
stemmed = stemmer.stem(token.lower())
if not stemmed in nltk.corpus.stopwords.words('english') and len(stemmed) > 1:
hasher = hashlib.sha1()
hasher.update(stemmed)
index = int(hasher.hexdigest(), 16) % dictionary_size
x_i[index] += 1
return x_i
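# Illustrative call (exact indices depend on the SHA-1 hash of each stem):
#
#     x = hashit('good movies are good', dictionary_size=10)
#     # x is a length-10 list of token counts; 'good' contributes 2 to its
#     # hashed index, 'movies' 1, and the stopword 'are' is dropped.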
def list2Vec(word_list):
'''
Converts a list into a numpy vector/matrix
'''
a = np.array(word_list)
return a
| apache-2.0 | -59,786,810,808,692,360 | 29.856383 | 90 | 0.595759 | false |
tchx84/sugar | src/jarabe/model/notifications.py | 2 | 3598 | # Copyright (C) 2008 One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import sys
import logging
import dbus
from sugar3 import dispatch
from jarabe import config
_DBUS_SERVICE = 'org.freedesktop.Notifications'
_DBUS_IFACE = 'org.freedesktop.Notifications'
_DBUS_PATH = '/org/freedesktop/Notifications'
_instance = None
class NotificationService(dbus.service.Object):
def __init__(self):
bus = dbus.SessionBus()
bus_name = dbus.service.BusName(_DBUS_SERVICE, bus=bus)
dbus.service.Object.__init__(self, bus_name, _DBUS_PATH)
self._notification_counter = 0
self.notification_received = dispatch.Signal()
self.notification_cancelled = dispatch.Signal()
@dbus.service.method(_DBUS_IFACE,
in_signature='susssava{sv}i', out_signature='u')
def Notify(self, app_name, replaces_id, app_icon, summary, body, actions,
hints, expire_timeout):
logging.debug('Received notification: %r', [app_name, replaces_id,
'<app_icon>', summary, body, actions, '<hints>',
expire_timeout])
if replaces_id > 0:
notification_id = replaces_id
else:
if self._notification_counter == sys.maxint:
self._notification_counter = 1
else:
self._notification_counter += 1
notification_id = self._notification_counter
self.notification_received.send(self,
app_name=app_name,
replaces_id=replaces_id,
app_icon=app_icon,
summary=summary,
body=body,
actions=actions,
hints=hints,
expire_timeout=expire_timeout)
return notification_id
@dbus.service.method(_DBUS_IFACE, in_signature='u', out_signature='')
def CloseNotification(self, notification_id):
self.notification_cancelled.send(self, notification_id=notification_id)
@dbus.service.method(_DBUS_IFACE, in_signature='', out_signature='as')
def GetCapabilities(self):
return []
@dbus.service.method(_DBUS_IFACE, in_signature='', out_signature='sss')
def GetServerInformation(self, name, vendor, version):
return 'Sugar Shell', 'Sugar', config.version
@dbus.service.signal(_DBUS_IFACE, signature='uu')
def NotificationClosed(self, notification_id, reason):
pass
@dbus.service.signal(_DBUS_IFACE, signature='us')
def ActionInvoked(self, notification_id, action_key):
pass
def get_service():
global _instance
if not _instance:
_instance = NotificationService()
return _instance
def init():
get_service()
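# Client-side sketch (assumes a running session bus; plain dbus-python calls,
# not part of this module):
#
#     import dbus
#     bus = dbus.SessionBus()
#     obj = bus.get_object('org.freedesktop.Notifications',
#                          '/org/freedesktop/Notifications')
#     iface = dbus.Interface(obj, 'org.freedesktop.Notifications')
#     iface.Notify('my-app', 0, '', 'Hello', 'Body text', [], {}, -1)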
| gpl-2.0 | 1,321,024,111,071,749,400 | 33.932039 | 79 | 0.61423 | false |
jordanemedlock/psychtruths | temboo/core/Library/Foursquare/Checkins/RecentCheckins.py | 5 | 4415 | # -*- coding: utf-8 -*-
###############################################################################
#
# RecentCheckins
# Returns a list of recent friends' check-ins.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class RecentCheckins(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the RecentCheckins Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(RecentCheckins, self).__init__(temboo_session, '/Library/Foursquare/Checkins/RecentCheckins')
def new_input_set(self):
return RecentCheckinsInputSet()
def _make_result_set(self, result, path):
return RecentCheckinsResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return RecentCheckinsChoreographyExecution(session, exec_id, path)
class RecentCheckinsInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the RecentCheckins
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AfterTimeStamp(self, value):
"""
Set the value of the AfterTimeStamp input for this Choreo. ((optional, integer) Seconds after which to look for check-ins, e.g. for looking for new check-ins since the last fetch.)
"""
super(RecentCheckinsInputSet, self)._set_input('AfterTimeStamp', value)
def set_Latitude(self, value):
"""
Set the value of the Latitude input for this Choreo. ((optional, decimal) The latitude point of the user's location.)
"""
super(RecentCheckinsInputSet, self)._set_input('Latitude', value)
def set_Limit(self, value):
"""
Set the value of the Limit input for this Choreo. ((optional, integer) Number of results to return, up to 100.)
"""
super(RecentCheckinsInputSet, self)._set_input('Limit', value)
def set_Longitude(self, value):
"""
Set the value of the Longitude input for this Choreo. ((optional, decimal) The longitude point of the user's location.)
"""
super(RecentCheckinsInputSet, self)._set_input('Longitude', value)
def set_OauthToken(self, value):
"""
Set the value of the OauthToken input for this Choreo. ((required, string) The FourSquare API Oauth token string.)
"""
super(RecentCheckinsInputSet, self)._set_input('OauthToken', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that response should be in. Can be set to xml or json. Defaults to json.)
"""
super(RecentCheckinsInputSet, self)._set_input('ResponseFormat', value)
class RecentCheckinsResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the RecentCheckins Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Foursquare. Corresponds to the ResponseFormat input. Defaults to JSON.)
"""
return self._output.get('Response', None)
class RecentCheckinsChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return RecentCheckinsResultSet(response, path)
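# A minimal usage sketch (not part of the generated bindings; the session
# credentials and token below are placeholders, and the execute call assumes
# the standard Temboo SDK pattern):
#
#     from temboo.core.session import TembooSession
#     session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#     choreo = RecentCheckins(session)
#     inputs = choreo.new_input_set()
#     inputs.set_OauthToken('FOURSQUARE_OAUTH_TOKEN')
#     results = choreo.execute_with_results(inputs)
#     print(results.get_Response())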
| apache-2.0 | -6,873,900,823,442,688,000 | 40.261682 | 188 | 0.674745 | false |
arskom/spyne | spyne/test/interop/test_pyramid.py | 2 | 2730 | # coding: utf-8
#
# spyne - Copyright (C) Spyne contributors.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
#
import unittest
from wsgiref.util import setup_testing_defaults
from wsgiref.validate import validator
from lxml import etree
from pyramid import testing
from pyramid.config import Configurator
from pyramid.request import Request
from spyne.protocol.soap import Soap11
from spyne.service import Service
from spyne.decorator import srpc
from spyne import Application
from spyne.model import Unicode, Integer, Iterable
from spyne.server.pyramid import PyramidApplication
class SpyneIntegrationTest(unittest.TestCase):
"""Tests for integration of Spyne into Pyramid view callable"""
class HelloWorldService(Service):
@srpc(Unicode, Integer, _returns=Iterable(Unicode))
def say_hello(name, times):
for i in range(times):
yield 'Hello, %s' % name
def setUp(self):
request = testing.DummyRequest()
self.config = testing.setUp(request=request)
def tearDown(self):
testing.tearDown()
def testGetWsdl(self):
"""Simple test for serving of WSDL by spyne through pyramid route"""
application = PyramidApplication(
Application([self.HelloWorldService],
tns='spyne.examples.hello',
in_protocol=Soap11(validator='lxml'),
out_protocol=Soap11()))
config = Configurator(settings={'debug_all': True})
config.add_route('home', '/')
config.add_view(application, route_name='home')
wsgi_app = validator(config.make_wsgi_app())
env = {
'SCRIPT_NAME': '',
'REQUEST_METHOD': 'GET',
'PATH_INFO': '/',
'QUERY_STRING': 'wsdl',
}
setup_testing_defaults(env)
request = Request(env)
resp = request.get_response(wsgi_app)
        self.assertTrue(resp.status.startswith("200 "))
        node = etree.XML(resp.body)  # raises an exception if not well-formed
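        # A follow-up sketch (not part of the original test) of driving the
        # same WSGI app with an actual SOAP call; the envelope is hand-written
        # against the 'spyne.examples.hello' namespace used above and is
        # illustrative only:
        #
        #     soap_body = (
        #         b'<senv:Envelope '
        #         b'xmlns:senv="http://schemas.xmlsoap.org/soap/envelope/" '
        #         b'xmlns:hel="spyne.examples.hello"><senv:Body>'
        #         b'<hel:say_hello><hel:name>World</hel:name>'
        #         b'<hel:times>2</hel:times></hel:say_hello>'
        #         b'</senv:Body></senv:Envelope>')
        #     post_env = {'REQUEST_METHOD': 'POST', 'PATH_INFO': '/',
        #                 'CONTENT_TYPE': 'text/xml; charset=utf-8'}
        #     setup_testing_defaults(post_env)
        #     post_request = Request(post_env)
        #     post_request.body = soap_body
        #     resp = post_request.get_response(wsgi_app)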
| lgpl-2.1 | -6,575,140,074,578,834,000 | 34.921053 | 78 | 0.676923 | false |
nuwainfo/trac-code-comments-plugin | code_comments/web.py | 2 | 11501 | import re
import copy
from trac.core import *
from trac.web.chrome import INavigationContributor, ITemplateProvider, add_script, add_script_data, add_stylesheet, add_notice, add_link
from trac.web.main import IRequestHandler, IRequestFilter
from trac.util import Markup
from trac.util.text import to_unicode
from trac.util.presentation import Paginator
from trac.versioncontrol.api import RepositoryManager
from code_comments.comments import Comments
from code_comments.comment import CommentJSONEncoder, format_to_html
try:
import json
except ImportError:
import simplejson as json
class CodeComments(Component):
implements(ITemplateProvider, IRequestFilter)
href = 'code-comments'
# ITemplateProvider methods
def get_templates_dirs(self):
return [self.get_template_dir()]
def get_template_dir(self):
from pkg_resources import resource_filename
return resource_filename(__name__, 'templates')
def get_htdocs_dirs(self):
from pkg_resources import resource_filename
return [('code-comments', resource_filename(__name__, 'htdocs'))]
# IRequestFilter methods
def pre_process_request(self, req, handler):
return handler
def post_process_request(self, req, template, data, content_type):
add_stylesheet(req, 'code-comments/code-comments.css')
return template, data, content_type
class MainNavigation(CodeComments):
implements(INavigationContributor)
# INavigationContributor methods
def get_active_navigation_item(self, req):
return self.href
def get_navigation_items(self, req):
if 'TRAC_ADMIN' in req.perm:
yield 'mainnav', 'code-comments', Markup('<a href="%s">Code Comments</a>' % (
req.href(self.href) ) )
class JSDataForRequests(CodeComments):
implements(IRequestFilter)
    js_templates = ['page-comments-block', 'comment', 'add-comment-dialog', 'comments-for-a-line']
# IRequestFilter methods
def pre_process_request(self, req, handler):
return handler
def post_process_request(self, req, template, data, content_type):
if data is None:
return
js_data = {
'comments_rest_url': req.href(CommentsREST.href),
'formatting_help_url': req.href.wiki('WikiFormatting'),
'delete_url': req.href(DeleteCommentForm.href),
'preview_url': req.href(WikiPreview.href),
'templates': self.templates_js_data(),
'active_comment_id': req.args.get('codecomment'),
'username': req.authname,
'is_admin': 'TRAC_ADMIN' in req.perm,
}
original_return_value = template, data, content_type
if req.path_info.startswith('/changeset'):
js_data.update(self.changeset_js_data(req, data))
elif req.path_info.startswith('/browser'):
js_data.update(self.browser_js_data(req, data))
elif re.match(r'/attachment/ticket/\d+/.*', req.path_info):
js_data.update(self.attachment_js_data(req, data))
else:
return original_return_value
add_script(req, 'code-comments/jquery-1.11.1.min.js')
add_script(req, 'code-comments/json2.js')
add_script(req, 'code-comments/underscore-min.js')
add_script(req, 'code-comments/backbone-min.js')
# jQuery UI includes: UI Core, Interactions, Button & Dialog Widgets, Core Effects, custom theme
add_script(req, 'code-comments/jquery-ui/jquery-ui.js')
add_stylesheet(req, 'code-comments/jquery-ui/trac-theme.css')
add_script(req, 'code-comments/jquery.ba-throttle-debounce.min.js')
add_script(req, 'code-comments/code-comments.js')
add_script_data(req, {'CodeComments': js_data})
return original_return_value
def templates_js_data(self):
data = {}
for name in self.js_templates:
# we want to use the name as JS identifier and we can't have dashes there
data[name.replace('-', '_')] = self.template_js_data(name)
return data
def changeset_js_data(self, req, data):
return {'page': 'changeset', 'revision': data['new_rev'], 'path': '', 'selectorToInsertAfter': 'div.diff div.diff:last'}
def browser_js_data(self, req, data):
return {'page': 'browser', 'revision': data['rev'], 'path': data['path'], 'selectorToInsertAfter': 'table.code'}
def attachment_js_data(self, req, data):
path = req.path_info.replace('/attachment/', 'attachment:/')
return {'page': 'attachment', 'revision': 0, 'path': path, 'selectorToInsertAfter': 'div#preview'}
def template_js_data(self, name):
file_name = name + '.html'
return to_unicode(open(self.get_template_dir() + '/js/' + file_name).read())
class ListComments(CodeComments):
implements(IRequestHandler)
COMMENTS_PER_PAGE = 50
# IRequestHandler methods
def match_request(self, req):
return req.path_info == '/' + self.href
def process_request(self, req):
req.perm.require('TRAC_ADMIN')
self.data = {}
self.args = {}
self.req = req
self.per_page = int(req.args.get('per-page', self.COMMENTS_PER_PAGE))
self.page = int(req.args.get('page', 1))
self.order_by = req.args.get('orderby', 'id')
self.order = req.args.get('order', 'DESC')
self.add_path_and_author_filters()
        self.comments = Comments(req, self.env)
self.data['comments'] = self.comments.search(self.args, self.order, self.per_page, self.page, self.order_by)
self.data['reponame'], repos, path = RepositoryManager(self.env).get_repository_by_path('/')
self.data['can_delete'] = 'TRAC_ADMIN' in req.perm
self.data['paginator'] = self.get_paginator()
self.data['current_sorting_method'] = self.order_by
self.data['current_order'] = self.order
self.data['sortable_headers'] = []
self.data.update(self.comments.get_filter_values())
self.prepare_sortable_headers()
return 'comments.html', self.data, None
def post_process_request(self, req, template, data, content_type):
add_stylesheet(req, 'code-comments/sort/sort.css')
add_script(req, 'code-comments/code-comments-list.js')
return template, data, content_type
def add_path_and_author_filters(self):
        self.data['current_path_selection'] = ''
        self.data['current_author_selection'] = ''
if self.req.args.get('filter-by-path'):
            self.args['path__prefix'] = self.req.args['filter-by-path']
self.data['current_path_selection'] = self.req.args['filter-by-path']
if self.req.args.get('filter-by-author'):
self.args['author'] = self.req.args['filter-by-author']
self.data['current_author_selection'] = self.req.args['filter-by-author']
def get_paginator(self):
def href_with_page(page):
args = copy.copy(self.req.args)
args['page'] = page
return self.req.href(self.href, args)
paginator = Paginator(self.data['comments'], self.page-1, self.per_page, Comments(self.req, self.env).count(self.args))
if paginator.has_next_page:
add_link(self.req, 'next', href_with_page(self.page + 1), 'Next Page')
if paginator.has_previous_page:
add_link(self.req, 'prev', href_with_page(self.page - 1), 'Previous Page')
shown_pages = paginator.get_shown_pages(page_index_count = 11)
links = [{'href': href_with_page(page), 'class': None, 'string': str(page), 'title': 'Page %d' % page}
for page in shown_pages]
paginator.shown_pages = links
paginator.current_page = {'href': None, 'class': 'current', 'string': str(paginator.page + 1), 'title': None}
return paginator
def prepare_sortable_headers(self):
displayed_sorting_methods = ('id', 'author', 'time', 'path', 'text')
displayed_sorting_method_names = ('ID', 'Author', 'Date', 'Path', 'Text')
query_args = self.req.args
        if 'page' in query_args:
del query_args['page']
for sorting_method, sorting_method_name in zip(displayed_sorting_methods, displayed_sorting_method_names):
query_args['orderby'] = sorting_method
html_class = 'header'
if self.order_by == sorting_method:
if 'ASC' == self.order:
query_args['order'] = 'DESC'
html_class += ' headerSortUp'
else:
query_args['order'] = 'ASC'
html_class += ' headerSortDown'
link = self.req.href(self.href, query_args)
self.data['sortable_headers'].append({ 'name': sorting_method_name, 'link': link, 'html_class': html_class })
class DeleteCommentForm(CodeComments):
implements(IRequestHandler)
href = CodeComments.href + '/delete'
# IRequestHandler methods
def match_request(self, req):
return req.path_info == '/' + self.href
def process_request(self, req):
req.perm.require('TRAC_ADMIN')
if 'GET' == req.method:
return self.form(req)
else:
return self.delete(req)
def form(self, req):
data = {}
referrer = req.get_header('Referer')
data['comment'] = Comments(req, self.env).by_id(req.args['id'])
data['return_to'] = referrer
return 'delete.html', data, None
def delete(self, req):
comment = Comments(req, self.env).by_id(req.args['id'])
comment.delete()
add_notice(req, 'Comment deleted.')
req.redirect(req.args['return_to'] or req.href())
class BundleCommentsRedirect(CodeComments):
implements(IRequestHandler)
href = CodeComments.href + '/create-ticket'
# IRequestHandler methods
def match_request(self, req):
return req.path_info == '/' + self.href
def process_request(self, req):
text = ''
for id in req.args['ids'].split(','):
comment = Comments(req, self.env).by_id(id)
text += """
[[CodeCommentLink(%(id)s)]]
%(comment_text)s
""".lstrip() % {'id': id, 'comment_text': comment.text}
req.redirect(req.href.newticket(description=text))
class CommentsREST(CodeComments):
implements(IRequestHandler)
href = CodeComments.href + '/comments'
# IRequestHandler methods
def match_request(self, req):
return req.path_info.startswith('/' + self.href)
def return_json(self, req, data, code=200):
req.send(json.dumps(data, cls=CommentJSONEncoder), 'application/json')
def process_request(self, req):
        # TODO: catch errors
if '/' + self.href == req.path_info:
if 'GET' == req.method:
self.return_json(req, Comments(req, self.env).search(req.args))
if 'POST' == req.method:
comments = Comments(req, self.env)
id = comments.create(json.loads(req.read()))
self.return_json(req, comments.by_id(id))
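    # Illustrative request/response shapes for this endpoint; field names
    # other than 'text' and 'path' depend on the Comments model elsewhere in
    # the plugin and are assumptions here:
    #
    #   GET  /code-comments/comments?path__prefix=trunk/
    #        -> JSON array of matching comments
    #   POST /code-comments/comments  with a JSON body such as
    #        {"text": "Off-by-one here?", "path": "trunk/foo.py", "line": 12}
    #        -> JSON object for the newly created comment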
class WikiPreview(CodeComments):
implements(IRequestHandler)
href = CodeComments.href + '/preview'
# IRequestHandler methods
def match_request(self, req):
return req.path_info.startswith('/' + self.href)
def process_request(self, req):
req.send(format_to_html(req, self.env, req.args.get('text', '')).encode('utf-8'))
| gpl-2.0 | 4,607,735,256,648,059,400 | 38.386986 | 136 | 0.621946 | false |
Jajcus/pyxmpp2 | pyxmpp2/test/stream_reader.py | 1 | 4892 | #!/usr/bin/python -u
# -*- coding: UTF-8 -*-
# pylint: disable=C0111
import os
import logging
import unittest
from xml.etree import ElementTree
from pyxmpp2 import xmppparser
from pyxmpp2.test._support import DATA_DIR
from pyxmpp2.utils import xml_elements_equal
logger = logging.getLogger("pyxmpp2.test.stream_reader")
class EventTemplate:
# pylint: disable=R0903
def __init__(self, template):
self.event, offset, xml = template.split(None, 2)
self.offset = int(offset)
self.xml = ElementTree.XML(eval(xml))
def match(self, event, node):
if self.event != event:
return False
if event == "end":
return True
if not xml_elements_equal(self.xml, node):
return False
return True
def __repr__(self):
return "<EventTemplate %r at %r: %r>" % (self.event, self.offset,
ElementTree.dump(self.xml))
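# Each line of the accompanying stream_info.txt names an event, the byte
# offset by which it must have been produced, and a Python string literal
# that eval()s to the expected XML, e.g. (illustrative, not copied from the
# data file):
#
#     node 123 "<message to='a@example.org'><body>hi</body></message>"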
class StreamHandler(xmppparser.XMLStreamHandler):
def __init__(self, test_case):
xmppparser.XMLStreamHandler.__init__(self)
self.test_case = test_case
def stream_start(self, element):
self.test_case.event("start", element)
def stream_end(self):
self.test_case.event("end", None)
def stream_element(self, element):
self.test_case.event("node", element)
# pylint: disable=C0103
expected_events = []
# pylint: disable=C0103
whole_stream = None
def load_expected_events():
with open(os.path.join(DATA_DIR, "stream_info.txt")) as stream_info:
for line in stream_info:
if line.startswith("#"):
continue
line = line.strip()
expected_events.append(EventTemplate(line))
def load_whole_stream():
# pylint: disable=W0603
global whole_stream
whole_stream = ElementTree.parse(os.path.join(DATA_DIR, "stream.xml"))
class TestStreamReader(unittest.TestCase):
def setUp(self):
self.expected_events = list(expected_events)
self.handler = StreamHandler(self)
self.reader = xmppparser.StreamReader(self.handler)
self.file = open(os.path.join(DATA_DIR, "stream.xml"))
self.chunk_start = 0
self.chunk_end = 0
self.whole_stream = ElementTree.ElementTree()
def tearDown(self):
del self.handler
del self.reader
del self.whole_stream
def test_1(self):
self.do_test(1)
def test_2(self):
self.do_test(2)
def test_10(self):
self.do_test(10)
def test_100(self):
self.do_test(100)
def test_1000(self):
self.do_test(1000)
def do_test(self, chunk_length):
        while True:
            data = self.file.read(chunk_length)
            if not data:
                self.reader.feed('')
                break
            self.chunk_end += len(data)
            self.reader.feed(data)
            self.chunk_start = self.chunk_end
root1 = self.whole_stream.getroot()
self.assertIsNotNone(root1)
root2 = whole_stream.getroot()
if not xml_elements_equal(root1, root2, True):
self.fail("Whole stream invalid. Got: %r, Expected: %r"
% (ElementTree.tostring(root1),
ElementTree.tostring(root2)))
def event(self, event, element):
logger.debug(" event: {0!r} element: {1!r}".format(event, element))
expected = self.expected_events.pop(0)
        self.assertTrue(event == expected.event,
                        "Got %r, expected %r" % (event, expected.event))
if expected.offset < self.chunk_start:
self.fail("Delayed event: %r. Expected at: %i, found at %i:%i"
% (event, expected.offset, self.chunk_start,
self.chunk_end))
if expected.offset > self.chunk_end:
self.fail("Early event: %r. Expected at: %i, found at %i:%i"
% (event, expected.offset, self.chunk_start,
self.chunk_end))
if not expected.match(event, element):
self.fail("Unmatched event. Expected: %r, got: %r;%r"
% (expected, event, ElementTree.dump(element)))
if event == "start":
# pylint: disable=W0212
self.whole_stream._setroot(element)
elif event == "node":
root = self.whole_stream.getroot()
root.append(element)
# pylint: disable=W0611
from pyxmpp2.test._support import load_tests, setup_logging
def setUpModule():
load_expected_events()
load_whole_stream()
setup_logging()
if __name__ == "__main__":
unittest.main()
| lgpl-2.1 | 9,048,098,941,547,246,000 | 31.613333 | 79 | 0.564186 | false |
Purg/kwiver | sprokit/tests/bindings/python/sprokit/pipeline_util/test-export_.py | 4 | 2608 | #!@PYTHON_EXECUTABLE@
#ckwg +28
# Copyright 2011-2013 by Kitware, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither name of Kitware, Inc. nor the names of any contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def test_import(path_unused):
try:
import sprokit.pipeline_util.export_
except:
test_error("Failed to import the export_ module")
def test_simple_pipeline(path):
import os
from sprokit.pipeline import pipeline
from sprokit.pipeline import modules
from sprokit.pipeline_util import bake
from sprokit.pipeline_util import export_
modules.load_known_modules()
p = bake.bake_pipe_file(path)
r, w = os.pipe()
name = 'graph'
export_.export_dot(w, p, name)
p.setup_pipeline()
export_.export_dot(w, p, name)
os.close(r)
os.close(w)
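# The DOT text exported above (written to a pipe here) can be rendered with
# Graphviz once saved to a file, e.g.:
#
#     dot -Tpng graph.dot -o graph.png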
if __name__ == '__main__':
import os
import sys
if not len(sys.argv) == 5:
test_error("Expected four arguments")
sys.exit(1)
testname = sys.argv[1]
os.chdir(sys.argv[2])
sys.path.append(sys.argv[3])
pipeline_dir = sys.argv[4]
path = os.path.join(pipeline_dir, '%s.pipe' % testname)
from sprokit.test.test import *
run_test(testname, find_tests(locals()), path)
| bsd-3-clause | 7,324,491,306,691,780,000 | 30.047619 | 80 | 0.715107 | false |
jakejhansen/minesweeper_solver | evolutionary/hpcjobs/Minesweeper10-1/run-minesweeper.py | 13 | 10466 | import argparse
import cProfile
import multiprocessing as mp
import os
import pstats
import time
import gym
import IPython
import numpy as np
from keras.layers import Dense
from keras.models import Input, Model, Sequential, clone_model, load_model
from keras.optimizers import Adam
from context import core
from core.strategies import ES
from minesweeper_tk import Minesweeper
def fitnessfun(env, model):
total_reward = 0
done = False
observation = env.reset()
steps = 0
while not done and steps < rows*cols-mines:
action = model.predict(observation.reshape((1, 1)+observation.shape))
observation, reward, done, info = env.step(np.argmax(action))
total_reward += reward
steps += 1
return total_reward, steps
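# Note: fitnessfun above (and testfun below) cap an episode at
# rows*cols - mines steps, the number of safe cells on the board; rows,
# cols and mines are module-level values parsed from the command line
# further down.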
def testfun(model, env, episodes):
# IPython.embed()
total_reward = []
try:
for i in range(episodes):
total_reward.append(0)
input()
observation = env.reset()
done = False
t = 0
while not done and t < rows*cols-mines:
input()
action = model.predict(observation.reshape((1, 1)+observation.shape))
observation, reward, done, info = env.step(np.argmax(action))
total_reward[i] += reward
t += 1
print('Reward: {: >2.1f}'.format(reward))
except KeyboardInterrupt:
raise
return total_reward
parser = argparse.ArgumentParser()
parser.add_argument('--nwrk', type=int, default=mp.cpu_count())
parser.add_argument('--nags', type=int, default=20)
parser.add_argument('--ngns', type=int, default=10000)
parser.add_argument('--cint', type=int, default=20)
parser.add_argument('--sigm', type=float, default=0.1)
parser.add_argument('--lrte', type=float, default=0.1)
parser.add_argument('--regu', type=float, default=0.001)
parser.add_argument('--size', type=int, default=6)
parser.add_argument('--mine', type=int, default=7)
args = parser.parse_args()
rows = args.size
cols = args.size
mines = args.mine
FULL = False
rewards = {"win": 0.9, "loss": -1, "progress": 0.9, "noprogress": -0.3, "YOLO": -0.3}
env = Minesweeper(display=False, FULL=FULL, ROWS=rows, COLS=cols, MINES=mines, rewards=rewards)
n_inputs = rows*cols*10 if FULL else rows*cols*2
n_hidden = [rows*cols*10, 250, 250, 250, 250]
n_outputs = rows*cols
# Model
model = Sequential()
model.add(Dense(input_shape=(1, n_inputs),
units=n_hidden[0],
activation='relu',
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
                kernel_regularizer=None,  # l2(reg)
                bias_regularizer=None))  # l2(reg)
# Hidden
for n_units in n_hidden[1:]:
model.add(Dense(units=n_units,
activation='relu',
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
                    kernel_regularizer=None,  # l2(reg)
                    bias_regularizer=None))  # l2(reg)
# Output
model.add(Dense(units=n_outputs,
activation='softmax',
kernel_initializer='glorot_uniform',
bias_initializer='zeros',
kernel_regularizer=None,
bias_regularizer=None))
model.compile(optimizer='rmsprop', loss='mean_squared_error')
model.summary()
DO_PROFILE = False
save_dir = os.path.split(os.path.realpath(__file__))[0]
if __name__ == '__main__':
mp.freeze_support()
e = ES(fun=fitnessfun, model=model, env=env, reg={'L2': args.regu}, population=args.nags, learning_rate=args.lrte, sigma=args.sigm, workers=args.nwrk, save_dir=save_dir)
e.load_checkpoint()
if DO_PROFILE:
cProfile.run('e.evolve(args.ngns, print_every=1, plot_every=10)', 'profilingstats')
e.evolve(args.ngns, checkpoint_every=args.cint, plot_every=args.cint)
if DO_PROFILE:
p = pstats.Stats('profilingstats')
p.sort_stats('cumulative').print_stats(10)
p.sort_stats('time').print_stats(10)
# model = load_model('model.h5')
# env = Minesweeper(display=True, FULL=FULL, ROWS=rows, COLS=cols, MINES=mines, rewards=rewards)
# testfun(model, env, 10)
"""
model = Sequential()
model.add(Dense(input_shape=(1, n_inputs),
units=n_hidden[0],
activation='relu',
kernel_initializer='glorot_uniform',
bias_initializer='zeros'))
# Hidden
for n_units in n_hidden[1:]:
model.add(Dense(units=n_units,
activation='relu',
kernel_initializer='glorot_uniform',
bias_initializer='zeros'))
# Output
model.add(Dense(units=n_outputs,
activation='softmax',
kernel_initializer='glorot_uniform',
bias_initializer='zeros'))
model.compile(optimizer='adam', loss='mean_squared_error')
model.summary()
"""
"""
======= PROFILING WITH 1 WORKER =======
Wed Dec 6 10:08:15 2017 profilingstats
1107747 function calls (1092605 primitive calls) in 125.457 seconds
Ordered by: cumulative time
List reduced from 2470 to 10 due to restriction <10>
ncalls tottime percall cumtime percall filename:lineno(function)
20/1 0.000 0.000 125.458 125.458 {built-in method builtins.exec}
1 0.001 0.001 125.458 125.458 <string>:1(<module>)
1 0.032 0.032 125.457 125.457 /Users/Jakob/Desktop/minesweeper_solver/evolutionary/CartPole-v1-multi/strategies.py:70(evolve)
30 0.000 0.000 121.111 4.037 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:261(map)
33 0.000 0.000 121.108 3.670 /Users/Jakob/anaconda/lib/python3.6/threading.py:533(wait)
33 0.000 0.000 121.108 3.670 /Users/Jakob/anaconda/lib/python3.6/threading.py:263(wait)
30 0.000 0.000 121.108 4.037 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:637(get)
30 0.000 0.000 121.107 4.037 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:634(wait)
166 121.107 0.730 121.107 0.730 {method 'acquire' of '_thread.lock' objects}
30 0.038 0.001 2.091 0.070 es-multi-threaded.py:15(fitnessfun)
Wed Dec 6 10:08:15 2017 profilingstats
1107747 function calls (1092605 primitive calls) in 125.457 seconds
Ordered by: internal time
List reduced from 2470 to 10 due to restriction <10>
ncalls tottime percall cumtime percall filename:lineno(function)
166 121.107 0.730 121.107 0.730 {method 'acquire' of '_thread.lock' objects}
4618 0.432 0.000 0.614 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/theano/compile/function_module.py:725(__call__)
10 0.344 0.034 0.344 0.034 {method 'poll' of 'select.poll' objects}
4 0.227 0.057 0.227 0.057 {built-in method _tkinter.create}
22372 0.212 0.000 0.212 0.000 {built-in method numpy.core.multiarray.array}
2472 0.207 0.000 0.207 0.000 {built-in method numpy.core.multiarray.dot}
61099 0.123 0.000 0.123 0.000 {built-in method builtins.hasattr}
4618 0.118 0.000 1.007 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/keras/engine/training.py:1209(_predict_loop)
1 0.101 0.101 0.101 0.101 {method 'acquire' of '_multiprocessing.SemLock' objects}
4618 0.084 0.000 0.084 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/keras/engine/training.py:406(<listcomp>)
======= PROFILING WITH 4 WORKERS =======
Wed Dec 6 10:00:43 2017 profilingstats
3111894 function calls (3068601 primitive calls) in 211.293 seconds
Ordered by: cumulative time
List reduced from 2462 to 10 due to restriction <10>
ncalls tottime percall cumtime percall filename:lineno(function)
27/1 0.001 0.000 211.296 211.296 {built-in method builtins.exec}
1 0.115 0.115 211.295 211.295 /Users/Jakob/Desktop/minesweeper_solver/evolutionary/CartPole-v1-multi/strategies.py:70(evolve)
100 0.001 0.000 200.251 2.003 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:261(map)
103 0.001 0.000 200.241 1.944 /Users/Jakob/anaconda/lib/python3.6/threading.py:533(wait)
100 0.000 0.000 200.240 2.002 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:637(get)
100 0.000 0.000 200.239 2.002 /Users/Jakob/anaconda/lib/python3.6/multiprocessing/pool.py:634(wait)
103 0.001 0.000 200.239 1.944 /Users/Jakob/anaconda/lib/python3.6/threading.py:263(wait)
515 200.238 0.389 200.238 0.389 {method 'acquire' of '_thread.lock' objects}
100 0.122 0.001 5.254 0.053 es-multi-threaded.py:15(fitnessfun)
100 0.001 0.000 4.544 0.045 /Users/Jakob/Desktop/minesweeper_solver/evolutionary/CartPole-v1-multi/strategies.py:58(plot_progress)
Wed Dec 6 10:00:43 2017 profilingstats
3111894 function calls (3068601 primitive calls) in 211.293 seconds
Ordered by: internal time
List reduced from 2462 to 10 due to restriction <10>
ncalls tottime percall cumtime percall filename:lineno(function)
515 200.238 0.389 200.238 0.389 {method 'acquire' of '_thread.lock' objects}
15292 1.299 0.000 1.880 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/theano/compile/function_module.py:725(__call__)
67701 0.658 0.000 0.658 0.000 {built-in method numpy.core.multiarray.array}
7026 0.574 0.000 0.574 0.000 {built-in method numpy.core.multiarray.dot}
11 0.490 0.045 0.490 0.045 {built-in method _tkinter.create}
15292 0.368 0.000 3.128 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/keras/engine/training.py:1209(_predict_loop)
10 0.294 0.029 0.294 0.029 {method 'poll' of 'select.poll' objects}
15292 0.264 0.000 0.264 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/keras/engine/training.py:406(<listcomp>)
15292 0.261 0.000 0.493 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/gym/envs/classic_control/cartpole.py:56(_step)
15292 0.203 0.000 0.248 0.000 /Users/Jakob/anaconda/lib/python3.6/site-packages/keras/engine/training.py:364(_make_batches)
"""
| mit | 2,059,481,714,279,348,500 | 44.112069 | 173 | 0.637875 | false |
mitya57/django | tests/model_formsets/tests.py | 5 | 78307 | import datetime
import re
from datetime import date
from decimal import Decimal
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.forms.models import (
BaseModelFormSet, _get_foreign_key, inlineformset_factory,
modelformset_factory,
)
from django.http import QueryDict
from django.test import TestCase, skipUnlessDBFeature
from .models import (
AlternateBook, Author, AuthorMeeting, BetterAuthor, Book, BookWithCustomPK,
BookWithOptionalAltEditor, ClassyMexicanRestaurant, CustomPrimaryKey,
Location, Membership, MexicanRestaurant, Owner, OwnerProfile, Person,
Place, Player, Poem, Poet, Post, Price, Product, Repository, Restaurant,
Revision, Team,
)
class DeletionTests(TestCase):
def test_deletion(self):
PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True)
poet = Poet.objects.create(name='test')
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '1',
'form-MAX_NUM_FORMS': '0',
'form-0-id': str(poet.pk),
'form-0-name': 'test',
'form-0-DELETE': 'on',
}
formset = PoetFormSet(data, queryset=Poet.objects.all())
formset.save(commit=False)
self.assertEqual(Poet.objects.count(), 1)
formset.save()
self.assertTrue(formset.is_valid())
self.assertEqual(Poet.objects.count(), 0)
def test_add_form_deletion_when_invalid(self):
"""
        Make sure that an add form that is filled out but marked for deletion
doesn't cause validation errors.
"""
PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True)
poet = Poet.objects.create(name='test')
        # One existing untouched form and two new invalid forms
data = {
'form-TOTAL_FORMS': '3',
'form-INITIAL_FORMS': '1',
'form-MAX_NUM_FORMS': '0',
'form-0-id': str(poet.id),
'form-0-name': 'test',
'form-1-id': '',
'form-1-name': 'x' * 1000, # Too long
'form-2-id': str(poet.id), # Violate unique constraint
'form-2-name': 'test2',
}
formset = PoetFormSet(data, queryset=Poet.objects.all())
# Make sure this form doesn't pass validation.
self.assertIs(formset.is_valid(), False)
self.assertEqual(Poet.objects.count(), 1)
# Then make sure that it *does* pass validation and delete the object,
        # even though the data in the new forms isn't actually valid.
data['form-0-DELETE'] = 'on'
data['form-1-DELETE'] = 'on'
data['form-2-DELETE'] = 'on'
formset = PoetFormSet(data, queryset=Poet.objects.all())
self.assertIs(formset.is_valid(), True)
formset.save()
self.assertEqual(Poet.objects.count(), 0)
def test_change_form_deletion_when_invalid(self):
"""
        Make sure that a change form that is filled out but marked for deletion
doesn't cause validation errors.
"""
PoetFormSet = modelformset_factory(Poet, fields="__all__", can_delete=True)
poet = Poet.objects.create(name='test')
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '1',
'form-MAX_NUM_FORMS': '0',
'form-0-id': str(poet.id),
'form-0-name': 'x' * 1000,
}
formset = PoetFormSet(data, queryset=Poet.objects.all())
# Make sure this form doesn't pass validation.
self.assertIs(formset.is_valid(), False)
self.assertEqual(Poet.objects.count(), 1)
# Then make sure that it *does* pass validation and delete the object,
# even though the data isn't actually valid.
data['form-0-DELETE'] = 'on'
formset = PoetFormSet(data, queryset=Poet.objects.all())
self.assertIs(formset.is_valid(), True)
formset.save()
self.assertEqual(Poet.objects.count(), 0)
def test_outdated_deletion(self):
poet = Poet.objects.create(name='test')
poem = Poem.objects.create(name='Brevity is the soul of wit', poet=poet)
PoemFormSet = inlineformset_factory(Poet, Poem, fields="__all__", can_delete=True)
# Simulate deletion of an object that doesn't exist in the database
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '2',
'form-0-id': str(poem.pk),
'form-0-name': 'foo',
'form-1-id': str(poem.pk + 1), # doesn't exist
'form-1-name': 'bar',
'form-1-DELETE': 'on',
}
formset = PoemFormSet(data, instance=poet, prefix="form")
# The formset is valid even though poem.pk + 1 doesn't exist,
# because it's marked for deletion anyway
self.assertTrue(formset.is_valid())
formset.save()
# Make sure the save went through correctly
self.assertEqual(Poem.objects.get(pk=poem.pk).name, "foo")
self.assertEqual(poet.poem_set.count(), 1)
self.assertFalse(Poem.objects.filter(pk=poem.pk + 1).exists())
class ModelFormsetTest(TestCase):
def test_modelformset_factory_without_fields(self):
""" Regression for #19733 """
message = (
"Calling modelformset_factory without defining 'fields' or 'exclude' "
"explicitly is prohibited."
)
with self.assertRaisesMessage(ImproperlyConfigured, message):
modelformset_factory(Author)
def test_simple_save(self):
qs = Author.objects.all()
AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=3)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 3)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_form-0-name">Name:</label>'
'<input id="id_form-0-name" type="text" name="form-0-name" maxlength="100" />'
'<input type="hidden" name="form-0-id" id="id_form-0-id" /></p>'
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_form-1-name">Name:</label>'
'<input id="id_form-1-name" type="text" name="form-1-name" maxlength="100" />'
'<input type="hidden" name="form-1-id" id="id_form-1-id" /></p>'
)
self.assertHTMLEqual(
formset.forms[2].as_p(),
'<p><label for="id_form-2-name">Name:</label>'
            '<input id="id_form-2-name" type="text" name="form-2-name" maxlength="100" />'
'<input type="hidden" name="form-2-id" id="id_form-2-id" /></p>'
)
data = {
'form-TOTAL_FORMS': '3', # the number of forms rendered
'form-INITIAL_FORMS': '0', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-name': 'Charles Baudelaire',
'form-1-name': 'Arthur Rimbaud',
'form-2-name': '',
}
formset = AuthorFormSet(data=data, queryset=qs)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 2)
author1, author2 = saved
self.assertEqual(author1, Author.objects.get(name='Charles Baudelaire'))
self.assertEqual(author2, Author.objects.get(name='Arthur Rimbaud'))
authors = list(Author.objects.order_by('name'))
self.assertEqual(authors, [author2, author1])
# Gah! We forgot Paul Verlaine. Let's create a formset to edit the
# existing authors with an extra form to add him. We *could* pass in a
# queryset to restrict the Author objects we edit, but in this case
# we'll use it to display them in alphabetical order by name.
qs = Author.objects.order_by('name')
AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=1, can_delete=False)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 3)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_form-0-name">Name:</label>'
'<input id="id_form-0-name" type="text" name="form-0-name" value="Arthur Rimbaud" maxlength="100" />'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /></p>' % author2.id
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_form-1-name">Name:</label>'
'<input id="id_form-1-name" type="text" name="form-1-name" value="Charles Baudelaire" maxlength="100" />'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" /></p>' % author1.id
)
self.assertHTMLEqual(
formset.forms[2].as_p(),
'<p><label for="id_form-2-name">Name:</label>'
'<input id="id_form-2-name" type="text" name="form-2-name" maxlength="100" />'
'<input type="hidden" name="form-2-id" id="id_form-2-id" /></p>'
)
data = {
'form-TOTAL_FORMS': '3', # the number of forms rendered
'form-INITIAL_FORMS': '2', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-id': str(author2.id),
'form-0-name': 'Arthur Rimbaud',
'form-1-id': str(author1.id),
'form-1-name': 'Charles Baudelaire',
'form-2-name': 'Paul Verlaine',
}
formset = AuthorFormSet(data=data, queryset=qs)
self.assertTrue(formset.is_valid())
# Only changed or new objects are returned from formset.save()
saved = formset.save()
self.assertEqual(len(saved), 1)
author3 = saved[0]
self.assertEqual(author3, Author.objects.get(name='Paul Verlaine'))
authors = list(Author.objects.order_by('name'))
self.assertEqual(authors, [author2, author1, author3])
# This probably shouldn't happen, but it will. If an add form was
# marked for deletion, make sure we don't save that form.
qs = Author.objects.order_by('name')
AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=1, can_delete=True)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 4)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_form-0-name">Name:</label>'
'<input id="id_form-0-name" type="text" name="form-0-name" '
'value="Arthur Rimbaud" maxlength="100" /></p>'
'<p><label for="id_form-0-DELETE">Delete:</label>'
'<input type="checkbox" name="form-0-DELETE" id="id_form-0-DELETE" />'
'<input type="hidden" name="form-0-id" value="%d" id="id_form-0-id" /></p>' % author2.id
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_form-1-name">Name:</label>'
'<input id="id_form-1-name" type="text" name="form-1-name" '
'value="Charles Baudelaire" maxlength="100" /></p>'
'<p><label for="id_form-1-DELETE">Delete:</label>'
'<input type="checkbox" name="form-1-DELETE" id="id_form-1-DELETE" />'
'<input type="hidden" name="form-1-id" value="%d" id="id_form-1-id" /></p>' % author1.id
)
self.assertHTMLEqual(
formset.forms[2].as_p(),
'<p><label for="id_form-2-name">Name:</label>'
'<input id="id_form-2-name" type="text" name="form-2-name" '
'value="Paul Verlaine" maxlength="100" /></p>'
'<p><label for="id_form-2-DELETE">Delete:</label>'
'<input type="checkbox" name="form-2-DELETE" id="id_form-2-DELETE" />'
'<input type="hidden" name="form-2-id" value="%d" id="id_form-2-id" /></p>' % author3.id
)
self.assertHTMLEqual(
formset.forms[3].as_p(),
'<p><label for="id_form-3-name">Name:</label>'
'<input id="id_form-3-name" type="text" name="form-3-name" maxlength="100" /></p>'
'<p><label for="id_form-3-DELETE">Delete:</label>'
'<input type="checkbox" name="form-3-DELETE" id="id_form-3-DELETE" />'
'<input type="hidden" name="form-3-id" id="id_form-3-id" /></p>'
)
data = {
'form-TOTAL_FORMS': '4', # the number of forms rendered
'form-INITIAL_FORMS': '3', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-id': str(author2.id),
'form-0-name': 'Arthur Rimbaud',
'form-1-id': str(author1.id),
'form-1-name': 'Charles Baudelaire',
'form-2-id': str(author3.id),
'form-2-name': 'Paul Verlaine',
'form-3-name': 'Walt Whitman',
'form-3-DELETE': 'on',
}
formset = AuthorFormSet(data=data, queryset=qs)
self.assertTrue(formset.is_valid())
# No objects were changed or saved so nothing will come back.
self.assertEqual(formset.save(), [])
authors = list(Author.objects.order_by('name'))
self.assertEqual(authors, [author2, author1, author3])
# Let's edit a record to ensure save only returns that one record.
data = {
'form-TOTAL_FORMS': '4', # the number of forms rendered
'form-INITIAL_FORMS': '3', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-id': str(author2.id),
'form-0-name': 'Walt Whitman',
'form-1-id': str(author1.id),
'form-1-name': 'Charles Baudelaire',
'form-2-id': str(author3.id),
'form-2-name': 'Paul Verlaine',
'form-3-name': '',
'form-3-DELETE': '',
}
formset = AuthorFormSet(data=data, queryset=qs)
self.assertTrue(formset.is_valid())
# One record has changed.
saved = formset.save()
self.assertEqual(len(saved), 1)
self.assertEqual(saved[0], Author.objects.get(name='Walt Whitman'))
def test_commit_false(self):
# Test the behavior of commit=False and save_m2m
author1 = Author.objects.create(name='Charles Baudelaire')
author2 = Author.objects.create(name='Paul Verlaine')
author3 = Author.objects.create(name='Walt Whitman')
meeting = AuthorMeeting.objects.create(created=date.today())
meeting.authors.set(Author.objects.all())
# create an Author instance to add to the meeting.
author4 = Author.objects.create(name='John Steinbeck')
AuthorMeetingFormSet = modelformset_factory(AuthorMeeting, fields="__all__", extra=1, can_delete=True)
data = {
'form-TOTAL_FORMS': '2', # the number of forms rendered
'form-INITIAL_FORMS': '1', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-id': str(meeting.id),
'form-0-name': '2nd Tuesday of the Week Meeting',
'form-0-authors': [author2.id, author1.id, author3.id, author4.id],
'form-1-name': '',
'form-1-authors': '',
'form-1-DELETE': '',
}
formset = AuthorMeetingFormSet(data=data, queryset=AuthorMeeting.objects.all())
self.assertTrue(formset.is_valid())
instances = formset.save(commit=False)
for instance in instances:
instance.created = date.today()
instance.save()
formset.save_m2m()
self.assertQuerysetEqual(instances[0].authors.all(), [
'<Author: Charles Baudelaire>',
'<Author: John Steinbeck>',
'<Author: Paul Verlaine>',
'<Author: Walt Whitman>',
])
def test_max_num(self):
# Test the behavior of max_num with model formsets. It should allow
# all existing related objects/inlines for a given object to be
# displayed, but not allow the creation of new inlines beyond max_num.
Author.objects.create(name='Charles Baudelaire')
Author.objects.create(name='Paul Verlaine')
Author.objects.create(name='Walt Whitman')
qs = Author.objects.order_by('name')
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=None, extra=3)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 6)
self.assertEqual(len(formset.extra_forms), 3)
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=4, extra=3)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 4)
self.assertEqual(len(formset.extra_forms), 1)
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=0, extra=3)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 3)
self.assertEqual(len(formset.extra_forms), 0)
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=None)
formset = AuthorFormSet(queryset=qs)
self.assertQuerysetEqual(formset.get_queryset(), [
'<Author: Charles Baudelaire>',
'<Author: Paul Verlaine>',
'<Author: Walt Whitman>',
])
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=0)
formset = AuthorFormSet(queryset=qs)
self.assertQuerysetEqual(formset.get_queryset(), [
'<Author: Charles Baudelaire>',
'<Author: Paul Verlaine>',
'<Author: Walt Whitman>',
])
AuthorFormSet = modelformset_factory(Author, fields="__all__", max_num=4)
formset = AuthorFormSet(queryset=qs)
self.assertQuerysetEqual(formset.get_queryset(), [
'<Author: Charles Baudelaire>',
'<Author: Paul Verlaine>',
'<Author: Walt Whitman>',
])
def test_min_num(self):
# Test the behavior of min_num with model formsets. It should be
# added to extra.
qs = Author.objects.none()
AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=0)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 0)
AuthorFormSet = modelformset_factory(Author, fields="__all__", min_num=1, extra=0)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 1)
AuthorFormSet = modelformset_factory(Author, fields="__all__", min_num=1, extra=1)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 2)
def test_min_num_with_existing(self):
# Test the behavior of min_num with existing objects.
Author.objects.create(name='Charles Baudelaire')
qs = Author.objects.all()
AuthorFormSet = modelformset_factory(Author, fields="__all__", extra=0, min_num=1)
formset = AuthorFormSet(queryset=qs)
self.assertEqual(len(formset.forms), 1)
def test_custom_save_method(self):
class PoetForm(forms.ModelForm):
def save(self, commit=True):
# change the name to "Vladimir Mayakovsky" just to be a jerk.
author = super().save(commit=False)
author.name = "Vladimir Mayakovsky"
if commit:
author.save()
return author
PoetFormSet = modelformset_factory(Poet, fields="__all__", form=PoetForm)
data = {
'form-TOTAL_FORMS': '3', # the number of forms rendered
'form-INITIAL_FORMS': '0', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-name': 'Walt Whitman',
'form-1-name': 'Charles Baudelaire',
'form-2-name': '',
}
qs = Poet.objects.all()
formset = PoetFormSet(data=data, queryset=qs)
self.assertTrue(formset.is_valid())
poets = formset.save()
self.assertEqual(len(poets), 2)
poet1, poet2 = poets
self.assertEqual(poet1.name, 'Vladimir Mayakovsky')
self.assertEqual(poet2.name, 'Vladimir Mayakovsky')
def test_custom_form(self):
"""
model_formset_factory() respects fields and exclude parameters of a
custom form.
"""
class PostForm1(forms.ModelForm):
class Meta:
model = Post
fields = ('title', 'posted')
class PostForm2(forms.ModelForm):
class Meta:
model = Post
exclude = ('subtitle',)
PostFormSet = modelformset_factory(Post, form=PostForm1)
formset = PostFormSet()
self.assertNotIn("subtitle", formset.forms[0].fields)
PostFormSet = modelformset_factory(Post, form=PostForm2)
formset = PostFormSet()
self.assertNotIn("subtitle", formset.forms[0].fields)
def test_custom_queryset_init(self):
"""
A queryset can be overridden in the formset's __init__() method.
"""
Author.objects.create(name='Charles Baudelaire')
Author.objects.create(name='Paul Verlaine')
class BaseAuthorFormSet(BaseModelFormSet):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.queryset = Author.objects.filter(name__startswith='Charles')
AuthorFormSet = modelformset_factory(Author, fields='__all__', formset=BaseAuthorFormSet)
formset = AuthorFormSet()
self.assertEqual(len(formset.get_queryset()), 1)
def test_model_inheritance(self):
BetterAuthorFormSet = modelformset_factory(BetterAuthor, fields="__all__")
formset = BetterAuthorFormSet()
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_form-0-name">Name:</label>'
'<input id="id_form-0-name" type="text" name="form-0-name" maxlength="100" /></p>'
'<p><label for="id_form-0-write_speed">Write speed:</label>'
'<input type="number" name="form-0-write_speed" id="id_form-0-write_speed" />'
'<input type="hidden" name="form-0-author_ptr" id="id_form-0-author_ptr" /></p>'
)
data = {
'form-TOTAL_FORMS': '1', # the number of forms rendered
'form-INITIAL_FORMS': '0', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-author_ptr': '',
'form-0-name': 'Ernest Hemingway',
'form-0-write_speed': '10',
}
formset = BetterAuthorFormSet(data)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
author1, = saved
self.assertEqual(author1, BetterAuthor.objects.get(name='Ernest Hemingway'))
hemingway_id = BetterAuthor.objects.get(name="Ernest Hemingway").pk
formset = BetterAuthorFormSet()
self.assertEqual(len(formset.forms), 2)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_form-0-name">Name:</label>'
'<input id="id_form-0-name" type="text" name="form-0-name" value="Ernest Hemingway" maxlength="100" /></p>'
'<p><label for="id_form-0-write_speed">Write speed:</label>'
'<input type="number" name="form-0-write_speed" value="10" id="id_form-0-write_speed" />'
'<input type="hidden" name="form-0-author_ptr" value="%d" id="id_form-0-author_ptr" /></p>' % hemingway_id
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_form-1-name">Name:</label>'
'<input id="id_form-1-name" type="text" name="form-1-name" maxlength="100" /></p>'
'<p><label for="id_form-1-write_speed">Write speed:</label>'
'<input type="number" name="form-1-write_speed" id="id_form-1-write_speed" />'
'<input type="hidden" name="form-1-author_ptr" id="id_form-1-author_ptr" /></p>'
)
data = {
'form-TOTAL_FORMS': '2', # the number of forms rendered
'form-INITIAL_FORMS': '1', # the number of forms with initial data
'form-MAX_NUM_FORMS': '', # the max number of forms
'form-0-author_ptr': hemingway_id,
'form-0-name': 'Ernest Hemingway',
'form-0-write_speed': '10',
'form-1-author_ptr': '',
'form-1-name': '',
'form-1-write_speed': '',
}
formset = BetterAuthorFormSet(data)
self.assertTrue(formset.is_valid())
self.assertEqual(formset.save(), [])
def test_inline_formsets(self):
# We can also create a formset that is tied to a parent model. This is
# how the admin system's edit inline functionality works.
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=3, fields="__all__")
author = Author.objects.create(name='Charles Baudelaire')
formset = AuthorBooksFormSet(instance=author)
self.assertEqual(len(formset.forms), 3)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_book_set-0-title">Title:</label> <input id="id_book_set-0-title" type="text" '
'name="book_set-0-title" maxlength="100" /><input type="hidden" name="book_set-0-author" value="%d" '
'id="id_book_set-0-author" /><input type="hidden" name="book_set-0-id" id="id_book_set-0-id" />'
'</p>' % author.id
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_book_set-1-title">Title:</label>'
'<input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100" />'
'<input type="hidden" name="book_set-1-author" value="%d" id="id_book_set-1-author" />'
'<input type="hidden" name="book_set-1-id" id="id_book_set-1-id" /></p>' % author.id
)
self.assertHTMLEqual(
formset.forms[2].as_p(),
'<p><label for="id_book_set-2-title">Title:</label>'
'<input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100" />'
'<input type="hidden" name="book_set-2-author" value="%d" id="id_book_set-2-author" />'
'<input type="hidden" name="book_set-2-id" id="id_book_set-2-id" /></p>' % author.id
)
data = {
'book_set-TOTAL_FORMS': '3', # the number of forms rendered
'book_set-INITIAL_FORMS': '0', # the number of forms with initial data
'book_set-MAX_NUM_FORMS': '', # the max number of forms
'book_set-0-title': 'Les Fleurs du Mal',
'book_set-1-title': '',
'book_set-2-title': '',
}
formset = AuthorBooksFormSet(data, instance=author)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
book1, = saved
self.assertEqual(book1, Book.objects.get(title='Les Fleurs du Mal'))
self.assertQuerysetEqual(author.book_set.all(), ['<Book: Les Fleurs du Mal>'])
# Now that we've added a book to Charles Baudelaire, let's try adding
# another one. This time though, an edit form will be available for
# every existing book.
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__")
author = Author.objects.get(name='Charles Baudelaire')
formset = AuthorBooksFormSet(instance=author)
self.assertEqual(len(formset.forms), 3)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_book_set-0-title">Title:</label>'
'<input id="id_book_set-0-title" type="text" name="book_set-0-title" '
'value="Les Fleurs du Mal" maxlength="100" />'
'<input type="hidden" name="book_set-0-author" value="%d" id="id_book_set-0-author" />'
'<input type="hidden" name="book_set-0-id" value="%d" id="id_book_set-0-id" /></p>' % (
author.id, book1.id,
)
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_book_set-1-title">Title:</label>'
'<input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100" />'
'<input type="hidden" name="book_set-1-author" value="%d" id="id_book_set-1-author" />'
'<input type="hidden" name="book_set-1-id" id="id_book_set-1-id" /></p>' % author.id
)
self.assertHTMLEqual(
formset.forms[2].as_p(),
'<p><label for="id_book_set-2-title">Title:</label>'
'<input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100" />'
'<input type="hidden" name="book_set-2-author" value="%d" id="id_book_set-2-author" />'
'<input type="hidden" name="book_set-2-id" id="id_book_set-2-id" /></p>' % author.id
)
data = {
'book_set-TOTAL_FORMS': '3', # the number of forms rendered
'book_set-INITIAL_FORMS': '1', # the number of forms with initial data
'book_set-MAX_NUM_FORMS': '', # the max number of forms
'book_set-0-id': str(book1.id),
'book_set-0-title': 'Les Fleurs du Mal',
'book_set-1-title': 'Les Paradis Artificiels',
'book_set-2-title': '',
}
formset = AuthorBooksFormSet(data, instance=author)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
book2, = saved
self.assertEqual(book2, Book.objects.get(title='Les Paradis Artificiels'))
# As you can see, 'Les Paradis Artificiels' is now a book belonging to
# Charles Baudelaire.
self.assertQuerysetEqual(author.book_set.order_by('title'), [
'<Book: Les Fleurs du Mal>',
'<Book: Les Paradis Artificiels>',
])
def test_inline_formsets_save_as_new(self):
# The save_as_new parameter lets you re-associate the data to a new
# instance. This is used in the admin for save_as functionality.
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__")
Author.objects.create(name='Charles Baudelaire')
# An immutable QueryDict simulates request.POST.
data = QueryDict(mutable=True)
data.update({
'book_set-TOTAL_FORMS': '3', # the number of forms rendered
'book_set-INITIAL_FORMS': '2', # the number of forms with initial data
'book_set-MAX_NUM_FORMS': '', # the max number of forms
'book_set-0-id': '1',
'book_set-0-title': 'Les Fleurs du Mal',
'book_set-1-id': '2',
'book_set-1-title': 'Les Paradis Artificiels',
'book_set-2-title': '',
})
data._mutable = False
formset = AuthorBooksFormSet(data, instance=Author(), save_as_new=True)
self.assertTrue(formset.is_valid())
self.assertIs(data._mutable, False)
new_author = Author.objects.create(name='Charles Baudelaire')
formset = AuthorBooksFormSet(data, instance=new_author, save_as_new=True)
saved = formset.save()
self.assertEqual(len(saved), 2)
book1, book2 = saved
self.assertEqual(book1.title, 'Les Fleurs du Mal')
self.assertEqual(book2.title, 'Les Paradis Artificiels')
# Test using a custom prefix on an inline formset.
formset = AuthorBooksFormSet(prefix="test")
self.assertEqual(len(formset.forms), 2)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_test-0-title">Title:</label>'
'<input id="id_test-0-title" type="text" name="test-0-title" maxlength="100" />'
'<input type="hidden" name="test-0-author" id="id_test-0-author" />'
'<input type="hidden" name="test-0-id" id="id_test-0-id" /></p>'
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_test-1-title">Title:</label>'
'<input id="id_test-1-title" type="text" name="test-1-title" maxlength="100" />'
'<input type="hidden" name="test-1-author" id="id_test-1-author" />'
'<input type="hidden" name="test-1-id" id="id_test-1-id" /></p>'
)
def test_inline_formsets_with_custom_pk(self):
# Test inline formsets where the inline-edited object has a custom
# primary key that is not the fk to the parent object.
self.maxDiff = 1024
AuthorBooksFormSet2 = inlineformset_factory(
Author, BookWithCustomPK, can_delete=False, extra=1, fields="__all__"
)
author = Author.objects.create(pk=1, name='Charles Baudelaire')
formset = AuthorBooksFormSet2(instance=author)
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_bookwithcustompk_set-0-my_pk">My pk:</label>'
'<input id="id_bookwithcustompk_set-0-my_pk" type="number" '
'name="bookwithcustompk_set-0-my_pk" step="1" /></p>'
'<p><label for="id_bookwithcustompk_set-0-title">Title:</label>'
'<input id="id_bookwithcustompk_set-0-title" type="text" '
'name="bookwithcustompk_set-0-title" maxlength="100" />'
'<input type="hidden" name="bookwithcustompk_set-0-author" '
'value="1" id="id_bookwithcustompk_set-0-author" /></p>'
)
data = {
'bookwithcustompk_set-TOTAL_FORMS': '1', # the number of forms rendered
'bookwithcustompk_set-INITIAL_FORMS': '0', # the number of forms with initial data
'bookwithcustompk_set-MAX_NUM_FORMS': '', # the max number of forms
'bookwithcustompk_set-0-my_pk': '77777',
'bookwithcustompk_set-0-title': 'Les Fleurs du Mal',
}
formset = AuthorBooksFormSet2(data, instance=author)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
book1, = saved
self.assertEqual(book1.pk, 77777)
book1 = author.bookwithcustompk_set.get()
self.assertEqual(book1.title, 'Les Fleurs du Mal')
def test_inline_formsets_with_multi_table_inheritance(self):
        # Test inline formsets where the inline-edited object uses multi-table
        # inheritance and thus has a primary key that is auto-created but not
        # an AutoField.
AuthorBooksFormSet3 = inlineformset_factory(Author, AlternateBook, can_delete=False, extra=1, fields="__all__")
author = Author.objects.create(pk=1, name='Charles Baudelaire')
formset = AuthorBooksFormSet3(instance=author)
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_alternatebook_set-0-title">Title:</label>'
'<input id="id_alternatebook_set-0-title" type="text" '
'name="alternatebook_set-0-title" maxlength="100" /></p>'
'<p><label for="id_alternatebook_set-0-notes">Notes:</label>'
'<input id="id_alternatebook_set-0-notes" type="text" '
'name="alternatebook_set-0-notes" maxlength="100" />'
'<input type="hidden" name="alternatebook_set-0-author" value="1" '
'id="id_alternatebook_set-0-author" />'
'<input type="hidden" name="alternatebook_set-0-book_ptr" '
'id="id_alternatebook_set-0-book_ptr" /></p>'
)
data = {
'alternatebook_set-TOTAL_FORMS': '1', # the number of forms rendered
'alternatebook_set-INITIAL_FORMS': '0', # the number of forms with initial data
'alternatebook_set-MAX_NUM_FORMS': '', # the max number of forms
'alternatebook_set-0-title': 'Flowers of Evil',
'alternatebook_set-0-notes': 'English translation of Les Fleurs du Mal'
}
formset = AuthorBooksFormSet3(data, instance=author)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
book1, = saved
self.assertEqual(book1.title, 'Flowers of Evil')
self.assertEqual(book1.notes, 'English translation of Les Fleurs du Mal')
@skipUnlessDBFeature('supports_partially_nullable_unique_constraints')
def test_inline_formsets_with_nullable_unique_together(self):
# Test inline formsets where the inline-edited object has a
# unique_together constraint with a nullable member
AuthorBooksFormSet4 = inlineformset_factory(
Author, BookWithOptionalAltEditor, can_delete=False, extra=2, fields="__all__"
)
author = Author.objects.create(pk=1, name='Charles Baudelaire')
data = {
'bookwithoptionalalteditor_set-TOTAL_FORMS': '2', # the number of forms rendered
'bookwithoptionalalteditor_set-INITIAL_FORMS': '0', # the number of forms with initial data
'bookwithoptionalalteditor_set-MAX_NUM_FORMS': '', # the max number of forms
'bookwithoptionalalteditor_set-0-author': '1',
'bookwithoptionalalteditor_set-0-title': 'Les Fleurs du Mal',
'bookwithoptionalalteditor_set-1-author': '1',
'bookwithoptionalalteditor_set-1-title': 'Les Fleurs du Mal',
}
formset = AuthorBooksFormSet4(data, instance=author)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 2)
book1, book2 = saved
self.assertEqual(book1.author_id, 1)
self.assertEqual(book1.title, 'Les Fleurs du Mal')
self.assertEqual(book2.author_id, 1)
self.assertEqual(book2.title, 'Les Fleurs du Mal')
def test_inline_formsets_with_custom_save_method(self):
AuthorBooksFormSet = inlineformset_factory(Author, Book, can_delete=False, extra=2, fields="__all__")
author = Author.objects.create(pk=1, name='Charles Baudelaire')
book1 = Book.objects.create(pk=1, author=author, title='Les Paradis Artificiels')
book2 = Book.objects.create(pk=2, author=author, title='Les Fleurs du Mal')
book3 = Book.objects.create(pk=3, author=author, title='Flowers of Evil')
class PoemForm(forms.ModelForm):
def save(self, commit=True):
# change the name to "Brooklyn Bridge" just to be a jerk.
poem = super().save(commit=False)
poem.name = "Brooklyn Bridge"
if commit:
poem.save()
return poem
PoemFormSet = inlineformset_factory(Poet, Poem, form=PoemForm, fields="__all__")
data = {
'poem_set-TOTAL_FORMS': '3', # the number of forms rendered
'poem_set-INITIAL_FORMS': '0', # the number of forms with initial data
'poem_set-MAX_NUM_FORMS': '', # the max number of forms
'poem_set-0-name': 'The Cloud in Trousers',
'poem_set-1-name': 'I',
'poem_set-2-name': '',
}
poet = Poet.objects.create(name='Vladimir Mayakovsky')
formset = PoemFormSet(data=data, instance=poet)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 2)
poem1, poem2 = saved
self.assertEqual(poem1.name, 'Brooklyn Bridge')
self.assertEqual(poem2.name, 'Brooklyn Bridge')
# We can provide a custom queryset to our InlineFormSet:
custom_qs = Book.objects.order_by('-title')
formset = AuthorBooksFormSet(instance=author, queryset=custom_qs)
self.assertEqual(len(formset.forms), 5)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_book_set-0-title">Title:</label>'
'<input id="id_book_set-0-title" type="text" name="book_set-0-title" '
'value="Les Paradis Artificiels" maxlength="100" />'
'<input type="hidden" name="book_set-0-author" value="1" id="id_book_set-0-author" />'
'<input type="hidden" name="book_set-0-id" value="1" id="id_book_set-0-id" /></p>'
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_book_set-1-title">Title:</label>'
'<input id="id_book_set-1-title" type="text" name="book_set-1-title" '
'value="Les Fleurs du Mal" maxlength="100" />'
'<input type="hidden" name="book_set-1-author" value="1" id="id_book_set-1-author" />'
'<input type="hidden" name="book_set-1-id" value="2" id="id_book_set-1-id" /></p>'
)
self.assertHTMLEqual(
formset.forms[2].as_p(),
'<p><label for="id_book_set-2-title">Title:</label>'
'<input id="id_book_set-2-title" type="text" name="book_set-2-title" '
'value="Flowers of Evil" maxlength="100" />'
'<input type="hidden" name="book_set-2-author" value="1" id="id_book_set-2-author" />'
'<input type="hidden" name="book_set-2-id" value="3" id="id_book_set-2-id" /></p>'
)
self.assertHTMLEqual(
formset.forms[3].as_p(),
'<p><label for="id_book_set-3-title">Title:</label>'
'<input id="id_book_set-3-title" type="text" name="book_set-3-title" maxlength="100" />'
'<input type="hidden" name="book_set-3-author" value="1" id="id_book_set-3-author" />'
'<input type="hidden" name="book_set-3-id" id="id_book_set-3-id" /></p>'
)
self.assertHTMLEqual(
formset.forms[4].as_p(),
'<p><label for="id_book_set-4-title">Title:</label>'
'<input id="id_book_set-4-title" type="text" name="book_set-4-title" maxlength="100" />'
'<input type="hidden" name="book_set-4-author" value="1" id="id_book_set-4-author" />'
'<input type="hidden" name="book_set-4-id" id="id_book_set-4-id" /></p>'
)
data = {
'book_set-TOTAL_FORMS': '5', # the number of forms rendered
'book_set-INITIAL_FORMS': '3', # the number of forms with initial data
'book_set-MAX_NUM_FORMS': '', # the max number of forms
'book_set-0-id': str(book1.id),
'book_set-0-title': 'Les Paradis Artificiels',
'book_set-1-id': str(book2.id),
'book_set-1-title': 'Les Fleurs du Mal',
'book_set-2-id': str(book3.id),
'book_set-2-title': 'Flowers of Evil',
'book_set-3-title': 'Revue des deux mondes',
'book_set-4-title': '',
}
formset = AuthorBooksFormSet(data, instance=author, queryset=custom_qs)
self.assertTrue(formset.is_valid())
custom_qs = Book.objects.filter(title__startswith='F')
formset = AuthorBooksFormSet(instance=author, queryset=custom_qs)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_book_set-0-title">Title:</label>'
'<input id="id_book_set-0-title" type="text" name="book_set-0-title" '
'value="Flowers of Evil" maxlength="100" />'
'<input type="hidden" name="book_set-0-author" value="1" id="id_book_set-0-author" />'
'<input type="hidden" name="book_set-0-id" value="3" id="id_book_set-0-id" /></p>'
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_book_set-1-title">Title:</label>'
'<input id="id_book_set-1-title" type="text" name="book_set-1-title" maxlength="100" />'
'<input type="hidden" name="book_set-1-author" value="1" id="id_book_set-1-author" />'
'<input type="hidden" name="book_set-1-id" id="id_book_set-1-id" /></p>'
)
self.assertHTMLEqual(
formset.forms[2].as_p(),
'<p><label for="id_book_set-2-title">Title:</label>'
'<input id="id_book_set-2-title" type="text" name="book_set-2-title" maxlength="100" />'
'<input type="hidden" name="book_set-2-author" value="1" id="id_book_set-2-author" />'
'<input type="hidden" name="book_set-2-id" id="id_book_set-2-id" /></p>'
)
data = {
'book_set-TOTAL_FORMS': '3', # the number of forms rendered
'book_set-INITIAL_FORMS': '1', # the number of forms with initial data
'book_set-MAX_NUM_FORMS': '', # the max number of forms
'book_set-0-id': str(book3.id),
'book_set-0-title': 'Flowers of Evil',
'book_set-1-title': 'Revue des deux mondes',
'book_set-2-title': '',
}
formset = AuthorBooksFormSet(data, instance=author, queryset=custom_qs)
self.assertTrue(formset.is_valid())
def test_inline_formsets_with_custom_save_method_related_instance(self):
"""
The ModelForm.save() method should be able to access the related object
if it exists in the database (#24395).
"""
class PoemForm2(forms.ModelForm):
def save(self, commit=True):
poem = super().save(commit=False)
poem.name = "%s by %s" % (poem.name, poem.poet.name)
if commit:
poem.save()
return poem
PoemFormSet = inlineformset_factory(Poet, Poem, form=PoemForm2, fields="__all__")
data = {
'poem_set-TOTAL_FORMS': '1',
'poem_set-INITIAL_FORMS': '0',
'poem_set-MAX_NUM_FORMS': '',
'poem_set-0-name': 'Le Lac',
}
poet = Poet()
formset = PoemFormSet(data=data, instance=poet)
self.assertTrue(formset.is_valid())
# The Poet instance is saved after the formset instantiation. This
# happens in admin's changeform_view() when adding a new object and
# some inlines in the same request.
poet.name = 'Lamartine'
poet.save()
poem = formset.save()[0]
self.assertEqual(poem.name, 'Le Lac by Lamartine')
def test_inline_formsets_with_wrong_fk_name(self):
""" Regression for #23451 """
message = "fk_name 'title' is not a ForeignKey to 'model_formsets.Author'."
with self.assertRaisesMessage(ValueError, message):
inlineformset_factory(Author, Book, fields="__all__", fk_name='title')
def test_custom_pk(self):
        # A custom primary key must still be rendered so it can be submitted
        # with the form.
CustomPrimaryKeyFormSet = modelformset_factory(CustomPrimaryKey, fields="__all__")
formset = CustomPrimaryKeyFormSet()
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_form-0-my_pk">My pk:</label> <input id="id_form-0-my_pk" type="text" '
'name="form-0-my_pk" maxlength="10" /></p>'
'<p><label for="id_form-0-some_field">Some field:</label>'
'<input id="id_form-0-some_field" type="text" name="form-0-some_field" maxlength="100" /></p>'
)
# Custom primary keys with ForeignKey, OneToOneField and AutoField ############
place = Place.objects.create(pk=1, name='Giordanos', city='Chicago')
FormSet = inlineformset_factory(Place, Owner, extra=2, can_delete=False, fields="__all__")
formset = FormSet(instance=place)
self.assertEqual(len(formset.forms), 2)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_owner_set-0-name">Name:</label>'
'<input id="id_owner_set-0-name" type="text" name="owner_set-0-name" maxlength="100" />'
'<input type="hidden" name="owner_set-0-place" value="1" id="id_owner_set-0-place" />'
'<input type="hidden" name="owner_set-0-auto_id" id="id_owner_set-0-auto_id" /></p>'
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_owner_set-1-name">Name:</label>'
'<input id="id_owner_set-1-name" type="text" name="owner_set-1-name" maxlength="100" />'
'<input type="hidden" name="owner_set-1-place" value="1" id="id_owner_set-1-place" />'
'<input type="hidden" name="owner_set-1-auto_id" id="id_owner_set-1-auto_id" /></p>'
)
data = {
'owner_set-TOTAL_FORMS': '2',
'owner_set-INITIAL_FORMS': '0',
'owner_set-MAX_NUM_FORMS': '',
'owner_set-0-auto_id': '',
'owner_set-0-name': 'Joe Perry',
'owner_set-1-auto_id': '',
'owner_set-1-name': '',
}
formset = FormSet(data, instance=place)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
owner1, = saved
self.assertEqual(owner1.name, 'Joe Perry')
self.assertEqual(owner1.place.name, 'Giordanos')
formset = FormSet(instance=place)
self.assertEqual(len(formset.forms), 3)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_owner_set-0-name">Name:</label>'
'<input id="id_owner_set-0-name" type="text" name="owner_set-0-name" value="Joe Perry" maxlength="100" />'
'<input type="hidden" name="owner_set-0-place" value="1" id="id_owner_set-0-place" />'
'<input type="hidden" name="owner_set-0-auto_id" value="%d" id="id_owner_set-0-auto_id" /></p>'
% owner1.auto_id
)
self.assertHTMLEqual(
formset.forms[1].as_p(),
'<p><label for="id_owner_set-1-name">Name:</label>'
'<input id="id_owner_set-1-name" type="text" name="owner_set-1-name" maxlength="100" />'
'<input type="hidden" name="owner_set-1-place" value="1" id="id_owner_set-1-place" />'
'<input type="hidden" name="owner_set-1-auto_id" id="id_owner_set-1-auto_id" /></p>'
)
self.assertHTMLEqual(
formset.forms[2].as_p(),
'<p><label for="id_owner_set-2-name">Name:</label>'
'<input id="id_owner_set-2-name" type="text" name="owner_set-2-name" maxlength="100" />'
'<input type="hidden" name="owner_set-2-place" value="1" id="id_owner_set-2-place" />'
'<input type="hidden" name="owner_set-2-auto_id" id="id_owner_set-2-auto_id" /></p>'
)
data = {
'owner_set-TOTAL_FORMS': '3',
'owner_set-INITIAL_FORMS': '1',
'owner_set-MAX_NUM_FORMS': '',
'owner_set-0-auto_id': str(owner1.auto_id),
'owner_set-0-name': 'Joe Perry',
'owner_set-1-auto_id': '',
'owner_set-1-name': 'Jack Berry',
'owner_set-2-auto_id': '',
'owner_set-2-name': '',
}
formset = FormSet(data, instance=place)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
owner2, = saved
self.assertEqual(owner2.name, 'Jack Berry')
self.assertEqual(owner2.place.name, 'Giordanos')
        # Ensure a custom primary key that is a ForeignKey or OneToOneField gets rendered for the user to choose.
FormSet = modelformset_factory(OwnerProfile, fields="__all__")
formset = FormSet()
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_form-0-owner">Owner:</label>'
'<select name="form-0-owner" id="id_form-0-owner">'
'<option value="" selected>---------</option>'
'<option value="%d">Joe Perry at Giordanos</option>'
'<option value="%d">Jack Berry at Giordanos</option>'
'</select></p>'
'<p><label for="id_form-0-age">Age:</label>'
'<input type="number" name="form-0-age" id="id_form-0-age" min="0" /></p>'
% (owner1.auto_id, owner2.auto_id)
)
owner1 = Owner.objects.get(name='Joe Perry')
FormSet = inlineformset_factory(Owner, OwnerProfile, max_num=1, can_delete=False, fields="__all__")
self.assertEqual(FormSet.max_num, 1)
formset = FormSet(instance=owner1)
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_ownerprofile-0-age">Age:</label>'
'<input type="number" name="ownerprofile-0-age" id="id_ownerprofile-0-age" min="0" />'
'<input type="hidden" name="ownerprofile-0-owner" value="%d" id="id_ownerprofile-0-owner" /></p>'
% owner1.auto_id
)
data = {
'ownerprofile-TOTAL_FORMS': '1',
'ownerprofile-INITIAL_FORMS': '0',
'ownerprofile-MAX_NUM_FORMS': '1',
'ownerprofile-0-owner': '',
'ownerprofile-0-age': '54',
}
formset = FormSet(data, instance=owner1)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
profile1, = saved
self.assertEqual(profile1.owner, owner1)
self.assertEqual(profile1.age, 54)
formset = FormSet(instance=owner1)
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_ownerprofile-0-age">Age:</label>'
'<input type="number" name="ownerprofile-0-age" value="54" id="id_ownerprofile-0-age" min="0" />'
'<input type="hidden" name="ownerprofile-0-owner" value="%d" id="id_ownerprofile-0-owner" /></p>'
% owner1.auto_id
)
data = {
'ownerprofile-TOTAL_FORMS': '1',
'ownerprofile-INITIAL_FORMS': '1',
'ownerprofile-MAX_NUM_FORMS': '1',
'ownerprofile-0-owner': str(owner1.auto_id),
'ownerprofile-0-age': '55',
}
formset = FormSet(data, instance=owner1)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
profile1, = saved
self.assertEqual(profile1.owner, owner1)
self.assertEqual(profile1.age, 55)
def test_unique_true_enforces_max_num_one(self):
# ForeignKey with unique=True should enforce max_num=1
place = Place.objects.create(pk=1, name='Giordanos', city='Chicago')
FormSet = inlineformset_factory(Place, Location, can_delete=False, fields="__all__")
self.assertEqual(FormSet.max_num, 1)
formset = FormSet(instance=place)
self.assertEqual(len(formset.forms), 1)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<p><label for="id_location_set-0-lat">Lat:</label>'
'<input id="id_location_set-0-lat" type="text" name="location_set-0-lat" maxlength="100" /></p>'
'<p><label for="id_location_set-0-lon">Lon:</label> '
'<input id="id_location_set-0-lon" type="text" name="location_set-0-lon" maxlength="100" />'
'<input type="hidden" name="location_set-0-place" value="1" id="id_location_set-0-place" />'
'<input type="hidden" name="location_set-0-id" id="id_location_set-0-id" /></p>'
)
def test_foreign_keys_in_parents(self):
self.assertEqual(type(_get_foreign_key(Restaurant, Owner)), models.ForeignKey)
self.assertEqual(type(_get_foreign_key(MexicanRestaurant, Owner)), models.ForeignKey)
def test_unique_validation(self):
FormSet = modelformset_factory(Product, fields="__all__", extra=1)
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-slug': 'car-red',
}
formset = FormSet(data)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
product1, = saved
self.assertEqual(product1.slug, 'car-red')
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-slug': 'car-red',
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'slug': ['Product with this Slug already exists.']}])
def test_modelformset_validate_max_flag(self):
        # If validate_max is set and max_num is less than TOTAL_FORMS in the
        # data, the formset fails validation with a non-form error.
        # MAX_NUM_FORMS in the data is irrelevant here (it's rendered as a
        # hint for the client, but its submitted value is never checked).
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '2', # should be ignored
'form-0-price': '12.00',
'form-0-quantity': '1',
'form-1-price': '24.00',
'form-1-quantity': '2',
}
FormSet = modelformset_factory(Price, fields="__all__", extra=1, max_num=1, validate_max=True)
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.non_form_errors(), ['Please submit 1 or fewer forms.'])
# Now test the same thing without the validate_max flag to ensure
# default behavior is unchanged
FormSet = modelformset_factory(Price, fields="__all__", extra=1, max_num=1)
formset = FormSet(data)
self.assertTrue(formset.is_valid())
def test_unique_together_validation(self):
FormSet = modelformset_factory(Price, fields="__all__", extra=1)
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-price': '12.00',
'form-0-quantity': '1',
}
formset = FormSet(data)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
price1, = saved
self.assertEqual(price1.price, Decimal('12.00'))
self.assertEqual(price1.quantity, 1)
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-price': '12.00',
'form-0-quantity': '1',
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'__all__': ['Price with this Price and Quantity already exists.']}])
def test_unique_together_with_inlineformset_factory(self):
# Also see bug #8882.
repository = Repository.objects.create(name='Test Repo')
FormSet = inlineformset_factory(Repository, Revision, extra=1, fields="__all__")
data = {
'revision_set-TOTAL_FORMS': '1',
'revision_set-INITIAL_FORMS': '0',
'revision_set-MAX_NUM_FORMS': '',
'revision_set-0-repository': repository.pk,
'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76',
'revision_set-0-DELETE': '',
}
formset = FormSet(data, instance=repository)
self.assertTrue(formset.is_valid())
saved = formset.save()
self.assertEqual(len(saved), 1)
revision1, = saved
self.assertEqual(revision1.repository, repository)
self.assertEqual(revision1.revision, '146239817507f148d448db38840db7c3cbf47c76')
# attempt to save the same revision against the same repo.
data = {
'revision_set-TOTAL_FORMS': '1',
'revision_set-INITIAL_FORMS': '0',
'revision_set-MAX_NUM_FORMS': '',
'revision_set-0-repository': repository.pk,
'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76',
'revision_set-0-DELETE': '',
}
formset = FormSet(data, instance=repository)
self.assertFalse(formset.is_valid())
self.assertEqual(formset.errors, [{'__all__': ['Revision with this Repository and Revision already exists.']}])
        # unique_together with inlineformset_factory and overridden form
        # fields. Also see #9494.
FormSet = inlineformset_factory(Repository, Revision, fields=('revision',), extra=1)
data = {
'revision_set-TOTAL_FORMS': '1',
'revision_set-INITIAL_FORMS': '0',
'revision_set-MAX_NUM_FORMS': '',
'revision_set-0-repository': repository.pk,
'revision_set-0-revision': '146239817507f148d448db38840db7c3cbf47c76',
'revision_set-0-DELETE': '',
}
formset = FormSet(data, instance=repository)
self.assertFalse(formset.is_valid())
def test_callable_defaults(self):
# Use of callable defaults (see bug #7975).
person = Person.objects.create(name='Ringo')
FormSet = inlineformset_factory(Person, Membership, can_delete=False, extra=1, fields="__all__")
formset = FormSet(instance=person)
        # Django will render a hidden field for model fields that have a
        # callable default. This is required so the submitted value can be
        # compared against the initial one when determining which extra forms
        # have changed and need to be saved.
self.assertEqual(len(formset.forms), 1) # this formset only has one form
form = formset.forms[0]
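        # Call the callable default to capture the datetime the form used as
        # its initial value.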
now = form.fields['date_joined'].initial()
result = form.as_p()
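        # Replace the rendered timestamps with a placeholder so the HTML
        # comparison is deterministic.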
result = re.sub(r'[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}(?:\.[0-9]+)?', '__DATETIME__', result)
self.assertHTMLEqual(
result,
'<p><label for="id_membership_set-0-date_joined">Date joined:</label>'
'<input type="text" name="membership_set-0-date_joined" '
'value="__DATETIME__" id="id_membership_set-0-date_joined" />'
'<input type="hidden" name="initial-membership_set-0-date_joined" value="__DATETIME__" '
'id="initial-membership_set-0-id_membership_set-0-date_joined" /></p>'
'<p><label for="id_membership_set-0-karma">Karma:</label>'
'<input type="number" name="membership_set-0-karma" id="id_membership_set-0-karma" />'
'<input type="hidden" name="membership_set-0-person" value="%d" id="id_membership_set-0-person" />'
'<input type="hidden" name="membership_set-0-id" id="id_membership_set-0-id" /></p>'
% person.id)
        # Test validation with callable defaults; validation relies on the hidden initial fields.
data = {
'membership_set-TOTAL_FORMS': '1',
'membership_set-INITIAL_FORMS': '0',
'membership_set-MAX_NUM_FORMS': '',
'membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'),
'initial-membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'),
'membership_set-0-karma': '',
}
formset = FormSet(data, instance=person)
self.assertTrue(formset.is_valid())
        # Now test the case where the submitted data differs from the initial value.
one_day_later = now + datetime.timedelta(days=1)
filled_data = {
'membership_set-TOTAL_FORMS': '1',
'membership_set-INITIAL_FORMS': '0',
'membership_set-MAX_NUM_FORMS': '',
'membership_set-0-date_joined': one_day_later.strftime('%Y-%m-%d %H:%M:%S'),
'initial-membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'),
'membership_set-0-karma': '',
}
formset = FormSet(filled_data, instance=person)
self.assertFalse(formset.is_valid())
        # Now test with split datetime fields.
class MembershipForm(forms.ModelForm):
date_joined = forms.SplitDateTimeField(initial=now)
class Meta:
model = Membership
fields = "__all__"
def __init__(self, **kwargs):
super().__init__(**kwargs)
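                # Swap in a split widget so the datetime is submitted as
                # separate date and time values.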
self.fields['date_joined'].widget = forms.SplitDateTimeWidget()
FormSet = inlineformset_factory(
Person,
Membership,
form=MembershipForm,
can_delete=False,
extra=1,
fields="__all__",
)
data = {
'membership_set-TOTAL_FORMS': '1',
'membership_set-INITIAL_FORMS': '0',
'membership_set-MAX_NUM_FORMS': '',
'membership_set-0-date_joined_0': now.strftime('%Y-%m-%d'),
'membership_set-0-date_joined_1': now.strftime('%H:%M:%S'),
'initial-membership_set-0-date_joined': now.strftime('%Y-%m-%d %H:%M:%S'),
'membership_set-0-karma': '',
}
formset = FormSet(data, instance=person)
self.assertTrue(formset.is_valid())
def test_inlineformset_factory_with_null_fk(self):
        # inlineformset_factory tests with an fk that has null=True (see #9462).
        # Create some data that exhibits the issue.
team = Team.objects.create(name="Red Vipers")
Player(name="Timmy").save()
Player(name="Bobby", team=team).save()
PlayerInlineFormSet = inlineformset_factory(Team, Player, fields="__all__")
formset = PlayerInlineFormSet()
self.assertQuerysetEqual(formset.get_queryset(), [])
formset = PlayerInlineFormSet(instance=team)
players = formset.get_queryset()
self.assertEqual(len(players), 1)
player1, = players
self.assertEqual(player1.team, team)
self.assertEqual(player1.name, 'Bobby')
def test_model_formset_with_custom_pk(self):
        # A formset for a model with a custom primary key should still have
        # that key added to its forms automatically.
FormSet = modelformset_factory(ClassyMexicanRestaurant, fields=["tacos_are_yummy"])
self.assertEqual(sorted(FormSet().forms[0].fields.keys()), ['tacos_are_yummy', 'the_restaurant'])
def test_model_formset_with_initial_model_instance(self):
        # has_changed() should compare the model instance and its primary key
        # (see #18898).
FormSet = modelformset_factory(Poem, fields='__all__')
john_milton = Poet(name="John Milton")
john_milton.save()
data = {
'form-TOTAL_FORMS': 1,
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': '',
'form-0-name': '',
'form-0-poet': str(john_milton.id),
}
formset = FormSet(initial=[{'poet': john_milton}], data=data)
self.assertFalse(formset.extra_forms[0].has_changed())
def test_model_formset_with_initial_queryset(self):
        # has_changed() should work with a queryset and a list of pks
        # (see #18898).
FormSet = modelformset_factory(AuthorMeeting, fields='__all__')
Author.objects.create(pk=1, name='Charles Baudelaire')
data = {
'form-TOTAL_FORMS': 1,
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': '',
'form-0-name': '',
'form-0-created': '',
'form-0-authors': list(Author.objects.values_list('id', flat=True)),
}
formset = FormSet(initial=[{'authors': Author.objects.all()}], data=data)
self.assertFalse(formset.extra_forms[0].has_changed())
    def test_prevent_duplicates_from_within_the_same_formset(self):
FormSet = modelformset_factory(Product, fields="__all__", extra=2)
data = {
'form-TOTAL_FORMS': 2,
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': '',
'form-0-slug': 'red_car',
'form-1-slug': 'red_car',
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(formset._non_form_errors, ['Please correct the duplicate data for slug.'])
FormSet = modelformset_factory(Price, fields="__all__", extra=2)
data = {
'form-TOTAL_FORMS': 2,
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': '',
'form-0-price': '25',
'form-0-quantity': '7',
'form-1-price': '25',
'form-1-quantity': '7',
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(
formset._non_form_errors,
['Please correct the duplicate data for price and quantity, which must be unique.']
)
        # Only the price field is specified; this should skip any unique
        # checks, since the unique_together constraint cannot be fulfilled.
        # This will fail with a KeyError if broken.
FormSet = modelformset_factory(Price, fields=("price",), extra=2)
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-price': '24',
'form-1-price': '24',
}
formset = FormSet(data)
self.assertTrue(formset.is_valid())
FormSet = inlineformset_factory(Author, Book, extra=0, fields="__all__")
author = Author.objects.create(pk=1, name='Charles Baudelaire')
Book.objects.create(pk=1, author=author, title='Les Paradis Artificiels')
Book.objects.create(pk=2, author=author, title='Les Fleurs du Mal')
Book.objects.create(pk=3, author=author, title='Flowers of Evil')
book_ids = author.book_set.order_by('id').values_list('id', flat=True)
data = {
'book_set-TOTAL_FORMS': '2',
'book_set-INITIAL_FORMS': '2',
'book_set-MAX_NUM_FORMS': '',
'book_set-0-title': 'The 2008 Election',
'book_set-0-author': str(author.id),
'book_set-0-id': str(book_ids[0]),
'book_set-1-title': 'The 2008 Election',
'book_set-1-author': str(author.id),
'book_set-1-id': str(book_ids[1]),
}
formset = FormSet(data=data, instance=author)
self.assertFalse(formset.is_valid())
self.assertEqual(formset._non_form_errors, ['Please correct the duplicate data for title.'])
self.assertEqual(formset.errors, [{}, {'__all__': ['Please correct the duplicate values below.']}])
FormSet = modelformset_factory(Post, fields="__all__", extra=2)
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'blah',
'form-0-slug': 'Morning',
'form-0-subtitle': 'foo',
'form-0-posted': '2009-01-01',
'form-1-title': 'blah',
'form-1-slug': 'Morning in Prague',
'form-1-subtitle': 'rawr',
'form-1-posted': '2009-01-01'
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(
formset._non_form_errors,
['Please correct the duplicate data for title which must be unique for the date in posted.']
)
self.assertEqual(
formset.errors,
[{}, {'__all__': ['Please correct the duplicate values below.']}]
)
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'foo',
'form-0-slug': 'Morning in Prague',
'form-0-subtitle': 'foo',
'form-0-posted': '2009-01-01',
'form-1-title': 'blah',
'form-1-slug': 'Morning in Prague',
'form-1-subtitle': 'rawr',
'form-1-posted': '2009-08-02'
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(
formset._non_form_errors,
['Please correct the duplicate data for slug which must be unique for the year in posted.']
)
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'foo',
'form-0-slug': 'Morning in Prague',
'form-0-subtitle': 'rawr',
'form-0-posted': '2008-08-01',
'form-1-title': 'blah',
'form-1-slug': 'Prague',
'form-1-subtitle': 'rawr',
'form-1-posted': '2009-08-02'
}
formset = FormSet(data)
self.assertFalse(formset.is_valid())
self.assertEqual(
formset._non_form_errors,
['Please correct the duplicate data for subtitle which must be unique for the month in posted.']
)
def test_prevent_change_outer_model_and_create_invalid_data(self):
author = Author.objects.create(name='Charles')
other_author = Author.objects.create(name='Walt')
AuthorFormSet = modelformset_factory(Author, fields='__all__')
data = {
'form-TOTAL_FORMS': '2',
'form-INITIAL_FORMS': '2',
'form-MAX_NUM_FORMS': '',
'form-0-id': str(author.id),
'form-0-name': 'Charles',
'form-1-id': str(other_author.id), # A model not in the formset's queryset.
'form-1-name': 'Changed name',
}
# This formset is only for Walt Whitman and shouldn't accept data for
# other_author.
formset = AuthorFormSet(data=data, queryset=Author.objects.filter(id__in=(author.id,)))
self.assertTrue(formset.is_valid())
formset.save()
# The name of other_author shouldn't be changed and new models aren't
# created.
self.assertQuerysetEqual(Author.objects.all(), ['<Author: Charles>', '<Author: Walt>'])
class TestModelFormsetOverridesThroughFormMeta(TestCase):
def test_modelformset_factory_widgets(self):
widgets = {
'name': forms.TextInput(attrs={'class': 'poet'})
}
PoetFormSet = modelformset_factory(Poet, fields="__all__", widgets=widgets)
form = PoetFormSet.form()
self.assertHTMLEqual(
"%s" % form['name'],
'<input id="id_name" maxlength="100" type="text" class="poet" name="name" required />'
)
def test_inlineformset_factory_widgets(self):
widgets = {
'title': forms.TextInput(attrs={'class': 'book'})
}
BookFormSet = inlineformset_factory(Author, Book, widgets=widgets, fields="__all__")
form = BookFormSet.form()
self.assertHTMLEqual(
"%s" % form['title'],
'<input class="book" id="id_title" maxlength="100" name="title" type="text" required />'
)
def test_modelformset_factory_labels_overrides(self):
BookFormSet = modelformset_factory(Book, fields="__all__", labels={
'title': 'Name'
})
form = BookFormSet.form()
self.assertHTMLEqual(form['title'].label_tag(), '<label for="id_title">Name:</label>')
def test_inlineformset_factory_labels_overrides(self):
BookFormSet = inlineformset_factory(Author, Book, fields="__all__", labels={
'title': 'Name'
})
form = BookFormSet.form()
self.assertHTMLEqual(form['title'].label_tag(), '<label for="id_title">Name:</label>')
def test_modelformset_factory_help_text_overrides(self):
BookFormSet = modelformset_factory(Book, fields="__all__", help_texts={
'title': 'Choose carefully.'
})
form = BookFormSet.form()
self.assertEqual(form['title'].help_text, 'Choose carefully.')
def test_inlineformset_factory_help_text_overrides(self):
BookFormSet = inlineformset_factory(Author, Book, fields="__all__", help_texts={
'title': 'Choose carefully.'
})
form = BookFormSet.form()
self.assertEqual(form['title'].help_text, 'Choose carefully.')
def test_modelformset_factory_error_messages_overrides(self):
author = Author.objects.create(pk=1, name='Charles Baudelaire')
BookFormSet = modelformset_factory(Book, fields="__all__", error_messages={
'title': {
'max_length': 'Title too long!!'
}
})
form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id})
form.full_clean()
self.assertEqual(form.errors, {'title': ['Title too long!!']})
def test_inlineformset_factory_error_messages_overrides(self):
author = Author.objects.create(pk=1, name='Charles Baudelaire')
BookFormSet = inlineformset_factory(Author, Book, fields="__all__", error_messages={
'title': {
'max_length': 'Title too long!!'
}
})
form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id})
form.full_clean()
self.assertEqual(form.errors, {'title': ['Title too long!!']})
def test_modelformset_factory_field_class_overrides(self):
author = Author.objects.create(pk=1, name='Charles Baudelaire')
BookFormSet = modelformset_factory(Book, fields="__all__", field_classes={
'title': forms.SlugField,
})
form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id})
self.assertIs(Book._meta.get_field('title').__class__, models.CharField)
self.assertIsInstance(form.fields['title'], forms.SlugField)
def test_inlineformset_factory_field_class_overrides(self):
author = Author.objects.create(pk=1, name='Charles Baudelaire')
BookFormSet = inlineformset_factory(Author, Book, fields="__all__", field_classes={
'title': forms.SlugField,
})
form = BookFormSet.form(data={'title': 'Foo ' * 30, 'author': author.id})
self.assertIs(Book._meta.get_field('title').__class__, models.CharField)
self.assertIsInstance(form.fields['title'], forms.SlugField)
| bsd-3-clause | -54,937,005,986,186,290 | 43.798055 | 119 | 0.574891 | false |
nirbheek/cerbero | cerbero/tools/osxrelocator.py | 20 | 4868 | #!/usr/bin/env python
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
import os
from cerbero.utils import shell
INT_CMD = 'install_name_tool'
OTOOL_CMD = 'otool'
class OSXRelocator(object):
'''
    Wrapper for OS X's install_name_tool and otool commands to help
    relocate shared libraries.
    It parses the lib/, libexec/ and bin/ directories, changes the prefix
    path of the shared libraries that an object file uses, and changes its
    library ID if the file is a shared library.
'''
def __init__(self, root, lib_prefix, new_lib_prefix, recursive):
self.root = root
self.lib_prefix = self._fix_path(lib_prefix)
self.new_lib_prefix = self._fix_path(new_lib_prefix)
self.recursive = recursive
def relocate(self):
self.parse_dir(self.root)
def relocate_file(self, object_file, id=None):
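        # Rewrite both the dependency references and the library ID of a
        # single object file.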
self.change_libs_path(object_file)
self.change_id(object_file, id)
def change_id(self, object_file, id=None):
id = id or object_file.replace(self.lib_prefix, self.new_lib_prefix)
filename = os.path.basename(object_file)
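        # Only shared libraries (.so / .dylib) carry an install name (ID).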
if not (filename.endswith('so') or filename.endswith('dylib')):
return
cmd = '%s -id %s %s' % (INT_CMD, id, object_file)
shell.call(cmd)
def change_libs_path(self, object_file):
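        # Rewrite every dependency whose install path starts with the old
        # prefix.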
for lib in self.list_shared_libraries(object_file):
if self.lib_prefix in lib:
new_lib = lib.replace(self.lib_prefix, self.new_lib_prefix)
cmd = '%s -change %s %s %s' % (INT_CMD, lib, new_lib,
object_file)
shell.call(cmd)
def parse_dir(self, dir_path, filters=None):
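        # Walk the tree (optionally filtering by file extension) and relocate
        # every object file found.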
for dirpath, dirnames, filenames in os.walk(dir_path):
for f in filenames:
if filters is not None and \
os.path.splitext(f)[1] not in filters:
continue
lib = os.path.join(dirpath, f)
id = self.library_id_name(lib).replace(
self.lib_prefix, self.new_lib_prefix)
self.change_libs_path(lib)
self.change_id(lib, id)
if not self.recursive:
break
@staticmethod
def list_shared_libraries(object_file):
cmd = '%s -L %s' % (OTOOL_CMD, object_file)
res = shell.check_call(cmd).split('\n')
        # Skip the first line, which is just the object file's name
        libs = res[1:]
        # Strip the leading tab from each entry
        libs = [x[1:] for x in libs]
        # Drop the version info, keeping only the library path
        libs = [x.split(' ', 1)[0] for x in libs]
return libs
@staticmethod
def library_id_name(object_file):
cmd = '%s -D %s' % (OTOOL_CMD, object_file)
res = shell.check_call(cmd).split('\n')[0]
        # otool prints the name followed by ':'; strip the trailing colon
lib_name = res[:-1]
return lib_name
def _fix_path(self, path):
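        # Strip a trailing slash so prefix replacement behaves consistently.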
if path.endswith('/'):
return path[:-1]
return path
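# A minimal usage sketch (the paths below are hypothetical): relocate every
# object file under /tmp/dist so references to the old build prefix resolve
# via @rpath instead:
#
#   relocator = OSXRelocator('/tmp/dist', '/opt/build/prefix',
#                            '@rpath', recursive=True)
#   relocator.relocate()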
class Main(object):
def run(self):
        # We use optparse.OptionParser instead of argparse.ArgumentParser
        # because this script might be run on OS X 10.6 or older, which does
        # not provide the argparse module
import optparse
usage = "usage: %prog [options] directory old_prefix new_prefix"
        description = 'Relocates object files, replacing the location '\
                      'prefix of their dependent dynamic libraries with '\
                      'a new one'
parser = optparse.OptionParser(usage=usage, description=description)
parser.add_option('-r', '--recursive', action='store_true',
default=False, dest='recursive',
help='Scan directories recursively')
options, args = parser.parse_args()
if len(args) != 3:
parser.print_usage()
exit(1)
relocator = OSXRelocator(args[0], args[1], args[2], options.recursive)
relocator.relocate()
exit(0)
if __name__ == "__main__":
main = Main()
main.run()
| lgpl-2.1 | 8,574,183,854,850,085,000 | 35.328358 | 79 | 0.607025 | false |