repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
---|---|---|---|---|---|---|---|---|---|---|
zhangh43/incubator-hawq | tools/bin/ext/pg8000/protocol.py | 15 | 47286 | # vim: sw=4:expandtab:foldmethod=marker
#
# Copyright (c) 2007-2009, Mathieu Fenniak
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
__author__ = "Mathieu Fenniak"
import socket
import select
import threading
import struct
import hashlib
from cStringIO import StringIO
from errors import *
from util import MulticastDelegate
import types
##
# An SSLRequest message. To initiate an SSL-encrypted connection, an
# SSLRequest message is used rather than a {@link StartupMessage
# StartupMessage}. A StartupMessage is still sent, but only after SSL
# negotiation (if accepted).
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class SSLRequest(object):
def __init__(self):
pass
# Int32(8) - Message length, including self.<br>
# Int32(80877103) - The SSL request code.<br>
def serialize(self):
return struct.pack("!ii", 8, 80877103)
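# Illustrative note (added; not part of the original module): the serialized
# SSLRequest is always the same 8 bytes,
# '\x00\x00\x00\x08\x04\xd2\x16\x2f' -- a length of 8 followed by the
# request code 80877103 (0x04D2162F).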
##
# A StartupMessage message. Begins a DB session, identifying the user to be
# authenticated as and the database to connect to.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class StartupMessage(object):
# Greenplum utility mode
def __init__(self, user, database=None, options=None):
self.user = user
self.database = database
self.options = options
# Int32 - Message length, including self.
# Int32(196608) - Protocol version number. Version 3.0.
# Any number of key/value pairs, terminated by a zero byte:
# String - A parameter name (user, database, or options)
# String - Parameter value
def serialize(self):
protocol = 196608
val = struct.pack("!i", protocol)
val += "user\x00" + self.user + "\x00"
if self.database:
val += "database\x00" + self.database + "\x00"
if self.options:
val += "options\x00" + self.options + "\x00"
val += "\x00"
val = struct.pack("!i", len(val) + 4) + val
return val
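# Illustrative example (added; the user and database names are made up):
# StartupMessage("postgres", database="test").serialize() yields
# '\x00\x00\x00\x25' (total length 37) + '\x00\x03\x00\x00' (protocol 3.0)
# + 'user\x00postgres\x00database\x00test\x00\x00'.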
##
# Parse message. Creates a prepared statement in the DB session.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
#
# @param ps Name of the prepared statement to create.
# @param qs Query string.
# @param type_oids An iterable that contains the PostgreSQL type OIDs for
# parameters in the query string.
class Parse(object):
def __init__(self, ps, qs, type_oids):
self.ps = ps
self.qs = qs
self.type_oids = type_oids
def __repr__(self):
return "<Parse ps=%r qs=%r>" % (self.ps, self.qs)
# Byte1('P') - Identifies the message as a Parse command.
# Int32 - Message length, including self.
# String - Prepared statement name. An empty string selects the unnamed
# prepared statement.
# String - The query string.
# Int16 - Number of parameter data types specified (can be zero).
# For each parameter:
# Int32 - The OID of the parameter data type.
def serialize(self):
val = self.ps + "\x00" + self.qs + "\x00"
val = val + struct.pack("!h", len(self.type_oids))
for oid in self.type_oids:
# Parse message doesn't seem to handle the -1 type_oid for NULL
# values that other messages handle. So we'll provide type_oid 705,
# the PG "unknown" type.
if oid == -1: oid = 705
val = val + struct.pack("!i", oid)
val = struct.pack("!i", len(val) + 4) + val
val = "P" + val
return val
##
# Bind message. Readies a prepared statement for execution.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
#
# @param portal Name of the destination portal.
# @param ps Name of the source prepared statement.
# @param in_fc An iterable containing the format codes for input
# parameters. 0 = Text, 1 = Binary.
# @param params The parameters.
# @param out_fc An iterable containing the format codes for output
# parameters. 0 = Text, 1 = Binary.
# @param kwargs Additional arguments to pass to the type conversion
# methods.
class Bind(object):
def __init__(self, portal, ps, in_fc, params, out_fc, **kwargs):
self.portal = portal
self.ps = ps
self.in_fc = in_fc
self.params = []
for i in range(len(params)):
if len(self.in_fc) == 0:
fc = 0
elif len(self.in_fc) == 1:
fc = self.in_fc[0]
else:
fc = self.in_fc[i]
self.params.append(types.pg_value(params[i], fc, **kwargs))
self.out_fc = out_fc
def __repr__(self):
return "<Bind p=%r s=%r>" % (self.portal, self.ps)
# Byte1('B') - Identifies the Bind command.
# Int32 - Message length, including self.
# String - Name of the destination portal.
# String - Name of the source prepared statement.
# Int16 - Number of parameter format codes.
# For each parameter format code:
# Int16 - The parameter format code.
# Int16 - Number of parameter values.
# For each parameter value:
# Int32 - The length of the parameter value, in bytes, not including this
# length field. -1 indicates a NULL parameter value, in which case no
# value bytes follow.
# Byte[n] - Value of the parameter.
# Int16 - The number of result-column format codes.
# For each result-column format code:
# Int16 - The format code.
def serialize(self):
retval = StringIO()
retval.write(self.portal + "\x00")
retval.write(self.ps + "\x00")
retval.write(struct.pack("!h", len(self.in_fc)))
for fc in self.in_fc:
retval.write(struct.pack("!h", fc))
retval.write(struct.pack("!h", len(self.params)))
for param in self.params:
if param == None:
# special case, NULL value
retval.write(struct.pack("!i", -1))
else:
retval.write(struct.pack("!i", len(param)))
retval.write(param)
retval.write(struct.pack("!h", len(self.out_fc)))
for fc in self.out_fc:
retval.write(struct.pack("!h", fc))
val = retval.getvalue()
val = struct.pack("!i", len(val) + 4) + val
val = "B" + val
return val
##
# A Close message, used for closing prepared statements and portals.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
#
# @param typ 'S' for prepared statement, 'P' for portal.
# @param name The name of the item to close.
class Close(object):
def __init__(self, typ, name):
if len(typ) != 1:
raise InternalError("Close typ must be 1 char")
self.typ = typ
self.name = name
# Byte1('C') - Identifies the message as a close command.
# Int32 - Message length, including self.
# Byte1 - 'S' for prepared statement, 'P' for portal.
# String - The name of the item to close.
def serialize(self):
val = self.typ + self.name + "\x00"
val = struct.pack("!i", len(val) + 4) + val
val = "C" + val
return val
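# Illustrative example (added; the portal name is made up):
# Close("P", "foo").serialize() yields 'C' + '\x00\x00\x00\x09' + 'Pfoo\x00',
# i.e. a 4-byte length of 9 followed by the type byte, the name, and a
# terminating NUL.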
##
# A specialized Close message for a portal.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class ClosePortal(Close):
def __init__(self, name):
Close.__init__(self, "P", name)
##
# A specialized Close message for a prepared statement.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class ClosePreparedStatement(Close):
def __init__(self, name):
Close.__init__(self, "S", name)
##
# A Describe message, used for obtaining information on prepared statements
# and portals.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
#
# @param typ 'S' for prepared statement, 'P' for portal.
# @param name The name of the item to describe.
class Describe(object):
def __init__(self, typ, name):
if len(typ) != 1:
raise InternalError("Describe typ must be 1 char")
self.typ = typ
self.name = name
# Byte1('D') - Identifies the message as a describe command.
# Int32 - Message length, including self.
# Byte1 - 'S' for prepared statement, 'P' for portal.
# String - The name of the item to describe.
def serialize(self):
val = self.typ + self.name + "\x00"
val = struct.pack("!i", len(val) + 4) + val
val = "D" + val
return val
##
# A specialized Describe message for a portal.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class DescribePortal(Describe):
def __init__(self, name):
Describe.__init__(self, "P", name)
def __repr__(self):
return "<DescribePortal %r>" % (self.name)
##
# A specialized Describe message for a prepared statement.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class DescribePreparedStatement(Describe):
def __init__(self, name):
Describe.__init__(self, "S", name)
def __repr__(self):
return "<DescribePreparedStatement %r>" % (self.name)
##
# A Flush message forces the backend to deliver any data pending in its
# output buffers.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class Flush(object):
# Byte1('H') - Identifies the message as a flush command.
# Int32(4) - Length of message, including self.
def serialize(self):
return 'H\x00\x00\x00\x04'
def __repr__(self):
return "<Flush>"
##
# Causes the backend to close the current transaction (if not in a BEGIN/COMMIT
# block), and issue ReadyForQuery.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class Sync(object):
# Byte1('S') - Identifies the message as a sync command.
# Int32(4) - Length of message, including self.
def serialize(self):
return 'S\x00\x00\x00\x04'
def __repr__(self):
return "<Sync>"
##
# Transmits a password.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class PasswordMessage(object):
def __init__(self, pwd):
self.pwd = pwd
# Byte1('p') - Identifies the message as a password message.
# Int32 - Message length including self.
# String - The password. Password may be encrypted.
def serialize(self):
val = self.pwd + "\x00"
val = struct.pack("!i", len(val) + 4) + val
val = "p" + val
return val
##
# Requests that the backend execute a portal and retrieve any number of rows.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
# @param row_count The number of rows to return. Can be zero to indicate the
# backend should return all rows. If the portal represents a
# query that does not return rows, no rows will be returned
# no matter what the row_count.
class Execute(object):
def __init__(self, portal, row_count):
self.portal = portal
self.row_count = row_count
# Byte1('E') - Identifies the message as an execute message.
# Int32 - Message length, including self.
# String - The name of the portal to execute.
# Int32 - Maximum number of rows to return, if portal contains a query that
# returns rows. 0 = no limit.
def serialize(self):
val = self.portal + "\x00" + struct.pack("!i", self.row_count)
val = struct.pack("!i", len(val) + 4) + val
val = "E" + val
return val
##
# Informs the backend that the connection is being closed.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class Terminate(object):
def __init__(self):
pass
# Byte1('X') - Identifies the message as a terminate message.
# Int32(4) - Message length, including self.
def serialize(self):
return 'X\x00\x00\x00\x04'
##
# Base class of all Authentication[*] messages.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class AuthenticationRequest(object):
def __init__(self, data):
pass
# Byte1('R') - Identifies the message as an authentication request.
# Int32(8) - Message length, including self.
# Int32 - An authentication code that represents different
# authentication messages:
# 0 = AuthenticationOk
# 5 = MD5 pwd
# 2 = Kerberos v5 (not supported by pg8000)
# 3 = Cleartext pwd (not supported by pg8000)
# 4 = crypt() pwd (not supported by pg8000)
# 6 = SCM credential (not supported by pg8000)
# 7 = GSSAPI (not supported by pg8000)
# 8 = GSSAPI data (not supported by pg8000)
# 9 = SSPI (not supported by pg8000)
# Some authentication messages have additional data following the
# authentication code. That data is documented in the appropriate class.
def createFromData(data):
ident = struct.unpack("!i", data[:4])[0]
klass = authentication_codes.get(ident, None)
if klass != None:
return klass(data[4:])
else:
raise NotSupportedError("authentication method %r not supported" % (ident,))
createFromData = staticmethod(createFromData)
def ok(self, conn, user, **kwargs):
raise InternalError("ok method should be overridden on AuthenticationRequest instance")
##
# A message indicating that the backend accepted the provided username
# without any challenge.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class AuthenticationOk(AuthenticationRequest):
def ok(self, conn, user, **kwargs):
return True
##
# A message representing the backend requesting an MD5 hashed password
# response. The response will be sent as md5(md5(pwd + login) + salt).
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class AuthenticationMD5Password(AuthenticationRequest):
# Additional message data:
# Byte4 - Hash salt.
def __init__(self, data):
self.salt = "".join(struct.unpack("4c", data))
def ok(self, conn, user, password=None, **kwargs):
if password == None:
raise InterfaceError("server requesting MD5 password authentication, but no password was provided")
pwd = "md5" + hashlib.md5(hashlib.md5(password + user).hexdigest() + self.salt).hexdigest()
conn._send(PasswordMessage(pwd))
conn._flush()
reader = MessageReader(conn)
reader.add_message(AuthenticationRequest, lambda msg, reader: reader.return_value(msg.ok(conn, user)), reader)
reader.add_message(ErrorResponse, self._ok_error)
return reader.handle_messages()
def _ok_error(self, msg):
if msg.code == "28000":
raise InterfaceError("md5 password authentication failed")
else:
raise msg.createException()
authentication_codes = {
0: AuthenticationOk,
5: AuthenticationMD5Password,
}
##
# ParameterStatus message sent from backend, used to inform the frontend of
# runtime configuration parameter changes.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class ParameterStatus(object):
def __init__(self, key, value):
self.key = key
self.value = value
# Byte1('S') - Identifies ParameterStatus
# Int32 - Message length, including self.
# String - Runtime parameter name.
# String - Runtime parameter value.
def createFromData(data):
key = data[:data.find("\x00")]
value = data[data.find("\x00")+1:-1]
return ParameterStatus(key, value)
createFromData = staticmethod(createFromData)
##
# BackendKeyData message sent from backend. Contains a connection's process
# ID and a secret key. Can be used to terminate the connection's current
# actions, such as a long running query. Not supported by pg8000 yet.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class BackendKeyData(object):
def __init__(self, process_id, secret_key):
self.process_id = process_id
self.secret_key = secret_key
# Byte1('K') - Identifier.
# Int32(12) - Message length, including self.
# Int32 - Process ID.
# Int32 - Secret key.
def createFromData(data):
process_id, secret_key = struct.unpack("!2i", data)
return BackendKeyData(process_id, secret_key)
createFromData = staticmethod(createFromData)
##
# Message representing a query with no data.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class NoData(object):
# Byte1('n') - Identifier.
# Int32(4) - Message length, including self.
def createFromData(data):
return NoData()
createFromData = staticmethod(createFromData)
##
# Message representing a successful Parse.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class ParseComplete(object):
# Byte1('1') - Identifier.
# Int32(4) - Message length, including self.
def createFromData(data):
return ParseComplete()
createFromData = staticmethod(createFromData)
##
# Message representing a successful Bind.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class BindComplete(object):
# Byte1('2') - Identifier.
# Int32(4) - Message length, including self.
def createFromData(data):
return BindComplete()
createFromData = staticmethod(createFromData)
##
# Message representing a successful Close.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class CloseComplete(object):
# Byte1('3') - Identifier.
# Int32(4) - Message length, including self.
def createFromData(data):
return CloseComplete()
createFromData = staticmethod(createFromData)
##
# Message representing data from an Execute has been received, but more data
# exists in the portal.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class PortalSuspended(object):
# Byte1('s') - Identifier.
# Int32(4) - Message length, including self.
def createFromData(data):
return PortalSuspended()
createFromData = staticmethod(createFromData)
##
# Message representing the backend is ready to process a new query.
# <p>
# Stability: This is an internal class. No stability guarantee is made.
class ReadyForQuery(object):
def __init__(self, status):
self._status = status
##
# I = Idle, T = Idle in Transaction, E = Idle in Failed Transaction.
status = property(lambda self: self._status)
def __repr__(self):
return "<ReadyForQuery %s>" % \
{"I": "Idle", "T": "Idle in Transaction", "E": "Idle in Failed Transaction"}[self.status]
# Byte1('Z') - Identifier.
# Int32(5) - Message length, including self.
# Byte1 - Status indicator.
def createFromData(data):
return ReadyForQuery(data)
createFromData = staticmethod(createFromData)
##
# Represents a notice sent from the server. This is not the same as a
# notification. A notice is just additional information about a query, such
# as a notice that a primary key has automatically been created for a table.
# <p>
# A NoticeResponse instance will have properties containing the data sent
# from the server:
# <ul>
# <li>severity -- "ERROR", "FATAL", "PANIC", "WARNING", "NOTICE", "DEBUG",
# "INFO", or "LOG". Always present.</li>
# <li>code -- the SQLSTATE code for the error. See Appendix A of the
# PostgreSQL documentation for specific error codes. Always present.</li>
# <li>msg -- human-readable error message. Always present.</li>
# <li>detail -- Optional additional information.</li>
# <li>hint -- Optional suggestion about what to do about the issue.</li>
# <li>position -- Optional index into the query string.</li>
# <li>where -- Optional context.</li>
# <li>file -- Source-code file.</li>
# <li>line -- Source-code line.</li>
# <li>routine -- Source-code routine.</li>
# </ul>
# <p>
# Stability: Added in pg8000 v1.03. Required properties severity, code, and
# msg are guaranteed for v1.xx. Other properties should be checked with
# hasattr before accessing.
class NoticeResponse(object):
responseKeys = {
"S": "severity", # always present
"C": "code", # always present
"M": "msg", # always present
"D": "detail",
"H": "hint",
"P": "position",
"p": "_position",
"q": "_query",
"W": "where",
"F": "file",
"L": "line",
"R": "routine",
}
def __init__(self, **kwargs):
for arg, value in kwargs.items():
setattr(self, arg, value)
def __repr__(self):
return "<NoticeResponse %s %s %r>" % (self.severity, self.code, self.msg)
def dataIntoDict(data):
retval = {}
for s in data.split("\x00"):
if not s: continue
key, value = s[0], s[1:]
key = NoticeResponse.responseKeys.get(key, key)
retval[key] = value
return retval
dataIntoDict = staticmethod(dataIntoDict)
# Byte1('N') - Identifier
# Int32 - Message length
# Any number of these, followed by a zero byte:
# Byte1 - code identifying the field type (see responseKeys)
# String - field value
def createFromData(data):
return NoticeResponse(**NoticeResponse.dataIntoDict(data))
createFromData = staticmethod(createFromData)
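# Illustrative example (added; the field values are made up): a payload of
# 'SERROR\x00C42601\x00Msyntax error\x00\x00' is parsed by dataIntoDict into
# {'severity': 'ERROR', 'code': '42601', 'msg': 'syntax error'}.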
##
# A message sent in case of a server-side error. Contains the same properties
# that {@link NoticeResponse NoticeResponse} contains.
# <p>
# Stability: Added in pg8000 v1.03. Required properties severity, code, and
# msg are guaranteed for v1.xx. Other properties should be checked with
# hasattr before accessing.
class ErrorResponse(object):
def __init__(self, **kwargs):
for arg, value in kwargs.items():
setattr(self, arg, value)
def __repr__(self):
return "<ErrorResponse %s %s %r>" % (self.severity, self.code, self.msg)
def createException(self):
return ProgrammingError(self.severity, self.code, self.msg)
def createFromData(data):
return ErrorResponse(**NoticeResponse.dataIntoDict(data))
createFromData = staticmethod(createFromData)
##
# A message sent if this connection receives a NOTIFY that it was LISTENing for.
# <p>
# Stability: Added in pg8000 v1.03. When limited to accessing properties from
# a notification event dispatch, stability is guaranteed for v1.xx.
class NotificationResponse(object):
def __init__(self, backend_pid, condition, additional_info):
self._backend_pid = backend_pid
self._condition = condition
self._additional_info = additional_info
##
# An integer representing the process ID of the backend that triggered
# the NOTIFY.
# <p>
# Stability: Added in pg8000 v1.03, stability guaranteed for v1.xx.
backend_pid = property(lambda self: self._backend_pid)
##
# The name of the notification fired.
# <p>
# Stability: Added in pg8000 v1.03, stability guaranteed for v1.xx.
condition = property(lambda self: self._condition)
##
# Currently unspecified by the PostgreSQL documentation as of v8.3.1.
# <p>
# Stability: Added in pg8000 v1.03, stability guaranteed for v1.xx.
additional_info = property(lambda self: self._additional_info)
def __repr__(self):
return "<NotificationResponse %s %s %r>" % (self.backend_pid, self.condition, self.additional_info)
def createFromData(data):
backend_pid = struct.unpack("!i", data[:4])[0]
data = data[4:]
null = data.find("\x00")
condition = data[:null]
data = data[null+1:]
null = data.find("\x00")
additional_info = data[:null]
return NotificationResponse(backend_pid, condition, additional_info)
createFromData = staticmethod(createFromData)
class ParameterDescription(object):
def __init__(self, type_oids):
self.type_oids = type_oids
def createFromData(data):
count = struct.unpack("!h", data[:2])[0]
type_oids = struct.unpack("!" + "i"*count, data[2:])
return ParameterDescription(type_oids)
createFromData = staticmethod(createFromData)
class RowDescription(object):
def __init__(self, fields):
self.fields = fields
def createFromData(data):
count = struct.unpack("!h", data[:2])[0]
data = data[2:]
fields = []
for i in range(count):
null = data.find("\x00")
field = {"name": data[:null]}
data = data[null+1:]
field["table_oid"], field["column_attrnum"], field["type_oid"], field["type_size"], field["type_modifier"], field["format"] = struct.unpack("!ihihih", data[:18])
data = data[18:]
fields.append(field)
return RowDescription(fields)
createFromData = staticmethod(createFromData)
class CommandComplete(object):
def __init__(self, command, rows=None, oid=None):
self.command = command
self.rows = rows
self.oid = oid
def createFromData(data):
values = data[:-1].split(" ")
args = {}
args['command'] = values[0]
if args['command'] in ("INSERT", "DELETE", "UPDATE", "MOVE", "FETCH", "COPY"):
args['rows'] = int(values[-1])
if args['command'] == "INSERT":
args['oid'] = int(values[1])
else:
args['command'] = data[:-1]
return CommandComplete(**args)
createFromData = staticmethod(createFromData)
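# Illustrative example (added; the command tags are examples): a tag of
# 'INSERT 0 1\x00' parses to CommandComplete(command='INSERT', rows=1, oid=0),
# while a tag like 'CREATE TABLE\x00' takes the else branch and keeps the whole
# string as the command, leaving rows and oid as None.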
class DataRow(object):
def __init__(self, fields):
self.fields = fields
def createFromData(data):
count = struct.unpack("!h", data[:2])[0]
data = data[2:]
fields = []
for i in range(count):
val_len = struct.unpack("!i", data[:4])[0]
data = data[4:]
if val_len == -1:
fields.append(None)
else:
fields.append(data[:val_len])
data = data[val_len:]
return DataRow(fields)
createFromData = staticmethod(createFromData)
class CopyData(object):
# "d": CopyData,
def __init__(self, data):
self.data = data
def createFromData(data):
return CopyData(data)
createFromData = staticmethod(createFromData)
def serialize(self):
return 'd' + struct.pack('!i', len(self.data) + 4) + self.data
class CopyDone(object):
# Byte1('c') - Identifier.
# Int32(4) - Message length, including self.
def createFromData(data):
return CopyDone()
createFromData = staticmethod(createFromData)
def serialize(self):
return 'c\x00\x00\x00\x04'
class CopyOutResponse(object):
# Byte1('H')
# Int32(4) - Length of message contents in bytes, including self.
# Int8(1) - 0 textual, 1 binary
# Int16(2) - Number of columns
# Int16(N) - Format codes for each column (0 text, 1 binary)
def __init__(self, is_binary, column_formats):
self.is_binary = is_binary
self.column_formats = column_formats
def createFromData(data):
is_binary, num_cols = struct.unpack('!bh', data[:3])
column_formats = struct.unpack('!' + ('h' * num_cols), data[3:])
return CopyOutResponse(is_binary, column_formats)
createFromData = staticmethod(createFromData)
class CopyInResponse(object):
# Byte1('G')
# Otherwise the same as CopyOutResponse
def __init__(self, is_binary, column_formats):
self.is_binary = is_binary
self.column_formats = column_formats
def createFromData(data):
is_binary, num_cols = struct.unpack('!bh', data[:3])
column_formats = struct.unpack('!' + ('h' * num_cols), data[3:])
return CopyInResponse(is_binary, column_formats)
createFromData = staticmethod(createFromData)
class SSLWrapper(object):
def __init__(self, sslobj):
self.sslobj = sslobj
def send(self, data):
self.sslobj.write(data)
def recv(self, num):
return self.sslobj.read(num)
class MessageReader(object):
def __init__(self, connection):
self._conn = connection
self._msgs = []
# If true, raise exception from an ErrorResponse after messages are
# processed. This can be used to leave the connection in a usable
# state after an error response, rather than having unconsumed
# messages that won't be understood in another context.
self.delay_raising_exception = False
self.ignore_unhandled_messages = False
def add_message(self, msg_class, handler, *args, **kwargs):
self._msgs.append((msg_class, handler, args, kwargs))
def clear_messages(self):
self._msgs = []
def return_value(self, value):
self._retval = value
def handle_messages(self):
exc = None
while 1:
msg = self._conn._read_message()
msg_handled = False
for (msg_class, handler, args, kwargs) in self._msgs:
if isinstance(msg, msg_class):
msg_handled = True
retval = handler(msg, *args, **kwargs)
if retval:
# The handler returned a true value, meaning that the
# message loop should be aborted.
if exc != None:
raise exc
return retval
elif hasattr(self, "_retval"):
# The handler told us to return -- used for non-true
# return values
if exc != None:
raise exc
return self._retval
if msg_handled:
continue
elif isinstance(msg, ErrorResponse):
exc = msg.createException()
if not self.delay_raising_exception:
raise exc
elif isinstance(msg, NoticeResponse):
self._conn.handleNoticeResponse(msg)
elif isinstance(msg, ParameterStatus):
self._conn.handleParameterStatus(msg)
elif isinstance(msg, NotificationResponse):
self._conn.handleNotificationResponse(msg)
elif not self.ignore_unhandled_messages:
raise InternalError("Unexpected response msg %r" % (msg))
def sync_on_error(fn):
def _fn(self, *args, **kwargs):
try:
self._sock_lock.acquire()
return fn(self, *args, **kwargs)
except:
self._sync()
raise
finally:
self._sock_lock.release()
return _fn
class Connection(object):
def __init__(self, unix_sock=None, host=None, port=5432, socket_timeout=60, ssl=False, records=False):
self._client_encoding = "ascii"
self._integer_datetimes = False
self._record_field_names = {}
self._sock_buf = ""
self._sock_buf_pos = 0
self._send_sock_buf = []
self._block_size = 8192
self.user_wants_records = records
if unix_sock == None and host != None:
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
elif unix_sock != None:
if not hasattr(socket, "AF_UNIX"):
raise InterfaceError("attempt to connect to unix socket on unsupported platform")
self._sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
else:
raise ProgrammingError("one of host or unix_sock must be provided")
if unix_sock == None and host != None:
self._sock.connect((host, port))
elif unix_sock != None:
self._sock.connect(unix_sock)
if ssl:
self._send(SSLRequest())
self._flush()
resp = self._sock.recv(1)
if resp == 'S':
self._sock = SSLWrapper(socket.ssl(self._sock))
else:
raise InterfaceError("server refuses SSL")
else:
# settimeout causes SSL failure on Windows; see Python bug 1462352.
self._sock.settimeout(socket_timeout)
self._state = "noauth"
self._backend_key_data = None
self._sock_lock = threading.Lock()
self.NoticeReceived = MulticastDelegate()
self.ParameterStatusReceived = MulticastDelegate()
self.NotificationReceived = MulticastDelegate()
self.ParameterStatusReceived += self._onParameterStatusReceived
def verifyState(self, state):
if self._state != state:
raise InternalError("connection state must be %s, is %s" % (state, self._state))
def _send(self, msg):
assert self._sock_lock.locked()
#print "_send(%r)" % msg
data = msg.serialize()
self._send_sock_buf.append(data)
def _flush(self):
assert self._sock_lock.locked()
self._sock.sendall("".join(self._send_sock_buf))
del self._send_sock_buf[:]
def _read_bytes(self, byte_count):
retval = []
bytes_read = 0
while bytes_read < byte_count:
if self._sock_buf_pos == len(self._sock_buf):
self._sock_buf = self._sock.recv(1024)
self._sock_buf_pos = 0
rpos = min(len(self._sock_buf), self._sock_buf_pos + (byte_count - bytes_read))
addt_data = self._sock_buf[self._sock_buf_pos:rpos]
bytes_read += (rpos - self._sock_buf_pos)
assert bytes_read <= byte_count
self._sock_buf_pos = rpos
retval.append(addt_data)
return "".join(retval)
def _read_message(self):
assert self._sock_lock.locked()
bytes = self._read_bytes(5)
message_code = bytes[0]
data_len = struct.unpack("!i", bytes[1:])[0] - 4
bytes = self._read_bytes(data_len)
assert len(bytes) == data_len
msg = message_types[message_code].createFromData(bytes)
#print "_read_message() -> %r" % msg
return msg
def authenticate(self, user, **kwargs):
self.verifyState("noauth")
self._sock_lock.acquire()
try:
self._send(StartupMessage(user, database=kwargs.get("database",None), options=kwargs.get("options", None)))
self._flush()
msg = self._read_message()
if isinstance(msg, ErrorResponse):
raise msg.createException()
if not isinstance(msg, AuthenticationRequest):
raise InternalError("StartupMessage was responded to with non-AuthenticationRequest msg %r" % msg)
if not msg.ok(self, user, **kwargs):
raise InterfaceError("authentication method %s failed" % msg.__class__.__name__)
self._state = "auth"
reader = MessageReader(self)
reader.add_message(ReadyForQuery, self._ready_for_query)
reader.add_message(BackendKeyData, self._receive_backend_key_data)
reader.handle_messages()
finally:
self._sock_lock.release()
self._cache_record_attnames()
def _ready_for_query(self, msg):
self._state = "ready"
return True
def _receive_backend_key_data(self, msg):
self._backend_key_data = msg
def _cache_record_attnames(self):
if not self.user_wants_records:
return
parse_retval = self.parse("",
"""SELECT
pg_type.oid, attname
FROM
pg_type
INNER JOIN pg_attribute ON (attrelid = pg_type.typrelid)
WHERE typreceive = 'record_recv'::regproc
ORDER BY pg_type.oid, attnum""",
[])
row_desc, cmd = self.bind("tmp", "", (), parse_retval, None)
eod, rows = self.fetch_rows("tmp", 0, row_desc)
self._record_field_names = {}
typoid, attnames = None, []
for row in rows:
new_typoid, attname = row
if new_typoid != typoid and typoid != None:
self._record_field_names[typoid] = attnames
attnames = []
typoid = new_typoid
attnames.append(attname)
self._record_field_names[typoid] = attnames
@sync_on_error
def parse(self, statement, qs, param_types):
self.verifyState("ready")
type_info = [types.pg_type_info(x) for x in param_types]
param_types, param_fc = [x[0] for x in type_info], [x[1] for x in type_info] # zip(*type_info) -- fails on empty arr
self._send(Parse(statement, qs, param_types))
self._send(DescribePreparedStatement(statement))
self._send(Flush())
self._flush()
reader = MessageReader(self)
# ParseComplete is good.
reader.add_message(ParseComplete, lambda msg: 0)
# Well, we don't really care -- we're going to send whatever we
# want and let the database deal with it. But thanks anyways!
reader.add_message(ParameterDescription, lambda msg: 0)
# We're not waiting for a row description. Return something
# distinctive to let bind know that there is no output.
reader.add_message(NoData, lambda msg: (None, param_fc))
# Common row description response
reader.add_message(RowDescription, lambda msg: (msg, param_fc))
return reader.handle_messages()
@sync_on_error
def bind(self, portal, statement, params, parse_data, copy_stream):
self.verifyState("ready")
row_desc, param_fc = parse_data
if row_desc == None:
# no data coming out
output_fc = ()
else:
# We've got row_desc that allows us to identify what we're going to
# get back from this statement.
output_fc = [types.py_type_info(f, self._record_field_names) for f in row_desc.fields]
self._send(Bind(portal, statement, param_fc, params, output_fc, client_encoding = self._client_encoding, integer_datetimes = self._integer_datetimes))
# We need to describe the portal after bind, since the return
# format codes will be different (hopefully, always what we
# requested).
self._send(DescribePortal(portal))
self._send(Flush())
self._flush()
# Read responses from server...
reader = MessageReader(self)
# BindComplete is good -- just ignore
reader.add_message(BindComplete, lambda msg: 0)
# NoData in this case means we're not executing a query. As a
# result, we won't be fetching rows, so we'll never execute the
# portal we just created... unless we execute it right away, which
# we'll do.
reader.add_message(NoData, self._bind_nodata, portal, reader, copy_stream)
# Return the new row desc, since it will have the format types we
# asked the server for
reader.add_message(RowDescription, lambda msg: (msg, None))
return reader.handle_messages()
def _copy_in_response(self, copyin, fileobj, old_reader):
if fileobj == None:
raise CopyQueryWithoutStreamError()
while True:
data = fileobj.read(self._block_size)
if not data:
break
self._send(CopyData(data))
self._flush()
self._send(CopyDone())
self._send(Sync())
self._flush()
def _copy_out_response(self, copyout, fileobj, old_reader):
if fileobj == None:
raise CopyQueryWithoutStreamError()
reader = MessageReader(self)
reader.add_message(CopyData, self._copy_data, fileobj)
reader.add_message(CopyDone, lambda msg: 1)
reader.handle_messages()
def _copy_data(self, copydata, fileobj):
fileobj.write(copydata.data)
def _bind_nodata(self, msg, portal, old_reader, copy_stream):
# Bind message returned NoData, causing us to execute the command.
self._send(Execute(portal, 0))
self._send(Sync())
self._flush()
output = {}
reader = MessageReader(self)
reader.add_message(CopyOutResponse, self._copy_out_response, copy_stream, reader)
reader.add_message(CopyInResponse, self._copy_in_response, copy_stream, reader)
reader.add_message(CommandComplete, lambda msg, out: out.setdefault('msg', msg) and False, output)
reader.add_message(ReadyForQuery, lambda msg: 1)
reader.delay_raising_exception = True
reader.handle_messages()
old_reader.return_value((None, output['msg']))
@sync_on_error
def fetch_rows(self, portal, row_count, row_desc):
self.verifyState("ready")
self._send(Execute(portal, row_count))
self._send(Flush())
self._flush()
rows = []
reader = MessageReader(self)
reader.add_message(DataRow, self._fetch_datarow, rows, row_desc)
reader.add_message(PortalSuspended, lambda msg: 1)
reader.add_message(CommandComplete, self._fetch_commandcomplete, portal)
retval = reader.handle_messages()
# retval = 2 when command complete, indicating that we've hit the
# end of the available data for this command
return (retval == 2), rows
def _fetch_datarow(self, msg, rows, row_desc):
rows.append(
[
types.py_value(
msg.fields[i],
row_desc.fields[i],
client_encoding=self._client_encoding,
integer_datetimes=self._integer_datetimes,
record_field_names=self._record_field_names
)
for i in range(len(msg.fields))
]
)
def _fetch_commandcomplete(self, msg, portal):
self._send(ClosePortal(portal))
self._send(Sync())
self._flush()
reader = MessageReader(self)
reader.add_message(ReadyForQuery, self._fetch_commandcomplete_rfq)
reader.add_message(CloseComplete, lambda msg: False)
reader.handle_messages()
return 2 # signal end-of-data
def _fetch_commandcomplete_rfq(self, msg):
self._state = "ready"
return True
# Send a Sync message, then read and discard all messages until we
# receive a ReadyForQuery message.
def _sync(self):
# it is assumed _sync is called from sync_on_error, which holds
# a _sock_lock throughout the call
self._send(Sync())
self._flush()
reader = MessageReader(self)
reader.ignore_unhandled_messages = True
reader.add_message(ReadyForQuery, lambda msg: True)
reader.handle_messages()
def close_statement(self, statement):
if self._state == "closed":
return
self.verifyState("ready")
self._sock_lock.acquire()
try:
self._send(ClosePreparedStatement(statement))
self._send(Sync())
self._flush()
reader = MessageReader(self)
reader.add_message(CloseComplete, lambda msg: 0)
reader.add_message(ReadyForQuery, lambda msg: 1)
reader.handle_messages()
finally:
self._sock_lock.release()
def close_portal(self, portal):
if self._state == "closed":
return
self.verifyState("ready")
self._sock_lock.acquire()
try:
self._send(ClosePortal(portal))
self._send(Sync())
self._flush()
reader = MessageReader(self)
reader.add_message(CloseComplete, lambda msg: 0)
reader.add_message(ReadyForQuery, lambda msg: 1)
reader.handle_messages()
finally:
self._sock_lock.release()
def close(self):
self._sock_lock.acquire()
try:
self._send(Terminate())
self._flush()
self._sock.close()
self._state = "closed"
finally:
self._sock_lock.release()
def _onParameterStatusReceived(self, msg):
if msg.key == "client_encoding":
self._client_encoding = msg.value
elif msg.key == "integer_datetimes":
self._integer_datetimes = (msg.value == "on")
def handleNoticeResponse(self, msg):
self.NoticeReceived(msg)
def handleParameterStatus(self, msg):
self.ParameterStatusReceived(msg)
def handleNotificationResponse(self, msg):
self.NotificationReceived(msg)
def fileno(self):
# This should be safe to do without a lock
return self._sock.fileno()
def isready(self):
self._sock_lock.acquire()
try:
rlst, _wlst, _xlst = select.select([self], [], [], 0)
if not rlst:
return False
self._sync()
return True
finally:
self._sock_lock.release()
message_types = {
"N": NoticeResponse,
"R": AuthenticationRequest,
"S": ParameterStatus,
"K": BackendKeyData,
"Z": ReadyForQuery,
"T": RowDescription,
"E": ErrorResponse,
"D": DataRow,
"C": CommandComplete,
"1": ParseComplete,
"2": BindComplete,
"3": CloseComplete,
"s": PortalSuspended,
"n": NoData,
"t": ParameterDescription,
"A": NotificationResponse,
"c": CopyDone,
"d": CopyData,
"G": CopyInResponse,
"H": CopyOutResponse,
}
| apache-2.0 | -6,018,158,253,664,066,000 | 34.28806 | 173 | 0.611555 | false |
sandeepgupta2k4/tensorflow | tensorflow/contrib/keras/python/keras/utils/data_utils.py | 25 | 10259 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for file download and caching."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import hashlib
import os
import shutil
import sys
import tarfile
import zipfile
import six
from six.moves.urllib.error import HTTPError
from six.moves.urllib.error import URLError
from six.moves.urllib.request import urlopen
from tensorflow.contrib.keras.python.keras.utils.generic_utils import Progbar
if sys.version_info[0] == 2:
def urlretrieve(url, filename, reporthook=None, data=None):
"""Replacement for `urlretrive` for Python 2.
Under Python 2, `urlretrieve` relies on `FancyURLopener` from legacy
`urllib` module, known to have issues with proxy management.
Arguments:
url: url to retrieve.
filename: where to store the retrieved data locally.
reporthook: a hook function that will be called once
on establishment of the network connection and once
after each block read thereafter.
The hook will be passed three arguments;
a count of blocks transferred so far,
a block size in bytes, and the total size of the file.
data: `data` argument passed to `urlopen`.
"""
def chunk_read(response, chunk_size=8192, reporthook=None):
total_size = response.info().get('Content-Length').strip()
total_size = int(total_size)
count = 0
while 1:
chunk = response.read(chunk_size)
count += 1
if not chunk:
reporthook(count, total_size, total_size)
break
if reporthook:
reporthook(count, chunk_size, total_size)
yield chunk
response = urlopen(url, data)
with open(filename, 'wb') as fd:
for chunk in chunk_read(response, reporthook=reporthook):
fd.write(chunk)
else:
from six.moves.urllib.request import urlretrieve # pylint: disable=g-import-not-at-top
def _extract_archive(file_path, path='.', archive_format='auto'):
"""Extracts an archive if it matches tar, tar.gz, tar.bz, or zip formats.
Arguments:
file_path: path to the archive file
path: path to extract the archive file
archive_format: Archive format to try for extracting the file.
Options are 'auto', 'tar', 'zip', and None.
'tar' includes tar, tar.gz, and tar.bz files.
The default 'auto' is ['tar', 'zip'].
None or an empty list will return no matches found.
Returns:
True if a match was found and an archive extraction was completed,
False otherwise.
"""
if archive_format is None:
return False
if archive_format is 'auto':
archive_format = ['tar', 'zip']
if isinstance(archive_format, six.string_types):
archive_format = [archive_format]
for archive_type in archive_format:
if archive_type is 'tar':
open_fn = tarfile.open
is_match_fn = tarfile.is_tarfile
if archive_type is 'zip':
open_fn = zipfile.ZipFile
is_match_fn = zipfile.is_zipfile
if is_match_fn(file_path):
with open_fn(file_path) as archive:
try:
archive.extractall(path)
except (tarfile.TarError, RuntimeError, KeyboardInterrupt):
if os.path.exists(path):
if os.path.isfile(path):
os.remove(path)
else:
shutil.rmtree(path)
raise
return True
return False
def get_file(fname,
origin,
untar=False,
md5_hash=None,
file_hash=None,
cache_subdir='datasets',
hash_algorithm='auto',
extract=False,
archive_format='auto',
cache_dir=None):
"""Downloads a file from a URL if it not already in the cache.
By default the file at the url `origin` is downloaded to the
cache_dir `~/.keras`, placed in the cache_subdir `datasets`,
and given the filename `fname`. The final location of a file
`example.txt` would therefore be `~/.keras/datasets/example.txt`.
Files in tar, tar.gz, tar.bz, and zip formats can also be extracted.
Passing a hash will verify the file after download. The command line
programs `shasum` and `sha256sum` can compute the hash.
Arguments:
fname: Name of the file. If an absolute path `/path/to/file.txt` is
specified the file will be saved at that location.
origin: Original URL of the file.
untar: Deprecated in favor of 'extract'.
boolean, whether the file should be decompressed
md5_hash: Deprecated in favor of 'file_hash'.
md5 hash of the file for verification
file_hash: The expected hash string of the file after download.
The sha256 and md5 hash algorithms are both supported.
cache_subdir: Subdirectory under the Keras cache dir where the file is
saved. If an absolute path `/path/to/folder` is
specified the file will be saved at that location.
hash_algorithm: Select the hash algorithm to verify the file.
options are 'md5', 'sha256', and 'auto'.
The default 'auto' detects the hash algorithm in use.
extract: True tries extracting the file as an Archive, like tar or zip.
archive_format: Archive format to try for extracting the file.
Options are 'auto', 'tar', 'zip', and None.
'tar' includes tar, tar.gz, and tar.bz files.
The default 'auto' is ['tar', 'zip'].
None or an empty list will return no matches found.
cache_dir: Location to store cached files, when None it
defaults to the [Keras
Directory](/faq/#where-is-the-keras-configuration-filed-stored).
Returns:
Path to the downloaded file
"""
if cache_dir is None:
cache_dir = os.path.expanduser(os.path.join('~', '.keras'))
if md5_hash is not None and file_hash is None:
file_hash = md5_hash
hash_algorithm = 'md5'
datadir_base = os.path.expanduser(cache_dir)
if not os.access(datadir_base, os.W_OK):
datadir_base = os.path.join('/tmp', '.keras')
datadir = os.path.join(datadir_base, cache_subdir)
if not os.path.exists(datadir):
os.makedirs(datadir)
if untar:
untar_fpath = os.path.join(datadir, fname)
fpath = untar_fpath + '.tar.gz'
else:
fpath = os.path.join(datadir, fname)
download = False
if os.path.exists(fpath):
# File found; verify integrity if a hash was provided.
if file_hash is not None:
if not validate_file(fpath, file_hash, algorithm=hash_algorithm):
print('A local file was found, but it seems to be '
'incomplete or outdated because the ' + hash_algorithm +
' file hash does not match the original value of ' + file_hash +
' so we will re-download the data.')
download = True
else:
download = True
if download:
print('Downloading data from', origin)
progbar = None
def dl_progress(count, block_size, total_size, progbar=None):
if progbar is None:
progbar = Progbar(total_size)
else:
progbar.update(count * block_size)
error_msg = 'URL fetch failure on {}: {} -- {}'
try:
try:
urlretrieve(origin, fpath,
functools.partial(dl_progress, progbar=progbar))
except URLError as e:
raise Exception(error_msg.format(origin, e.errno, e.reason))
except HTTPError as e:
raise Exception(error_msg.format(origin, e.code, e.msg))
except (Exception, KeyboardInterrupt) as e:
if os.path.exists(fpath):
os.remove(fpath)
raise
progbar = None
if untar:
if not os.path.exists(untar_fpath):
_extract_archive(fpath, datadir, archive_format='tar')
return untar_fpath
if extract:
_extract_archive(fpath, datadir, archive_format)
return fpath
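# Example usage (illustrative sketch; the URL and hash below are placeholders,
# not real resources):
# path = get_file('example.tar.gz',
# origin='https://example.com/example.tar.gz',
# file_hash='0' * 64, extract=True)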
def _hash_file(fpath, algorithm='sha256', chunk_size=65535):
"""Calculates a file sha256 or md5 hash.
Example:
```python
>>> from keras.data_utils import _hash_file
>>> _hash_file('/path/to/file.zip')
'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855'
```
Arguments:
fpath: path to the file being validated
algorithm: hash algorithm, one of 'auto', 'sha256', or 'md5'.
The default 'auto' detects the hash algorithm in use.
chunk_size: Bytes to read at a time, important for large files.
Returns:
The file hash
"""
# 'auto' falls back to sha256 here, since there is no expected hash string to
# inspect at this point.
if algorithm in ('sha256', 'auto'):
hasher = hashlib.sha256()
else:
hasher = hashlib.md5()
with open(fpath, 'rb') as fpath_file:
for chunk in iter(lambda: fpath_file.read(chunk_size), b''):
hasher.update(chunk)
return hasher.hexdigest()
def validate_file(fpath, file_hash, algorithm='auto', chunk_size=65535):
"""Validates a file against a sha256 or md5 hash.
Arguments:
fpath: path to the file being validated
file_hash: The expected hash string of the file.
The sha256 and md5 hash algorithms are both supported.
algorithm: Hash algorithm, one of 'auto', 'sha256', or 'md5'.
The default 'auto' detects the hash algorithm in use.
chunk_size: Bytes to read at a time, important for large files.
Returns:
Whether the file is valid
"""
if (algorithm == 'sha256' or
(algorithm == 'auto' and len(file_hash) == 64)):
hasher = 'sha256'
else:
hasher = 'md5'
if str(_hash_file(fpath, hasher, chunk_size)) == str(file_hash):
return True
else:
return False
| apache-2.0 | 8,883,221,928,144,193,000 | 33.658784 | 89 | 0.649868 | false |
int19h/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/site-packages/asn1crypto/csr.py | 14 | 2142 | # coding: utf-8
"""
ASN.1 type classes for certificate signing requests (CSR). Exports the
following items:
- CertificationRequest()
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from .algos import SignedDigestAlgorithm
from .core import (
Any,
Integer,
ObjectIdentifier,
OctetBitString,
Sequence,
SetOf,
)
from .keys import PublicKeyInfo
from .x509 import DirectoryString, Extensions, Name
# The structures in this file are taken from https://tools.ietf.org/html/rfc2986
# and https://tools.ietf.org/html/rfc2985
class Version(Integer):
_map = {
0: 'v1',
}
class CSRAttributeType(ObjectIdentifier):
_map = {
'1.2.840.113549.1.9.7': 'challenge_password',
'1.2.840.113549.1.9.9': 'extended_certificate_attributes',
'1.2.840.113549.1.9.14': 'extension_request',
}
class SetOfDirectoryString(SetOf):
_child_spec = DirectoryString
class Attribute(Sequence):
_fields = [
('type', ObjectIdentifier),
('values', SetOf, {'spec': Any}),
]
class SetOfAttributes(SetOf):
_child_spec = Attribute
class SetOfExtensions(SetOf):
_child_spec = Extensions
class CRIAttribute(Sequence):
_fields = [
('type', CSRAttributeType),
('values', Any),
]
_oid_pair = ('type', 'values')
_oid_specs = {
'challenge_password': SetOfDirectoryString,
'extended_certificate_attributes': SetOfAttributes,
'extension_request': SetOfExtensions,
}
class CRIAttributes(SetOf):
_child_spec = CRIAttribute
class CertificationRequestInfo(Sequence):
_fields = [
('version', Version),
('subject', Name),
('subject_pk_info', PublicKeyInfo),
('attributes', CRIAttributes, {'implicit': 0, 'optional': True}),
]
class CertificationRequest(Sequence):
_fields = [
('certification_request_info', CertificationRequestInfo),
('signature_algorithm', SignedDigestAlgorithm),
('signature', OctetBitString),
]
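# Example usage (illustrative sketch; `der_bytes` is assumed to hold a
# DER-encoded CSR):
# csr = CertificationRequest.load(der_bytes)
# info = csr['certification_request_info']
# print(info['subject'].native)
# print(csr['signature_algorithm']['algorithm'].native)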
| apache-2.0 | -9,004,212,906,114,262,000 | 21.3125 | 82 | 0.654062 | false |
Workday/OpenFrame | tools/grit/grit/format/policy_templates/writers/admx_writer_unittest.py | 41 | 21095 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Unittests for grit.format.policy_templates.writers.admx_writer."""
import os
import sys
import unittest
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
from grit.format.policy_templates.writers import admx_writer
from grit.format.policy_templates.writers import xml_writer_base_unittest
from xml.dom import minidom
class AdmxWriterUnittest(xml_writer_base_unittest.XmlWriterBaseTest):
def _CreateDocumentElement(self):
dom_impl = minidom.getDOMImplementation('')
doc = dom_impl.createDocument(None, 'root', None)
return doc.documentElement
def setUp(self):
# Writer configuration. This dictionary contains parameter used by the ADMX
# Writer
config = {
'win_group_policy_class': 'TestClass',
'win_supported_os': 'SUPPORTED_TESTOS',
'win_reg_mandatory_key_name': 'Software\\Policies\\Test',
'win_reg_recommended_key_name': 'Software\\Policies\\Test\\Recommended',
'win_mandatory_category_path': ['test_category'],
'win_recommended_category_path': ['test_recommended_category'],
'admx_namespace': 'ADMXWriter.Test.Namespace',
'admx_prefix': 'test_prefix',
'build': 'test_product',
}
self.writer = admx_writer.GetWriter(config)
self.writer.Init()
def _GetPoliciesElement(self, doc):
node_list = doc.getElementsByTagName('policies')
self.assertTrue(node_list.length == 1)
return node_list.item(0)
def _GetCategoriesElement(self, doc):
node_list = doc.getElementsByTagName('categories')
self.assertTrue(node_list.length == 1)
return node_list.item(0)
def testEmpty(self):
self.writer.BeginTemplate()
self.writer.EndTemplate()
output = self.writer.GetTemplateText()
expected_output = (
'<?xml version="1.0" ?>\n'
'<policyDefinitions revision="1.0" schemaVersion="1.0">\n'
' <policyNamespaces>\n'
' <target namespace="ADMXWriter.Test.Namespace"'
' prefix="test_prefix"/>\n'
' <using namespace="Microsoft.Policies.Windows" prefix="windows"/>\n'
' </policyNamespaces>\n'
' <resources minRequiredRevision="1.0"/>\n'
' <supportedOn>\n'
' <definitions>\n'
' <definition displayName="'
'$(string.SUPPORTED_TESTOS)" name="SUPPORTED_TESTOS"/>\n'
' </definitions>\n'
' </supportedOn>\n'
' <categories>\n'
' <category displayName="$(string.test_category)"'
' name="test_category"/>\n'
' <category displayName="$(string.test_recommended_category)"'
' name="test_recommended_category"/>\n'
' </categories>\n'
' <policies/>\n'
'</policyDefinitions>')
self.AssertXMLEquals(output, expected_output)
def testEmptyVersion(self):
self.writer.config['version'] = '39.0.0.0'
self.writer.BeginTemplate()
self.writer.EndTemplate()
output = self.writer.GetTemplateText()
expected_output = (
'<?xml version="1.0" ?>\n'
'<policyDefinitions revision="1.0" schemaVersion="1.0">\n'
' <!--test_product version: 39.0.0.0-->\n'
' <policyNamespaces>\n'
' <target namespace="ADMXWriter.Test.Namespace"'
' prefix="test_prefix"/>\n'
' <using namespace="Microsoft.Policies.Windows" prefix="windows"/>\n'
' </policyNamespaces>\n'
' <resources minRequiredRevision="1.0"/>\n'
' <supportedOn>\n'
' <definitions>\n'
' <definition displayName="'
'$(string.SUPPORTED_TESTOS)" name="SUPPORTED_TESTOS"/>\n'
' </definitions>\n'
' </supportedOn>\n'
' <categories>\n'
' <category displayName="$(string.test_category)"'
' name="test_category"/>\n'
' <category displayName="$(string.test_recommended_category)"'
' name="test_recommended_category"/>\n'
' </categories>\n'
' <policies/>\n'
'</policyDefinitions>')
self.AssertXMLEquals(output, expected_output)
def testEmptyPolicyGroup(self):
empty_policy_group = {
'name': 'PolicyGroup',
'policies': []
}
# Initialize writer to write a policy group.
self.writer.BeginTemplate()
# Write policy group
self.writer.BeginPolicyGroup(empty_policy_group)
self.writer.EndPolicyGroup()
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = ''
self.AssertXMLEquals(output, expected_output)
output = self.GetXMLOfChildren(
self._GetCategoriesElement(self.writer._doc))
expected_output = (
'<category displayName="$(string.test_category)"'
' name="test_category"/>\n'
'<category displayName="$(string.test_recommended_category)"'
' name="test_recommended_category"/>\n'
'<category displayName="$(string.PolicyGroup_group)"'
' name="PolicyGroup">\n'
' <parentCategory ref="test_category"/>\n'
'</category>')
self.AssertXMLEquals(output, expected_output)
def testPolicyGroup(self):
empty_policy_group = {
'name': 'PolicyGroup',
'policies': [
{'name': 'PolicyStub2',
'type': 'main'},
{'name': 'PolicyStub1',
'type': 'main'},
]
}
# Initialize writer to write a policy group.
self.writer.BeginTemplate()
# Write policy group
self.writer.BeginPolicyGroup(empty_policy_group)
self.writer.EndPolicyGroup()
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = ''
self.AssertXMLEquals(output, expected_output)
output = self.GetXMLOfChildren(
self._GetCategoriesElement(self.writer._doc))
expected_output = (
'<category displayName="$(string.test_category)"'
' name="test_category"/>\n'
'<category displayName="$(string.test_recommended_category)"'
' name="test_recommended_category"/>\n'
'<category displayName="$(string.PolicyGroup_group)"'
' name="PolicyGroup">\n'
' <parentCategory ref="test_category"/>\n'
'</category>')
self.AssertXMLEquals(output, expected_output)
def _initWriterForPolicy(self, writer, policy):
'''Initializes the writer to write the given policy next.
'''
policy_group = {
'name': 'PolicyGroup',
'policies': [policy]
}
writer.BeginTemplate()
writer.BeginPolicyGroup(policy_group)
def testMainPolicy(self):
main_policy = {
'name': 'DummyMainPolicy',
'type': 'main',
}
self._initWriterForPolicy(self.writer, main_policy)
self.writer.WritePolicy(main_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.DummyMainPolicy)"'
' explainText="$(string.DummyMainPolicy_Explain)"'
' key="Software\\Policies\\Test" name="DummyMainPolicy"'
' presentation="$(presentation.DummyMainPolicy)"'
' valueName="DummyMainPolicy">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <enabledValue>\n'
' <decimal value="1"/>\n'
' </enabledValue>\n'
' <disabledValue>\n'
' <decimal value="0"/>\n'
' </disabledValue>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testRecommendedPolicy(self):
main_policy = {
'name': 'DummyMainPolicy',
'type': 'main',
}
policy_group = {
'name': 'PolicyGroup',
'policies': [main_policy],
}
self.writer.BeginTemplate()
self.writer.BeginRecommendedPolicyGroup(policy_group)
self.writer.WriteRecommendedPolicy(main_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.DummyMainPolicy)"'
' explainText="$(string.DummyMainPolicy_Explain)"'
' key="Software\\Policies\\Test\\Recommended"'
' name="DummyMainPolicy_recommended"'
' presentation="$(presentation.DummyMainPolicy)"'
' valueName="DummyMainPolicy">\n'
' <parentCategory ref="PolicyGroup_recommended"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <enabledValue>\n'
' <decimal value="1"/>\n'
' </enabledValue>\n'
' <disabledValue>\n'
' <decimal value="0"/>\n'
' </disabledValue>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testRecommendedOnlyPolicy(self):
main_policy = {
'name': 'DummyMainPolicy',
'type': 'main',
'features': {
'can_be_recommended': True,
'can_be_mandatory': False,
}
}
policy_group = {
'name': 'PolicyGroup',
'policies': [main_policy],
}
self.writer.BeginTemplate()
self.writer.BeginRecommendedPolicyGroup(policy_group)
self.writer.WritePolicy(main_policy)
self.writer.WriteRecommendedPolicy(main_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.DummyMainPolicy)"'
' explainText="$(string.DummyMainPolicy_Explain)"'
' key="Software\\Policies\\Test\\Recommended"'
' name="DummyMainPolicy_recommended"'
' presentation="$(presentation.DummyMainPolicy)"'
' valueName="DummyMainPolicy">\n'
' <parentCategory ref="PolicyGroup_recommended"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <enabledValue>\n'
' <decimal value="1"/>\n'
' </enabledValue>\n'
' <disabledValue>\n'
' <decimal value="0"/>\n'
' </disabledValue>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testStringPolicy(self):
string_policy = {
'name': 'SampleStringPolicy',
'type': 'string',
}
self._initWriterForPolicy(self.writer, string_policy)
self.writer.WritePolicy(string_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.SampleStringPolicy)"'
' explainText="$(string.SampleStringPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleStringPolicy"'
' presentation="$(presentation.SampleStringPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <text id="SampleStringPolicy" maxLength="1000000"'
' valueName="SampleStringPolicy"/>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testIntPolicy(self):
int_policy = {
'name': 'SampleIntPolicy',
'type': 'int',
}
self._initWriterForPolicy(self.writer, int_policy)
self.writer.WritePolicy(int_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.SampleIntPolicy)"'
' explainText="$(string.SampleIntPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleIntPolicy"'
' presentation="$(presentation.SampleIntPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <decimal id="SampleIntPolicy" maxValue="2000000000" '
'valueName="SampleIntPolicy"/>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testIntEnumPolicy(self):
enum_policy = {
'name': 'SampleEnumPolicy',
'type': 'int-enum',
'items': [
{'name': 'item_1', 'value': 0},
{'name': 'item_2', 'value': 1},
]
}
self._initWriterForPolicy(self.writer, enum_policy)
self.writer.WritePolicy(enum_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.SampleEnumPolicy)"'
' explainText="$(string.SampleEnumPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleEnumPolicy"'
' presentation="$(presentation.SampleEnumPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <enum id="SampleEnumPolicy" valueName="SampleEnumPolicy">\n'
' <item displayName="$(string.item_1)">\n'
' <value>\n'
' <decimal value="0"/>\n'
' </value>\n'
' </item>\n'
' <item displayName="$(string.item_2)">\n'
' <value>\n'
' <decimal value="1"/>\n'
' </value>\n'
' </item>\n'
' </enum>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testStringEnumPolicy(self):
enum_policy = {
'name': 'SampleEnumPolicy',
'type': 'string-enum',
'items': [
{'name': 'item_1', 'value': 'one'},
{'name': 'item_2', 'value': 'two'},
]
}
    # This test is different from the others because it also tests that space
# usage inside <string> nodes is correct.
dom_impl = minidom.getDOMImplementation('')
self.writer._doc = dom_impl.createDocument(None, 'policyDefinitions', None)
self.writer._active_policies_elem = self.writer._doc.documentElement
self.writer._active_mandatory_policy_group_name = 'PolicyGroup'
self.writer.WritePolicy(enum_policy)
output = self.writer.GetTemplateText()
expected_output = (
'<?xml version="1.0" ?>\n'
'<policyDefinitions>\n'
' <policy class="TestClass" displayName="$(string.SampleEnumPolicy)"'
' explainText="$(string.SampleEnumPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleEnumPolicy"'
' presentation="$(presentation.SampleEnumPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <enum id="SampleEnumPolicy" valueName="SampleEnumPolicy">\n'
' <item displayName="$(string.item_1)">\n'
' <value>\n'
' <string>one</string>\n'
' </value>\n'
' </item>\n'
' <item displayName="$(string.item_2)">\n'
' <value>\n'
' <string>two</string>\n'
' </value>\n'
' </item>\n'
' </enum>\n'
' </elements>\n'
' </policy>\n'
'</policyDefinitions>')
self.AssertXMLEquals(output, expected_output)
def testListPolicy(self):
list_policy = {
'name': 'SampleListPolicy',
'type': 'list',
}
self._initWriterForPolicy(self.writer, list_policy)
self.writer.WritePolicy(list_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.SampleListPolicy)"'
' explainText="$(string.SampleListPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleListPolicy"'
' presentation="$(presentation.SampleListPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <list id="SampleListPolicyDesc"'
' key="Software\Policies\Test\SampleListPolicy" valuePrefix=""/>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testStringEnumListPolicy(self):
list_policy = {
'name': 'SampleListPolicy',
'type': 'string-enum-list',
'items': [
{'name': 'item_1', 'value': 'one'},
{'name': 'item_2', 'value': 'two'},
]
}
self._initWriterForPolicy(self.writer, list_policy)
self.writer.WritePolicy(list_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.SampleListPolicy)"'
' explainText="$(string.SampleListPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleListPolicy"'
' presentation="$(presentation.SampleListPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <list id="SampleListPolicyDesc"'
' key="Software\Policies\Test\SampleListPolicy" valuePrefix=""/>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testDictionaryPolicy(self):
dict_policy = {
'name': 'SampleDictionaryPolicy',
'type': 'dict',
}
self._initWriterForPolicy(self.writer, dict_policy)
self.writer.WritePolicy(dict_policy)
output = self.GetXMLOfChildren(self._GetPoliciesElement(self.writer._doc))
expected_output = (
'<policy class="TestClass" displayName="$(string.'
'SampleDictionaryPolicy)"'
' explainText="$(string.SampleDictionaryPolicy_Explain)"'
' key="Software\\Policies\\Test" name="SampleDictionaryPolicy"'
' presentation="$(presentation.SampleDictionaryPolicy)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <text id="SampleDictionaryPolicy" maxLength="1000000"'
' valueName="SampleDictionaryPolicy"/>\n'
' </elements>\n'
'</policy>')
self.AssertXMLEquals(output, expected_output)
def testPlatform(self):
# Test that the writer correctly chooses policies of platform Windows.
self.assertTrue(self.writer.IsPolicySupported({
'supported_on': [
{'platforms': ['win', 'zzz']}, {'platforms': ['aaa']}
]
}))
self.assertFalse(self.writer.IsPolicySupported({
'supported_on': [
{'platforms': ['mac', 'linux']}, {'platforms': ['aaa']}
]
}))
def testStringEncodings(self):
enum_policy_a = {
'name': 'SampleEnumPolicy.A',
'type': 'string-enum',
'items': [
{'name': 'tls1.2', 'value': 'tls1.2'}
]
}
enum_policy_b = {
'name': 'SampleEnumPolicy.B',
'type': 'string-enum',
'items': [
{'name': 'tls1.2', 'value': 'tls1.2'}
]
}
dom_impl = minidom.getDOMImplementation('')
self.writer._doc = dom_impl.createDocument(None, 'policyDefinitions', None)
self.writer._active_policies_elem = self.writer._doc.documentElement
self.writer._active_mandatory_policy_group_name = 'PolicyGroup'
self.writer.WritePolicy(enum_policy_a)
self.writer.WritePolicy(enum_policy_b)
output = self.writer.GetTemplateText()
expected_output = (
'<?xml version="1.0" ?>\n'
'<policyDefinitions>\n'
' <policy class="TestClass" displayName="$(string.SampleEnumPolicy_A)"'
' explainText="$(string.SampleEnumPolicy_A_Explain)"'
' key="Software\\Policies\\Test" name="SampleEnumPolicy.A"'
' presentation="$(presentation.SampleEnumPolicy.A)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <enum id="SampleEnumPolicy.A" valueName="SampleEnumPolicy.A">\n'
' <item displayName="$(string.tls1_2)">\n'
' <value>\n'
' <string>tls1.2</string>\n'
' </value>\n'
' </item>\n'
' </enum>\n'
' </elements>\n'
' </policy>\n'
' <policy class="TestClass" displayName="$(string.SampleEnumPolicy_B)"'
' explainText="$(string.SampleEnumPolicy_B_Explain)"'
' key="Software\\Policies\\Test" name="SampleEnumPolicy.B"'
' presentation="$(presentation.SampleEnumPolicy.B)">\n'
' <parentCategory ref="PolicyGroup"/>\n'
' <supportedOn ref="SUPPORTED_TESTOS"/>\n'
' <elements>\n'
' <enum id="SampleEnumPolicy.B" valueName="SampleEnumPolicy.B">\n'
' <item displayName="$(string.tls1_2)">\n'
' <value>\n'
' <string>tls1.2</string>\n'
' </value>\n'
' </item>\n'
' </enum>\n'
' </elements>\n'
' </policy>\n'
'</policyDefinitions>')
self.AssertXMLEquals(output, expected_output)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -5,673,786,201,235,686,000 | 35.815009 | 80 | 0.597203 | false |
vmax-feihu/hue | desktop/core/ext-py/Django-1.6.10/tests/decorators/tests.py | 52 | 8243 | from functools import wraps
from django.contrib.admin.views.decorators import staff_member_required
from django.contrib.auth.decorators import login_required, permission_required, user_passes_test
from django.http import HttpResponse, HttpRequest, HttpResponseNotAllowed
from django.middleware.clickjacking import XFrameOptionsMiddleware
from django.utils.decorators import method_decorator
from django.utils.functional import allow_lazy, lazy, memoize
from django.utils.unittest import TestCase
from django.views.decorators.cache import cache_page, never_cache, cache_control
from django.views.decorators.clickjacking import xframe_options_deny, xframe_options_sameorigin, xframe_options_exempt
from django.views.decorators.http import require_http_methods, require_GET, require_POST, require_safe, condition
from django.views.decorators.vary import vary_on_headers, vary_on_cookie
def fully_decorated(request):
"""Expected __doc__"""
return HttpResponse('<html><body>dummy</body></html>')
fully_decorated.anything = "Expected __dict__"
def compose(*functions):
# compose(f, g)(*args, **kwargs) == f(g(*args, **kwargs))
functions = list(reversed(functions))
def _inner(*args, **kwargs):
result = functions[0](*args, **kwargs)
for f in functions[1:]:
result = f(result)
return result
return _inner
full_decorator = compose(
# django.views.decorators.http
require_http_methods(["GET"]),
require_GET,
require_POST,
require_safe,
condition(lambda r: None, lambda r: None),
# django.views.decorators.vary
vary_on_headers('Accept-language'),
vary_on_cookie,
# django.views.decorators.cache
cache_page(60*15),
cache_control(private=True),
never_cache,
# django.contrib.auth.decorators
# Apply user_passes_test twice to check #9474
user_passes_test(lambda u:True),
login_required,
permission_required('change_world'),
# django.contrib.admin.views.decorators
staff_member_required,
# django.utils.functional
lambda f: memoize(f, {}, 1),
allow_lazy,
lazy,
)
fully_decorated = full_decorator(fully_decorated)
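# The fully decorated view above exercises the whole decorator stack at once;
# DecoratorsTest.test_attributes checks that __name__, __doc__ and __dict__
# survive the wrapping.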
class DecoratorsTest(TestCase):
def test_attributes(self):
"""
Tests that django decorators set certain attributes of the wrapped
function.
"""
self.assertEqual(fully_decorated.__name__, 'fully_decorated')
self.assertEqual(fully_decorated.__doc__, 'Expected __doc__')
self.assertEqual(fully_decorated.__dict__['anything'], 'Expected __dict__')
def test_user_passes_test_composition(self):
"""
Test that the user_passes_test decorator can be applied multiple times
(#9474).
"""
def test1(user):
user.decorators_applied.append('test1')
return True
def test2(user):
user.decorators_applied.append('test2')
return True
def callback(request):
return request.user.decorators_applied
callback = user_passes_test(test1)(callback)
callback = user_passes_test(test2)(callback)
class DummyUser(object): pass
class DummyRequest(object): pass
request = DummyRequest()
request.user = DummyUser()
request.user.decorators_applied = []
response = callback(request)
self.assertEqual(response, ['test2', 'test1'])
def test_cache_page_new_style(self):
"""
Test that we can call cache_page the new way
"""
def my_view(request):
return "response"
my_view_cached = cache_page(123)(my_view)
self.assertEqual(my_view_cached(HttpRequest()), "response")
my_view_cached2 = cache_page(123, key_prefix="test")(my_view)
self.assertEqual(my_view_cached2(HttpRequest()), "response")
def test_require_safe_accepts_only_safe_methods(self):
"""
Test for the require_safe decorator.
A view returns either a response or an exception.
Refs #15637.
"""
def my_view(request):
return HttpResponse("OK")
my_safe_view = require_safe(my_view)
request = HttpRequest()
request.method = 'GET'
self.assertIsInstance(my_safe_view(request), HttpResponse)
request.method = 'HEAD'
self.assertIsInstance(my_safe_view(request), HttpResponse)
request.method = 'POST'
self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed)
request.method = 'PUT'
self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed)
request.method = 'DELETE'
self.assertIsInstance(my_safe_view(request), HttpResponseNotAllowed)
# For testing method_decorator, a decorator that assumes a single argument.
# We will get a TypeError if there is a mismatch in the number of arguments.
def simple_dec(func):
def wrapper(arg):
return func("test:" + arg)
return wraps(func)(wrapper)
simple_dec_m = method_decorator(simple_dec)
# For testing method_decorator, two decorators that add an attribute to the function
def myattr_dec(func):
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
wrapper.myattr = True
return wraps(func)(wrapper)
myattr_dec_m = method_decorator(myattr_dec)
def myattr2_dec(func):
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
wrapper.myattr2 = True
return wraps(func)(wrapper)
myattr2_dec_m = method_decorator(myattr2_dec)
class MethodDecoratorTests(TestCase):
"""
Tests for method_decorator
"""
def test_preserve_signature(self):
class Test(object):
@simple_dec_m
def say(self, arg):
return arg
self.assertEqual("test:hello", Test().say("hello"))
def test_preserve_attributes(self):
# Sanity check myattr_dec and myattr2_dec
@myattr_dec
@myattr2_dec
def func():
pass
self.assertEqual(getattr(func, 'myattr', False), True)
self.assertEqual(getattr(func, 'myattr2', False), True)
# Now check method_decorator
class Test(object):
@myattr_dec_m
@myattr2_dec_m
def method(self):
"A method"
pass
self.assertEqual(getattr(Test().method, 'myattr', False), True)
self.assertEqual(getattr(Test().method, 'myattr2', False), True)
self.assertEqual(getattr(Test.method, 'myattr', False), True)
self.assertEqual(getattr(Test.method, 'myattr2', False), True)
self.assertEqual(Test.method.__doc__, 'A method')
self.assertEqual(Test.method.__name__, 'method')
class XFrameOptionsDecoratorsTests(TestCase):
"""
Tests for the X-Frame-Options decorators.
"""
def test_deny_decorator(self):
"""
Ensures @xframe_options_deny properly sets the X-Frame-Options header.
"""
@xframe_options_deny
def a_view(request):
return HttpResponse()
r = a_view(HttpRequest())
self.assertEqual(r['X-Frame-Options'], 'DENY')
def test_sameorigin_decorator(self):
"""
Ensures @xframe_options_sameorigin properly sets the X-Frame-Options
header.
"""
@xframe_options_sameorigin
def a_view(request):
return HttpResponse()
r = a_view(HttpRequest())
self.assertEqual(r['X-Frame-Options'], 'SAMEORIGIN')
def test_exempt_decorator(self):
"""
Ensures @xframe_options_exempt properly instructs the
XFrameOptionsMiddleware to NOT set the header.
"""
@xframe_options_exempt
def a_view(request):
return HttpResponse()
req = HttpRequest()
resp = a_view(req)
self.assertEqual(resp.get('X-Frame-Options', None), None)
self.assertTrue(resp.xframe_options_exempt)
# Since the real purpose of the exempt decorator is to suppress
# the middleware's functionality, let's make sure it actually works...
r = XFrameOptionsMiddleware().process_response(req, resp)
self.assertEqual(r.get('X-Frame-Options', None), None)
| apache-2.0 | 509,149,651,526,826,240 | 31.972 | 118 | 0.645396 | false |
strands-project/aaf_deployment | aaf_walking_group/scripts/insert_yaml.py | 4 | 1233 | #!/usr/bin/env python
import std_msgs.msg
from mongodb_store.message_store import MessageStoreProxy
import yaml
import json
import pprint
import argparse
def loadDialogue(inputfile, dataset_name):
print "openning %s" % inputfile
with open(inputfile) as f:
content = f.readlines()
print "Done"
return content
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("dataset_name", help="The name of the dataset. Saved in meta information using 'meta_name'", type=str)
parser.add_argument("-i", "--input", help="Input yaml file", type=str, required=True)
parser.add_argument("--collection_name", help="The collection name. Default: aaf_walking_group", type=str, default="aaf_walking_group")
parser.add_argument("--meta_name", help="The name of the meta filed to store 'dataset_name' in. Default: waypoint_set", type=str, default="waypoint_set")
args = parser.parse_args()
msg_store = MessageStoreProxy(collection=args.collection_name)
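    # The YAML document is serialised to JSON and stored as a std_msgs String
    # in the message store, tagged with the dataset name under the chosen meta
    # field so it can be queried back by that name later.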
data = yaml.load(open(args.input))
meta = {}
meta[args.meta_name] = args.dataset_name
pprint.pprint(meta)
pprint.pprint(data)
msg_store.insert(std_msgs.msg.String(json.dumps(data)), meta)
| mit | -2,928,932,509,161,116,700 | 36.363636 | 157 | 0.699108 | false |
ahmed-mahran/hue | desktop/core/ext-py/pysaml2-2.4.0/src/saml2/authn_context/__init__.py | 33 | 7759 | from saml2.saml import AuthnContext, AuthnContextClassRef
from saml2.samlp import RequestedAuthnContext
__author__ = 'rolandh'
from saml2 import extension_elements_to_elements
UNSPECIFIED = "urn:oasis:names:tc:SAML:2.0:ac:classes:unspecified"
INTERNETPROTOCOLPASSWORD = \
'urn:oasis:names:tc:SAML:2.0:ac:classes:InternetProtocolPassword'
MOBILETWOFACTORCONTRACT = \
'urn:oasis:names:tc:SAML:2.0:ac:classes:MobileTwoFactorContract'
PASSWORDPROTECTEDTRANSPORT = \
'urn:oasis:names:tc:SAML:2.0:ac:classes:PasswordProtectedTransport'
PASSWORD = 'urn:oasis:names:tc:SAML:2.0:ac:classes:Password'
TLSCLIENT = 'urn:oasis:names:tc:SAML:2.0:ac:classes:TLSClient'
TIMESYNCTOKEN = "urn:oasis:names:tc:SAML:2.0:ac:classes:TimeSyncToken"
AL1 = "http://idmanagement.gov/icam/2009/12/saml_2.0_profile/assurancelevel1"
AL2 = "http://idmanagement.gov/icam/2009/12/saml_2.0_profile/assurancelevel2"
AL3 = "http://idmanagement.gov/icam/2009/12/saml_2.0_profile/assurancelevel3"
AL4 = "http://idmanagement.gov/icam/2009/12/saml_2.0_profile/assurancelevel4"
from saml2.authn_context import ippword
from saml2.authn_context import mobiletwofactor
from saml2.authn_context import ppt
from saml2.authn_context import pword
from saml2.authn_context import sslcert
CMP_TYPE = ['exact', 'minimum', 'maximum', 'better']
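# AuthnBroker maps AuthnContext class references (or declarations) to locally
# available authentication methods and their security levels, and picks
# candidate methods that satisfy a RequestedAuthnContext's comparison rule.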
class AuthnBroker(object):
def __init__(self):
self.db = {"info": {}, "key": {}}
self.next = 0
@staticmethod
def exact(a, b):
return a == b
@staticmethod
def minimum(a, b):
return b >= a
@staticmethod
def maximum(a, b):
return b <= a
@staticmethod
def better(a, b):
return b > a
def add(self, spec, method, level=0, authn_authority="", reference=None):
"""
Adds a new authentication method.
Assumes not more than one authentication method per AuthnContext
specification.
:param spec: What the authentication endpoint offers in the form
of an AuthnContext
        :param method: An identifier of the authentication method.
:param level: security level, positive integers, 0 is lowest
:param reference: Desired unique reference to this `spec'
:return:
"""
if spec.authn_context_class_ref:
key = spec.authn_context_class_ref.text
_info = {
"class_ref": key,
"method": method,
"level": level,
"authn_auth": authn_authority
}
elif spec.authn_context_decl:
key = spec.authn_context_decl.c_namespace
_info = {
"method": method,
"decl": spec.authn_context_decl,
"level": level,
"authn_auth": authn_authority
}
else:
raise NotImplementedError()
self.next += 1
_ref = reference
if _ref is None:
_ref = str(self.next)
assert _ref not in self.db["info"]
self.db["info"][_ref] = _info
try:
self.db["key"][key].append(_ref)
except KeyError:
self.db["key"][key] = [_ref]
def remove(self, spec, method=None, level=0, authn_authority=""):
if spec.authn_context_class_ref:
_cls_ref = spec.authn_context_class_ref.text
try:
_refs = self.db["key"][_cls_ref]
except KeyError:
return
else:
_remain = []
for _ref in _refs:
item = self.db["info"][_ref]
if method and method != item["method"]:
_remain.append(_ref)
if level and level != item["level"]:
_remain.append(_ref)
if authn_authority and \
                            authn_authority != item["authn_auth"]:
_remain.append(_ref)
if _remain:
self.db[_cls_ref] = _remain
    def _pick_by_class_ref(self, cls_ref, comparison_type="exact"):
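        # Resolve the SAML comparison keyword ("exact", "minimum", "maximum" or
        # "better") to one of the predicates above, then collect every method
        # whose security level satisfies it relative to the requested class ref.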
        func = getattr(self, comparison_type)
try:
_refs = self.db["key"][cls_ref]
except KeyError:
return []
else:
_item = self.db["info"][_refs[0]]
_level = _item["level"]
            if comparison_type != "better":
if _item["method"]:
res = [(_item["method"], _refs[0])]
else:
res = []
else:
res = []
for ref in _refs[1:]:
item = self.db["info"][ref]
res.append((item["method"], ref))
if func(_level, item["level"]):
_level = item["level"]
for ref, _dic in self.db["info"].items():
if ref in _refs:
continue
elif func(_level, _dic["level"]):
if _dic["method"]:
_val = (_dic["method"], ref)
if _val not in res:
res.append(_val)
return res
def pick(self, req_authn_context=None):
"""
Given the authentication context find zero or more places where
the user could be sent next. Ordered according to security level.
:param req_authn_context: The requested context as an
RequestedAuthnContext instance
        :return: A list of (method, reference) tuples, ordered by security level
"""
if req_authn_context is None:
return self._pick_by_class_ref(UNSPECIFIED, "minimum")
if req_authn_context.authn_context_class_ref:
if req_authn_context.comparison:
_cmp = req_authn_context.comparison
else:
_cmp = "exact"
if _cmp == 'exact':
res = []
for cls_ref in req_authn_context.authn_context_class_ref:
res += (self._pick_by_class_ref(cls_ref.text, _cmp))
return res
else:
return self._pick_by_class_ref(
req_authn_context.authn_context_class_ref[0].text, _cmp)
elif req_authn_context.authn_context_decl_ref:
if req_authn_context.comparison:
_cmp = req_authn_context.comparison
else:
_cmp = "exact"
return self._pick_by_class_ref(
req_authn_context.authn_context_decl_ref, _cmp)
def match(self, requested, provided):
if requested == provided:
return True
else:
return False
def __getitem__(self, ref):
return self.db["info"][ref]
def get_authn_by_accr(self, accr):
_ids = self.db["key"][accr]
return self[_ids[0]]
def authn_context_factory(text):
# brute force
for mod in [ippword, mobiletwofactor, ppt, pword, sslcert]:
inst = mod.authentication_context_declaration_from_string(text)
if inst:
return inst
return None
def authn_context_decl_from_extension_elements(extelems):
res = extension_elements_to_elements(extelems, [ippword, mobiletwofactor,
ppt, pword, sslcert])
try:
return res[0]
except IndexError:
return None
def authn_context_class_ref(ref):
return AuthnContext(authn_context_class_ref=AuthnContextClassRef(text=ref))
def requested_authn_context(class_ref, comparison="minimum"):
if not isinstance(class_ref, list):
class_ref = [class_ref]
return RequestedAuthnContext(
authn_context_class_ref=[AuthnContextClassRef(text=i) for i in class_ref],
comparison=comparison)
| apache-2.0 | -7,827,574,503,652,388,000 | 33.030702 | 82 | 0.552777 | false |
simonwydooghe/ansible | lib/ansible/modules/cloud/amazon/aws_inspector_target.py | 6 | 7846 | #!/usr/bin/python
# Copyright (c) 2018 Dennis Conrad for Sainsbury's
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: aws_inspector_target
short_description: Create, Update and Delete Amazon Inspector Assessment
Targets
description: Creates, updates, or deletes Amazon Inspector Assessment Targets
and manages the required Resource Groups.
version_added: "2.6"
author: "Dennis Conrad (@dennisconrad)"
options:
name:
description:
- The user-defined name that identifies the assessment target. The name
must be unique within the AWS account.
required: true
type: str
state:
description:
- The state of the assessment target.
choices:
- absent
- present
default: present
type: str
tags:
description:
- Tags of the EC2 instances to be added to the assessment target.
- Required if C(state=present).
type: dict
extends_documentation_fragment:
- aws
- ec2
requirements:
- boto3
- botocore
'''
EXAMPLES = '''
- name: Create my_target Assessment Target
aws_inspector_target:
name: my_target
tags:
role: scan_target
- name: Update Existing my_target Assessment Target with Additional Tags
aws_inspector_target:
name: my_target
tags:
env: dev
role: scan_target
- name: Delete my_target Assessment Target
aws_inspector_target:
name: my_target
state: absent
'''
RETURN = '''
arn:
description: The ARN that specifies the Amazon Inspector assessment target.
returned: success
type: str
sample: "arn:aws:inspector:eu-west-1:123456789012:target/0-O4LnL7n1"
created_at:
description: The time at which the assessment target was created.
returned: success
type: str
sample: "2018-01-29T13:48:51.958000+00:00"
name:
description: The name of the Amazon Inspector assessment target.
returned: success
type: str
sample: "my_target"
resource_group_arn:
description: The ARN that specifies the resource group that is associated
with the assessment target.
returned: success
type: str
sample: "arn:aws:inspector:eu-west-1:123456789012:resourcegroup/0-qY4gDel8"
tags:
description: The tags of the resource group that is associated with the
assessment target.
returned: success
type: list
sample: {"role": "scan_target", "env": "dev"}
updated_at:
description: The time at which the assessment target was last updated.
returned: success
type: str
sample: "2018-01-29T13:48:51.958000+00:00"
'''
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import AWSRetry
from ansible.module_utils.ec2 import (
ansible_dict_to_boto3_tag_list,
boto3_tag_list_to_ansible_dict,
camel_dict_to_snake_dict,
compare_aws_tags,
)
try:
import botocore
except ImportError:
pass # caught by AnsibleAWSModule
@AWSRetry.backoff(tries=5, delay=5, backoff=2.0)
def main():
argument_spec = dict(
name=dict(required=True),
state=dict(choices=['absent', 'present'], default='present'),
tags=dict(type='dict'),
)
required_if = [['state', 'present', ['tags']]]
module = AnsibleAWSModule(
argument_spec=argument_spec,
supports_check_mode=False,
required_if=required_if,
)
name = module.params.get('name')
state = module.params.get('state').lower()
tags = module.params.get('tags')
if tags:
tags = ansible_dict_to_boto3_tag_list(tags, 'key', 'value')
client = module.client('inspector')
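    # Look up any existing assessment target with this name and fetch both its
    # details and the tags of its associated resource group; an IndexError from
    # the empty result list means no such target exists yet.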
try:
existing_target_arn = client.list_assessment_targets(
filter={'assessmentTargetNamePattern': name},
).get('assessmentTargetArns')[0]
existing_target = camel_dict_to_snake_dict(
client.describe_assessment_targets(
assessmentTargetArns=[existing_target_arn],
).get('assessmentTargets')[0]
)
existing_resource_group_arn = existing_target.get('resource_group_arn')
existing_resource_group_tags = client.describe_resource_groups(
resourceGroupArns=[existing_resource_group_arn],
).get('resourceGroups')[0].get('tags')
target_exists = True
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
) as e:
module.fail_json_aws(e, msg="trying to retrieve targets")
except IndexError:
target_exists = False
if state == 'present' and target_exists:
ansible_dict_tags = boto3_tag_list_to_ansible_dict(tags)
ansible_dict_existing_tags = boto3_tag_list_to_ansible_dict(
existing_resource_group_tags
)
tags_to_add, tags_to_remove = compare_aws_tags(
ansible_dict_tags,
ansible_dict_existing_tags
)
if not (tags_to_add or tags_to_remove):
existing_target.update({'tags': ansible_dict_existing_tags})
module.exit_json(changed=False, **existing_target)
else:
try:
updated_resource_group_arn = client.create_resource_group(
resourceGroupTags=tags,
).get('resourceGroupArn')
client.update_assessment_target(
assessmentTargetArn=existing_target_arn,
assessmentTargetName=name,
resourceGroupArn=updated_resource_group_arn,
)
updated_target = camel_dict_to_snake_dict(
client.describe_assessment_targets(
assessmentTargetArns=[existing_target_arn],
).get('assessmentTargets')[0]
)
updated_target.update({'tags': ansible_dict_tags})
                module.exit_json(changed=True, **updated_target)
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
) as e:
module.fail_json_aws(e, msg="trying to update target")
elif state == 'present' and not target_exists:
try:
new_resource_group_arn = client.create_resource_group(
resourceGroupTags=tags,
).get('resourceGroupArn')
new_target_arn = client.create_assessment_target(
assessmentTargetName=name,
resourceGroupArn=new_resource_group_arn,
).get('assessmentTargetArn')
new_target = camel_dict_to_snake_dict(
client.describe_assessment_targets(
assessmentTargetArns=[new_target_arn],
).get('assessmentTargets')[0]
)
new_target.update({'tags': boto3_tag_list_to_ansible_dict(tags)})
module.exit_json(changed=True, **new_target)
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
) as e:
module.fail_json_aws(e, msg="trying to create target")
elif state == 'absent' and target_exists:
try:
client.delete_assessment_target(
assessmentTargetArn=existing_target_arn,
)
module.exit_json(changed=True)
except (
botocore.exceptions.BotoCoreError,
botocore.exceptions.ClientError,
) as e:
module.fail_json_aws(e, msg="trying to delete target")
elif state == 'absent' and not target_exists:
module.exit_json(changed=False)
if __name__ == '__main__':
main()
| gpl-3.0 | 8,585,673,368,683,661,000 | 30.637097 | 92 | 0.62414 | false |
shizeeg/pyicqt | src/services/ConnectUsers.py | 1 | 1807 | # Copyright 2004-2006 Daniel Henninger <[email protected]>
# Licensed for distribution under the GPL version 2, check COPYING for details
import utils
from twisted.words.xish.domish import Element
from twisted.words.protocols.jabber.jid import internJID
import jabw
import config
import lang
from debug import LogEvent, INFO, WARN, ERROR
import globals
from adhoc import rights_guest, rights_user, rights_admin
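# Admin-only ad-hoc command ("connectusers"): sends a presence probe from the
# transport to every registered user and answers the request with a completed
# command form.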
class ConnectUsers:
def __init__(self, pytrans):
self.pytrans = pytrans
self.pytrans.adhoc.addCommand("connectusers", self.incomingIq, "command_ConnectUsers", rights_admin)
def sendProbes(self):
for jid in self.pytrans.xdb.getRegistrationList():
jabw.sendPresence(self.pytrans, jid, config.jid, ptype="probe")
def incomingIq(self, el):
to = el.getAttribute("from")
ID = el.getAttribute("id")
ulang = utils.getLang(el)
if config.admins.count(internJID(to).userhost()) == 0:
self.pytrans.iq.sendIqError(to=to, fro=config.jid, ID=ID, xmlns=globals.COMMANDS, etype="cancel", condition="not-authorized")
return
self.sendProbes()
iq = Element((None, "iq"))
iq.attributes["to"] = to
iq.attributes["from"] = config.jid
if ID:
iq.attributes["id"] = ID
iq.attributes["type"] = "result"
command = iq.addElement("command")
command.attributes["sessionid"] = self.pytrans.makeMessageID()
command.attributes["xmlns"] = globals.COMMANDS
command.attributes["status"] = "completed"
x = command.addElement("x")
x.attributes["xmlns"] = globals.XDATA
x.attributes["type"] = "result"
title = x.addElement("title")
title.addContent(lang.get("command_ConnectUsers", ulang))
field = x.addElement("field")
field.attributes["type"] = "fixed"
field.addElement("value").addContent(lang.get("command_Done", ulang))
self.pytrans.send(iq)
| gpl-2.0 | -1,061,608,070,177,733,500 | 30.701754 | 128 | 0.722745 | false |
NewpTone/stacklab-nova | debian/python-nova/usr/share/pyshared/nova/tests/integrated/test_servers.py | 7 | 16851 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
import unittest
from nova.openstack.common.log import logging
from nova.tests import fake_network
from nova.tests.integrated.api import client
from nova.tests.integrated import integrated_helpers
import nova.virt.fake
LOG = logging.getLogger(__name__)
class ServersTest(integrated_helpers._IntegratedTestBase):
def _wait_for_state_change(self, server, from_status):
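        # Poll the API until the server leaves from_status, up to 50 times with
        # a 0.1 second pause between attempts (roughly five seconds in total).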
for i in xrange(0, 50):
server = self.api.get_server(server['id'])
if server['status'] != from_status:
break
time.sleep(.1)
return server
def _restart_compute_service(self, *args, **kwargs):
"""restart compute service. NOTE: fake driver forgets all instances."""
self.compute.kill()
self.compute = self.start_service('compute', *args, **kwargs)
def test_get_servers(self):
"""Simple check that listing servers works."""
servers = self.api.get_servers()
for server in servers:
LOG.debug("server: %s" % server)
def test_create_server_with_error(self):
"""Create a server which will enter error state."""
fake_network.set_stub_network_methods(self.stubs)
def throw_error(*_):
raise Exception()
self.stubs.Set(nova.virt.fake.FakeDriver, 'spawn', throw_error)
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({"server": server})
created_server_id = created_server['id']
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
found_server = self._wait_for_state_change(found_server, 'BUILD')
self.assertEqual('ERROR', found_server['status'])
self._delete_server(created_server_id)
def test_create_and_delete_server(self):
"""Creates and deletes a server."""
fake_network.set_stub_network_methods(self.stubs)
# Create server
# Build the server data gradually, checking errors along the way
server = {}
good_server = self._build_minimal_create_server_request()
post = {'server': server}
# Without an imageRef, this throws 500.
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
# With an invalid imageRef, this throws 500.
server['imageRef'] = self.get_invalid_image()
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
# Add a valid imageRef
server['imageRef'] = good_server.get('imageRef')
# Without flavorRef, this throws 500
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
server['flavorRef'] = good_server.get('flavorRef')
# Without a name, this throws 500
# TODO(justinsb): Check whatever the spec says should be thrown here
self.assertRaises(client.OpenStackApiException,
self.api.post_server, post)
# Set a valid server name
server['name'] = good_server['name']
created_server = self.api.post_server(post)
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Check it's there
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
# It should also be in the all-servers list
servers = self.api.get_servers()
server_ids = [server['id'] for server in servers]
self.assertTrue(created_server_id in server_ids)
found_server = self._wait_for_state_change(found_server, 'BUILD')
# It should be available...
# TODO(justinsb): Mock doesn't yet do this...
self.assertEqual('ACTIVE', found_server['status'])
servers = self.api.get_servers(detail=True)
for server in servers:
self.assertTrue("image" in server)
self.assertTrue("flavor" in server)
self._delete_server(created_server_id)
def test_deferred_delete(self):
"""Creates, deletes and waits for server to be reclaimed."""
self.flags(reclaim_instance_interval=1)
fake_network.set_stub_network_methods(self.stubs)
# enforce periodic tasks run in short time to avoid wait for 60s.
self._restart_compute_service(
periodic_interval=0.3, periodic_fuzzy_delay=0)
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Wait for it to finish being created
found_server = self._wait_for_state_change(created_server, 'BUILD')
# It should be available...
self.assertEqual('ACTIVE', found_server['status'])
# Cannot restore unless instance is deleted
self.assertRaises(client.OpenStackApiException,
self.api.post_server_action, created_server_id,
{'restore': {}})
# Cannot forceDelete unless instance is deleted
self.assertRaises(client.OpenStackApiException,
self.api.post_server_action, created_server_id,
{'forceDelete': {}})
# Delete the server
self.api.delete_server(created_server_id)
# Wait for queued deletion
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('DELETED', found_server['status'])
# Wait for real deletion
self._wait_for_deletion(created_server_id)
def test_deferred_delete_restore(self):
"""Creates, deletes and restores a server."""
self.flags(reclaim_instance_interval=1)
fake_network.set_stub_network_methods(self.stubs)
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Wait for it to finish being created
found_server = self._wait_for_state_change(created_server, 'BUILD')
# It should be available...
self.assertEqual('ACTIVE', found_server['status'])
# Delete the server
self.api.delete_server(created_server_id)
# Wait for queued deletion
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('DELETED', found_server['status'])
# Restore server
self.api.post_server_action(created_server_id, {'restore': {}})
# Wait for server to become active again
found_server = self._wait_for_state_change(found_server, 'DELETED')
self.assertEqual('ACTIVE', found_server['status'])
def test_deferred_delete_force(self):
"""Creates, deletes and force deletes a server."""
self.flags(reclaim_instance_interval=1)
fake_network.set_stub_network_methods(self.stubs)
# Create server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# Wait for it to finish being created
found_server = self._wait_for_state_change(created_server, 'BUILD')
# It should be available...
self.assertEqual('ACTIVE', found_server['status'])
# Delete the server
self.api.delete_server(created_server_id)
# Wait for queued deletion
found_server = self._wait_for_state_change(found_server, 'ACTIVE')
self.assertEqual('DELETED', found_server['status'])
# Force delete server
self.api.post_server_action(created_server_id, {'forceDelete': {}})
# Wait for real deletion
self._wait_for_deletion(created_server_id)
def _wait_for_deletion(self, server_id):
# Wait (briefly) for deletion
for _retries in range(50):
try:
found_server = self.api.get_server(server_id)
except client.OpenStackApiNotFoundException:
found_server = None
LOG.debug("Got 404, proceeding")
break
LOG.debug("Found_server=%s" % found_server)
# TODO(justinsb): Mock doesn't yet do accurate state changes
#if found_server['status'] != 'deleting':
# break
time.sleep(.1)
# Should be gone
self.assertFalse(found_server)
def _delete_server(self, server_id):
# Delete the server
self.api.delete_server(server_id)
self._wait_for_deletion(server_id)
def test_create_server_with_metadata(self):
"""Creates a server with metadata."""
fake_network.set_stub_network_methods(self.stubs)
# Build the server data gradually, checking errors along the way
server = self._build_minimal_create_server_request()
metadata = {}
for i in range(30):
metadata['key_%s' % i] = 'value_%s' % i
server['metadata'] = metadata
post = {'server': server}
created_server = self.api.post_server(post)
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
self.assertEqual(metadata, found_server.get('metadata'))
# The server should also be in the all-servers details list
servers = self.api.get_servers(detail=True)
server_map = dict((server['id'], server) for server in servers)
found_server = server_map.get(created_server_id)
self.assertTrue(found_server)
# Details do include metadata
self.assertEqual(metadata, found_server.get('metadata'))
# The server should also be in the all-servers summary list
servers = self.api.get_servers(detail=False)
server_map = dict((server['id'], server) for server in servers)
found_server = server_map.get(created_server_id)
self.assertTrue(found_server)
# Summary should not include metadata
self.assertFalse(found_server.get('metadata'))
# Cleanup
self._delete_server(created_server_id)
def test_create_and_rebuild_server(self):
"""Rebuild a server with metadata."""
fake_network.set_stub_network_methods(self.stubs)
# create a server with initially has no metadata
server = self._build_minimal_create_server_request()
server_post = {'server': server}
metadata = {}
for i in range(30):
metadata['key_%s' % i] = 'value_%s' % i
server_post['server']['metadata'] = metadata
created_server = self.api.post_server(server_post)
LOG.debug("created_server: %s" % created_server)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
created_server = self._wait_for_state_change(created_server, 'BUILD')
# rebuild the server with metadata and other server attributes
post = {}
post['rebuild'] = {
"imageRef": "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6",
"name": "blah",
"accessIPv4": "172.19.0.2",
"accessIPv6": "fe80::2",
"metadata": {'some': 'thing'},
}
self.api.post_server_action(created_server_id, post)
LOG.debug("rebuilt server: %s" % created_server)
self.assertTrue(created_server['id'])
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
self.assertEqual({'some': 'thing'}, found_server.get('metadata'))
self.assertEqual('blah', found_server.get('name'))
self.assertEqual(post['rebuild']['imageRef'],
found_server.get('image')['id'])
self.assertEqual('172.19.0.2', found_server['accessIPv4'])
self.assertEqual('fe80::2', found_server['accessIPv6'])
# rebuild the server with empty metadata and nothing else
post = {}
post['rebuild'] = {
"imageRef": "76fa36fc-c930-4bf3-8c8a-ea2a2420deb6",
"metadata": {},
}
self.api.post_server_action(created_server_id, post)
LOG.debug("rebuilt server: %s" % created_server)
self.assertTrue(created_server['id'])
found_server = self.api.get_server(created_server_id)
self.assertEqual(created_server_id, found_server['id'])
self.assertEqual({}, found_server.get('metadata'))
self.assertEqual('blah', found_server.get('name'))
self.assertEqual(post['rebuild']['imageRef'],
found_server.get('image')['id'])
self.assertEqual('172.19.0.2', found_server['accessIPv4'])
self.assertEqual('fe80::2', found_server['accessIPv6'])
# Cleanup
self._delete_server(created_server_id)
def test_rename_server(self):
"""Test building and renaming a server."""
fake_network.set_stub_network_methods(self.stubs)
# Create a server
server = self._build_minimal_create_server_request()
created_server = self.api.post_server({'server': server})
LOG.debug("created_server: %s" % created_server)
server_id = created_server['id']
self.assertTrue(server_id)
# Rename the server to 'new-name'
self.api.put_server(server_id, {'server': {'name': 'new-name'}})
# Check the name of the server
created_server = self.api.get_server(server_id)
self.assertEqual(created_server['name'], 'new-name')
# Cleanup
self._delete_server(server_id)
def test_create_multiple_servers(self):
"""Creates multiple servers and checks for reservation_id"""
# Create 2 servers, setting 'return_reservation_id, which should
# return a reservation_id
server = self._build_minimal_create_server_request()
server['min_count'] = 2
server['return_reservation_id'] = True
post = {'server': server}
response = self.api.post_server(post)
self.assertIn('reservation_id', response)
reservation_id = response['reservation_id']
self.assertNotIn(reservation_id, ['', None])
# Create 1 more server, which should not return a reservation_id
server = self._build_minimal_create_server_request()
post = {'server': server}
created_server = self.api.post_server(post)
self.assertTrue(created_server['id'])
created_server_id = created_server['id']
# lookup servers created by the first request.
servers = self.api.get_servers(detail=True,
search_opts={'reservation_id': reservation_id})
server_map = dict((server['id'], server) for server in servers)
found_server = server_map.get(created_server_id)
# The server from the 2nd request should not be there.
self.assertEqual(found_server, None)
# Should have found 2 servers.
self.assertEqual(len(server_map), 2)
# Cleanup
self._delete_server(created_server_id)
for server_id in server_map.iterkeys():
self._delete_server(server_id)
if __name__ == "__main__":
unittest.main()
| apache-2.0 | 1,706,596,903,855,364,600 | 37.297727 | 79 | 0.6221 | false |
adviti/melange | app/soc/views/helper/surveys.py | 1 | 4496 | #!/usr/bin/env python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper classes that abstract survey form structure and fields meta data.
"""
import urllib
from django.utils.datastructures import SortedDict
from django.utils.simplejson import loads
from soc.modules.gsoc.logic.survey import getSurveysForProgram
class SurveyField(object):
"""Meta data for single field in the survey form.
"""
def __init__(self, fields, field_id):
"""Initialize the meta data dictionary for the field
"""
self.fields = fields
self.field_id = field_id
    # Assign the meta dictionary corresponding to an individual field
# to an object attribute
self.meta_dict = self.fields.get(self.field_id, {})
# If the field contains multiple choices, it contains additional
# meta data for each choice and which of them must be checked
self.choices = []
self.checked = []
def getFieldName(self):
"""Returns the name to be used as the property name in the survey record.
"""
return self.field_id
def getType(self):
"""Returns the type of the field to which it should be rendered.
Possible types:
1. input_text
2. textarea
3. radio
4. checkbox
"""
return self.meta_dict.get('field_type', '')
def getLabel(self):
"""Returns the label which should be shown along with the field.
"""
return urllib.unquote(self.meta_dict.get('label', ''))
def isRequired(self):
"""Returns True if the field is a mandatory field on the form else False
"""
return self.meta_dict.get('required', True)
def requireOtherField(self):
"""Returns True if field needs "Other" option to be rendered automatically.
"""
return self.meta_dict.get('other', False)
def getHelpText(self):
"""Returns the help text which should be shown along with the field.
"""
return self.meta_dict.get('tip', '')
def getValues(self):
"""Returns the list of options which should be rendered for the field.
"""
return self.meta_dict.get('values', '')
def getChoices(self):
"""Returns the list of choices for the field as 2-tuple.
This format of returning the list of 2-tuples where each 2-tuple
corresponds to a single option for the multiple choice fields is
the format that Django uses in its form. So we can directly use this
list in the Django forms.
"""
for choice in self.getValues():
value = urllib.unquote(choice.get('value'))
self.choices.append((value, value))
if choice['checked']:
self.checked.append(value)
return self.choices
def getCheckedChoices(self):
"""Returns the list of choices that must be checked as initial values.
"""
return self.checked
class SurveySchema(object):
"""Meta data containing the form elements needed to build surveys.
"""
def __init__(self, survey):
"""Intialize the Survey Schema from the provided survey entity.
"""
self.order, self.fields = loads(survey.schema)
def __iter__(self):
"""Iterator for providing the fields in order to be used to build surveys.
"""
for field_id in self.order:
yield SurveyField(self.fields, field_id)
def dictForSurveyModel(model, program, surveys):
"""Returns a dictionary of link id and entity pairs for given model.
Args:
model: The survey model class for which the dictionary must be built
program: The program to query
surveys: The list containing the link ids of the surveys
"""
survey_dict = dict([(e.link_id, e) for e in getSurveysForProgram(
model, program, surveys)])
# Create a sorted dictionary to ensure that the surveys are stored
# in the same order they were asked for in addition to giving key
# based access to surveys fetched
survey_sorted_dict = SortedDict()
for s in surveys:
survey = survey_dict.get(s)
if survey:
survey_sorted_dict[s] = survey
return survey_sorted_dict
| apache-2.0 | -4,689,414,171,984,274,000 | 29.585034 | 79 | 0.694395 | false |
base2Services/alfajor | alfajor/get_elb_metrics.py | 1 | 1182 | import sys
import boto.ec2.cloudwatch
import datetime
sys.path.append("alfajor")
from aws_base import AWS_BASE
from boto.exception import BotoServerError
class ElbMetrics(AWS_BASE):
def init(self):
self.set_conn(boto.ec2.cloudwatch.connect_to_region(**self.get_connection_settings()))
def get_elb_stats(self, name, metric, namespace, statistic, period=300, unit='Count'):
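        # Fetch the requested CloudWatch statistic for the named load balancer
        # over the last five minutes and print it rounded to an integer; an
        # empty result set means no traffic, which is reported as 0.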
try:
stats = self.get_conn().get_metric_statistics(
period,
datetime.datetime.utcnow() - datetime.timedelta(seconds=300),
datetime.datetime.utcnow(),
metric,
namespace,
statistic,
dimensions={'LoadBalancerName': [name]},
unit=unit
)
# if stats is empty, there is no traffic, therefore sum of requests is zero
if not stats:
sum_of_req = 0.0
else:
sum_of_req = (stats[0][statistic])
current_value = int(round(sum_of_req))
print current_value
except BotoServerError, error:
print >> sys.stderr, 'Boto API error: ', error
| mit | -9,160,569,638,259,769,000 | 33.764706 | 94 | 0.572758 | false |
overxfl0w/BioinspiredIA | langtons_ant.py | 1 | 3107 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# langtons_ant.py
#
# Copyright 2015 Overxflow13
UP,DOWN,RIGHT,LEFT,WHITE,BLACK = 0,1,2,3,0,1
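# Orientation and cell-colour codes; rows and columns wrap modulo the grid
# size, so the ant walks on a toroidal board (see Ant.nextCell).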
class Ant:
def __init__(self,row,col):
self.row,self.col,self.ori = row,col,DOWN
def getRow(self): return self.row
def getCol(self): return self.col
def getPos(self): return (self.row,self.col)
def getOri(self): return self.ori
def setRow(self,row): self.row = row
def setCol(self,col): self.col = col
def setPos(self,row,col): self.row,self.col = row,col
def setOri(self,ori): self.ori = ori
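	# Langton's ant rule: turn right on a white cell, left on a black cell,
	# advance one cell, and return the flipped colour for the cell just left.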
def makeStep(self,color,rows,cols):
self.turnRight() if color==WHITE else self.turnLeft()
self.nextCell(rows,cols)
return BLACK if color==WHITE else WHITE
def turnRight(self):
if self.ori==UP: self.ori = RIGHT
elif self.ori==RIGHT: self.ori = DOWN
elif self.ori==DOWN: self.ori = LEFT
elif self.ori==LEFT: self.ori = UP
def turnLeft(self):
if self.ori==UP: self.ori = LEFT
elif self.ori==RIGHT: self.ori = UP
elif self.ori==DOWN: self.ori = RIGHT
elif self.ori==LEFT: self.ori = DOWN
def nextCell(self,rows,cols):
""" Toroidal 2d space """
if self.ori==UP: self.row = (self.row-1)%rows
		if self.ori==RIGHT: self.col = (self.col+1)%cols
		if self.ori==DOWN: self.row = (self.row+1)%rows
		if self.ori==LEFT: self.col = (self.col-1)%cols
class Table:
def __init__(self,rows,cols,color):
self.table,self.rows,self.cols,self.color = None,rows,cols,color
self.initTable()
def getRows(self): return self.rows
def getCols(self): return self.cols
def getColor(self,row,col): return self.table[row][col]
def setRows(self,rows): self.rows = rows
def setCols(self,cols): self.cols = cols
def setColor(self,row,col,color): self.table[row][col] = color
def initTable(self):
self.table = [[] for row in xrange(self.rows)]
for i in xrange(self.rows):
for j in xrange(self.cols):
self.table[i].append(self.color)
def __str__(self,antRow,antCol):
for i in xrange(24): print
for row in xrange(self.getRows()):
for col in xrange(self.getCols()):
if row==antRow and col==antCol: print "*",
else: print self.getColor(row,col),
print "\n"
class AntAC:
def __init__(self,row,col,rows,cols,color,steps):
self.ant,self.table,self.steps = Ant(row,col),Table(rows,cols,color),steps
def __run__(self):
steps = 0
while steps<=self.steps:
antRow,antCol = self.ant.getRow(),self.ant.getCol()
newColor = self.ant.makeStep(self.table.getColor(antRow,antCol),self.table.getRows(),self.table.getCols())
self.table.setColor(antRow,antCol,newColor)
antRow,antCol = self.ant.getRow(),self.ant.getCol()
self.table.__str__(antRow,antCol)
self.__str__()
steps += 1; raw_input()
def __str__(self):
print "\t----- INFO -----\n"
print "-> Ant row:",self.ant.getRow()
print "-> Ant col:",self.ant.getCol()
print "-> Cell color:",self.table.getColor(self.ant.getRow(),self.ant.getCol())
print "-> Ant orientation:",self.ant.getOri()
a = AntAC(5,5,10,10,BLACK,100)
a.__run__()
| gpl-2.0 | 2,344,499,427,747,806,000 | 29.460784 | 109 | 0.650467 | false |
molobrakos/home-assistant | homeassistant/components/plant/__init__.py | 6 | 13645 | """Support for monitoring plants."""
from collections import deque
from datetime import datetime, timedelta
import logging
import voluptuous as vol
from homeassistant.components import group
from homeassistant.components.recorder.util import execute, session_scope
from homeassistant.const import (
ATTR_TEMPERATURE, ATTR_UNIT_OF_MEASUREMENT, CONF_SENSORS, STATE_OK,
STATE_PROBLEM, STATE_UNKNOWN, TEMP_CELSIUS)
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.entity_component import EntityComponent
from homeassistant.helpers.event import async_track_state_change
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = 'plant'
READING_BATTERY = 'battery'
READING_TEMPERATURE = ATTR_TEMPERATURE
READING_MOISTURE = 'moisture'
READING_CONDUCTIVITY = 'conductivity'
READING_BRIGHTNESS = 'brightness'
ATTR_PROBLEM = 'problem'
ATTR_SENSORS = 'sensors'
PROBLEM_NONE = 'none'
ATTR_MAX_BRIGHTNESS_HISTORY = 'max_brightness'
# we're not returning only one value, we're returning a dict here. So we need
# to have a separate literal for it to avoid confusion.
ATTR_DICT_OF_UNITS_OF_MEASUREMENT = 'unit_of_measurement_dict'
CONF_MIN_BATTERY_LEVEL = 'min_' + READING_BATTERY
CONF_MIN_TEMPERATURE = 'min_' + READING_TEMPERATURE
CONF_MAX_TEMPERATURE = 'max_' + READING_TEMPERATURE
CONF_MIN_MOISTURE = 'min_' + READING_MOISTURE
CONF_MAX_MOISTURE = 'max_' + READING_MOISTURE
CONF_MIN_CONDUCTIVITY = 'min_' + READING_CONDUCTIVITY
CONF_MAX_CONDUCTIVITY = 'max_' + READING_CONDUCTIVITY
CONF_MIN_BRIGHTNESS = 'min_' + READING_BRIGHTNESS
CONF_MAX_BRIGHTNESS = 'max_' + READING_BRIGHTNESS
CONF_CHECK_DAYS = 'check_days'
CONF_SENSOR_BATTERY_LEVEL = READING_BATTERY
CONF_SENSOR_MOISTURE = READING_MOISTURE
CONF_SENSOR_CONDUCTIVITY = READING_CONDUCTIVITY
CONF_SENSOR_TEMPERATURE = READING_TEMPERATURE
CONF_SENSOR_BRIGHTNESS = READING_BRIGHTNESS
DEFAULT_MIN_BATTERY_LEVEL = 20
DEFAULT_MIN_MOISTURE = 20
DEFAULT_MAX_MOISTURE = 60
DEFAULT_MIN_CONDUCTIVITY = 500
DEFAULT_MAX_CONDUCTIVITY = 3000
DEFAULT_CHECK_DAYS = 3
SCHEMA_SENSORS = vol.Schema({
vol.Optional(CONF_SENSOR_BATTERY_LEVEL): cv.entity_id,
vol.Optional(CONF_SENSOR_MOISTURE): cv.entity_id,
vol.Optional(CONF_SENSOR_CONDUCTIVITY): cv.entity_id,
vol.Optional(CONF_SENSOR_TEMPERATURE): cv.entity_id,
vol.Optional(CONF_SENSOR_BRIGHTNESS): cv.entity_id,
})
PLANT_SCHEMA = vol.Schema({
vol.Required(CONF_SENSORS): vol.Schema(SCHEMA_SENSORS),
vol.Optional(CONF_MIN_BATTERY_LEVEL,
default=DEFAULT_MIN_BATTERY_LEVEL): cv.positive_int,
vol.Optional(CONF_MIN_TEMPERATURE): vol.Coerce(float),
vol.Optional(CONF_MAX_TEMPERATURE): vol.Coerce(float),
vol.Optional(CONF_MIN_MOISTURE,
default=DEFAULT_MIN_MOISTURE): cv.positive_int,
vol.Optional(CONF_MAX_MOISTURE,
default=DEFAULT_MAX_MOISTURE): cv.positive_int,
vol.Optional(CONF_MIN_CONDUCTIVITY,
default=DEFAULT_MIN_CONDUCTIVITY): cv.positive_int,
vol.Optional(CONF_MAX_CONDUCTIVITY,
default=DEFAULT_MAX_CONDUCTIVITY): cv.positive_int,
vol.Optional(CONF_MIN_BRIGHTNESS): cv.positive_int,
vol.Optional(CONF_MAX_BRIGHTNESS): cv.positive_int,
vol.Optional(CONF_CHECK_DAYS,
default=DEFAULT_CHECK_DAYS): cv.positive_int,
})
DOMAIN = 'plant'
GROUP_NAME_ALL_PLANTS = 'all plants'
ENTITY_ID_ALL_PLANTS = group.ENTITY_ID_FORMAT.format('all_plants')
CONFIG_SCHEMA = vol.Schema({
DOMAIN: {
cv.string: PLANT_SCHEMA
},
}, extra=vol.ALLOW_EXTRA)
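# A minimal configuration sketch matching the schema above (the plant and sensor
# entity names are illustrative placeholders, not part of this module):
#
# plant:
#   lemon_tree:
#     sensors:
#       moisture: sensor.lemon_tree_moisture
#       battery: sensor.lemon_tree_battery
#     min_moisture: 25
#     check_days: 5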
# Flag for enabling/disabling the loading of the history from the database.
# This feature is turned off right now as its tests are not 100% stable.
ENABLE_LOAD_HISTORY = False
async def async_setup(hass, config):
"""Set up the Plant component."""
component = EntityComponent(
_LOGGER, DOMAIN, hass, group_name=GROUP_NAME_ALL_PLANTS)
entities = []
for plant_name, plant_config in config[DOMAIN].items():
_LOGGER.info("Added plant %s", plant_name)
entity = Plant(plant_name, plant_config)
entities.append(entity)
await component.async_add_entities(entities)
return True
class Plant(Entity):
"""Plant monitors the well-being of a plant.
It also checks the measurements against
configurable min and max values.
"""
READINGS = {
READING_BATTERY: {
ATTR_UNIT_OF_MEASUREMENT: '%',
'min': CONF_MIN_BATTERY_LEVEL,
},
READING_TEMPERATURE: {
ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS,
'min': CONF_MIN_TEMPERATURE,
'max': CONF_MAX_TEMPERATURE,
},
READING_MOISTURE: {
ATTR_UNIT_OF_MEASUREMENT: '%',
'min': CONF_MIN_MOISTURE,
'max': CONF_MAX_MOISTURE,
},
READING_CONDUCTIVITY: {
ATTR_UNIT_OF_MEASUREMENT: 'µS/cm',
'min': CONF_MIN_CONDUCTIVITY,
'max': CONF_MAX_CONDUCTIVITY,
},
READING_BRIGHTNESS: {
ATTR_UNIT_OF_MEASUREMENT: 'lux',
'min': CONF_MIN_BRIGHTNESS,
'max': CONF_MAX_BRIGHTNESS,
}
}
def __init__(self, name, config):
"""Initialize the Plant component."""
self._config = config
self._sensormap = dict()
self._readingmap = dict()
self._unit_of_measurement = dict()
for reading, entity_id in config['sensors'].items():
self._sensormap[entity_id] = reading
self._readingmap[reading] = entity_id
self._state = None
self._name = name
self._battery = None
self._moisture = None
self._conductivity = None
self._temperature = None
self._brightness = None
self._problems = PROBLEM_NONE
self._conf_check_days = 3 # default check interval: 3 days
if CONF_CHECK_DAYS in self._config:
self._conf_check_days = self._config[CONF_CHECK_DAYS]
self._brightness_history = DailyHistory(self._conf_check_days)
@callback
def state_changed(self, entity_id, _, new_state):
"""Update the sensor status.
This callback is triggered, when the sensor state changes.
"""
value = new_state.state
_LOGGER.debug("Received callback from %s with value %s",
entity_id, value)
if value == STATE_UNKNOWN:
return
reading = self._sensormap[entity_id]
if reading == READING_MOISTURE:
self._moisture = int(float(value))
elif reading == READING_BATTERY:
self._battery = int(float(value))
elif reading == READING_TEMPERATURE:
self._temperature = float(value)
elif reading == READING_CONDUCTIVITY:
self._conductivity = int(float(value))
elif reading == READING_BRIGHTNESS:
self._brightness = int(float(value))
self._brightness_history.add_measurement(
self._brightness, new_state.last_updated)
else:
raise HomeAssistantError(
"Unknown reading from sensor {}: {}".format(entity_id, value))
if ATTR_UNIT_OF_MEASUREMENT in new_state.attributes:
self._unit_of_measurement[reading] = \
new_state.attributes.get(ATTR_UNIT_OF_MEASUREMENT)
self._update_state()
def _update_state(self):
"""Update the state of the class based sensor data."""
result = []
for sensor_name in self._sensormap.values():
params = self.READINGS[sensor_name]
value = getattr(self, '_{}'.format(sensor_name))
if value is not None:
if sensor_name == READING_BRIGHTNESS:
result.append(self._check_min(
sensor_name, self._brightness_history.max, params))
else:
result.append(self._check_min(sensor_name, value, params))
result.append(self._check_max(sensor_name, value, params))
result = [r for r in result if r is not None]
if result:
self._state = STATE_PROBLEM
self._problems = ', '.join(result)
else:
self._state = STATE_OK
self._problems = PROBLEM_NONE
_LOGGER.debug("New data processed")
self.async_schedule_update_ha_state()
def _check_min(self, sensor_name, value, params):
"""If configured, check the value against the defined minimum value."""
if 'min' in params and params['min'] in self._config:
min_value = self._config[params['min']]
if value < min_value:
return '{} low'.format(sensor_name)
def _check_max(self, sensor_name, value, params):
"""If configured, check the value against the defined maximum value."""
if 'max' in params and params['max'] in self._config:
max_value = self._config[params['max']]
if value > max_value:
return '{} high'.format(sensor_name)
return None
async def async_added_to_hass(self):
"""After being added to hass, load from history."""
if ENABLE_LOAD_HISTORY and 'recorder' in self.hass.config.components:
# only use the database if it's configured
self.hass.async_add_job(self._load_history_from_db)
async_track_state_change(
self.hass, list(self._sensormap), self.state_changed)
for entity_id in self._sensormap:
state = self.hass.states.get(entity_id)
if state is not None:
self.state_changed(entity_id, None, state)
async def _load_history_from_db(self):
"""Load the history of the brightness values from the database.
This only needs to be done once during startup.
"""
from homeassistant.components.recorder.models import States
start_date = datetime.now() - timedelta(days=self._conf_check_days)
entity_id = self._readingmap.get(READING_BRIGHTNESS)
if entity_id is None:
_LOGGER.debug("Not reading the history from the database as "
"there is no brightness sensor configured")
return
_LOGGER.debug("Initializing values for %s from the database",
self._name)
with session_scope(hass=self.hass) as session:
            query = session.query(States).filter(
                (States.entity_id == entity_id.lower()) &
                (States.last_updated > start_date)
            ).order_by(States.last_updated.asc())
states = execute(query)
for state in states:
# filter out all None, NaN and "unknown" states
# only keep real values
try:
self._brightness_history.add_measurement(
int(state.state), state.last_updated)
except ValueError:
pass
_LOGGER.debug("Initializing from database completed")
self.async_schedule_update_ha_state()
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the entity."""
return self._state
@property
def state_attributes(self):
"""Return the attributes of the entity.
Provide the individual measurements from the
sensor in the attributes of the device.
"""
attrib = {
ATTR_PROBLEM: self._problems,
ATTR_SENSORS: self._readingmap,
ATTR_DICT_OF_UNITS_OF_MEASUREMENT: self._unit_of_measurement,
}
for reading in self._sensormap.values():
attrib[reading] = getattr(self, '_{}'.format(reading))
if self._brightness_history.max is not None:
attrib[ATTR_MAX_BRIGHTNESS_HISTORY] = self._brightness_history.max
return attrib
class DailyHistory:
"""Stores one measurement per day for a maximum number of days.
At the moment only the maximum value per day is kept.
"""
def __init__(self, max_length):
"""Create new DailyHistory with a maximum length of the history."""
self.max_length = max_length
self._days = None
self._max_dict = dict()
self.max = None
def add_measurement(self, value, timestamp=None):
"""Add a new measurement for a certain day."""
day = (timestamp or datetime.now()).date()
if value is None:
return
if self._days is None:
self._days = deque()
self._add_day(day, value)
else:
current_day = self._days[-1]
if day == current_day:
self._max_dict[day] = max(value, self._max_dict[day])
elif day > current_day:
self._add_day(day, value)
else:
_LOGGER.warning("Received old measurement, not storing it")
self.max = max(self._max_dict.values())
def _add_day(self, day, value):
"""Add a new day to the history.
Deletes the oldest day, if the queue becomes too long.
"""
if len(self._days) == self.max_length:
oldest = self._days.popleft()
del self._max_dict[oldest]
self._days.append(day)
self._max_dict[day] = value
| apache-2.0 | 874,114,372,506,767,600 | 35.190981 | 79 | 0.618587 | false |
pieleric/odemis | src/odemis/gui/dev/powermate.py | 2 | 9666 | # -*- coding: utf-8 -*-
"""
@author: Rinze de Laat
Copyright © 2015 Rinze de Laat, Éric Piel, Delmic
This file is part of Odemis.
Odemis is free software: you can redistribute it and/or modify it under the terms of the GNU
General Public License version 2 as published by the Free Software Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
Public License for more details.
You should have received a copy of the GNU General Public License along with Odemis. If not,
see http://www.gnu.org/licenses/.
"""
from __future__ import division
import logging
import os
import struct
import sys
import threading
import time
import wx
from odemis.acq.stream import StaticStream
from odemis.gui.evt import KnobRotateEvent, KnobPressEvent
class Powermate(threading.Thread):
""" Interface to Griffin PowerMate
It will translate the knob rotation movements into EVT_KNOB_ROTATE events
It also automatically turn on/off the led based on the whether the current
stream can focus.
"""
def __init__(self, main_data, **kwargs):
"""
Picks the first Powermate found, and
main_data (MainGUIData)
Raise:
NotImplementedError: if the OS doesn't support it
LookupError: if no Powermate is found
"""
# TODO: support other OS than Linux?
if not sys.platform.startswith('linux'):
raise NotImplementedError("Powermate only supported on Linux")
# Find the Powermate device (will stop here if nothing is found)
self.device = self._find_powermate_device()
# The currently opened GUI tab, so we can keep track of tab switching
self.current_tab = None
# To keep track of ALL streams associated with the current tab (addition, removal etc.)
self.tab_model_streams_va = None # .streams VA of the tab model
# All streams belonging to the current tab
self.current_tab_streams = ()
# Viewport to which we send our events
self.target_viewport = None
self.led_brightness = 255 # [0..255]
self.led_pulse_speed = 0 # [0..510]
self.keep_running = True
# Track tab switching, so we know when to look for a new viewport
main_data.tab.subscribe(self.on_tab_switch, init=True)
# Start listening for Knob input
threading.Thread.__init__(self, **kwargs)
self.daemon = True
self.start()
# TODO: allow to terminate the thread?
def on_tab_switch(self, tab):
""" Subscribe to focussed view changes when the opened tab is changed """
# Clear old tab subscriptions
if self.current_tab:
if hasattr(self.current_tab.tab_data_model, 'focussedView'):
# Clear the current subscription
self.current_tab.tab_data_model.focussedView.unsubscribe(self._on_focussed_view)
if self.tab_model_streams_va is not None:
self.tab_model_streams_va.unsubscribe(self.on_tab_stream_change)
self.current_tab = None
# Set new subscriptions
if hasattr(tab.tab_data_model, 'focussedView') and hasattr(tab, 'view_controller'):
self.tab_model_streams_va = tab.tab_data_model.streams
self.tab_model_streams_va.subscribe(self.on_tab_stream_change, init=True)
self.current_tab = tab
self.current_tab.tab_data_model.focussedView.subscribe(self._on_focussed_view,
init=True)
def on_tab_stream_change(self, streams):
""" Set the subscription for the stream when the tab's stream set changes """
if self.current_tab_streams:
for stream in self.current_tab_streams:
stream.should_update.unsubscribe(self._on_stream_update)
self.current_tab_streams = streams
if self.current_tab_streams:
for stream in self.current_tab_streams:
stream.should_update.subscribe(self._on_stream_update, init=True)
def _on_stream_update(self, _=None):
"""
Check all the streams of the currently focussed Viewport to see if the
LED should be on.
"""
if self.current_tab is None:
self.led_on(False)
return
view = self.current_tab.tab_data_model.focussedView.value
if view is None:
self.led_on(False)
return
for stream in view.stream_tree:
static = isinstance(stream, StaticStream)
updating = stream.should_update.value if hasattr(stream, "should_update") else False
if updating and stream.focuser and not static:
self.led_on(True)
break
else:
self.led_on(False)
def _on_focussed_view(self, view):
""" Set or clear the Viewport where we should send events to """
if view:
self.target_viewport = self.current_tab.view_controller.get_viewport_by_view(view)
self._on_stream_update()
logging.debug("New Viewport target set")
else:
self.target_viewport = None
self.led_on(False)
logging.debug("Viewport target cleared")
def _find_powermate_device(self):
try:
import evdev
except ImportError:
raise LookupError("python-evdev is not present")
# Look at all accessible /dev/input devices
for fn in evdev.util.list_devices():
d = evdev.InputDevice(fn)
# Check for PowerMate in the device name string
if "PowerMate" in d.name:
logging.info("Found Powermate device in %s", fn)
return d
else:
raise LookupError("No Powermate device found")
def run(self):
""" Listen for knob events and translate them into wx.Python events """
from evdev import ecodes
while self.keep_running:
try:
for evt in self.device.read_loop():
if self.target_viewport is not None:
if evt.type == ecodes.EV_REL:
knob_evt = KnobRotateEvent(
self.target_viewport.canvas.GetId(),
direction=(wx.RIGHT if evt.value > 0 else wx.LEFT),
step_value=evt.value,
device=self.device
)
wx.PostEvent(self.target_viewport.canvas, knob_evt)
elif evt.type == ecodes.EV_KEY and evt.value == 1:
knob_evt = KnobPressEvent(
self.target_viewport.canvas.GetId(),
device=self.device
)
wx.PostEvent(self.target_viewport.canvas, knob_evt)
except IOError:
logging.warn("Failed to communicate with the powermate, was unplugged?")
# Sleep and after try and find the device again
while True:
time.sleep(5)
try:
self.device = self._find_powermate_device()
break
except LookupError:
pass
except Exception:
logging.exception("Powermate listener failed")
self.keep_running = False
def led_on(self, on):
self.led_brightness = 255 if on else 0
self._set_led_state()
def led_pulse(self, pulse):
self.led_pulse_speed = 255 if pulse else 0
self._set_led_state()
def terminate(self):
self.led_on(False)
self.keep_running = False
if self.device:
self.device.close()
self.device = None
def _set_led_state(self, pulse_table=0, pulse_on_sleep=False):
""" Changes the led state of the powermate
pulse_table (0, 1, 2)
pulse_on_sleep (bool): starting pulsing when the device is suspended
"""
# What do these magic values mean:
# cf linux/drivers/input/misc/powermate.c:
# bits 0- 7: 8 bits: LED brightness
# bits 8-16: 9 bits: pulsing speed modifier (0 ... 510);
# 0-254 = slower, 255 = standard, 256-510 = faster
# bits 17-18: 2 bits: pulse table (0, 1, 2 valid)
# bit 19: 1 bit : pulse whilst asleep?
# bit 20: 1 bit : pulse constantly?
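        # Worked example of the packing below (values picked for illustration):
        # full brightness with a standard-speed pulse, i.e. brightness=255,
        # pulse_speed=255, pulse_table=0 and no sleep pulsing, packs to
        # 255 | (255 << 8) | (1 << 20) = 0x10FFFF.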
static_brightness = self.led_brightness & 0xff
pulse_speed = min(510, max(self.led_pulse_speed, 0))
pulse_table = min(2, max(pulse_table, 0))
pulse_on_sleep = not not pulse_on_sleep # not not = convert to 0/1
pulse_on_wake = 1 if pulse_speed else 0
magic = (
static_brightness |
(pulse_speed << 8) |
(pulse_table << 17) |
(pulse_on_sleep << 19) |
(pulse_on_wake << 20)
)
input_event_struct = "@llHHi"
data = struct.pack(input_event_struct, 0, 0, 0x04, 0x01, magic)
if self.device is None:
logging.debug("Powermate has disappeared, skipping led change")
return
try:
os.write(self.device.fd, data)
except OSError:
logging.info("Failed to communicate with the powermate, was unplugged?", exc_info=True)
self.device = None
| gpl-2.0 | -4,553,344,123,934,675,000 | 36.02682 | 99 | 0.580919 | false |
forging2012/taiga-back | taiga/projects/custom_attributes/migrations/0006_add_customattribute_field_type.py | 13 | 1156 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('custom_attributes', '0005_auto_20150505_1639'),
]
operations = [
migrations.AddField(
model_name='issuecustomattribute',
name='field_type',
field=models.CharField(max_length=5, verbose_name='type', choices=[('TEXT', 'Text'), ('MULTI', 'Multi-Line Text')], default='TEXT'),
preserve_default=True,
),
migrations.AddField(
model_name='taskcustomattribute',
name='field_type',
field=models.CharField(max_length=5, verbose_name='type', choices=[('TEXT', 'Text'), ('MULTI', 'Multi-Line Text')], default='TEXT'),
preserve_default=True,
),
migrations.AddField(
model_name='userstorycustomattribute',
name='field_type',
field=models.CharField(max_length=5, verbose_name='type', choices=[('TEXT', 'Text'), ('MULTI', 'Multi-Line Text')], default='TEXT'),
preserve_default=True,
),
]
| agpl-3.0 | 7,346,973,222,814,061,000 | 35.125 | 144 | 0.58218 | false |
Mte90/remo | remo/events/tests/test_views.py | 1 | 32178 | # -*- coding: utf-8 -*-
import datetime
import mock
from django.core import mail
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from django.utils.encoding import iri_to_uri
from django.utils.timezone import make_aware, now
from django_jinja.backend import Template as Jinja_Template
from mock import ANY
from nose.tools import eq_, ok_
from pytz import timezone
from remo.base.tests import RemoTestCase, requires_login, requires_permission
from remo.events.models import Event, EventComment, EventMetricOutcome
from remo.events.tests import (AttendanceFactory, EventCommentFactory,
EventFactory, EventMetricFactory,
EventMetricOutcomeFactory)
from remo.profiles.tests import FunctionalAreaFactory, UserFactory
from remo.reports import ACTIVITY_EVENT_ATTEND, ACTIVITY_EVENT_CREATE
from remo.reports.tests import ActivityFactory, CampaignFactory
class ViewsTest(RemoTestCase):
"""Tests related to Events Views."""
def setUp(self):
"""Setup method for some initial data."""
ActivityFactory.create(name=ACTIVITY_EVENT_ATTEND)
ActivityFactory.create(name=ACTIVITY_EVENT_CREATE)
functional_area = FunctionalAreaFactory.create()
metrics = EventMetricFactory.create_batch(3)
campaign = CampaignFactory.create()
self.data = {
'name': u'Test edit event',
'description': u'This is a description',
'external_link': '',
'categories': [functional_area.id],
'campaign': [campaign.id],
'venue': u'Hackerspace.GR',
'lat': 38.01697,
'lon': 23.7314,
'city': u'Athens',
'region': u'Attica',
'country': u'Greece',
'start_form_0_month': 01,
'start_form_0_day': 25,
'start_form_0_year': now().year + 1,
'start_form_1_hour': 04,
'start_form_1_minute': 01,
'end_form_0_month': 01,
'end_form_0_day': 03,
'end_form_0_year': now().year + 2,
'end_form_1_hour': 03,
'end_form_1_minute': 00,
'timezone': u'Europe/Athens',
'mozilla_event': u'on',
'estimated_attendance': u'10',
'actual_attendance': u'10',
'extra_content': u'This is extra content',
'planning_pad_url': u'',
'hashtag': u'#testevent',
'swag_bug_form': u'',
'budget_bug_form': u'',
'eventmetricoutcome_set-0-id': '',
'eventmetricoutcome_set-0-metric': metrics[0].id,
'eventmetricoutcome_set-0-expected_outcome': 100,
'eventmetricoutcome_set-1-id': '',
'eventmetricoutcome_set-1-metric': metrics[1].id,
'eventmetricoutcome_set-1-expected_outcome': 10,
'eventmetricoutcome_set-TOTAL_FORMS': 2,
'eventmetricoutcome_set-INITIAL_FORMS': 0}
def test_view_events_list(self):
"""Get list events page."""
response = self.client.get(reverse('events_list_events'))
self.assertJinja2TemplateUsed(response, 'list_events.jinja')
def test_view_event_page(self):
"""Get view event page."""
event = EventFactory.create()
response = self.client.get(event.get_absolute_url())
self.assertJinja2TemplateUsed(response, 'view_event.jinja')
@mock.patch('django.contrib.messages.error')
def test_post_comment_on_event_unauthed(self, mock_error):
"""Test post comment on event unauthorized."""
comment = 'This is a new comment'
event = EventFactory.create()
response = self.client.post(event.get_absolute_url(),
{'comment': comment},
follow=True)
self.assertJinja2TemplateUsed(response, 'main.jinja')
mock_error.assert_called_with(ANY, 'Permission Denied')
@mock.patch('django.contrib.messages.success')
def test_post_comment_on_event_rep(self, mock_success):
"""Test post comment on event as rep."""
# Test authenticated user
event = EventFactory.create()
kwargs = {'groups': ['Rep'],
'userprofile__receive_email_on_add_event_comment': True}
user = UserFactory.create(**kwargs)
comment = 'This is a new comment'
with self.login(user) as client:
response = client.post(event.get_absolute_url(), {'comment': comment}, follow=True)
comment = event.eventcomment_set.get(user=user)
ok_('view_event.jinja' in [template.template.name
for template in response.templates
if isinstance(template, Jinja_Template)])
mock_success.assert_called_with(ANY, 'Comment saved')
eq_(comment.comment, 'This is a new comment')
eq_(len(mail.outbox), 1)
@requires_login()
def test_post_delete_event_comment_unauth(self):
"""Test unauthorized delete event comment."""
event = EventFactory.create(slug='test-event')
user = UserFactory.create(groups=['Rep'])
comment = EventCommentFactory.create(event=event, user=user)
comment_delete = reverse('events_delete_event_comment',
kwargs={'slug': event.slug,
'pk': comment.id})
self.client.post(comment_delete, {'comment': comment}, follow=True)
ok_(EventComment.objects.filter(pk=comment.id).exists())
@requires_permission()
def test_post_delete_event_comment_user_no_perms(self):
"""Test delete event comment as rep without delete permissions."""
event = EventFactory.create()
user = UserFactory.create(groups=['Rep'])
comment = EventCommentFactory.create(event=event)
comment_delete = reverse('events_delete_event_comment',
kwargs={'slug': event.slug,
'pk': comment.id})
with self.login(user) as client:
client.post(comment_delete, follow=True)
ok_(EventComment.objects.filter(pk=comment.id).exists())
@mock.patch('django.contrib.messages.success')
def test_post_delete_event_comment_owner(self, mock_success):
""" Test delete event comment as event comment owner."""
event = EventFactory.create()
user = UserFactory.create(groups=['Rep'])
comment = EventCommentFactory.create(event=event, user=user)
comment_delete = reverse('events_delete_event_comment',
kwargs={'slug': event.slug,
'pk': comment.id})
with self.login(user) as client:
response = client.post(comment_delete, follow=True)
mock_success.assert_called_with(ANY, 'Comment successfully deleted.')
ok_(not EventComment.objects.filter(pk=comment.id).exists())
self.assertJinja2TemplateUsed(response, 'view_event.jinja')
@mock.patch('django.contrib.messages.success')
def test_post_delete_event_comment_admin(self, mock_success):
"""Test delete event comment as admin."""
event = EventFactory.create()
user = UserFactory.create(groups=['Admin'])
comment = EventCommentFactory.create(event=event)
comment_delete = reverse('events_delete_event_comment',
kwargs={'slug': event.slug,
'pk': comment.id})
with self.login(user) as client:
response = client.post(comment_delete, follow=True)
mock_success.assert_called_with(ANY, 'Comment successfully deleted.')
self.assertJinja2TemplateUsed(response, 'view_event.jinja')
def test_subscription_management_no_perms(self):
"""Subscribe to event without permissions."""
event = EventFactory()
response = self.client.post(reverse('events_subscribe_to_event',
kwargs={'slug': event.slug}),
follow=True)
self.assertJinja2TemplateUsed(response, 'main.jinja')
@mock.patch('django.contrib.messages.info')
def test_subscription_management_rep(self, mock_info):
""" Subscribe rep to event."""
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create()
with self.login(user) as client:
response = client.post(reverse('events_subscribe_to_event',
kwargs={'slug': event.slug}),
follow=True)
self.assertJinja2TemplateUsed(response, 'view_event.jinja')
ok_(mock_info.called, 'messages.info() was not called')
@mock.patch('django.contrib.messages.warning')
def test_subscription_management_subscribed(self, mock_warning):
""" Test subscribe already subscribed user."""
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create()
AttendanceFactory.create(user=user, event=event)
with self.login(user) as client:
response = client.post(reverse('events_subscribe_to_event',
kwargs={'slug': event.slug}),
follow=True)
self.assertJinja2TemplateUsed(response, 'view_event.jinja')
msg = 'You are already subscribed to this event.'
mock_warning.assert_called_with(ANY, msg)
@requires_login()
def test_unsubscribe_from_event_unauth(self):
"""Unsubscribe from event anonymous user."""
event = EventFactory.create()
self.client.post(reverse('events_unsubscribe_from_event',
kwargs={'slug': event.slug}), follow=True)
@mock.patch('django.contrib.messages.success')
def test_unsubscribe_from_event_subscribed(self, mock_success):
"""Test unsubscribe from event subscribed user."""
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create()
AttendanceFactory.create(user=user, event=event)
with self.login(user) as client:
response = client.post(reverse('events_unsubscribe_from_event',
kwargs={'slug': event.slug}),
follow=True)
self.assertJinja2TemplateUsed(response, 'view_event.jinja')
msg = 'You have unsubscribed from this event.'
mock_success.assert_called_with(ANY, msg)
@mock.patch('django.contrib.messages.warning')
def test_unsubscribe_from_event_unsubscribed(self, mock_warning):
"""Test unsubscribe from event without subscription."""
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create()
with self.login(user) as client:
response = client.post(reverse('events_unsubscribe_from_event',
kwargs={'slug': event.slug}),
follow=True)
self.assertJinja2TemplateUsed(response, 'view_event.jinja')
msg = 'You are not subscribed to this event.'
mock_warning.assert_called_with(ANY, msg)
@requires_login()
def test_delete_event_unauthorized(self):
"""Test delete event unauthorized."""
event = EventFactory.create()
self.client.get(reverse('events_delete_event',
kwargs={'slug': event.slug}), follow=True)
ok_(Event.objects.filter(pk=event.id).exists())
def test_delete_event_no_permissions(self):
"""Test delete event no permissions."""
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create()
with self.login(user) as client:
response = client.get(reverse('events_delete_event',
kwargs={'slug': event.slug}),
follow=True)
self.assertJinja2TemplateUsed(response, 'main.jinja')
ok_(Event.objects.filter(pk=event.id).exists())
@mock.patch('django.contrib.messages.success')
def test_delete_event_owner(self, mock_success):
"""Test delete event with owner permissions."""
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create(owner=user)
with self.login(user) as client:
response = client.post(reverse('events_delete_event',
kwargs={'slug': event.slug}),
follow=True)
self.assertJinja2TemplateUsed(response, 'list_events.jinja')
ok_(not Event.objects.filter(pk=event.id).exists())
mock_success.assert_called_with(ANY, 'Event successfully deleted.')
@mock.patch('django.contrib.messages.success')
def test_delete_event_mentor(self, mock_success):
"""Test delete event with mentor permissions."""
user = UserFactory.create(groups=['Mentor'])
event = EventFactory.create()
with self.login(user) as client:
response = client.post(reverse('events_delete_event',
kwargs={'slug': event.slug}),
follow=True)
self.assertJinja2TemplateUsed(response, 'list_events.jinja')
ok_(not Event.objects.filter(pk=event.id).exists())
mock_success.assert_called_with(ANY, 'Event successfully deleted.')
@mock.patch('django.contrib.messages.success')
def test_delete_event_councelor(self, mock_success):
"""Test delete event with councelor permissions."""
user = UserFactory.create(groups=['Council'])
event = EventFactory.create()
with self.login(user) as client:
response = client.post(reverse('events_delete_event',
kwargs={'slug': event.slug}),
follow=True)
self.assertJinja2TemplateUsed(response, 'list_events.jinja')
ok_(not Event.objects.filter(pk=event.id).exists())
mock_success.assert_called_with(ANY, 'Event successfully deleted.')
@mock.patch('django.contrib.messages.success')
def test_delete_event_admin(self, mock_success):
"""Test delete event with admin permissions."""
user = UserFactory.create(groups=['Admin'])
event = EventFactory.create()
with self.login(user) as client:
response = client.post(reverse('events_delete_event', kwargs={'slug': event.slug}),
follow=True)
self.assertJinja2TemplateUsed(response, 'list_events.jinja')
ok_(not Event.objects.filter(pk=event.id).exists())
mock_success.assert_called_with(ANY, 'Event successfully deleted.')
def test_export_event_to_ical(self):
"""Test ical export."""
event = EventFactory.create()
response = self.client.get(reverse('events_icalendar_event',
kwargs={'slug': event.slug}))
self.assertJinja2TemplateUsed(response, 'multi_event_ical_template.jinja')
self.failUnless(response['Content-Type'].startswith('text/calendar'))
def test_multi_event_ical_export(self):
"""Test multiple event ical export."""
EventFactory.create_batch(2)
# Export all events to iCal
period = 'all'
response = self.client.get(reverse('multiple_event_ical',
kwargs={'period': period}),
follow=True)
self.failUnless(response['Content-Type'].startswith('text/calendar'))
eq_(len(response.context['events']), 2)
def test_multi_event_ical_export_past(self):
"""Test multiple past event ical export."""
EventFactory.create_batch(2)
# Export past events to iCal
period = 'past'
response = self.client.get(reverse('multiple_event_ical',
kwargs={'period': period}),
follow=True)
self.failUnless(response['Content-Type'].startswith('text/calendar'))
eq_(len(response.context['events']), 2)
def test_multi_event_ical_export_future(self):
"""Test multiple past event ical export."""
start = now() + datetime.timedelta(days=1)
end = now() + datetime.timedelta(days=2)
EventFactory.create_batch(2, start=start, end=end)
# Export future events to iCal
period = 'future'
response = self.client.get(reverse('multiple_event_ical',
kwargs={'period': period}),
follow=True)
self.failUnless(response['Content-Type'].startswith('text/calendar'))
eq_(len(response.context['events']), 2)
def test_multi_event_ical_export_custom(self):
"""Test multiple event ical export with custom date."""
event_start = now() + datetime.timedelta(days=1)
event_end = now() + datetime.timedelta(days=2)
EventFactory.create_batch(2, start=event_start, end=event_end)
period = 'custom'
response = self.client.get(reverse('multiple_event_ical',
kwargs={'period': period}),
follow=True)
self.failUnless(response['Content-Type'].startswith('text/calendar'))
start = (event_start - datetime.timedelta(days=1)).strftime('%Y-%m-%d')
end = (event_end + datetime.timedelta(days=1)).strftime('%Y-%m-%d')
query = 'custom/start/%s/end/%s' % (start, end)
response = self.client.get(reverse('multiple_event_ical',
kwargs={'period': query}),
follow=True)
self.failUnless(response['Content-Type'].startswith('text/calendar'))
eq_(len(response.context['events']), 2)
def test_multi_event_ical_export_search(self):
"""Test multiple past event ical export."""
EventFactory.create(name='Test event')
# Test 'search' query
term = 'Test event'
search = 'custom/search/%s' % term
response = self.client.get(reverse('multiple_event_ical',
kwargs={'period': search}),
follow=True)
self.failUnless(response['Content-Type'].startswith('text/calendar'))
eq_(len(response.context['events']), 1)
def test_multi_event_ical_export_extra_chars(self):
"""Test multi event ical export with extra chars.
See bug 956271 for details.
"""
url = '/events/period/future/search/méxico.!@$%&*()/ical/'
response = self.client.get(url, follow=True)
self.failUnless(response['Content-Type'].startswith('text/calendar'))
@mock.patch('django.contrib.messages.success')
def test_post_create_event_rep(self, mock_success):
"""Test create new event with rep permissions."""
user = UserFactory.create(groups=['Rep'])
with self.login(user) as client:
response = client.post(reverse('events_new_event'), self.data,
follow=True)
mock_success.assert_called_with(ANY, 'Event successfully created.')
eq_(mock_success.call_count, 1)
eq_(response.status_code, 200)
def test_get_create_event_rep(self):
"""Test get create event page with rep permissions."""
user = UserFactory.create(groups=['Rep'])
url = reverse('events_new_event')
with self.login(user) as client:
response = client.get(url, follow=True)
eq_(response.request['PATH_INFO'], url)
ok_(response.context['creating'])
ok_(not response.context['event_form'].editable_owner)
self.assertJinja2TemplateUsed(response, 'edit_event.jinja')
def test_get_edit_event_rep(self):
"""Test get event edit page with rep permissions."""
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create()
with self.login(user) as client:
response = client.get(event.get_absolute_edit_url(), follow=True)
eq_(response.request['PATH_INFO'], event.get_absolute_edit_url())
ok_(not response.context['creating'])
ok_(not response.context['event_form'].editable_owner)
eq_(response.context['event'].slug, event.slug)
self.assertJinja2TemplateUsed(response, 'edit_event.jinja')
@mock.patch('django.contrib.messages.success')
def test_get_edit_event_admin(self, mock_success):
"""Test get event edit page with admin permissions"""
user = UserFactory.create(groups=['Admin'])
event = EventFactory.create()
with self.login(user) as client:
response = client.get(event.get_absolute_edit_url(), follow=True)
eq_(response.request['PATH_INFO'], event.get_absolute_edit_url())
ok_(not response.context['creating'])
ok_(response.context['event_form'].editable_owner)
eq_(response.context['event'].slug, event.slug)
self.assertJinja2TemplateUsed(response, 'edit_event.jinja')
@mock.patch('django.contrib.messages.success')
@override_settings(ETHERPAD_URL="http://example.com")
@override_settings(ETHERPAD_PREFIX="remo-")
def test_edit_event_rep(self, mock_success):
"""Test edit event with rep permissions."""
user = UserFactory.create(groups=['Rep'])
start = now() + datetime.timedelta(days=10)
end = now() + datetime.timedelta(days=20)
event = EventFactory.create(owner=user, start=start, end=end,
actual_attendance=None)
times_edited = event.times_edited
with self.login(user) as client:
response = client.post(event.get_absolute_edit_url(), self.data, follow=True)
mock_success.assert_called_with(ANY, 'Event successfully updated.')
eq_(mock_success.call_count, 1)
eq_(response.request['PATH_INFO'], event.get_absolute_url())
event = Event.objects.get(slug=event.slug)
eq_(event.times_edited, times_edited + 1)
eq_(event.owner, user)
# TODO: replace the following section with form tests
# Test fields with the same name in POST data and models
excluded = ['planning_pad_url', 'lat', 'lon', 'mozilla_event',
'categories', 'campaign']
for field in set(self.data).difference(set(excluded)):
if getattr(event, field, None):
eq_(str(getattr(event, field)), self.data[field])
# Test excluded fields
pad_url = 'http://example.com/remo-' + event.slug
mozilla_event = {'on': True, 'off': False}
eq_(set(self.data['categories']),
set(event.categories.values_list('id', flat=True)))
eq_(self.data['campaign'], [event.campaign_id])
eq_(event.planning_pad_url, pad_url)
eq_(event.lat, self.data['lat'])
eq_(event.lon, self.data['lon'])
eq_(event.mozilla_event, mozilla_event[self.data['mozilla_event']])
# Ensure event metrics are saved
metrics = (EventMetricOutcome.objects.filter(event=event)
.values_list('metric__id', 'expected_outcome'))
for i in range(0, 2):
metric = self.data['eventmetricoutcome_set-%d-metric' % i]
outcome_key = 'eventmetricoutcome_set-%d-expected_outcome' % i
outcome = self.data[outcome_key]
self.assertTrue((metric, outcome) in metrics)
# Ensure event start/end is saved
month = self.data['start_form_0_month']
day = self.data['start_form_0_day']
year = self.data['start_form_0_year']
hour = self.data['start_form_1_hour']
minute = self.data['start_form_1_minute']
zone = timezone(self.data['timezone'])
start = datetime.datetime(year, month, day, hour, minute)
eq_(make_aware(start, zone), event.start)
month = self.data['end_form_0_month']
day = self.data['end_form_0_day']
year = self.data['end_form_0_year']
hour = self.data['end_form_1_hour']
minute = self.data['end_form_1_minute']
end = datetime.datetime(year, month, day, hour, minute)
eq_(make_aware(end, zone), event.end)
def test_required_fields(self):
"""Test required fields error handling"""
# TODO: replace the following test with form tests
# Login as test-event owner
user = UserFactory.create(groups=['Rep'])
event = EventFactory()
# Test invalid event date
invalid_data = self.data.copy()
invalid_data['end_form_0_year'] = invalid_data['start_form_0_year'] - 1
with self.login(user) as client:
response = client.post(event.get_absolute_edit_url(), invalid_data, follow=True)
self.assertNotEqual(response.request['PATH_INFO'],
event.get_absolute_url())
# Test invalid number of metrics
invalid_data = self.data.copy()
invalid_data['eventmetricoutcome_set-TOTAL_FORMS'] = 1
invalid_data.pop('eventmetricoutcome_set-0-id')
invalid_data.pop('eventmetricoutcome_set-0-metric')
invalid_data.pop('eventmetricoutcome_set-0-expected_outcome')
invalid_data.pop('eventmetricoutcome_set-1-id')
invalid_data.pop('eventmetricoutcome_set-1-metric')
invalid_data.pop('eventmetricoutcome_set-1-expected_outcome')
response = self.client.post(event.get_absolute_edit_url(),
invalid_data,
follow=True)
self.assertNotEqual(response.request['PATH_INFO'],
event.get_absolute_url())
# Test invalid event name, description, venue, city
fields = ['name', 'description', 'venue', 'city']
for field in fields:
invalid_data = self.data.copy()
invalid_data[field] = ''
response = self.client.post(event.get_absolute_edit_url(),
invalid_data,
follow=True)
self.assertNotEqual(response.request['PATH_INFO'],
event.get_absolute_url())
@mock.patch('django.contrib.auth.models.User.has_perm')
def test_edit_event_page_no_delete_perms(self, mock_perm):
"""Test view edit event page without delete permissions."""
user = UserFactory.create(groups=['Admin'])
event = EventFactory.create()
mock_perm.side_effect = [True, False]
with self.login(user) as client:
response = client.get(event.get_absolute_edit_url(), follow=True)
eq_(response.request['PATH_INFO'], event.get_absolute_edit_url())
ok_(not response.context['creating'])
ok_(response.context['event_form'].editable_owner)
eq_(response.context['event'].slug, event.slug)
ok_(not response.context['can_delete_event'])
@mock.patch('django.contrib.messages.success')
def test_clone_event_legacy_metrics(self, mock_success):
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create(has_new_metrics=False)
event_clone_url = reverse('events_clone_event',
kwargs={'slug': 'test-edit-event'})
with self.login(user) as client:
response = client.post(event_clone_url, self.data, follow=True)
mock_success.assert_called_with(ANY, 'Event successfully created.')
event = Event.objects.get(slug='test-edit-event')
cloned_event_url = reverse('events_view_event',
kwargs={'slug': 'test-edit-event'})
ok_(event.has_new_metrics)
eq_(response.request['PATH_INFO'], cloned_event_url)
@mock.patch('django.contrib.messages.success')
def test_clone_event_with_actual_outcome(self, mock_success):
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create()
metrics = EventMetricOutcomeFactory.create_batch(3, event=event)
event_clone_url = reverse('events_clone_event',
kwargs={'slug': 'test-edit-event'})
with self.login(user) as client:
response = client.post(event_clone_url, self.data, follow=True)
mock_success.assert_called_with(ANY, 'Event successfully created.')
event = Event.objects.get(slug='test-edit-event')
cloned_event_url = reverse('events_view_event',
kwargs={'slug': 'test-edit-event'})
eq_(response.request['PATH_INFO'], cloned_event_url)
ok_(event.eventmetricoutcome_set.all().exists())
metrics_ids = map(lambda x: x.id, metrics)
for m in event.eventmetricoutcome_set.all():
eq_(m.outcome, None)
ok_(m.id not in metrics_ids)
@mock.patch('django.contrib.messages.success')
def test_clone_event_without_actual_outcome(self, mock_success):
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create()
metrics = EventMetricOutcomeFactory.create_batch(3, event=event,
outcome=None)
event_clone_url = reverse('events_clone_event',
kwargs={'slug': 'test-edit-event'})
with self.login(user) as client:
response = client.post(event_clone_url, self.data, follow=True)
mock_success.assert_called_with(ANY, 'Event successfully created.')
event = Event.objects.get(slug='test-edit-event')
cloned_event_url = reverse('events_view_event',
kwargs={'slug': 'test-edit-event'})
eq_(response.request['PATH_INFO'], cloned_event_url)
ok_(event.eventmetricoutcome_set.all().exists())
metrics_ids = map(lambda x: x.id, metrics)
for m in event.eventmetricoutcome_set.all():
ok_(m.id not in metrics_ids)
@mock.patch('django.contrib.messages.success')
def test_email_event_attendees(self, mock_success):
"""Send email to selected event attendees."""
user = UserFactory.create(groups=['Rep'])
event = EventFactory.create(slug='test-event', owner=user)
AttendanceFactory.create_batch(3, event=event)
reps = event.attendees.all()
valid_data = dict()
for rep in reps:
field_name = '%s' % rep.id
valid_data[field_name] = 'True'
valid_data['subject'] = 'This is the mail subject'
valid_data['body'] = 'This is the mail subject'
valid_data['slug'] = 'test-event'
url = reverse('email_attendees', kwargs={'slug': event.slug})
with self.login(user) as client:
response = client.post(url, valid_data, follow=True)
ok_('view_event.jinja' in [template.template.name
for template in response.templates
if isinstance(template, Jinja_Template)])
mock_success.assert_called_with(ANY, 'Email sent successfully.')
eq_(len(mail.outbox), 4)
for i in range(0, len(mail.outbox)):
eq_(len(mail.outbox[i].cc), 1)
eq_(len(mail.outbox[i].to), 1)
@mock.patch('remo.events.views.iri_to_uri', wraps=iri_to_uri)
def test_view_redirect_list_events(self, mocked_uri):
"""Test redirect to events list."""
events_url = '/events/Paris & Orléans'
response = self.client.get(events_url, follow=True)
mocked_uri.assert_called_once_with(u'/Paris & Orléans')
expected_url = '/events/#/Paris%20&%20Orl%C3%A9ans'
self.assertRedirects(response, expected_url=expected_url,
status_code=301, target_status_code=200)
self.assertJinja2TemplateUsed(response, 'list_events.jinja')
| bsd-3-clause | 1,955,599,350,737,202,400 | 46.177419 | 95 | 0.594126 | false |
coto/beecoss | bp_includes/lib/basehandler.py | 1 | 11353 | # *-* coding: UTF-8 *-*
# standard library imports
import logging
import re
import pytz
import os
# related third party imports
import webapp2
from webapp2_extras import jinja2
from webapp2_extras import auth
from webapp2_extras import sessions
# local application/library specific imports
from bp_includes import models
from bp_includes.lib import utils, i18n
from babel import Locale
def generate_csrf_token():
session = sessions.get_store().get_session()
if '_csrf_token' not in session:
session['_csrf_token'] = utils.random_string()
return session['_csrf_token']
def jinja2_factory(app):
j = jinja2.Jinja2(app)
j.environment.filters.update({
# Set filters.
# ...
})
j.environment.globals.update({
# Set global variables.
'csrf_token': generate_csrf_token,
'uri_for': webapp2.uri_for,
'getattr': getattr,
})
j.environment.tests.update({
# Set test.
# ...
})
return j
class ViewClass:
"""
ViewClass to insert variables into the template.
ViewClass is used in BaseHandler to promote variables automatically that can be used
in jinja2 templates.
Use case in a BaseHandler Class:
self.view.var1 = "hello"
self.view.array = [1, 2, 3]
self.view.dict = dict(a="abc", b="bcd")
Can be accessed in the template by just using the variables liek {{var1}} or {{dict.b}}
"""
pass
class BaseHandler(webapp2.RequestHandler):
"""
BaseHandler for all requests
Holds the auth and session properties so they
are reachable for all requests
"""
def __init__(self, request, response):
""" Override the initialiser in order to set the language.
"""
self.initialize(request, response)
self.locale = i18n.set_locale(self, request)
self.view = ViewClass()
def dispatch(self):
"""
Get a session store for this request.
"""
self.session_store = sessions.get_store(request=self.request)
try:
# csrf protection
if self.request.method == "POST" and not self.request.path.startswith('/taskqueue'):
token = self.session.get('_csrf_token')
if not token or token != self.request.get('_csrf_token'):
self.abort(403)
# Dispatch the request.
webapp2.RequestHandler.dispatch(self)
finally:
# Save all sessions.
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def user_model(self):
"""Returns the implementation of the user model.
Keep consistency when config['webapp2_extras.auth']['user_model'] is set.
"""
return self.auth.store.user_model
@webapp2.cached_property
def auth(self):
return auth.get_auth()
@webapp2.cached_property
def session_store(self):
return sessions.get_store(request=self.request)
@webapp2.cached_property
def session(self):
# Returns a session using the default cookie key.
return self.session_store.get_session()
@webapp2.cached_property
def get_theme(self):
return os.environ['theme']
@webapp2.cached_property
def messages(self):
return self.session.get_flashes(key='_messages')
def add_message(self, message, level=None):
self.session.add_flash(message, level, key='_messages')
@webapp2.cached_property
def auth_config(self):
"""
Dict to hold urls for login/logout
"""
return {
'login_url': self.uri_for('login'),
'logout_url': self.uri_for('logout')
}
@webapp2.cached_property
def language(self):
return str(Locale.parse(self.locale).language)
@webapp2.cached_property
def user(self):
return self.auth.get_user_by_session()
@webapp2.cached_property
def user_id(self):
return str(self.user['user_id']) if self.user else None
@webapp2.cached_property
def user_key(self):
if self.user:
user_info = self.user_model.get_by_id(long(self.user_id))
return user_info.key
return None
@webapp2.cached_property
def username(self):
if self.user:
try:
user_info = self.user_model.get_by_id(long(self.user_id))
if not user_info.activated:
self.auth.unset_session()
self.redirect_to('home')
else:
return str(user_info.username)
except AttributeError, e:
# avoid AttributeError when the session was delete from the server
logging.error(e)
self.auth.unset_session()
self.redirect_to('home')
return None
@webapp2.cached_property
def email(self):
if self.user:
try:
user_info = self.user_model.get_by_id(long(self.user_id))
return user_info.email
except AttributeError, e:
# avoid AttributeError when the session was delete from the server
logging.error(e)
self.auth.unset_session()
self.redirect_to('home')
return None
@webapp2.cached_property
def provider_uris(self):
login_urls = {}
continue_url = self.request.get('continue_url')
for provider in self.provider_info:
if continue_url:
login_url = self.uri_for("social-login", provider_name=provider, continue_url=continue_url)
else:
login_url = self.uri_for("social-login", provider_name=provider)
login_urls[provider] = login_url
return login_urls
@webapp2.cached_property
def provider_info(self):
return models.SocialUser.PROVIDERS_INFO
@webapp2.cached_property
def path_for_language(self):
"""
Get the current path + query_string without language parameter (hl=something)
Useful to put it on a template to concatenate with '&hl=NEW_LOCALE'
Example: .../?hl=en_US
"""
path_lang = re.sub(r'(^hl=(\w{5})\&*)|(\&hl=(\w{5})\&*?)', '', str(self.request.query_string))
return self.request.path + "?" if path_lang == "" else str(self.request.path) + "?" + path_lang
@property
def locales(self):
"""
returns a dict of locale codes to locale display names in both the current locale and the localized locale
example: if the current locale is es_ES then locales['en_US'] = 'Ingles (Estados Unidos) - English (United States)'
"""
if not self.app.config.get('locales'):
return None
locales = {}
for l in self.app.config.get('locales'):
current_locale = Locale.parse(self.locale)
language = current_locale.languages[l.split('_')[0]]
territory = current_locale.territories[l.split('_')[1]]
localized_locale_name = Locale.parse(l).display_name.capitalize()
locales[l] = language.capitalize() + " (" + territory.capitalize() + ") - " + localized_locale_name
return locales
@webapp2.cached_property
def tz(self):
tz = [(tz, tz.replace('_', ' ')) for tz in pytz.all_timezones]
tz.insert(0, ("", ""))
return tz
@webapp2.cached_property
def get_user_tz(self):
user = self.current_user
if user:
if hasattr(user, 'tz') and user.tz:
return pytz.timezone(user.tz)
return pytz.timezone('UTC')
@webapp2.cached_property
def countries(self):
return Locale.parse(self.locale).territories if self.locale else []
@webapp2.cached_property
def countries_tuple(self):
countries = self.countries
if "001" in countries:
del (countries["001"])
countries = [(key, countries[key]) for key in countries]
countries.append(("", ""))
countries.sort(key=lambda tup: tup[1])
return countries
@webapp2.cached_property
def current_user(self):
user = self.auth.get_user_by_session()
if user:
return self.user_model.get_by_id(user['user_id'])
return None
@webapp2.cached_property
def is_mobile(self):
return utils.set_device_cookie_and_return_bool(self)
@webapp2.cached_property
def jinja2(self):
return jinja2.get_jinja2(factory=jinja2_factory, app=self.app)
@webapp2.cached_property
def get_base_layout(self):
"""
Get the current base layout template for jinja2 templating. Uses the variable base_layout set in config
or if there is a base_layout defined, use the base_layout.
"""
return self.base_layout if hasattr(self, 'base_layout') else self.app.config.get('base_layout')
def set_base_layout(self, layout):
"""
Set the base_layout variable, thereby overwriting the default layout template name in config.py.
"""
self.base_layout = layout
def render_template(self, filename, **kwargs):
locales = self.app.config.get('locales') or []
locale_iso = None
language = ''
territory = ''
language_id = self.app.config.get('app_lang')
if self.locale and len(locales) > 1:
locale_iso = Locale.parse(self.locale)
language_id = locale_iso.language
territory_id = locale_iso.territory
language = locale_iso.languages[language_id]
territory = locale_iso.territories[territory_id]
# make all self.view variables available in jinja2 templates
if hasattr(self, 'view'):
kwargs.update(self.view.__dict__)
# set or overwrite special vars for jinja templates
kwargs.update({
'google_analytics_code': self.app.config.get('google_analytics_code'),
'app_name': self.app.config.get('app_name'),
'theme': self.get_theme,
'user_id': self.user_id,
'username': self.username,
'email': self.email,
'url': self.request.url,
'path': self.request.path,
'query_string': self.request.query_string,
'path_for_language': self.path_for_language,
'is_mobile': self.is_mobile,
'locale_iso': locale_iso, # babel locale object
'locale_language': language.capitalize() + " (" + territory.capitalize() + ")", # babel locale object
'locale_language_id': language_id, # babel locale object
'locales': self.locales,
'provider_uris': self.provider_uris,
'provider_info': self.provider_info,
'enable_federated_login': self.app.config.get('enable_federated_login'),
'base_layout': self.get_base_layout
})
kwargs.update(self.auth_config)
if hasattr(self, 'form'):
kwargs['form'] = self.form
if self.messages:
kwargs['messages'] = self.messages
self.response.headers.add_header('X-UA-Compatible', 'IE=Edge,chrome=1')
self.response.write(self.jinja2.render_template(filename, **kwargs)) | lgpl-3.0 | 4,911,491,595,828,795,000 | 33.096096 | 123 | 0.594909 | false |
projectcalico/calico-nova | nova/openstack/common/systemd.py | 24 | 3057 | # Copyright 2012-2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Helper module for systemd service readiness notification.
"""
import os
import socket
import sys
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
def _abstractify(socket_name):
if socket_name.startswith('@'):
# abstract namespace socket
socket_name = '\0%s' % socket_name[1:]
return socket_name
def _sd_notify(unset_env, msg):
notify_socket = os.getenv('NOTIFY_SOCKET')
if notify_socket:
sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
try:
sock.connect(_abstractify(notify_socket))
sock.sendall(msg)
if unset_env:
del os.environ['NOTIFY_SOCKET']
except EnvironmentError:
LOG.debug("Systemd notification failed", exc_info=True)
finally:
sock.close()
def notify():
"""Send notification to Systemd that service is ready.
For details see
http://www.freedesktop.org/software/systemd/man/sd_notify.html
"""
_sd_notify(False, 'READY=1')
def notify_once():
"""Send notification once to Systemd that service is ready.
Systemd sets NOTIFY_SOCKET environment variable with the name of the
socket listening for notifications from services.
This method removes the NOTIFY_SOCKET environment variable to ensure
notification is sent only once.
"""
_sd_notify(True, 'READY=1')
def onready(notify_socket, timeout):
"""Wait for systemd style notification on the socket.
:param notify_socket: local socket address
:type notify_socket: string
:param timeout: socket timeout
:type timeout: float
:returns: 0 service ready
1 service not ready
2 timeout occurred
"""
sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
sock.settimeout(timeout)
sock.bind(_abstractify(notify_socket))
try:
msg = sock.recv(512)
except socket.timeout:
return 2
finally:
sock.close()
if 'READY=1' in msg:
return 0
else:
return 1
if __name__ == '__main__':
# simple CLI for testing
if len(sys.argv) == 1:
notify()
elif len(sys.argv) >= 2:
timeout = float(sys.argv[1])
notify_socket = os.getenv('NOTIFY_SOCKET')
if notify_socket:
retval = onready(notify_socket, timeout)
sys.exit(retval)
| apache-2.0 | 3,678,266,951,559,471,600 | 27.839623 | 78 | 0.63788 | false |
DeviaVir/slackpy | slackpy/slackpy.py | 1 | 6552 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'Takahiro Ikeuchi'
import os
import requests
import json
import traceback
from argparse import ArgumentParser
class SlackLogger:
def __init__(self, web_hook_url, channel=None, username='Logger'):
self.web_hook_url = web_hook_url
self.username = username
if channel is None:
self.channel = None
elif channel.startswith('#') or channel.startswith('@'):
self.channel = channel
else:
raise ValueError('channel must be started with "#" or "@".')
def __build_payload(self, message, title, color, fallback, fields):
        if fields == '':
__fields = {
"title": title,
"text": message,
"color": color,
"fallback": fallback
}
__attachments = {
"fields": __fields
}
else:
__attachments = [{
"fallback": fallback,
"color": color,
"text": message,
"fields": fields
}]
payload = {
"channel": self.channel,
"username": self.username,
"attachments": __attachments
}
return payload
def __send_notification(self, message, title, color='good', fallback='',
fields=''):
"""Send a message to a channel.
Args:
title: The message title.
message: The message body.
color: Can either be one of 'good', 'warning', 'danger',
or any hex color code
fallback: What is shown to IRC/fallback clients
Returns:
api_response:
Raises:
TODO:
"""
        if fallback == '':
fallback = title
payload = self.__build_payload(message, title, color, fallback, fields)
try:
response = requests.post(self.web_hook_url,
data=json.dumps(payload))
except Exception:
raise Exception(traceback.format_exc())
else:
if response.status_code == 200:
return response
else:
raise Exception(response.content.decode())
def debug(self, message, title='Slack Notification', fallback='',
fields=''):
return self.__send_notification(message=message,
title=title,
color='#03A9F4',
fallback=fallback,
fields=fields)
def info(self, message, title='Slack Notification', fallback='',
fields=''):
return self.__send_notification(message=message,
title=title,
color='good',
fallback=fallback,
fields=fields)
def warn(self, message, title='Slack Notification', fallback='',
fields=''):
return self.__send_notification(message=message,
title=title,
color='warning',
fallback=fallback,
fields=fields)
def error(self, message, title='Slack Notification', fallback='',
fields=''):
return self.__send_notification(message=message,
title=title,
color='danger',
fallback=fallback,
fields=fields)
def message(self, message, title='Slack Notification', fallback='',
color='good', fields=''):
return self.__send_notification(message=message,
title=title,
color=color,
fallback=fallback,
fields=fields)
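# Minimal usage sketch -- the web hook URL, channel and titles below are
# placeholders, not real endpoints:
#   logger = SlackLogger('https://hooks.slack.com/services/T000/B000/XXXX',
#                        channel='#general', username='deploy-bot')
#   logger.info('Deployment finished', title='Deploy status')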
def main():
try:
web_hook_url = os.environ["SLACK_INCOMING_WEB_HOOK"]
except KeyError:
print('ERROR: Please set the SLACK_INCOMING_WEB_HOOK variable in ' +
              'your environment.')
else:
parser = ArgumentParser(description='slackpy command line tool')
parser.add_argument('-m',
'--message',
type=str,
required=True,
help='Message')
parser.add_argument('-c',
'--channel',
required=False,
help='Channel',
default=None)
parser.add_argument('-t',
'--title',
type=str,
required=False,
help='Title',
default='Slack Notification')
parser.add_argument('-n',
'--name',
type=str,
required=False,
help='Name of Postman',
default='Logger')
# The purpose of backward compatibility, old args (1, 2, 3)
# are being retained.
# DEBUG == 10, INFO == 20, # WARNING == 30, ERROR == 40
parser.add_argument('-l',
'--level',
type=int,
default=20,
choices=[10, 20, 30, 40, 1, 2, 3])
args = parser.parse_args()
client = SlackLogger(web_hook_url, args.channel, args.name)
if args.level == 10:
response = client.debug(args.message, args.title)
elif args.level == 20 or args.level == 1:
response = client.info(args.message, args.title)
elif args.level == 30 or args.level == 2:
response = client.warn(args.message, args.title)
elif args.level == 40 or args.level == 3:
response = client.error(args.message, args.title)
else:
            raise Exception("'Level' must be one of 10, 20, 30 or 40 (or the legacy values 1, 2, 3)")
if response.status_code == 200:
print(True)
else:
print(False)
| mit | -6,462,750,520,471,736,000 | 32.090909 | 79 | 0.440324 | false |
midroid/mediadrop | mediadrop/migrations/util.py | 10 | 8351 | # This file is a part of MediaDrop (http://www.mediadrop.net),
# Copyright 2009-2015 MediaDrop contributors
# For the exact contribution history, see the git revision log.
# The source code contained in this file is licensed under the GPLv3 or
# (at your option) any later version.
# See LICENSE.txt in the main project directory, for more information.
import logging
from alembic.config import Config
from alembic.environment import EnvironmentContext
from alembic.script import ScriptDirectory
from sqlalchemy import Column, Integer, MetaData, Table, Unicode, UnicodeText
from mediadrop.model import metadata, DBSession
__all__ = ['MediaDropMigrator', 'PluginDBMigrator']
migrate_to_alembic_mapping = {
49: None,
50: u'50258ad7a96d',
51: u'51c050c6bca0',
52: u'432df7befe8d',
53: u'4d27ff5680e5',
54: u'280565a54124',
55: u'16ed4c91d1aa',
56: u'30bb0d88d139',
57: u'3b2f74a50399',
}
fake_meta = MetaData()
migrate_table = Table('migrate_version', fake_meta,
Column('repository_id', Unicode(250), autoincrement=True, primary_key=True),
Column('repository_path', UnicodeText, nullable=True),
Column('version', Integer, nullable=True),
)
def prefix_table_name(conf, table_name):
table_prefix = conf.get('db_table_prefix', None)
if not table_prefix:
return table_name
# treat 'foo' and 'foo_' the same so we're not too harsh on the users
normalized_prefix = table_prefix.rstrip('_')
return normalized_prefix + '_' + table_name
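# e.g. prefix_table_name({'db_table_prefix': 'mcore'}, 'alembic_migrations')
# yields 'mcore_alembic_migrations'; without a configured prefix the table name
# is returned unchanged.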
class AlembicMigrator(object):
def __init__(self, context=None, log=None, plugin_name=None, default_data_callable=None):
self.context = context
self.log = log or logging.getLogger(__name__)
self.plugin_name = plugin_name
self.default_data_callable = default_data_callable
@classmethod
def init_environment_context(cls, conf):
file_template = conf.get('alembic.file_template', '%%(day).3d-%%(rev)s-%%(slug)s')
script_location = conf.get('alembic.script_location', 'mediadrop:migrations')
version_table = conf.get('alembic.version_table', 'alembic_migrations')
alembic_cfg = Config(ini_section='main')
alembic_cfg.set_main_option('script_location', script_location)
alembic_cfg.set_main_option('sqlalchemy.url', conf['sqlalchemy.url'])
# TODO: add other sqlalchemy options
alembic_cfg.set_main_option('file_template', file_template)
script = ScriptDirectory.from_config(alembic_cfg)
def upgrade(current_db_revision, context):
return script._upgrade_revs('head', current_db_revision)
table_name = prefix_table_name(conf, table_name=version_table)
return EnvironmentContext(alembic_cfg, script, fn=upgrade, version_table=table_name)
def db_needs_upgrade(self):
return (self.head_revision() != self.current_revision())
def is_db_scheme_current(self):
return (not self.db_needs_upgrade())
def current_revision(self):
if not self.alembic_table_exists():
return None
self.context.configure(connection=metadata.bind.connect(), transactional_ddl=True)
migration_context = self.context.get_context()
return migration_context.get_current_revision()
def head_revision(self):
return self.context.get_head_revision()
def _table_exists(self, table_name):
engine = metadata.bind
db_connection = engine.connect()
exists = engine.dialect.has_table(db_connection, table_name)
return exists
def alembic_table_exists(self):
table_name = self.context.context_opts.get('version_table')
return self._table_exists(table_name)
def migrate_db(self):
target = 'MediaDrop'
if self.plugin_name:
target = self.plugin_name + ' plugin'
if self.current_revision() is None:
if self.alembic_table_exists() and (self.head_revision() is None):
# The plugin has no migrations but db_defaults: adding default
# data should only happen once.
# alembic will create the migration table after the first run
# but as we don't have any migrations "self.head_revision()"
# is still None.
return
self.log.info('Initializing database for %s.' % target)
self.init_db()
return
self.log.info('Running any new migrations for %s, if there are any' % target)
self.context.configure(connection=metadata.bind.connect(), transactional_ddl=True)
with self.context:
self.context.run_migrations()
def init_db(self, revision='head'):
self.stamp(revision)
# -----------------------------------------------------------------------------
# mostly copied from alembic 0.5.0
# The problem in alembic.command.stamp() is that it builds a new
# EnvironmentContext which does not have any ability to configure the
# version table name and MediaDrop uses a custom table name.
def stamp(self, revision):
"""'stamp' the revision table with the given revision; don't
run any migrations."""
script = self.context.script
def do_stamp(rev, context):
if context.as_sql:
current = False
else:
current = context._current_rev()
dest = script.get_revision(revision)
if dest is not None:
dest = dest.revision
context._update_current_rev(current, dest)
return []
context_opts = self.context.context_opts.copy()
context_opts.update(dict(
script=script,
fn=do_stamp,
))
stamp_context = EnvironmentContext(self.context.config, **context_opts)
with stamp_context:
script.run_env()
# --------------------------------------------------------------------------
class MediaDropMigrator(AlembicMigrator):
@classmethod
def from_config(cls, conf, **kwargs):
context = cls.init_environment_context(conf)
return cls(context=context, **kwargs)
def map_migrate_version(self):
migrate_version_query = migrate_table.select(
migrate_table.c.repository_id == u'MediaCore Migrations'
)
result = DBSession.execute(migrate_version_query).fetchone()
db_migrate_version = result.version
if db_migrate_version in migrate_to_alembic_mapping:
return migrate_to_alembic_mapping[db_migrate_version]
earliest_upgradable_version = sorted(migrate_to_alembic_mapping)[0]
if db_migrate_version < earliest_upgradable_version:
error_msg = ('Upgrading from such an old version of MediaDrop is not '
'supported. Your database is at version %d but upgrades are only '
'supported from MediaCore CE 0.9.0 (DB version %d). Please upgrade '
                'to 0.9.0 first.')
self.log.error(error_msg % (db_migrate_version, earliest_upgradable_version))
else:
self.log.error('Unknown DB version %s. Can not upgrade to alembic' % db_migrate_version)
raise AssertionError('unsupported DB migration version.')
def migrate_table_exists(self):
return self._table_exists('migrate_version')
class PluginDBMigrator(AlembicMigrator):
@classmethod
def from_config(cls, plugin, conf, **kwargs):
config = {
'alembic.version_table': plugin.name+'_migrations',
'alembic.script_location': '%s:%s' % (plugin.package_name, 'migrations'),
'sqlalchemy.url': conf['sqlalchemy.url'],
}
context = cls.init_environment_context(config)
return PluginDBMigrator(context=context, plugin_name=plugin.name,
default_data_callable=plugin.add_db_defaults, **kwargs)
# LATER: this code goes into the main AlembicMigrator once the MediaDrop
# initialiation code is moved from websetup.py to db_defaults.py
def init_db(self, revision='head'):
if self.default_data_callable:
self.default_data_callable()
self.stamp(revision)
else:
self.migrate_db()
| gpl-3.0 | -8,129,118,314,801,690,000 | 40.137931 | 100 | 0.628308 | false |
jaggu303619/asylum | openerp/addons/account_bank_statement_extensions/wizard/cancel_statement_line.py | 52 | 1508 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class cancel_statement_line(osv.osv_memory):
_name = 'cancel.statement.line'
_description = 'Cancel selected statement lines'
def cancel_lines(self, cr, uid, ids, context):
line_ids = context['active_ids']
line_obj = self.pool.get('account.bank.statement.line')
line_obj.write(cr, uid, line_ids, {'state': 'draft'}, context=context)
return {}
cancel_statement_line()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -2,903,428,950,234,175,500 | 39.756757 | 78 | 0.62931 | false |
edublancas/sklearn-model-evaluation | src/sklearn_evaluation/table.py | 1 | 1872 | from . import compute
__all__ = ['feature_importances']
# http://ipython.readthedocs.org/en/stable/config/integrating.html
class Table():
def __init__(self, content, header):
try:
self._tabulate = __import__('tabulate').tabulate
        except ImportError:
raise ImportError('tabulate is required to use the table module')
self.content = content
self.header = header
def to_html(self):
return self._tabulate(self.content, headers=self.header,
tablefmt='html')
def __str__(self):
return self._tabulate(self.content, headers=self.header,
tablefmt='grid')
def _repr_html_(self):
return self.to_html()
def __repr__(self):
return str(self)
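# Usage sketch (values are illustrative): a Table renders as an HTML table in
# IPython and as a plain-text grid elsewhere, e.g.
#   Table([['petal_width', 0.42], ['sepal_length', 0.17]],
#         header=['feature_name', 'importance'])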
def feature_importances(data, top_n=None, feature_names=None):
"""
Get and order feature importances from a scikit-learn model
or from an array-like structure.
If data is a scikit-learn model with sub-estimators (e.g. RandomForest,
AdaBoost) the function will compute the standard deviation of each
feature.
Parameters
----------
data : sklearn model or array-like structure
Object to get the data from.
top_n : int
Only get results for the top_n features.
feature_names : array-like
Feature_names
Returns
-------
table
Table object with the data. Columns are
feature_name, importance (`std_` only included for models with
sub-estimators)
"""
if data is None:
raise ValueError('data is needed to tabulate feature importances. '
'When plotting using the evaluator you need to pass '
'an estimator ')
res = compute.feature_importances(data, top_n, feature_names)
return Table(res, res.dtype.names)
| mit | -6,617,903,667,981,385,000 | 28.25 | 78 | 0.606303 | false |
oztalha/News-Commentary-Tweets-of-Elites | analysis/tweeps-US.py | 1 | 5757 |
# coding: utf-8
# # 113th Congress as News Commentators on Twitter
#
# In this project I am answering the following questions:
#
# * Who are the most active news commentators among senators and congressmen ?
# * Which news got the most attention by the politicians ?
# * How many news (of 7376) are commentated by democrats and/or republicans...
# * How many comments made on these news by each group ?
# * What are the news with the highest difference of comment counts (of groups)?
#
# The news and the curated tweets used in this study are scraped from theplazz.com approximately matching the duration of [113th US Congress](https://en.wikipedia.org/wiki/113th_United_States_Congress), i.e. between Jan 2013 - Jan 2015.
#
# See [here](http://talhaoz.com/news/) for other iPython notebooks on this project.
#
# Project (datasets and the source code) is available on [GitHub](https://github.com/oztalha/News-Commentary-Tweets-of-Elites)
#
# In[2]:
import twitter
import pandas as pd
import plotly.plotly as py
from plotly.graph_objs import *
from mykeys import tw
# In[3]:
def oauth_login():
"""Twitter authorization """
#tw is a dictionary, the only variable in mykeys.py
auth = twitter.oauth.OAuth(tw['OAUTH_TOKEN'], tw['OAUTH_TOKEN_SECRET'],
tw['CONSUMER_KEY'], tw['CONSUMER_SECRET'])
twitter_api = twitter.Twitter(auth=auth)
return twitter_api
def get_members(members):
"""Scrape only the interesting info from twitter json response """
return [(m['id'],m['screen_name'],m['name'],m['location'],m['description'],
m['created_at'], m['friends_count'],m['followers_count'],
m['statuses_count'],m['favourites_count']) for m in members['users']]
# column names used when building the politician dataframes
header = ['id','screen_name','name','location','description','created_at',
          'friends','followers','statuses','favorites']
def tw_to_pol(twitter_api,slug,owner_screen_name,group):
"""Get members of a twitter list with known political group into a dataframe """
resp = twitter_api.lists.members(slug=slug,owner_screen_name=owner_screen_name,cursor=-1,count=5000)
members = get_members(resp)
df = pd.DataFrame(members,columns=header)
df['party'] = group
return df
def get_politicians():
"""Download 113th congress tweeps using public Twitter lists"""
header = ['id','screen_name','name','location','description','created_at',
'friends','followers','statuses','favorites']
polists = [{'slug':'senaterepublicans', 'owner_screen_name':'Senate_GOPs', 'group':'gop'}, #62
{'slug':'house-republicans', 'owner_screen_name':'HouseGOP', 'group':'gop'}, #260
{'slug':'housegop', 'owner_screen_name':'GOPLeader', 'group':'gop'}, #237
{'slug':'elected-democrats', 'owner_screen_name':'TheDemocrats', 'group':'dem'}, #259
{'slug':'house-democrats', 'owner_screen_name':'DannyMariachi', 'group':'dem'}, #188
{'slug':'senatedemocrats', 'owner_screen_name':'SenateDems', 'group':'dem'} #52
]
twitter_api = oauth_login()
df = pd.DataFrame(columns=header)
for polist in polists:
df = df.append(tw_to_pol(twitter_api,polist['slug'],polist['owner_screen_name'],polist['group']))
df = df.drop_duplicates()
df.to_csv('data/US-politicians.csv',encoding='utf-8',index=False)
return df
# In[4]:
# get twitter IDs of congressmen and senators
df = pd.read_csv('data/US-politicians.csv',encoding='utf-8')
gop = df[df['party']=='gop']
dem = df[df['party']=='dem']
dem_tweeps = set(dem.screen_name.values)
gop_tweeps = set(gop.screen_name.values)
# Principal Accounts of Members of the U.S. Senate (a mix of campaign and government accounts)
senate = pd.read_csv('data/US-senate.csv',encoding='utf-8')
# In[5]:
# get commentary tweets of US newsmakers and opinion-shapers
tweets = pd.read_csv('data/US-tweets.csv',encoding='utf-8')
tweets.twhandle = tweets.twhandle.str[1:]
# In[6]:
# print politician counts curated at least once by theplazz.com
tweepset = set(tweets.twhandle.unique())
senateset = set(senate.screen_name.values)
print('curated senator count:',len(senateset & tweepset))
print('curated democrat count:',len(dem_tweeps & tweepset))
print('curated republican count:',len(gop_tweeps & tweepset))
# In[7]:
# plot commentating activity of these politicians
tweeps = tweets.groupby(by='twhandle')['twtext'].count().order(ascending=False)
poltweeps = tweeps[tweeps.index.isin(df.screen_name)]
colors = ['blue' if x in dem_tweeps else 'red' for x in poltweeps.index]
data = Data([Bar(
x=poltweeps.index,
y=poltweeps.values,
marker=Marker(color=colors)
)])
layout = Layout(yaxis=YAxis(title='# of news commentated (Jan 2013 - Jan 2015)'),
title="News counts commentated by 113th US Congress (curated by theplazz.com)")
fig = Figure(data=data, layout=layout)
py.iplot(fig,filename="113th US Congress as News Commentators")
# In[8]:
# how many news are commentated by how many democrats and/or republicans...
title = tweets.groupby(by=['title','dt'])['twhandle']
demnews = title.apply(lambda g: len(dem_tweeps & set(g.values)))
gopnews = title.apply(lambda g: len(gop_tweeps & set(g.values)))
print (demnews.sum(),'comments made on',demnews[demnews>0].size,'news by democrats.')
print (gopnews.sum(),'comments made on',gopnews[gopnews>0].size,'news by republicans.')
dgtotl = (demnews + gopnews)
print ('News commentated by either group:',(dgtotl[dgtotl>0].size))
both = demnews & gopnews
print ('News commentated by both parties:',(both[both==True].size))
# In[9]:
# Which news got the most attention by the politicians ?
dgtotl.order(ascending=False).head(60)
# In[10]:
# On which news the comment-count differences maximized?
# Number of comments by gop - number of comments by dems
dgdiff = (demnews - gopnews)
dgdiff.order()
| mit | 8,331,257,566,190,518,000 | 36.875 | 236 | 0.687163 | false |
fenginx/django | tests/settings_tests/test_file_charset.py | 27 | 1555 | import sys
from types import ModuleType
from django.conf import FILE_CHARSET_DEPRECATED_MSG, Settings, settings
from django.test import SimpleTestCase, ignore_warnings
from django.utils.deprecation import RemovedInDjango31Warning
class DeprecationTests(SimpleTestCase):
msg = FILE_CHARSET_DEPRECATED_MSG
def test_override_settings_warning(self):
with self.assertRaisesMessage(RemovedInDjango31Warning, self.msg):
with self.settings(FILE_CHARSET='latin1'):
pass
def test_settings_init_warning(self):
settings_module = ModuleType('fake_settings_module')
settings_module.FILE_CHARSET = 'latin1'
settings_module.SECRET_KEY = 'ABC'
sys.modules['fake_settings_module'] = settings_module
try:
with self.assertRaisesMessage(RemovedInDjango31Warning, self.msg):
Settings('fake_settings_module')
finally:
del sys.modules['fake_settings_module']
def test_access_warning(self):
with self.assertRaisesMessage(RemovedInDjango31Warning, self.msg):
settings.FILE_CHARSET
# Works a second time.
with self.assertRaisesMessage(RemovedInDjango31Warning, self.msg):
settings.FILE_CHARSET
@ignore_warnings(category=RemovedInDjango31Warning)
def test_access(self):
with self.settings(FILE_CHARSET='latin1'):
self.assertEqual(settings.FILE_CHARSET, 'latin1')
# Works a second time.
self.assertEqual(settings.FILE_CHARSET, 'latin1')
| bsd-3-clause | 6,694,954,358,733,557,000 | 37.875 | 78 | 0.688746 | false |
simod/geonode | geonode/tests/suite/runner.py | 5 | 21532 | import sys
import time
import logging
import multiprocessing
from multiprocessing import Process, Queue, Event
from Queue import Empty
from twisted.scripts.trial import Options, _getSuite
from twisted.trial.runner import TrialRunner
from django.conf import settings
from django.test.runner import DiscoverRunner
from django.db import connections, DEFAULT_DB_ALIAS
from django.core.exceptions import ImproperlyConfigured
from .base import setup_test_db
# "auto" - one worker per Django application
# "cpu" - one worker per process core
WORKER_MAX = getattr(settings, 'TEST_RUNNER_WORKER_MAX', 3)
WORKER_COUNT = getattr(settings, 'TEST_RUNNER_WORKER_COUNT', 'auto')
NOT_THREAD_SAFE = getattr(settings, 'TEST_RUNNER_NOT_THREAD_SAFE', None)
PARENT_TIMEOUT = getattr(settings, 'TEST_RUNNER_PARENT_TIMEOUT', 10)
WORKER_TIMEOUT = getattr(settings, 'TEST_RUNNER_WORKER_TIMEOUT', 10)
# amqplib spits out a lot of log messages which just add a lot of noise.
logger = logging.getLogger(__name__)
null_file = open('/dev/null', 'w')
class GeoNodeBaseSuiteDiscoverRunner(DiscoverRunner):
def __init__(self, pattern=None, top_level=None, verbosity=1,
interactive=True, failfast=True, keepdb=False,
reverse=False, debug_mode=False, debug_sql=False, parallel=0,
tags=None, exclude_tags=None, **kwargs):
self.pattern = pattern
self.top_level = top_level
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
self.keepdb = keepdb
self.reverse = reverse
self.debug_mode = debug_mode
self.debug_sql = debug_sql
self.parallel = parallel
self.tags = set(tags or [])
self.exclude_tags = set(exclude_tags or [])
class BufferWritesDevice(object):
def __init__(self):
self._data = []
def write(self, string):
self._data.append(string)
def read(self):
return ''.join(self._data)
def flush(self, *args, **kwargs):
pass
def isatty(self):
return False
# Redirect stdout to /dev/null because we don't want to see all the repeated
# "database creation" logging statements from all the workers.
# All the test output is printed to stderr so this is not problematic.
sys.stdout = null_file
class ParallelTestSuiteRunner(object):
def __init__(self, pattern=None, top_level=None, verbosity=1,
interactive=True, failfast=True, keepdb=False,
reverse=False, debug_mode=False, debug_sql=False, parallel=0,
tags=None, exclude_tags=None, **kwargs):
self.pattern = pattern
self.top_level = top_level
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
self.keepdb = keepdb
self.reverse = reverse
self.debug_mode = debug_mode
self.debug_sql = debug_sql
self.parallel = parallel
self.tags = set(tags or [])
self.exclude_tags = set(exclude_tags or [])
self._keyboard_interrupt_intercepted = False
self._worker_max = kwargs.get('worker_max', WORKER_MAX)
self._worker_count = kwargs.get('worker_count', WORKER_COUNT)
self._not_thread_safe = kwargs.get('not_thread_safe', NOT_THREAD_SAFE) or []
self._parent_timeout = kwargs.get('parent_timeout', PARENT_TIMEOUT)
self._worker_timeout = kwargs.get('worker_timeout', WORKER_TIMEOUT)
self._database_names = self._get_database_names()
def _get_database_names(self):
database_names = {}
for alias in connections:
connection = connections[alias]
database_name = connection.settings_dict['NAME']
database_names[alias] = database_name
return database_names
def run_tests(self, test_labels, **kwargs):
return self._run_tests(tests=test_labels)
def _run_tests(self, tests, **kwargs):
# tests = dict where the key is a test group name and the value are
# the tests to run
tests_queue = Queue()
results_queue = Queue()
stop_event = Event()
pending_tests = {}
pending_not_thread_safe_tests = {}
completed_tests = {}
failures = 0
errors = 0
start_time = time.time()
# First tun tests which are not thread safe in the main process
for group in self._not_thread_safe:
if group not in tests.keys():
continue
group_tests = tests[group]
del tests[group]
logger.info('Running tests in a main process: %s' % (group_tests))
pending_not_thread_safe_tests[group] = group_tests
result = self._tests_func(tests=group_tests, worker_index=None)
results_queue.put((group, result), block=False)
for group, tests in tests.iteritems():
tests_queue.put((group, tests), block=False)
pending_tests[group] = tests
worker_count = self._worker_count
if worker_count == 'auto':
worker_count = len(pending_tests)
elif worker_count == 'cpu':
worker_count = multiprocessing.cpu_count()
if worker_count > len(pending_tests):
            # No need to spawn more workers than there are tests.
worker_count = len(pending_tests)
worker_max = self._worker_max
if worker_max == 'auto':
worker_max = len(pending_tests)
elif worker_max == 'cpu':
worker_max = multiprocessing.cpu_count()
if worker_count > worker_max:
            # No need to spawn more workers than the configured maximum.
worker_count = worker_max
worker_args = (tests_queue, results_queue, stop_event)
logger.info("Number of workers %s " % worker_count)
workers = self._create_worker_pool(pool_size=worker_count,
target_func=self._run_tests_worker,
worker_args=worker_args)
for index, worker in enumerate(workers):
logger.info('Staring worker %s' % (index))
worker.start()
if workers:
while pending_tests:
try:
try:
group, result = results_queue.get(timeout=self._parent_timeout,
block=True)
except Exception:
raise Empty
try:
if group not in pending_not_thread_safe_tests:
pending_tests.pop(group)
else:
pending_not_thread_safe_tests.pop(group)
except KeyError:
logger.info('Got a result for unknown group: %s' % (group))
else:
completed_tests[group] = result
self._print_result(result)
if result.failures or result.errors:
failures += len(result.failures)
errors += len(result.errors)
if self.failfast:
# failfast is enabled, kill all the active workers
# and stop
for worker in workers:
if worker.is_alive():
worker.terminate()
break
except Empty:
worker_left = False
for worker in workers:
if worker.is_alive():
worker_left = True
break
if not worker_left:
break
# We are done, signalize all the workers to stop
stop_event.set()
end_time = time.time()
self._exit(start_time, end_time, failures, errors)
def _run_tests_worker(self, index, tests_queue, results_queue, stop_event):
def pop_item():
group, tests = tests_queue.get(timeout=self._worker_timeout)
return group, tests
try:
try:
for group, tests in iter(pop_item, None):
if stop_event.is_set():
# We should stop
break
try:
result = None
logger.info(
'Worker %s is running tests %s' %
(index, tests))
result = self._tests_func(
tests=tests, worker_index=index)
results_queue.put((group, result))
logger.info(
'Worker %s has finished running tests %s' %
(index, tests))
except (KeyboardInterrupt, SystemExit):
if isinstance(self, TwistedParallelTestSuiteRunner):
# Twisted raises KeyboardInterrupt when the tests
# have completed
pass
else:
raise
except Exception as e:
import traceback
tb = traceback.format_exc()
logger.error(tb)
logger.info('Running tests failed, reason: %s' % (str(e)))
result = TestResult().from_exception(e)
results_queue.put((group, result))
except Empty:
logger.info(
'Worker %s timed out while waiting for tests to run' %
(index))
finally:
tests_queue.close()
results_queue.close()
logger.info('Worker %s is stopping' % (index))
def _pre_tests_func(self):
# This method gets called before _tests_func is called
pass
def _post_tests_func(self):
# This method gets called after _tests_func has completed and _print_result
# function is called
pass
def _tests_func(self, worker_index):
        raise NotImplementedError('_tests_func not implemented')
def _print_result(self, result):
print >> sys.stderr, result.output
def _exit(self, start_time, end_time, failure_count, error_count):
time_difference = (end_time - start_time)
print >> sys.stderr, 'Total run time: %d seconds' % (time_difference)
try:
sys.exit(failure_count + error_count)
except Exception:
pass
def _group_by_app(self, test_labels):
"""
Groups tests by an app. This helps to partition tests so they can be run
in separate worker processes.
@TODO: Better partitioning of tests based on the previous runs - measure
        test suite run time and partition tests so we can spawn as many workers as
        makes sense to get the maximum performance benefit.
"""
tests = {}
for test_label in test_labels:
            if '.' not in test_label:
app = test_label
else:
app = test_label.split('.')[0] + test_label.split('.')[1]
if not tests.get(app):
tests[app] = [test_label]
else:
tests[app].append(test_label)
return tests
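    # Illustrative grouping (hypothetical labels):
    #   ['geonode.layers', 'geonode.maps', 'people'] ->
    #   {'geonodelayers': ['geonode.layers'], 'geonodemaps': ['geonode.maps'], 'people': ['people']}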
def _group_by_file(self, test_names):
tests = {}
for test_name in test_names:
tests[test_name] = test_name
return tests
def _create_worker_pool(self, pool_size, target_func, worker_args):
workers = []
for index in range(0, pool_size):
args = (index,) + worker_args
worker = Process(target=target_func, args=args)
workers.append(worker)
return workers
def log(self, string):
if self.verbosity >= 3:
print string
class DjangoParallelTestSuiteRunner(ParallelTestSuiteRunner,
DiscoverRunner):
def __init__(self, pattern=None, top_level=None, verbosity=1,
interactive=True, failfast=True, keepdb=False,
reverse=False, debug_mode=False, debug_sql=False, parallel=0,
tags=None, exclude_tags=None, **kwargs):
self.pattern = pattern
self.top_level = top_level
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
self.keepdb = keepdb
self.reverse = reverse
self.debug_mode = debug_mode
self.debug_sql = debug_sql
self.parallel = parallel
self.tags = set(tags or [])
self.exclude_tags = set(exclude_tags or [])
self._keyboard_interrupt_intercepted = False
self._worker_max = kwargs.get('worker_max', WORKER_MAX)
self._worker_count = kwargs.get('worker_count', WORKER_COUNT)
self._not_thread_safe = kwargs.get('not_thread_safe', NOT_THREAD_SAFE) or []
self._parent_timeout = kwargs.get('parent_timeout', PARENT_TIMEOUT)
self._worker_timeout = kwargs.get('worker_timeout', WORKER_TIMEOUT)
self._database_names = self._get_database_names()
def run_tests(self, test_labels, extra_tests=None, **kwargs):
app_tests = self._group_by_app(test_labels)
return self._run_tests(tests=app_tests)
def run_suite(self, suite, **kwargs):
return DjangoParallelTestRunner(
verbosity=self.verbosity, failfast=self.failfast).run_suite(suite)
def setup_databases(self, **kwargs):
return self.setup_databases_18(**kwargs)
def dependency_ordered(self, test_databases, dependencies):
"""
Reorder test_databases into an order that honors the dependencies
described in TEST[DEPENDENCIES].
"""
ordered_test_databases = []
resolved_databases = set()
# Maps db signature to dependencies of all its aliases
dependencies_map = {}
# Check that no database depends on its own alias
for sig, (_, aliases) in test_databases:
all_deps = set()
for alias in aliases:
all_deps.update(dependencies.get(alias, []))
if not all_deps.isdisjoint(aliases):
raise ImproperlyConfigured(
"Circular dependency: databases %r depend on each other, "
"but are aliases." % aliases
)
dependencies_map[sig] = all_deps
while test_databases:
changed = False
deferred = []
# Try to find a DB that has all its dependencies met
for signature, (db_name, aliases) in test_databases:
if dependencies_map[signature].issubset(resolved_databases):
resolved_databases.update(aliases)
ordered_test_databases.append((signature, (db_name, aliases)))
changed = True
else:
deferred.append((signature, (db_name, aliases)))
if not changed:
raise ImproperlyConfigured("Circular dependency in TEST[DEPENDENCIES]")
test_databases = deferred
return ordered_test_databases
def setup_databases_18(self, **kwargs):
mirrored_aliases = {}
test_databases = {}
dependencies = {}
worker_index = kwargs.get('worker_index', None)
for alias in connections:
connection = connections[alias]
database_name = 'test_%d_%s' % (
worker_index, connection.settings_dict['NAME'])
connection.settings_dict['TEST_NAME'] = database_name
item = test_databases.setdefault(
connection.creation.test_db_signature(),
(connection.settings_dict['NAME'], [])
)
item[1].append(alias)
if alias != DEFAULT_DB_ALIAS:
dependencies[alias] = connection.settings_dict.get(
'TEST_DEPENDENCIES', [DEFAULT_DB_ALIAS])
old_names = []
mirrors = []
for signature, (db_name, aliases) in self.dependency_ordered(
test_databases.items(), dependencies):
connection = connections[aliases[0]]
old_names.append((connection, db_name, True))
test_db_name = connection.creation.create_test_db(
verbosity=0, autoclobber=not self.interactive)
for alias in aliases[1:]:
connection = connections[alias]
if db_name:
old_names.append((connection, db_name, False))
connection.settings_dict['NAME'] = test_db_name
else:
old_names.append((connection, db_name, True))
connection.creation.create_test_db(
verbosity=0, autoclobber=not self.interactive)
for alias, mirror_alias in mirrored_aliases.items():
mirrors.append((alias, connections[alias].settings_dict['NAME']))
connections[alias].settings_dict['NAME'] = connections[mirror_alias].settings_dict['NAME']
return old_names, mirrors
def _tests_func(self, tests, worker_index):
self.setup_test_environment()
suite = self.build_suite(tests, [])
old_config = self.setup_databases(worker_index=worker_index)
result = self.run_suite(suite)
self.teardown_databases(old_config)
self.teardown_test_environment()
result = TestResult().from_django_result(result)
return result
class DjangoParallelTestRunner(DiscoverRunner):
def __init__(self, verbosity=2, failfast=True, **kwargs):
stream = BufferWritesDevice()
super(DjangoParallelTestRunner, self).__init__(stream=stream,
verbosity=verbosity,
failfast=failfast)
class TwistedParallelTestSuiteRunner(ParallelTestSuiteRunner):
def __init__(self, config, verbosity=1, interactive=False, failfast=True,
**kwargs):
self.config = config
super(TwistedParallelTestSuiteRunner, self).__init__(verbosity, interactive,
failfast, **kwargs)
def run_tests(self, test_labels, extra_tests=None, **kwargs):
app_tests = self._group_by_app(test_labels)
return self._run_tests(tests=app_tests)
def run_suite(self):
# config = self.config
tests = self.config.opts['tests']
tests = self._group_by_file(tests)
self._run_tests(tests=tests)
def _tests_func(self, tests, worker_index):
if not isinstance(tests, (list, set)):
tests = [tests]
args = ['-e']
args.extend(tests)
config = Options()
config.parseOptions(args)
stream = BufferWritesDevice()
runner = self._make_runner(config=config, stream=stream)
suite = _getSuite(config)
result = setup_test_db(worker_index, None, runner.run, suite)
result = TestResult().from_trial_result(result)
return result
def _make_runner(self, config, stream):
# Based on twisted.scripts.trial._makeRunner
mode = None
if config['debug']:
mode = TrialRunner.DEBUG
if config['dry-run']:
mode = TrialRunner.DRY_RUN
return TrialRunner(config['reporter'],
mode=mode,
stream=stream,
profile=config['profile'],
logfile=config['logfile'],
tracebackFormat=config['tbformat'],
realTimeErrors=config['rterrors'],
uncleanWarnings=config['unclean-warnings'],
workingDirectory=config['temp-directory'],
forceGarbageCollection=config['force-gc'])
class TestResult(object):
dots = False
errors = None
failures = None
exception = None
output = None
def from_django_result(self, result_obj):
self.dots = result_obj.dots
self.errors = result_obj.errors
self.failures = self._format_failures(result_obj.failures)
try:
self.output = result_obj.stream.read()
except BaseException:
pass
return self
def from_trial_result(self, result_obj):
self.errors = self._format_failures(result_obj.errors)
self.failures = self._format_failures(result_obj.failures)
try:
self.output = result_obj.stream.read()
except BaseException:
pass
return self
def from_exception(self, exception):
self.exception = str(exception)
return self
def _format_failures(self, failures):
# errors and failures attributes by default contain values which are not
# pickable (class instance)
if not failures:
return failures
formatted = []
for failure in failures:
klass, message = failure
formatted.append((str(klass), message))
return formatted
| gpl-3.0 | -4,626,556,631,041,679,000 | 35.869863 | 102 | 0.556521 | false |
pigeonflight/strider-plone | docker/appengine/lib/django-0.96/django/core/context_processors.py | 32 | 2145 | """
A set of request processors that return dictionaries to be merged into a
template context. Each function takes the request object as its only parameter
and returns a dictionary to add to the context.
These are referenced from the setting TEMPLATE_CONTEXT_PROCESSORS and used by
RequestContext.
"""
from django.conf import settings
def auth(request):
"""
Returns context variables required by apps that use Django's authentication
system.
"""
return {
'user': request.user,
'messages': request.user.get_and_delete_messages(),
'perms': PermWrapper(request.user),
}
def debug(request):
"Returns context variables helpful for debugging."
context_extras = {}
if settings.DEBUG and request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS:
context_extras['debug'] = True
from django.db import connection
context_extras['sql_queries'] = connection.queries
return context_extras
def i18n(request):
context_extras = {}
context_extras['LANGUAGES'] = settings.LANGUAGES
if hasattr(request, 'LANGUAGE_CODE'):
context_extras['LANGUAGE_CODE'] = request.LANGUAGE_CODE
else:
context_extras['LANGUAGE_CODE'] = settings.LANGUAGE_CODE
from django.utils import translation
context_extras['LANGUAGE_BIDI'] = translation.get_language_bidi()
return context_extras
def request(request):
return {'request': request}
# PermWrapper and PermLookupDict proxy the permissions system into objects that
# the template system can understand.
class PermLookupDict(object):
def __init__(self, user, module_name):
self.user, self.module_name = user, module_name
def __repr__(self):
return str(self.user.get_all_permissions())
def __getitem__(self, perm_name):
return self.user.has_perm("%s.%s" % (self.module_name, perm_name))
def __nonzero__(self):
return self.user.has_module_perms(self.module_name)
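# In templates this enables checks such as {% if perms.foo %} (any permission in
# the "foo" app) and {% if perms.foo.can_vote %} (a specific permission); the
# app and permission names here are illustrative.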
class PermWrapper(object):
def __init__(self, user):
self.user = user
def __getitem__(self, module_name):
return PermLookupDict(self.user, module_name)
| mit | 8,027,223,015,848,888,000 | 30.086957 | 83 | 0.687646 | false |
chrisspen/burlap | burlap/rsync.py | 1 | 1440 | from __future__ import print_function
from burlap import Satchel
from burlap.constants import *
from burlap.decorators import task
class RsyncSatchel(Satchel):
name = 'rsync'
def set_defaults(self):
self.env.clean = 1
self.env.gzip = 1
self.env.exclusions = [
'*_local.py',
'*.pyc',
'*.pyo',
'*.pyd',
'*.svn',
'*.tar.gz',
#'static',
]
        self.env.src_dir = '.'
#self.env.dir = '.burlap/rsync_cache'
self.env.extra_dirs = []
self.env.chown_user = 'www-data'
self.env.chown_group = 'www-data'
self.env.command = 'rsync --verbose --compress --recursive --delete ' \
'--rsh "ssh -i {key_filename}" {exclusions_str} {rsync_src_dir} {user}@{host_string}:{rsync_dst_dir}'
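        # Rendered example (placeholder host, key and paths): rsync --verbose --compress
        #   --recursive --delete --rsh "ssh -i ~/.ssh/id_rsa" --exclude='*.pyc'
        #   . deploy@example.com:/srv/app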
@task
def deploy_code(self):
"""
Generates a rsync of all deployable code.
"""
assert self.genv.SITE, 'Site unspecified.'
assert self.genv.ROLE, 'Role unspecified.'
r = self.local_renderer
if self.env.exclusions:
r.env.exclusions_str = ' '.join(
"--exclude='%s'" % _ for _ in self.env.exclusions)
r.local(r.env.rsync_command)
r.sudo('chown -R {rsync_chown_user}:{rsync_chown_group} {rsync_dst_dir}')
@task
def configure(self):
pass
rsync_satchel = RsyncSatchel()
| mit | 1,907,329,350,377,407,500 | 26.169811 | 113 | 0.538194 | false |
nicovillanueva/obsidian-core | tests/comparator_tests.py | 2 | 1343 | #from srComparator.screenshots.Comparator import Comparator
import src.screenshots.comparison.Comparator as Comparator
chrome_2 = 'images/chrome_2.png'
chrome = "images/chrome.png"
firefox = "images/firefox.png"
pdiff = "images/diff.png"
pdiff_inv = "images/diff_invert.png"
#c = Comparator()
#Comparator.size_difference(chrome, firefox)
Comparator.__match_sizes(chrome, firefox)
#Comparator.diff(chrome, firefox)
assert(Comparator.size_difference(chrome, firefox) == 287334)
assert(Comparator.difference_percentage(chrome, chrome) == 0.0)
assert(Comparator.difference_percentage(firefox, firefox) == 0.0)
assert(int(Comparator.difference_percentage(chrome, firefox)) == 21)
assert(int(Comparator.difference_percentage(firefox, chrome)) == 21)
assert(Comparator.difference_percentage(chrome, chrome_2) == 0.0)
assert(Comparator.difference_percentage(chrome_2, chrome) == 0.0)
assert(int(Comparator.difference_percentage(firefox, chrome_2)) == 21)
assert(int(Comparator.difference_percentage(chrome_2, firefox)) == 21)
assert(Comparator.difference_percentage(pdiff, pdiff_inv) == 100.0)
assert(Comparator.difference_percentage(pdiff_inv, pdiff) == 100.0)
assert(Comparator.difference_percentage(pdiff_inv, pdiff_inv) == 0.0)
assert(Comparator.difference_percentage(pdiff, pdiff) == 0.0)
Comparator.highlight_differences(firefox, chrome) | gpl-2.0 | 2,411,188,459,411,313,700 | 39.727273 | 70 | 0.779598 | false |
krishnan793/perceptron_learning | perceptron.py | 1 | 1391 | import numpy as np
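# Perceptron learning rule: for each training sample the weights move by
# alpha * (target - prediction) * input, so correctly classified samples leave
# W unchanged and misclassified ones pull the decision boundary towards them.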
def compute_epoch(V,T,W,alpha):
for i in range(V.shape[0]):
# V_i current input vector
# T_i actual output
# y predicted output
V_i = V[i]
T_i = T[i]
if (np.dot(W,V_i) >= 0):
y = 1
else:
y = 0
a = alpha * (T_i-y)
#print "a = ",a
W_delta = a * V_i
#print "W_delta = ",W_delta
W = W + W_delta
#print "W = ",W
#print "="*21
return W
def compute_weight(V,T,W,alpha):
Max_iteration = 100
epoch_difference = 1
epoch_1 = compute_epoch(V,T,W,alpha)
while(epoch_difference != 0):
epoch_2 = compute_epoch(V,T,epoch_1,alpha)
tmp = np.absolute(epoch_1-epoch_2)
epoch_difference = np.dot(tmp,tmp)
epoch_1 = epoch_2
Max_iteration = Max_iteration - 1
if (Max_iteration == 0):
			print "Max Iteration Reached. ANN not converged!"
exit()
return epoch_2
def validate_weight(V,T,W):
print " X Y Predicted"
for i in range(V.shape[0]):
print V[i,1:],T[i],
if (np.dot(V[i],W) >= 0):
print "[ 1 ]"
else:
print "[ 0 ]"
data = np.loadtxt("OR.txt")
V = data[:,:-1]
V = np.insert(V,0,-1,axis=1) # Padd 1 to account for theta
T = data[:,-1:]
# Initialise Weights to zero
W = np.array([0 for i in range(V.shape[1])]) # V.shape[1] = no of columns
# learning rate
alpha = 0.5
W = compute_weight(V,T,W,alpha)
print "Calculated weight: ",W
print "Validating the weights calculated"
validate_weight(V,T,W)
| gpl-2.0 | 26,721,377,981,514,524 | 20.4 | 73 | 0.603163 | false |
rafaelvalle/MDI | preprocess_data.py | 1 | 4200 | #!/usr/bin/python
import numpy as np
import cPickle as pkl
import os
import random
from scipy import delete
from sklearn.preprocessing import StandardScaler
from missing_data_imputation import Imputer
from processing import impute, perturb_data
from params import adult_params, scalers_folder
from params import feats_train_folder, labels_train_folder, perturb_folder
from params import rand_num_seed
np.random.seed(rand_num_seed)
random.seed(rand_num_seed)
# load features and labels
x = np.genfromtxt('data/adult-train-raw', delimiter=', ', dtype=object)
# remove holland from data to match feature size in test data
x = x[x[:, -2] != 'Holand-Netherlands']
# binarize labels
labels = (np.array(x[:, -1]) == '>50K').astype(int)
labels = labels.reshape((-1, 1))
# save labels in binary and one-hot representations
labels.dump(os.path.join(labels_train_folder, 'labels_bin.np'))
# remove redundant education-number and labels features
x = delete(x, (4, 14), 1)
# enumerate parameters
monotone = True
ratios = np.arange(0, .5, .1)
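# fractions of the data to perturb: 0.0, 0.1, 0.2, 0.3 and 0.4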
for ratio in ratios:
    print '\nPerturbing {:.0%} of the data'.format(ratio)
if ratio > 0:
pert_data, _ = perturb_data(x, adult_params['cat_cols'], ratio, monotone,
adult_params['miss_data_symbol'],
adult_params['mnar_values'])
else:
pert_data = x
print "\tRatio is {} of {}".format(
np.sum(pert_data == adult_params['miss_data_symbol']),
len(pert_data) * len(adult_params['cat_cols']))
path = os.path.join(perturb_folder,
'adult_train_pert_mono_{}_ratio_{}.csv'.format(monotone,
ratio))
# save perturbed data to disk as csv
print '\tSaving perturbed data to {}'.format(path)
np.savetxt(path, pert_data, delimiter=",", fmt="%s")
for imp_method in adult_params['imp_methods']:
print '\tImputing with {}'.format(imp_method)
imp = Imputer()
data = impute(pert_data, imp, imp_method, adult_params)
path = "data/imputed/{}_mono_{}_ratio_{}.csv".format(imp_method,
monotone,
ratio)
# save data as csv
print '\tSaving imputed data to {}'.format(path)
np.savetxt(path, data, delimiter=",", fmt="%s")
# scale continuous variables and convert categorial to one-hot
# store the scaler objects to be used on the test set
scaler_path = os.path.join(scalers_folder,
"{}_scaler".format(imp_method))
if os.path.isfile(scaler_path):
scaler_dict = pkl.load(open(scaler_path, "rb"))
else:
scaler_dict = {}
scaler = StandardScaler()
scaler = scaler.fit(data[:, adult_params['non_cat_cols']].astype(float))
data_scaled = np.copy(data)
data_scaled[:, adult_params['non_cat_cols']] = scaler.transform(
data[:, adult_params['non_cat_cols']].astype(float))
# key is imputation method and ratio dependent
# filename is imputation method dependent
scaler_dict["{}_ratio_{}".format(imp_method, ratio)] = scaler
pkl.dump(scaler_dict, open(scaler_path, 'wb'))
# binarize scaled data
data_scaled_bin = imp.binarize_data(data_scaled,
adult_params['cat_cols'],
adult_params['miss_data_symbol'])
# convert to float
data_scaled_bin = data_scaled_bin.astype(float)
# add labels as last column
data_scaled_bin = np.hstack((data_scaled_bin, labels))
# save to disk
filename = "{}_bin_scaled_mono_{}_ratio_{}.np".format(imp_method,
monotone,
ratio)
path = os.path.join(feats_train_folder, filename)
print '\tSaving imputed scaled and binarized data to {}'.format(path)
data_scaled_bin.dump(path)
| mit | -7,595,130,093,406,176,000 | 37.888889 | 81 | 0.568095 | false |
brsyuksel/how-to | blog/models.py | 1 | 1774 | from types import StringTypes
from hashlib import md5
from django.db import models
from django.contrib.auth.models import User as authUser
from .fields import ObjectIdField
class User(authUser):
class Meta:
proxy = True
@property
def gravatar(self):
return 'http://gravatar.com/avatar/' + md5(self.email).hexdigest()
class Common(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
modified_at = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
ordering = ['-created_at']
def __unicode__(self):
attr = self.__class__.__name__ if not hasattr(self.__class__, \
'unicode_attr') else getattr(self, self.__class__.unicode_attr)
return attr if isinstance(attr, StringTypes) else \
self.__class__.__name__
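# Subclasses point ``unicode_attr`` at the field that best represents an
# instance (Tag uses 'label', Entry uses 'title'); without it the class name is
# shown instead.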
class Tag(Common):
label = models.CharField(max_length=30, unique=True)
unicode_attr = 'label'
@models.permalink
def get_absolute_url(self):
return ('blog-tag', [str(self.label)])
class Entry(Common):
kind = models.CharField(max_length=3, choices=(('doc', 'Document'),\
('gde', 'Guide'), ('trk', 'Trick')), default='doc')
title = models.CharField(max_length=100)
description = models.CharField(max_length=250, blank=True)
content = ObjectIdField(null=True)
tags = models.ManyToManyField(Tag)
slug = models.SlugField(max_length=120, unique=True)
publish = models.BooleanField(default=False)
author = models.ForeignKey(User)
unicode_attr = 'title'
@models.permalink
def get_absolute_url(self):
return ('blog-entry', [str(self.slug)])
class PublishedEntryManager(models.Manager):
def get_queryset(self):
return super(PublishedEntryManager, self).get_queryset().filter(
publish=True
)
class PublishedEntry(Entry):
objects = PublishedEntryManager()
class Meta:
proxy = True | gpl-3.0 | 8,528,493,785,649,637,000 | 23.652778 | 69 | 0.718715 | false |
expobrain/GitPython | git/objects/submodule/root.py | 14 | 17666 | from .base import (
Submodule,
UpdateProgress
)
from .util import (
find_first_remote_branch
)
from git.exc import InvalidGitRepositoryError
import git
import logging
__all__ = ["RootModule", "RootUpdateProgress"]
log = logging.getLogger('git.objects.submodule.root')
log.addHandler(logging.NullHandler())
class RootUpdateProgress(UpdateProgress):
"""Utility class which adds more opcodes to the UpdateProgress"""
REMOVE, PATHCHANGE, BRANCHCHANGE, URLCHANGE = [
1 << x for x in range(UpdateProgress._num_op_codes, UpdateProgress._num_op_codes + 4)]
_num_op_codes = UpdateProgress._num_op_codes + 4
__slots__ = tuple()
BEGIN = RootUpdateProgress.BEGIN
END = RootUpdateProgress.END
REMOVE = RootUpdateProgress.REMOVE
BRANCHCHANGE = RootUpdateProgress.BRANCHCHANGE
URLCHANGE = RootUpdateProgress.URLCHANGE
PATHCHANGE = RootUpdateProgress.PATHCHANGE
class RootModule(Submodule):
"""A (virtual) Root of all submodules in the given repository. It can be used
to more easily traverse all submodules of the master repository"""
__slots__ = tuple()
k_root_name = '__ROOT__'
def __init__(self, repo):
# repo, binsha, mode=None, path=None, name = None, parent_commit=None, url=None, ref=None)
super(RootModule, self).__init__(
repo,
binsha=self.NULL_BIN_SHA,
mode=self.k_default_mode,
path='',
name=self.k_root_name,
parent_commit=repo.head.commit,
url='',
branch_path=git.Head.to_full_path(self.k_head_default)
)
def _clear_cache(self):
"""May not do anything"""
pass
#{ Interface
def update(self, previous_commit=None, recursive=True, force_remove=False, init=True,
to_latest_revision=False, progress=None, dry_run=False, force_reset=False,
keep_going=False):
"""Update the submodules of this repository to the current HEAD commit.
This method behaves smartly by determining changes of the path of a submodules
repository, next to changes to the to-be-checked-out commit or the branch to be
checked out. This works if the submodules ID does not change.
Additionally it will detect addition and removal of submodules, which will be handled
gracefully.
:param previous_commit: If set to a commit'ish, the commit we should use
as the previous commit the HEAD pointed to before it was set to the commit it points to now.
If None, it defaults to HEAD@{1} otherwise
:param recursive: if True, the children of submodules will be updated as well
using the same technique
:param force_remove: If submodules have been deleted, they will be forcibly removed.
Otherwise the update may fail if a submodule's repository cannot be deleted as
changes have been made to it (see Submodule.update() for more information)
:param init: If we encounter a new module which would need to be initialized, then do it.
:param to_latest_revision: If True, instead of checking out the revision pointed to
by this submodule's sha, the checked out tracking branch will be merged with the
latest remote branch fetched from the repository's origin.
Unless force_reset is specified, a local tracking branch will never be reset into its past, therefore
the remote branch must be in the future for this to have an effect.
:param force_reset: if True, submodules may checkout or reset their branch even if the repository has
pending changes that would be overwritten, or if the local tracking branch is in the future of the
remote tracking branch and would be reset into its past.
:param progress: RootUpdateProgress instance or None if no progress should be sent
:param dry_run: if True, operations will not actually be performed. Progress messages
will change accordingly to indicate the WOULD DO state of the operation.
:param keep_going: if True, we will ignore but log all errors, and keep going recursively.
Unless dry_run is set as well, keep_going could cause subsequent/inherited errors you wouldn't see
otherwise.
In conjunction with dry_run, it can be useful to anticipate all errors when updating submodules
:return: self"""
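        # Typical invocation (sketch; the arguments shown are illustrative, not
        # required):
        #   RootModule(repo).update(recursive=True, to_latest_revision=True,
        #                           progress=RootUpdateProgress())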
if self.repo.bare:
raise InvalidGitRepositoryError("Cannot update submodules in bare repositories")
# END handle bare
if progress is None:
progress = RootUpdateProgress()
# END assure progress is set
prefix = ''
if dry_run:
prefix = 'DRY-RUN: '
repo = self.repo
try:
# SETUP BASE COMMIT
###################
cur_commit = repo.head.commit
if previous_commit is None:
try:
previous_commit = repo.commit(repo.head.log_entry(-1).oldhexsha)
if previous_commit.binsha == previous_commit.NULL_BIN_SHA:
raise IndexError
# END handle initial commit
except IndexError:
# in new repositories, there is no previous commit
previous_commit = cur_commit
# END exception handling
else:
previous_commit = repo.commit(previous_commit) # obtain commit object
# END handle previous commit
psms = self.list_items(repo, parent_commit=previous_commit)
sms = self.list_items(repo)
spsms = set(psms)
ssms = set(sms)
# HANDLE REMOVALS
###################
rrsm = (spsms - ssms)
len_rrsm = len(rrsm)
for i, rsm in enumerate(rrsm):
op = REMOVE
if i == 0:
op |= BEGIN
# END handle begin
# fake it into thinking its at the current commit to allow deletion
# of previous module. Trigger the cache to be updated before that
progress.update(op, i, len_rrsm, prefix + "Removing submodule %r at %s" % (rsm.name, rsm.abspath))
rsm._parent_commit = repo.head.commit
rsm.remove(configuration=False, module=True, force=force_remove, dry_run=dry_run)
if i == len_rrsm - 1:
op |= END
# END handle end
progress.update(op, i, len_rrsm, prefix + "Done removing submodule %r" % rsm.name)
# END for each removed submodule
# HANDLE PATH RENAMES
#####################
# url changes + branch changes
csms = (spsms & ssms)
len_csms = len(csms)
for i, csm in enumerate(csms):
psm = psms[csm.name]
sm = sms[csm.name]
# PATH CHANGES
##############
if sm.path != psm.path and psm.module_exists():
progress.update(BEGIN | PATHCHANGE, i, len_csms, prefix +
"Moving repository of submodule %r from %s to %s"
% (sm.name, psm.abspath, sm.abspath))
# move the module to the new path
if not dry_run:
psm.move(sm.path, module=True, configuration=False)
# END handle dry_run
progress.update(
END | PATHCHANGE, i, len_csms, prefix + "Done moving repository of submodule %r" % sm.name)
# END handle path changes
if sm.module_exists():
# HANDLE URL CHANGE
###################
if sm.url != psm.url:
# Add the new remote, remove the old one
# This way, if the url just changes, the commits will not
# have to be re-retrieved
nn = '__new_origin__'
smm = sm.module()
rmts = smm.remotes
# don't do anything if we already have the url we search in place
if len([r for r in rmts if r.url == sm.url]) == 0:
progress.update(BEGIN | URLCHANGE, i, len_csms, prefix +
"Changing url of submodule %r from %s to %s" % (sm.name, psm.url, sm.url))
if not dry_run:
assert nn not in [r.name for r in rmts]
smr = smm.create_remote(nn, sm.url)
smr.fetch(progress=progress)
# If we have a tracking branch, it should be available
# in the new remote as well.
if len([r for r in smr.refs if r.remote_head == sm.branch_name]) == 0:
raise ValueError(
"Submodule branch named %r was not available in new submodule remote at %r"
% (sm.branch_name, sm.url)
)
# END head is not detached
# now delete the changed one
rmt_for_deletion = None
for remote in rmts:
if remote.url == psm.url:
rmt_for_deletion = remote
break
# END if urls match
# END for each remote
# if we didn't find a matching remote, but have exactly one,
# we can safely use this one
if rmt_for_deletion is None:
if len(rmts) == 1:
rmt_for_deletion = rmts[0]
else:
# if we have not found any remote with the original url
# we may not have a name. This is a special case,
# and it's okay to fail here
# Alternatively we could just generate a unique name and leave all
# existing ones in place
raise InvalidGitRepositoryError(
"Couldn't find original remote-repo at url %r" % psm.url)
# END handle one single remote
# END handle check we found a remote
orig_name = rmt_for_deletion.name
smm.delete_remote(rmt_for_deletion)
# NOTE: Currently we leave tags from the deleted remotes
# as well as separate tracking branches in the possibly totally
# changed repository ( someone could have changed the url to
# another project ). At some point, one might want to clean
# it up, but the danger is high to remove stuff the user
# has added explicitly
# rename the new remote back to what it was
smr.rename(orig_name)
# early on, we verified that our current tracking branch
# exists in the remote. Now we have to assure that the
# sha we point to is still contained in the new remote
# tracking branch.
smsha = sm.binsha
found = False
rref = smr.refs[self.branch_name]
for c in rref.commit.traverse():
if c.binsha == smsha:
found = True
break
# END traverse all commits in search for sha
# END for each commit
if not found:
# adjust our internal binsha to use the one of the remote
# this way, it will be checked out in the next step
# This will change the submodule relative to us, so
# the user will be able to commit the change easily
log.warn("Current sha %s was not contained in the tracking\
branch at the new remote, setting it the the remote's tracking branch", sm.hexsha)
sm.binsha = rref.commit.binsha
# END reset binsha
# NOTE: All checkout is performed by the base implementation of update
# END handle dry_run
progress.update(
END | URLCHANGE, i, len_csms, prefix + "Done adjusting url of submodule %r" % (sm.name))
# END skip remote handling if new url already exists in module
# END handle url
# HANDLE BRANCH CHANGES
#######################
if sm.branch_path != psm.branch_path:
# finally, create a new tracking branch which tracks the
# new remote branch
progress.update(BEGIN | BRANCHCHANGE, i, len_csms, prefix +
"Changing branch of submodule %r from %s to %s"
% (sm.name, psm.branch_path, sm.branch_path))
if not dry_run:
smm = sm.module()
smmr = smm.remotes
# As the branch might not exist yet, we will have to fetch all remotes to be sure ... .
for remote in smmr:
remote.fetch(progress=progress)
# end for each remote
try:
tbr = git.Head.create(smm, sm.branch_name, logmsg='branch: Created from HEAD')
except OSError:
# ... or reuse the existing one
tbr = git.Head(smm, sm.branch_path)
# END assure tracking branch exists
tbr.set_tracking_branch(find_first_remote_branch(smmr, sm.branch_name))
# NOTE: All head-resetting is done in the base implementation of update
# but we will have to checkout the new branch here. As it still points to the currently
# checked-out commit, we don't do any harm.
# As we don't want to update working-tree or index, changing the ref is all there is to do
smm.head.reference = tbr
# END handle dry_run
progress.update(
END | BRANCHCHANGE, i, len_csms, prefix + "Done changing branch of submodule %r" % sm.name)
# END handle branch
# END handle
# END for each common submodule
except Exception as err:
if not keep_going:
raise
log.error(str(err))
# end handle keep_going
# FINALLY UPDATE ALL ACTUAL SUBMODULES
######################################
for sm in sms:
# update the submodule using the default method
sm.update(recursive=False, init=init, to_latest_revision=to_latest_revision,
progress=progress, dry_run=dry_run, force=force_reset, keep_going=keep_going)
# update recursively depth first - question is which inconsistent
# state will be better in case it fails somewhere. Defective branch
# or defective depth. The RootSubmodule type will never process itself,
# which was done in the previous expression
if recursive:
# the module would exist by now if we are not in dry_run mode
if sm.module_exists():
type(self)(sm.module()).update(recursive=True, force_remove=force_remove,
init=init, to_latest_revision=to_latest_revision,
progress=progress, dry_run=dry_run, force_reset=force_reset,
keep_going=keep_going)
# END handle dry_run
# END handle recursive
# END for each submodule to update
return self
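# Example (assumed usage; `repo` is a non-bare superproject Repo): preview the
# root update first, then apply it while tolerating per-submodule failures:
#   root = type(self)(repo)
#   root.update(recursive=True, dry_run=True)
#   root.update(recursive=True, keep_going=True)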
def module(self):
""":return: the actual repository containing the submodules"""
return self.repo
#} END interface
#} END classes
| bsd-3-clause | 705,845,025,575,164,000 | 49.474286 | 120 | 0.501245 | false |
liqd/a4-meinberlin | meinberlin/apps/budgeting/migrations/0001_squashed_0004_auto_20170420_1456.py | 1 | 3199 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import autoslug.fields
import ckeditor.fields
import django.db.models.deletion
import django.utils.timezone
from django.conf import settings
from django.db import migrations
from django.db import models
import adhocracy4.maps.fields
import meinberlin.apps.moderatorfeedback.fields
class Migration(migrations.Migration):
replaces = [('meinberlin_budgeting', '0001_initial'), ('meinberlin_budgeting', '0002_proposal_moderator_feedback'), ('meinberlin_budgeting', '0003_moderatorstatement'), ('meinberlin_budgeting', '0004_auto_20170420_1456')]
dependencies = [
('a4categories', '__first__'),
('a4modules', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Proposal',
fields=[
('item_ptr', models.OneToOneField(serialize=False, auto_created=True, to='a4modules.Item', primary_key=True, parent_link=True, on_delete=models.CASCADE)),
('slug', autoslug.fields.AutoSlugField(populate_from='name', editable=False, unique=True)),
('name', models.CharField(max_length=120)),
('description', ckeditor.fields.RichTextField()),
('budget', models.PositiveIntegerField(default=0, help_text='Required Budget')),
('category', models.ForeignKey(to='a4categories.Category', null=True, on_delete=django.db.models.deletion.SET_NULL, blank=True)),
('moderator_feedback', meinberlin.apps.moderatorfeedback.fields.ModeratorFeedbackField(default=None, choices=[('CONSIDERATION', 'Under consideration'), ('REJECTED', 'Rejected'), ('ACCEPTED', 'Accepted')], null=True, blank=True, max_length=254)),
],
options={
'ordering': ['-created'],
'abstract': False,
},
bases=('a4modules.item', models.Model),
),
migrations.CreateModel(
name='ModeratorStatement',
fields=[
('created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
('modified', models.DateTimeField(blank=True, editable=False, null=True)),
('proposal', models.OneToOneField(serialize=False, related_name='moderator_statement', to='meinberlin_budgeting.Proposal', primary_key=True, on_delete=models.CASCADE)),
('statement', ckeditor.fields.RichTextField(blank=True)),
('creator', models.ForeignKey(to=settings.AUTH_USER_MODEL, on_delete=models.CASCADE)),
],
options={
'abstract': False,
},
),
migrations.AlterModelOptions(
name='proposal',
options={},
),
migrations.AddField(
model_name='proposal',
name='point',
field=adhocracy4.maps.fields.PointField(default=None, verbose_name='Where can your idea be located on a map?', help_text='Click inside marked area to set a marker. Drag and drop marker to change place.'),
preserve_default=False,
),
]
| agpl-3.0 | -1,701,514,249,277,070,300 | 46.746269 | 261 | 0.628009 | false |
harvitronix/rl-rc-car | rl-rc-car/sensors/sensors.py | 1 | 4881 | """
This module holds classes for interacting with our sensors.
Currently supports:
- IR proximity
- Sonar
- IR distance via Arduino
Example: ir_pins = [24, 25, 28]
Example: sonar_pins = [[24, 25], [28, 29]]
"""
import RPi.GPIO as gpio
import time
from statistics import median
import serial
# Setup the pi.
gpio.setmode(gpio.BCM)
class SonarSensor:
def __init__(self, in_p, out_p, max_iterations=1000,
num_readings=5, max_distance=90):
self.in_p = in_p
self.out_p = out_p
gpio.setup(self.out_p, gpio.OUT)
gpio.setup(self.in_p, gpio.IN)
gpio.output(self.out_p, False)
self.max_distance = max_distance
self.num_readings = num_readings
self.max_iterations = max_iterations
print("Initializing a sonar sensor at %d (in) %d (out)" %
(self.in_p, self.out_p))
time.sleep(2)
print("Ready.")
def get_reading(self):
"""
Take multiple readings and return the median. Helps with highly
variant and error-prone readings.
"""
iterations = 0
all_readings = []
for i in range(self.num_readings):
# Blip.
gpio.output(self.out_p, True)
time.sleep(0.00001)
gpio.output(self.out_p, False)
pulse_start = None
pulse_end = None
# Read.
while gpio.input(self.in_p) == 0 and iterations < self.max_iterations:
pulse_start = time.time()
iterations += 1
iterations = 0 # Reset so we can use it again.
while gpio.input(self.in_p) == 1 and \
iterations < self.max_iterations:
pulse_end = time.time()
iterations += 1
if pulse_start is not None and pulse_end is not None:
# Turn time into distance.
pulse_duration = pulse_end - pulse_start
distance = pulse_duration * 17150
# Limit distance returned.
distance = self.max_distance if \
distance > self.max_distance else distance
# Add the measurement.
all_readings.append(distance)
if len(all_readings) > 0:
return median(all_readings)
else:
return self.max_distance
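# Worked example of the conversion above: a 0.002 s echo round-trip gives
# 0.002 * 17150 = 34.3 cm; readings beyond max_distance (default 90) are
# clamped before the median of num_readings samples is returned.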
class IRSensor:
def __init__(self, in_p):
self.in_p = in_p
gpio.setup(self.in_p, gpio.IN)
print("Initialized an IR proximity sensor at %d" %
(self.in_p))
def get_reading(self):
return gpio.input(self.in_p)
class IRDistance:
"""
Read it from Arduino because it's analog.
"""
def __init__(self, path, baud=9600):
self.ser = serial.Serial(path, baud)
print("Initialized an IR distance sensor at %s " % path)
def get_reading(self):
"""Read off the serial port and decode the results."""
try:
return self.ser.readline().decode("utf-8").rstrip()
except:
return None
class IRSweep:
"""Use a servo to sweep and take readings."""
def __init__(self, path, baud=9600):
self.IRD = IRDistance(path, baud)
self.readings = [100 for x in range(31)]
def get_reading(self):
"""Get IR reading."""
ir_distance_reading = self.IRD.get_reading()
# Only update the IR readings if we got a good return value.
if ir_distance_reading is not None:
self.readings = self.update_sweep(ir_distance_reading)
# Return the readings even if we don't update it.
# We reverse them because 0-degrees is on the right.
flipped = self.readings[:]
return flipped[::-1]
def update_sweep(self, reading):
# Copy the old value.
new_values = self.readings[:]
# The reading we get from Arduino is in format "X|Y" where
# X = the angle and Y = the distance.
splitup = reading.split('|')
if isinstance(splitup, list) and len(splitup) == 2 and \
splitup[0] != '' and splitup[1] != '':
# Get the parts.
angle = int(splitup[0])
distance = int(splitup[1])
# Multiply distance reading to more closely match training.
distance *= 2
# Limit distance returned.
distance = 90 if distance > 90 else distance
# Change the angle into an index.
index = 0 if angle == 0 else int(angle / 6)
# Update the value at the index.
try:
new_values[index] = distance
except:
print("Invalid index:")
print(index)
raise
else:
print('Error reading from IR distance sensor. Received:')
print(splitup)
return new_values
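# Example of the parsing above: a serial reading of "42|30" gives angle 42
# -> index int(42 / 6) = 7 and distance 30 * 2 = 60 (capped at 90), so
# new_values[7] becomes 60 while the other 30 slots keep their old values.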
| mit | 8,593,860,555,225,821,000 | 28.944785 | 71 | 0.549682 | false |
sfalkner/pySMAC | pysmac/utils/smac_output_readers.py | 2 | 7437 | import json
import functools
import re
import operator
import numpy as np
from pysmac.remote_smac import process_parameter_definitions
def convert_param_dict_types(param_dict, pcs):
_, parser_dict = process_parameter_definitions(pcs)
for k in param_dict:
param_dict[k] = parser_dict[k](param_dict[k])
return(param_dict)
def json_parse(fileobj, decoder=json.JSONDecoder(), buffersize=2048):
""" Small function to parse a file containing JSON objects separated by a new line. This format is used in the live-rundata-xx.json files produces by SMAC.
taken from http://stackoverflow.com/questions/21708192/how-do-i-use-the-json-module-to-read-in-one-json-object-at-a-time/21709058#21709058
"""
buffer = ''
for chunk in iter(functools.partial(fileobj.read, buffersize), ''):
buffer += chunk
buffer = buffer.strip(' \n')
while buffer:
try:
result, index = decoder.raw_decode(buffer)
yield result
buffer = buffer[index:]
except ValueError:
# Not enough data to decode, read more
break
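# Example (assumed usage): iterating over the JSON objects of a SMAC
# live-rundata file, one object at a time:
#   with open('live-rundata-1.json') as fh:
#       for entry in json_parse(fh):
#           print(entry)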
def read_runs_and_results_file(fn):
""" Converting a runs_and_results file into a numpy array.
Almost all entries in a runs_and_results file are numeric to begin with.
Only the 14th column contains the status, which is encoded as an int: SAT = 2,
UNSAT = 1, TIMEOUT = 0, everything else = -1.
\n
+-------+----------------+
| Value | Representation |
+=======+================+
|SAT | 2 |
+-------+----------------+
|UNSAT | 1 |
+-------+----------------+
|TIMEOUT| 0 |
+-------+----------------+
|Others | -1 |
+-------+----------------+
:returns: numpy_array(dtype = double) -- the data
"""
# to convert everything into floats, the run result needs to be mapped
def map_run_result(res):
if b'TIMEOUT' in res: return(0)
if b'UNSAT' in res: return(1) # note UNSAT before SAT, b/c UNSAT contains SAT!
if b'SAT' in res: return(2)
return(-1) # covers ABORT, CRASHED, but that shouldn't happen
return(np.loadtxt(fn, skiprows=1, delimiter=',',
usecols = list(range(1,14))+[15], # skip empty 'algorithm run data' column
converters={13:map_run_result}, ndmin=2))
def read_paramstrings_file(fn):
""" Function to read a paramstring file.
Every line in this file corresponds to a full configuration. Everything is
stored as strings and without knowledge about the pcs, converting that into
any other type would involve guessing, which we shall not do here.
:param fn: the name of the paramstring file
:type fn: str
:returns: dict -- with key-value pairs 'parameter name'-'value as string'
"""
param_dict_list = []
with open(fn,'r') as fh:
for line in fh.readlines():
# remove run id and single quotes
line = line[line.find(':')+1:].replace("'","")
pairs = [s.strip().split("=") for s in line.split(',')]
param_dict_list.append({k:v for [k, v] in pairs})
return(param_dict_list)
def read_validationCallStrings_file(fn):
"""Reads a validationCallString file into a list of dictionaries.
:returns: list of dicts -- each dictionary contains 'parameter name' and 'parameter value as string' key-value pairs
"""
param_dict_list = []
with open(fn,'r') as fh:
for line in fh.readlines()[1:]: # skip header line
config_string = line.split(",")[1].strip('"')
config_string = config_string.split(' ')
tmp_dict = {}
for i in range(0,len(config_string),2):
tmp_dict[config_string[i].lstrip('-')] = config_string[i+1].strip("'")
param_dict_list.append(tmp_dict)
return(param_dict_list)
def read_validationObjectiveMatrix_file(fn):
""" reads the run data of a validation run performed by SMAC.
For cases with instances, not necessarily every instance is used during the
configuration phase to estimate a configuration's performance. If validation
is enabled, SMAC reruns parameter settings (usually just the final incumbent)
on the whole instance set/a designated test set. The data from those runs
is stored in separate files. This function reads one of these files.
:param fn: the name of the validationObjectiveMatrix file
:type fn: str
:returns: dict -- configuration ids as keys, list of performances on each instance as values.
.. todo::
testing of validation runs where more than the final incumbent is validated
"""
values = {}
with open(fn,'r') as fh:
header = fh.readline().split(",")
num_configs = len(header)-2
re_string = '\w?,\w?'.join(['"id\_(\d*)"', '"(\d*)"'] + ['"([0-9.]*)"']*num_configs)
for line in fh.readlines():
match = (re.match(re_string, line))
values[int(match.group(1))] = list(map(float,list(map(match.group, list(range(3,3+num_configs))))))
return(values)
def read_trajectory_file(fn):
"""Reads a trajectory file and returns a list of dicts with all the information.
Due to the way SMAC stores every parameter's value as a string, the configuration returned by this function also has every value stored as a string. All other values, like "Estimated Training Performance" and so on are floats, though.
:param fn: name of file to read
:type fn: str
:returns: list of dicts -- every dict contains the keys: "CPU Time Used","Estimated Training Performance","Wallclock Time","Incumbent ID","Automatic Configurator (CPU) Time","Configuration"
"""
return_list = []
with open(fn,'r') as fh:
header = list(map(lambda s: s.strip('"'), fh.readline().split(",")))
l_info = len(header)-1
for line in fh.readlines():
tmp = line.split(",")
tmp_dict = {}
for i in range(l_info):
tmp_dict[header[i]] = float(tmp[i])
tmp_dict['Configuration'] = {}
for i in range(l_info, len(tmp)):
name, value = tmp[i].strip().split("=")
tmp_dict['Configuration'][name] = value.strip("'").strip('"')
return_list.append(tmp_dict)
return(return_list)
def read_instances_file(fn):
"""Reads the instance names from an instace file
:param fn: name of file to read
:type fn: str
:returns: list -- each element is a list where the first element is the instance name followed by additional information for the specific instance.
"""
with open(fn,'r') as fh:
instance_names = fh.readlines()
return([s.strip().split() for s in instance_names])
def read_instance_features_file(fn):
"""Function to read a instance_feature file.
:returns: tuple -- first entry is a list of the feature names, second one is a dict with 'instance name' - 'numpy array containing the features' key-value pairs
"""
instances = {}
with open(fn,'r') as fh:
lines = fh.readlines()
for line in lines[1:]:
tmp = line.strip().split(",")
instances[tmp[0]] = np.array(tmp[1:],dtype=np.double)
return(lines[0].split(",")[1:], instances)
| agpl-3.0 | 785,916,097,573,593,000 | 38.349206 | 238 | 0.602931 | false |
zzw922cn/Automatic_Speech_Recognition | speechvalley/feature/wsj/wsj_preprocess.py | 1 | 5018 | # encoding: utf-8
# ******************************************************
# Author : zzw922cn
# Last modified: 2017-12-09 11:00
# Email : [email protected]
# Filename : wsj_preprocess.py
# Description : Feature preprocessing for WSJ dataset
# ******************************************************
import os
import cPickle
import glob
import sklearn
import argparse
import numpy as np
import scipy.io.wavfile as wav
from sklearn import preprocessing
from subprocess import check_call, CalledProcessError
from speechvalley.feature.core import calcfeat_delta_delta
def wav2feature(root_directory, save_directory, name, win_len, win_step, mode, feature_len, seq2seq, save):
"""
To run for WSJ corpus, you should download sph2pipe_v2.5 first!
"""
count = 0
dirid = 0
level = 'cha' if seq2seq is False else 'seq2seq'
for subdir, dirs, files in os.walk(root_directory):
for f in files:
fullFilename = os.path.join(subdir, f)
filenameNoSuffix = os.path.splitext(fullFilename)[0]
if f.endswith('.wv1') or f.endswith('.wav'):
rate = None
sig = None
try:
(rate,sig)= wav.read(fullFilename)
except ValueError as e:
# sph2pipe is assumed to sit in the current working directory
sph2pipe = os.path.join(os.getcwd(), 'sph2pipe')
wav_name = fullFilename.replace('wv1', 'wav')
check_call([sph2pipe, '-f', 'rif', fullFilename, wav_name])
os.remove(fullFilename)
print(wav_name)
(rate, sig) = wav.read(wav_name)
feat = calcfeat_delta_delta(sig,rate,win_length=win_len,win_step=win_step,feature_len=feature_len,mode=mode)
feat = preprocessing.scale(feat)
feat = np.transpose(feat)
print(feat.shape)
labelFilename = filenameNoSuffix + '.label'
with open(labelFilename,'r') as f:
characters = f.readline().strip().lower()
targets = []
if seq2seq is True:
targets.append(28)
for c in characters:
if c == ' ':
targets.append(0)
elif c == "'":
targets.append(27)
else:
targets.append(ord(c)-96)
if seq2seq is True:
targets.append(29)
targets = np.array(targets)
print(targets)
if save:
count += 1
if count%1000 == 0:
dirid += 1
print('file index:',count)
print('dir index:',dirid)
label_dir = os.path.join(save_directory, level, name, str(dirid), 'label')
feat_dir = os.path.join(save_directory, level, name, str(dirid), mode)
if not os.path.isdir(label_dir):
os.makedirs(label_dir)
if not os.path.isdir(feat_dir):
os.makedirs(feat_dir)
featureFilename = os.path.join(feat_dir, filenameNoSuffix.split('/')[-1] +'.npy')
np.save(featureFilename,feat)
t_f = os.path.join(label_dir, filenameNoSuffix.split('/')[-1] +'.npy')
print(t_f)
np.save(t_f,targets)
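# Example of the label encoding above: the transcript "hi all" becomes
# [8, 9, 0, 1, 12, 12] (a=1 ... z=26, space=0, apostrophe=27); with --seq2seq
# the start/end markers 28 and 29 are added around the sequence.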
if __name__ == '__main__':
parser = argparse.ArgumentParser(prog='wsj_preprocess',
description='Script to preprocess WSJ data')
parser.add_argument("path", help="Directory of WSJ dataset", type=str)
parser.add_argument("save", help="Directory where preprocessed arrays are to be saved",
type=str)
parser.add_argument("-n", "--name", help="Name of the dataset",
choices=['train_si284', 'test_eval92', 'test_dev'],
type=str, default='train_si284')
parser.add_argument("-m", "--mode", help="Mode",
choices=['mfcc', 'fbank'],
type=str, default='mfcc')
parser.add_argument("--featlen", help='Features length', type=int, default=13)
parser.add_argument("-s", "--seq2seq", default=False,
help="set this flag to use seq2seq", action="store_true")
parser.add_argument("-wl", "--winlen", type=float,
default=0.02, help="specify the window length of feature")
parser.add_argument("-ws", "--winstep", type=float,
default=0.01, help="specify the window step length of feature")
args = parser.parse_args()
root_directory = args.path
save_directory = args.save
mode = args.mode
feature_len = args.featlen
seq2seq = args.seq2seq
name = args.name
win_len = args.winlen
win_step = args.winstep
if root_directory == '.':
root_directory = os.getcwd()
if save_directory == '.':
save_directory = os.getcwd()
if not os.path.isdir(root_directory):
raise ValueError("WSJ Directory does not exist!")
if not os.path.isdir(save_directory):
os.makedirs(save_directory)
wav2feature(root_directory, save_directory, name=name, win_len=win_len, win_step=win_step,
mode=mode, feature_len=feature_len, seq2seq=seq2seq, save=True)
| mit | 315,326,521,415,169,800 | 36.17037 | 116 | 0.578717 | false |
ghandiosm/Test | addons/l10n_in_hr_payroll/report/report_payroll_advice.py | 47 | 2468 | #-*- coding:utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import time
from datetime import datetime
from openerp.osv import osv
from openerp.report import report_sxw
from openerp.tools import amount_to_text_en
class payroll_advice_report(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(payroll_advice_report, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
'get_month': self.get_month,
'convert': self.convert,
'get_detail': self.get_detail,
'get_bysal_total': self.get_bysal_total,
})
self.context = context
def get_month(self, input_date):
payslip_pool = self.pool.get('hr.payslip')
res = {
'from_name': '', 'to_name': ''
}
slip_ids = payslip_pool.search(self.cr, self.uid, [('date_from','<=',input_date), ('date_to','>=',input_date)], context=self.context)
if slip_ids:
slip = payslip_pool.browse(self.cr, self.uid, slip_ids, context=self.context)[0]
from_date = datetime.strptime(slip.date_from, '%Y-%m-%d')
to_date = datetime.strptime(slip.date_to, '%Y-%m-%d')
res['from_name']= from_date.strftime('%d')+'-'+from_date.strftime('%B')+'-'+from_date.strftime('%Y')
res['to_name']= to_date.strftime('%d')+'-'+to_date.strftime('%B')+'-'+to_date.strftime('%Y')
return res
def convert(self, amount, cur):
return amount_to_text_en.amount_to_text(amount, 'en', cur);
def get_bysal_total(self):
return self.total_bysal
def get_detail(self, line_ids):
result = []
self.total_bysal = 0.00
for l in line_ids:
res = {}
res.update({
'name': l.employee_id.name,
'acc_no': l.name,
'ifsc_code': l.ifsc_code,
'bysal': l.bysal,
'debit_credit': l.debit_credit,
})
self.total_bysal += l.bysal
result.append(res)
return result
class wrapped_report_payroll_advice(osv.AbstractModel):
_name = 'report.l10n_in_hr_payroll.report_payrolladvice'
_inherit = 'report.abstract_report'
_template = 'l10n_in_hr_payroll.report_payrolladvice'
_wrapped_report_class = payroll_advice_report
| gpl-3.0 | 2,207,137,365,483,276,300 | 36.969231 | 141 | 0.561183 | false |
rjschwei/azure-sdk-for-python | azure-mgmt-cdn/azure/mgmt/cdn/models/endpoint.py | 3 | 6701 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class Endpoint(Resource):
"""CDN endpoint is the entity within a CDN profile containing configuration
information such as origin, protocol, content caching and delivery
behavior. The CDN endpoint uses the URL format
<endpointname>.azureedge.net.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Resource ID.
:vartype id: str
:ivar name: Resource name.
:vartype name: str
:ivar type: Resource type.
:vartype type: str
:param location: Resource location.
:type location: str
:param tags: Resource tags.
:type tags: dict
:param origin_host_header: The host header CDN sends along with content
requests to origin. The default value is the host name of the origin.
:type origin_host_header: str
:param origin_path: The path used when CDN sends request to origin.
:type origin_path: str
:param content_types_to_compress: List of content types on which
compression applies. The value should be a valid MIME type.
:type content_types_to_compress: list of str
:param is_compression_enabled: Indicates whether content compression is
enabled on CDN. Default value is false. If compression is enabled, content
will be served compressed if the user requests a compressed version.
Content won't be compressed on CDN when requested content is smaller than
1 byte or larger than 1 MB.
:type is_compression_enabled: bool
:param is_http_allowed: Indicates whether HTTP traffic is allowed on the
endpoint. Default value is true. At least one protocol (HTTP or HTTPS)
must be allowed.
:type is_http_allowed: bool
:param is_https_allowed: Indicates whether HTTPS traffic is allowed on the
endpoint. Default value is true. At least one protocol (HTTP or HTTPS)
must be allowed.
:type is_https_allowed: bool
:param query_string_caching_behavior: Defines the query string caching
behavior. Possible values include: 'IgnoreQueryString', 'BypassCaching',
'UseQueryString', 'NotSet'
:type query_string_caching_behavior: str or
:class:`QueryStringCachingBehavior
<azure.mgmt.cdn.models.QueryStringCachingBehavior>`
:param optimization_type: Customer can specify what scenario they want
this CDN endpoint to optimize, e.g. Download, Media services. With this
information we can apply scenario driven optimization.
:type optimization_type: str
:param geo_filters: List of rules defining user geo access within a CDN
endpoint. Each geo filter defines an access rule to a specified path or
content, e.g. block APAC for path /pictures/
:type geo_filters: list of :class:`GeoFilter
<azure.mgmt.cdn.models.GeoFilter>`
:ivar host_name: The host name of the endpoint structured as
{endpointName}.{DNSZone}, e.g. contoso.azureedge.net
:vartype host_name: str
:param origins: The source of the content being delivered via CDN.
:type origins: list of :class:`DeepCreatedOrigin
<azure.mgmt.cdn.models.DeepCreatedOrigin>`
:ivar resource_state: Resource status of the endpoint. Possible values
include: 'Creating', 'Deleting', 'Running', 'Starting', 'Stopped',
'Stopping'
:vartype resource_state: str or :class:`EndpointResourceState
<azure.mgmt.cdn.models.EndpointResourceState>`
:ivar provisioning_state: Provisioning status of the endpoint.
:vartype provisioning_state: str
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'host_name': {'readonly': True},
'origins': {'required': True},
'resource_state': {'readonly': True},
'provisioning_state': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'origin_host_header': {'key': 'properties.originHostHeader', 'type': 'str'},
'origin_path': {'key': 'properties.originPath', 'type': 'str'},
'content_types_to_compress': {'key': 'properties.contentTypesToCompress', 'type': '[str]'},
'is_compression_enabled': {'key': 'properties.isCompressionEnabled', 'type': 'bool'},
'is_http_allowed': {'key': 'properties.isHttpAllowed', 'type': 'bool'},
'is_https_allowed': {'key': 'properties.isHttpsAllowed', 'type': 'bool'},
'query_string_caching_behavior': {'key': 'properties.queryStringCachingBehavior', 'type': 'QueryStringCachingBehavior'},
'optimization_type': {'key': 'properties.optimizationType', 'type': 'str'},
'geo_filters': {'key': 'properties.geoFilters', 'type': '[GeoFilter]'},
'host_name': {'key': 'properties.hostName', 'type': 'str'},
'origins': {'key': 'properties.origins', 'type': '[DeepCreatedOrigin]'},
'resource_state': {'key': 'properties.resourceState', 'type': 'str'},
'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
}
def __init__(self, location, origins, tags=None, origin_host_header=None, origin_path=None, content_types_to_compress=None, is_compression_enabled=None, is_http_allowed=None, is_https_allowed=None, query_string_caching_behavior=None, optimization_type=None, geo_filters=None):
super(Endpoint, self).__init__(location=location, tags=tags)
self.origin_host_header = origin_host_header
self.origin_path = origin_path
self.content_types_to_compress = content_types_to_compress
self.is_compression_enabled = is_compression_enabled
self.is_http_allowed = is_http_allowed
self.is_https_allowed = is_https_allowed
self.query_string_caching_behavior = query_string_caching_behavior
self.optimization_type = optimization_type
self.geo_filters = geo_filters
self.host_name = None
self.origins = origins
self.resource_state = None
self.provisioning_state = None
| mit | -7,319,882,818,121,555,000 | 49.765152 | 280 | 0.66214 | false |
le717/Shutdown-Timer | ShutdownTimer.py | 1 | 11654 | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
"""Shutdown Timer - Small Windows shutdown timer.
Created 2013, 2015 Triangle717
<http://Triangle717.WordPress.com>
Shutdown Timer is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Shutdown Timer is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Shutdown Timer. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import re
import json
import time
import argparse
import subprocess
import constants as const
__all__ = ("ShutdownTimer", "main")
class ShutdownTimer:
"""Core Shutdown Timer code and actions.
Exposes the following public properties and methods:
* {Tuple} verbs Two index tuple containing the action verbs.
Second index is the "ing" form of the verb.
* {Method} getTime Get the closing time as an "HH:MM" string.
* {Method} setTime Validate and set the closing time.
* {Method} setModes/getModes Set/get the force and restart options.
* {Method} saveConfig Write the JSON configuration file.
* {Method} start Start the countdown and run the closing command.
"""
def __init__(self):
"""Initalize all properties and methods."""
self.__time = None
self.__force = False
self.__restart = False
self.__configData = None
self.__configPath = self.__getConfigPath()
self.__jsonFile = os.path.join(self.__configPath,
"Shutdown-Timer.json")
self.__loadConfig()
self.__commandLine()
self.verbs = self.__getVerb()
def __getConfigPath(self):
"""Get the file path where configuration files will be stored.
@returns {String} The configuration path,
`%AppData%/Triangle717/*AppName*`.
"""
root = os.path.expandvars("%AppData%")
# Create the path if needed
path = os.path.join(root, "Triangle717", "Shutdown-Timer")
if not os.path.exists(path):
os.makedirs(path)
return path
def __getVerb(self):
"""Set the action verbs for use in messages depending on restart status.
@return {Tuple} Two index tuple containing the action verbs.
Second index is the "ing" form of the verb.
"""
if self.__restart:
return ("restart", "restarting")
return ("shutdown", "shutting down")
def __getCommand(self):
"""Construct the shutdown command based on user option selection.
@returns {Array} The exact command to run, including any arguments.
"""
commands = ["shutdown.exe"]
# Restart or shutdown computer?
if self.__restart:
commands.append("/r")
else:
commands.append("/p")
# Force closing, do not wait for any programs
if self.__force:
commands.append("/f")
# Restarting will always have a timeout dialog before
# the process starts, remove it to match shutdown behavior
if self.__restart:
commands.append("/t")
commands.append("0")
return commands
def __runCommand(self):
"""Run the closing command."""
subprocess.call(self.__getCommand())
def __commandLine(self):
"""Command-line arguments parser.
@returns {Boolean} Always returns True.
"""
parser = argparse.ArgumentParser(
description="{0} Command-line arguments".format(const.appName))
# Force mode
parser.add_argument("-f", "--force",
help="Close Windows without waiting on programs",
action="store_true")
# Restart mode
parser.add_argument("-r", "--restart",
help="Restart Windows instead of shutting down",
action="store_true")
# Assign the arguments
args = parser.parse_args()
self.__force = args.force
self.__restart = args.restart
return True
def __loadConfig(self):
"""Read and store the configuration file.
@returns {Boolean} True if the config file was read, False otherwise.
"""
try:
# Make sure it exists
if os.path.exists(self.__jsonFile):
with open(self.__jsonFile, "rt", encoding="utf-8") as f:
self.__configData = json.load(f)
return True
return False
# The file is not valid JSON, silently fail
except ValueError:
return False
def saveConfig(self):
"""Write the JSON-based config file.
@returns {Boolean} True if the config file was written,
False otherwise.
"""
try:
jsonData = {
"force": self.__force,
"restart": self.__restart,
"time": self.__time
}
with open(self.__jsonFile, "wt", encoding="utf_8") as f:
f.write(json.dumps(jsonData, indent=4, sort_keys=True))
return True
# Silently fail
except PermissionError:
return False
def __isBetween(self, val, minV, maxV):
"""Check that a value is within inclusive acceptable range.
@param {Number} val The value to check.
@param {Number} minV The minimum value.
@param {Number} maxV The maximum value.
@return {Boolean} True if in range, False if not.
"""
return val >= minV and val <= maxV
def __getCurTime(self):
"""Get the current time, according to the system clock.
@return {Tuple} The current (hour, minute, second).
"""
curTime = time.localtime()
return (curTime[3], curTime[4], curTime[5])
def __calcHoursLeft(self, curHour, offHour):
"""Calculate the number of hours that remain until closing.
@param {Number} curHour The current hour (0-23).
@param {Number} offHour The hour the computer is scheduled to close.
@return {Number} The number of hours remaining.
"""
# It will happen this very hour
if curHour == offHour:
return 0
# 4-23 hours over night
elif curHour > offHour:
# Midnight through noon
if self.__isBetween(offHour, 0, 12):
return (24 + offHour) - curHour
# 1 PM through 11 PM
elif self.__isBetween(offHour, 13, 23):
return 24 + (offHour - curHour)
# 1-18 hours today
elif offHour > curHour:
return (offHour - curHour) - 1
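# Worked example of the hour arithmetic above: at 23:xx with a 01:yy closing
# time, (24 + 1) - 23 = 2 hours remain; at 09:xx with a 17:yy closing time,
# (17 - 9) - 1 = 7 is returned because the partial hour is handled by the
# minute countdown in __countDown().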
def __countDown(self):
"""Calculate remaining time and wait until closing can occur."""
curHour, curMin, curSec = self.__getCurTime()
# If the shutdown time does not equal, the current time,
# as defined by the local system's clock
while (
"{0}:{1}".format(curHour, curMin) !=
"{0}:{1}".format(self.__time[0], self.__time[1])
):
curHour, curMin, curSec = self.__getCurTime()
# Calculate remaining hours
remainHours = self.__calcHoursLeft(curHour, self.__time[0])
# Calculate remaining minutes
if curMin > self.__time[1]:
remainMins = curMin - (self.__time[1] - 1)
else:
remainMins = (self.__time[1] - 1) - curMin
# Prevent the minutes from reading -1
if remainMins == -1:
remainMins = 0
# Calculate remaining seconds
remainSecs = 60 - curSec
# Prevent the seconds from reading 60
if remainSecs == 60:
remainSecs = "00"
# Add the leading zeros
elif self.__isBetween(remainSecs, 1, 9):
remainSecs = "0{0}".format(remainSecs)
# Display remaining time
remainTime = "{0}:{1}".format(remainMins, remainSecs)
# Display hours if needed too
if remainHours != 0:
remainTime = "{0}:{1}".format(remainHours, remainTime)
print("Time remaining until {0}: {1}".format(
self.verbs[0], remainTime))
time.sleep(1)
# Close the computer
print("\nYour computer will now {0}.".format(self.verbs[0]))
return True
def getTime(self):
"""Get the time the computer will close.
@return {String} Closing time.
"""
time = []
# Hours
if self.__isBetween(self.__time[0], 0, 9):
time.append("0{0}".format(self.__time[0]))
else:
time.append(str(self.__time[0]))
# Add the colon
time.append(":")
# Minutes
if self.__isBetween(self.__time[1], 0, 9):
time.append("0{0}".format(self.__time[1]))
else:
time.append(str(self.__time[1]))
return "".join(time)
def setTime(self, userTime):
"""Validate and set the time the computer will close.
@param {String} userTime The user-provided time to close.
@return {!Boolean} True if the time was set,
False if defined time format was not followed,
A ValueError will be raised if a value
is not in acceptable range.
"""
# Make sure it follows a certain format
formatRegex = re.match(r"(\d{2}):(\d{2})", userTime)
if formatRegex is None:
print("The time is not in the required HH:MM format!")
return False
# Convert the values to intergers
hours = int(formatRegex.group(1))
mins = int(formatRegex.group(2))
# Hours value is out of range
if not self.__isBetween(hours, 0, 24):
raise ValueError("Hour values must be between 0 and 24.")
# Minutes value is out of range
if not self.__isBetween(mins, 0, 59):
raise ValueError("Minute values must be between 0 and 59.")
# Store the time
self.__time = (hours, mins)
return True
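# Example: setTime("07:30") stores (7, 30) and returns True; setTime("7:30")
# fails the HH:MM regex and returns False; setTime("25:00") raises ValueError
# because the hour is out of range.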
def start(self):
"""Start the timer and send command to close the computer."""
print()
if self.__countDown():
self.__runCommand()
def setModes(self, force=False, restart=False):
"""TODO.
@param {Boolean} force TODO.
@param {Boolean} restart TODO.
@returns {Boolean} Always returns True.
"""
self.__force = force
self.__restart = restart
return True
def getModes(self):
"""Get the Windows closing options.
@return {Tuple} Two index tuple containing Boolean values for
force and restart modes. In all cases, a value of True
represents that mode is enabled and False disabled.
"""
return (self.__force, self.__restart)
def main():
"""Basic temporary UI until TODO GUI is implemented."""
os.system("title {0} v{1}".format(const.appName, const.version))
timer = ShutdownTimer()
print("""
Enter the time you want the computer to {0}.
Use the 24-hour system in the following format: "HH:MM".
Submit a "q" to exit.""".format(timer.verbs[0]))
offTime = input("\n\n> ").lower().strip()
# Quit program
if offTime == "q":
raise SystemExit(0)
# The user's time was successfully set
if timer.setTime(offTime):
timer.saveConfig()
timer.start()
if __name__ == "__main__":
main()
| gpl-3.0 | 3,401,676,235,553,052,000 | 30.160428 | 80 | 0.56487 | false |
wschaeferB/autopsy | setupSleuthkitBranch.py | 5 | 4246 | # This python script is used to automatically set the branch in The Sleuth Kit repository
# for use in automated build environments.
#
# Basic idea is that it determines what Autopsy branch is being used and then checks out
# a corresponding TSK branch.
#
# TSK_HOME environment variable must be set for this to work.
import os
import sys
import subprocess
import xml.etree.ElementTree as ET
TSK_HOME=os.getenv("TSK_HOME",False)
ORIGIN_OWNER="origin"
DEVELOP_BRANCH='develop'
passed = 1
def gitSleuthkitCheckout(branch, branchOwner):
'''
Checks out a sleuthkit branch
Args:
branch: String, which branch to checkout
'''
# passed is a global variable that gets set to non-zero integer
# When an error occurs
global passed
#add the remotes
#if the branch owner was origin substitute in the name of that owner
if (branchOwner==ORIGIN_OWNER):
gitHubUser="sleuthkit"
else:
gitHubUser=branchOwner
checkout=['git','checkout','-b',branchOwner+'-'+branch]
print("Command run:" + " ".join(checkout))
passed = subprocess.call(checkout, stdout=sys.stdout,cwd=TSK_HOME)
cmd = ['git','pull', "/".join(["https://github.com", gitHubUser, "sleuthkit.git"]), branch]
if passed != 0: #0 would be success
#unable to create new branch return instead of pulling
return
print("Command run:" + " ".join(cmd))
passed = subprocess.call(cmd,stdout=sys.stdout,cwd=TSK_HOME)
if (passed == 0):
sys.exit() #exit if successful
else:
print("Branch: " + branch + " does not exist for github user: " + gitHubUser)
def parseXML(xmlFile):
'''
parses the TSKVersion.xml file for sleuthkit version
Args:
xmlFile: String, xml file to parse
'''
tree = ET.parse(xmlFile)
root = tree.getroot()
for child in root:
if child.attrib['name']=='TSK_VERSION':
return child.attrib['value']
return None
def main():
global passed
if not TSK_HOME:
print('Please set TSK_HOME env variable')
sys.exit(1)
# Get the Autopsy branch being used. Travis and Appveyor
# will tell us where a pull request is directed
TRAVIS=os.getenv("TRAVIS",False)
APPVEYOR=os.getenv("APPVEYOR",False)
if TRAVIS == "true":
CURRENT_BRANCH=os.getenv("TRAVIS_PULL_REQUEST_BRANCH","") #make default empty string which is same vaule used when not a PR
if (CURRENT_BRANCH != ""): #if it is a PR
BRANCH_OWNER=os.getenv("TRAVIS_PULL_REQUEST_SLUG", ORIGIN_OWNER+"/"+CURRENT_BRANCH).split('/')[0] #default owner is ORIGIN_OWNER
gitSleuthkitCheckout(CURRENT_BRANCH, BRANCH_OWNER)
TARGET_BRANCH=os.getenv("TRAVIS_BRANCH",DEVELOP_BRANCH)
elif APPVEYOR:
CURRENT_BRANCH=os.getenv("APPVEYOR_PULL_REQUEST_HEAD_REPO_BRANCH","") #make default same as value used by travis for readability of code
if (CURRENT_BRANCH != ""): #if it is a PR
BRANCH_OWNER=os.getenv("APPVEYOR_PULL_REQUEST_HEAD_REPO_NAME", ORIGIN_OWNER+"/"+CURRENT_BRANCH).split('/')[0] #default owner is ORIGIN_OWNER
gitSleuthkitCheckout(CURRENT_BRANCH, BRANCH_OWNER)
TARGET_BRANCH=os.getenv("APPVEYOR_REPO_BRANCH",DEVELOP_BRANCH)
else:
cmd=['git','rev-parse','--abbrev-ref','HEAD']
output = subprocess.check_output(cmd)
TARGET_BRANCH=output.strip()
# If we are in an Autopsy release branch, then use the
# info in TSKVersion.xml to find the corresponding TSK
# release branch. For other branches, we don't always
# trust that TSKVersion has been updated.
if TARGET_BRANCH.startswith('release'):
version = parseXML('TSKVersion.xml')
RELEASE_BRANCH = "release-"+version
#Check if the same user has a release branch which corresponds to this release branch
gitSleuthkitCheckout(RELEASE_BRANCH, ORIGIN_OWNER)
else:
gitSleuthkitCheckout(TARGET_BRANCH, ORIGIN_OWNER)
# Otherwise, default to origin develop
gitSleuthkitCheckout(DEVELOP_BRANCH, ORIGIN_OWNER)
if passed != 0:
print('Error checking out a Sleuth Kit branch')
sys.exit(1)
if __name__ == '__main__':
main()
| apache-2.0 | 700,748,941,106,427,100 | 38.682243 | 153 | 0.660386 | false |
live-clones/dolfin-adjoint | tests_dolfin/ode_tentusscher/tentusscher_2004_mcell.py | 4 | 19942 | from __future__ import division
import numpy as _np
list_types = (_np.ndarray, list)
inf = float("infinity")
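# NOTE: the names used below (integers, scalars, range_types, _all, VALUE_JUST,
# float_format, value_error, type_error) are normally supplied by the utilities
# this generated file was built with; minimal stand-ins are assumed here so the
# module is self-contained.
integers = (int, _np.integer)
scalars = (int, float, _np.integer, _np.floating)
range_types = scalars + list_types
_all = _np.all
def VALUE_JUST(text, width): return text.rjust(width)
def float_format(): return {"ff": "3g"}
def value_error(msg): raise ValueError(msg)
def type_error(msg): raise TypeError(msg)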
def value_formatter(value, width=0):
"""
Return a formatted string of a value
Arguments
---------
value : any
The value which is formatted
width : int
A min str length value
"""
ret = None
if isinstance(value, list_types):
if len(value)>4:
if isinstance(value[0], integers):
formatstr = "[%d, %d, ..., %d, %d]"
elif isinstance(value[0], scalars):
formatstr = "[%%.%(ff)s, %%.%(ff)s, ..., %%.%(ff)s, %%.%(ff)s]" % \
float_format()
else:
formatstr = "[%s, %s, ..., %s, %s]"
ret = formatstr % (value[0], value[1], value[-2], value[-1])
elif len(value) == 0:
ret = "[]"
else:
if isinstance(value[0], integers):
formatstr = "%d"
elif isinstance(value[0], scalars):
formatstr = "%%.%(ff)s" % float_format()
else:
formatstr = "%s"
formatstr = "[%s]" % (", ".join(formatstr for i in range(len(value))) )
ret = formatstr % tuple(value)
elif isinstance(value, float):
if value == inf:
ret = "\xe2\x88\x9e"
elif value == -inf:
ret = "-\xe2\x88\x9e"
elif isinstance(value, str):
ret = repr(value)
if ret is None:
ret = str(value)
if width == 0:
return ret
return VALUE_JUST(ret, width)
class Range(object):
"""
A simple class to help check that a given value is within a certain range
"""
def __init__(self, ge=None, le=None, gt=None, lt=None):
"""
Create a Range
Arguments
---------
ge : scalar (optional)
Greater than or equal, range control of argument
le : scalar (optional)
Lesser than or equal, range control of argument
gt : scalar (optional)
Greater than, range control of argument
lt : scalar (optional)
Lesser than, range control of argument
"""
ops = [ge, gt, le, lt]
opnames = ["ge", "gt", "le", "lt"]
# Checking valid combinations of kwargs
if le is not None and lt is not None:
value_error("Cannot create a 'Range' including "\
"both 'le' and 'lt'")
if ge is not None and gt is not None:
value_error("Cannot create a 'Range' including "\
"both 'ge' and 'gt'")
# Checking valid types for 'RangeChecks'
for op, opname in zip(ops, opnames):
if not (op is None or isinstance(op, scalars)):
type_error("expected a scalar for the '%s' arg" % opname)
# get limits
minval = gt if gt is not None else ge if ge is not None else -inf
maxval = lt if lt is not None else le if le is not None else inf
if minval > maxval:
value_error("expected the maxval to be larger than minval")
# Dict for test and repr
range_formats = {}
range_formats["minop"] = ">=" if gt is None else ">"
range_formats["maxop"] = "<=" if lt is None else "<"
range_formats["minvalue"] = str(minval)
range_formats["maxvalue"] = str(maxval)
# Dict for pretty print
range_formats["minop_format"] = "[" if gt is None else "("
range_formats["maxop_format"] = "]" if lt is None else ")"
range_formats["minformat"] = value_formatter(minval)
range_formats["maxformat"] = value_formatter(maxval)
self.range_formats = range_formats
self.range_eval_str = "lambda value : _all(value %(minop)s %(minvalue)s) "\
"and _all(value %(maxop)s %(maxvalue)s)"%\
range_formats
self._in_range = eval(self.range_eval_str)
# Define some string used for pretty print
self._range_str = "%(minop_format)s%(minformat)s, "\
"%(maxformat)s%(maxop_format)s" % range_formats
self._in_str = "%%s \xe2\x88\x88 %s" % self._range_str
self._not_in_str = "%%s \xe2\x88\x89 %s" % self._range_str
self.arg_repr_str = ", ".join("%s=%s" % (opname, op) \
for op, opname in zip(ops, opnames) \
if op is not None)
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, self.arg_repr_str)
def __str__(self):
return self._range_str
def __eq__(self, other):
return isinstance(other, self.__class__) and \
self._in_str == other._in_str
def __contains__(self, value):
"""
Return True if value is in range
Arguments
---------
value : scalar%s
A value to be used in checking range
""" % ("" if _np is None else " and np.ndarray")
if not isinstance(value, range_types):
type_error("only scalars%s can be ranged checked" % \
("" if _np is None else " and np.ndarray"))
return self._in_range(value)
def format(self, value, width=0):
"""
Return a formatted range check of the value
Arguments
---------
value : scalar
A value to be used in checking range
width : int
A min str length value
"""
in_range = self.__contains__(value)
if value in self:
return self.format_in(value, width)
return self.format_not_in(value, width)
def format_in(self, value, width=0):
"""
Return a formatted range check
Arguments
---------
value : scalar
A value to be used in checking range
width : int
A min str length value
"""
return self._in_str % value_formatter(value, width)
def format_not_in(self, value, width=0):
"""
Return a formatted range check
Arguments
---------
value : scalar
A value to be used in checking range
width : int
A min str length value
"""
return self._not_in_str % value_formatter(value, width)
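# Example (assumed usage): a Range supports membership tests and formatted
# diagnostics, e.g.
#   r = Range(ge=0, lt=60)
#   30 in r       # True
#   str(r)        # "[0, 60)"
#   r.format(75)  # reports that 75 is outside [0, 60)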
def init_values(**values):
"""
Init values
"""
# Imports
import dolfin
# Init values
# Xr1=0, Xr2=1, Xs=0, m=0, h=0.75, j=0.75, d=0, f=1, fCa=1, s=1, r=0,
# Ca_SR=0.2, Ca_i=0.0002, g=1, Na_i=11.6, V=-86.2, K_i=138.3
init_values = [0, 1, 0, 0, 0.75, 0.75, 0, 1, 1, 1, 0, 0.2, 0.0002, 1,\
11.6, -86.2, 138.3]
# State indices and limit checker
state_ind = dict(Xr1=(0, Range()), Xr2=(1, Range()), Xs=(2, Range()),\
m=(3, Range()), h=(4, Range()), j=(5, Range()), d=(6, Range()), f=(7,\
Range()), fCa=(8, Range()), s=(9, Range()), r=(10, Range()),\
Ca_SR=(11, Range()), Ca_i=(12, Range()), g=(13, Range()), Na_i=(14,\
Range()), V=(15, Range()), K_i=(16, Range()))
for state_name, value in values.items():
if state_name not in state_ind:
raise ValueError("{{0}} is not a state.".format(state_name))
ind, range = state_ind[state_name]
if value not in range:
raise ValueError("While setting '{0}' {1}".format(state_name,\
range.format_not_in(value)))
# Assign value
init_values[ind] = value
init_values = dolfin.Constant(tuple(init_values))
return init_values
def default_parameters(**values):
"""
Parameter values
"""
# Imports
import dolfin
# Param values
# P_kna=0.03, g_K1=5.405, g_Kr=0.096, g_Ks=0.062, g_Na=14.838,
# g_bna=0.00029, g_CaL=0.000175, g_bca=0.000592, g_to=0.294,
# K_mNa=40, K_mk=1, P_NaK=1.362, K_NaCa=1000, K_sat=0.1,
# Km_Ca=1.38, Km_Nai=87.5, alpha=2.5, gamma=0.35, K_pCa=0.0005,
# g_pCa=0.825, g_pK=0.0146, Buf_c=0.15, Buf_sr=10, Ca_o=2,
# K_buf_c=0.001, K_buf_sr=0.3, K_up=0.00025, V_leak=8e-05,
# V_sr=0.001094, Vmax_up=0.000425, a_rel=0.016464, b_rel=0.25,
# c_rel=0.008232, tau_g=2, Na_o=140, Cm=0.185, F=96485.3415,
# R=8314.472, T=310, V_c=0.016404, stim_amplitude=0,
# stim_duration=1, stim_period=1000, stim_start=1, K_o=5.4
param_values = [0.03, 5.405, 0.096, 0.062, 14.838, 0.00029, 0.000175,\
0.000592, 0.294, 40, 1, 1.362, 1000, 0.1, 1.38, 87.5, 2.5, 0.35,\
0.0005, 0.825, 0.0146, 0.15, 10, 2, 0.001, 0.3, 0.00025, 8e-05,\
0.001094, 0.000425, 0.016464, 0.25, 0.008232, 2, 140, 0.185,\
96485.3415, 8314.472, 310, 0.016404, 0, 1, 1000, 1, 5.4]
# Parameter indices and limit checker
param_ind = dict(P_kna=(0, Range()), g_K1=(1, Range()), g_Kr=(2,\
Range()), g_Ks=(3, Range()), g_Na=(4, Range()), g_bna=(5, Range()),\
g_CaL=(6, Range()), g_bca=(7, Range()), g_to=(8, Range()), K_mNa=(9,\
Range()), K_mk=(10, Range()), P_NaK=(11, Range()), K_NaCa=(12,\
Range()), K_sat=(13, Range()), Km_Ca=(14, Range()), Km_Nai=(15,\
Range()), alpha=(16, Range()), gamma=(17, Range()), K_pCa=(18,\
Range()), g_pCa=(19, Range()), g_pK=(20, Range()), Buf_c=(21,\
Range()), Buf_sr=(22, Range()), Ca_o=(23, Range()), K_buf_c=(24,\
Range()), K_buf_sr=(25, Range()), K_up=(26, Range()), V_leak=(27,\
Range()), V_sr=(28, Range()), Vmax_up=(29, Range()), a_rel=(30,\
Range()), b_rel=(31, Range()), c_rel=(32, Range()), tau_g=(33,\
Range()), Na_o=(34, Range()), Cm=(35, Range()), F=(36, Range()),\
R=(37, Range()), T=(38, Range()), V_c=(39, Range()),\
stim_amplitude=(40, Range()), stim_duration=(41, Range()),\
stim_period=(42, Range()), stim_start=(43, Range()), K_o=(44,\
Range()))
for param_name, value in values.items():
if param_name not in param_ind:
raise ValueError("{{0}} is not a param".format(param_name))
ind, range = param_ind[param_name]
if value not in range:
raise ValueError("While setting '{0}' {1}".format(param_name,\
range.format_not_in(value)))
# Assign value
param_values[ind] = value
param_values = dolfin.Constant(tuple(param_values))
return param_values
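# Example (assumed usage): override a single parameter while keeping the other
# defaults, e.g. a reduced fast sodium conductance:
#   params = default_parameters(g_Na=10.0)
#   states = init_values(V=-80.0)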
def rhs(states, time, parameters, dy=None):
"""
Compute right hand side
"""
# Imports
import ufl
import dolfin
# Assign states
assert(isinstance(states, dolfin.Function))
assert(states.function_space().depth() == 1)
assert(states.function_space().num_sub_spaces() == 17)
Xr1, Xr2, Xs, m, h, j, d, f, fCa, s, r, Ca_SR, Ca_i, g, Na_i, V, K_i =\
dolfin.split(states)
# Assign parameters
assert(isinstance(parameters, (dolfin.Function, dolfin.Constant)))
if isinstance(parameters, dolfin.Function):
assert(parameters.function_space().depth() == 1)
assert(parameters.function_space().num_sub_spaces() == 45)
else:
assert(parameters.value_size() == 45)
P_kna, g_K1, g_Kr, g_Ks, g_Na, g_bna, g_CaL, g_bca, g_to, K_mNa, K_mk,\
P_NaK, K_NaCa, K_sat, Km_Ca, Km_Nai, alpha, gamma, K_pCa, g_pCa,\
g_pK, Buf_c, Buf_sr, Ca_o, K_buf_c, K_buf_sr, K_up, V_leak, V_sr,\
Vmax_up, a_rel, b_rel, c_rel, tau_g, Na_o, Cm, F, R, T, V_c,\
stim_amplitude, stim_duration, stim_period, stim_start, K_o =\
dolfin.split(parameters)
# Reversal potentials
E_Na = R*T*ufl.ln(Na_o/Na_i)/F
E_K = R*T*ufl.ln(K_o/K_i)/F
E_Ks = R*T*ufl.ln((Na_o*P_kna + K_o)/(Na_i*P_kna + K_i))/F
E_Ca = 0.5*R*T*ufl.ln(Ca_o/Ca_i)/F
# Inward rectifier potassium current
alpha_K1 = 0.1/(1.0 + 6.14421235332821e-6*ufl.exp(0.06*V - 0.06*E_K))
beta_K1 = (3.06060402008027*ufl.exp(0.0002*V - 0.0002*E_K) +\
0.367879441171442*ufl.exp(0.1*V - 0.1*E_K))/(1.0 + ufl.exp(0.5*E_K -\
0.5*V))
xK1_inf = alpha_K1/(alpha_K1 + beta_K1)
i_K1 = 0.430331482911935*ufl.sqrt(K_o)*(-E_K + V)*g_K1*xK1_inf
# Rapid time dependent potassium current
i_Kr = 0.430331482911935*ufl.sqrt(K_o)*(-E_K + V)*Xr1*Xr2*g_Kr
# Rapid time dependent potassium current xr1 gate
xr1_inf = 1.0/(1.0 + 0.0243728440732796*ufl.exp(-0.142857142857143*V))
alpha_xr1 = 450.0/(1.0 + ufl.exp(-9/2 - V/10.0))
beta_xr1 = 6.0/(1.0 + 13.5813245225782*ufl.exp(0.0869565217391304*V))
tau_xr1 = alpha_xr1*beta_xr1
# Rapid time dependent potassium current xr2 gate
xr2_inf = 1.0/(1.0 + 39.1212839981532*ufl.exp(0.0416666666666667*V))
alpha_xr2 = 3.0/(1.0 + 0.0497870683678639*ufl.exp(-0.05*V))
beta_xr2 = 1.12/(1.0 + 0.0497870683678639*ufl.exp(0.05*V))
tau_xr2 = alpha_xr2*beta_xr2
# Slow time dependent potassium current
i_Ks = (Xs*Xs)*(V - E_Ks)*g_Ks
# Slow time dependent potassium current xs gate
xs_inf = 1.0/(1.0 + 0.69967253737513*ufl.exp(-0.0714285714285714*V))
alpha_xs = 1100.0/ufl.sqrt(1.0 +\
0.188875602837562*ufl.exp(-0.166666666666667*V))
beta_xs = 1.0/(1.0 + 0.0497870683678639*ufl.exp(0.05*V))
tau_xs = alpha_xs*beta_xs
# Fast sodium current
i_Na = (m*m*m)*(-E_Na + V)*g_Na*h*j
# Fast sodium current m gate
m_inf = 1.0/((1.0 +\
0.00184221158116513*ufl.exp(-0.110741971207087*V))*(1.0 +\
0.00184221158116513*ufl.exp(-0.110741971207087*V)))
alpha_m = 1.0/(1.0 + ufl.exp(-12.0 - V/5.0))
beta_m = 0.1/(1.0 + 0.778800783071405*ufl.exp(0.005*V)) + 0.1/(1.0 +\
ufl.exp(7.0 + V/5.0))
tau_m = alpha_m*beta_m
# Fast sodium current h gate
h_inf = 1.0/((1.0 + 15212.5932856544*ufl.exp(0.134589502018843*V))*(1.0 +\
15212.5932856544*ufl.exp(0.134589502018843*V)))
alpha_h = 4.43126792958051e-7*ufl.exp(-0.147058823529412*V)/(1.0 +\
2.3538526683702e+17*ufl.exp(1.0*V))
beta_h = (310000.0*ufl.exp(0.3485*V) + 2.7*ufl.exp(0.079*V))/(1.0 +\
2.3538526683702e+17*ufl.exp(1.0*V)) + 0.77*(1.0 - 1.0/(1.0 +\
2.3538526683702e+17*ufl.exp(1.0*V)))/(0.13 +\
0.0497581410839387*ufl.exp(-0.0900900900900901*V))
tau_h = 1.0/(alpha_h + beta_h)
# Fast sodium current j gate
j_inf = 1.0/((1.0 + 15212.5932856544*ufl.exp(0.134589502018843*V))*(1.0 +\
15212.5932856544*ufl.exp(0.134589502018843*V)))
alpha_j = (37.78 + V)*(-6.948e-6*ufl.exp(-0.04391*V) -\
25428.0*ufl.exp(0.2444*V))/((1.0 +\
2.3538526683702e+17*ufl.exp(1.0*V))*(1.0 +\
50262745825.954*ufl.exp(0.311*V)))
beta_j = 0.6*(1.0 - 1.0/(1.0 +\
2.3538526683702e+17*ufl.exp(1.0*V)))*ufl.exp(0.057*V)/(1.0 +\
0.0407622039783662*ufl.exp(-0.1*V)) +\
0.02424*ufl.exp(-0.01052*V)/((1.0 +\
2.3538526683702e+17*ufl.exp(1.0*V))*(1.0 +\
0.00396086833990426*ufl.exp(-0.1378*V)))
tau_j = 1.0/(alpha_j + beta_j)
# Sodium background current
i_b_Na = (-E_Na + V)*g_bna
# L type ca current
i_CaL = 4.0*(F*F)*(-0.341*Ca_o +\
Ca_i*ufl.exp(2.0*F*V/(R*T)))*V*d*f*fCa*g_CaL/((-1.0 +\
ufl.exp(2.0*F*V/(R*T)))*R*T)
# L type ca current d gate
d_inf = 1.0/(1.0 + 0.513417119032592*ufl.exp(-0.133333333333333*V))
alpha_d = 0.25 + 1.4/(1.0 +\
0.0677244716592409*ufl.exp(-0.0769230769230769*V))
beta_d = 1.4/(1.0 + ufl.exp(1.0 + V/5.0))
gamma_d = 1.0/(1.0 + 12.1824939607035*ufl.exp(-0.05*V))
tau_d = gamma_d + alpha_d*beta_d
# L type ca current f gate
f_inf = 1.0/(1.0 + 17.4117080633276*ufl.exp(0.142857142857143*V))
tau_f = 80.0 + 165.0/(1.0 + ufl.exp(5/2 - V/10.0)) +\
1125.0*ufl.exp(-0.00416666666666667*((27.0 + V)*(27.0 + V)))
# L type ca current fca gate
alpha_fCa = 1.0/(1.0 + 8.03402376701711e+27*ufl.elem_pow(Ca_i, 8.0))
beta_fCa = 0.1/(1.0 + 0.00673794699908547*ufl.exp(10000.0*Ca_i))
gama_fCa = 0.2/(1.0 + 0.391605626676799*ufl.exp(1250.0*Ca_i))
fCa_inf = 0.157534246575342 + 0.684931506849315*gama_fCa +\
0.684931506849315*beta_fCa + 0.684931506849315*alpha_fCa
tau_fCa = 2.0
d_fCa = (-fCa + fCa_inf)/tau_fCa
# Calcium background current
i_b_Ca = (V - E_Ca)*g_bca
# Transient outward current
i_to = (-E_K + V)*g_to*r*s
# Transient outward current s gate
s_inf = 1.0/(1.0 + ufl.exp(4.0 + V/5.0))
tau_s = 3.0 + 85.0*ufl.exp(-0.003125*((45.0 + V)*(45.0 + V))) + 5.0/(1.0 +\
ufl.exp(-4.0 + V/5.0))
# Transient outward current r gate
r_inf = 1.0/(1.0 + 28.0316248945261*ufl.exp(-0.166666666666667*V))
tau_r = 0.8 + 9.5*ufl.exp(-0.000555555555555556*((40.0 + V)*(40.0 + V)))
# Sodium potassium pump current
i_NaK = K_o*Na_i*P_NaK/((K_mk + K_o)*(Na_i + K_mNa)*(1.0 +\
0.0353*ufl.exp(-F*V/(R*T)) + 0.1245*ufl.exp(-0.1*F*V/(R*T))))
# Sodium calcium exchanger current
i_NaCa = (-(Na_o*Na_o*Na_o)*Ca_i*alpha*ufl.exp((-1.0 + gamma)*F*V/(R*T))\
+ (Na_i*Na_i*Na_i)*Ca_o*ufl.exp(F*V*gamma/(R*T)))*K_NaCa/((1.0 +\
K_sat*ufl.exp((-1.0 + gamma)*F*V/(R*T)))*((Na_o*Na_o*Na_o) +\
(Km_Nai*Km_Nai*Km_Nai))*(Km_Ca + Ca_o))
# Calcium pump current
i_p_Ca = Ca_i*g_pCa/(K_pCa + Ca_i)
# Potassium pump current
i_p_K = (-E_K + V)*g_pK/(1.0 +\
65.4052157419383*ufl.exp(-0.167224080267559*V))
# Calcium dynamics
i_rel = ((Ca_SR*Ca_SR)*a_rel/((Ca_SR*Ca_SR) + (b_rel*b_rel)) + c_rel)*d*g
i_up = Vmax_up/(1.0 + (K_up*K_up)/(Ca_i*Ca_i))
i_leak = (-Ca_i + Ca_SR)*V_leak
g_inf = (1.0 - 1.0/(1.0 + 0.0301973834223185*ufl.exp(10000.0*Ca_i)))/(1.0 +\
1.97201988740492e+55*ufl.elem_pow(Ca_i, 16.0)) + 1.0/((1.0 +\
0.0301973834223185*ufl.exp(10000.0*Ca_i))*(1.0 +\
5.43991024148102e+20*ufl.elem_pow(Ca_i, 6.0)))
d_g = (-g + g_inf)/tau_g
Ca_i_bufc = 1.0/(1.0 + Buf_c*K_buf_c/((K_buf_c + Ca_i)*(K_buf_c + Ca_i)))
Ca_sr_bufsr = 1.0/(1.0 + Buf_sr*K_buf_sr/((K_buf_sr + Ca_SR)*(K_buf_sr +\
Ca_SR)))
# Sodium dynamics
# Membrane
i_Stim = -(1.0 - 1.0/(1.0 + ufl.exp(-5.0*stim_start +\
5.0*time)))*stim_amplitude/(1.0 + ufl.exp(-5.0*stim_start + 5.0*time\
- 5.0*stim_duration))
# Potassium dynamics
# The ODE system: 17 states
# Init test function
_v = dolfin.TestFunction(states.function_space())
# Derivative for state Xr1
dy = ((-Xr1 + xr1_inf)/tau_xr1)*_v[0]
# Derivative for state Xr2
dy += ((-Xr2 + xr2_inf)/tau_xr2)*_v[1]
# Derivative for state Xs
dy += ((-Xs + xs_inf)/tau_xs)*_v[2]
# Derivative for state m
dy += ((-m + m_inf)/tau_m)*_v[3]
# Derivative for state h
dy += ((-h + h_inf)/tau_h)*_v[4]
# Derivative for state j
dy += ((j_inf - j)/tau_j)*_v[5]
# Derivative for state d
dy += ((d_inf - d)/tau_d)*_v[6]
# Derivative for state f
dy += ((-f + f_inf)/tau_f)*_v[7]
# Derivative for state fCa
dy += ((1.0 - 1.0/((1.0 + ufl.exp(60.0 + V))*(1.0 + ufl.exp(-10.0*fCa +\
10.0*fCa_inf))))*d_fCa)*_v[8]
# Derivative for state s
dy += ((-s + s_inf)/tau_s)*_v[9]
# Derivative for state r
dy += ((-r + r_inf)/tau_r)*_v[10]
# Derivative for state Ca_SR
dy += ((-i_leak + i_up - i_rel)*Ca_sr_bufsr*V_c/V_sr)*_v[11]
# Derivative for state Ca_i
dy += ((-i_up - (i_CaL + i_p_Ca + i_b_Ca - 2.0*i_NaCa)*Cm/(2.0*F*V_c) +\
i_leak + i_rel)*Ca_i_bufc)*_v[12]
# Derivative for state g
dy += ((1.0 - 1.0/((1.0 + ufl.exp(60.0 + V))*(1.0 + ufl.exp(-10.0*g +\
10.0*g_inf))))*d_g)*_v[13]
# Derivative for state Na_i
dy += ((-3.0*i_NaK - 3.0*i_NaCa - i_Na - i_b_Na)*Cm/(F*V_c))*_v[14]
# Derivative for state V
dy += (-i_Ks - i_to - i_Kr - i_p_K - i_NaK - i_NaCa - i_Na - i_p_Ca -\
i_b_Na - i_CaL - i_Stim - i_K1 - i_b_Ca)*_v[15]
# Derivative for state K_i
dy += ((-i_Ks - i_to - i_Kr - i_p_K - i_Stim - i_K1 +\
2.0*i_NaK)*Cm/(F*V_c))*_v[16]
# Return dy
return dy
| lgpl-3.0 | -6,742,335,373,456,474,000 | 35.793358 | 83 | 0.533547 | false |
magiclab/blender_drone_toolkit | addons/magiclab_uav_io/volume_import.py | 1 | 1884 | import csv
import bmesh
import bpy
from io_mesh_ply import import_ply
from bpy_extras.io_utils import ImportHelper
from bpy.props import StringProperty
from bpy.types import Operator
def inport_csv_boxes(filename):
""" import original csv format bounds as a pointcloud """
cos = []
with open(filename) as csvfile:
spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
for row in spamreader:
cos.append([float(i) for i in row])
mesh = bpy.data.meshes.new("volume")
bm = bmesh.new()
for v_co in cos:
bm.verts.new(v_co)
bm.verts.ensure_lookup_table()
bm.to_mesh(mesh)
ob = bpy.data.objects.new("volume", mesh)
bpy.context.scene.objects.link(ob)
ob.layers = bpy.context.scene.layers
return ob
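# Illustrative note (not part of the original add-on): inport_csv_boxes expects
# one vertex per CSV row, e.g.
#   1.25,-0.50,2.00
#   1.25,0.50,2.00
# so each row becomes a vertex of the "volume" point cloud:
#   volume_ob = inport_csv_boxes("/path/to/capture_bounds.csv")  # path is made up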
class ImportCaptureVolume(Operator, ImportHelper):
""" Import Captured Volume as a ply mesh, cleanup ancap display """
bl_idname = "object.magiclab_volume_import"
bl_label = "Magiclab Capture Volume Import"
filename_ext = ".ply"
filter_glob = StringProperty(
default="*.ply",
options={'HIDDEN'},
maxlen=255,
)
def execute(self, context):
import_ply.load_ply(self.filepath) # we could use load poly mesh instead
ob = context.scene.objects.active # active object
# cleanups
bpy.ops.object.editmode_toggle()
bpy.ops.mesh.remove_doubles()
bpy.ops.mesh.normals_make_consistent()
bpy.ops.object.editmode_toggle()
# assign a material
mat = ob.active_material = bpy.data.materials.new("UAVs_hull")
mat.alpha = 0.1
mat.game_settings.alpha_blend = "ALPHA"
ob.show_transparent = True
return {'FINISHED'}
def register():
bpy.utils.register_class(ImportCaptureVolume)
def unregister():
bpy.utils.unregister_class(ImportCaptureVolume)
| gpl-3.0 | 1,093,681,672,572,365,400 | 28.4375 | 80 | 0.651805 | false |
weera00/xbmc | addons/service.xbmc.versioncheck/lib/jsoninterface.py | 89 | 1991 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Team-XBMC
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import sys
import xbmc
import xbmcaddon
import xbmcvfs
import lib.common
from lib.common import log
__addonpath__ = lib.common.__addonpath__
import json as jsoninterface
def get_installedversion():
# retrieve current installed version
json_query = xbmc.executeJSONRPC('{ "jsonrpc": "2.0", "method": "Application.GetProperties", "params": {"properties": ["version", "name"]}, "id": 1 }')
json_query = unicode(json_query, 'utf-8', errors='ignore')
json_query = jsoninterface.loads(json_query)
version_installed = []
if json_query.has_key('result') and json_query['result'].has_key('version'):
version_installed = json_query['result']['version']
return version_installed
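# For reference (illustrative, based on Kodi/XBMC JSON-RPC behaviour): the
# returned version value is typically a dict such as
#   {"major": 13, "minor": 2, "revision": "...", "tag": "stable"}
# while an empty list is returned if the query fails.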
def get_versionfilelist():
# retrieve versionlists from supplied version file
version_file = os.path.join(__addonpath__, 'resources/versions.txt')
# Eden didn't have xbmcvfs.File()
if xbmcaddon.Addon('xbmc.addon').getAddonInfo('version') < "11.9.3":
file = open(version_file, 'r')
else:
file = xbmcvfs.File(version_file)
data = file.read()
file.close()
version_query = unicode(data, 'utf-8', errors='ignore')
version_query = jsoninterface.loads(version_query)
return version_query | gpl-2.0 | 7,702,316,556,259,414,000 | 36.584906 | 155 | 0.690105 | false |
archetipo/server-tools | web_context_tunnel/__openerp__.py | 2 | 4113 | {
'name': 'Web Context Tunnel',
'category': 'Hidden',
'author': "Akretion,Odoo Community Association (OCA)",
'license': 'AGPL-3',
'description': """
Web Context Tunnel.
===================
The problem with OpenERP on_changes
-----------------------------------
OpenERP passes on_change Ajax event arguments positionally.
This is annoying as modules often need to pass extra arguments
that are not present in the base on_change signatures. As soon as two modules
try to alter this signature to add their extra arguments, they become incompatible
with each other unless some extra glue module makes them compatible again by
taking all extra arguments into account. But this leads to a combinatorial
explosion to make modules compatible again.
The solution
------------
This module provides a simple workaround that works in most cases.
It works as long as the base on_change is designed to pass the context
argument. If it isn't, this module won't help and you should go the old way;
in any case an on_change that doesn't pass the context argument is bad
practice, and you can certainly rant about these bad on_changes to get the
context added to their arguments.
So, for an on_change that passes the context, how does this module work?
Well, OpenERP already has an elegant solution for an extension module to alter
an XML attribute: put an extension point in the view using
position="attributes" and then redefine the attribute. That is already used at
several places to replace the "context" attribute that the client will send to
the server.
The idea here is to wrap the extra arguments needed by your on_change inside
that context dictionary, just as if it were a regular Python kwargs dict. That
context is then automatically propagated across the on_change call chain,
regardless of module order and without any need to hack any on_change
signature.
The issue with just position="attributes" and redefining the context, is that
again, if two independent modules redefine the context, they are incompatible
unless a third module accounts for both of them.
But with this module, an extension point can now use position="attributes" and
instead of redefining the "context" attribute, you will now just define a new
"context_foo" attribute this way:
<attribute name="context_foo">{'my_extra_field': my_extra_field}</attribute>.
This module modifies the web client in such a way that before sending the Ajax
on_change event request to the server, all the node attributes starting with
"context" are merged into a single context dictionnary, keeping the keys and
values from all extensions. In the rare case a module really wants to override
the value in context, then it needs to still override the original context
attribute (or the other original attribute).
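For instance, with purely illustrative module and field names, a first
extension module could add:
<attribute name="context_partner">{'partner_country': partner_country}</attribute>
while a second, independent module adds:
<attribute name="context_pricing">{'pricelist_id': pricelist_id}</attribute>
and the web client will send the on_change a single merged context containing
both partner_country and pricelist_id (plus the original context keys),
without either module having to know about the other.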
And of course, if you call your on_change through the API or a webservice
instead of using the web client, simply make sure you wrap the required extra
arguments in the context dictionary.
Tests
-----
This module comes with a simple test in static/test/context_tunnel.js.
To run it, open the page /web/tests?mod=web_context_tunnel in your browser
as explained here https://doc.openerp.com/trunk/web/testing
It should also be picked up by the Python test suite when testing with PhantomJS.
As for testing modules using web_context_tunnel with YAML, yes it's possible.
You just need to mimic the new web-client behavior manually by
ensuring you add the extra context keys you will need later in your on_change.
For instance, before the on_change is called, you can alter the context with
a !python statement like context.update({'my_extra_field': my_extra_field}).
You can see an example of module conversion to use web_context_tunnel here
for instance:
https://github.com/openerpbrasil/l10n_br_core/commit/33065366726a83dbc69b9f0031c81d82362fbfae
""",
'version': '2.0',
'depends': ['web'],
'js': ['static/src/js/context_tunnel.js'],
'test': [
'static/test/context_tunnel.js',
],
'css': [],
'auto_install': False,
'installable': True,
'web_preload': False,
}
| agpl-3.0 | 6,084,684,503,951,155,000 | 43.225806 | 93 | 0.7593 | false |
thinkWhere/Roadnet | tests/integration/test_params_and_settings.py | 1 | 10845 | from textwrap import dedent
import unittest
import xml.etree.ElementTree as ETree
from mock import call, patch, MagicMock, mock_open, sentinel
from Roadnet.tests.integration.roadnet_test_cases import QgisTestCase
import Roadnet.params_and_settings as p_and_s
class TestParamsFileHandler(unittest.TestCase):
def setUp(self):
xml_string = """
<TwParams>
<Application id="Roadnet">
<Parameter name="RNDataStorePath">/home/tw-johns/Roadnet/database_files</Parameter>
<Parameter name="DbName">roadnet_demo.sqlite</Parameter>
<Parameter name="RNPolyEdit">true</Parameter>
<Parameter name="RNsrwr">true</Parameter>
<Parameter name="Language">ENG</Parameter>
<Parameter name="UserName">thinkwhere</Parameter>
<Parameter name="Blank"></Parameter>
</Application>
</TwParams>"""
xml_string = dedent(xml_string)
self.test_root = ETree.fromstring(xml_string)
def test_init(self):
file_path = 'testtesttest'
pfh = p_and_s.ParamsFileHandler(file_path)
self.assertEqual(
pfh.xmlfile_path, file_path,
"xmlfile_path was not {} ({})".format(
file_path, pfh.xmlfile_path))
@patch.object(p_and_s.ParamsFileHandler, '_update_tree')
def test_read_to_dictionary(self, mock_update_tree):
file_path = 'testtesttest'
pfh = p_and_s.ParamsFileHandler(file_path)
pfh.root = self.test_root
params = pfh.read_to_dictionary()
expected = {"RNDataStorePath": '/home/tw-johns/Roadnet/database_files',
"DbName": 'roadnet_demo.sqlite',
"RNPolyEdit": 'true',
"RNsrwr": 'true',
"Language": 'ENG',
"UserName": 'thinkwhere'}
mock_update_tree.assert_called_once_with()
for key in expected:
self.assertEqual(
params[key], expected[key],
"Wrong value read for params {}: {}, not {}".format(
key, params[key], expected[key]))
@patch.object(p_and_s.ParamsFileHandler, '_update_tree')
def test_update_xml_file(self, mock_update_tree):
file_path = 'testtesttest'
pfh = p_and_s.ParamsFileHandler(file_path)
pfh.root = self.test_root
pfh.tree = MagicMock()
test_params = {"RNDataStorePath": '/home/tw-johns/Roadnet/database_files',
"DbName": 'roadnet_demo.sqlite',
"RNPolyEdit": 'true',
"Language": 'ENG',
"RNsrwr": 'true',
"Blank": 'true',
"UserName": 'thinkwhere',
"ShouldNotBeUsed": 'should not appear in output'}
m = mock_open()
with patch('Roadnet.params_and_settings.open', m, create=True):
pfh.update_xml_file(test_params)
# Check that the file is opened
m.assert_called_once_with(file_path, 'w')
mock_outfile = m()
# Check data is written to correct file
pfh.tree.assert_has_calls([call.write(mock_outfile)])
@patch.object(p_and_s.os.path, 'isfile')
@patch.object(p_and_s.rn_except.QMessageBoxWarningError, 'show_message_box')
@patch.object(p_and_s.ParamsFileHandler, 'read_to_dictionary')
def test_validate_missing_fields(self,
mock_read_dictionary,
mock_show_warning,
mock_isfile):
file_path = 'testtesttest'
test_params = {"RNDataStorePath": '/home/tw-johns/Roadnet/database_files',
"DbName": 'roadnet_demo.sqlite',
"RNPolyEdit": 'true',
"RNsrwr": 'true',
"Blank": 'true',
"UserName": 'thinkwhere',
"RAMP_output_directory": '',
"PreventOverlappingPolygons": 'true',
"ShouldNotBeUsed": 'should not appear in output'}
mock_isfile.return_value = True
mock_read_dictionary.return_value = test_params
pfh = p_and_s.ParamsFileHandler(file_path)
with self.assertRaises(p_and_s.rn_except.InvalidParamsKeysPopupError):
pfh.validate_params_file()
@patch.object(p_and_s.os.path, 'isfile')
@patch.object(p_and_s.rn_except.QMessageBoxWarningError, 'show_message_box')
@patch.object(p_and_s.ParamsFileHandler, 'read_to_dictionary')
def test_validate_extra_fields(self,
mock_read_dictionary,
mock_show_warning,
mock_isfile):
file_path = 'testtesttest'
test_params = {"RNDataStorePath": '/home/tw-johns/Roadnet/database_files',
"DbName": 'roadnet_demo.sqlite',
"RNPolyEdit": 'true',
"RNsrwr": 'true',
"Language": 'EN',
"RAMP": 'true',
"RAMP_output_directory": '',
"AutoSplitESUs": 'true',
"Blank": 'true',
"UserName": 'thinkwhere',
"PreventOverlappingPolygons": 'true',
"ShouldNotBeUsed": 'should not appear in output'}
mock_isfile.return_value = True
mock_read_dictionary.return_value = test_params
pfh = p_and_s.ParamsFileHandler(file_path)
with self.assertRaises(p_and_s.rn_except.ExtraParamsKeysPopupError):
pfh.validate_params_file()
@patch.object(p_and_s.os.path, 'isfile')
@patch.object(p_and_s.rn_except.QMessageBoxWarningError, 'show_message_box')
def test_validate_missing_file(self,
mock_show_warning,
mock_isfile):
file_path = 'testtesttest'
mock_isfile.return_value = False
pfh = p_and_s.ParamsFileHandler(file_path)
with self.assertRaises(p_and_s.rn_except.MissingParamsFilePopupError):
pfh.validate_params_file()
class TestSettingsDialogHandler(QgisTestCase):
@patch.object(p_and_s, 'SettingsDlg')
def test_settings_dialog_created(self, mock_dlg):
params = {'test': 123}
settings_dialog_handler = p_and_s.SettingsDialogHandler(params)
mock_dlg.assert_called_once_with()
@patch.object(p_and_s.SettingsDialogHandler,
'show_ramp_settings_changed_warning')
@patch.object(p_and_s.SettingsDialogHandler, 'get_params_from_checkboxes')
@patch.object(p_and_s, 'SettingsDlg')
def test_update_via_dialog(self, mock_dlg, mock_get_params, mock_warning):
checkbox_params = {"AutoSplitESUs": 'true',
"PreventOverlappingPolygons": 'false',
"RAMP": 'true'}
mock_get_params.return_value = checkbox_params
test_input = {"RNDataStorePath": '/home/tw-johns/Roadnet/database_files',
"DbName": 'roadnet_demo.sqlite',
"RNPolyEdit": 'true',
"RNsrwr": 'true',
"Language": 'ENG',
"UserName": 'thinkwhere',
"AutoSplitESUs": 'false',
"PreventOverlappingPolygons": 'true',
"RAMP": 'false'}
expected = {"RNDataStorePath": '/home/tw-johns/Roadnet/database_files',
"DbName": 'roadnet_demo.sqlite',
"RNPolyEdit": 'true',
"RNsrwr": 'true',
"Language": 'ENG',
"UserName": 'thinkwhere',
"AutoSplitESUs": 'true',
"PreventOverlappingPolygons": 'false',
"RAMP": 'true'}
settings_handler = p_and_s.SettingsDialogHandler(test_input)
params = settings_handler.show_dialog_and_update_params()
# Check results
for key in expected:
self.assertEqual(expected[key], params[key],
"{} parameter was not updated to {} ({})".format(
key, expected[key], params[key]))
mock_warning.assert_called_once_with(checkbox_params)
@patch.object(p_and_s, 'SettingsDlg')
def test_checkboxes_updated_from_params(self, mock_settings_dlg):
mock_dlg = MagicMock()
mock_settings_dlg.return_value = mock_dlg
test_params = {"RNDataStorePath": '/home/tw-johns/Roadnet/database_files',
"AutoSplitESUs": 'false',
"PreventOverlappingPolygons": 'true',
"RAMP": 'true'}
settings_dialog_handler = p_and_s.SettingsDialogHandler(test_params)
settings_dialog_handler.set_checkboxes_from_params()
expected_calls = [call.ui.esuCheckBox.setChecked(False),
call.ui.rdpolyCheckBox.setChecked(True),
call.ui.rampCheckBox.setChecked(True)]
mock_dlg.assert_has_calls(expected_calls, any_order=True)
@patch.object(p_and_s, 'SettingsDlg')
def test_checkboxes_updated_from_params_raises_value_error(
self, mock_settings_dlg):
mock_dlg = MagicMock()
mock_settings_dlg.return_value = mock_dlg
test_params = {"RNDataStorePath": '/home/tw-johns/Roadnet/database_files',
"AutoSplitESUs": 'false',
"PreventOverlappingPolygons": 'true',
"RAMP": 'test'}
with self.assertRaises(ValueError):
settings_dialog_handler = p_and_s.SettingsDialogHandler(test_params)
settings_dialog_handler.set_checkboxes_from_params()
@patch.object(p_and_s, 'SettingsDlg')
def test_get_params_from_checkboxes(self, mock_settings_dlg):
mock_dlg = MagicMock()
mock_dlg.ui.esuCheckBox.isChecked.return_value = True
mock_dlg.ui.rdpolyCheckBox.isChecked.return_value = False
mock_dlg.ui.rampCheckBox.isChecked.return_value = True
mock_settings_dlg.return_value = mock_dlg
expected = {"AutoSplitESUs": 'true',
"PreventOverlappingPolygons": 'false',
"RAMP": 'true'}
input_params = {'test': 123}
settings_dialog_handler = p_and_s.SettingsDialogHandler(input_params)
params = settings_dialog_handler.get_params_from_checkboxes()
for key in expected:
self.assertEqual(expected[key], params[key],
"{} checkbox was not {} ({})".format(
key, expected[key], params[key]))
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 6,235,684,351,119,655,000 | 45.148936 | 95 | 0.556385 | false |
HERA-Team/Monitor_and_Control | hera_mc/cm_sysutils.py | 1 | 26804 | # -*- mode: python; coding: utf-8 -*-
# Copyright 2018 the HERA Collaboration
# Licensed under the 2-clause BSD license.
"""Methods for handling locating correlator and various system aspects."""
from sqlalchemy import func, and_, or_
import numpy as np
from . import mc, cm_partconnect, cm_utils, cm_sysdef, cm_hookup
from . import geo_handling
class SystemInfo:
"""
Object containing system information, a convenience for the system info methods below.
Parameters
----------
stn : None or geo_handling object. If None, it initializes a class with empty lists.
Otherwise, it initializes based on the geo_handling object class.
Anything else will generate an error.
"""
sys_info = ['station_name', 'station_type_name', 'tile', 'datum', 'easting',
'northing', 'lon', 'lat',
'elevation', 'antenna_number', 'correlator_input', 'start_date',
'stop_date', 'epoch']
def __init__(self, stn=None):
if stn is None:
for s in self.sys_info:
setattr(self, s, [])
else:
for s in self.sys_info:
setattr(self, s, None)
try:
a = getattr(stn, s)
except AttributeError:
continue
setattr(self, s, a)
def update_arrays(self, stn):
"""
Will update the object based on the supplied station information.
Parameters
----------
stn : geo_handling object or None
Contains the init station information. If None, it will initial a blank object.
"""
if stn is None:
return
for s in self.sys_info:
try:
arr = getattr(self, s)
except AttributeError: # pragma: no cover
continue
arr.append(getattr(stn, s))
class Handling:
"""
Class to allow various manipulations of correlator inputs etc.
Parameters
----------
session : object
session on current database. If session is None, a new session
on the default database is created and used.
"""
def __init__(self, session=None):
if session is None: # pragma: no cover
db = mc.connect_to_mc_db(None)
self.session = db.sessionmaker()
else:
self.session = session
self.geo = geo_handling.Handling(self.session)
self.H = None
self.sysdef = cm_sysdef.Sysdef()
self.apriori_status_set = None
def close(self): # pragma: no cover
"""Close the session."""
self.session.close()
def cofa(self):
"""
Return the geographic information for the center-of-array.
Returns
-------
object
Geo object for the center-of-array (cofa)
"""
cofa = self.geo.cofa()
return cofa
def get_connected_stations(self, at_date, hookup_type=None):
"""
Return a list of class SystemInfo of all of the stations connected at_date.
Each location is returned class SystemInfo. Attributes are:
'station_name': name of station (string, e.g. 'HH27')
'station_type_name': type of station (type 'herahexe', etc)
'tile': UTM tile name (string, e.g. '34J'
'datum': UTM datum (string, e.g. 'WGS84')
'easting': station UTM easting (float)
'northing': station UTM northing (float)
'lon': station longitude (float)
'lat': station latitude (float)
'elevation': station elevation (float)
'antenna_number': antenna number (integer)
'correlator_input': correlator input for x (East) pol and y (North) pol
(string tuple-pair)
'timing': start and stop gps seconds for both pols
Parameters
----------
at_date : str, int
Date to check for connections. Anything intelligible by cm_utils.get_astropytime
hookup_type : str
Type of hookup to use (current observing system is 'parts_hera').
If 'None' it will determine which system it thinks it is based on
the part-type. The order in which it checks is specified in cm_sysdef.
Only change if you know you want a different system (like 'parts_paper').
Returns
-------
list
List of stations connected.
"""
at_date = cm_utils.get_astropytime(at_date)
HU = cm_hookup.Hookup(self.session)
hud = HU.get_hookup(hpn=cm_sysdef.hera_zone_prefixes, pol='all', at_date=at_date,
exact_match=False, use_cache=False, hookup_type=hookup_type)
station_conn = []
found_keys = list(hud.keys())
found_stations = [x.split(':')[0] for x in found_keys]
station_geo = self.geo.get_location(found_stations, at_date)
for i, key in enumerate(found_keys):
stn, rev = cm_utils.split_part_key(key)
ant_num = int(stn[2:])
station_info = SystemInfo(station_geo[i])
station_info.antenna_number = ant_num
current_hookup = hud[key].hookup
corr = {}
pe = {}
station_info.timing = {}
for ppkey, hu in current_hookup.items():
pol = ppkey[0].lower()
pe[pol] = hud[key].hookup_type[ppkey]
cind = self.sysdef.corr_index[pe[pol]] - 1 # The '- 1' makes it the downstream_part
try:
corr[pol] = "{}>{}".format(
hu[cind].downstream_input_port, hu[cind].downstream_part)
except IndexError: # pragma: no cover
corr[pol] = 'None'
station_info.timing[pol] = hud[key].timing[ppkey]
if corr['e'] == 'None' and corr['n'] == 'None':
continue
station_info.correlator_input = (str(corr['e']), str(corr['n']))
station_info.epoch = 'e:{}, n:{}'.format(pe['e'], pe['n'])
station_conn.append(station_info)
return station_conn
def get_cminfo_correlator(self, hookup_type=None):
"""
Return a dict with info needed by the correlator.
Note: This method requires pyuvdata
Parameters
----------
hookup_type : str or None
Type of hookup to use (current observing system is 'parts_hera').
If 'None' it will determine which system it thinks it is based on
the part-type. The order in which it checks is specified in cm_sysdef.
Only change if you know you want a different system (like 'parts_paper').
Default is None.
Returns
-------
dict
cm info formatted for the correlator.
Dict keys are:
'antenna_numbers': Antenna numbers (list of integers)
'antenna_names': Station names (we use antenna_names because that's
what they're called in data files) (list of strings)
'correlator_inputs': Correlator input strings for x/y (e/n)
polarizations (list of 2 element tuples of strings)
'antenna_positions': Antenna positions in relative ECEF coordinates
(list of 3-element vectors of floats)
'cm_version': CM git hash (string)
'cofa_lat': latitude of the center-of-array in degrees
'cofa_lon': longitude of the center-of-array in degrees
'cofa_alt': altitude of center-of-array in meters
"""
from pyuvdata import utils as uvutils
from . import cm_handling
cm_h = cm_handling.Handling(session=self.session)
cm_version = cm_h.get_cm_version()
cofa_loc = self.geo.cofa()[0]
cofa_xyz = uvutils.XYZ_from_LatLonAlt(cofa_loc.lat * np.pi / 180.,
cofa_loc.lon * np.pi / 180.,
cofa_loc.elevation)
stations_conn = self.get_connected_stations(at_date='now', hookup_type=hookup_type)
stn_arrays = SystemInfo()
for stn in stations_conn:
stn_arrays.update_arrays(stn)
# latitudes, longitudes output by get_connected_stations are in degrees
# XYZ_from_LatLonAlt wants radians
ecef_positions = uvutils.XYZ_from_LatLonAlt(np.array(stn_arrays.lat) * np.pi / 180.,
np.array(stn_arrays.lon) * np.pi / 180.,
stn_arrays.elevation)
rel_ecef_positions = ecef_positions - cofa_xyz
return {'antenna_numbers': stn_arrays.antenna_number,
# This is actually station names, not antenna names,
# but antenna_names is what it's called in pyuvdata
'antenna_names': stn_arrays.station_name,
# this is a tuple giving the f-engine names for x, y
'correlator_inputs': stn_arrays.correlator_input,
'antenna_positions': rel_ecef_positions.tolist(),
'cm_version': cm_version,
'cofa_lat': cofa_loc.lat,
'cofa_lon': cofa_loc.lon,
'cofa_alt': cofa_loc.elevation}
def get_part_at_station_from_type(self, stn, at_date, part_type, include_revs=False,
include_ports=False, hookup_type=None):
"""
Get the part number at a given station of a given part type.
E.g. find the 'post-amp' at station 'HH68'.
Parameters
----------
stn : str, list
Antenna number of format HHi where i is antenna number (string or list of strings)
at_date : str
Date at which connection is true, format 'YYYY-M-D' or 'now'
part_type : str
Part type to look for
include_revs : bool
Flag whether to include all revisions. Default is False
include_ports : bool
Flag whether to include ports. Default is False
hookup_type : str
Type of hookup to use (current observing system is 'parts_hera').
If 'None' it will determine which system it thinks it is based on
the part-type. The order in which it checks is specified in cm_sysdef.
Only change if you know you want a different system (like 'parts_paper').
Default is None.
Returns
-------
dict
{pol:(location, #)}
"""
parts = {}
H = cm_hookup.Hookup(self.session)
if isinstance(stn, str):
stn = [stn]
hud = H.get_hookup(hpn=stn, at_date=at_date, exact_match=True, hookup_type=hookup_type)
for k, hu in hud.items():
parts[k] = hu.get_part_from_type(
part_type, include_revs=include_revs, include_ports=include_ports)
return parts
def publish_summary(self, hlist=['default'], exact_match=False, hookup_cols='all',
sortby='node,station'):
"""
Publish the hookup on hera.today.
Parameters
----------
hlist : list
List of prefixes or stations to use in summary.
Default is the "default" prefix list in cm_utils.
exact_match : bool
Flag for exact_match or included characters.
        hookup_cols : str, list
            List of hookup columns to use, or 'all'.
        sortby : str
            Column(s) to sort the displayed hookup table by.
        Returns
        -------
        None
            The summary table is written as html to ~/.hera_mc/sys_conn_tmp.html.
"""
import os.path
if hlist[0].lower() == 'default':
hlist = cm_sysdef.hera_zone_prefixes
output_file = os.path.expanduser('~/.hera_mc/sys_conn_tmp.html')
H = cm_hookup.Hookup(self.session)
hookup_dict = H.get_hookup(hpn=hlist, pol='all', at_date='now',
exact_match=exact_match, hookup_type=None)
H.show_hookup(hookup_dict=hookup_dict, cols_to_show=hookup_cols,
state='full', ports=True, revs=True,
sortby=sortby, filename=output_file, output_format='html')
def get_apriori_status_for_antenna(self, antenna, at_date='now'):
"""
Get the "apriori" status of an antenna station (e.g. HH12) at a date.
The status enum list may be found by module
cm_partconnect.get_apriori_antenna_status_enum().
Parameters
----------
ant : str
Antenna station designator (e.g. HH12, HA330) it is a single string
at_date : str or int
Date to look for. Anything intelligible by cm_utils.get_astropytime.
Returns
-------
str
The apriori antenna status as a string. Returns None if not in table.
"""
ant = antenna.upper()
at_date = cm_utils.get_astropytime(at_date).gps
cmapa = cm_partconnect.AprioriAntenna
apa = self.session.query(cmapa).filter(
or_(and_(func.upper(cmapa.antenna) == ant, cmapa.start_gpstime <= at_date,
cmapa.stop_gpstime.is_(None)),
and_(func.upper(cmapa.antenna) == ant, cmapa.start_gpstime <= at_date,
cmapa.stop_gpstime > at_date))).first()
if apa is not None:
return apa.status
def get_apriori_antennas_with_status(self, status, at_date='now'):
"""
Get a list of all antennas with the provided status query at_date.
Parameters
----------
status : str
Apriori antenna status type (see cm_partconnect.get_apriori_antenna_status_enum())
at_date : str or int
Date for which to get apriori state -- anything
cm_utils.get_astropytime can handle.
Returns
-------
list of str
List of the antenna station designators with the specified status.
"""
at_date = cm_utils.get_astropytime(at_date).gps
ap_ants = []
cmapa = cm_partconnect.AprioriAntenna
for apa in self.session.query(cmapa).filter(
or_(and_(cmapa.status == status, cmapa.start_gpstime <= at_date,
cmapa.stop_gpstime.is_(None)),
and_(cmapa.status == status, cmapa.start_gpstime <= at_date,
cmapa.stop_gpstime > at_date))):
ap_ants.append(apa.antenna)
return ap_ants
def get_apriori_antenna_status_set(self, at_date='now'):
"""
Get a dictionary with the antennas for each apriori status type.
Parameters
----------
at_date : str or int
Date for which to get apriori state -- anything
cm_utils.get_astropytime can handle.
Returns
-------
dict
dictionary of antennas, keyed on the apriori antenna status value
containing the antennas with that status value
"""
ap_stat = {}
for _status in cm_partconnect.get_apriori_antenna_status_enum():
ap_stat[_status] = self.get_apriori_antennas_with_status(_status, at_date=at_date)
return ap_stat
def get_apriori_antenna_status_for_rtp(self, status, at_date='now'):
"""
Get a csv-string of all antennas for an apriori status for RTP.
Parameters
----------
status : str
Apriori antenna status type (see cm_partconnect.get_apriori_antenna_status_enum())
at_date : str or int
Date for which to get apriori state -- anything
cm_utils.get_astropytime can handle. Default is 'now'
Returns
-------
str
csv string of antennas of a given apriori status
"""
return ','.join(self.get_apriori_antennas_with_status(status=status, at_date=at_date))
def node_antennas(source='file', session=None):
"""
Get the antennas associated with nodes.
If source (as string) is 'file' it will use the 'nodes.txt' file of designed nodes.
If source (as string) is 'hookup', it will find them via the current hookup.
if source is a hookup instance, it will use that instance.
Parameters
----------
source : str or hookup instance
Source of node antennas - either 'file' or 'hookup' or a hookup
Returns
-------
dict
Antennas per node key.
"""
ants_per_node = {}
if isinstance(source, str) and source.lower().startswith('f'):
from . import geo_sysdef
node = geo_sysdef.read_nodes()
for this_node in node.keys():
node_hpn = 'N{:02d}'.format(this_node)
ants_per_node[node_hpn] = []
for ant in node[this_node]['ants']:
if ant in geo_sysdef.region['heraringa']:
prefix = 'HA'
elif ant in geo_sysdef.region['heraringb']:
prefix = 'HB'
else:
prefix = 'HH'
ants_per_node[node_hpn].append("{}{}".format(prefix, ant))
else:
if isinstance(source, str) and source.lower().startswith('h'):
source = cm_hookup.Hookup(session=session)
hu_dict = source.get_hookup(cm_sysdef.hera_zone_prefixes, hookup_type='parts_hera')
for this_ant, vna in hu_dict.items():
key = vna.hookup['E<ground'][-1].downstream_part
if key[0] != 'N':
continue
ants_per_node.setdefault(key, [])
ants_per_node[key].append(cm_utils.split_part_key(this_ant)[0])
return ants_per_node
def _get_dict_elements(npk, hu, ele_a, ele_b):
"""Return the appropriate hookup elements for node_info."""
a = ele_a.lower()
b = ele_b.lower()
A = ele_a.upper()
B = ele_b.upper()
e_ret = {a: '', b: ''}
try:
e_hookup = hu[npk].hookup['@<middle']
except KeyError:
return e_ret
for element in e_hookup:
if element.upstream_part.startswith(A):
e_ret[a] = element.upstream_part
e_ret[b] = element.downstream_part
break
elif element.upstream_part.startswith(B):
e_ret[b] = element.upstream_part
return e_ret
def _find_ant_node(pnsearch, na_dict):
found_node = None
for node, antennas in na_dict.items():
for ant in antennas:
antint = cm_utils.peel_key(ant, 'NPR')[0]
if pnsearch == antint:
if found_node is not None:
raise ValueError("Antenna {} already listed in node {}"
.format(pnsearch, found_node))
else:
found_node = node
return found_node
def which_node(ant_num, session=None):
"""
Find node for antenna.
Parameters
----------
ant_num : int or list of int or csv-list or hyphen-range str
Antenna numbers, as int
Returns
-------
    dict
        Maps each antenna number (int) to a two-element list: the node the
        antenna is assigned to in the node design file and the node it is
        connected to in the current hookup; an entry is None if not found.
"""
na_from_file = node_antennas('file', session=session)
na_from_hookup = node_antennas('hookup', session=session)
ant_num = cm_utils.listify(ant_num)
ant_node = {}
for pn in ant_num:
pnint = cm_utils.peel_key(str(pn), 'NPR')[0]
ant_node[pnint] = [_find_ant_node(pnint, na_from_file)]
ant_node[pnint].append(_find_ant_node(pnint, na_from_hookup))
return ant_node
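# Illustrative example (hypothetical antenna and node numbers):
#   which_node(12) -> {12: ['N03', 'N03']}
# meaning antenna 12 is assigned to node N03 in the design file (first entry)
# and is currently hooked up to node N03 (second entry).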
def print_which_node(ant_node):
"""
Print formatted 'which_node' print string.
    Parameters
    ----------
ant_node : dict
Dictionary returned from method 'which_node'.
"""
print(formatted__which_node__string(ant_node))
def formatted__which_node__string(ant_node):
"""
Return formatted 'which_node' print string.
    Parameters
    ----------
ant_node : dict
Dictionary returned from method 'which_node'.
Returns
-------
str
Formatted print string.
"""
print_str = ''
for ant, node in ant_node.items():
if node[0] is not None:
if node[1] is None:
print_str += 'Antenna {}: Not installed ({})\n'.format(ant, node[0])
elif node[1] == node[0]:
print_str += 'Antenna {}: {}\n'.format(ant, node[0])
else:
print_str += 'Warning: Antenna {}\n\tSpecified for {}\n'.format(ant, node[0])
print_str += '\tInstalled in {}'.format(node[1])
else:
print_str += 'Warning: Antenna {} not specified for a node.\n'.format(ant)
if node[1] is not None:
print_str += '\tBut shown as installed in {}\n'.format(node[1])
return print_str
def node_info(node_num='active', session=None):
"""
Generate information per node.
Parameters
----------
node_num : list of int or str (can be mixed), or str
Node numbers, as int or hera part number.
        If 'active', use the list of active nodes.
        If 'all', use the list of all nodes.
    Returns
    -------
    dict
        Keyed on 'nodes' (the list of node names processed) and on each node
        name; a node's entry holds 'ants-file', 'ants-hookup', 'snaps', 'wr',
        'arduino' and 'ncm', and the notes found for each snap, white rabbit
        and arduino part are stored under that part's own name.
"""
hu = cm_hookup.Hookup(session)
na_from_file = node_antennas('file', session=session)
na_from_hookup = node_antennas(hu, session=session)
if node_num == 'active':
node_num = sorted(list(na_from_hookup))
elif node_num == 'all':
node_num = sorted(list(na_from_file))
info = {'nodes': []}
for node in node_num:
# Set up
if isinstance(node, int):
node = 'N{:02d}'.format(node)
npk = cm_utils.make_part_key(node, 'A')
info['nodes'].append(node)
info[node] = {}
# Get antenna info
try:
info[node]['ants-file'] = na_from_file[node]
except KeyError:
info[node]['ants-file'] = []
try:
info[node]['ants-hookup'] = na_from_hookup[node]
except KeyError:
info[node]['ants-hookup'] = []
# Get hookup info
snaps = hu.get_hookup(node, hookup_type='parts_hera')
wr = hu.get_hookup(node, hookup_type='wr_hera')
rd = hu.get_hookup(node, hookup_type='arduino_hera')
# Find snaps
info[node]['snaps'] = [cm_utils.split_part_key(x)[0] for x in snaps.keys()]
# Find white rabbit, arduino and node control module
wr_ret = _get_dict_elements(npk, wr, 'wr', 'ncm')
info[node]['wr'] = wr_ret['wr']
rd_ret = _get_dict_elements(npk, rd, 'rd', 'ncm')
info[node]['arduino'] = rd_ret['rd']
info[node]['ncm'] = ''
if len(wr_ret['ncm']) and len(rd_ret['ncm']) \
and wr_ret['ncm'] != rd_ret['ncm']: # pragma: no cover
raise ValueError("NCMs don't match for node {}: {} vs {}"
.format(node, wr_ret['ncm'], rd_ret['ncm']))
elif len(wr_ret['ncm']):
info[node]['ncm'] = wr_ret['ncm']
elif len(rd_ret['ncm']): # pragma: no cover
info[node]['ncm'] = rd_ret['ncm']
# Get notes
notes = hu.get_notes(snaps, state='all', return_dict=True)
for snp in info[node]['snaps']:
spk = cm_utils.make_part_key(snp, 'A')
try:
info[snp] = [x['note'] for x in notes[spk][spk].values()]
except KeyError:
info[snp] = []
notes = hu.get_notes(wr, state='all', return_dict=True)
wpk = cm_utils.make_part_key(info[node]['wr'], 'A')
try:
info[info[node]['wr']] = [x['note'] for x in notes[npk][wpk].values()]
except KeyError:
info[info[node]['wr']] = []
notes = hu.get_notes(rd, state='all', return_dict=True)
apk = cm_utils.make_part_key(info[node]['arduino'], 'A')
try:
info[info[node]['arduino']] = [x['note'] for x in notes[npk][apk].values()]
except KeyError:
info[info[node]['arduino']] = []
if '' in info.keys():
del info['']
return info
def _get_macip(info):
data = []
for this_note in info:
if this_note.startswith('MAC') or this_note.startswith('IP'):
data.append(this_note.split('-')[1].strip())
return data
def _convert_ant_list(alist):
ants = [int(x.strip('HH').strip('HA').strip('HB')) for x in alist]
ants = sorted(ants)
ants = [str(x) for x in ants]
ants = ','.join(ants)
return ants
def print_node(info, filename=None, output_format='table'):
"""Print node info as determined in method node_info above."""
headers = ['Node', 'SNAPs', 'NCM', 'WR', 'Arduino']
spacer = [5 * '-', 44 * '-', 5 * '-', 17 * '-', 17 * '-']
table_data = []
for node in info['nodes']:
is_there = 0
for hdr in headers[1:]:
is_there += len(info[node][hdr.lower()])
if not is_there:
continue
# ############# WR
this_wr = info[node]['wr']
try:
wr_notes = _get_macip(info[this_wr])
except KeyError:
wr_notes = []
# ############# RD
this_rd = info[node]['arduino']
try:
rd_notes = _get_macip(info[this_rd])
except KeyError:
rd_notes = []
# ############# SNP and entry
for i in range(4):
try:
this_snp = info[node]['snaps'][i]
try:
snp_notes = _get_macip(info[this_snp])
except KeyError:
snp_notes = []
            except IndexError:
                this_snp = ''
                snp_notes = []
snp_entry = "{} - {}".format(this_snp, ', '.join(snp_notes))
snp_entry = snp_entry.strip().strip('-')
if i:
try:
wr_entry = wr_notes[i - 1]
except IndexError:
wr_entry = ''
try:
rd_entry = rd_notes[i - 1]
except IndexError:
rd_entry = ''
row = ['', snp_entry, '', wr_entry, rd_entry]
else:
row = [node, snp_entry, info[node]['ncm'], this_wr, this_rd]
table_data.append(row)
ants = _convert_ant_list(info[node]['ants-file'])
table_data.append(['Ants', ants, '', '', ''])
ants = _convert_ant_list(info[node]['ants-hookup'])
table_data.append(['Conn', ants, '', '', ''])
table_data.append(spacer)
table = cm_utils.general_table_handler(headers, table_data, output_format)
if filename is not None: # pragma: no cover
with open(filename, 'w') as fp:
print(table, file=fp)
else:
print(table)
| bsd-2-clause | -152,592,945,233,629,400 | 36.124654 | 100 | 0.541524 | false |
fbradyirl/home-assistant | tests/helpers/test_condition.py | 1 | 6075 | """Test the condition helper."""
from unittest.mock import patch
from homeassistant.helpers import condition
from homeassistant.util import dt
from tests.common import get_test_home_assistant
class TestConditionHelper:
"""Test condition helpers."""
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_and_condition(self):
"""Test the 'and' condition."""
test = condition.from_config(
{
"condition": "and",
"conditions": [
{
"condition": "state",
"entity_id": "sensor.temperature",
"state": "100",
},
{
"condition": "numeric_state",
"entity_id": "sensor.temperature",
"below": 110,
},
],
}
)
self.hass.states.set("sensor.temperature", 120)
assert not test(self.hass)
self.hass.states.set("sensor.temperature", 105)
assert not test(self.hass)
self.hass.states.set("sensor.temperature", 100)
assert test(self.hass)
def test_and_condition_with_template(self):
"""Test the 'and' condition."""
test = condition.from_config(
{
"condition": "and",
"conditions": [
{
"condition": "template",
"value_template": '{{ states.sensor.temperature.state == "100" }}',
},
{
"condition": "numeric_state",
"entity_id": "sensor.temperature",
"below": 110,
},
],
}
)
self.hass.states.set("sensor.temperature", 120)
assert not test(self.hass)
self.hass.states.set("sensor.temperature", 105)
assert not test(self.hass)
self.hass.states.set("sensor.temperature", 100)
assert test(self.hass)
def test_or_condition(self):
"""Test the 'or' condition."""
test = condition.from_config(
{
"condition": "or",
"conditions": [
{
"condition": "state",
"entity_id": "sensor.temperature",
"state": "100",
},
{
"condition": "numeric_state",
"entity_id": "sensor.temperature",
"below": 110,
},
],
}
)
self.hass.states.set("sensor.temperature", 120)
assert not test(self.hass)
self.hass.states.set("sensor.temperature", 105)
assert test(self.hass)
self.hass.states.set("sensor.temperature", 100)
assert test(self.hass)
def test_or_condition_with_template(self):
"""Test the 'or' condition."""
test = condition.from_config(
{
"condition": "or",
"conditions": [
{
"condition": "template",
"value_template": '{{ states.sensor.temperature.state == "100" }}',
},
{
"condition": "numeric_state",
"entity_id": "sensor.temperature",
"below": 110,
},
],
}
)
self.hass.states.set("sensor.temperature", 120)
assert not test(self.hass)
self.hass.states.set("sensor.temperature", 105)
assert test(self.hass)
self.hass.states.set("sensor.temperature", 100)
assert test(self.hass)
def test_time_window(self):
"""Test time condition windows."""
sixam = dt.parse_time("06:00:00")
sixpm = dt.parse_time("18:00:00")
with patch(
"homeassistant.helpers.condition.dt_util.now",
return_value=dt.now().replace(hour=3),
):
assert not condition.time(after=sixam, before=sixpm)
assert condition.time(after=sixpm, before=sixam)
with patch(
"homeassistant.helpers.condition.dt_util.now",
return_value=dt.now().replace(hour=9),
):
assert condition.time(after=sixam, before=sixpm)
assert not condition.time(after=sixpm, before=sixam)
with patch(
"homeassistant.helpers.condition.dt_util.now",
return_value=dt.now().replace(hour=15),
):
assert condition.time(after=sixam, before=sixpm)
assert not condition.time(after=sixpm, before=sixam)
with patch(
"homeassistant.helpers.condition.dt_util.now",
return_value=dt.now().replace(hour=21),
):
assert not condition.time(after=sixam, before=sixpm)
assert condition.time(after=sixpm, before=sixam)
def test_if_numeric_state_not_raise_on_unavailable(self):
"""Test numeric_state doesn't raise on unavailable/unknown state."""
test = condition.from_config(
{
"condition": "numeric_state",
"entity_id": "sensor.temperature",
"below": 42,
}
)
with patch("homeassistant.helpers.condition._LOGGER.warning") as logwarn:
self.hass.states.set("sensor.temperature", "unavailable")
assert not test(self.hass)
assert len(logwarn.mock_calls) == 0
self.hass.states.set("sensor.temperature", "unknown")
assert not test(self.hass)
assert len(logwarn.mock_calls) == 0
| apache-2.0 | 1,700,890,156,897,550,800 | 31.837838 | 91 | 0.489053 | false |
tomncooper/heron | heronpy/api/tests/python/metrics_unittest.py | 4 | 2861 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=missing-docstring
import unittest
from heronpy.api.metrics import (CountMetric, MultiCountMetric,
MeanReducedMetric, MultiMeanReducedMetric)
class MetricsTest(unittest.TestCase):
def test_count_metric(self):
metric = CountMetric()
for _ in range(10):
metric.incr()
self.assertEqual(metric.get_value_and_reset(), 10)
for _ in range(10):
metric.incr(to_add=10)
self.assertEqual(metric.get_value_and_reset(), 100)
self.assertEqual(metric.get_value_and_reset(), 0)
def test_multi_count_metric(self):
metric = MultiCountMetric()
key_list = ["key1", "key2", "key3"]
for _ in range(10):
for key in key_list:
metric.incr(key=key)
self.assertEqual(metric.get_value_and_reset(), dict(zip(key_list, [10] * 3)))
self.assertEqual(metric.get_value_and_reset(), dict(zip(key_list, [0] * 3)))
metric.add_key("key4")
ret = metric.get_value_and_reset()
self.assertIn("key4", ret)
self.assertEqual(ret["key4"], 0)
def test_mean_reduced_metric(self):
metric = MeanReducedMetric()
# update from 1 to 10
for i in range(1, 11):
metric.update(i)
self.assertEqual(metric.get_value_and_reset(), 5.5)
self.assertIsNone(metric.get_value_and_reset())
for i in range(1, 11):
metric.update(i * 10)
self.assertEqual(metric.get_value_and_reset(), 55)
def test_multi_mean_reduced_metric(self):
metric = MultiMeanReducedMetric()
key_list = ["key1", "key2", "key3"]
for i in range(1, 11):
metric.update(key=key_list[0], value=i)
metric.update(key=key_list[1], value=i * 2)
metric.update(key=key_list[2], value=i * 3)
self.assertEqual(metric.get_value_and_reset(), dict(zip(key_list, [5.5, 11, 16.5])))
self.assertEqual(metric.get_value_and_reset(), dict(zip(key_list, [None] * 3)))
metric.add_key("key4")
ret = metric.get_value_and_reset()
self.assertIn("key4", ret)
self.assertIsNone(ret["key4"])
| apache-2.0 | 4,165,216,284,624,127,500 | 34.7625 | 88 | 0.67389 | false |
nathanielvarona/airflow | tests/utils/test_logging_mixin.py | 3 | 2888 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import warnings
from unittest import mock
from airflow.utils.log.logging_mixin import StreamLogWriter, set_context
class TestLoggingMixin(unittest.TestCase):
def setUp(self):
warnings.filterwarnings(action='always')
def test_set_context(self):
handler1 = mock.MagicMock()
handler2 = mock.MagicMock()
parent = mock.MagicMock()
parent.propagate = False
parent.handlers = [
handler1,
]
log = mock.MagicMock()
log.handlers = [
handler2,
]
log.parent = parent
log.propagate = True
value = "test"
set_context(log, value)
handler1.set_context.assert_called_once_with(value)
handler2.set_context.assert_called_once_with(value)
def tearDown(self):
warnings.resetwarnings()
class TestStreamLogWriter(unittest.TestCase):
def test_write(self):
logger = mock.MagicMock()
logger.log = mock.MagicMock()
log = StreamLogWriter(logger, 1)
msg = "test_message"
log.write(msg)
assert log._buffer == msg
log.write(" \n")
logger.log.assert_called_once_with(1, msg)
assert log._buffer == ""
def test_flush(self):
logger = mock.MagicMock()
logger.log = mock.MagicMock()
log = StreamLogWriter(logger, 1)
msg = "test_message"
log.write(msg)
assert log._buffer == msg
log.flush()
logger.log.assert_called_once_with(1, msg)
assert log._buffer == ""
def test_isatty(self):
logger = mock.MagicMock()
logger.log = mock.MagicMock()
log = StreamLogWriter(logger, 1)
assert not log.isatty()
def test_encoding(self):
logger = mock.MagicMock()
logger.log = mock.MagicMock()
log = StreamLogWriter(logger, 1)
assert log.encoding is None
def test_iobase_compatibility(self):
log = StreamLogWriter(None, 1)
assert not log.closed
# has no specific effect
log.close()
| apache-2.0 | -610,835,348,194,508,500 | 25.990654 | 72 | 0.640928 | false |
ptonner/GPy | GPy/util/normalizer.py | 13 | 1116 | '''
Created on Aug 27, 2014
@author: t-mazwie
'''
import logging
import numpy as np
class Norm(object):
def __init__(self):
pass
def scale_by(self, Y):
"""
Use data matrix Y as normalization space to work in.
"""
raise NotImplementedError
def normalize(self, Y):
"""
Project Y into normalized space
"""
raise NotImplementedError
def inverse_mean(self, X):
"""
Project the normalized object X into space of Y
"""
raise NotImplementedError
def inverse_variance(self, var):
return var
def scaled(self):
"""
Whether this Norm object has been initialized.
"""
raise NotImplementedError
class MeanNorm(Norm):
def __init__(self):
self.mean = None
def scale_by(self, Y):
Y = np.ma.masked_invalid(Y, copy=False)
self.mean = Y.mean(0).view(np.ndarray)
def normalize(self, Y):
return Y-self.mean
def inverse_mean(self, X):
return X+self.mean
def scaled(self):
return self.mean is not None
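# Minimal usage sketch (illustrative; Y stands for any 2-d numpy data array):
#   norm = MeanNorm()
#   norm.scale_by(Y)                    # remember the per-column means of Y
#   Y_centered = norm.normalize(Y)      # zero-mean data used for fitting
#   Y_restored = norm.inverse_mean(Y_centered)  # map back to the original space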
| bsd-3-clause | 2,688,401,603,414,214,700 | 23.8 | 60 | 0.575269 | false |
surdy/dcos | conftest.py | 8 | 1055 | import os
import pytest
import release
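# Note (illustrative): these fixtures skip dependent tests unless
# dcos-release.config.yaml exists and contains the relevant sections,
# i.e. roughly:
#   testing:
#     aws: {...}
#     azure: {...}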
@pytest.fixture
def release_config():
if not os.path.exists('dcos-release.config.yaml'):
pytest.skip("Skipping because there is no configuration in dcos-release.config.yaml")
return release.load_config('dcos-release.config.yaml')
@pytest.fixture
def release_config_testing(release_config):
if 'testing' not in release_config:
pytest.skip("Skipped because there is no `testing` configuration in dcos-release.config.yaml")
return release_config['testing']
@pytest.fixture
def release_config_aws(release_config_testing):
if 'aws' not in release_config_testing:
pytest.skip("Skipped because there is no `testing.aws` configuration in dcos-release.config.yaml")
return release_config_testing['aws']
@pytest.fixture
def release_config_azure(release_config_testing):
if 'azure' not in release_config_testing:
pytest.skip("Skipped because there is no `testing.azure` configuration in dcos-release.config.yaml")
return release_config_testing['azure']
| apache-2.0 | 7,706,216,305,006,280,000 | 30.969697 | 108 | 0.739336 | false |
AthinaB/synnefo | snf-cyclades-app/synnefo/logic/management/commands/server-list.py | 8 | 5001 | # Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from optparse import make_option
from functools import partial
from snf_django.management.commands import ListCommand
from synnefo.db.models import VirtualMachine
from synnefo.management.common import get_resource
from synnefo.api.util import get_image
from synnefo.settings import (CYCLADES_SERVICE_TOKEN as ASTAKOS_TOKEN,
ASTAKOS_AUTH_URL)
from logging import getLogger
log = getLogger(__name__)
class Command(ListCommand):
help = "List servers"
option_list = ListCommand.option_list + (
make_option(
'--suspended',
action='store_true',
dest='suspended',
default=False,
help="List only suspended servers"),
make_option(
'--backend-id',
dest='backend_id',
help="List only servers of the specified backend"),
make_option(
"--build",
action="store_true",
dest="build",
default=False,
help="List only servers in the building state"),
make_option(
"--image-name",
action="store_true",
dest="image_name",
default=False,
help="Display image name instead of image ID"),
)
object_class = VirtualMachine
deleted_field = "deleted"
user_uuid_field = "userid"
astakos_auth_url = ASTAKOS_AUTH_URL
astakos_token = ASTAKOS_TOKEN
select_related = ["flavor.volume_type"]
def get_ips(version, vm):
ips = []
for nic in vm.nics.all():
for ip in nic.ips.all():
if ip.subnet.ipversion == version:
ips.append(ip.address)
return ips
def format_vm_state(vm):
if vm.operstate == "BUILD":
return "BUILD(" + str(vm.buildpercentage) + "%)"
else:
return vm.operstate
FIELDS = {
"id": ("id", "ID of the server"),
"name": ("name", "Name of the server"),
"user.uuid": ("userid", "The UUID of the server's owner"),
"flavor": ("flavor.name", "The name of the server's flavor"),
"backend": ("backend", "The Ganeti backend that hosts the VM"),
"image.id": ("imageid", "The ID of the server's image"),
"image.name": ("image", "The name of the server's image"),
"state": (format_vm_state, "The current state of the server"),
"ipv4": (partial(get_ips, 4),
"The IPv4 addresses of the server"),
"ipv6": (partial(get_ips, 6),
"The IPv6 addresses of the server"),
"created": ("created", "The date the server was created"),
"deleted": ("deleted", "Whether the server is deleted or not"),
"suspended": ("suspended", "Whether the server is administratively"
" suspended"),
"project": ("project", "The project UUID"),
}
fields = ["id", "name", "user.uuid", "state", "flavor", "image.id",
"backend"]
def handle_args(self, *args, **options):
if options["suspended"]:
self.filters["suspended"] = True
if options["backend_id"]:
backend = get_resource("backend", options["backend_id"])
self.filters["backend"] = backend.id
if options["build"]:
self.filters["operstate"] = "BUILD"
if options["image_name"]:
self.fields = ["image.name" if x == "image.id" else x
for x in self.fields]
if "ipv4" in self.fields or "ipv6" in self.fields:
self.prefetch_related.append("nics__ips__subnet")
def handle_db_objects(self, rows, *args, **kwargs):
if "image.name" in self.fields:
icache = ImageCache()
for vm in rows:
vm.image = icache.get_image(vm.imageid, vm.userid)
class ImageCache(object):
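    """Cache imageid -> image name lookups so each image is fetched only once."""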
def __init__(self):
self.images = {}
def get_image(self, imageid, userid):
        if imageid not in self.images:
try:
self.images[imageid] = get_image(imageid, userid)['name']
except Exception as e:
log.warning("Error getting image name from imageid %s: %s",
imageid, e)
self.images[imageid] = imageid
return self.images[imageid]
| gpl-3.0 | -3,986,686,938,511,067,000 | 34.978417 | 75 | 0.580884 | false |
f2nd/yandex-tank | yandextank/stepper/missile.py | 1 | 14458 | """
Missile object and generators
You should update Stepper.status.ammo_count and Stepper.status.loop_count in your custom generators!
"""
import logging
from itertools import cycle
from netort.resource import manager as resource
from . import info
from .module_exceptions import AmmoFileError
class HttpAmmo(object):
"""
Represents HTTP missile
>>> print HttpAmmo('/', []).to_s() # doctest: +NORMALIZE_WHITESPACE
GET / HTTP/1.1
>>> print HttpAmmo('/', ['Connection: Close', 'Content-Type: Application/JSON']).to_s() # doctest: +NORMALIZE_WHITESPACE
GET / HTTP/1.1
Connection: Close
Content-Type: Application/JSON
>>> print HttpAmmo('/', ['Connection: Close'], method='POST', body='hello!').to_s() # doctest: +NORMALIZE_WHITESPACE
POST / HTTP/1.1
Connection: Close
Content-Length: 6
<BLANKLINE>
hello!
"""
def __init__(self, uri, headers, method='GET', http_ver='1.1', body=''):
self.method = method
self.uri = uri
self.proto = 'HTTP/%s' % http_ver
self.headers = set(headers)
self.body = body
if len(body):
self.headers.add("Content-Length: %s" % len(body))
def to_s(self):
if self.headers:
headers = '\r\n'.join(self.headers) + '\r\n'
else:
headers = ''
return "%s %s %s\r\n%s\r\n%s" % (
self.method, self.uri, self.proto, headers, self.body)
class SimpleGenerator(object):
"""
Generates ammo based on a given sample.
"""
def __init__(self, missile_sample):
"""
Missile sample is any object that has to_s method which
returns its string representation.
"""
self.missiles = cycle([(missile_sample.to_s(), None)])
def __iter__(self):
for m in self.missiles:
info.status.inc_loop_count()
yield m
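# Example (illustrative only -- the class name and constructor are placeholders):
# a minimal custom generator in the spirit of SimpleGenerator that cycles over
# a fixed list of pre-rendered (missile, marker) tuples and, as the module
# docstring asks, keeps the loop counter updated, here once per full pass.
class ListGenerator(object):
    def __init__(self, missiles):
        self.missiles = list(missiles)
    def __iter__(self):
        while True:
            for m in self.missiles:
                yield m
            info.status.inc_loop_count()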
class UriStyleGenerator(object):
"""
Generates GET ammo based on given URI list.
"""
def __init__(self, uris, headers, http_ver='1.1'):
"""
uris - a list of URIs as strings.
"""
self.uri_count = len(uris)
self.missiles = cycle([(
HttpAmmo(
uri, headers, http_ver=http_ver).to_s(), None) for uri in uris])
def __iter__(self):
for m in self.missiles:
yield m
info.status.loop_count = info.status.ammo_count / self.uri_count
class Reader(object):
def __init__(self, filename, use_cache=True, **kwargs):
self.filename = filename
self.use_cache = use_cache
class AmmoFileReader(Reader):
"""Read missiles from ammo file"""
def __init__(self, filename, use_cache=True, **kwargs):
super(AmmoFileReader, self).__init__(filename, use_cache)
self.log = logging.getLogger(__name__)
self.log.info("Loading ammo from '%s'" % filename)
def __iter__(self):
def read_chunk_header(ammo_file):
chunk_header = ''
while chunk_header == '':
line = ammo_file.readline()
if line == '':
return line
chunk_header = line.strip('\r\n')
return chunk_header
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
# if we got StopIteration here, the file is empty
chunk_header = read_chunk_header(ammo_file)
while chunk_header:
if chunk_header != '':
try:
fields = chunk_header.split()
chunk_size = int(fields[0])
if chunk_size == 0:
if info.status.loop_count == 0:
self.log.info(
'Zero-sized chunk in ammo file at %s. Starting over.'
% ammo_file.tell())
ammo_file.seek(0)
info.status.inc_loop_count()
chunk_header = read_chunk_header(ammo_file)
continue
marker = fields[1] if len(fields) > 1 else None
missile = ammo_file.read(chunk_size)
if len(missile) < chunk_size:
raise AmmoFileError(
"Unexpected end of file: read %s bytes instead of %s"
% (len(missile), chunk_size))
yield (missile, marker)
except (IndexError, ValueError) as e:
raise AmmoFileError(
"Error while reading ammo file. Position: %s, header: '%s', original exception: %s"
% (ammo_file.tell(), chunk_header, e))
chunk_header = read_chunk_header(ammo_file)
if chunk_header == '':
ammo_file.seek(0)
info.status.inc_loop_count()
chunk_header = read_chunk_header(ammo_file)
info.status.af_position = ammo_file.tell()
class SlowLogReader(Reader):
"""Read missiles from SQL slow log. Not usable with Phantom"""
def __iter__(self):
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
request = ""
while True:
for line in ammo_file:
info.status.af_position = ammo_file.tell()
if line.startswith('#'):
if request != "":
yield (request, None)
request = ""
else:
request += line
ammo_file.seek(0)
info.status.af_position = 0
info.status.inc_loop_count()
class LineReader(Reader):
"""One line -- one missile"""
def __iter__(self):
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
while True:
for line in ammo_file:
info.status.af_position = ammo_file.tell()
yield (line.rstrip('\r\n'), None)
ammo_file.seek(0)
info.status.af_position = 0
info.status.inc_loop_count()
class CaseLineReader(Reader):
"""One line -- one missile with case, tab separated"""
def __iter__(self):
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
while True:
for line in ammo_file:
line = line.decode()
info.status.af_position = ammo_file.tell()
parts = line.rstrip('\r\n').split('\t', 1)
if len(parts) == 2:
yield (parts[1], parts[0])
elif len(parts) == 1:
yield (parts[0], None)
else:
raise RuntimeError("Unreachable branch")
ammo_file.seek(0)
info.status.af_position = 0
info.status.inc_loop_count()
class AccessLogReader(Reader):
"""Missiles from access log"""
def __init__(self, filename, headers=None, http_ver='1.1', use_cache=True, **kwargs):
super(AccessLogReader, self).__init__(filename, use_cache)
self.warned = False
self.headers = set(headers) if headers else set()
self.log = logging.getLogger(__name__)
def warn(self, message):
if not self.warned:
self.warned = True
self.log.warning(
"There are some skipped lines. See full log for details.")
self.log.debug(message)
def __iter__(self):
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
while True:
for line in ammo_file:
info.status.af_position = ammo_file.tell()
try:
request = line.split('"')[1]
method, uri, proto = request.split()
http_ver = proto.split('/')[1]
if method == "GET":
yield (
HttpAmmo(
uri,
headers=self.headers,
http_ver=http_ver, ).to_s(), None)
else:
self.warn(
"Skipped line: %s (unsupported method)" % line)
except (ValueError, IndexError) as e:
self.warn("Skipped line: %s (%s)" % (line, e))
ammo_file.seek(0)
info.status.af_position = 0
info.status.inc_loop_count()
def _parse_header(header):
return dict([(h.strip() for h in header.split(':', 1))])
class UriReader(Reader):
def __init__(self, filename, headers=None, http_ver='1.1', use_cache=True, **kwargs):
super(UriReader, self).__init__(filename, use_cache)
self.headers = {pair[0].strip(): pair[1].strip() for pair in [h.split(':', 1) for h in headers]} \
if headers else {}
self.http_ver = http_ver
self.log = logging.getLogger(__name__)
self.log.info("Loading ammo from '%s' using URI format." % filename)
def __iter__(self):
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
while True:
for line in ammo_file:
info.status.af_position = ammo_file.tell()
if line.startswith('['):
self.headers.update(
_parse_header(line.strip('\r\n[]\t ')))
elif len(line.rstrip('\r\n')):
fields = line.split()
uri = fields[0]
if len(fields) > 1:
marker = fields[1]
else:
marker = None
yield (
HttpAmmo(
uri,
headers=[
': '.join(header)
for header in list(self.headers.items())
],
http_ver=self.http_ver, ).to_s(), marker)
if info.status.ammo_count == 0:
self.log.error("No ammo in uri-style file")
raise AmmoFileError("No ammo! Cover me!")
ammo_file.seek(0)
info.status.af_position = 0
info.status.inc_loop_count()
class UriPostReader(Reader):
"""Read POST missiles from ammo file"""
def __init__(self, filename, headers=None, http_ver='1.1', use_cache=True, **kwargs):
super(UriPostReader, self).__init__(filename, use_cache)
self.headers = {pair[0].strip(): pair[1].strip() for pair in [h.split(':', 1) for h in headers]} \
if headers else {}
self.http_ver = http_ver
self.log = logging.getLogger(__name__)
self.log.info("Loading ammo from '%s' using URI+POST format", filename)
def __iter__(self):
def read_chunk_header(ammo_file):
chunk_header = ''
while chunk_header == '':
line = ammo_file.readline()
if line.startswith('['):
self.headers.update(_parse_header(line.strip('\r\n[]\t ')))
elif line == '':
return line
else:
chunk_header = line.strip('\r\n')
return chunk_header
opener = resource.get_opener(self.filename)
with opener(self.use_cache) as ammo_file:
info.status.af_size = opener.data_length
# if we got StopIteration here, the file is empty
chunk_header = read_chunk_header(ammo_file)
while chunk_header:
if chunk_header != '':
try:
fields = chunk_header.split()
chunk_size = int(fields[0])
uri = fields[1]
marker = fields[2] if len(fields) > 2 else None
if chunk_size == 0:
missile = ""
else:
missile = ammo_file.read(chunk_size)
if len(missile) < chunk_size:
raise AmmoFileError(
"Unexpected end of file: read %s bytes instead of %s"
% (len(missile), chunk_size))
yield (
HttpAmmo(
uri=uri,
headers=[
': '.join(header)
for header in list(self.headers.items())
],
method='POST',
body=missile,
http_ver=self.http_ver, ).to_s(), marker)
except (IndexError, ValueError) as e:
raise AmmoFileError(
"Error while reading ammo file. Position: %s, header: '%s', original exception: %s"
% (ammo_file.tell(), chunk_header, e))
chunk_header = read_chunk_header(ammo_file)
if chunk_header == '':
self.log.debug(
'Reached the end of ammo file. Starting over.')
ammo_file.seek(0)
info.status.inc_loop_count()
chunk_header = read_chunk_header(ammo_file)
info.status.af_position = ammo_file.tell()
| lgpl-2.1 | 5,008,206,693,039,682,000 | 38.395095 | 125 | 0.474201 | false |
seslab/MIPVC | MIPVC/FuenteV.py | 1 | 8648 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# -*- coding: utf-850 -*-
#Title          :FuenteV.py
#Description    :Control interface for the power sources in voltage mode.
#Author         :Javier Campos Rojas
#Date           :August-2017
#Version        :1.0
#Notes          :
#==============================================================================
from graphics import *
from button import *
import SerialKepco as SK
#from HarmGen import *
import matplotlib.pyplot as plt
import numpy as np
import math
import io
import base64
import Tkinter as tk
from urllib2 import urlopen
import glob ##### to search for the available USB ports
import subprocess
import tkMessageBox
global SK
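# Note: the SerialKepco.Source objects built in main() are driven only through
# connectport(), identify(), WriteVolt(V, C) and stop().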
def main():
xgrid=80;
ygrid=48;
refx=10;
refy=ygrid-14;
width_b=10;
heigh_b=2;
width_b2=5;
heigh_b2=2.5;
Tm=0.0005;
global Source;
win = GraphWin("Fuente de Tensión",width=800, height=480)
win.setCoords(0,0,xgrid,ygrid) #x1 y1 x2 y2
background = Image(Point(xgrid/2,ygrid/2), 'backg.gif')
background.draw(win)
logoTEC = Image(Point(xgrid/2-20,ygrid-5), 'TEC.gif')
logoTEC.draw(win)
LogoSESLab = Image(Point(xgrid/2+20,ygrid-5), 'SESLab.gif')
LogoSESLab.draw(win)
line0 = Line(Point(0, refy+2), Point(xgrid,refy+2))
line0.setFill("white")
line0.setWidth(2)
line0.draw(win)
line = Line(Point(xgrid/2, refy-15), Point(xgrid/2, refy+2))
line.setFill("white")
line.setWidth(2)
line.draw(win)
line2 = Line(Point(0, refy-10), Point(xgrid,refy-10))
line2.setFill("white")
line2.setWidth(2)
line2.draw(win)
line3 = Line(Point(0, refy-15), Point(xgrid, refy-15))
line3.setFill("white")
line3.setWidth(2)
line3.draw(win)
line4 = Line(Point(0, refy-20), Point(xgrid, refy-20))
line4.setFill("white")
line4.setWidth(2)
line4.draw(win)
	##Source 1
Vout1 = Button(win, Point(refx+23,refy-4), width_b2, heigh_b2, "☑")
stop1 = Button(win, Point(refx+23,refy-8), width_b2, heigh_b2, "■")
	##Source 2
Vout2 = Button(win, Point(refx+63,refy-4), width_b2, heigh_b2, "☑")
stop2 = Button(win, Point(refx+63,refy-8), width_b2, heigh_b2, "■")
connects1 = Button(win, Point(refx+23,refy), width_b, heigh_b, "Conectar")
connects1.activate()
connects2 = Button(win, Point(refx+63,refy), width_b, heigh_b, "Conectar")
puertos=glob.glob('/dev/tty[U]*')
try:
puerto1 = puertos[0]
except IndexError:
Vout1.deactivate()
stop1.deactivate()
connects1.deactivate()
puerto1 = 'no hay dispositivo'
try:
puerto2 = puertos[1]
except IndexError:
Vout2.deactivate()
stop2.deactivate()
connects2.deactivate()
puerto2 = 'no hay dispositivo'
cal = Button(win, Point(xgrid/4+10,refy-17.5), width_b, heigh_b, "Calibrar")
cal.activate()
quitButton = Button(win, Point(3*xgrid/4-10,refy-17.5), width_b, heigh_b, "Salir")
quitButton.rect.setFill("#C01A19")
quitButton.activate()
	###############################---Source 1 data---###############################
	################## Serial port 1 ##################
port1_name=Text(Point(refx-3,refy),"Puerto Serial 1: ")
port1_name.setFace('arial')
port1_name.setStyle('bold')
port1_name.setSize(10)
port1_name.setTextColor("black")
port1_name.draw(win)
port1_val=Entry(Point(refx+10,refy),19)
port1_val.setFace('arial')
port1_val.setSize(10)
port1_val.setTextColor("white")
port1_val.setFill('#6B6B6B')
port1_val.setText(puerto1)
port1_val.draw(win)
	################## Voltage 1 ##################
volt=Text(Point(refx,refy-4),"Tensión(V): ")
volt.setFace('arial')
volt.setStyle('bold')
volt.setSize(10)
volt.setTextColor("black")
volt.draw(win)
volt_val=Entry(Point(refx+10,refy-4),10)
volt_val.setFace('arial')
volt_val.setSize(10)
volt_val.setTextColor("white")
volt_val.setFill('#6B6B6B')
volt_val.setText('0')
volt_val.draw(win)
	################## Current 1 ##################
curr=Text(Point(refx,refy-8),"Corriente Limite(A): ")
curr.setFace('arial')
curr.setStyle('bold')
curr.setSize(10)
curr.setTextColor("black")
curr.draw(win)
curr_val=Entry(Point(refx+10,refy-8),10)
curr_val.setFace('arial')
curr_val.setSize(10)
curr_val.setTextColor("white")
curr_val.setFill('#6B6B6B')
curr_val.setText('0')
curr_val.draw(win)
	#############################################################################################
	###############################---Source 2 data---###############################
	################## Serial port 2 ##################
port2_name=Text(Point(refx+37,refy),"Puerto Serial 2: ")
port2_name.setFace('arial')
port2_name.setStyle('bold')
port2_name.setSize(10)
port2_name.setTextColor("black")
port2_name.draw(win)
port2_val=Entry(Point(refx+50,refy),18)
port2_val.setFace('arial')
port2_val.setSize(10)
port2_val.setTextColor("white")
port2_val.setFill('#6B6B6B')
port2_val.setText(puerto2)
port2_val.draw(win)
	################## Voltage 2 ##################
volt2=Text(Point(refx+40,refy-4),"Tensión(V): ")
volt2.setFace('arial')
volt2.setStyle('bold')
volt2.setSize(10)
volt2.setTextColor("black")
volt2.draw(win)
volt2_val=Entry(Point(refx+50,refy-4),10)
volt2_val.setFace('arial')
volt2_val.setSize(10)
volt2_val.setTextColor("white")
volt2_val.setFill('#6B6B6B')
volt2_val.setText('0')
volt2_val.draw(win)
	################## Current 2 ##################
curr2=Text(Point(refx+40,refy-8),"Corriente Limite(A): ")
curr2.setFace('arial')
curr2.setStyle('bold')
curr2.setSize(10)
curr2.setTextColor("black")
curr2.draw(win)
curr2_val=Entry(Point(refx+50,refy-8),10)
curr2_val.setFace('arial')
curr2_val.setSize(10)
curr2_val.setTextColor("white")
curr2_val.setFill('#6B6B6B')
curr2_val.setText('0')
curr2_val.draw(win)
	################## Readout message ##################
mensaje1=Text(Point(xgrid/4,refy-12.5),"Fuente 1")
mensaje1.setFace('arial')
mensaje1.setStyle('bold')
mensaje1.setSize(10)
mensaje1.setTextColor("black")
mensaje1.draw(win)
mensaje2=Text(Point(3*xgrid/4,refy-12.5),"Fuente 2")
mensaje2.setFace('arial')
mensaje2.setStyle('bold')
mensaje2.setSize(10)
mensaje2.setTextColor("black")
mensaje2.draw(win)
pt = win.getMouse()
while not quitButton.clicked(pt):
V=float(volt_val.getText())
C=float(curr_val.getText())
V2=float(volt2_val.getText())
C2=float(curr2_val.getText())
if (C > 4) or (C < -4):
curr_val.setText('0')
tkMessageBox.showerror("Error", "Valor C no puede ser mayor a 4A o menor a -4A")
if (C2 > 4) or (C2 < -4):
curr2_val.setText('0')
tkMessageBox.showerror("Error", "Valor C no puede ser mayor a 4A o menor a -4A")
if (V > 50) or (V < -50):
volt_val.setText('0')
tkMessageBox.showerror("Error", "Valor de tensión V máximo "+"\n"+" no puede ser mayor a 50V o menor a -50V")
if (V2 > 50) or (V2 < -50) :
volt2_val.setText('0')
tkMessageBox.showerror("Error", "Valor de tensión V máximo "+"\n"+" no puede ser mayor a 50V o menor a -50V")
puertos=glob.glob('/dev/tty[U]*')
try:
puerto1 = puertos[0]
connects1.activate()
except IndexError:
Vout1.deactivate()
stop1.deactivate()
connects1.deactivate()
puerto1 = 'no hay dispositivo'
try:
puerto2 = puertos[1]
connects2.activate()
except IndexError:
Vout2.deactivate()
stop2.deactivate()
connects2.deactivate()
puerto2 = 'no hay dispositivo'
port1_val.setText(puerto1)
port2_val.setText(puerto2)
try:
mensaje1.setText(puerto1)
mensaje2.setText(puerto2)
except Exception, e:
mensaje1.setText('no hay dispositivo')
mensaje2.setText('no hay dispositivo')
if connects1.clicked(pt):
port1=port1_val.getText()
kepco1=SK.Source("Fuente1",port1)
m1=kepco1.connectport()
m2=kepco1.identify()
mensaje1.setText(m1 + "\n" + m2)
Vout1.activate()
stop1.activate()
Vout1.rect.setFill("#33CC00")
stop1.rect.setFill("#C01A19")
if connects2.clicked(pt):
port2=port2_val.getText()
kepco2=SK.Source("Fuente2",port2)
m1=kepco2.connectport()
m2=kepco2.identify()
mensaje2.setText(m1 + "\n" + m2)
Vout2.activate()
stop2.activate()
Vout2.rect.setFill("#33CC00")
stop2.rect.setFill("#C01A19")
if Vout1.clicked(pt):
V=float(volt_val.getText())
C=float(curr_val.getText())
kepco1.WriteVolt(V,C)
if Vout2.clicked(pt):
V2=float(volt2_val.getText())
C2=float(curr2_val.getText())
kepco2.WriteVolt(V2,C2)
if stop1.clicked(pt):
kepco1.stop()
if stop2.clicked(pt):
kepco2.stop()
if cal.clicked(pt):
execfile('calv.py')
pt = win.getMouse()
if Vout1.active==True:
kepco1.stop()
if Vout2.active==True:
kepco2.stop()
win.close()
main()
| gpl-3.0 | 614,082,857,715,307,300 | 24.602374 | 112 | 0.63491 | false |
carl-mastrangelo/grpc | tools/run_tests/run_build_statistics.py | 14 | 10526 | #!/usr/bin/env python
# Copyright 2016 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool to get build statistics from Jenkins and upload to BigQuery."""
from __future__ import print_function
import argparse
import jenkinsapi
from jenkinsapi.custom_exceptions import JenkinsAPIException
from jenkinsapi.jenkins import Jenkins
import json
import os
import re
import sys
import urllib
gcp_utils_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../gcp/utils'))
sys.path.append(gcp_utils_dir)
import big_query_utils
_PROJECT_ID = 'grpc-testing'
_HAS_MATRIX = True
_BUILDS = {
'gRPC_interop_master': not _HAS_MATRIX,
'gRPC_master_linux': not _HAS_MATRIX,
'gRPC_master_macos': not _HAS_MATRIX,
'gRPC_master_windows': not _HAS_MATRIX,
'gRPC_performance_master': not _HAS_MATRIX,
'gRPC_portability_master_linux': not _HAS_MATRIX,
'gRPC_portability_master_windows': not _HAS_MATRIX,
'gRPC_master_asanitizer_c': not _HAS_MATRIX,
'gRPC_master_asanitizer_cpp': not _HAS_MATRIX,
'gRPC_master_msan_c': not _HAS_MATRIX,
'gRPC_master_tsanitizer_c': not _HAS_MATRIX,
'gRPC_master_tsan_cpp': not _HAS_MATRIX,
'gRPC_interop_pull_requests': not _HAS_MATRIX,
'gRPC_performance_pull_requests': not _HAS_MATRIX,
'gRPC_portability_pull_requests_linux': not _HAS_MATRIX,
'gRPC_portability_pr_win': not _HAS_MATRIX,
'gRPC_pull_requests_linux': not _HAS_MATRIX,
'gRPC_pull_requests_macos': not _HAS_MATRIX,
'gRPC_pr_win': not _HAS_MATRIX,
'gRPC_pull_requests_asan_c': not _HAS_MATRIX,
'gRPC_pull_requests_asan_cpp': not _HAS_MATRIX,
'gRPC_pull_requests_msan_c': not _HAS_MATRIX,
'gRPC_pull_requests_tsan_c': not _HAS_MATRIX,
'gRPC_pull_requests_tsan_cpp': not _HAS_MATRIX,
}
_URL_BASE = 'https://grpc-testing.appspot.com/job'
# This is a dynamic list where known and active issues should be added.
# Fixed ones should be removed.
# Also try not to add multiple messages from the same failure.
_KNOWN_ERRORS = [
'Failed to build workspace Tests with scheme AllTests',
'Build timed out',
'TIMEOUT: tools/run_tests/pre_build_node.sh',
'TIMEOUT: tools/run_tests/pre_build_ruby.sh',
'FATAL: Unable to produce a script file',
'FAILED: build_docker_c\+\+',
'cannot find package \"cloud.google.com/go/compute/metadata\"',
'LLVM ERROR: IO failure on output stream.',
'MSBUILD : error MSB1009: Project file does not exist.',
'fatal: git fetch_pack: expected ACK/NAK',
'Failed to fetch from http://github.com/grpc/grpc.git',
('hudson.remoting.RemotingSystemException: java.io.IOException: '
'Backing channel is disconnected.'),
'hudson.remoting.ChannelClosedException',
'Could not initialize class hudson.Util',
'Too many open files in system',
'FAILED: bins/tsan/qps_openloop_test GRPC_POLL_STRATEGY=epoll',
'FAILED: bins/tsan/qps_openloop_test GRPC_POLL_STRATEGY=legacy',
'FAILED: bins/tsan/qps_openloop_test GRPC_POLL_STRATEGY=poll',
('tests.bins/asan/h2_proxy_test streaming_error_response '
'GRPC_POLL_STRATEGY=legacy'),
'hudson.plugins.git.GitException',
'Couldn\'t find any revision to build',
'org.jenkinsci.plugin.Diskcheck.preCheckout',
'Something went wrong while deleting Files',
]
_NO_REPORT_FILES_FOUND_ERROR = 'No test report files were found.'
_UNKNOWN_ERROR = 'Unknown error'
_DATASET_ID = 'build_statistics'
def _scrape_for_known_errors(html):
error_list = []
for known_error in _KNOWN_ERRORS:
errors = re.findall(known_error, html)
this_error_count = len(errors)
if this_error_count > 0:
error_list.append({
'description': known_error,
'count': this_error_count
})
print('====> %d failures due to %s' % (this_error_count,
known_error))
return error_list
def _no_report_files_found(html):
return _NO_REPORT_FILES_FOUND_ERROR in html
def _get_last_processed_buildnumber(build_name):
query = 'SELECT max(build_number) FROM [%s:%s.%s];' % (_PROJECT_ID,
_DATASET_ID,
build_name)
query_job = big_query_utils.sync_query_job(bq, _PROJECT_ID, query)
page = bq.jobs().getQueryResults(
pageToken=None, **query_job['jobReference']).execute(num_retries=3)
if page['rows'][0]['f'][0]['v']:
return int(page['rows'][0]['f'][0]['v'])
return 0
def _process_matrix(build, url_base):
matrix_list = []
for matrix in build.get_matrix_runs():
matrix_str = re.match('.*\\xc2\\xbb ((?:[^,]+,?)+) #.*',
matrix.name).groups()[0]
matrix_tuple = matrix_str.split(',')
json_url = '%s/config=%s,language=%s,platform=%s/testReport/api/json' % (
url_base, matrix_tuple[0], matrix_tuple[1], matrix_tuple[2])
console_url = '%s/config=%s,language=%s,platform=%s/consoleFull' % (
url_base, matrix_tuple[0], matrix_tuple[1], matrix_tuple[2])
matrix_dict = {
'name': matrix_str,
'duration': matrix.get_duration().total_seconds()
}
matrix_dict.update(_process_build(json_url, console_url))
matrix_list.append(matrix_dict)
return matrix_list
def _process_build(json_url, console_url):
build_result = {}
error_list = []
try:
html = urllib.urlopen(json_url).read()
test_result = json.loads(html)
print('====> Parsing result from %s' % json_url)
failure_count = test_result['failCount']
build_result['pass_count'] = test_result['passCount']
build_result['failure_count'] = failure_count
# This means Jenkins failure occurred.
build_result['no_report_files_found'] = _no_report_files_found(html)
# Only check errors if Jenkins failure occurred.
if build_result['no_report_files_found']:
error_list = _scrape_for_known_errors(html)
except Exception as e:
print('====> Got exception for %s: %s.' % (json_url, str(e)))
print('====> Parsing errors from %s.' % console_url)
html = urllib.urlopen(console_url).read()
build_result['pass_count'] = 0
build_result['failure_count'] = 1
# In this case, the string doesn't exist in the result html but the fact
# that we fail to parse the result html indicates Jenkins failure and hence
# no report files were generated.
build_result['no_report_files_found'] = True
error_list = _scrape_for_known_errors(html)
if error_list:
build_result['error'] = error_list
elif build_result['no_report_files_found']:
build_result['error'] = [{'description': _UNKNOWN_ERROR, 'count': 1}]
else:
build_result['error'] = [{'description': '', 'count': 0}]
return build_result
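# For reference, _process_build() returns a dict shaped like
#   {'pass_count': int, 'failure_count': int,
#    'no_report_files_found': bool,
#    'error': [{'description': str, 'count': int}, ...]}
# and _process_matrix() produces one such dict per matrix configuration with
# 'name' and 'duration' added; these dicts become the BigQuery rows below.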
# parse command line
argp = argparse.ArgumentParser(description='Get build statistics.')
argp.add_argument('-u', '--username', default='jenkins')
argp.add_argument(
'-b',
'--builds',
choices=['all'] + sorted(_BUILDS.keys()),
nargs='+',
default=['all'])
args = argp.parse_args()
J = Jenkins('https://grpc-testing.appspot.com', args.username, 'apiToken')
bq = big_query_utils.create_big_query()
for build_name in _BUILDS.keys() if 'all' in args.builds else args.builds:
print('====> Build: %s' % build_name)
# Since get_last_completed_build() always fails due to malformatted string
# error, we use get_build_metadata() instead.
job = None
try:
job = J[build_name]
except Exception as e:
print('====> Failed to get build %s: %s.' % (build_name, str(e)))
continue
last_processed_build_number = _get_last_processed_buildnumber(build_name)
last_complete_build_number = job.get_last_completed_buildnumber()
    # Avoid processing the full history of a project we have never examined
    # before: in that case, only look at the 10 most recent builds.
starting_build_number = max(last_processed_build_number + 1,
last_complete_build_number - 9)
for build_number in xrange(starting_build_number,
last_complete_build_number + 1):
print('====> Processing %s build %d.' % (build_name, build_number))
build = None
try:
build = job.get_build_metadata(build_number)
print('====> Build status: %s.' % build.get_status())
if build.get_status() == 'ABORTED':
continue
# If any build is still running, stop processing this job. Next time, we
# start from where it was left so that all builds are processed
# sequentially.
if build.is_running():
print('====> Build %d is still running.' % build_number)
break
except KeyError:
print('====> Build %s is missing. Skip.' % build_number)
continue
build_result = {
'build_number': build_number,
'timestamp': str(build.get_timestamp())
}
url_base = json_url = '%s/%s/%d' % (_URL_BASE, build_name, build_number)
if _BUILDS[build_name]: # The build has matrix, such as gRPC_master.
build_result['matrix'] = _process_matrix(build, url_base)
else:
json_url = '%s/testReport/api/json' % url_base
console_url = '%s/consoleFull' % url_base
build_result['duration'] = build.get_duration().total_seconds()
build_stat = _process_build(json_url, console_url)
build_result.update(build_stat)
rows = [big_query_utils.make_row(build_number, build_result)]
if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID,
build_name, rows):
print('====> Error uploading result to bigquery.')
sys.exit(1)
| apache-2.0 | 1,030,119,117,399,214,600 | 40.769841 | 84 | 0.622744 | false |
SciTools/cartopy | lib/cartopy/tests/test_img_transform.py | 2 | 3880 | # Copyright Cartopy Contributors
#
# This file is part of Cartopy and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
import numpy as np
from numpy.testing import assert_array_equal
import pytest
import cartopy.img_transform as img_trans
import cartopy.crs as ccrs
@pytest.mark.parametrize('xmin, xmax', [
(-90, 0), (-90, 90), (-90, None),
(0, 90), (0, None),
(None, 0), (None, 90), (None, None)])
@pytest.mark.parametrize('ymin, ymax', [
(-45, 0), (-45, 45), (-45, None),
(0, 45), (0, None),
(None, 0), (None, 45), (None, None)])
def test_mesh_projection_extent(xmin, xmax, ymin, ymax):
proj = ccrs.PlateCarree()
nx = 4
ny = 2
target_x, target_y, extent = img_trans.mesh_projection(
proj, nx, ny,
x_extents=(xmin, xmax),
y_extents=(ymin, ymax))
if xmin is None:
xmin = proj.x_limits[0]
if xmax is None:
xmax = proj.x_limits[1]
if ymin is None:
ymin = proj.y_limits[0]
if ymax is None:
ymax = proj.y_limits[1]
assert_array_equal(extent, [xmin, xmax, ymin, ymax])
assert_array_equal(np.diff(target_x, axis=1), (xmax - xmin) / nx)
assert_array_equal(np.diff(target_y, axis=0), (ymax - ymin) / ny)
def test_griding_data_std_range():
# Data which exists inside the standard projection bounds i.e.
# [-180, 180].
target_prj = ccrs.PlateCarree()
# create 3 data points
lats = np.array([65, 10, -45])
lons = np.array([-90, 0, 90])
data = np.array([1, 2, 3])
data_trans = ccrs.Geodetic()
target_x, target_y, extent = img_trans.mesh_projection(target_prj, 8, 4)
image = img_trans.regrid(data, lons, lats, data_trans, target_prj,
target_x, target_y,
mask_extrapolated=True)
# The expected image. n.b. on a map the data is reversed in the y axis.
expected = np.array([[3, 3, 3, 3, 3, 3, 3, 3],
[3, 1, 2, 2, 2, 3, 3, 3],
[1, 1, 1, 2, 2, 2, 3, 1],
[1, 1, 1, 1, 1, 1, 1, 1]], dtype=np.float64)
expected_mask = np.array(
[[True, True, True, True, True, True, True, True],
[True, False, False, False, False, False, False, True],
[True, False, False, False, False, False, False, True],
[True, True, True, True, True, True, True, True]])
assert_array_equal([-180, 180, -90, 90], extent)
assert_array_equal(expected, image)
assert_array_equal(expected_mask, image.mask)
def test_griding_data_outside_projection():
# Data which exists outside the standard projection e.g. [0, 360] rather
# than [-180, 180].
target_prj = ccrs.PlateCarree()
# create 3 data points
lats = np.array([65, 10, -45])
lons = np.array([120, 180, 240])
data = np.array([1, 2, 3])
data_trans = ccrs.Geodetic()
target_x, target_y, extent = img_trans.mesh_projection(target_prj, 8, 4)
image = img_trans.regrid(data, lons, lats, data_trans, target_prj,
target_x, target_y,
mask_extrapolated=True)
# The expected image. n.b. on a map the data is reversed in the y axis.
expected = np.array(
[[3, 3, 3, 3, 3, 3, 3, 3],
[3, 3, 3, 3, 3, 1, 2, 2],
[2, 2, 3, 1, 1, 1, 1, 2],
[1, 1, 1, 1, 1, 1, 1, 1]], dtype=np.float64)
expected_mask = np.array(
[[True, True, True, True, True, True, True, True],
[False, False, True, True, True, True, False, False],
[False, False, True, True, True, True, False, False],
[True, True, True, True, True, True, True, True]])
assert_array_equal([-180, 180, -90, 90], extent)
assert_array_equal(expected, image)
assert_array_equal(expected_mask, image.mask)
| lgpl-3.0 | 5,755,524,756,542,709,000 | 34.272727 | 76 | 0.570619 | false |
openfisca/openfisca-france-indirect-taxation | openfisca_france_indirect_taxation/examples/transports/depenses_par_categories/plot_depenses_par_strate.py | 4 | 1693 | # -*- coding: utf-8 -*-
"""
Created on Fri Sep 18 11:11:34 2015
@author: thomas.douenne
"""
# The goal is to compute, for each residence area ("strate"), the average spending on motor fuels.
# The analysis can be refined to compare spending on diesel and on petrol.
# We find that for both fuels rural households consume more than urban ones.
# Import general modules
from __future__ import division
import pandas
import seaborn
# Import Openfisca-specific modules
from openfisca_france_indirect_taxation.examples.utils_example import graph_builder_bar
from openfisca_france_indirect_taxation.surveys import SurveyScenario
# Import a new color palette
seaborn.set_palette(seaborn.color_palette("Set2", 12))
if __name__ == '__main__':
    # Select the variables used for the simulation
simulated_variables = ['depenses_carburants', 'depenses_essence', 'depenses_diesel', 'revtot']
for year in [2000, 2005, 2011]:
survey_scenario = SurveyScenario.create(year = year)
pivot_table = pandas.DataFrame()
for values in simulated_variables:
pivot_table = pandas.concat([
pivot_table,
survey_scenario.compute_pivot_table(values = [values], columns = ['strate'])
])
df = pivot_table.T
        # Draw the graphs
for element in simulated_variables:
if element == 'revtot':
continue
df['part_{}_revtot'.format(element)] = \
df['{}'.format(element)] / df['revtot']
graph_builder_bar(df[['part_{}_revtot'.format(element)]])
| agpl-3.0 | 2,423,337,781,783,484,000 | 36.333333 | 106 | 0.667262 | false |
costadorione/purestream | channels/toonitalia.py | 1 | 7937 | # -*- coding: utf-8 -*-
# ------------------------------------------------------------
# streamondemand.- XBMC Plugin
# Canale per toointalia
# http://www.mimediacenter.info/foro/viewforum.php?f=36
# ------------------------------------------------------------
import re
from core import config
from core import logger
from core import scrapertools
from core import servertools
from core.item import Item
from core.tmdb import infoSod
__channel__ = "toonitalia"
__category__ = "A"
__type__ = "generic"
__title__ = "Toonitalia"
__language__ = "IT"
host = "http://toonitalia.altervista.org"
headers = [
['User-Agent', 'Mozilla/5.0 (Windows NT 6.1; rv:38.0) Gecko/20100101 Firefox/38.0'],
['Accept-Encoding', 'gzip, deflate']
]
DEBUG = config.get_setting("debug")
def isGeneric():
return True
def mainlist(item):
logger.info("streamondemand.toointalia mainlist")
itemlist = [Item(channel=__channel__,
title="[COLOR azure]Home[/COLOR]",
action="anime",
url=host,
thumbnail="http://i.imgur.com/a8Vwz1V.png"),
Item(channel=__channel__,
title="[COLOR azure]Anime[/COLOR]",
action="anime",
url=host + "/category/anime/",
thumbnail="http://i.imgur.com/a8Vwz1V.png"),
Item(channel=__channel__,
title="[COLOR azure]Anime Sub-Ita[/COLOR]",
action="anime",
url=host + "/category/anime-sub-ita/",
thumbnail="http://i.imgur.com/a8Vwz1V.png"),
Item(channel=__channel__,
title="[COLOR azure]Film Animazione[/COLOR]",
action="animazione",
url="%s/category/film-animazione/" % host,
thumbnail="http://i.imgur.com/a8Vwz1V.png"),
Item(channel=__channel__,
title="[COLOR azure]Serie TV[/COLOR]",
action="anime",
url=host + "/category/serie-tv/",
thumbnail="http://i.imgur.com/a8Vwz1V.png"),
Item(channel=__channel__,
title="[COLOR yellow]Cerca...[/COLOR]",
action="search",
extra="anime",
thumbnail="http://dc467.4shared.com/img/fEbJqOum/s7/13feaf0c8c0/Search")]
return itemlist
def search(item, texto):
logger.info("[toonitalia.py] " + item.url + " search " + texto)
item.url = host + "/?s=" + texto
try:
return anime(item)
    # Catch the exception so that a failing channel does not break the global search
except:
import sys
for line in sys.exc_info():
logger.error("%s" % line)
return []
def anime(item):
logger.info("streamondemand.toointalia peliculas")
itemlist = []
    ## Download the page
data = scrapertools.cache_page(item.url)
    ## Extract the entries (folders)
patron = '<figure class="post-image left">\s*<a href="([^"]+)"><img src="[^"]*"[^l]+lt="([^"]+)" /></a>\s*</figure>'
matches = re.compile(patron, re.DOTALL).findall(data)
for scrapedurl, scrapedtitle in matches:
title = scrapertools.decodeHtmlentities(scrapedtitle)
scrapedthumbnail = ""
itemlist.append(infoSod(
Item(channel=__channel__,
action="episodi",
title=title,
url=scrapedurl,
thumbnail=scrapedthumbnail,
fulltitle=title,
show=title,
viewmode="movie_with_plot"), tipo='tv'))
# Older Entries
patron = '<link rel="next" href="([^"]+)" />'
next_page = scrapertools.find_single_match(data, patron)
if next_page != "":
itemlist.append(
Item(channel=__channel__,
title="[COLOR orange]Post più vecchi...[/COLOR]",
url=next_page,
action="anime",
thumbnail="http://2.bp.blogspot.com/-fE9tzwmjaeQ/UcM2apxDtjI/AAAAAAAAeeg/WKSGM2TADLM/s1600/pager+old.png"))
return itemlist
def animazione(item):
logger.info("streamondemand.toointalia peliculas")
itemlist = []
    ## Download the page
data = scrapertools.cache_page(item.url)
    ## Extract the entries (folders)
patron = '<figure class="post-image left">\s*<a href="([^"]+)"><img src="[^"]*"[^l]+lt="([^"]+)" /></a>\s*</figure>'
matches = re.compile(patron, re.DOTALL).findall(data)
for scrapedurl, scrapedtitle in matches:
title = scrapertools.decodeHtmlentities(scrapedtitle)
scrapedthumbnail = ""
itemlist.append(infoSod(
Item(channel=__channel__,
action="film",
title=title,
url=scrapedurl,
thumbnail=scrapedthumbnail,
fulltitle=title,
show=title,
viewmode="movie_with_plot"), tipo='movie'))
# Older Entries
patron = '<link rel="next" href="([^"]+)" />'
next_page = scrapertools.find_single_match(data, patron)
if next_page != "":
itemlist.append(
Item(channel=__channel__,
title="[COLOR orange]Post più vecchi...[/COLOR]",
url=next_page,
action="animazione",
thumbnail="http://2.bp.blogspot.com/-fE9tzwmjaeQ/UcM2apxDtjI/AAAAAAAAeeg/WKSGM2TADLM/s1600/pager+old.png"))
return itemlist
def episodi(item):
logger.info("toonitalia.py episodi")
itemlist = []
# Downloads page
data = scrapertools.cache_page(item.url)
# Extracts the entries
patron = '<a\s*href="([^"]+)"\s*target="_blank">([^<]+)</a><'
matches = re.compile(patron, re.DOTALL).findall(data)
for scrapedurl, scrapedtitle in matches:
if 'adf.ly' not in scrapedurl:
scrapedtitle = scrapertools.decodeHtmlentities(scrapedtitle)
itemlist.append(
Item(channel=__channel__,
action="findvid",
title=scrapedtitle,
thumbnail=item.thumbnail,
url=scrapedurl))
return itemlist
def film(item):
logger.info("toonitalia.py film")
itemlist = []
# Downloads page
data = scrapertools.cache_page(item.url)
# Extracts the entries
# patron = '<img class="aligncenter.*?src="([^"]+)" alt="([^"]+)".*?<strong><a href="([^"]+)" target="_blank">'
patron = '<img.*?src="([^"]+)".*?alt="([^"]+)".*?strong><a href="([^"]+)" target="_blank">'
matches = re.compile(patron, re.DOTALL).findall(data)
for scrapedthumbnail, scrapedtitle, scrapedurl in matches:
scrapedtitle = scrapertools.decodeHtmlentities(scrapedtitle)
itemlist.append(
Item(channel=__channel__,
action="findvid",
title=scrapedtitle,
thumbnail=scrapedthumbnail,
url=scrapedurl))
# Older Entries
patron = '<link rel="next" href="([^"]+)" />'
next_page = scrapertools.find_single_match(data, patron)
if next_page != "":
itemlist.append(
Item(channel=__channel__,
title="[COLOR orange]Post più vecchi...[/COLOR]",
url=next_page,
action="film",
thumbnail="http://2.bp.blogspot.com/-fE9tzwmjaeQ/UcM2apxDtjI/AAAAAAAAeeg/WKSGM2TADLM/s1600/pager+old.png"))
return itemlist
def findvid(item):
logger.info("[toonitalia.py] findvideos")
itemlist = servertools.find_video_items(data=item.url)
for videoitem in itemlist:
videoitem.title = item.title + videoitem.title
videoitem.fulltitle = item.fulltitle
videoitem.thumbnail = item.thumbnail
videoitem.channel = __channel__
return itemlist
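# Navigation flow of this channel: mainlist() builds the menu, anime() and
# animazione() walk the index pages, episodi()/film() list the entries of a
# single title, and findvid() hands the page to servertools.find_video_items()
# to resolve the playable video links.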
| gpl-3.0 | -4,788,120,387,632,032,000 | 33.193966 | 124 | 0.546199 | false |
vasiliykochergin/euca2ools | euca2ools/commands/ec2/createvpngateway.py | 5 | 1980 | # Copyright 2014 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from requestbuilder import Arg
from euca2ools.commands.ec2 import EC2Request
class CreateVpnGateway(EC2Request):
DESCRIPTION = ('Create a virtual private gateway\n\nThis is the VPC side '
'of a VPN connection. You will also need to create a VPN '
'customer gateway with euca-create-customer-gateway(1).')
ARGS = [Arg('-t', '--type', dest='Type', metavar='ipsec.1', required=True,
choices=('ipsec.1',),
help='the type of VPN connection to use (required)')]
def print_result(self, result):
self.print_vpn_gateway(result.get('vpnGateway') or {})
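# Typical invocation (sketch; the CLI entry-point name follows the usual
# euca2ools convention and is assumed here):
#   euca-create-vpn-gateway -t ipsec.1
# followed by euca-create-customer-gateway(1) for the customer side.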
| bsd-2-clause | 7,845,498,100,838,874,000 | 48.5 | 78 | 0.737374 | false |
SaikWolf/gnuradio | gnuradio-runtime/python/gnuradio/ctrlport/GrDataPlotter.py | 2 | 16170 | #!/usr/bin/env python
#
# Copyright 2012,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
from gnuradio import blocks
from gnuradio import filter
from gnuradio.ctrlport.GNURadio import ControlPort
import sys, time, struct
try:
from gnuradio import qtgui
from PyQt4 import QtGui, QtCore
import sip
except ImportError:
print "Error: Program requires PyQt4 and gr-qtgui."
sys.exit(1)
class GrDataPlotParent(gr.top_block, QtGui.QWidget):
# Setup signals
plotupdated = QtCore.pyqtSignal(QtGui.QWidget)
def __init__(self, name, rate, pmin=None, pmax=None):
gr.top_block.__init__(self)
QtGui.QWidget.__init__(self, None)
self._name = name
self._npts = 500
self._rate = rate
self.knobnames = [name,]
self.layout = QtGui.QVBoxLayout()
self.setLayout(self.layout)
self.setAcceptDrops(True)
def _setup(self, nconnections):
self.stop()
self.wait()
if(self.layout.count() > 0):
# Remove and disconnect. Making sure no references to snk
# remain so that the plot gets deleted.
self.layout.removeWidget(self.py_window)
self.disconnect(self.thr, (self.snk, 0))
self.disconnect(self.src[0], self.thr)
for n in xrange(1, self._ncons):
self.disconnect(self.src[n], (self.snk,n))
self._ncons = nconnections
self._data_len = self._ncons*[0,]
self.thr = blocks.throttle(self._datasize, self._rate)
self.snk = self.get_qtsink()
self.connect(self.thr, (self.snk, 0))
self._last_data = []
self.src = []
for n in xrange(self._ncons):
self.set_line_label(n, self.knobnames[n])
self._last_data.append(int(self._npts)*[0,])
self.src.append(self.get_vecsource())
if(n == 0):
self.connect(self.src[n], self.thr)
else:
self.connect(self.src[n], (self.snk,n))
self.py_window = sip.wrapinstance(self.snk.pyqwidget(), QtGui.QWidget)
self.layout.addWidget(self.py_window)
def __del__(self):
pass
def close(self):
self.snk.close()
def qwidget(self):
return self.py_window
def name(self):
return self._name
def semilogy(self, en=True):
self.snk.enable_semilogy(en)
def dragEnterEvent(self, e):
e.acceptProposedAction()
def dropEvent(self, e):
if(e.mimeData().hasFormat("text/plain")):
data = str(e.mimeData().text())
#"PlotData:{0}:{1}".format(tag, iscomplex)
datalst = data.split(":::")
tag = datalst[0]
name = datalst[1]
cpx = datalst[2] != "0"
if(tag == "PlotData" and cpx == self._iscomplex):
self.knobnames.append(name)
# create a new qwidget plot with the new data stream.
self._setup(len(self.knobnames))
# emit that this plot has been updated with a new qwidget.
self.plotupdated.emit(self)
e.acceptProposedAction()
def data_to_complex(self, data):
if(self._iscomplex):
data_r = data[0::2]
data_i = data[1::2]
data = [complex(r,i) for r,i in zip(data_r, data_i)]
return data
def update(self, data):
# Ask GUI if there has been a change in nsamps
npts = self.get_npts()
if(self._npts != npts):
# Adjust buffers to accommodate new settings
for n in xrange(self._ncons):
if(npts < self._npts):
if(self._data_len[n] < npts):
self._last_data[n] = self._last_data[n][0:npts]
else:
self._last_data[n] = self._last_data[n][self._data_len[n]-npts:self._data_len[n]]
self._data_len[n] = npts
else:
self._last_data[n] += (npts - self._npts)*[0,]
self._npts = npts
self.snk.reset()
if(self._stripchart):
# Update the plot data depending on type
for n in xrange(self._ncons):
if(type(data[n]) == list):
data[n] = self.data_to_complex(data[n])
if(len(data[n]) > self._npts):
self.src[n].set_data(data[n])
self._last_data[n] = data[n][-self._npts:]
else:
newdata = self._last_data[n][-(self._npts-len(data)):]
newdata += data[n]
self.src[n].set_data(newdata)
self._last_data[n] = newdata
else: # single value update
if(self._iscomplex):
data[n] = complex(data[n][0], data[n][1])
if(self._data_len[n] < self._npts):
self._last_data[n][self._data_len[n]] = data[n]
self._data_len[n] += 1
else:
self._last_data[n] = self._last_data[n][1:]
self._last_data[n].append(data[n])
self.src[n].set_data(self._last_data[n])
else:
for n in xrange(self._ncons):
if(type(data[n]) != list):
data[n] = [data[n],]
data[n] = self.data_to_complex(data[n])
self.src[n].set_data(data[n])
class GrDataPlotterC(GrDataPlotParent):
def __init__(self, name, rate, pmin=None, pmax=None, stripchart=False):
GrDataPlotParent.__init__(self, name, rate, pmin, pmax)
self._stripchart = stripchart
self._datasize = gr.sizeof_gr_complex
self._iscomplex = True
self._setup(1)
def stem(self, en=True):
self.snk.enable_stem_plot(en)
def get_qtsink(self):
snk = qtgui.time_sink_c(self._npts, 1.0,
self._name, self._ncons)
snk.enable_autoscale(True)
return snk
def get_vecsource(self):
return blocks.vector_source_c([])
def get_npts(self):
self._npts = self.snk.nsamps()
return self._npts
def set_line_label(self, n, name):
self.snk.set_line_label(2*n+0, "Re{" + self.knobnames[n] + "}")
self.snk.set_line_label(2*n+1, "Im{" + self.knobnames[n] + "}")
class GrDataPlotterF(GrDataPlotParent):
def __init__(self, name, rate, pmin=None, pmax=None, stripchart=False):
GrDataPlotParent.__init__(self, name, rate, pmin, pmax)
self._stripchart = stripchart
self._datasize = gr.sizeof_float
self._iscomplex = False
self._setup(1)
def stem(self, en=True):
self.snk.enable_stem_plot(en)
def get_qtsink(self):
snk = qtgui.time_sink_f(self._npts, 1.0,
self._name, self._ncons)
snk.enable_autoscale(True)
return snk
def get_vecsource(self):
return blocks.vector_source_f([])
def get_npts(self):
self._npts = self.snk.nsamps()
return self._npts
def set_line_label(self, n, name):
self.snk.set_line_label(n, self.knobnames[n])
class GrDataPlotterConst(GrDataPlotParent):
def __init__(self, name, rate, pmin=None, pmax=None, stripchart=False):
GrDataPlotParent.__init__(self, name, rate, pmin, pmax)
self._datasize = gr.sizeof_gr_complex
self._stripchart = stripchart
self._iscomplex = True
self._setup(1)
def get_qtsink(self):
snk = qtgui.const_sink_c(self._npts,
self._name,
self._ncons)
snk.enable_autoscale(True)
return snk
def get_vecsource(self):
return blocks.vector_source_c([])
def get_npts(self):
self._npts = self.snk.nsamps()
return self._npts
def scatter(self, en=True):
if(en):
self.snk.set_line_style(0, 0)
else:
self.snk.set_line_style(0, 1)
def set_line_label(self, n, name):
self.snk.set_line_label(n, self.knobnames[n])
class GrDataPlotterPsdC(GrDataPlotParent):
def __init__(self, name, rate, pmin=None, pmax=None):
GrDataPlotParent.__init__(self, name, rate, pmin, pmax)
self._datasize = gr.sizeof_gr_complex
self._stripchart = True
self._iscomplex = True
self._npts = 2048
self._wintype = filter.firdes.WIN_BLACKMAN_hARRIS
self._fc = 0
self._setup(1)
def get_qtsink(self):
snk = qtgui.freq_sink_c(self._npts, self._wintype,
self._fc, 1.0,
self._name,
self._ncons)
snk.enable_autoscale(True)
return snk
def get_vecsource(self):
return blocks.vector_source_c([])
def get_npts(self):
self._npts = self.snk.fft_size()
return self._npts
def set_line_label(self, n, name):
self.snk.set_line_label(n, self.knobnames[n])
class GrDataPlotterPsdF(GrDataPlotParent):
def __init__(self, name, rate, pmin=None, pmax=None):
GrDataPlotParent.__init__(self, name, rate, pmin, pmax)
self._datasize = gr.sizeof_float
self._stripchart = True
self._iscomplex = False
self._npts = 2048
self._wintype = filter.firdes.WIN_BLACKMAN_hARRIS
self._fc = 0
self._setup(1)
def get_qtsink(self):
snk = qtgui.freq_sink_f(self._npts, self._wintype,
self._fc, 1.0,
self._name,
self._ncons)
snk.enable_autoscale(True)
return snk
def get_vecsource(self):
return blocks.vector_source_f([])
def get_npts(self):
self._npts = self.snk.fft_size()
return self._npts
def set_line_label(self, n, name):
self.snk.set_line_label(n, self.knobnames[n])
class GrTimeRasterF(GrDataPlotParent):
def __init__(self, name, rate, pmin=None, pmax=None):
GrDataPlotParent.__init__(self, name, rate, pmin, pmax)
self._npts = 10
self._rows = 40
self._datasize = gr.sizeof_float
self._stripchart = False
self._iscomplex = False
self._setup(1)
def get_qtsink(self):
snk = qtgui.time_raster_sink_f(1.0, self._npts, self._rows,
[], [], self._name,
self._ncons)
return snk
def get_vecsource(self):
return blocks.vector_source_f([])
def get_npts(self):
self._npts = self.snk.num_cols()
return self._npts
def set_line_label(self, n, name):
self.snk.set_line_label(n, self.knobnames[n])
class GrTimeRasterB(GrDataPlotParent):
def __init__(self, name, rate, pmin=None, pmax=None):
GrDataPlotParent.__init__(self, name, rate, pmin, pmax)
self._npts = 10
self._rows = 40
self._datasize = gr.sizeof_char
self._stripchart = False
self._iscomplex = False
self._setup(1)
def get_qtsink(self):
snk = qtgui.time_raster_sink_b(1.0, self._npts, self._rows,
[], [], self._name,
self._ncons)
return snk
def get_vecsource(self):
return blocks.vector_source_b([])
def get_npts(self):
self._npts = self.snk.num_cols()
return self._npts
def set_line_label(self, n, name):
self.snk.set_line_label(n, self.knobnames[n])
class GrDataPlotterValueTable:
def __init__(self, uid, parent, x, y, xsize, ysize,
headers=['Statistic Key ( Source Block :: Stat Name ) ',
                          'Current Value', 'Units', 'Description']):
# must encapsulate, cuz Qt's bases are not classes
self.uid = uid
self.treeWidget = QtGui.QTreeWidget(parent)
self.treeWidget.setColumnCount(len(headers))
self.treeWidget.setGeometry(x,y,xsize,ysize)
self.treeWidget.setHeaderLabels(headers)
self.treeWidget.resizeColumnToContents(0)
def updateItems(self, knobs, knobprops):
items = []
foundKeys = []
deleteKeys = []
numItems = self.treeWidget.topLevelItemCount()
# The input knobs variable is a dict of stats to display in the tree.
# Update tree stat values with new values found in knobs.
# Track found keys and track keys in tree that are not in input knobs.
for i in range(0, numItems):
item = self.treeWidget.topLevelItem(i)
# itemKey is the text in the first column of a QTreeWidgetItem
itemKey = str(item.text(0))
if itemKey in knobs.keys():
# This key was found in the tree, update its values.
foundKeys.append(itemKey)
v = knobs[itemKey].value
units = str(knobprops[itemKey].units)
descr = str(knobprops[itemKey].description)
if(type(v) == ControlPort.complex):
v = v.re + v.im*1j
# If it's a byte stream, Python thinks it's a string.
# Unpack and convert to floats for plotting.
# Ignore the edge list knob if it's being exported
elif(type(v) == str and itemKey.find('probe2_b') == 0):
v = struct.unpack(len(v)*'b', v)
# Convert the final value to a string for displaying
v = str(v)
if (item.text(1) != v or
item.text(2) != units or
item.text(3) != descr):
item.setText(1, v)
item.setText(2, units)
item.setText(3, descr)
else:
# This item is not in the knobs list...track it for removal.
deleteKeys.append(itemKey)
# Add items to tree that are not currently in the tree.
for k in knobs.keys():
if k not in foundKeys:
v = knobs[k].value
if(type(v) == ControlPort.complex):
v = v.re + v.im*1j
# If it's a byte stream, Python thinks it's a string.
# Unpack and convert to floats for plotting.
# Ignore the edge list knob if it's being exported
elif(type(v) == str and k.find('probe2_b') == 0):
v = struct.unpack(len(v)*'b', v)
item = QtGui.QTreeWidgetItem([k, str(v),
knobprops[k].units, knobprops[k].description])
self.treeWidget.addTopLevelItem(item)
# Remove items currently in tree that are not in the knob list.
for itemKey in deleteKeys:
            qtwiList = self.treeWidget.findItems(itemKey, QtCore.Qt.MatchFixedString)
if (len(qtwiList) > 1):
raise Exception('More than one item with key %s in tree' %
itemKey)
elif (len(qtwiList) == 1):
i = self.treeWidget.indexOfTopLevelItem(qtwiList[0])
self.treeWidget.takeTopLevelItem(i)
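# Usage sketch: each plotter above is both a gr.top_block and a QWidget. A host
# GUI typically constructs one (e.g. GrDataPlotterF(name, rate)), calls
# start(), embeds qwidget() in its layout, and then calls update(data) as new
# ControlPort samples arrive, where data holds one entry (scalar or list) per
# connected knob.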
| gpl-3.0 | 3,292,079,363,314,600,400 | 31.93279 | 105 | 0.541682 | false |
dbfuentes/update-p2p-lists | update_lists.py | 1 | 4628 | #!/usr/bin/env python
# title :update_lists.py
# description :download multiple lists and make a single result list
# author :Daniel Fuentes B.
# date :08-06-2014
# version :0.1
# licence :MIT License/X11 license (see COPYING.txt file)
# usage :python update_lists.py
# ===================================================================
########### START EDIT HERE ###########
# Level 1, Borgon and Spyware list from Bluetack (.zip files)
# Add or remove lists, as you wish (only in p2p format and .zip files)
urls = ["http://list.iblocklist.com/?list=ydxerpxkpcfqjaybcssw&fileformat=p2p&archiveformat=zip",
"http://list.iblocklist.com/?list=gihxqmhyunbxhbmgqrla&fileformat=p2p&archiveformat=zip",
"http://list.iblocklist.com/?list=llvtlsjyoyiczbkjsxpf&fileformat=p2p&archiveformat=zip" ]
# Name of the final list (output list)
ouputlistname = "listas.p2p"
########### STOP EDIT HERE ############
# ===================================================================
# import modules
# ===================================================================
import os.path
import os
import shutil
import urllib
import zipfile
# ===================================================================
# class and funtions
# ===================================================================
class MyOpener(urllib.FancyURLopener):
"""change the python/urllib user agent"""
# By default python use: URLopener.version = "Python-urllib/1.xx"
version = "Mozilla/5.0 (Windows; U; MSIE 7.0; Windows NT 6.0; en-US)"
def create_dir(directory):
if not os.path.exists(directory):
try:
os.makedirs(directory)
except OSError:
if not os.path.isdir(path):
raise
else:
pass
def delete_dir(directory):
shutil.rmtree(directory)
def download_file(URL, filename):
    # Switch the user agent to the one defined above:
urlretrieve = MyOpener().retrieve
f = urlretrieve(URL, filename)
def unzip(source_filename, dest_dir):
with zipfile.ZipFile(source_filename) as zf:
for member in zf.infolist():
# Path traversal defense from
# http://hg.python.org/cpython/file/tip/Lib/http/server.py
words = member.filename.split('/')
path = dest_dir
for word in words[:-1]:
drive, word = os.path.splitdrive(word)
head, word = os.path.split(word)
if word in (os.curdir, os.pardir, ''): continue
path = os.path.join(path, word)
zf.extract(member, path)
# ===================================================================
# Main program:
# ===================================================================
if __name__ == "__main__":
    # create a temporary working directory named "temp0001"
temp_dir = os.path.join(os.curdir, "temp0001")
create_dir(temp_dir)
    # download the .ZIP files containing the lists and extract them
for lists in urls:
try:
zipfilename = os.path.join(temp_dir, "temp.zip")
myopener = MyOpener()
myopener.retrieve(lists, zipfilename)
unzip(zipfilename, temp_dir)
        # Message shown in case of error or if the path cannot be found
except:
print "Error: Failed to obtain information"
    # find all the downloaded lists and merge them into a single file
listfilenames = []
for file in os.listdir(temp_dir):
if file.endswith(".txt"):
listfilenames.append(str(file))
if file.endswith(".p2p"):
listfilenames.append(str(file))
else:
pass
if os.path.exists(os.path.join(os.curdir, ouputlistname)):
        # overwrite the existing file
print "yes"
f = open(os.path.join(os.curdir, ouputlistname), "w")
f.write("")
f.close()
else:
f = open(os.path.join(os.curdir, ouputlistname), "w")
f.close()
print listfilenames
    # append each list to the main one
for elemento in listfilenames:
        # source file
inputfile = open(os.path.join(temp_dir, elemento), "r")
        # destination file
outputfile = open(os.path.join(os.curdir, ouputlistname), "a")
        # write the source lines to the destination one by one
linea = inputfile.readline()
while linea != "":
outputfile.write(linea)
linea = inputfile.readline()
inputfile.close()
outputfile.close()
    # delete the temporary directory
delete_dir(temp_dir)
| mit | -7,457,388,271,560,356,000 | 33.266667 | 97 | 0.552529 | false |
lifu-tu/TE_TweeboParser | token_selection/pipeline.py | 2 | 3310 | # Copyright (c) 2013-2014 Lingpeng Kong
# All Rights Reserved.
#
# This file is part of TweeboParser 1.0.
#
# TweeboParser 1.0 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# TweeboParser 1.0 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with TweeboParser 1.0. If not, see <http://www.gnu.org/licenses/>.
# Author: Swabha Swayamdipta, Lingpeng Kong
# /usr/bin/python
from __future__ import division
import viterbi, sys
import codecs
def print_line_withmodification(cline, tag):
s = ""
for i in xrange(0,13):
s += (cline[i] + "\t")
s += tag
print s
def main(testfile, featsfile):
labelset = ['0', '1', '*']
test = codecs.open(testfile, 'r', 'utf-8')
feats = set([])
sents = []
tagseqs = []
postagseqs = []
vecs1 = []
vecs2 = []
contents = []
sent = []
tags = []
postags = []
vec1 = []
vec2 = []
content = []
while 1:
line = test.readline()
if not line:
break
line = line.strip()
if line == "":
sents.append(sent)
tagseqs.append(tags)
postagseqs.append(postags)
vecs1.append(vec1)
vecs2.append(vec2)
contents.append(content)
sent = []
tags = []
postags = []
vec1 = []
vec2 = []
content = []
continue
cline = line.split("\t")
word = cline[1].strip()
#tag = cline[13].strip()
tag = '1'
pos = cline[3].strip()
v1 = cline[10].strip()
v2 = cline[11].strip()
sent.append(word.strip())
tags.append(tag.strip())
postags.append(pos.strip())
vec1.append(v1.strip())
vec2.append(v2.strip())
content.append(cline)
test.close()
weights = {}
feats = open(featsfile, 'r')
while 1:
line = feats.readline()
if not line:
break
line = line.strip()
f, wt = line.split(' ')
weights[f] = float(wt)
feats.close()
acc = 0.0
tot = 0
for i in range(len(sents)):
sent = sents[i]
postags = postagseqs[i]
vec1 = vecs1[i]
vec2 = vecs2[i]
tags, f = viterbi.execute(sent, labelset, postags, vec1, vec2, weights)
for j in range(len(tags)):
print_line_withmodification(contents[i][j],tags[j])
if tags[j] == tagseqs[i][j]:
acc += 1
print
tot += len(tags)
#print ' '.join(sent)
#print ' '.join(tags), '\n', ' '.join(tagseqs[i])
#print
#sys.stderr.write(str(acc/tot) + "\n")
if __name__ == "__main__":
sys.stdout = codecs.getwriter('utf-8')(sys.stdout)
sys.stderr = codecs.getwriter('utf-8')(sys.stderr)
main(sys.argv[1], sys.argv[2])
| gpl-3.0 | -3,171,694,777,585,097,700 | 25.062992 | 79 | 0.548036 | false |
jmagnusson/ramlfications | tests/test_loader.py | 4 | 8553 | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Spotify AB
from __future__ import absolute_import, division, print_function
import os
import json
import pytest
from six import iteritems
from ramlfications import loader
from ramlfications.errors import LoadRAMLError
from .base import EXAMPLES, JSONREF
from .data.fixtures import load_fixtures as lf
def dict_equal(dict1, dict2):
for k, v1 in list(iteritems(dict1)):
assert k in dict2
v2 = dict2[k]
assert v1 == v2
return True
def test_load_file():
raml_file = os.path.join(EXAMPLES + "base-includes.raml")
with open(raml_file) as f:
raml = loader.RAMLLoader().load(f)
expected_data = lf.load_file_expected_data
assert dict_equal(raml, expected_data)
def test_load_file_with_nested_includes():
raml_file = os.path.join(EXAMPLES + "nested-includes.raml")
with open(raml_file) as f:
raml = loader.RAMLLoader().load(f)
expected_data = lf.load_file_with_nested_includes_expected
assert dict_equal(raml, expected_data)
def test_load_file_with_nonyaml_include():
raml_file = os.path.join(EXAMPLES + "nonyaml-includes.raml")
with open(raml_file) as f:
raml = loader.RAMLLoader().load(f)
expected_data = {
'not_yaml': "This is just a string.\n",
'title': 'GitHub API Demo - Includes',
'version': 'v3'
}
assert dict_equal(raml, expected_data)
def test_load_string():
raml_str = ("""
- foo
- bar
- baz
""")
raml = loader.RAMLLoader().load(raml_str)
expected_data = ["foo", "bar", "baz"]
assert raml.sort() == expected_data.sort()
def test_yaml_parser_error():
raml_obj = os.path.join(EXAMPLES, "invalid_yaml.yaml")
with pytest.raises(LoadRAMLError) as e:
loader.RAMLLoader().load(open(raml_obj))
msg = "Error parsing RAML:"
assert msg in e.value.args[0]
def test_include_json():
raml_file = os.path.join(EXAMPLES + "json_includes.raml")
with open(raml_file) as f:
raml = loader.RAMLLoader().load(f)
expected_data = lf.include_json_expected
assert dict_equal(raml, expected_data)
def test_include_xsd():
raml_file = os.path.join(EXAMPLES + "xsd_includes.raml")
with open(raml_file) as f:
raml = loader.RAMLLoader().load(f)
expected_data = lf.include_xsd_expected
assert dict_equal(raml, expected_data)
def test_include_markdown():
raml_file = os.path.join(EXAMPLES + "md_includes.raml")
with open(raml_file) as f:
raml = loader.RAMLLoader().load(f)
expected_data = lf.include_markdown_expected
assert dict_equal(raml, expected_data)
def test_invalid_yaml_tag():
raml_file = os.path.join(EXAMPLES, "invalid_yaml_tag.raml")
with pytest.raises(LoadRAMLError) as e:
loader.RAMLLoader().load(open(raml_file))
msg = "Error parsing RAML:"
assert msg in e.value.args[0]
def test_includes_has_invalid_tag():
raml_file = os.path.join(EXAMPLES, "include_has_invalid_tag.raml")
with pytest.raises(LoadRAMLError) as e:
loader.RAMLLoader().load(open(raml_file))
msg = "Error parsing RAML:"
assert msg in e.value.args[0]
def test_jsonref_relative_empty_fragment():
raml_file = os.path.join(JSONREF, "jsonref_empty_fragment.raml")
with open(raml_file) as f:
raml = loader.RAMLLoader().load(f)
expected_data = lf.jsonref_relative_empty_fragment_expected
assert dict_equal(raml, expected_data)
def test_jsonref_relative_nonempty_fragment():
raml_file = os.path.join(JSONREF, "jsonref_nonempty_fragment.raml")
with open(raml_file) as f:
raml = loader.RAMLLoader().load(f)
expected_data = lf.jsonref_relative_nonempty_fragment_expected
assert dict_equal(raml, expected_data)
def test_jsonref_internal_fragment_reference():
raml_file = os.path.join(JSONREF, "jsonref_internal_fragment.raml")
with open(raml_file) as f:
raml = loader.RAMLLoader().load(f)
expected_data = lf.jsonref_internal_fragment_reference_expected
assert dict_equal(raml, expected_data)
def test_jsonref_multiref_internal_fragments():
raml_file = os.path.join(JSONREF,
"jsonref_multiref_internal_fragment.raml")
with open(raml_file) as f:
raml = loader.RAMLLoader().load(f)
expected_data = lf.jsonref_multiref_internal_fragments_expected
assert dict_equal(raml, expected_data)
def test_jsonref_absolute_local_uri(tmpdir):
# Set up a tmp RAML file with an absolute path
json_schema_file = tmpdir.join("json_absolute_ref.json")
data = lf.json_ref_absolute_jsondump
json_schema_file.write(json.dumps(data))
raml_file = tmpdir.join("json_absolute_ref.raml")
output = lf.json_ref_absolute_ramlfile
raml_file.write(output.format(json_file=json_schema_file.strpath))
# Now load it
raml = loader.RAMLLoader().load(raml_file.read())
expected_data = lf.json_ref_absolute_expected
assert dict_equal(raml, expected_data)
def test_jsonref_relative_local_uri():
# RAML file includes a JSON schema. The JSON schema includes a
# reference to another local JSON file (same level)
ramlfile = os.path.join(JSONREF, "jsonref_relative_local.raml")
with open(ramlfile, "r") as f:
loaded_raml = loader.RAMLLoader().load(f)
schemas = loaded_raml.get("schemas")
expected = lf.jsonref_relative_local_expected
actual = schemas[0].get("jsonexample")
assert dict_equal(expected, actual)
def test_jsonref_relative_local_uri_includes():
# RAML file includes a JSON schema. JSON schema includes a reference
# to another local JSON file (in another directory)
ramlfile = os.path.join(JSONREF, "jsonref_relative_local_includes.raml")
with open(ramlfile, "r") as f:
loaded_raml = loader.RAMLLoader().load(f)
schemas = loaded_raml.get("schemas")
expected = lf.jsonref_relative_local_includes_expected
actual = schemas[0].get("jsonexample")
assert dict_equal(expected, actual)
def test_jsonref_remote_uri(tmpdir, httpserver):
mock_remote_json = os.path.join(JSONREF, "jsonref_mock_remote.json")
httpserver.serve_content(open(mock_remote_json).read())
# since we don't know what port httpserver will be on until it's
# created, have to create the JSON file that ref's it with the url
# variable & RAML
jsonfile = tmpdir.join("jsonref_remote_url.json")
data = lf.jsonref_remote_uri_jsondump
mock_remote_url = httpserver.url + "#properties"
data["properties"]["images"]["items"][0]["$ref"] = mock_remote_url
jsonfile.write(json.dumps(data))
ramlfile = tmpdir.join("jsonref_remote_url.raml")
output = lf.jsonref_remote_uri_raml.format(json_file=jsonfile.strpath)
ramlfile.write(output)
readfile = ramlfile.read()
loaded = loader.RAMLLoader().load(readfile)
expected = lf.json_remote_uri_expected
assert dict_equal(expected, loaded)
def test_jsonref_relative_local_file():
# 'file:' prepends the local filename in the JSON schema
ramlfile = os.path.join(JSONREF, "jsonref_relative_local_file.raml")
with open(ramlfile, "r") as f:
loaded_raml = loader.RAMLLoader().load(f)
schemas = loaded_raml.get("schemas")
expected = lf.jsonref_relative_local_file_expected
actual = schemas[0].get("jsonexample")
assert dict_equal(expected, actual)
def test_jsonref_relative_local_includes_file():
# 'file:' prepends the local filename in the JSON schema
filename = "jsonref_relative_local_includes_file.raml"
ramlfile = os.path.join(JSONREF, filename)
with open(ramlfile, "r") as f:
loaded_raml = loader.RAMLLoader().load(f)
schemas = loaded_raml.get("schemas")
expected = lf.jsonref_relative_local_includes_expected
actual = schemas[0].get("jsonexample")
assert dict_equal(expected, actual)
def test_jsonref_absolute_local_uri_file(tmpdir):
# Set up a tmp RAML file with an absolute path
json_schema_file = tmpdir.join("json_absolute_ref_file.json")
data = lf.json_ref_absolute_jsondump_file
json_schema_file.write(json.dumps(data))
raml_file = tmpdir.join("json_absolute_ref_file.raml")
output = lf.json_ref_absolute_ramlfile
raml_file.write(output.format(json_file=json_schema_file.strpath))
# Now load it
raml = loader.RAMLLoader().load(raml_file.read())
expected_data = lf.json_ref_absolute_expected
assert dict_equal(raml, expected_data)
| apache-2.0 | -470,538,562,066,484,860 | 31.033708 | 76 | 0.679177 | false |
YuxuanLing/trunk | trunk/code/study/python/Fluent-Python-example-code/attic/dicts/strkeydict_dictsub.py | 1 | 2301 | """StrKeyDict always converts non-string keys to `str`
This is a variation of `strkeydict.StrKeyDict` implemented
as a `dict` built-in subclass (instead of a `UserDict` subclass)
Test for initializer: keys are converted to `str`.
>>> d = StrKeyDict([(2, 'two'), ('4', 'four')])
>>> sorted(d.keys())
['2', '4']
Tests for item retrieval using `d[key]` notation::
>>> d['2']
'two'
>>> d[4]
'four'
>>> d[1]
Traceback (most recent call last):
...
KeyError: '1'
Tests for item retrieval using `d.get(key)` notation::
>>> d.get('2')
'two'
>>> d.get(4)
'four'
>>> d.get(1, 'N/A')
'N/A'
Tests for the `in` operator::
>>> 2 in d
True
>>> 1 in d
False
Test for item assignment using non-string key::
>>> d[0] = 'zero'
>>> d['0']
'zero'
Tests for update using a `dict` or a sequence of pairs::
>>> d.update({6:'six', '8':'eight'})
>>> sorted(d.keys())
['0', '2', '4', '6', '8']
>>> d.update([(10, 'ten'), ('12', 'twelve')])
>>> sorted(d.keys())
['0', '10', '12', '2', '4', '6', '8']
>>> d.update([1, 3, 5])
Traceback (most recent call last):
...
TypeError: 'int' object is not iterable
"""
import collections.abc
class StrKeyDict(dict):
def __init__(self, iterable=None, **kwds):
super().__init__()
self.update(iterable, **kwds)
def __missing__(self, key):
if isinstance(key, str):
raise KeyError(key)
return self[str(key)]
def __contains__(self, key):
return key in self.keys() or str(key) in self.keys()
def __setitem__(self, key, item):
super().__setitem__(str(key), item)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def update(self, iterable=None, **kwds):
if iterable is not None:
if isinstance(iterable, collections.abc.Mapping):
pairs = iterable.items()
else:
pairs = ((k, v) for k, v in iterable)
for key, value in pairs:
self[key] = value
if kwds:
self.update(kwds)
| gpl-3.0 | -7,410,471,302,178,293,000 | 22.221053 | 64 | 0.498914 | false |
PhilHarnish/forge | spec/data/warehouse_spec.py | 1 | 2387 | import collections
import itertools
from mock.mock import patch
from data import warehouse
from spec.mamba import *
with description('warehouse'):
with before.all:
self.warehouse_patch = patch.object(
warehouse, '_DATA', collections.ChainMap())
self.warehouse_patch.start()
with after.all:
self.warehouse_patch.stop()
with before.each:
warehouse.init()
with after.each:
warehouse.reset()
with it('protects against non-hermetic tests'):
expect(calling(warehouse.init)).to(raise_error)
with it('protects against redundant registration'):
expect(calling(warehouse.register, '/some/path', 1)).not_to(raise_error)
expect(calling(warehouse.register, '/some/path', 1)).to(raise_error)
with it('enforces deadline'):
c = itertools.count()
time_stub = lambda: next(c) * 1000
with patch('data.warehouse.time.time', side_effect=time_stub):
warehouse.register('/slow/path', lambda: 1)
expect(calling(warehouse.get, '/slow/path')).to(raise_error)
with description('simple data'):
with it('should register data'):
warehouse.register('/some/path', 'some value')
expect(call(warehouse.get, '/some/path')).to(equal('some value'))
with it('should raise KeyError for unregistered data'):
expect(calling(warehouse.get, '/some/path')).to(raise_error(KeyError))
with description('constructed data'):
with before.each:
self.value = {'key': 'value'}
self.source = lambda: self.value
with it('should register data'):
warehouse.register('/some/path', self.source)
expect(call(warehouse.get, '/some/path')).to(equal(self.value))
with description('save and restore'):
with it('should read the underlying when saved'):
warehouse.register('/some/path', 1)
warehouse.save()
expect(calling(warehouse.get, '/some/path')).not_to(raise_error(KeyError))
with it('should allow new changes'):
warehouse.save()
warehouse.register('/some/path', 2)
expect(calling(warehouse.get, '/some/path')).not_to(raise_error(KeyError))
with it('should allow restore old values'):
warehouse.register('/some/path', 1)
warehouse.save()
warehouse.register('/some/path', 2)
expect(call(warehouse.get, '/some/path')).to(equal(2))
warehouse.restore()
expect(call(warehouse.get, '/some/path')).to(equal(1))
| mit | -188,881,297,935,912,670 | 32.152778 | 80 | 0.671973 | false |
zzzeek/mako | test/__init__.py | 8 | 5060 | import contextlib
import os
import re
import unittest
from mako import compat
from mako.cache import CacheImpl
from mako.cache import register_plugin
from mako.compat import py3k
from mako.template import Template
from mako.util import update_wrapper
template_base = os.path.join(os.path.dirname(__file__), "templates")
module_base = os.path.join(template_base, "modules")
class TemplateTest(unittest.TestCase):
def _file_template(self, filename, **kw):
filepath = self._file_path(filename)
return Template(
uri=filename, filename=filepath, module_directory=module_base, **kw
)
def _file_path(self, filename):
name, ext = os.path.splitext(filename)
if py3k:
py3k_path = os.path.join(template_base, name + "_py3k" + ext)
if os.path.exists(py3k_path):
return py3k_path
return os.path.join(template_base, filename)
def _do_file_test(
self,
filename,
expected,
filters=None,
unicode_=True,
template_args=None,
**kw
):
t1 = self._file_template(filename, **kw)
self._do_test(
t1,
expected,
filters=filters,
unicode_=unicode_,
template_args=template_args,
)
def _do_memory_test(
self,
source,
expected,
filters=None,
unicode_=True,
template_args=None,
**kw
):
t1 = Template(text=source, **kw)
self._do_test(
t1,
expected,
filters=filters,
unicode_=unicode_,
template_args=template_args,
)
def _do_test(
self,
template,
expected,
filters=None,
template_args=None,
unicode_=True,
):
if template_args is None:
template_args = {}
if unicode_:
output = template.render_unicode(**template_args)
else:
output = template.render(**template_args)
if filters:
output = filters(output)
eq_(output, expected)
def eq_(a, b, msg=None):
"""Assert a == b, with repr messaging on failure."""
assert a == b, msg or "%r != %r" % (a, b)
def teardown():
import shutil
shutil.rmtree(module_base, True)
if py3k:
from unittest import mock # noqa
else:
import mock # noqa
@contextlib.contextmanager
def raises(except_cls, message=None):
try:
yield
success = False
except except_cls as e:
if message:
assert re.search(
message, compat.text_type(e), re.UNICODE
), "%r !~ %s" % (message, e)
print(compat.text_type(e).encode("utf-8"))
success = True
# assert outside the block so it works for AssertionError too !
assert success, "Callable did not raise an exception"
def assert_raises(except_cls, callable_, *args, **kw):
with raises(except_cls):
return callable_(*args, **kw)
def assert_raises_message(except_cls, msg, callable_, *args, **kwargs):
with raises(except_cls, msg):
return callable_(*args, **kwargs)
def skip_if(predicate, reason=None):
"""Skip a test if predicate is true."""
reason = reason or predicate.__name__
def decorate(fn):
fn_name = fn.__name__
def maybe(*args, **kw):
if predicate():
msg = "'%s' skipped: %s" % (fn_name, reason)
raise unittest.SkipTest(msg)
else:
return fn(*args, **kw)
return update_wrapper(maybe, fn)
return decorate
def requires_python_3(fn):
return skip_if(lambda: not py3k, "Requires Python 3.xx")(fn)
def requires_python_2(fn):
return skip_if(lambda: py3k, "Requires Python 2.xx")(fn)
def requires_pygments_14(fn):
try:
import pygments
version = pygments.__version__
except:
version = "0"
return skip_if(
lambda: version < "1.4", "Requires pygments 1.4 or greater"
)(fn)
def requires_no_pygments_exceptions(fn):
def go(*arg, **kw):
from mako import exceptions
exceptions._install_fallback()
try:
return fn(*arg, **kw)
finally:
exceptions._install_highlighting()
return update_wrapper(go, fn)
class PlainCacheImpl(CacheImpl):
"""Simple memory cache impl so that tests which
use caching can run without beaker. """
def __init__(self, cache):
self.cache = cache
self.data = {}
def get_or_create(self, key, creation_function, **kw):
if key in self.data:
return self.data[key]
else:
self.data[key] = data = creation_function(**kw)
return data
def put(self, key, value, **kw):
self.data[key] = value
def get(self, key, **kw):
return self.data[key]
def invalidate(self, key, **kw):
del self.data[key]
register_plugin("plain", __name__, "PlainCacheImpl")
| mit | 3,002,261,049,196,691,000 | 22.981043 | 79 | 0.568182 | false |
pgmillon/ansible | test/units/modules/network/f5/test_bigip_device_group_member.py | 16 | 2967 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
import sys
if sys.version_info < (2, 7):
pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7")
from ansible.module_utils.basic import AnsibleModule
try:
from library.modules.bigip_device_group_member import Parameters
from library.modules.bigip_device_group_member import ModuleManager
from library.modules.bigip_device_group_member import ArgumentSpec
# In Ansible 2.8, Ansible changed import paths.
from test.units.compat import unittest
from test.units.compat.mock import Mock
from test.units.compat.mock import patch
from test.units.modules.utils import set_module_args
except ImportError:
from ansible.modules.network.f5.bigip_device_group_member import Parameters
from ansible.modules.network.f5.bigip_device_group_member import ModuleManager
from ansible.modules.network.f5.bigip_device_group_member import ArgumentSpec
# Ansible 2.8 imports
from units.compat import unittest
from units.compat.mock import Mock
from units.compat.mock import patch
from units.modules.utils import set_module_args
fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures')
fixture_data = {}
def load_fixture(name):
path = os.path.join(fixture_path, name)
if path in fixture_data:
return fixture_data[path]
with open(path) as f:
data = f.read()
try:
data = json.loads(data)
except Exception:
pass
fixture_data[path] = data
return data
class TestParameters(unittest.TestCase):
def test_module_parameters(self):
args = dict(
name='bigip1',
device_group='dg1'
)
p = Parameters(params=args)
assert p.name == 'bigip1'
assert p.device_group == 'dg1'
class TestManager(unittest.TestCase):
def setUp(self):
self.spec = ArgumentSpec()
def test_create(self, *args):
set_module_args(
dict(
name="bigip1",
device_group="dg1",
state="present",
provider=dict(
server='localhost',
password='password',
user='admin'
)
)
)
module = AnsibleModule(
argument_spec=self.spec.argument_spec,
supports_check_mode=self.spec.supports_check_mode
)
mm = ModuleManager(module=module)
# Override methods to force specific logic in the module to happen
mm.create_on_device = Mock(return_value=True)
mm.exists = Mock(return_value=False)
results = mm.exec_module()
assert results['changed'] is True
| gpl-3.0 | -2,699,941,114,701,965,000 | 26.990566 | 91 | 0.642063 | false |
ZhaoCJ/django | django/db/backends/creation.py | 3 | 20784 | import hashlib
import sys
import time
import warnings
from django.conf import settings
from django.db.utils import load_backend
from django.utils.encoding import force_bytes
from django.utils.six.moves import input
from .utils import truncate_name
# The prefix to put on the default database name when creating
# the test database.
TEST_DATABASE_PREFIX = 'test_'
class BaseDatabaseCreation(object):
"""
This class encapsulates all backend-specific differences that pertain to
database *creation*, such as the column types to use for particular Django
Fields, the SQL used to create and destroy tables, and the creation and
destruction of test databases.
"""
data_types = {}
data_types_suffix = {}
data_type_check_constraints = {}
def __init__(self, connection):
self.connection = connection
@classmethod
def _digest(cls, *args):
"""
Generates a 32-bit digest of a set of arguments that can be used to
shorten identifying names.
"""
h = hashlib.md5()
for arg in args:
h.update(force_bytes(arg))
return h.hexdigest()[:8]
def sql_create_model(self, model, style, known_models=set()):
"""
Returns the SQL required to create a single model, as a tuple of:
(list_of_sql, pending_references_dict)
"""
opts = model._meta
if not opts.managed or opts.proxy or opts.swapped:
return [], {}
final_output = []
table_output = []
pending_references = {}
qn = self.connection.ops.quote_name
for f in opts.local_fields:
col_type = f.db_type(connection=self.connection)
col_type_suffix = f.db_type_suffix(connection=self.connection)
tablespace = f.db_tablespace or opts.db_tablespace
if col_type is None:
# Skip ManyToManyFields, because they're not represented as
# database columns in this table.
continue
# Make the definition (e.g. 'foo VARCHAR(30)') for this field.
field_output = [style.SQL_FIELD(qn(f.column)),
style.SQL_COLTYPE(col_type)]
# Oracle treats the empty string ('') as null, so coerce the null
# option whenever '' is a possible value.
null = f.null
if (f.empty_strings_allowed and not f.primary_key and
self.connection.features.interprets_empty_strings_as_nulls):
null = True
if not null:
field_output.append(style.SQL_KEYWORD('NOT NULL'))
if f.primary_key:
field_output.append(style.SQL_KEYWORD('PRIMARY KEY'))
elif f.unique:
field_output.append(style.SQL_KEYWORD('UNIQUE'))
if tablespace and f.unique:
# We must specify the index tablespace inline, because we
# won't be generating a CREATE INDEX statement for this field.
tablespace_sql = self.connection.ops.tablespace_sql(
tablespace, inline=True)
if tablespace_sql:
field_output.append(tablespace_sql)
if f.rel and f.db_constraint:
ref_output, pending = self.sql_for_inline_foreign_key_references(
model, f, known_models, style)
if pending:
pending_references.setdefault(f.rel.to, []).append(
(model, f))
else:
field_output.extend(ref_output)
if col_type_suffix:
field_output.append(style.SQL_KEYWORD(col_type_suffix))
table_output.append(' '.join(field_output))
for field_constraints in opts.unique_together:
table_output.append(style.SQL_KEYWORD('UNIQUE') + ' (%s)' %
", ".join(
[style.SQL_FIELD(qn(opts.get_field(f).column))
for f in field_constraints]))
full_statement = [style.SQL_KEYWORD('CREATE TABLE') + ' ' +
style.SQL_TABLE(qn(opts.db_table)) + ' (']
for i, line in enumerate(table_output): # Combine and add commas.
full_statement.append(
' %s%s' % (line, ',' if i < len(table_output) - 1 else ''))
full_statement.append(')')
if opts.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(
opts.db_tablespace)
if tablespace_sql:
full_statement.append(tablespace_sql)
full_statement.append(';')
final_output.append('\n'.join(full_statement))
if opts.has_auto_field:
# Add any extra SQL needed to support auto-incrementing primary
# keys.
auto_column = opts.auto_field.db_column or opts.auto_field.name
autoinc_sql = self.connection.ops.autoinc_sql(opts.db_table,
auto_column)
if autoinc_sql:
for stmt in autoinc_sql:
final_output.append(stmt)
return final_output, pending_references
def sql_for_inline_foreign_key_references(self, model, field, known_models, style):
"""
Return the SQL snippet defining the foreign key reference for a field.
"""
qn = self.connection.ops.quote_name
rel_to = field.rel.to
if rel_to in known_models or rel_to == model:
output = [style.SQL_KEYWORD('REFERENCES') + ' ' +
style.SQL_TABLE(qn(rel_to._meta.db_table)) + ' (' +
style.SQL_FIELD(qn(rel_to._meta.get_field(
field.rel.field_name).column)) + ')' +
self.connection.ops.deferrable_sql()
]
pending = False
else:
# We haven't yet created the table to which this field
# is related, so save it for later.
output = []
pending = True
return output, pending
def sql_for_pending_references(self, model, style, pending_references):
"""
Returns any ALTER TABLE statements to add constraints after the fact.
"""
opts = model._meta
if not opts.managed or opts.swapped:
return []
qn = self.connection.ops.quote_name
final_output = []
if model in pending_references:
for rel_class, f in pending_references[model]:
rel_opts = rel_class._meta
r_table = rel_opts.db_table
r_col = f.column
table = opts.db_table
col = opts.get_field(f.rel.field_name).column
# For MySQL, r_name must be unique in the first 64 characters.
# So we are careful with character usage here.
r_name = '%s_refs_%s_%s' % (
r_col, col, self._digest(r_table, table))
final_output.append(style.SQL_KEYWORD('ALTER TABLE') +
' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' %
(qn(r_table), qn(truncate_name(
r_name, self.connection.ops.max_name_length())),
qn(r_col), qn(table), qn(col),
self.connection.ops.deferrable_sql()))
del pending_references[model]
return final_output
def sql_indexes_for_model(self, model, style):
"""
Returns the CREATE INDEX SQL statements for a single model.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
for f in model._meta.local_fields:
output.extend(self.sql_indexes_for_field(model, f, style))
for fs in model._meta.index_together:
fields = [model._meta.get_field_by_name(f)[0] for f in fs]
output.extend(self.sql_indexes_for_fields(model, fields, style))
return output
def sql_indexes_for_field(self, model, f, style):
"""
Return the CREATE INDEX SQL statements for a single model field.
"""
if f.db_index and not f.unique:
return self.sql_indexes_for_fields(model, [f], style)
else:
return []
def sql_indexes_for_fields(self, model, fields, style):
if len(fields) == 1 and fields[0].db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(fields[0].db_tablespace)
elif model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
else:
tablespace_sql = ""
if tablespace_sql:
tablespace_sql = " " + tablespace_sql
field_names = []
qn = self.connection.ops.quote_name
for f in fields:
field_names.append(style.SQL_FIELD(qn(f.column)))
index_name = "%s_%s" % (model._meta.db_table, self._digest([f.name for f in fields]))
return [
style.SQL_KEYWORD("CREATE INDEX") + " " +
style.SQL_TABLE(qn(truncate_name(index_name, self.connection.ops.max_name_length()))) + " " +
style.SQL_KEYWORD("ON") + " " +
style.SQL_TABLE(qn(model._meta.db_table)) + " " +
"(%s)" % style.SQL_FIELD(", ".join(field_names)) +
"%s;" % tablespace_sql,
]
def sql_destroy_model(self, model, references_to_delete, style):
"""
Return the DROP TABLE and restraint dropping statements for a single
model.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
# Drop the table now
qn = self.connection.ops.quote_name
output = ['%s %s;' % (style.SQL_KEYWORD('DROP TABLE'),
style.SQL_TABLE(qn(model._meta.db_table)))]
if model in references_to_delete:
output.extend(self.sql_remove_table_constraints(
model, references_to_delete, style))
if model._meta.has_auto_field:
ds = self.connection.ops.drop_sequence_sql(model._meta.db_table)
if ds:
output.append(ds)
return output
def sql_remove_table_constraints(self, model, references_to_delete, style):
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
qn = self.connection.ops.quote_name
for rel_class, f in references_to_delete[model]:
table = rel_class._meta.db_table
col = f.column
r_table = model._meta.db_table
r_col = model._meta.get_field(f.rel.field_name).column
r_name = '%s_refs_%s_%s' % (
col, r_col, self._digest(table, r_table))
output.append('%s %s %s %s;' % (
style.SQL_KEYWORD('ALTER TABLE'),
style.SQL_TABLE(qn(table)),
style.SQL_KEYWORD(self.connection.ops.drop_foreignkey_sql()),
style.SQL_FIELD(qn(truncate_name(
r_name, self.connection.ops.max_name_length())))
))
del references_to_delete[model]
return output
def sql_destroy_indexes_for_model(self, model, style):
"""
Returns the DROP INDEX SQL statements for a single model.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
for f in model._meta.local_fields:
output.extend(self.sql_destroy_indexes_for_field(model, f, style))
for fs in model._meta.index_together:
fields = [model._meta.get_field_by_name(f)[0] for f in fs]
output.extend(self.sql_destroy_indexes_for_fields(model, fields, style))
return output
def sql_destroy_indexes_for_field(self, model, f, style):
"""
Return the DROP INDEX SQL statements for a single model field.
"""
if f.db_index and not f.unique:
return self.sql_destroy_indexes_for_fields(model, [f], style)
else:
return []
def sql_destroy_indexes_for_fields(self, model, fields, style):
if len(fields) == 1 and fields[0].db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(fields[0].db_tablespace)
elif model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
else:
tablespace_sql = ""
if tablespace_sql:
tablespace_sql = " " + tablespace_sql
field_names = []
qn = self.connection.ops.quote_name
for f in fields:
field_names.append(style.SQL_FIELD(qn(f.column)))
index_name = "%s_%s" % (model._meta.db_table, self._digest([f.name for f in fields]))
return [
style.SQL_KEYWORD("DROP INDEX") + " " +
style.SQL_TABLE(qn(truncate_name(index_name, self.connection.ops.max_name_length()))) + " " +
";",
]
def create_test_db(self, verbosity=1, autoclobber=False):
"""
Creates a test database, prompting the user for confirmation if the
database already exists. Returns the name of the test database created.
"""
# Don't import django.core.management if it isn't needed.
from django.core.management import call_command
test_database_name = self._get_test_db_name()
if verbosity >= 1:
test_db_repr = ''
if verbosity >= 2:
test_db_repr = " ('%s')" % test_database_name
print("Creating test database for alias '%s'%s..." % (
self.connection.alias, test_db_repr))
self._create_test_db(verbosity, autoclobber)
self.connection.close()
settings.DATABASES[self.connection.alias]["NAME"] = test_database_name
self.connection.settings_dict["NAME"] = test_database_name
# Report migrate messages at one level lower than that requested.
# This ensures we don't get flooded with messages during testing
# (unless you really ask to be flooded)
call_command('migrate',
verbosity=max(verbosity - 1, 0),
interactive=False,
database=self.connection.alias,
load_initial_data=False,
test_database=True)
# We need to then do a flush to ensure that any data installed by
# custom SQL has been removed. The only test data should come from
# test fixtures, or autogenerated from post_migrate triggers.
# This has the side effect of loading initial data (which was
# intentionally skipped in the syncdb).
call_command('flush',
verbosity=max(verbosity - 1, 0),
interactive=False,
database=self.connection.alias)
from django.core.cache import get_cache
from django.core.cache.backends.db import BaseDatabaseCache
for cache_alias in settings.CACHES:
cache = get_cache(cache_alias)
if isinstance(cache, BaseDatabaseCache):
call_command('createcachetable', cache._table,
database=self.connection.alias)
# Get a cursor (even though we don't need one yet). This has
# the side effect of initializing the test database.
self.connection.cursor()
return test_database_name
def _get_test_db_name(self):
"""
Internal implementation - returns the name of the test DB that will be
created. Only useful when called from create_test_db() and
_create_test_db() and when no external munging is done with the 'NAME'
or 'TEST_NAME' settings.
"""
if self.connection.settings_dict['TEST_NAME']:
return self.connection.settings_dict['TEST_NAME']
return TEST_DATABASE_PREFIX + self.connection.settings_dict['NAME']
def _create_test_db(self, verbosity, autoclobber):
"""
Internal implementation - creates the test db tables.
"""
suffix = self.sql_table_creation_suffix()
test_database_name = self._get_test_db_name()
qn = self.connection.ops.quote_name
# Create the test database and connect to it.
cursor = self.connection.cursor()
try:
cursor.execute(
"CREATE DATABASE %s %s" % (qn(test_database_name), suffix))
except Exception as e:
sys.stderr.write(
"Got an error creating the test database: %s\n" % e)
if not autoclobber:
confirm = input(
"Type 'yes' if you would like to try deleting the test "
"database '%s', or 'no' to cancel: " % test_database_name)
if autoclobber or confirm == 'yes':
try:
if verbosity >= 1:
print("Destroying old test database '%s'..."
% self.connection.alias)
cursor.execute(
"DROP DATABASE %s" % qn(test_database_name))
cursor.execute(
"CREATE DATABASE %s %s" % (qn(test_database_name),
suffix))
except Exception as e:
sys.stderr.write(
"Got an error recreating the test database: %s\n" % e)
sys.exit(2)
else:
print("Tests cancelled.")
sys.exit(1)
return test_database_name
def destroy_test_db(self, old_database_name, verbosity=1):
"""
Destroy a test database, prompting the user for confirmation if the
database already exists.
"""
self.connection.close()
test_database_name = self.connection.settings_dict['NAME']
if verbosity >= 1:
test_db_repr = ''
if verbosity >= 2:
test_db_repr = " ('%s')" % test_database_name
print("Destroying test database for alias '%s'%s..." % (
self.connection.alias, test_db_repr))
# Temporarily use a new connection and a copy of the settings dict.
# This prevents the production database from being exposed to potential
# child threads while (or after) the test database is destroyed.
# Refs #10868 and #17786.
settings_dict = self.connection.settings_dict.copy()
settings_dict['NAME'] = old_database_name
backend = load_backend(settings_dict['ENGINE'])
new_connection = backend.DatabaseWrapper(
settings_dict,
alias='__destroy_test_db__',
allow_thread_sharing=False)
new_connection.creation._destroy_test_db(test_database_name, verbosity)
def _destroy_test_db(self, test_database_name, verbosity):
"""
Internal implementation - remove the test db tables.
"""
# Remove the test database to clean up after
# ourselves. Connect to the previous database (not the test database)
# to do so, because it's not allowed to delete a database while being
# connected to it.
cursor = self.connection.cursor()
# Wait to avoid "database is being accessed by other users" errors.
time.sleep(1)
cursor.execute("DROP DATABASE %s"
% self.connection.ops.quote_name(test_database_name))
self.connection.close()
def set_autocommit(self):
"""
Make sure a connection is in autocommit mode. - Deprecated, not used
anymore by Django code. Kept for compatibility with user code that
might use it.
"""
warnings.warn(
"set_autocommit was moved from BaseDatabaseCreation to "
"BaseDatabaseWrapper.", DeprecationWarning, stacklevel=2)
return self.connection.set_autocommit(True)
def sql_table_creation_suffix(self):
"""
SQL to append to the end of the test table creation statements.
"""
return ''
def test_db_signature(self):
"""
Returns a tuple with elements of self.connection.settings_dict (a
DATABASES setting value) that uniquely identify a database
accordingly to the RDBMS particularities.
"""
settings_dict = self.connection.settings_dict
return (
settings_dict['HOST'],
settings_dict['PORT'],
settings_dict['ENGINE'],
settings_dict['NAME']
)
| bsd-3-clause | -2,469,110,843,963,781,000 | 40.818913 | 105 | 0.567552 | false |
cevaris/pants | contrib/go/src/python/pants/contrib/go/tasks/go_go.py | 14 | 3511 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
import subprocess
from abc import abstractmethod
from colors import green, red, yellow
from pants.base.exceptions import TaskError
from pants.task.task import QuietTaskMixin
from twitter.common.collections import OrderedSet
from pants.contrib.go.tasks.go_workspace_task import GoWorkspaceTask
class GoInteropTask(QuietTaskMixin, GoWorkspaceTask):
class MissingArgsError(TaskError):
"""Indicates missing go targets or missing pass-through arguments."""
@classmethod
def supports_passthru_args(cls):
return True
def execute(self, **kwargs):
# NB: kwargs are for testing and pass-through to underlying subprocess process spawning.
go_targets = OrderedSet(target for target in self.context.target_roots if self.is_go(target))
args = self.get_passthru_args()
if not go_targets or not args:
msg = (yellow('The pants `{goal}` goal expects at least one go target and at least one '
'pass-through argument to be specified, call with:\n') +
green(' ./pants {goal} {targets} -- {args}')
.format(goal=self.options_scope,
targets=(green(' '.join(t.address.reference() for t in go_targets))
if go_targets else red('[missing go targets]')),
args=green(' '.join(args)) if args else red('[missing pass-through args]')))
raise self.MissingArgsError(msg)
go_path = OrderedSet()
import_paths = OrderedSet()
for target in go_targets:
self.ensure_workspace(target)
go_path.add(self.get_gopath(target))
import_paths.add(target.import_path)
self.execute_with_go_env(os.pathsep.join(go_path), list(import_paths), args, **kwargs)
@abstractmethod
def execute_with_go_env(self, go_path, import_paths, args, **kwargs):
"""Subclasses should execute the go interop task in the given environment.
:param string go_path: The pre-formatted $GOPATH for the environment.
:param list import_paths: The import paths for all the go targets specified in the environment.
:param list args: The pass through arguments for the command to run in the go environment.
:param **kwargs: Any additional `subprocess` keyword-args; for testing.
"""
class GoEnv(GoInteropTask):
"""Runs an arbitrary command in a go workspace defined by zero or more go targets."""
def execute_with_go_env(self, go_path, import_paths, args, **kwargs):
cmd = ' '.join(args)
env = os.environ.copy()
env.update(GOROOT=self.go_dist.goroot, GOPATH=go_path)
process = subprocess.Popen(cmd, shell=True, env=env, **kwargs)
result = process.wait()
if result != 0:
raise TaskError('{} failed with exit code {}'.format(cmd, result), exit_code=result)
class GoGo(GoInteropTask):
"""Runs an arbitrary go command against zero or more go targets."""
def execute_with_go_env(self, go_path, import_paths, args, **kwargs):
args = args + import_paths
cmd = args.pop(0)
go_cmd = self.go_dist.create_go_cmd(gopath=go_path, cmd=cmd, args=args)
result = go_cmd.spawn(**kwargs).wait()
if result != 0:
raise TaskError('{} failed with exit code {}'.format(go_cmd, result), exit_code=result)
| apache-2.0 | 2,845,736,358,736,101,000 | 40.305882 | 99 | 0.685844 | false |
xzturn/tensorflow | tensorflow/python/kernel_tests/linalg/linear_operator_toeplitz_test.py | 2 | 6189 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import numpy as np
import scipy.linalg
from tensorflow.python.eager import backprop
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables as variables_module
from tensorflow.python.ops.linalg import linalg as linalg_lib
from tensorflow.python.ops.linalg import linear_operator_test_util
from tensorflow.python.ops.linalg import linear_operator_toeplitz
from tensorflow.python.platform import test
linalg = linalg_lib
_to_complex = linear_operator_toeplitz._to_complex
@test_util.run_all_in_graph_and_eager_modes
class LinearOperatorToeplitzTest(
linear_operator_test_util.SquareLinearOperatorDerivedClassTest):
"""Most tests done in the base class LinearOperatorDerivedClassTest."""
@contextlib.contextmanager
def _constrain_devices_and_set_default(self, sess, use_gpu, force_gpu):
"""We overwrite the FFT operation mapping for testing."""
with test.TestCase._constrain_devices_and_set_default(
self, sess, use_gpu, force_gpu) as sess:
yield sess
def setUp(self):
# TODO(srvasude): Lower these tolerances once specialized solve and
# determinants are implemented.
self._atol[dtypes.float32] = 1e-4
self._rtol[dtypes.float32] = 1e-4
self._atol[dtypes.float64] = 1e-9
self._rtol[dtypes.float64] = 1e-9
self._atol[dtypes.complex64] = 1e-4
self._rtol[dtypes.complex64] = 1e-4
self._atol[dtypes.complex128] = 1e-9
self._rtol[dtypes.complex128] = 1e-9
@staticmethod
def skip_these_tests():
# Skip solve tests, as these could have better stability
# (currently exercises the base class).
# TODO(srvasude): Enable these when solve is implemented.
return ["cholesky", "cond", "inverse", "solve", "solve_with_broadcast"]
@staticmethod
def operator_shapes_infos():
shape_info = linear_operator_test_util.OperatorShapesInfo
# non-batch operators (n, n) and batch operators.
return [
shape_info((1, 1)),
shape_info((1, 6, 6)),
shape_info((3, 4, 4)),
shape_info((2, 1, 3, 3))
]
def operator_and_matrix(
self, build_info, dtype, use_placeholder,
ensure_self_adjoint_and_pd=False):
shape = list(build_info.shape)
row = np.random.uniform(low=1., high=5., size=shape[:-1])
col = np.random.uniform(low=1., high=5., size=shape[:-1])
# Make sure first entry is the same
row[..., 0] = col[..., 0]
if ensure_self_adjoint_and_pd:
# Note that a Toeplitz matrix generated from a linearly decreasing
# non-negative sequence is positive definite. See
# https://www.math.cinvestav.mx/~grudsky/Papers/118_29062012_Albrecht.pdf
# for details.
row = np.linspace(start=10., stop=1., num=shape[-1])
# The entries for the first row and column should be the same to guarantee
# symmetric.
row = col
lin_op_row = math_ops.cast(row, dtype=dtype)
lin_op_col = math_ops.cast(col, dtype=dtype)
if use_placeholder:
lin_op_row = array_ops.placeholder_with_default(
lin_op_row, shape=None)
lin_op_col = array_ops.placeholder_with_default(
lin_op_col, shape=None)
operator = linear_operator_toeplitz.LinearOperatorToeplitz(
row=lin_op_row,
col=lin_op_col,
is_self_adjoint=True if ensure_self_adjoint_and_pd else None,
is_positive_definite=True if ensure_self_adjoint_and_pd else None)
flattened_row = np.reshape(row, (-1, shape[-1]))
flattened_col = np.reshape(col, (-1, shape[-1]))
flattened_toeplitz = np.zeros(
[flattened_row.shape[0], shape[-1], shape[-1]])
for i in range(flattened_row.shape[0]):
flattened_toeplitz[i] = scipy.linalg.toeplitz(
flattened_col[i],
flattened_row[i])
matrix = np.reshape(flattened_toeplitz, shape)
matrix = math_ops.cast(matrix, dtype=dtype)
return operator, matrix
def test_scalar_row_col_raises(self):
with self.assertRaisesRegexp(ValueError, "must have at least 1 dimension"):
linear_operator_toeplitz.LinearOperatorToeplitz(1., 1.)
with self.assertRaisesRegexp(ValueError, "must have at least 1 dimension"):
linear_operator_toeplitz.LinearOperatorToeplitz([1.], 1.)
with self.assertRaisesRegexp(ValueError, "must have at least 1 dimension"):
linear_operator_toeplitz.LinearOperatorToeplitz(1., [1.])
def test_tape_safe(self):
col = variables_module.Variable([1.])
row = variables_module.Variable([1.])
operator = linear_operator_toeplitz.LinearOperatorToeplitz(
col, row, is_self_adjoint=True, is_positive_definite=True)
self.check_tape_safe(
operator,
skip_options=[
# .diag_part, .trace depend only on `col`, so test explicitly below.
linear_operator_test_util.CheckTapeSafeSkipOptions.DIAG_PART,
linear_operator_test_util.CheckTapeSafeSkipOptions.TRACE,
])
with backprop.GradientTape() as tape:
self.assertIsNotNone(tape.gradient(operator.diag_part(), col))
with backprop.GradientTape() as tape:
self.assertIsNotNone(tape.gradient(operator.trace(), col))
if __name__ == "__main__":
linear_operator_test_util.add_tests(LinearOperatorToeplitzTest)
test.main()
| apache-2.0 | 1,574,812,059,705,161,700 | 36.737805 | 80 | 0.689449 | false |
sebastianduran/Bolirana | ruleta.py | 1 | 3728 | import pygame, random, sys, time, os, subprocess, glob
import RPi.GPIO as GPIO
from pygame.locals import *
screenW=1280
screenH=1000
######### Screen configuration
gameDisplay=pygame.display.set_mode((screenW,screenH))
pygame.display.set_caption('Boliranas El Original')
pygame.init()
greendark=(0,128,0)
bluedark=(0,0,128)
reddark=(128,0,0)
magentadark=(128,0,128)
green=(0,255,0)
blue=(0,0,255)
red=(255,0,0)
magenta=(255,0,255)
blue2=(0,90,255)
orange2 = (255,195,0)
magenta2=(255,0,90)
mangobiche=(255,255,51)
blue3=(26,0,179)
yellow=(255,255,0)
black=(0,0,0)
white=(255,255,255)
#gameDisplay.blit(pygame.image.load('ruleta.png'),(415,250))
posx = list(range(8))
posy = list(range(8))
posx[0]=715
posy[0]=230
posx[1]=810
posy[1]=330
posx[2]=810
posy[2]=465
posx[3]=715
posy[3]=568
posx[4]=568
posy[4]=568
posx[5]=470
posy[5]=465
posx[6]=470
posy[6]=330
posx[7]=568
posy[7]=228
posibles = [300,350,400,450,300,350,400,450]
vivo = [green,blue,red,magenta,green,blue,red,magenta]
muerto = [greendark,bluedark,reddark,magentadark,greendark,bluedark,reddark,magentadark]
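# posx/posy: centre coordinates of the 8 slots arranged around the wheel
# posibles: prize value displayed in each slot
# vivo / muerto: bright ("lit") and dark ("unlit") colour of each slot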
ruletaFont = pygame.font.Font(None,165)
posiblesFont= pygame.font.Font(None,80)
radio=65
ajustx=48
ajusty=30
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
quit()
pygame.draw.circle(gameDisplay, orange2, [640,400],250)
pygame.draw.circle(gameDisplay, blue3, [640,400],100)
for i in range(0,8):
pygame.draw.circle(gameDisplay, muerto[i], [posx[i],posy[i]],radio)
gameDisplay.blit(posiblesFont.render(str(posibles[i]),1, white),(posx[i]-ajustx,posy[i]-ajusty))
pygame.display.update()
time.sleep(1)
    ## roulette spin: pick a random slot and animate the highlight around the wheel
rand = random.randint(0,7)
rand8 = rand+8
rand_6 = rand8/6
for i in range(0,rand_6+1):
for j in range(0,8):
pygame.draw.circle(gameDisplay, vivo[j], [posx[j],posy[j]],radio)
gameDisplay.blit(posiblesFont.render(str(posibles[j]),1, white),(posx[j]-ajustx,posy[j]-ajusty))
if j != 0 :
pygame.draw.circle(gameDisplay, muerto[j-1], [posx[j-1],posy[j-1]],radio)
gameDisplay.blit(posiblesFont.render(str(posibles[j-1]),1, white),(posx[j-1]-ajustx,posy[j-1]-ajusty))
else :
pygame.draw.circle(gameDisplay, muerto[7], [posx[7],posy[7]],radio)
gameDisplay.blit(posiblesFont.render(str(posibles[7]),1, white),(posx[7]-ajustx,posy[7]-ajusty))
pygame.display.update()
time.sleep(0.1)
for k in range(0,rand):
print str(k)
pygame.draw.circle(gameDisplay, vivo[k], [posx[k],posy[k]],radio)
gameDisplay.blit(posiblesFont.render(str(posibles[k]),1, white),(posx[k]-ajustx,posy[k]-ajusty))
if k != 0:
pygame.draw.circle(gameDisplay, muerto[k-1], [posx[k-1],posy[k -1]],radio)
gameDisplay.blit(posiblesFont.render(str(posibles[k-1]),1, white),(posx[k-1]-ajustx,posy[k-1]-ajusty))
else:
pygame.draw.circle(gameDisplay, muerto[7], [posx[7],posy[7]],radio)
gameDisplay.blit(posiblesFont.render(str(posibles[7]),1, white),(posx[7]-ajustx,posy[7]-ajusty))
pygame.display.update()
time.sleep(0.2)
pygame.draw.circle(gameDisplay, vivo[rand-1], [posx[rand-1],posy[rand-1]],radio)
gameDisplay.blit(posiblesFont.render(str(posibles[rand-1]),1, white),(posx[rand-1]-ajustx,posy[rand-1]-ajusty))
gameDisplay.blit(ruletaFont.render(str(posibles[rand-1]),1, mangobiche),(545,335))
pygame.display.update()
time.sleep(2)
| gpl-3.0 | -922,650,006,805,618,700 | 28.824 | 118 | 0.628219 | false |
crs4/ProMort | promort/slides_manager/serializers.py | 2 | 5115 | # Copyright (c) 2019, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from django.contrib.auth.models import User
from rest_framework import serializers
from slides_manager.models import Laboratory, Case, Slide, SlideEvaluation, \
SlidesSet, SlidesSetItem
from reviews_manager.models import ROIsAnnotationStep
class LaboratorySerializer(serializers.ModelSerializer):
class Meta:
model = Laboratory
fields = ('label',)
class CaseSerializer(serializers.ModelSerializer):
laboratory = serializers.SlugRelatedField(
slug_field='label',
queryset=Laboratory.objects.all(),
)
class Meta:
model = Case
fields = ('id', 'import_date', 'laboratory', 'slides')
read_only_fields = ('import_date', 'slides')
class SlideSerializer(serializers.ModelSerializer):
class Meta:
model = Slide
fields = ('id', 'case', 'import_date', 'omero_id',
'image_type', 'image_microns_per_pixel')
read_only_fields = ('import_date',)
class SlideEvaluationSerializer(serializers.ModelSerializer):
reviewer = serializers.SlugRelatedField(
slug_field='username',
queryset=User.objects.all(),
)
slide = serializers.PrimaryKeyRelatedField(
write_only=True,
queryset=Slide.objects.all(),
)
rois_annotation_step = serializers.PrimaryKeyRelatedField(
write_only=True,
queryset=ROIsAnnotationStep.objects.all()
)
not_adequacy_reason_text = serializers.SerializerMethodField()
staining_text = serializers.SerializerMethodField()
class Meta:
model = SlideEvaluation
fields = ('slide', 'rois_annotation_step', 'staining', 'staining_text', 'adequate_slide', 'not_adequacy_reason',
'not_adequacy_reason_text', 'reviewer', 'acquisition_date', 'notes')
read_only_fields = ('acquisition_date', 'staining_text', 'not_adequacy_reason_text')
@staticmethod
def get_not_adequacy_reason_text(obj):
return obj.get_not_adequacy_reason_text()
@staticmethod
def get_staining_text(obj):
return obj.get_staining_text()
class LaboratoryDetailSerializer(serializers.ModelSerializer):
cases = CaseSerializer(many=True, read_only=True)
class Meta:
model = Laboratory
fields = ('label', 'cases')
read_only_fields = ('cases',)
class CaseDetailedSerializer(serializers.ModelSerializer):
slides = SlideSerializer(many=True, read_only=True)
class Meta:
model = Case
fields = ('id', 'import_date', 'slides')
read_only_fields = ('import_date',)
class SlideDetailSerializer(serializers.ModelSerializer):
case = serializers.PrimaryKeyRelatedField(read_only=True)
class Meta:
model = Slide
fields = ('id', 'case', 'import_date', 'omero_id',
'image_type', 'image_microns_per_pixel')
read_only_fields = ('import_date',)
class SlidesSetSerializer(serializers.ModelSerializer):
class Meta:
model = SlidesSet
fields = ('id', 'creation_date')
class SlidesSetItemSerializer(serializers.ModelSerializer):
omero_id = serializers.SerializerMethodField()
image_type = serializers.SerializerMethodField()
class Meta:
model = SlidesSetItem
fields = ('slide', 'omero_id', 'image_type', 'slides_set', 'set_label', 'set_index')
@staticmethod
def get_omero_id(obj):
return obj.slide.omero_id
@staticmethod
def get_image_type(obj):
return obj.slide.image_type
class SlidesSetItemDetailedSerializer(serializers.ModelSerializer):
slide = SlideSerializer(read_only=True)
slides_set = SlidesSetSerializer(read_only=True)
class Meta:
model = SlidesSetItem
fields = ('slide', 'slides_set', 'set_label', 'set_index')
class SlidesSetDetailedSerializer(serializers.ModelSerializer):
items = SlidesSetItemSerializer(many=True, read_only=True)
class Meta:
model = SlidesSet
fields = ('id', 'creation_date', 'items')
| mit | -935,676,158,968,680,100 | 30 | 120 | 0.687195 | false |
Maccimo/intellij-community | python/testData/inspections/PyStringFormatInspection/Basic.py | 9 | 6071 | '#%(language)s has %(#)03d quote types.' % {'language': "Python", "#": 2} #ok
'%d %s' % <warning descr="Too few arguments for format string">5</warning> #Too few arguments for format string
'Hello world' % <warning descr="Too many arguments for format string">25</warning> #Too many arguments for format string
"%(name)f(name)" % {'name': 23.2} #ok
"%()s" % {'': "name"} #ok
<warning descr="Format specifier character missing">'test%(name)'</warning> % {'name': 23} #There are no format specifier character
'work%*d' % (2, 34) #ok
<warning descr="Cannot use '*' in formats when using a mapping">'work%(name)*d'</warning> % (12, 32) #Can't use '*' in formats when using a mapping
'%*.*d' % (2, 5, 5) #ok
'%*.*d' % (<warning descr="Too few arguments for format string">2, 4</warning>) #Too few arguments for format string
'%*.*d' % (<warning descr="Too many arguments for format string">2, 4, 5, 6</warning>) #Too many arguments for format string
<warning descr="Format specifier character missing">'%**d'</warning> % (2, 5) #There are no format specifier character
<warning descr="Too few mapping keys">'%(name1)s %(name2)s (name3) %s'</warning> % {'name1': 'a', 'name2': 'b', 'name3': 'c'} #Too few mapping keys
<warning descr="Too few mapping keys">'%(name1s'</warning> % {'name1': 'a'} #Too few mapping keys
'%%%(name)ld' % {'name': 12} #ok
"%(name)f(name)" % <warning descr="Format requires a mapping">23.2</warning> #Format requires a mapping
"%(name)f(name)" % (<warning descr="Format requires a mapping">23.2</warning>) #Format requires a mapping
'%d%d' % <warning descr="Format does not require a mapping">{'name1': 2, 'name2': 3}</warning> #Format doesn't require a mapping
'%12.2f' % 2.74 #ok
'Hello world' % () #ok
'Hello world' % [] #ok
'Hello world' % {} #ok
'%d%d' % ((5), (5)) #ok
"%(name)d %(name)d" % {"name": 43} #ok
"%(name)d" % {'a': 4, "name": 5} #ok
'%% name %(name)c' % <warning descr="Key 'name' has no corresponding argument">{'a': 4}</warning> #Key 'name' has no following argument
'%d %u %f %F %s %r' % (2, 3, 4.1, 4.0, "name", "str") #ok
'%d %d %d' % (4, <warning descr="Unexpected type str">"a"</warning>, <warning descr="Unexpected type str">"b"</warning>) #Unexpected type
'%f %f %f' % (4, 5, <warning descr="Unexpected type str">"test"</warning>) #Unexpected type
'%d' % <warning descr="Unexpected type str">"name"</warning> #Unexpected type
m = {'language': "Python", "#": 2}
'#%(language)s has %(#)03d quote types.' % m #ok
i = "test"
'%(name)s' % {'name': i} #ok
'%s' % i #ok
'%f' % <warning descr="Unexpected type str">i</warning> #Unexpected type
'%f' % (2 * 3 + 5) #ok
s = "%s" % "a".upper() #ok
x = ['a', 'b', 'c']
print "%d: %s" % (len(x), ", ".join(x)) #ok
m = [1, 2, 3, 4, 5]
"%d" % m[0] #ok
"%d %s" % (m[0], m[4]) #ok
"%s" % m #ok
"%s" % m[1:3] #ok
"%d" % <warning descr="Unexpected type str">m[1:2]</warning> #ok
"%d" % <warning descr="Unexpected type str">m</warning> #Unexpected type
"%d" % <warning descr="Unexpected type str">[]</warning> #Unexpected type
def greet(all):
print "Hello %s" % ("World" if all else "Human") #ok
"%s" % [x + 1 for x in [1, 2, 3, 4]] #ok
"%s" % [x + y for x in []] #ok
"%s" % [] #ok
"%f" % <warning descr="Unexpected type str">[x + 1 for x in [1, 2, 3, 4]]</warning> #Unexpected type
"%d %d" % (3, 5) #ok
"Hello %s %s" % tuple(['world', '!']) #ok
def foo(a):
if a == 1:
return "a", "b"
else:
return "c", "d"
print "%s" % <warning descr="Too many arguments for format string">foo(1)</warning> #Too many arguments for format string
print("| [%(issue_id)s|http://youtrack.jetbrains.net/issue/%(issue_id)s] (%(issue_type)s)|%(summary)s|" % (<warning descr="Format requires a mapping">issue_id, issue_type, summary</warning>)) #Format requires a mapping (PY-704)
my_list = list()
for i in range(0,3):
my_list.append( ("hey", "you") )
for item in my_list:
print '%s %s' % item # ok (PY-734)
def bar():
return None
"%s %s" % <warning descr="Too few arguments for format string">bar()</warning> #Too few arguments for format string
"%s" % {} # ok; str() works
"%s" % {'a': 1, 'b': 2} # ok, no names in template and arg counts don't match
"%s" % object() # ok, str() works
"foo" % {'bar':1, 'baz':2} # ok: empty template that could use names
a = ('a', 1) if 1 else ('b', 2)
"%s is %d" % a # ok, must infer unified tuple type
#PY-3064, because original type of a is tuple, not list
a = (1,2,3)
print '%d:%d' % a[:2]
print '%d:%d' % <warning descr="Too few arguments for format string">a[1:2]</warning>
string = "qwerty"
print '%d:%d' % <warning descr="Too few arguments for format string"><warning descr="Unexpected type str">string[:2]</warning></warning>
print '%s:%s' % <warning descr="Too few arguments for format string">string[:2]</warning>
print '%s' % string[:2]
print '%d' % <warning descr="Unexpected type str">string[:2]</warning>
my_tuple = (1,2,3,4,5,6,7,8)
print '%d, %d' % <warning descr="Too many arguments for format string">my_tuple[:7:3]</warning>
print '%d, %d, %d' % my_tuple[:7:3]
print '%d, %d, %d, %d' % <warning descr="Too few arguments for format string">my_tuple[:7:3]</warning>
# PY-12801
print '%d %s' % ((42,) + ('spam',))
print '%d %s' % (<warning descr="Unexpected type (str, str)">('ham',) + ('spam',)</warning>)
print '%d %s' % (<warning descr="Too few arguments for format string"><warning descr="Unexpected type (int)">(42,) + ()</warning></warning>)
print '%d' % (<warning descr="Too many arguments for format string"><warning descr="Unexpected type (int, str)">(42,) + ('spam',)</warning></warning>)
# PY-11274
import collections
print '%(foo)s' % collections.OrderedDict(foo=None)
class MyDict(collections.Mapping):
def __getitem__(self, key):
return 'spam'
def __iter__(self):
yield 'spam'
def __len__(self):
return 1
print '%(foo)s' % MyDict()
foo = {1, 2, 3}
print('%s %s %s' % <warning descr="Too few arguments for format string">foo</warning>)
'%s %s %s' % <warning descr="Too few arguments for format string">(x for x in range(10))</warning>
| apache-2.0 | -3,637,054,528,343,223,300 | 46.80315 | 227 | 0.610773 | false |
brainstorm-ai/DIGITS | digits/dataset/images/generic/views.py | 1 | 4461 | # Copyright (c) 2015-2016, NVIDIA CORPORATION. All rights reserved.
from __future__ import absolute_import
import flask
from .forms import GenericImageDatasetForm
from .job import GenericImageDatasetJob
from digits import utils
from digits.dataset import tasks
from digits.webapp import app, scheduler
from digits.utils.forms import fill_form_if_cloned, save_form_to_job
from digits.utils.routing import request_wants_json, job_from_request
blueprint = flask.Blueprint(__name__, __name__)
@blueprint.route('/new', methods=['GET'])
@utils.auth.requires_login
def new():
"""
Returns a form for a new GenericImageDatasetJob
"""
form = GenericImageDatasetForm()
## Is there a request to clone a job with ?clone=<job_id>
fill_form_if_cloned(form)
return flask.render_template('datasets/images/generic/new.html', form=form)
@blueprint.route('.json', methods=['POST'])
@blueprint.route('', methods=['POST'], strict_slashes=False)
@utils.auth.requires_login(redirect=False)
def create():
"""
Creates a new GenericImageDatasetJob
Returns JSON when requested: {job_id,name,status} or {errors:[]}
"""
form = GenericImageDatasetForm()
## Is there a request to clone a job with ?clone=<job_id>
fill_form_if_cloned(form)
if not form.validate_on_submit():
if request_wants_json():
return flask.jsonify({'errors': form.errors}), 400
else:
return flask.render_template('datasets/images/generic/new.html', form=form), 400
job = None
try:
job = GenericImageDatasetJob(
username = utils.auth.get_username(),
name = form.dataset_name.data,
mean_file = form.prebuilt_mean_file.data.strip(),
)
if form.method.data == 'prebuilt':
pass
else:
raise ValueError('method not supported')
force_same_shape = form.force_same_shape.data
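        # Create an AnalyzeDbTask for each prebuilt database supplied in the form;
        # only the training image database is mandatory, the others are optional.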
job.tasks.append(
tasks.AnalyzeDbTask(
job_dir = job.dir(),
database = form.prebuilt_train_images.data,
purpose = form.prebuilt_train_images.label.text,
force_same_shape = force_same_shape,
)
)
if form.prebuilt_train_labels.data:
job.tasks.append(
tasks.AnalyzeDbTask(
job_dir = job.dir(),
database = form.prebuilt_train_labels.data,
purpose = form.prebuilt_train_labels.label.text,
force_same_shape = force_same_shape,
)
)
if form.prebuilt_val_images.data:
job.tasks.append(
tasks.AnalyzeDbTask(
job_dir = job.dir(),
database = form.prebuilt_val_images.data,
purpose = form.prebuilt_val_images.label.text,
force_same_shape = force_same_shape,
)
)
if form.prebuilt_val_labels.data:
job.tasks.append(
tasks.AnalyzeDbTask(
job_dir = job.dir(),
database = form.prebuilt_val_labels.data,
purpose = form.prebuilt_val_labels.label.text,
force_same_shape = force_same_shape,
)
)
## Save form data with the job so we can easily clone it later.
save_form_to_job(job, form)
scheduler.add_job(job)
if request_wants_json():
return flask.jsonify(job.json_dict())
else:
return flask.redirect(flask.url_for('digits.dataset.views.show', job_id=job.id()))
except:
if job:
scheduler.delete_job(job)
raise
def show(job, related_jobs=None):
"""
Called from digits.dataset.views.datasets_show()
"""
return flask.render_template('datasets/images/generic/show.html', job=job, related_jobs=related_jobs)
@blueprint.route('/summary', methods=['GET'])
def summary():
"""
Return a short HTML summary of a DatasetJob
"""
job = job_from_request()
return flask.render_template('datasets/images/generic/summary.html', dataset=job)
| bsd-3-clause | 2,914,148,635,444,790,000 | 33.053435 | 105 | 0.559067 | false |
gquirozbogner/contentbox-master | third_party/unidecode/x090.py | 4 | 4889 | data = (
'Tui ', # 0x00
'Song ', # 0x01
'Gua ', # 0x02
'Tao ', # 0x03
'Pang ', # 0x04
'Hou ', # 0x05
'Ni ', # 0x06
'Dun ', # 0x07
'Jiong ', # 0x08
'Xuan ', # 0x09
'Xun ', # 0x0a
'Bu ', # 0x0b
'You ', # 0x0c
'Xiao ', # 0x0d
'Qiu ', # 0x0e
'Tou ', # 0x0f
'Zhu ', # 0x10
'Qiu ', # 0x11
'Di ', # 0x12
'Di ', # 0x13
'Tu ', # 0x14
'Jing ', # 0x15
'Ti ', # 0x16
'Dou ', # 0x17
'Yi ', # 0x18
'Zhe ', # 0x19
'Tong ', # 0x1a
'Guang ', # 0x1b
'Wu ', # 0x1c
'Shi ', # 0x1d
'Cheng ', # 0x1e
'Su ', # 0x1f
'Zao ', # 0x20
'Qun ', # 0x21
'Feng ', # 0x22
'Lian ', # 0x23
'Suo ', # 0x24
'Hui ', # 0x25
'Li ', # 0x26
'Sako ', # 0x27
'Lai ', # 0x28
'Ben ', # 0x29
'Cuo ', # 0x2a
'Jue ', # 0x2b
'Beng ', # 0x2c
'Huan ', # 0x2d
'Dai ', # 0x2e
'Lu ', # 0x2f
'You ', # 0x30
'Zhou ', # 0x31
'Jin ', # 0x32
'Yu ', # 0x33
'Chuo ', # 0x34
'Kui ', # 0x35
'Wei ', # 0x36
'Ti ', # 0x37
'Yi ', # 0x38
'Da ', # 0x39
'Yuan ', # 0x3a
'Luo ', # 0x3b
'Bi ', # 0x3c
'Nuo ', # 0x3d
'Yu ', # 0x3e
'Dang ', # 0x3f
'Sui ', # 0x40
'Dun ', # 0x41
'Sui ', # 0x42
'Yan ', # 0x43
'Chuan ', # 0x44
'Chi ', # 0x45
'Ti ', # 0x46
'Yu ', # 0x47
'Shi ', # 0x48
'Zhen ', # 0x49
'You ', # 0x4a
'Yun ', # 0x4b
'E ', # 0x4c
'Bian ', # 0x4d
'Guo ', # 0x4e
'E ', # 0x4f
'Xia ', # 0x50
'Huang ', # 0x51
'Qiu ', # 0x52
'Dao ', # 0x53
'Da ', # 0x54
'Wei ', # 0x55
'Appare ', # 0x56
'Yi ', # 0x57
'Gou ', # 0x58
'Yao ', # 0x59
'Chu ', # 0x5a
'Liu ', # 0x5b
'Xun ', # 0x5c
'Ta ', # 0x5d
'Di ', # 0x5e
'Chi ', # 0x5f
'Yuan ', # 0x60
'Su ', # 0x61
'Ta ', # 0x62
'Qian ', # 0x63
'[?] ', # 0x64
'Yao ', # 0x65
'Guan ', # 0x66
'Zhang ', # 0x67
'Ao ', # 0x68
'Shi ', # 0x69
'Ce ', # 0x6a
'Chi ', # 0x6b
'Su ', # 0x6c
'Zao ', # 0x6d
'Zhe ', # 0x6e
'Dun ', # 0x6f
'Di ', # 0x70
'Lou ', # 0x71
'Chi ', # 0x72
'Cuo ', # 0x73
'Lin ', # 0x74
'Zun ', # 0x75
'Rao ', # 0x76
'Qian ', # 0x77
'Xuan ', # 0x78
'Yu ', # 0x79
'Yi ', # 0x7a
'Wu ', # 0x7b
'Liao ', # 0x7c
'Ju ', # 0x7d
'Shi ', # 0x7e
'Bi ', # 0x7f
'Yao ', # 0x80
'Mai ', # 0x81
'Xie ', # 0x82
'Sui ', # 0x83
'Huan ', # 0x84
'Zhan ', # 0x85
'Teng ', # 0x86
'Er ', # 0x87
'Miao ', # 0x88
'Bian ', # 0x89
'Bian ', # 0x8a
'La ', # 0x8b
'Li ', # 0x8c
'Yuan ', # 0x8d
'Yao ', # 0x8e
'Luo ', # 0x8f
'Li ', # 0x90
'Yi ', # 0x91
'Ting ', # 0x92
'Deng ', # 0x93
'Qi ', # 0x94
'Yong ', # 0x95
'Shan ', # 0x96
'Han ', # 0x97
'Yu ', # 0x98
'Mang ', # 0x99
'Ru ', # 0x9a
'Qiong ', # 0x9b
'[?] ', # 0x9c
'Kuang ', # 0x9d
'Fu ', # 0x9e
'Kang ', # 0x9f
'Bin ', # 0xa0
'Fang ', # 0xa1
'Xing ', # 0xa2
'Na ', # 0xa3
'Xin ', # 0xa4
'Shen ', # 0xa5
'Bang ', # 0xa6
'Yuan ', # 0xa7
'Cun ', # 0xa8
'Huo ', # 0xa9
'Xie ', # 0xaa
'Bang ', # 0xab
'Wu ', # 0xac
'Ju ', # 0xad
'You ', # 0xae
'Han ', # 0xaf
'Tai ', # 0xb0
'Qiu ', # 0xb1
'Bi ', # 0xb2
'Pei ', # 0xb3
'Bing ', # 0xb4
'Shao ', # 0xb5
'Bei ', # 0xb6
'Wa ', # 0xb7
'Di ', # 0xb8
'Zou ', # 0xb9
'Ye ', # 0xba
'Lin ', # 0xbb
'Kuang ', # 0xbc
'Gui ', # 0xbd
'Zhu ', # 0xbe
'Shi ', # 0xbf
'Ku ', # 0xc0
'Yu ', # 0xc1
'Gai ', # 0xc2
'Ge ', # 0xc3
'Xi ', # 0xc4
'Zhi ', # 0xc5
'Ji ', # 0xc6
'Xun ', # 0xc7
'Hou ', # 0xc8
'Xing ', # 0xc9
'Jiao ', # 0xca
'Xi ', # 0xcb
'Gui ', # 0xcc
'Nuo ', # 0xcd
'Lang ', # 0xce
'Jia ', # 0xcf
'Kuai ', # 0xd0
'Zheng ', # 0xd1
'Otoko ', # 0xd2
'Yun ', # 0xd3
'Yan ', # 0xd4
'Cheng ', # 0xd5
'Dou ', # 0xd6
'Chi ', # 0xd7
'Lu ', # 0xd8
'Fu ', # 0xd9
'Wu ', # 0xda
'Fu ', # 0xdb
'Gao ', # 0xdc
'Hao ', # 0xdd
'Lang ', # 0xde
'Jia ', # 0xdf
'Geng ', # 0xe0
'Jun ', # 0xe1
'Ying ', # 0xe2
'Bo ', # 0xe3
'Xi ', # 0xe4
'Bei ', # 0xe5
'Li ', # 0xe6
'Yun ', # 0xe7
'Bu ', # 0xe8
'Xiao ', # 0xe9
'Qi ', # 0xea
'Pi ', # 0xeb
'Qing ', # 0xec
'Guo ', # 0xed
'Zhou ', # 0xee
'Tan ', # 0xef
'Zou ', # 0xf0
'Ping ', # 0xf1
'Lai ', # 0xf2
'Ni ', # 0xf3
'Chen ', # 0xf4
'You ', # 0xf5
'Bu ', # 0xf6
'Xiang ', # 0xf7
'Dan ', # 0xf8
'Ju ', # 0xf9
'Yong ', # 0xfa
'Qiao ', # 0xfb
'Yi ', # 0xfc
'Du ', # 0xfd
'Yan ', # 0xfe
'Mei ', # 0xff
)
| apache-2.0 | 8,185,213,964,507,043,000 | 16.949612 | 20 | 0.368583 | false |
benjyw/pants | src/python/pants/option/arg_splitter.py | 4 | 10671 | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import dataclasses
import os.path
from abc import ABC
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, Iterable, List, Optional, Sequence, Tuple
from pants.option.scope import GLOBAL_SCOPE, ScopeInfo
from pants.util.ordered_set import OrderedSet
class ArgSplitterError(Exception):
pass
@dataclass(frozen=True)
class SplitArgs:
"""The result of splitting args."""
goals: List[str] # Explicitly requested goals.
scope_to_flags: Dict[str, List[str]] # Scope name -> list of flags in that scope.
specs: List[str] # The specifications for what to run against, e.g. the targets or files
passthru: List[str] # Any remaining args specified after a -- separator.
class HelpRequest(ABC):
"""Represents an implicit or explicit request for help by the user."""
@dataclass(frozen=True)
class ThingHelp(HelpRequest):
"""The user requested help on one or more things: e.g., an options scope or a target type."""
advanced: bool = False
things: Tuple[str, ...] = ()
class VersionHelp(HelpRequest):
"""The user asked for the version of this instance of pants."""
class AllHelp(HelpRequest):
"""The user requested a dump of all help info."""
@dataclass(frozen=True)
class UnknownGoalHelp(HelpRequest):
"""The user specified an unknown goal (or task)."""
unknown_goals: Tuple[str, ...]
class NoGoalHelp(HelpRequest):
"""The user specified no goals."""
class ArgSplitter:
"""Splits a command-line into scoped sets of flags and a set of specs.
Recognizes, e.g.:
./pants goal -x compile --foo compile.java -y target1 target2
./pants -x compile --foo compile.java -y -- target1 target2
./pants -x compile target1 target2 --compile-java-flag
./pants -x --compile-java-flag compile target1 target2
Handles help and version args specially.
"""
_HELP_BASIC_ARGS = ("-h", "--help", "help")
_HELP_ADVANCED_ARGS = ("--help-advanced", "help-advanced")
_HELP_VERSION_ARGS = ("-v", "-V", "--version", "version")
_HELP_ALL_SCOPES_ARGS = ("help-all",)
_HELP_ARGS = (
*_HELP_BASIC_ARGS,
*_HELP_ADVANCED_ARGS,
*_HELP_VERSION_ARGS,
*_HELP_ALL_SCOPES_ARGS,
)
def __init__(self, known_scope_infos: Iterable[ScopeInfo], buildroot: str) -> None:
self._buildroot = Path(buildroot)
self._known_scope_infos = known_scope_infos
self._known_scopes = {si.scope for si in known_scope_infos} | {
"version",
"help",
"help-advanced",
"help-all",
}
self._unconsumed_args: List[
str
] = [] # In reverse order, for efficient popping off the end.
self._help_request: Optional[
HelpRequest
] = None # Will be set if we encounter any help flags.
# For convenience, and for historical reasons, we allow --scope-flag-name anywhere on the
# cmd line, as an alternative to ... scope --flag-name.
# We check for prefixes in reverse order, so we match the longest prefix first.
sorted_scope_infos = sorted(
[si for si in self._known_scope_infos if si.scope],
key=lambda si: si.scope,
reverse=True,
)
# List of pairs (prefix, ScopeInfo).
self._known_scoping_prefixes = [
(f"{si.scope.replace('.', '-')}-", si) for si in sorted_scope_infos
]
@property
def help_request(self) -> Optional[HelpRequest]:
return self._help_request
def _check_for_help_request(self, arg: str) -> bool:
if arg not in self._HELP_ARGS:
return False
if arg in self._HELP_VERSION_ARGS:
self._help_request = VersionHelp()
elif arg in self._HELP_ALL_SCOPES_ARGS:
self._help_request = AllHelp()
else:
# First ensure that we have a basic OptionsHelp.
if not self._help_request:
self._help_request = ThingHelp()
# Now see if we need to enhance it.
if isinstance(self._help_request, ThingHelp):
advanced = self._help_request.advanced or arg in self._HELP_ADVANCED_ARGS
self._help_request = dataclasses.replace(self._help_request, advanced=advanced)
return True
def split_args(self, args: Sequence[str]) -> SplitArgs:
"""Split the specified arg list (or sys.argv if unspecified).
args[0] is ignored.
Returns a SplitArgs tuple.
"""
goals: OrderedSet[str] = OrderedSet()
scope_to_flags: Dict[str, List[str]] = {}
def add_scope(s: str) -> None:
# Force the scope to appear, even if empty.
if s not in scope_to_flags:
scope_to_flags[s] = []
specs: List[str] = []
passthru: List[str] = []
unknown_scopes: List[str] = []
self._unconsumed_args = list(reversed(args))
# The first token is the binary name, so skip it.
self._unconsumed_args.pop()
def assign_flag_to_scope(flg: str, default_scope: str) -> None:
flag_scope, descoped_flag = self._descope_flag(flg, default_scope=default_scope)
if flag_scope not in scope_to_flags:
scope_to_flags[flag_scope] = []
scope_to_flags[flag_scope].append(descoped_flag)
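        # Flags appearing before any goal default to the global scope, unless they
        # carry an explicit scope prefix such as --compile-java-<flag>.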
global_flags = self._consume_flags()
add_scope(GLOBAL_SCOPE)
for flag in global_flags:
assign_flag_to_scope(flag, GLOBAL_SCOPE)
scope, flags = self._consume_scope()
while scope:
if not self._check_for_help_request(scope.lower()):
add_scope(scope)
goals.add(scope.partition(".")[0])
for flag in flags:
assign_flag_to_scope(flag, scope)
scope, flags = self._consume_scope()
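        # Whatever remains before an optional '--' separator is a spec, a flag in
        # the global scope, or an unknown goal name.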
while self._unconsumed_args and not self._at_double_dash():
arg = self._unconsumed_args.pop()
if arg.startswith("-"):
# We assume any args here are in global scope.
if not self._check_for_help_request(arg):
assign_flag_to_scope(arg, GLOBAL_SCOPE)
elif self.likely_a_spec(arg):
specs.append(arg)
elif arg not in self._known_scopes:
unknown_scopes.append(arg)
if self._at_double_dash():
self._unconsumed_args.pop()
passthru = list(reversed(self._unconsumed_args))
if unknown_scopes and not self._help_request:
self._help_request = UnknownGoalHelp(tuple(unknown_scopes))
if not goals and not self._help_request:
self._help_request = NoGoalHelp()
if isinstance(self._help_request, ThingHelp):
self._help_request = dataclasses.replace(
self._help_request, things=tuple(goals) + tuple(unknown_scopes)
)
return SplitArgs(
goals=list(goals),
scope_to_flags=scope_to_flags,
specs=specs,
passthru=passthru,
)
def likely_a_spec(self, arg: str) -> bool:
"""Return whether `arg` looks like a spec, rather than a goal name.
An arg is a spec if it looks like an AddressSpec or a FilesystemSpec. In the future we can
expand this heuristic to support other kinds of specs, such as URLs.
"""
return (
any(symbol in arg for symbol in (os.path.sep, ":", "*"))
or arg.startswith("!")
or (self._buildroot / arg).exists()
)
def _consume_scope(self) -> Tuple[Optional[str], List[str]]:
"""Returns a pair (scope, list of flags encountered in that scope).
Note that the flag may be explicitly scoped, and therefore not actually belong to this scope.
For example, in:
./pants --compile-java-partition-size-hint=100 compile <target>
--compile-java-partition-size-hint should be treated as if it were --partition-size-hint=100
in the compile.java scope.
"""
if not self._at_scope():
return None, []
scope = self._unconsumed_args.pop()
flags = self._consume_flags()
return scope, flags
def _consume_flags(self) -> List[str]:
"""Read flags until we encounter the first token that isn't a flag."""
flags = []
while self._at_flag():
flag = self._unconsumed_args.pop()
if not self._check_for_help_request(flag):
flags.append(flag)
return flags
def _descope_flag(self, flag: str, default_scope: str) -> Tuple[str, str]:
"""If the flag is prefixed by its scope, in the old style, extract the scope.
Otherwise assume it belongs to default_scope.
returns a pair (scope, flag).
"""
for scope_prefix, scope_info in self._known_scoping_prefixes:
for flag_prefix in ["--", "--no-"]:
prefix = flag_prefix + scope_prefix
if flag.startswith(prefix):
scope = scope_info.scope
if scope != GLOBAL_SCOPE and default_scope != GLOBAL_SCOPE:
# We allow goal.task --subsystem-foo to refer to the task-level subsystem instance,
# i.e., as if qualified by --subsystem-goal-task-foo.
# Note that this means that we can't set a task option on the cmd-line if its
# name happens to start with a subsystem scope.
# TODO: Either fix this or at least detect such options and warn.
task_subsystem_scope = f"{scope_info.scope}.{default_scope}"
if (
task_subsystem_scope in self._known_scopes
): # Such a task subsystem actually exists.
scope = task_subsystem_scope
return scope, flag_prefix + flag[len(prefix) :]
return default_scope, flag
def _at_flag(self) -> bool:
return (
bool(self._unconsumed_args)
and self._unconsumed_args[-1].startswith("-")
and not self._at_double_dash()
)
def _at_scope(self) -> bool:
return bool(self._unconsumed_args) and self._unconsumed_args[-1] in self._known_scopes
def _at_double_dash(self) -> bool:
return bool(self._unconsumed_args) and self._unconsumed_args[-1] == "--"
| apache-2.0 | -2,058,844,401,133,481,200 | 36.181185 | 107 | 0.587855 | false |
scalingdata/Impala | tests/hs2/test_fetch.py | 1 | 11529 | #!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
import re
from tests.hs2.hs2_test_suite import HS2TestSuite, needs_session
from TCLIService import TCLIService, constants
from TCLIService.ttypes import TTypeId
# Simple test to make sure all the HS2 types are supported for both the row and
# column-oriented versions of the HS2 protocol.
class TestFetch(HS2TestSuite):
def __verify_primitive_type(self, expected_type, hs2_type):
assert hs2_type.typeDesc.types[0].primitiveEntry.type == expected_type
def __verify_char_max_len(self, t, max_len):
l = t.typeDesc.types[0].primitiveEntry.typeQualifiers.qualifiers\
[constants.CHARACTER_MAXIMUM_LENGTH]
assert l.i32Value == max_len
def __verify_decimal_precision_scale(self, hs2_type, precision, scale):
p = hs2_type.typeDesc.types[0].primitiveEntry.typeQualifiers.qualifiers\
[constants.PRECISION]
s = hs2_type.typeDesc.types[0].primitiveEntry.typeQualifiers.qualifiers\
[constants.SCALE]
assert p.i32Value == precision
assert s.i32Value == scale
@needs_session(TCLIService.TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1)
def test_result_metadata_v1(self):
execute_statement_req = TCLIService.TExecuteStatementReq()
execute_statement_req.sessionHandle = self.session_handle
# Verify all primitive types in the alltypes table.
execute_statement_req.statement =\
"SELECT * FROM functional.alltypessmall ORDER BY id LIMIT 1"
execute_statement_resp = self.hs2_client.ExecuteStatement(execute_statement_req)
HS2TestSuite.check_response(execute_statement_resp)
results = self.fetch_at_most(execute_statement_resp.operationHandle,
TCLIService.TFetchOrientation.FETCH_NEXT, 1, 1)
assert len(results.results.rows) == 1
metadata_resp = self.result_metadata(execute_statement_resp.operationHandle)
column_types = metadata_resp.schema.columns
assert len(column_types) == 13
self.__verify_primitive_type(TTypeId.INT_TYPE, column_types[0])
self.__verify_primitive_type(TTypeId.BOOLEAN_TYPE, column_types[1])
self.__verify_primitive_type(TTypeId.TINYINT_TYPE, column_types[2])
self.__verify_primitive_type(TTypeId.SMALLINT_TYPE, column_types[3])
self.__verify_primitive_type(TTypeId.INT_TYPE, column_types[4])
self.__verify_primitive_type(TTypeId.BIGINT_TYPE, column_types[5])
self.__verify_primitive_type(TTypeId.FLOAT_TYPE, column_types[6])
self.__verify_primitive_type(TTypeId.DOUBLE_TYPE, column_types[7])
self.__verify_primitive_type(TTypeId.STRING_TYPE, column_types[8])
self.__verify_primitive_type(TTypeId.STRING_TYPE, column_types[9])
self.__verify_primitive_type(TTypeId.TIMESTAMP_TYPE, column_types[10])
self.__verify_primitive_type(TTypeId.INT_TYPE, column_types[11])
self.__verify_primitive_type(TTypeId.INT_TYPE, column_types[12])
self.close(execute_statement_resp.operationHandle)
# Verify the result metadata for the DECIMAL type.
execute_statement_req.statement =\
"SELECT d1,d5 FROM functional.decimal_tbl ORDER BY d1 LIMIT 1"
execute_statement_resp = self.hs2_client.ExecuteStatement(execute_statement_req)
HS2TestSuite.check_response(execute_statement_resp)
results = self.fetch_at_most(execute_statement_resp.operationHandle,
TCLIService.TFetchOrientation.FETCH_NEXT, 1, 1)
assert len(results.results.rows) == 1
# Verify the result schema is what we expect. The result has 2 columns, the
# first is decimal(9,0) and the second is decimal(10,5)
metadata_resp = self.result_metadata(execute_statement_resp.operationHandle)
column_types = metadata_resp.schema.columns
assert len(column_types) == 2
self.__verify_primitive_type(TTypeId.DECIMAL_TYPE, column_types[0])
self.__verify_decimal_precision_scale(column_types[0], 9, 0)
self.__verify_primitive_type(TTypeId.DECIMAL_TYPE, column_types[1])
self.__verify_decimal_precision_scale(column_types[1], 10, 5)
self.close(execute_statement_resp.operationHandle)
# Verify the result metadata for the CHAR/VARCHAR types.
execute_statement_req.statement =\
"SELECT * FROM functional.chars_tiny ORDER BY cs LIMIT 1"
execute_statement_resp = self.hs2_client.ExecuteStatement(execute_statement_req)
HS2TestSuite.check_response(execute_statement_resp)
results = self.fetch_at_most(execute_statement_resp.operationHandle,
TCLIService.TFetchOrientation.FETCH_NEXT, 1, 1)
assert len(results.results.rows) == 1
metadata_resp = self.result_metadata(execute_statement_resp.operationHandle)
column_types = metadata_resp.schema.columns
assert len(column_types) == 3
self.__verify_primitive_type(TTypeId.CHAR_TYPE, column_types[0])
self.__verify_char_max_len(column_types[0], 5)
self.__verify_primitive_type(TTypeId.CHAR_TYPE, column_types[1])
self.__verify_char_max_len(column_types[1], 140)
self.__verify_primitive_type(TTypeId.VARCHAR_TYPE, column_types[2])
self.__verify_char_max_len(column_types[2], 32)
self.close(execute_statement_resp.operationHandle)
def __query_and_fetch(self, query):
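    # Helper shared by the tests below: executes the statement over HS2 and
    # fetches up to 1024 rows from the resulting operation handle.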
execute_statement_req = TCLIService.TExecuteStatementReq()
execute_statement_req.sessionHandle = self.session_handle
execute_statement_req.statement = query
execute_statement_resp = self.hs2_client.ExecuteStatement(execute_statement_req)
HS2TestSuite.check_response(execute_statement_resp)
fetch_results_req = TCLIService.TFetchResultsReq()
fetch_results_req.operationHandle = execute_statement_resp.operationHandle
fetch_results_req.maxRows = 1024
fetch_results_resp = self.hs2_client.FetchResults(fetch_results_req)
HS2TestSuite.check_response(fetch_results_resp)
return fetch_results_resp
@needs_session()
def test_alltypes_v6(self):
"""Test that a simple select statement works for all types"""
fetch_results_resp = self.__query_and_fetch(
"SELECT *, NULL from functional.alltypes ORDER BY id LIMIT 1")
num_rows, result = self.column_results_to_string(fetch_results_resp.results.columns)
assert num_rows == 1
assert result == \
"0, True, 0, 0, 0, 0, 0.0, 0.0, 01/01/09, 0, 2009-01-01 00:00:00, 2009, 1, NULL\n"
# Decimals
fetch_results_resp = self.__query_and_fetch(
"SELECT * from functional.decimal_tbl LIMIT 1")
num_rows, result = self.column_results_to_string(fetch_results_resp.results.columns)
assert result == ("1234, 2222, 1.2345678900, "
"0.12345678900000000000000000000000000000, 12345.78900, 1\n")
# VARCHAR
fetch_results_resp = self.__query_and_fetch("SELECT CAST('str' AS VARCHAR(3))")
num_rows, result = self.column_results_to_string(fetch_results_resp.results.columns)
assert result == "str\n"
# CHAR not inlined
fetch_results_resp = self.__query_and_fetch("SELECT CAST('car' AS CHAR(140))")
num_rows, result = self.column_results_to_string(fetch_results_resp.results.columns)
assert result == "car" + (" " * 137) + "\n"
# CHAR inlined
fetch_results_resp = self.__query_and_fetch("SELECT CAST('car' AS CHAR(5))")
num_rows, result = self.column_results_to_string(fetch_results_resp.results.columns)
assert result == "car \n"
@needs_session()
def test_show_partitions(self):
"""Regression test for IMPALA-1330"""
for query in ["SHOW PARTITIONS functional.alltypes",
"SHOW TABLE STATS functional.alltypes"]:
fetch_results_resp = self.__query_and_fetch(query)
num_rows, result = \
self.column_results_to_string(fetch_results_resp.results.columns)
assert num_rows == 25
# Match whether stats are computed or not
assert re.match(
r"2009, 1, -?\d+, -?\d+, \d*\.?\d+KB, NOT CACHED, NOT CACHED, TEXT", result) is not None
@needs_session()
def test_show_column_stats(self):
fetch_results_resp = self.__query_and_fetch("SHOW COLUMN STATS functional.alltypes")
num_rows, result = self.column_results_to_string(fetch_results_resp.results.columns)
assert num_rows == 13
assert re.match(r"id, INT, -?\d+, -?\d+, (NULL|\d+), 4.0", result) is not None
@needs_session(TCLIService.TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V1)
def test_execute_select_v1(self):
"""Test that a simple select statement works in the row-oriented protocol"""
execute_statement_req = TCLIService.TExecuteStatementReq()
execute_statement_req.sessionHandle = self.session_handle
execute_statement_req.statement = "SELECT COUNT(*) FROM functional.alltypes"
execute_statement_resp = self.hs2_client.ExecuteStatement(execute_statement_req)
HS2TestSuite.check_response(execute_statement_resp)
fetch_results_req = TCLIService.TFetchResultsReq()
fetch_results_req.operationHandle = execute_statement_resp.operationHandle
fetch_results_req.maxRows = 100
fetch_results_resp = self.hs2_client.FetchResults(fetch_results_req)
HS2TestSuite.check_response(fetch_results_resp)
assert len(fetch_results_resp.results.rows) == 1
assert fetch_results_resp.results.startRowOffset == 0
try:
assert not fetch_results_resp.hasMoreRows
except AssertionError:
pytest.xfail("IMPALA-558")
@needs_session()
def test_select_null(self):
"""Regression test for IMPALA-1370, where NULL literals would appear as strings where
they should be booleans"""
execute_statement_req = TCLIService.TExecuteStatementReq()
execute_statement_req.sessionHandle = self.session_handle
execute_statement_req.statement = "select null"
execute_statement_resp = self.hs2_client.ExecuteStatement(execute_statement_req)
HS2TestSuite.check_response(execute_statement_resp)
# Check that the expected type is boolean (for compatibility with Hive, see also
# IMPALA-914)
get_result_metadata_req = TCLIService.TGetResultSetMetadataReq()
get_result_metadata_req.operationHandle = execute_statement_resp.operationHandle
get_result_metadata_resp = \
self.hs2_client.GetResultSetMetadata(get_result_metadata_req)
col = get_result_metadata_resp.schema.columns[0]
assert col.typeDesc.types[0].primitiveEntry.type == TTypeId.BOOLEAN_TYPE
# Check that the actual type is boolean
fetch_results_req = TCLIService.TFetchResultsReq()
fetch_results_req.operationHandle = execute_statement_resp.operationHandle
fetch_results_req.maxRows = 1
fetch_results_resp = self.hs2_client.FetchResults(fetch_results_req)
HS2TestSuite.check_response(fetch_results_resp)
assert fetch_results_resp.results.columns[0].boolVal is not None
assert self.column_results_to_string(
fetch_results_resp.results.columns) == (1, "NULL\n")
@needs_session()
def test_compute_stats(self):
"""Exercise the child query path"""
self.__query_and_fetch("compute stats functional.alltypes")
| apache-2.0 | -7,616,108,649,754,918,000 | 48.059574 | 96 | 0.722266 | false |
phborba/dsgtoolsop | auxiliar/geopy/exc.py | 2 | 2514 | """
Exceptions raised by geopy.
"""
class GeopyError(Exception):
"""
Geopy-specific exceptions are all inherited from GeopyError.
"""
class ConfigurationError(GeopyError):
"""
When instantiating a geocoder, the arguments given were invalid. See
the documentation of each geocoder's ``__init__`` for more details.
"""
class GeocoderServiceError(GeopyError):
"""
There was an exception caused when calling the remote geocoding service,
and no more specific exception could be raised by geopy. When calling
    geocoders' ``geocode`` or ``reverse`` methods, this is the most generic
exception that can be raised, and any non-geopy exception will be caught
and turned into this. The exception's message will be that of the
original exception.
"""
class GeocoderQueryError(GeocoderServiceError):
"""
Either geopy detected input that would cause a request to fail,
or a request was made and the remote geocoding service responded
that the request was bad.
"""
class GeocoderQuotaExceeded(GeocoderServiceError):
"""
The remote geocoding service refused to fulfill the request
because the client has used its quota.
"""
class GeocoderAuthenticationFailure(GeocoderServiceError):
"""
The remote geocoding service rejected the API key or account
credentials this geocoder was instantiated with.
"""
class GeocoderInsufficientPrivileges(GeocoderServiceError):
"""
The remote geocoding service refused to fulfill a request using the
account credentials given.
"""
class GeocoderTimedOut(GeocoderServiceError):
"""
The call to the geocoding service was aborted because no response
has been received within the ``timeout`` argument of either
the geocoding class or, if specified, the method call.
Some services are just consistently slow, and a higher timeout
may be needed to use them.
"""
class GeocoderUnavailable(GeocoderServiceError):
"""
Either it was not possible to establish a connection to the remote
geocoding service, or the service responded with a code indicating
it was unavailable.
"""
class GeocoderParseError(GeocoderServiceError):
"""
Geopy could not parse the service's response. This is probably due
to a bug in geopy.
"""
class GeocoderNotFound(GeopyError):
"""
Caller requested the geocoder matching a string, e.g.,
``"google"`` > ``GoogleV3``, but no geocoder could be found.
"""
| gpl-2.0 | 1,796,926,098,334,069,200 | 27.568182 | 76 | 0.716786 | false |
dmitryilyin/mistral | mistral/openstack/common/db/sqlalchemy/models.py | 3 | 3684 | # Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Piston Cloud Computing, Inc.
# Copyright 2012 Cloudscaling Group, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models.
"""
import six
from sqlalchemy import Column, Integer
from sqlalchemy import DateTime
from sqlalchemy.orm import object_mapper
from mistral.openstack.common.db.sqlalchemy import session as sa
from mistral.openstack.common import timeutils
class ModelBase(object):
"""Base class for models."""
__table_initialized__ = False
def save(self, session=None):
"""Save this object."""
if not session:
session = sa.get_session()
        # NOTE(boris-42): This part of the code should look like:
        #                   session.add(self)
        #                   session.flush()
        #                 But there is a bug in sqlalchemy and eventlet that
        #                 raises a NoneType exception if there is no running
        #                 transaction and rollback is called. As long as
        #                 sqlalchemy has this bug we have to create the
        #                 transaction explicitly.
with session.begin(subtransactions=True):
session.add(self)
session.flush()
def __setitem__(self, key, value):
setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key)
def get(self, key, default=None):
return getattr(self, key, default)
def _get_extra_keys(self):
return []
def __iter__(self):
columns = dict(object_mapper(self).columns).keys()
# NOTE(russellb): Allow models to specify other keys that can be looked
# up, beyond the actual db columns. An example would be the 'name'
# property for an Instance.
columns.extend(self._get_extra_keys())
self._i = iter(columns)
return self
def next(self):
n = six.advance_iterator(self._i)
return n, getattr(self, n)
def update(self, values):
"""Make the model object behave like a dict."""
for k, v in six.iteritems(values):
setattr(self, k, v)
def iteritems(self):
"""Make the model object behave like a dict.
Includes attributes from joins.
"""
local = dict(self)
joined = dict([(k, v) for k, v in six.iteritems(self.__dict__)
if not k[0] == '_'])
local.update(joined)
return local.iteritems()
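# The mixins below are meant to be combined with ModelBase and the declarative
# base used by the service's models, adding created_at/updated_at bookkeeping
# and soft-delete support respectively.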
class TimestampMixin(object):
created_at = Column(DateTime, default=timeutils.utcnow)
updated_at = Column(DateTime, onupdate=timeutils.utcnow)
class SoftDeleteMixin(object):
deleted_at = Column(DateTime)
deleted = Column(Integer, default=0)
def soft_delete(self, session=None):
"""Mark this object as deleted."""
self.deleted = self.id
self.deleted_at = timeutils.utcnow()
self.save(session=session)
| apache-2.0 | -3,426,992,808,184,027,000 | 33.111111 | 79 | 0.628664 | false |
erickt/hue | desktop/core/ext-py/django-extensions-1.5.0/django_extensions/management/commands/sqlcreate.py | 35 | 3180 | import sys
import socket
from optparse import make_option
from django.conf import settings
from django.core.management.base import CommandError, BaseCommand
from django_extensions.management.utils import signalcommand
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('-R', '--router', action='store',
dest='router', default='default',
                    help='Use this router-database other than the default defined in settings.py'),
make_option('-D', '--drop', action='store_true',
dest='drop', default=False,
help='If given, includes commands to drop any existing user and database.'),
)
help = """Generates the SQL to create your database for you, as specified in settings.py
The envisioned use case is something like this:
./manage.py sqlcreate [--router=<routername>] | mysql -u <db_administrator> -p
./manage.py sqlcreate [--router=<routername>] | psql -U <db_administrator> -W"""
requires_model_validation = False
can_import_settings = True
@signalcommand
def handle(self, *args, **options):
router = options.get('router')
dbinfo = settings.DATABASES.get(router)
if dbinfo is None:
raise CommandError("Unknown database router %s" % router)
engine = dbinfo.get('ENGINE').split('.')[-1]
dbuser = dbinfo.get('USER')
dbpass = dbinfo.get('PASSWORD')
dbname = dbinfo.get('NAME')
dbhost = dbinfo.get('HOST')
dbclient = socket.gethostname()
# django settings file tells you that localhost should be specified by leaving
# the DATABASE_HOST blank
if not dbhost:
dbhost = 'localhost'
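        # Emit engine-specific SQL: MySQL and PostgreSQL get user/database creation
        # plus GRANTs, sqlite3 needs nothing, and any other engine falls back to a
        # plain CREATE DATABASE statement.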
if engine == 'mysql':
sys.stderr.write("""-- WARNING!: https://docs.djangoproject.com/en/dev/ref/databases/#collation-settings
-- Please read this carefully! Collation will be set to utf8_bin to have case-sensitive data.
""")
print("CREATE DATABASE %s CHARACTER SET utf8 COLLATE utf8_bin;" % dbname)
print("GRANT ALL PRIVILEGES ON %s.* to '%s'@'%s' identified by '%s';" % (
dbname, dbuser, dbclient, dbpass
))
elif engine == 'postgresql_psycopg2':
if options.get('drop'):
print("DROP DATABASE IF EXISTS %s;" % (dbname,))
print("DROP USER IF EXISTS %s;" % (dbuser,))
print("CREATE USER %s WITH ENCRYPTED PASSWORD '%s' CREATEDB;" % (dbuser, dbpass))
print("CREATE DATABASE %s WITH ENCODING 'UTF-8' OWNER \"%s\";" % (dbname, dbuser))
print("GRANT ALL PRIVILEGES ON DATABASE %s TO %s;" % (dbname, dbuser))
elif engine == 'sqlite3':
sys.stderr.write("-- manage.py syncdb will automatically create a sqlite3 database file.\n")
else:
# CREATE DATABASE is not SQL standard, but seems to be supported by most.
sys.stderr.write("-- Don't know how to handle '%s' falling back to SQL.\n" % engine)
print("CREATE DATABASE %s;" % dbname)
print("GRANT ALL PRIVILEGES ON DATABASE %s to %s" % (dbname, dbuser))
| apache-2.0 | 5,160,953,285,990,459,000 | 41.972973 | 116 | 0.613836 | false |
alexteodor/odoo | addons/account_voucher/account_voucher.py | 1 | 85226 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from lxml import etree
from openerp.osv import fields, osv
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
from openerp.tools import float_compare
from openerp.report import report_sxw
import openerp
class res_currency(osv.osv):
_inherit = "res.currency"
def _get_current_rate(self, cr, uid, ids, raise_on_no_rate=True, context=None):
if context is None:
context = {}
res = super(res_currency, self)._get_current_rate(cr, uid, ids, raise_on_no_rate, context=context)
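        # A voucher can force its own exchange rate: when the context carries a
        # special currency and rate, override the rate computed by the base class
        # for that currency only.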
if context.get('voucher_special_currency') in ids and context.get('voucher_special_currency_rate'):
res[context.get('voucher_special_currency')] = context.get('voucher_special_currency_rate')
return res
class account_voucher(osv.osv):
def _check_paid(self, cr, uid, ids, name, args, context=None):
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
            res[voucher.id] = any(line.account_id.type in ('receivable', 'payable') and line.reconcile_id for line in voucher.move_ids)
return res
def _get_type(self, cr, uid, context=None):
if context is None:
context = {}
return context.get('type', False)
def _get_period(self, cr, uid, context=None):
if context is None: context = {}
if context.get('period_id', False):
return context.get('period_id')
periods = self.pool.get('account.period').find(cr, uid, context=context)
return periods and periods[0] or False
def _make_journal_search(self, cr, uid, ttype, context=None):
journal_pool = self.pool.get('account.journal')
return journal_pool.search(cr, uid, [('type', '=', ttype)], limit=1)
def _get_journal(self, cr, uid, context=None):
if context is None: context = {}
invoice_pool = self.pool.get('account.invoice')
journal_pool = self.pool.get('account.journal')
if context.get('invoice_id', False):
invoice = invoice_pool.browse(cr, uid, context['invoice_id'], context=context)
journal_id = journal_pool.search(cr, uid, [
('currency', '=', invoice.currency_id.id), ('company_id', '=', invoice.company_id.id)
], limit=1, context=context)
return journal_id and journal_id[0] or False
if context.get('journal_id', False):
return context.get('journal_id')
if not context.get('journal_id', False) and context.get('search_default_journal_id', False):
return context.get('search_default_journal_id')
ttype = context.get('type', 'bank')
if ttype in ('payment', 'receipt'):
ttype = 'bank'
res = self._make_journal_search(cr, uid, ttype, context=context)
return res and res[0] or False
def _get_tax(self, cr, uid, context=None):
if context is None: context = {}
journal_pool = self.pool.get('account.journal')
journal_id = context.get('journal_id', False)
if not journal_id:
ttype = context.get('type', 'bank')
res = journal_pool.search(cr, uid, [('type', '=', ttype)], limit=1)
if not res:
return False
journal_id = res[0]
if not journal_id:
return False
journal = journal_pool.browse(cr, uid, journal_id, context=context)
account_id = journal.default_credit_account_id or journal.default_debit_account_id
if account_id and account_id.tax_ids:
tax_id = account_id.tax_ids[0].id
return tax_id
return False
def _get_payment_rate_currency(self, cr, uid, context=None):
"""
Return the default value for field payment_rate_currency_id: the currency of the journal
if there is one, otherwise the currency of the user's company
"""
if context is None: context = {}
journal_pool = self.pool.get('account.journal')
journal_id = context.get('journal_id', False)
if journal_id:
journal = journal_pool.browse(cr, uid, journal_id, context=context)
if journal.currency:
return journal.currency.id
#no journal given in the context, use company currency as default
return self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
def _get_currency(self, cr, uid, context=None):
if context is None: context = {}
journal_pool = self.pool.get('account.journal')
journal_id = context.get('journal_id', False)
if journal_id:
journal = journal_pool.browse(cr, uid, journal_id, context=context)
if journal.currency:
return journal.currency.id
return self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.currency_id.id
def _get_partner(self, cr, uid, context=None):
if context is None: context = {}
return context.get('partner_id', False)
def _get_reference(self, cr, uid, context=None):
if context is None: context = {}
return context.get('reference', False)
def _get_narration(self, cr, uid, context=None):
if context is None: context = {}
return context.get('narration', False)
def _get_amount(self, cr, uid, context=None):
if context is None:
context= {}
return context.get('amount', 0.0)
def name_get(self, cr, uid, ids, context=None):
if not ids:
return []
if context is None: context = {}
return [(r['id'], (r['number'] or _('Voucher'))) for r in self.read(cr, uid, ids, ['number'], context, load='_classic_write')]
def fields_view_get(self, cr, uid, view_id=None, view_type=False, context=None, toolbar=False, submenu=False):
mod_obj = self.pool.get('ir.model.data')
if context is None: context = {}
if view_type == 'form':
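            # Pick the customer receipt form or the supplier payment form depending
            # on the invoice_type / line_type carried in the context.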
if not view_id and context.get('invoice_type'):
if context.get('invoice_type') in ('out_invoice', 'out_refund'):
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_receipt_form')
else:
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_payment_form')
result = result and result[1] or False
view_id = result
if not view_id and context.get('line_type'):
if context.get('line_type') == 'customer':
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_receipt_form')
else:
result = mod_obj.get_object_reference(cr, uid, 'account_voucher', 'view_vendor_payment_form')
result = result and result[1] or False
view_id = result
res = super(account_voucher, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=submenu)
doc = etree.XML(res['arch'])
if context.get('type', 'sale') in ('purchase', 'payment'):
nodes = doc.xpath("//field[@name='partner_id']")
for node in nodes:
node.set('context', "{'default_customer': 0, 'search_default_supplier': 1, 'default_supplier': 1}")
if context.get('invoice_type','') in ('in_invoice', 'in_refund'):
node.set('string', _("Supplier"))
res['arch'] = etree.tostring(doc)
return res
def _compute_writeoff_amount(self, cr, uid, line_dr_ids, line_cr_ids, amount, type):
debit = credit = 0.0
sign = type == 'payment' and -1 or 1
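        # Allocations sit on the credit lines for receipts and on the debit lines
        # for payments, so the sign is flipped for 'payment' vouchers when
        # computing the remaining (write-off) amount.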
for l in line_dr_ids:
if isinstance(l, dict):
debit += l['amount']
for l in line_cr_ids:
if isinstance(l, dict):
credit += l['amount']
return amount - sign * (credit - debit)
def onchange_line_ids(self, cr, uid, ids, line_dr_ids, line_cr_ids, amount, voucher_currency, type, context=None):
context = context or {}
if not line_dr_ids and not line_cr_ids:
return {'value':{'writeoff_amount': 0.0}}
# resolve lists of commands into lists of dicts
line_dr_ids = self.resolve_2many_commands(cr, uid, 'line_dr_ids', line_dr_ids, ['amount'], context)
line_cr_ids = self.resolve_2many_commands(cr, uid, 'line_cr_ids', line_cr_ids, ['amount'], context)
#compute the field is_multi_currency that is used to hide/display options linked to secondary currency on the voucher
is_multi_currency = False
#loop on the voucher lines to see if one of these has a secondary currency. If yes, we need to see the options
for voucher_line in line_dr_ids+line_cr_ids:
line_id = voucher_line.get('id') and self.pool.get('account.voucher.line').browse(cr, uid, voucher_line['id'], context=context).move_line_id.id or voucher_line.get('move_line_id')
if line_id and self.pool.get('account.move.line').browse(cr, uid, line_id, context=context).currency_id:
is_multi_currency = True
break
return {'value': {'writeoff_amount': self._compute_writeoff_amount(cr, uid, line_dr_ids, line_cr_ids, amount, type), 'is_multi_currency': is_multi_currency}}
def _get_journal_currency(self, cr, uid, ids, name, args, context=None):
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
res[voucher.id] = voucher.journal_id.currency and voucher.journal_id.currency.id or voucher.company_id.currency_id.id
return res
def _get_writeoff_amount(self, cr, uid, ids, name, args, context=None):
if not ids: return {}
currency_obj = self.pool.get('res.currency')
res = {}
debit = credit = 0.0
for voucher in self.browse(cr, uid, ids, context=context):
sign = voucher.type == 'payment' and -1 or 1
for l in voucher.line_dr_ids:
debit += l.amount
for l in voucher.line_cr_ids:
credit += l.amount
currency = voucher.currency_id or voucher.company_id.currency_id
res[voucher.id] = currency_obj.round(cr, uid, currency, voucher.amount - sign * (credit - debit))
return res
def _paid_amount_in_company_currency(self, cr, uid, ids, name, args, context=None):
if context is None:
context = {}
res = {}
ctx = context.copy()
for v in self.browse(cr, uid, ids, context=context):
ctx.update({'date': v.date})
#make a new call to browse in order to have the right date in the context, to get the right currency rate
voucher = self.browse(cr, uid, v.id, context=ctx)
ctx.update({
'voucher_special_currency': voucher.payment_rate_currency_id and voucher.payment_rate_currency_id.id or False,
'voucher_special_currency_rate': voucher.currency_id.rate * voucher.payment_rate,})
res[voucher.id] = self.pool.get('res.currency').compute(cr, uid, voucher.currency_id.id, voucher.company_id.currency_id.id, voucher.amount, context=ctx)
return res
def _get_currency_help_label(self, cr, uid, currency_id, payment_rate, payment_rate_currency_id, context=None):
"""
This function builds a string to help the users to understand the behavior of the payment rate fields they can specify on the voucher.
This string is only used to improve the usability in the voucher form view and has no other effect.
:param currency_id: the voucher currency
:type currency_id: integer
:param payment_rate: the value of the payment_rate field of the voucher
:type payment_rate: float
:param payment_rate_currency_id: the value of the payment_rate_currency_id field of the voucher
:type payment_rate_currency_id: integer
        :return: translated string giving a tip on the effect of the currently specified payment rate
:rtype: str
"""
rml_parser = report_sxw.rml_parse(cr, uid, 'currency_help_label', context=context)
currency_pool = self.pool.get('res.currency')
currency_str = payment_rate_str = ''
if currency_id:
currency_str = rml_parser.formatLang(1, currency_obj=currency_pool.browse(cr, uid, currency_id, context=context))
if payment_rate_currency_id:
payment_rate_str = rml_parser.formatLang(payment_rate, currency_obj=currency_pool.browse(cr, uid, payment_rate_currency_id, context=context))
currency_help_label = _('At the operation date, the exchange rate was\n%s = %s') % (currency_str, payment_rate_str)
return currency_help_label
def _fnct_currency_help_label(self, cr, uid, ids, name, args, context=None):
res = {}
for voucher in self.browse(cr, uid, ids, context=context):
res[voucher.id] = self._get_currency_help_label(cr, uid, voucher.currency_id.id, voucher.payment_rate, voucher.payment_rate_currency_id.id, context=context)
return res
_name = 'account.voucher'
_description = 'Accounting Voucher'
_inherit = ['mail.thread']
_order = "date desc, id desc"
# _rec_name = 'number'
_track = {
'state': {
'account_voucher.mt_voucher_state_change': lambda self, cr, uid, obj, ctx=None: True,
},
}
_columns = {
'type':fields.selection([
('sale','Sale'),
('purchase','Purchase'),
('payment','Payment'),
('receipt','Receipt'),
],'Default Type', readonly=True, states={'draft':[('readonly',False)]}),
'name':fields.char('Memo', readonly=True, states={'draft':[('readonly',False)]}),
'date':fields.date('Date', readonly=True, select=True, states={'draft':[('readonly',False)]},
help="Effective date for accounting entries", copy=False),
'journal_id':fields.many2one('account.journal', 'Journal', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'account_id':fields.many2one('account.account', 'Account', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'line_ids':fields.one2many('account.voucher.line', 'voucher_id', 'Voucher Lines',
readonly=True, copy=True,
states={'draft':[('readonly',False)]}),
'line_cr_ids':fields.one2many('account.voucher.line','voucher_id','Credits',
domain=[('type','=','cr')], context={'default_type':'cr'}, readonly=True, states={'draft':[('readonly',False)]}),
'line_dr_ids':fields.one2many('account.voucher.line','voucher_id','Debits',
domain=[('type','=','dr')], context={'default_type':'dr'}, readonly=True, states={'draft':[('readonly',False)]}),
'period_id': fields.many2one('account.period', 'Period', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'narration':fields.text('Notes', readonly=True, states={'draft':[('readonly',False)]}),
'currency_id': fields.function(_get_journal_currency, type='many2one', relation='res.currency', string='Currency', readonly=True, required=True),
'company_id': fields.many2one('res.company', 'Company', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'state':fields.selection(
[('draft','Draft'),
('cancel','Cancelled'),
('proforma','Pro-forma'),
('posted','Posted')
], 'Status', readonly=True, track_visibility='onchange', copy=False,
            help=' * The \'Draft\' status is used when a user is encoding a new and unconfirmed voucher. \
                        \n* The \'Pro-forma\' status is used when the voucher does not yet have a voucher number. \
                        \n* The \'Posted\' status is used when the user creates the voucher; a voucher number is generated and the voucher entries are created in the accounts. \
                        \n* The \'Cancelled\' status is used when the user cancels the voucher.'),
'amount': fields.float('Total', digits_compute=dp.get_precision('Account'), required=True, readonly=True, states={'draft':[('readonly',False)]}),
'tax_amount':fields.float('Tax Amount', digits_compute=dp.get_precision('Account'), readonly=True),
'reference': fields.char('Ref #', readonly=True, states={'draft':[('readonly',False)]},
help="Transaction reference number.", copy=False),
'number': fields.char('Number', readonly=True, copy=False),
'move_id':fields.many2one('account.move', 'Account Entry', copy=False),
'move_ids': fields.related('move_id','line_id', type='one2many', relation='account.move.line', string='Journal Items', readonly=True),
'partner_id':fields.many2one('res.partner', 'Partner', change_default=1, readonly=True, states={'draft':[('readonly',False)]}),
'audit': fields.related('move_id','to_check', type='boolean', help='Check this box if you are unsure of that journal entry and if you want to note it as \'to be reviewed\' by an accounting expert.', relation='account.move', string='To Review'),
'paid': fields.function(_check_paid, string='Paid', type='boolean', help="The Voucher has been totally paid."),
'pay_now':fields.selection([
('pay_now','Pay Directly'),
('pay_later','Pay Later or Group Funds'),
],'Payment', select=True, readonly=True, states={'draft':[('readonly',False)]}),
'tax_id': fields.many2one('account.tax', 'Tax', readonly=True, states={'draft':[('readonly',False)]}, domain=[('price_include','=', False)], help="Only for tax excluded from price"),
'pre_line':fields.boolean('Previous Payments ?', required=False),
'date_due': fields.date('Due Date', readonly=True, select=True, states={'draft':[('readonly',False)]}),
'payment_option':fields.selection([
('without_writeoff', 'Keep Open'),
('with_writeoff', 'Reconcile Payment Balance'),
            ], 'Payment Difference', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="This field helps you choose what to do with any difference between the paid amount and the sum of allocated amounts. You can either keep this difference open on the partner's account, or reconcile it with the payment(s)"),
'writeoff_acc_id': fields.many2one('account.account', 'Counterpart Account', readonly=True, states={'draft': [('readonly', False)]}),
'comment': fields.char('Counterpart Comment', required=True, readonly=True, states={'draft': [('readonly', False)]}),
'analytic_id': fields.many2one('account.analytic.account','Write-Off Analytic Account', readonly=True, states={'draft': [('readonly', False)]}),
'writeoff_amount': fields.function(_get_writeoff_amount, string='Difference Amount', type='float', readonly=True, help="Computed as the difference between the amount stated in the voucher and the sum of allocation on the voucher lines."),
'payment_rate_currency_id': fields.many2one('res.currency', 'Payment Rate Currency', required=True, readonly=True, states={'draft':[('readonly',False)]}),
'payment_rate': fields.float('Exchange Rate', digits=(12,6), required=True, readonly=True, states={'draft': [('readonly', False)]},
help='The specific rate that will be used, in this voucher, between the selected currency (in \'Payment Rate Currency\' field) and the voucher currency.'),
'paid_amount_in_company_currency': fields.function(_paid_amount_in_company_currency, string='Paid Amount in Company Currency', type='float', readonly=True),
'is_multi_currency': fields.boolean('Multi Currency Voucher', help='Field for internal purposes only that indicates whether the voucher is a multi-currency one or not'),
'currency_help_label': fields.function(_fnct_currency_help_label, type='text', string="Helping Sentence", help="This sentence helps you to know how to specify the payment rate by giving you the direct effect it has"),
}
_defaults = {
'period_id': _get_period,
'partner_id': _get_partner,
'journal_id':_get_journal,
'currency_id': _get_currency,
'reference': _get_reference,
'narration':_get_narration,
'amount': _get_amount,
'type':_get_type,
'state': 'draft',
'pay_now': 'pay_now',
'name': '',
'date': lambda *a: time.strftime('%Y-%m-%d'),
'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.voucher',context=c),
'tax_id': _get_tax,
'payment_option': 'without_writeoff',
'comment': _('Write-Off'),
'payment_rate': 1.0,
'payment_rate_currency_id': _get_payment_rate_currency,
}
def compute_tax(self, cr, uid, ids, context=None):
tax_pool = self.pool.get('account.tax')
partner_pool = self.pool.get('res.partner')
position_pool = self.pool.get('account.fiscal.position')
voucher_line_pool = self.pool.get('account.voucher.line')
voucher_pool = self.pool.get('account.voucher')
if context is None: context = {}
for voucher in voucher_pool.browse(cr, uid, ids, context=context):
voucher_amount = 0.0
for line in voucher.line_ids:
voucher_amount += line.untax_amount or line.amount
line.amount = line.untax_amount or line.amount
voucher_line_pool.write(cr, uid, [line.id], {'amount':line.amount, 'untax_amount':line.untax_amount})
if not voucher.tax_id:
self.write(cr, uid, [voucher.id], {'amount':voucher_amount, 'tax_amount':0.0})
continue
tax = [tax_pool.browse(cr, uid, voucher.tax_id.id, context=context)]
partner = partner_pool.browse(cr, uid, voucher.partner_id.id, context=context) or False
taxes = position_pool.map_tax(cr, uid, partner and partner.property_account_position or False, tax)
tax = tax_pool.browse(cr, uid, taxes, context=context)
total = voucher_amount
total_tax = 0.0
if not tax[0].price_include:
for line in voucher.line_ids:
for tax_line in tax_pool.compute_all(cr, uid, tax, line.amount, 1).get('taxes', []):
total_tax += tax_line.get('amount', 0.0)
total += total_tax
else:
for line in voucher.line_ids:
line_total = 0.0
line_tax = 0.0
for tax_line in tax_pool.compute_all(cr, uid, tax, line.untax_amount or line.amount, 1).get('taxes', []):
line_tax += tax_line.get('amount', 0.0)
line_total += tax_line.get('price_unit')
total_tax += line_tax
untax_amount = line.untax_amount or line.amount
voucher_line_pool.write(cr, uid, [line.id], {'amount':line_total, 'untax_amount':untax_amount})
self.write(cr, uid, [voucher.id], {'amount':total, 'tax_amount':total_tax})
return True
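# Illustrative arithmetic (an assumption, not from the original module): with two
# voucher lines of 100.0 and 50.0 and a 10% tax that is *not* included in the price,
# compute_tax() yields tax_amount = 10.0 + 5.0 = 15.0 and amount = 150.0 + 15.0 = 165.0.
# With a price-included tax, the per-line amounts are instead rewritten from the
# untaxed amounts returned by compute_all().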
def onchange_price(self, cr, uid, ids, line_ids, tax_id, partner_id=False, context=None):
context = context or {}
tax_pool = self.pool.get('account.tax')
partner_pool = self.pool.get('res.partner')
position_pool = self.pool.get('account.fiscal.position')
if not line_ids:
line_ids = []
res = {
'tax_amount': False,
'amount': False,
}
voucher_total = 0.0
# resolve the list of commands into a list of dicts
line_ids = self.resolve_2many_commands(cr, uid, 'line_ids', line_ids, ['amount'], context)
total_tax = 0.0
for line in line_ids:
line_amount = 0.0
line_amount = line.get('amount',0.0)
if tax_id:
tax = [tax_pool.browse(cr, uid, tax_id, context=context)]
if partner_id:
partner = partner_pool.browse(cr, uid, partner_id, context=context) or False
taxes = position_pool.map_tax(cr, uid, partner and partner.property_account_position or False, tax)
tax = tax_pool.browse(cr, uid, taxes, context=context)
if not tax[0].price_include:
for tax_line in tax_pool.compute_all(cr, uid, tax, line_amount, 1).get('taxes', []):
total_tax += tax_line.get('amount')
voucher_total += line_amount
total = voucher_total + total_tax
res.update({
'amount': total or voucher_total,
'tax_amount': total_tax
})
return {
'value': res
}
def onchange_term_id(self, cr, uid, ids, term_id, amount):
term_pool = self.pool.get('account.payment.term')
terms = False
due_date = False
default = {'date_due':False}
if term_id and amount:
terms = term_pool.compute(cr, uid, term_id, amount)
if terms:
due_date = terms[-1][0]
default.update({
'date_due':due_date
})
return {'value':default}
def onchange_journal_voucher(self, cr, uid, ids, line_ids=False, tax_id=False, price=0.0, partner_id=False, journal_id=False, ttype=False, company_id=False, context=None):
"""price
Returns a dict that contains new values and context
@param partner_id: latest value from user input for field partner_id
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
default = {
'value':{},
}
if not partner_id or not journal_id:
return default
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
journal = journal_pool.browse(cr, uid, journal_id, context=context)
partner = partner_pool.browse(cr, uid, partner_id, context=context)
account_id = False
tr_type = False
if journal.type in ('sale','sale_refund'):
account_id = partner.property_account_receivable.id
tr_type = 'sale'
elif journal.type in ('purchase', 'purchase_refund','expense'):
account_id = partner.property_account_payable.id
tr_type = 'purchase'
else:
if not journal.default_credit_account_id or not journal.default_debit_account_id:
raise osv.except_osv(_('Error!'), _('Please define default credit/debit accounts on the journal "%s".') % (journal.name))
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
tr_type = 'receipt'
default['value']['account_id'] = account_id
default['value']['type'] = ttype or tr_type
vals = self.onchange_journal(cr, uid, ids, journal_id, line_ids, tax_id, partner_id, time.strftime('%Y-%m-%d'), price, ttype, company_id, context)
default['value'].update(vals.get('value'))
return default
def onchange_rate(self, cr, uid, ids, rate, amount, currency_id, payment_rate_currency_id, company_id, context=None):
res = {'value': {'paid_amount_in_company_currency': amount, 'currency_help_label': self._get_currency_help_label(cr, uid, currency_id, rate, payment_rate_currency_id, context=context)}}
if rate and amount and currency_id:
company_currency = self.pool.get('res.company').browse(cr, uid, company_id, context=context).currency_id
#context should contain the date, the payment currency and the payment rate specified on the voucher
amount_in_company_currency = self.pool.get('res.currency').compute(cr, uid, currency_id, company_currency.id, amount, context=context)
res['value']['paid_amount_in_company_currency'] = amount_in_company_currency
return res
def onchange_amount(self, cr, uid, ids, amount, rate, partner_id, journal_id, currency_id, ttype, date, payment_rate_currency_id, company_id, context=None):
if context is None:
context = {}
ctx = context.copy()
ctx.update({'date': date})
#read the voucher rate with the right date in the context
currency_id = currency_id or self.pool.get('res.company').browse(cr, uid, company_id, context=ctx).currency_id.id
voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate']
ctx.update({
'voucher_special_currency': payment_rate_currency_id,
'voucher_special_currency_rate': rate * voucher_rate})
res = self.recompute_voucher_lines(cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context=ctx)
vals = self.onchange_rate(cr, uid, ids, rate, amount, currency_id, payment_rate_currency_id, company_id, context=ctx)
for key in vals.keys():
res[key].update(vals[key])
return res
def recompute_payment_rate(self, cr, uid, ids, vals, currency_id, date, ttype, journal_id, amount, context=None):
if context is None:
context = {}
#on change of the journal, we also need to set the default value for payment_rate and payment_rate_currency_id
currency_obj = self.pool.get('res.currency')
journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context)
company_id = journal.company_id.id
payment_rate = 1.0
currency_id = currency_id or journal.company_id.currency_id.id
payment_rate_currency_id = currency_id
ctx = context.copy()
ctx.update({'date': date})
o2m_to_loop = False
if ttype == 'receipt':
o2m_to_loop = 'line_cr_ids'
elif ttype == 'payment':
o2m_to_loop = 'line_dr_ids'
if o2m_to_loop and 'value' in vals and o2m_to_loop in vals['value']:
for voucher_line in vals['value'][o2m_to_loop]:
if not isinstance(voucher_line, dict):
continue
if voucher_line['currency_id'] != currency_id:
# we take as default value for the payment_rate_currency_id, the currency of the first invoice that
# is not in the voucher currency
payment_rate_currency_id = voucher_line['currency_id']
tmp = currency_obj.browse(cr, uid, payment_rate_currency_id, context=ctx).rate
payment_rate = tmp / currency_obj.browse(cr, uid, currency_id, context=ctx).rate
break
vals['value'].update({
'payment_rate': payment_rate,
'currency_id': currency_id,
'payment_rate_currency_id': payment_rate_currency_id
})
#read the voucher rate with the right date in the context
voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate']
ctx.update({
'voucher_special_currency_rate': payment_rate * voucher_rate,
'voucher_special_currency': payment_rate_currency_id})
res = self.onchange_rate(cr, uid, ids, payment_rate, amount, currency_id, payment_rate_currency_id, company_id, context=ctx)
for key in res.keys():
vals[key].update(res[key])
return vals
def basic_onchange_partner(self, cr, uid, ids, partner_id, journal_id, ttype, context=None):
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
res = {'value': {'account_id': False}}
if not partner_id or not journal_id:
return res
journal = journal_pool.browse(cr, uid, journal_id, context=context)
partner = partner_pool.browse(cr, uid, partner_id, context=context)
account_id = False
if journal.type in ('sale','sale_refund'):
account_id = partner.property_account_receivable.id
elif journal.type in ('purchase', 'purchase_refund','expense'):
account_id = partner.property_account_payable.id
else:
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
res['value']['account_id'] = account_id
return res
def onchange_partner_id(self, cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context=None):
if not journal_id:
return {}
if context is None:
context = {}
#TODO: comment me and use me directly in the sales/purchases views
res = self.basic_onchange_partner(cr, uid, ids, partner_id, journal_id, ttype, context=context)
if ttype in ['sale', 'purchase']:
return res
ctx = context.copy()
# not passing the payment_rate currency and the payment_rate in the context but it's ok because they are reset in recompute_payment_rate
ctx.update({'date': date})
vals = self.recompute_voucher_lines(cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context=ctx)
vals2 = self.recompute_payment_rate(cr, uid, ids, vals, currency_id, date, ttype, journal_id, amount, context=context)
for key in vals.keys():
res[key].update(vals[key])
for key in vals2.keys():
res[key].update(vals2[key])
#TODO: can probably be removed now
#TODO: onchange_partner_id() should not return [pre_line, line_dr_ids, payment_rate...] for type sale, and not
# [pre_line, line_cr_ids, payment_rate...] for type purchase.
# We should definitely split the account.voucher object in two and make distinct on_change functions. In the
# meanwhile, the lines below must be there because the fields aren't present in the view, which crashes if the
# onchange returns a value for them
if ttype == 'sale':
del(res['value']['line_dr_ids'])
del(res['value']['pre_line'])
del(res['value']['payment_rate'])
elif ttype == 'purchase':
del(res['value']['line_cr_ids'])
del(res['value']['pre_line'])
del(res['value']['payment_rate'])
return res
def recompute_voucher_lines(self, cr, uid, ids, partner_id, journal_id, price, currency_id, ttype, date, context=None):
"""
Returns a dict that contains new values and context
@param partner_id: latest value from user input for field partner_id
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
def _remove_noise_in_o2m():
"""if the line is partially reconciled, then we must pay attention to display it only once and
in the good o2m.
This function returns True if the line is considered as noise and should not be displayed
"""
if line.reconcile_partial_id:
if currency_id == line.currency_id.id:
if line.amount_residual_currency <= 0:
return True
else:
if line.amount_residual <= 0:
return True
return False
if context is None:
context = {}
context_multi_currency = context.copy()
currency_pool = self.pool.get('res.currency')
move_line_pool = self.pool.get('account.move.line')
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
line_pool = self.pool.get('account.voucher.line')
#set default values
default = {
'value': {'line_dr_ids': [], 'line_cr_ids': [], 'pre_line': False},
}
# drop existing lines
line_ids = ids and line_pool.search(cr, uid, [('voucher_id', '=', ids[0])])
for line in line_pool.browse(cr, uid, line_ids, context=context):
if line.type == 'cr':
default['value']['line_cr_ids'].append((2, line.id))
else:
default['value']['line_dr_ids'].append((2, line.id))
if not partner_id or not journal_id:
return default
journal = journal_pool.browse(cr, uid, journal_id, context=context)
partner = partner_pool.browse(cr, uid, partner_id, context=context)
currency_id = currency_id or journal.company_id.currency_id.id
total_credit = 0.0
total_debit = 0.0
account_type = None
if context.get('account_id'):
account_type = self.pool['account.account'].browse(cr, uid, context['account_id'], context=context).type
if ttype == 'payment':
if not account_type:
account_type = 'payable'
total_debit = price or 0.0
else:
total_credit = price or 0.0
if not account_type:
account_type = 'receivable'
if not context.get('move_line_ids', False):
ids = move_line_pool.search(cr, uid, [('state','=','valid'), ('account_id.type', '=', account_type), ('reconcile_id', '=', False), ('partner_id', '=', partner_id)], context=context)
else:
ids = context['move_line_ids']
invoice_id = context.get('invoice_id', False)
company_currency = journal.company_id.currency_id.id
move_lines_found = []
#order the lines oldest first
ids.reverse()
account_move_lines = move_line_pool.browse(cr, uid, ids, context=context)
#compute the total debit/credit and look for a matching open amount or invoice
for line in account_move_lines:
if _remove_noise_in_o2m():
continue
if invoice_id:
if line.invoice.id == invoice_id:
#if the invoice linked to the voucher line is equal to the invoice_id in context
#then we assign the amount on that line, whatever the other voucher lines
move_lines_found.append(line.id)
elif currency_id == company_currency:
#otherwise the treatment is the same but with other field names
if line.amount_residual == price:
#if the residual amount is equal to the voucher amount, we assign it to that voucher
#line, whatever the other voucher lines
move_lines_found.append(line.id)
break
#otherwise we will split the voucher amount over each line (oldest first)
total_credit += line.credit or 0.0
total_debit += line.debit or 0.0
elif currency_id == line.currency_id.id:
if line.amount_residual_currency == price:
move_lines_found.append(line.id)
break
total_credit += line.credit and line.amount_currency or 0.0
total_debit += line.debit and line.amount_currency or 0.0
remaining_amount = price
#voucher line creation
for line in account_move_lines:
if _remove_noise_in_o2m():
continue
if line.currency_id and currency_id == line.currency_id.id:
amount_original = abs(line.amount_currency)
amount_unreconciled = abs(line.amount_residual_currency)
else:
#always use the amount booked in the company currency as the basis of the conversion into the voucher currency
amount_original = currency_pool.compute(cr, uid, company_currency, currency_id, line.credit or line.debit or 0.0, context=context_multi_currency)
amount_unreconciled = currency_pool.compute(cr, uid, company_currency, currency_id, abs(line.amount_residual), context=context_multi_currency)
line_currency_id = line.currency_id and line.currency_id.id or company_currency
rs = {
'name':line.move_id.name,
'type': line.credit and 'dr' or 'cr',
'move_line_id':line.id,
'account_id':line.account_id.id,
'amount_original': amount_original,
'amount': (line.id in move_lines_found) and min(abs(remaining_amount), amount_unreconciled) or 0.0,
'date_original':line.date,
'date_due':line.date_maturity,
'amount_unreconciled': amount_unreconciled,
'currency_id': line_currency_id,
}
remaining_amount -= rs['amount']
#in case a corresponding move_line hasn't been found, we now try to assign the voucher amount
#on existing invoices: we split the voucher amount oldest first, but only for lines in the same currency
if not move_lines_found:
if currency_id == line_currency_id:
if line.credit:
amount = min(amount_unreconciled, abs(total_debit))
rs['amount'] = amount
total_debit -= amount
else:
amount = min(amount_unreconciled, abs(total_credit))
rs['amount'] = amount
total_credit -= amount
if rs['amount_unreconciled'] == rs['amount']:
rs['reconcile'] = True
if rs['type'] == 'cr':
default['value']['line_cr_ids'].append(rs)
else:
default['value']['line_dr_ids'].append(rs)
if len(default['value']['line_cr_ids']) > 0:
default['value']['pre_line'] = 1
elif len(default['value']['line_dr_ids']) > 0:
default['value']['pre_line'] = 1
default['value']['writeoff_amount'] = self._compute_writeoff_amount(cr, uid, default['value']['line_dr_ids'], default['value']['line_cr_ids'], price, ttype)
return default
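# Worked example (a sketch, assuming simple figures): for a 'receipt' of 100.0
# against two open customer invoice lines of 60.0 and 70.0 in the voucher currency,
# with no exact match, the proposed line_cr_ids amounts are 60.0 and then
# min(70.0, 100.0 - 60.0) = 40.0, i.e. the amount is split over the oldest lines first.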
def onchange_payment_rate_currency(self, cr, uid, ids, currency_id, payment_rate, payment_rate_currency_id, date, amount, company_id, context=None):
if context is None:
context = {}
res = {'value': {}}
if currency_id:
#set the default payment rate of the voucher and compute the paid amount in company currency
ctx = context.copy()
ctx.update({'date': date})
#read the voucher rate with the right date in the context
voucher_rate = self.pool.get('res.currency').read(cr, uid, [currency_id], ['rate'], context=ctx)[0]['rate']
ctx.update({
'voucher_special_currency_rate': payment_rate * voucher_rate,
'voucher_special_currency': payment_rate_currency_id})
vals = self.onchange_rate(cr, uid, ids, payment_rate, amount, currency_id, payment_rate_currency_id, company_id, context=ctx)
for key in vals.keys():
res[key].update(vals[key])
return res
def onchange_date(self, cr, uid, ids, date, currency_id, payment_rate_currency_id, amount, company_id, context=None):
"""
@param date: latest value from user input for field date
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
if context is None:
context ={}
res = {'value': {}}
#set the period of the voucher
period_pool = self.pool.get('account.period')
currency_obj = self.pool.get('res.currency')
ctx = context.copy()
ctx.update({'company_id': company_id, 'account_period_prefer_normal': True})
voucher_currency_id = currency_id or self.pool.get('res.company').browse(cr, uid, company_id, context=ctx).currency_id.id
pids = period_pool.find(cr, uid, date, context=ctx)
if pids:
res['value'].update({'period_id':pids[0]})
if payment_rate_currency_id:
ctx.update({'date': date})
payment_rate = 1.0
if payment_rate_currency_id != currency_id:
tmp = currency_obj.browse(cr, uid, payment_rate_currency_id, context=ctx).rate
payment_rate = tmp / currency_obj.browse(cr, uid, voucher_currency_id, context=ctx).rate
vals = self.onchange_payment_rate_currency(cr, uid, ids, voucher_currency_id, payment_rate, payment_rate_currency_id, date, amount, company_id, context=context)
vals['value'].update({'payment_rate': payment_rate})
for key in vals.keys():
res[key].update(vals[key])
return res
def onchange_journal(self, cr, uid, ids, journal_id, line_ids, tax_id, partner_id, date, amount, ttype, company_id, context=None):
if context is None:
context = {}
if not journal_id:
return False
journal_pool = self.pool.get('account.journal')
journal = journal_pool.browse(cr, uid, journal_id, context=context)
account_id = journal.default_credit_account_id or journal.default_debit_account_id
tax_id = False
if account_id and account_id.tax_ids:
tax_id = account_id.tax_ids[0].id
vals = {'value':{} }
if ttype in ('sale', 'purchase'):
vals = self.onchange_price(cr, uid, ids, line_ids, tax_id, partner_id, context)
vals['value'].update({'tax_id':tax_id,'amount': amount})
currency_id = False
if journal.currency:
currency_id = journal.currency.id
else:
currency_id = journal.company_id.currency_id.id
period_id = self.pool['account.period'].find(cr, uid, context=dict(context, company_id=company_id))
vals['value'].update({
'currency_id': currency_id,
'payment_rate_currency_id': currency_id,
'period_id' : period_id
})
#in case we want to register the payment directly from an invoice, it's confusing to allow switching the journal
#without seeing that the amount is expressed in the journal currency, and not in the invoice currency. So to avoid
#this common mistake, we simply reset the amount to 0 if the currency is not the invoice currency.
if context.get('payment_expected_currency') and currency_id != context.get('payment_expected_currency'):
vals['value']['amount'] = 0
amount = 0
if partner_id:
res = self.onchange_partner_id(cr, uid, ids, partner_id, journal_id, amount, currency_id, ttype, date, context)
for key in res.keys():
vals[key].update(res[key])
return vals
def onchange_company(self, cr, uid, ids, partner_id, journal_id, currency_id, company_id, context=None):
"""
If the company changes, check that the journal is in the right company.
If not, fetch a new journal.
"""
journal_pool = self.pool['account.journal']
journal = journal_pool.browse(cr, uid, journal_id, context=context)
if journal.company_id.id != company_id:
# can not guess type of journal, better remove it
return {'value': {'journal_id': False}}
return {}
def button_proforma_voucher(self, cr, uid, ids, context=None):
self.signal_workflow(cr, uid, ids, 'proforma_voucher')
return {'type': 'ir.actions.act_window_close'}
def proforma_voucher(self, cr, uid, ids, context=None):
self.action_move_line_create(cr, uid, ids, context=context)
return True
def action_cancel_draft(self, cr, uid, ids, context=None):
self.create_workflow(cr, uid, ids)
self.write(cr, uid, ids, {'state':'draft'})
return True
def cancel_voucher(self, cr, uid, ids, context=None):
reconcile_pool = self.pool.get('account.move.reconcile')
move_pool = self.pool.get('account.move')
move_line_pool = self.pool.get('account.move.line')
for voucher in self.browse(cr, uid, ids, context=context):
# refresh to make sure you don't unlink an already removed move
voucher.refresh()
for line in voucher.move_ids:
# refresh to make sure you don't unreconcile an already unreconciled entry
line.refresh()
if line.reconcile_id:
move_lines = [move_line.id for move_line in line.reconcile_id.line_id]
move_lines.remove(line.id)
reconcile_pool.unlink(cr, uid, [line.reconcile_id.id])
if len(move_lines) >= 2:
move_line_pool.reconcile_partial(cr, uid, move_lines, 'auto',context=context)
if line.reconcile_partial_id:
move_lines = [move_line.id for move_line in line.reconcile_partial_id.line_partial_ids]
move_lines.remove(line.id)
reconcile_pool.unlink(cr, uid, [line.reconcile_partial_id.id])
if len(move_lines) >= 2:
move_line_pool.reconcile_partial(cr, uid, move_lines, 'auto',context=context)
if voucher.move_id:
move_pool.button_cancel(cr, uid, [voucher.move_id.id])
move_pool.unlink(cr, uid, [voucher.move_id.id])
res = {
'state':'cancel',
'move_id':False,
}
self.write(cr, uid, ids, res)
return True
def unlink(self, cr, uid, ids, context=None):
for t in self.read(cr, uid, ids, ['state'], context=context):
if t['state'] not in ('draft', 'cancel'):
raise osv.except_osv(_('Invalid Action!'), _('Cannot delete voucher(s) which are already opened or paid.'))
return super(account_voucher, self).unlink(cr, uid, ids, context=context)
def onchange_payment(self, cr, uid, ids, pay_now, journal_id, partner_id, ttype='sale'):
res = {}
if not partner_id:
return res
res = {}
partner_pool = self.pool.get('res.partner')
journal_pool = self.pool.get('account.journal')
if pay_now == 'pay_later':
partner = partner_pool.browse(cr, uid, partner_id)
journal = journal_pool.browse(cr, uid, journal_id)
if journal.type in ('sale','sale_refund'):
account_id = partner.property_account_receivable.id
elif journal.type in ('purchase', 'purchase_refund','expense'):
account_id = partner.property_account_payable.id
else:
account_id = journal.default_credit_account_id.id or journal.default_debit_account_id.id
if account_id:
res['account_id'] = account_id
return {'value':res}
def _sel_context(self, cr, uid, voucher_id, context=None):
"""
Select the context to use, depending on whether the voucher needs to be multi-currency or not.
:param voucher_id: Id of the actual voucher
:return: The returned context will be the same as given in parameter if the voucher currency is the same
as the company currency, otherwise it's a copy of the parameter with an extra key 'date' containing
the date of the voucher.
:rtype: dict
"""
company_currency = self._get_company_currency(cr, uid, voucher_id, context)
current_currency = self._get_current_currency(cr, uid, voucher_id, context)
if current_currency <> company_currency:
context_multi_currency = context.copy()
voucher = self.pool.get('account.voucher').browse(cr, uid, voucher_id, context)
context_multi_currency.update({'date': voucher.date})
return context_multi_currency
return context
def first_move_line_get(self, cr, uid, voucher_id, move_id, company_currency, current_currency, context=None):
'''
Return a dict to be used to create the first account move line of the given voucher.
:param voucher_id: Id of the voucher for which we are creating the account_move.
:param move_id: Id of account move where this line will be added.
:param company_currency: id of currency of the company to which the voucher belongs
:param current_currency: id of currency of the voucher
:return: mapping between fieldname and value of account move line to create
:rtype: dict
'''
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
debit = credit = 0.0
# TODO: is there any other alternative than the voucher type ??
# ANSWER: We can have payment and receipt "In Advance".
# TODO: Make this logic available.
# -for sale and purchase we have it, but for payment and receipt we do not, as based on the bank/cash journal we cannot know whether it is a payment or a receipt
if voucher.type in ('purchase', 'payment'):
credit = voucher.paid_amount_in_company_currency
elif voucher.type in ('sale', 'receipt'):
debit = voucher.paid_amount_in_company_currency
if debit < 0: credit = -debit; debit = 0.0
if credit < 0: debit = -credit; credit = 0.0
sign = debit - credit < 0 and -1 or 1
#set the first line of the voucher
move_line = {
'name': voucher.name or '/',
'debit': debit,
'credit': credit,
'account_id': voucher.account_id.id,
'move_id': move_id,
'journal_id': voucher.journal_id.id,
'period_id': voucher.period_id.id,
'partner_id': voucher.partner_id.id,
'currency_id': company_currency <> current_currency and current_currency or False,
'amount_currency': (sign * abs(voucher.amount) # amount < 0 for refunds
if company_currency != current_currency else 0.0),
'date': voucher.date,
'date_maturity': voucher.date_due
}
return move_line
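# Illustrative example (an assumption, not from the original module): for a
# 'payment' voucher of 100.0, paid_amount_in_company_currency goes to credit, so the
# first move line is credit=100.0 / debit=0.0 on the voucher account; a negative
# amount (a refund) is flipped to the opposite side by the two tests above.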
def account_move_get(self, cr, uid, voucher_id, context=None):
'''
This method prepares the creation of the account move related to the given voucher.
:param voucher_id: Id of voucher for which we are creating account_move.
:return: mapping between fieldname and value of account move to create
:rtype: dict
'''
seq_obj = self.pool.get('ir.sequence')
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
if voucher.number:
name = voucher.number
elif voucher.journal_id.sequence_id:
if not voucher.journal_id.sequence_id.active:
raise osv.except_osv(_('Configuration Error !'),
_('Please activate the sequence of selected journal !'))
c = dict(context)
c.update({'fiscalyear_id': voucher.period_id.fiscalyear_id.id})
name = seq_obj.next_by_id(cr, uid, voucher.journal_id.sequence_id.id, context=c)
else:
raise osv.except_osv(_('Error!'),
_('Please define a sequence on the journal.'))
if not voucher.reference:
ref = name.replace('/','')
else:
ref = voucher.reference
move = {
'name': name,
'journal_id': voucher.journal_id.id,
'narration': voucher.narration,
'date': voucher.date,
'ref': ref,
'period_id': voucher.period_id.id,
}
return move
def _get_exchange_lines(self, cr, uid, line, move_id, amount_residual, company_currency, current_currency, context=None):
'''
Prepare the two lines in company currency due to currency rate difference.
:param line: browse record of the voucher.line for which we want to create currency rate difference accounting
entries
:param move_id: Account move where the move lines will be.
:param amount_residual: Amount to be posted.
:param company_currency: id of currency of the company to which the voucher belongs
:param current_currency: id of currency of the voucher
:return: the account move line and its counterpart to create, depicted as mapping between fieldname and value
:rtype: tuple of dict
'''
if amount_residual > 0:
account_id = line.voucher_id.company_id.expense_currency_exchange_account_id
if not account_id:
model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_form')
msg = _("You should configure the 'Loss Exchange Rate Account' to manage automatically the booking of accounting entries related to differences between exchange rates.")
raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel'))
else:
account_id = line.voucher_id.company_id.income_currency_exchange_account_id
if not account_id:
model, action_id = self.pool['ir.model.data'].get_object_reference(cr, uid, 'account', 'action_account_form')
msg = _("You should configure the 'Gain Exchange Rate Account' to manage automatically the booking of accounting entries related to differences between exchange rates.")
raise openerp.exceptions.RedirectWarning(msg, action_id, _('Go to the configuration panel'))
# Even if the amount_currency is never filled, we need to pass the foreign currency because otherwise
# the receivable/payable account may have a secondary currency, which render this field mandatory
if line.account_id.currency_id:
account_currency_id = line.account_id.currency_id.id
else:
account_currency_id = company_currency <> current_currency and current_currency or False
move_line = {
'journal_id': line.voucher_id.journal_id.id,
'period_id': line.voucher_id.period_id.id,
'name': _('change')+': '+(line.name or '/'),
'account_id': line.account_id.id,
'move_id': move_id,
'partner_id': line.voucher_id.partner_id.id,
'currency_id': account_currency_id,
'amount_currency': 0.0,
'quantity': 1,
'credit': amount_residual > 0 and amount_residual or 0.0,
'debit': amount_residual < 0 and -amount_residual or 0.0,
'date': line.voucher_id.date,
}
move_line_counterpart = {
'journal_id': line.voucher_id.journal_id.id,
'period_id': line.voucher_id.period_id.id,
'name': _('change')+': '+(line.name or '/'),
'account_id': account_id.id,
'move_id': move_id,
'amount_currency': 0.0,
'partner_id': line.voucher_id.partner_id.id,
'currency_id': account_currency_id,
'quantity': 1,
'debit': amount_residual > 0 and amount_residual or 0.0,
'credit': amount_residual < 0 and -amount_residual or 0.0,
'date': line.voucher_id.date,
}
return (move_line, move_line_counterpart)
def _convert_amount(self, cr, uid, amount, voucher_id, context=None):
'''
This function converts the given amount into the company currency. It takes either the rate of the voucher (if the
payment_rate_currency_id is relevant) or the rate encoded in the system.
:param amount: float. The amount to convert
:param voucher: id of the voucher on which we want the conversion
:param context: the context to use for the conversion. It may contain the key 'date' set to the voucher date
field in order to select the right rate to use.
:return: the amount in the currency of the voucher's company
:rtype: float
'''
if context is None:
context = {}
currency_obj = self.pool.get('res.currency')
voucher = self.browse(cr, uid, voucher_id, context=context)
return currency_obj.compute(cr, uid, voucher.currency_id.id, voucher.company_id.currency_id.id, amount, context=context)
def voucher_move_line_create(self, cr, uid, voucher_id, line_total, move_id, company_currency, current_currency, context=None):
'''
Create one account move line, on the given account move, per voucher line where amount is not 0.0.
It returns a tuple with tot_line, which is the total of the difference between debit and credit, and
a list of lists of ids to be reconciled, in this format (total_deb_cred, list_of_lists).
:param voucher_id: Voucher id we are working with
:param line_total: Amount of the first line, which corresponds to the amount we should split among all voucher lines.
:param move_id: Account move where those lines will be joined.
:param company_currency: id of currency of the company to which the voucher belongs
:param current_currency: id of currency of the voucher
:return: Tuple build as (remaining amount not allocated on voucher lines, list of account_move_line created in this method)
:rtype: tuple(float, list of int)
'''
if context is None:
context = {}
move_line_obj = self.pool.get('account.move.line')
currency_obj = self.pool.get('res.currency')
tax_obj = self.pool.get('account.tax')
tot_line = line_total
rec_lst_ids = []
date = self.read(cr, uid, [voucher_id], ['date'], context=context)[0]['date']
ctx = context.copy()
ctx.update({'date': date})
voucher = self.pool.get('account.voucher').browse(cr, uid, voucher_id, context=ctx)
voucher_currency = voucher.journal_id.currency or voucher.company_id.currency_id
ctx.update({
'voucher_special_currency_rate': voucher_currency.rate * voucher.payment_rate ,
'voucher_special_currency': voucher.payment_rate_currency_id and voucher.payment_rate_currency_id.id or False,})
prec = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account')
for line in voucher.line_ids:
#create one move line per voucher line where amount is not 0.0
# AND (second part of the clause) only if the original move line did not have debit = credit = 0 (which is a legal value)
if not line.amount and not (line.move_line_id and not float_compare(line.move_line_id.debit, line.move_line_id.credit, precision_digits=prec) and not float_compare(line.move_line_id.debit, 0.0, precision_digits=prec)):
continue
# convert the amount set on the voucher line into the currency of the voucher's company
# this calls res_curreny.compute() with the right context, so that it will take either the rate on the voucher if it is relevant or will use the default behaviour
amount = self._convert_amount(cr, uid, line.untax_amount or line.amount, voucher.id, context=ctx)
# if the amount encoded in voucher is equal to the amount unreconciled, we need to compute the
# currency rate difference
if line.amount == line.amount_unreconciled:
if not line.move_line_id:
raise osv.except_osv(_('Wrong voucher line'),_("The invoice you are willing to pay is not valid anymore."))
sign = line.type =='dr' and -1 or 1
currency_rate_difference = sign * (line.move_line_id.amount_residual - amount)
else:
currency_rate_difference = 0.0
move_line = {
'journal_id': voucher.journal_id.id,
'period_id': voucher.period_id.id,
'name': line.name or '/',
'account_id': line.account_id.id,
'move_id': move_id,
'partner_id': voucher.partner_id.id,
'currency_id': line.move_line_id and (company_currency <> line.move_line_id.currency_id.id and line.move_line_id.currency_id.id) or False,
'analytic_account_id': line.account_analytic_id and line.account_analytic_id.id or False,
'quantity': 1,
'credit': 0.0,
'debit': 0.0,
'date': voucher.date
}
if amount < 0:
amount = -amount
if line.type == 'dr':
line.type = 'cr'
else:
line.type = 'dr'
if (line.type=='dr'):
tot_line += amount
move_line['debit'] = amount
else:
tot_line -= amount
move_line['credit'] = amount
if voucher.tax_id and voucher.type in ('sale', 'purchase'):
move_line.update({
'account_tax_id': voucher.tax_id.id,
})
if move_line.get('account_tax_id', False):
tax_data = tax_obj.browse(cr, uid, [move_line['account_tax_id']], context=context)[0]
if not (tax_data.base_code_id and tax_data.tax_code_id):
raise osv.except_osv(_('No Account Base Code and Account Tax Code!'),_("You have to configure account base code and account tax code on the '%s' tax!") % (tax_data.name))
# compute the amount in foreign currency
foreign_currency_diff = 0.0
amount_currency = False
if line.move_line_id:
# We want to set it on the account move line if the original line had a foreign currency
if line.move_line_id.currency_id and line.move_line_id.currency_id.id != company_currency:
# we compute the amount in that foreign currency.
if line.move_line_id.currency_id.id == current_currency:
# if the voucher and the voucher line share the same currency, there is no computation to do
sign = (move_line['debit'] - move_line['credit']) < 0 and -1 or 1
amount_currency = sign * (line.amount)
else:
# if the rate is specified on the voucher, it will be used thanks to the special keys in the context
# otherwise we use the rates of the system
amount_currency = currency_obj.compute(cr, uid, company_currency, line.move_line_id.currency_id.id, move_line['debit']-move_line['credit'], context=ctx)
if line.amount == line.amount_unreconciled:
foreign_currency_diff = line.move_line_id.amount_residual_currency - abs(amount_currency)
move_line['amount_currency'] = amount_currency
voucher_line = move_line_obj.create(cr, uid, move_line)
rec_ids = [voucher_line, line.move_line_id.id]
if not currency_obj.is_zero(cr, uid, voucher.company_id.currency_id, currency_rate_difference):
# Exchange rate difference entry in company currency
exch_lines = self._get_exchange_lines(cr, uid, line, move_id, currency_rate_difference, company_currency, current_currency, context=context)
new_id = move_line_obj.create(cr, uid, exch_lines[0],context)
move_line_obj.create(cr, uid, exch_lines[1], context)
rec_ids.append(new_id)
if line.move_line_id and line.move_line_id.currency_id and not currency_obj.is_zero(cr, uid, line.move_line_id.currency_id, foreign_currency_diff):
# Exchange rate difference entry in voucher currency
move_line_foreign_currency = {
'journal_id': line.voucher_id.journal_id.id,
'period_id': line.voucher_id.period_id.id,
'name': _('change')+': '+(line.name or '/'),
'account_id': line.account_id.id,
'move_id': move_id,
'partner_id': line.voucher_id.partner_id.id,
'currency_id': line.move_line_id.currency_id.id,
'amount_currency': -1 * foreign_currency_diff,
'quantity': 1,
'credit': 0.0,
'debit': 0.0,
'date': line.voucher_id.date,
}
new_id = move_line_obj.create(cr, uid, move_line_foreign_currency, context=context)
rec_ids.append(new_id)
if line.move_line_id.id:
rec_lst_ids.append(rec_ids)
return (tot_line, rec_lst_ids)
def writeoff_move_line_get(self, cr, uid, voucher_id, line_total, move_id, name, company_currency, current_currency, context=None):
'''
Return a dict to be used to create the writeoff move line.
:param voucher_id: Id of the voucher for which we are creating the account_move.
:param line_total: Amount remaining to be allocated on lines.
:param move_id: Id of account move where this line will be added.
:param name: Description of account move line.
:param company_currency: id of currency of the company to which the voucher belongs
:param current_currency: id of currency of the voucher
:return: mapping between fieldname and value of account move line to create
:rtype: dict
'''
currency_obj = self.pool.get('res.currency')
move_line = {}
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
current_currency_obj = voucher.currency_id or voucher.journal_id.company_id.currency_id
if not currency_obj.is_zero(cr, uid, current_currency_obj, line_total):
diff = line_total
account_id = False
write_off_name = ''
if voucher.payment_option == 'with_writeoff':
account_id = voucher.writeoff_acc_id.id
write_off_name = voucher.comment
elif voucher.partner_id:
if voucher.type in ('sale', 'receipt'):
account_id = voucher.partner_id.property_account_receivable.id
else:
account_id = voucher.partner_id.property_account_payable.id
else:
# fallback on account of voucher
account_id = voucher.account_id.id
sign = voucher.type == 'payment' and -1 or 1
move_line = {
'name': write_off_name or name,
'account_id': account_id,
'move_id': move_id,
'partner_id': voucher.partner_id.id,
'date': voucher.date,
'credit': diff > 0 and diff or 0.0,
'debit': diff < 0 and -diff or 0.0,
'amount_currency': company_currency <> current_currency and (sign * -1 * voucher.writeoff_amount) or 0.0,
'currency_id': company_currency <> current_currency and current_currency or False,
'analytic_account_id': voucher.analytic_id and voucher.analytic_id.id or False,
}
return move_line
def _get_company_currency(self, cr, uid, voucher_id, context=None):
'''
Get the currency of the actual company.
:param voucher_id: Id of the voucher for which we want to obtain the company currency.
:return: currency id of the company of the voucher
:rtype: int
'''
return self.pool.get('account.voucher').browse(cr,uid,voucher_id,context).journal_id.company_id.currency_id.id
def _get_current_currency(self, cr, uid, voucher_id, context=None):
'''
Get the currency of the voucher.
:param voucher_id: Id of the voucher for which we want to obtain the current currency.
:return: currency id of the voucher
:rtype: int
'''
voucher = self.pool.get('account.voucher').browse(cr,uid,voucher_id,context)
return voucher.currency_id.id or self._get_company_currency(cr,uid,voucher.id,context)
def action_move_line_create(self, cr, uid, ids, context=None):
'''
Confirm the vouchers given in ids and create the journal entries for each of them
'''
if context is None:
context = {}
move_pool = self.pool.get('account.move')
move_line_pool = self.pool.get('account.move.line')
for voucher in self.browse(cr, uid, ids, context=context):
local_context = dict(context, force_company=voucher.journal_id.company_id.id)
if voucher.move_id:
continue
company_currency = self._get_company_currency(cr, uid, voucher.id, context)
current_currency = self._get_current_currency(cr, uid, voucher.id, context)
# we select the context to use accordingly if it's a multicurrency case or not
context = self._sel_context(cr, uid, voucher.id, context)
# But for the operations made by _convert_amount, we always need to give the date in the context
ctx = context.copy()
ctx.update({'date': voucher.date})
# Create the account move record.
move_id = move_pool.create(cr, uid, self.account_move_get(cr, uid, voucher.id, context=context), context=context)
# Get the name of the account_move just created
name = move_pool.browse(cr, uid, move_id, context=context).name
# Create the first line of the voucher
move_line_id = move_line_pool.create(cr, uid, self.first_move_line_get(cr,uid,voucher.id, move_id, company_currency, current_currency, local_context), local_context)
move_line_brw = move_line_pool.browse(cr, uid, move_line_id, context=context)
line_total = move_line_brw.debit - move_line_brw.credit
rec_list_ids = []
if voucher.type == 'sale':
line_total = line_total - self._convert_amount(cr, uid, voucher.tax_amount, voucher.id, context=ctx)
elif voucher.type == 'purchase':
line_total = line_total + self._convert_amount(cr, uid, voucher.tax_amount, voucher.id, context=ctx)
# Create one move line per voucher line where amount is not 0.0
line_total, rec_list_ids = self.voucher_move_line_create(cr, uid, voucher.id, line_total, move_id, company_currency, current_currency, context)
# Create the writeoff line if needed
ml_writeoff = self.writeoff_move_line_get(cr, uid, voucher.id, line_total, move_id, name, company_currency, current_currency, local_context)
if ml_writeoff:
move_line_pool.create(cr, uid, ml_writeoff, local_context)
# We post the voucher.
self.write(cr, uid, [voucher.id], {
'move_id': move_id,
'state': 'posted',
'number': name,
})
if voucher.journal_id.entry_posted:
move_pool.post(cr, uid, [move_id], context={})
# We automatically reconcile the account move lines.
reconcile = False
for rec_ids in rec_list_ids:
if len(rec_ids) >= 2:
reconcile = move_line_pool.reconcile_partial(cr, uid, rec_ids, writeoff_acc_id=voucher.writeoff_acc_id.id, writeoff_period_id=voucher.period_id.id, writeoff_journal_id=voucher.journal_id.id)
return True
class account_voucher_line(osv.osv):
_name = 'account.voucher.line'
_description = 'Voucher Lines'
_order = "move_line_id"
# If the payment is in the same currency as the invoice, we keep the same amount
# Otherwise, we compute from the invoice currency to the payment currency
def _compute_balance(self, cr, uid, ids, name, args, context=None):
currency_pool = self.pool.get('res.currency')
rs_data = {}
for line in self.browse(cr, uid, ids, context=context):
ctx = context.copy()
ctx.update({'date': line.voucher_id.date})
voucher_rate = self.pool.get('res.currency').read(cr, uid, line.voucher_id.currency_id.id, ['rate'], context=ctx)['rate']
ctx.update({
'voucher_special_currency': line.voucher_id.payment_rate_currency_id and line.voucher_id.payment_rate_currency_id.id or False,
'voucher_special_currency_rate': line.voucher_id.payment_rate * voucher_rate})
res = {}
company_currency = line.voucher_id.journal_id.company_id.currency_id.id
voucher_currency = line.voucher_id.currency_id and line.voucher_id.currency_id.id or company_currency
move_line = line.move_line_id or False
if not move_line:
res['amount_original'] = 0.0
res['amount_unreconciled'] = 0.0
elif move_line.currency_id and voucher_currency==move_line.currency_id.id:
res['amount_original'] = abs(move_line.amount_currency)
res['amount_unreconciled'] = abs(move_line.amount_residual_currency)
else:
#always use the amount booked in the company currency as the basis of the conversion into the voucher currency
res['amount_original'] = currency_pool.compute(cr, uid, company_currency, voucher_currency, move_line.credit or move_line.debit or 0.0, context=ctx)
res['amount_unreconciled'] = currency_pool.compute(cr, uid, company_currency, voucher_currency, abs(move_line.amount_residual), context=ctx)
rs_data[line.id] = res
return rs_data
def _currency_id(self, cr, uid, ids, name, args, context=None):
'''
This function returns the currency id of a voucher line. It's either the currency of the
associated move line (if any) or the currency of the voucher or the company currency.
'''
res = {}
for line in self.browse(cr, uid, ids, context=context):
move_line = line.move_line_id
if move_line:
res[line.id] = move_line.currency_id and move_line.currency_id.id or move_line.company_id.currency_id.id
else:
res[line.id] = line.voucher_id.currency_id and line.voucher_id.currency_id.id or line.voucher_id.company_id.currency_id.id
return res
_columns = {
'voucher_id':fields.many2one('account.voucher', 'Voucher', required=1, ondelete='cascade'),
'name':fields.char('Description',),
'account_id':fields.many2one('account.account','Account', required=True),
'partner_id':fields.related('voucher_id', 'partner_id', type='many2one', relation='res.partner', string='Partner'),
'untax_amount':fields.float('Untax Amount'),
'amount':fields.float('Amount', digits_compute=dp.get_precision('Account')),
'reconcile': fields.boolean('Full Reconcile'),
'type':fields.selection([('dr','Debit'),('cr','Credit')], 'Dr/Cr'),
'account_analytic_id': fields.many2one('account.analytic.account', 'Analytic Account'),
'move_line_id': fields.many2one('account.move.line', 'Journal Item', copy=False),
'date_original': fields.related('move_line_id','date', type='date', relation='account.move.line', string='Date', readonly=1),
'date_due': fields.related('move_line_id','date_maturity', type='date', relation='account.move.line', string='Due Date', readonly=1),
'amount_original': fields.function(_compute_balance, multi='dc', type='float', string='Original Amount', store=True, digits_compute=dp.get_precision('Account')),
'amount_unreconciled': fields.function(_compute_balance, multi='dc', type='float', string='Open Balance', store=True, digits_compute=dp.get_precision('Account')),
'company_id': fields.related('voucher_id','company_id', relation='res.company', type='many2one', string='Company', store=True, readonly=True),
'currency_id': fields.function(_currency_id, string='Currency', type='many2one', relation='res.currency', readonly=True),
}
_defaults = {
'name': '',
}
def onchange_reconcile(self, cr, uid, ids, reconcile, amount, amount_unreconciled, context=None):
vals = {'amount': 0.0}
if reconcile:
vals = { 'amount': amount_unreconciled}
return {'value': vals}
def onchange_amount(self, cr, uid, ids, amount, amount_unreconciled, context=None):
vals = {}
if amount:
vals['reconcile'] = (amount == amount_unreconciled)
return {'value': vals}
def onchange_move_line_id(self, cr, user, ids, move_line_id, context=None):
"""
Returns a dict that contains new values and context
@param move_line_id: latest value from user input for field move_line_id
@param args: other arguments
@param context: context arguments, like lang, time zone
@return: Returns a dict which contains new values, and context
"""
res = {}
move_line_pool = self.pool.get('account.move.line')
if move_line_id:
move_line = move_line_pool.browse(cr, user, move_line_id, context=context)
if move_line.credit:
ttype = 'dr'
else:
ttype = 'cr'
res.update({
'account_id': move_line.account_id.id,
'type': ttype,
'currency_id': move_line.currency_id and move_line.currency_id.id or move_line.company_id.currency_id.id,
})
return {
'value':res,
}
def default_get(self, cr, user, fields_list, context=None):
"""
Returns default values for fields
@param fields_list: list of fields, for which default values are required to be read
@param context: context arguments, like lang, time zone
@return: Returns a dict that contains default values for fields
"""
if context is None:
context = {}
journal_id = context.get('journal_id', False)
partner_id = context.get('partner_id', False)
journal_pool = self.pool.get('account.journal')
partner_pool = self.pool.get('res.partner')
values = super(account_voucher_line, self).default_get(cr, user, fields_list, context=context)
if (not journal_id) or ('account_id' not in fields_list):
return values
journal = journal_pool.browse(cr, user, journal_id, context=context)
account_id = False
ttype = 'cr'
if journal.type in ('sale', 'sale_refund'):
account_id = journal.default_credit_account_id and journal.default_credit_account_id.id or False
ttype = 'cr'
elif journal.type in ('purchase', 'expense', 'purchase_refund'):
account_id = journal.default_debit_account_id and journal.default_debit_account_id.id or False
ttype = 'dr'
elif partner_id:
partner = partner_pool.browse(cr, user, partner_id, context=context)
if context.get('type') == 'payment':
ttype = 'dr'
account_id = partner.property_account_payable.id
elif context.get('type') == 'receipt':
account_id = partner.property_account_receivable.id
values.update({
'account_id':account_id,
'type':ttype
})
return values
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -955,339,748,974,411,000 | 53.111746 | 398 | 0.598773 | false |
braddockcg/ocropy | ocrolib/psegutils.py | 10 | 7620 | from toplevel import *
from pylab import *
from scipy.ndimage import filters,interpolation
import sl,morph
def B(a):
if a.dtype==dtype('B'): return a
return array(a,'B')
class record:
def __init__(self,**kw): self.__dict__.update(kw)
def blackout_images(image,ticlass):
"""Takes a page image and a ticlass text/image classification image and replaces
all regions tagged as 'image' with rectangles in the page image. The page image
is modified in place. All images are iulib arrays."""
rgb = ocropy.intarray()
ticlass.textImageProbabilities(rgb,image)
r = ocropy.bytearray()
g = ocropy.bytearray()
b = ocropy.bytearray()
ocropy.unpack_rgb(r,g,b,rgb)
components = ocropy.intarray()
components.copy(g)
n = ocropy.label_components(components)
print "[note] number of image regions",n
tirects = ocropy.rectarray()
ocropy.bounding_boxes(tirects,components)
for i in range(1,tirects.length()):
r = tirects.at(i)
ocropy.fill_rect(image,r,0)
r.pad_by(-5,-5)
ocropy.fill_rect(image,r,255)
def binary_objects(binary):
labels,n = morph.label(binary)
objects = morph.find_objects(labels)
return objects
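# estimate_scale() below scores each connected component with sqrt(area) (skipping
# components whose pixels were already scored) and returns the median of the recorded
# values lying between 3 and 100 pixels; this is used as the typical character scale
# of the page.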
def estimate_scale(binary):
objects = binary_objects(binary)
bysize = sorted(objects,key=sl.area)
scalemap = zeros(binary.shape)
for o in bysize:
if amax(scalemap[o])>0: continue
scalemap[o] = sl.area(o)**0.5
scale = median(scalemap[(scalemap>3)&(scalemap<100)])
return scale
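# compute_boxmap() below keeps only the components whose sqrt(area) falls between
# threshold[0]*scale and threshold[1]*scale (by default 0.5x to 4x the estimated
# character scale) and marks their bounding slices with 1 in the returned map.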
def compute_boxmap(binary,scale,threshold=(.5,4),dtype='i'):
objects = binary_objects(binary)
bysize = sorted(objects,key=sl.area)
boxmap = zeros(binary.shape,dtype)
for o in bysize:
if sl.area(o)**.5<threshold[0]*scale: continue
if sl.area(o)**.5>threshold[1]*scale: continue
boxmap[o] = 1
return boxmap
def compute_lines(segmentation,scale):
"""Given a line segmentation map, computes a list
of tuples consisting of 2D slices and masked images."""
lobjects = morph.find_objects(segmentation)
lines = []
for i,o in enumerate(lobjects):
if o is None: continue
if sl.dim1(o)<2*scale or sl.dim0(o)<scale: continue
mask = (segmentation[o]==i+1)
if amax(mask)==0: continue
result = record()
result.label = i+1
result.bounds = o
result.mask = mask
lines.append(result)
return lines
def pad_image(image,d,cval=inf):
result = ones(array(image.shape)+2*d)
result[:,:] = amax(image) if cval==inf else cval
result[d:-d,d:-d] = image
return result
@checks(ARANK(2),int,int,int,int,mode=str,cval=True,_=GRAYSCALE)
def extract(image,y0,x0,y1,x1,mode='nearest',cval=0):
h,w = image.shape
ch,cw = y1-y0,x1-x0
y,x = clip(y0,0,max(h-ch,0)),clip(x0,0,max(w-cw, 0))
sub = image[y:y+ch,x:x+cw]
# print "extract",image.dtype,image.shape
try:
r = interpolation.shift(sub,(y-y0,x-x0),mode=mode,cval=cval,order=0)
if cw > w or ch > h:
pady0, padx0 = max(-y0, 0), max(-x0, 0)
r = interpolation.affine_transform(r, eye(2), offset=(pady0, padx0), cval=1, output_shape=(ch, cw))
return r
except RuntimeError:
# workaround for platform differences between 32bit and 64bit
# scipy.ndimage
dtype = sub.dtype
sub = array(sub,dtype='float64')
sub = interpolation.shift(sub,(y-y0,x-x0),mode=mode,cval=cval,order=0)
sub = array(sub,dtype=dtype)
return sub
@checks(ARANK(2),True,pad=int,expand=int,_=GRAYSCALE)
def extract_masked(image,linedesc,pad=5,expand=0):
"""Extract a subimage from the image using the line descriptor.
A line descriptor consists of bounds and a mask."""
y0,x0,y1,x1 = [int(x) for x in [linedesc.bounds[0].start,linedesc.bounds[1].start, \
linedesc.bounds[0].stop,linedesc.bounds[1].stop]]
if pad>0:
mask = pad_image(linedesc.mask,pad,cval=0)
else:
mask = linedesc.mask
line = extract(image,y0-pad,x0-pad,y1+pad,x1+pad)
if expand>0:
mask = filters.maximum_filter(mask,(expand,expand))
line = where(mask,line,amax(line))
return line
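# Typical use (a sketch, not part of the original module; `image` is a grayscale
# page and `segmentation` a per-line label image, e.g. produced by the page
# segmenter):
# scale = estimate_scale(segmentation > 0)
# lines = compute_lines(segmentation, scale)
# line_images = [extract_masked(image, l, pad=5) for l in lines]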
def reading_order(lines,highlight=None,debug=0):
"""Given the list of lines (a list of 2D slices), computes
the partial reading order. The output is a binary 2D array
such that order[i,j] is true if line i comes before line j
in reading order."""
order = zeros((len(lines),len(lines)),'B')
def x_overlaps(u,v):
return u[1].start<v[1].stop and u[1].stop>v[1].start
def above(u,v):
return u[0].start<v[0].start
def left_of(u,v):
return u[1].stop<v[1].start
def separates(w,u,v):
if w[0].stop<min(u[0].start,v[0].start): return 0
if w[0].start>max(u[0].stop,v[0].stop): return 0
if w[1].start<u[1].stop and w[1].stop>v[1].start: return 1
if highlight is not None:
clf(); title("highlight"); imshow(binary); ginput(1,debug)
for i,u in enumerate(lines):
for j,v in enumerate(lines):
if x_overlaps(u,v):
if above(u,v):
order[i,j] = 1
else:
if [w for w in lines if separates(w,u,v)]==[]:
if left_of(u,v): order[i,j] = 1
if j==highlight and order[i,j]:
print (i,j),
y0,x0 = sl.center(lines[i])
y1,x1 = sl.center(lines[j])
plot([x0,x1+200],[y0,y1])
if highlight is not None:
print
ginput(1,debug)
return order
def topsort(order):
"""Given a binary array defining a partial order (o[i,j]==True means i<j),
compute a topological sort. This is a quick and dirty implementation
that works for up to a few thousand elements."""
n = len(order)
visited = zeros(n)
L = []
def visit(k):
if visited[k]: return
visited[k] = 1
for l in find(order[:,k]):
visit(l)
L.append(k)
for k in range(n):
visit(k)
return L #[::-1]
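# Illustrative sketch (not part of the original module): reading_order() and
# topsort() are normally used together, mirroring what show_lines() does below.
# `lines` is assumed to be the list of records returned by compute_lines().
def sort_lines_sketch(lines):
    """Hedged example: return the detected lines in approximate reading order."""
    order = reading_order([l.bounds for l in lines])
    lsort = topsort(order)
    return [lines[i] for i in lsort]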
def show_lines(image,lines,lsort):
"""Overlays the computed lines on top of the image, for debugging
purposes."""
ys,xs = [],[]
clf(); cla()
imshow(image)
for i in range(len(lines)):
l = lines[lsort[i]]
y,x = sl.center(l.bounds)
xs.append(x)
ys.append(y)
o = l.bounds
r = matplotlib.patches.Rectangle((o[1].start,o[0].start),edgecolor='r',fill=0,width=sl.dim1(o),height=sl.dim0(o))
gca().add_patch(r)
h,w = image.shape
ylim(h,0); xlim(0,w)
plot(xs,ys)
@obsolete
def read_gray(fname):
image = imread(fname)
if image.ndim==3: image = mean(image,2)
return image
@obsolete
def read_binary(fname):
image = imread(fname)
if image.ndim==3: image = mean(image,2)
image -= amin(image)
image /= amax(image)
assert sum(image<0.01)+sum(image>0.99)>0.99*prod(image.shape),"input image is not binary"
binary = 1.0*(image<0.5)
return binary
@obsolete
def rgbshow(r,g,b=None,gn=1,cn=0,ab=0,**kw):
"""Small function to display 2 or 3 images as RGB channels."""
if b is None: b = zeros(r.shape)
combo = transpose(array([r,g,b]),axes=[1,2,0])
if cn:
for i in range(3):
combo[:,:,i] /= max(abs(amin(combo[:,:,i])),abs(amax(combo[:,:,i])))
elif gn:
combo /= max(abs(amin(combo)),abs(amax(combo)))
if ab:
combo = abs(combo)
if amin(combo)<0: print "warning: values less than zero"
imshow(clip(combo,0,1),**kw)
| apache-2.0 | 6,891,751,351,341,753,000 | 33.170404 | 121 | 0.60315 | false |
amadeusproject/amadeuslms | users/migrations/0001_initial.py | 1 | 3278 | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-12-20 18:17
from __future__ import unicode_literals
import django.contrib.auth.models
import django.core.validators
from django.db import migrations, models
import re
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0008_alter_user_username_max_length'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('email', models.EmailField(help_text='Your email address that will be used to access the platform', max_length=254, unique=True, validators=[django.core.validators.RegexValidator(re.compile('^[\\w.@+-]+$', 32), 'Type a valid username. This field should only contain letters, numbers and the characters: @/./+/-/_ .', 'invalid')], verbose_name='Mail')),
('username', models.CharField(max_length=100, verbose_name='Name')),
('last_name', models.CharField(max_length=100, verbose_name='Last Name')),
('social_name', models.CharField(blank=True, max_length=100, null=True, verbose_name='Social Name')),
('description', models.TextField(blank=True, verbose_name='Description')),
('image', models.ImageField(blank=True, null=True, upload_to='users/', verbose_name='Photo')),
('type_profile', models.IntegerField(blank=True, choices=[(1, 'Professor'), (2, 'Student'), (3, 'Coordinator')], null=True, verbose_name='Type')),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Create Date')),
('show_email', models.IntegerField(choices=[(1, 'Allow everyone to see my address'), (2, 'Only classmates can see my address'), (3, 'Nobody can see my address')], null=True, verbose_name='Show email?')),
('is_staff', models.BooleanField(default=False, verbose_name='Administrator')),
('is_active', models.BooleanField(default=True, verbose_name='Active')),
('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),
('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),
],
options={
'verbose_name': 'User',
'verbose_name_plural': 'Users',
},
managers=[
('objects', django.contrib.auth.models.UserManager()),
],
),
]
| gpl-2.0 | -4,677,485,319,841,285,000 | 65.897959 | 371 | 0.629652 | false |
noelbk/neutron-juniper | neutron/db/migration/alembic_migrations/versions/27ef74513d33_quota_in_plumgrid_pl.py | 18 | 1989 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""quota_in_plumgrid_plugin
Revision ID: 27ef74513d33
Revises: 3a520dd165d0
Create Date: 2013-10-08 10:59:19.860397
"""
# revision identifiers, used by Alembic.
revision = '27ef74513d33'
down_revision = '3a520dd165d0'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.plumgrid.plumgrid_plugin.plumgrid_plugin.'
'NeutronPluginPLUMgridV2'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
### commands auto generated by Alembic - please adjust! ###
op.create_table(
'quotas',
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('resource', sa.String(length=255), nullable=True),
sa.Column('limit', sa.Integer(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
### end Alembic commands ###
def downgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
### commands auto generated by Alembic - please adjust! ###
op.drop_table('quotas')
### end Alembic commands ###
| apache-2.0 | 8,185,182,609,210,244,000 | 29.6 | 78 | 0.700855 | false |
kidaa/encoded | src/encoded/audit/file.py | 1 | 9132 | from contentbase import (
AuditFailure,
audit_checker,
)
from .conditions import rfa
current_statuses = ['released', 'in progress']
not_current_statuses = ['revoked', 'obsolete', 'deleted']
raw_data_formats = [
'fastq',
'csfasta',
'csqual',
'rcc',
'idat',
'CEL',
]
paired_end_assays = [
'RNA-PET',
'ChIA-PET',
'DNA-PET',
]
@audit_checker('file', frame=['replicate', 'dataset', 'replicate.experiment'])
def audit_file_replicate_match(value, system):
'''
A file's replicate should belong to the same experiment that the file
does. These tend to get confused when replacing objects.
'''
if value['status'] in ['deleted', 'replaced', 'revoked']:
return
if 'replicate' not in value:
return
rep_exp = value['replicate']['experiment']['uuid']
file_exp = value['dataset']['uuid']
if rep_exp != file_exp:
detail = 'File {} has a replicate {} in experiment {}'.format(
value['@id'],
value['replicate']['@id'],
value['replicate']['experiment']['@id'])
raise AuditFailure('mismatched replicate', detail, level='ERROR')
@audit_checker('file', frame='object', condition=rfa('ENCODE3', 'modERN', 'ENCODE2', 'ENCODE2-Mouse'))
def audit_file_platform(value, system):
'''
A raw data file should have a platform specified.
Should be in the schema.
'''
if value['status'] in ['deleted', 'replaced']:
return
if value['file_format'] not in raw_data_formats:
return
if 'platform' not in value:
detail = 'Raw data file {} missing platform information'.format(value['@id'])
raise AuditFailure('missing platform', detail, level='ERROR')
@audit_checker('file', frame='object', condition=rfa('ENCODE3', 'modERN', 'ENCODE2', 'ENCODE2-Mouse'))
def audit_file_read_length(value, system):
'''
Reads files should have a read_length
'''
if value['status'] in ['deleted', 'replaced', 'revoked']:
return
if value['output_type'] != 'reads':
return
if 'read_length' not in value:
detail = 'Reads file {} missing read_length'.format(value['@id'])
raise AuditFailure('missing read_length', detail, level='ERROR')
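# Hypothetical example (not one of the real ENCODE audits): every check in this
# module follows the same shape -- inspect the embedded `value`, bail out for
# irrelevant statuses, and raise AuditFailure when a condition is violated.  A
# real check would also be registered with @audit_checker('file', frame='object');
# the decorator is deliberately omitted here so this sketch never registers itself.
def audit_file_md5sum_example(value, system):
    '''Illustration only: flag files that carry no md5sum.'''
    if value['status'] in ['deleted', 'replaced', 'uploading']:
        return
    if 'md5sum' not in value:
        detail = 'File {} has no md5sum'.format(value['@id'])
        raise AuditFailure('missing md5sum', detail, level='DCC_ACTION')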
@audit_checker('file',
frame=['dataset', 'dataset.target', 'controlled_by',
'controlled_by.dataset'],
condition=rfa('ENCODE2', 'ENCODE2-Mouse', 'ENCODE3', 'modERN'))
def audit_file_controlled_by(value, system):
'''
A fastq in a ChIP-seq experiment should have a controlled_by
'''
if value['status'] in ['deleted', 'replaced', 'revoked']:
return
if value['dataset'].get('assay_term_name') not in ['ChIP-seq', 'RAMPAGE', 'CAGE', 'shRNA knockdown followed by RNA-seq']:
return
if 'target' in value['dataset'] and 'control' in value['dataset']['target'].get('investigated_as', []):
return
if 'controlled_by' not in value:
value['controlled_by'] = []
if (value['controlled_by'] == []) and (value['file_format'] in ['fastq']):
detail = 'Fastq file {} from {} requires controlled_by'.format(
value['@id'],
value['dataset']['assay_term_name']
)
raise AuditFailure('missing controlled_by', detail, level='NOT_COMPLIANT')
possible_controls = value['dataset'].get('possible_controls')
biosample = value['dataset'].get('biosample_term_id')
for ff in value['controlled_by']:
control_bs = ff['dataset'].get('biosample_term_id')
if control_bs != biosample:
detail = 'File {} has a controlled_by file {} with conflicting biosample {}'.format(
value['@id'],
ff['@id'],
control_bs)
raise AuditFailure('mismatched controlled_by', detail, level='ERROR')
return
if ff['file_format'] != value['file_format']:
detail = 'File {} with file_format {} has a controlled_by file {} with file_format {}'.format(
value['@id'],
value['file_format'],
ff['@id'],
ff['file_format']
)
raise AuditFailure('mismatched controlled_by', detail, level='ERROR')
if (possible_controls is None) or (ff['dataset']['@id'] not in possible_controls):
detail = 'File {} has a controlled_by file {} with a dataset {} that is not in possible_controls'.format(
value['@id'],
ff['@id'],
ff['dataset']['@id']
)
raise AuditFailure('mismatched controlled_by', detail, level='DCC_ACTION')
@audit_checker('file', frame='object', condition=rfa('ENCODE3', 'modERN'))
def audit_file_flowcells(value, system):
'''
A fastq file could have its flowcell details.
Don't bother to check anything but ENCODE3
'''
if value['status'] in ['deleted', 'replaced', 'revoked']:
return
if value['file_format'] not in ['fastq']:
return
if 'flowcell_details' not in value or (value['flowcell_details'] == []):
detail = 'Fastq file {} is missing flowcell_details'.format(value['@id'])
raise AuditFailure('missing flowcell_details', detail, level='WARNING')
@audit_checker('file', frame=['paired_with'],)
def audit_paired_with(value, system):
'''
A file with a paired_end needs a paired_with.
Should be handled in the schema.
A paired_with should be the same replicate
'''
if value['status'] in ['deleted', 'replaced', 'revoked']:
return
if 'paired_end' not in value:
return
if 'paired_with' not in value:
detail = 'File {} has paired_end = {}. It requires a paired file'.format(
value['@id'],
value['paired_end'])
raise AuditFailure('missing paired_with', detail, level='NOT_COMPLIANT')
if 'replicate' not in value['paired_with']:
return
if 'replicate' not in value:
detail = 'File {} has paired_end = {}. It requires a replicate'.format(
value['@id'],
value['paired_end'])
raise AuditFailure('missing replicate', detail, level='DCC_ACTION')
if value['replicate'] != value['paired_with']['replicate']:
detail = 'File {} has replicate {}. It is paired_with file {} with replicate {}'.format(
value['@id'],
value.get('replicate'),
value['paired_with']['@id'],
value['paired_with'].get('replicate'))
raise AuditFailure('mismatched paired_with', detail, level='ERROR')
if value['paired_end'] == '1':
context = system['context']
paired_with = context.get_rev_links('paired_with')
if len(paired_with) > 1:
detail = 'Paired end 1 file {} paired_with by multiple paired end 2 files: {!r}'.format(
value['@id'],
paired_with,
)
raise AuditFailure('multiple paired_with', detail, level='ERROR')
@audit_checker('file', frame='object')
def audit_file_size(value, system):
if value['status'] in ['deleted', 'replaced', 'uploading', 'revoked']:
return
if 'file_size' not in value:
detail = 'File {} requires a value for file_size'.format(value['@id'])
raise AuditFailure('missing file_size', detail, level='DCC_ACTION')
@audit_checker('file', frame=['file_format_specifications'],)
def audit_file_format_specifications(value, system):
for doc in value.get('file_format_specifications', []):
if doc['document_type'] != "file format specification":
detail = 'File {} has document {} not of type file format specification'.format(
value['@id'],
doc['@id']
)
raise AuditFailure('wrong document_type', detail, level='ERROR')
@audit_checker('file', frame='object')
def audit_file_paired_ended_run_type(value, system):
'''
Audit to catch those files that were upgraded to have run_type = paired ended
resulting from its migration out of replicate but lack the paired_end property
to specify which read it is. This audit will also catch the case where run_type
= paired-ended but there is no paired_end = 2 due to registeration error.
'''
if value['status'] in ['deleted', 'replaced', 'revoked', 'upload failed']:
return
if value['file_format'] not in ['fastq', 'fasta', 'csfasta']:
return
if (value['output_type'] == 'reads') and (value.get('run_type') == 'paired-ended'):
if 'paired_end' not in value:
detail = 'File {} has a paired-ended run_type but is missing its paired_end value'.format(
value['@id'])
raise AuditFailure('missing paired_end', detail, level='DCC_ACTION')
        # note: paired_end is compared as an int here, while the check above compares it to the string '1'
        if (value['paired_end'] == 1) and 'paired_with' not in value:
detail = 'File {} has a paired-ended run_type but is missing a paired_end=2 mate'.format(
value['@id'])
raise AuditFailure('missing mate pair', detail, level='DCC_ACTION')
| mit | 9,053,639,886,180,891,000 | 34.533074 | 125 | 0.590999 | false |
citrix-openstack-build/python-cinderclient | cinderclient/tests/v2/test_auth.py | 2 | 14395 | # Copyright (c) 2013 OpenStack Foundation
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
import requests
from cinderclient import exceptions
from cinderclient.v2 import client
from cinderclient.tests import utils
class AuthenticateAgainstKeystoneTests(utils.TestCase):
def test_authenticate_success(self):
cs = client.Client("username", "password", "project_id",
"http://localhost:8776/v2", service_type='volumev2')
resp = {
"access": {
"token": {
"expires": "12345",
"id": "FAKE_ID",
},
"serviceCatalog": [
{
"type": "volumev2",
"endpoints": [
{
"region": "RegionOne",
"adminURL": "http://localhost:8776/v2",
"internalURL": "http://localhost:8776/v2",
"publicURL": "http://localhost:8776/v2",
},
],
},
],
},
}
auth_response = utils.TestResponse({
"status_code": 200,
"text": json.dumps(resp),
})
mock_request = mock.Mock(return_value=(auth_response))
@mock.patch.object(requests, "request", mock_request)
def test_auth_call():
cs.client.authenticate()
headers = {
'User-Agent': cs.client.USER_AGENT,
'Content-Type': 'application/json',
'Accept': 'application/json',
}
body = {
'auth': {
'passwordCredentials': {
'username': cs.client.user,
'password': cs.client.password,
},
'tenantName': cs.client.projectid,
},
}
token_url = cs.client.auth_url + "/tokens"
mock_request.assert_called_with(
"POST",
token_url,
headers=headers,
data=json.dumps(body),
allow_redirects=True,
**self.TEST_REQUEST_BASE)
endpoints = resp["access"]["serviceCatalog"][0]['endpoints']
public_url = endpoints[0]["publicURL"].rstrip('/')
self.assertEqual(cs.client.management_url, public_url)
token_id = resp["access"]["token"]["id"]
self.assertEqual(cs.client.auth_token, token_id)
test_auth_call()
def test_authenticate_tenant_id(self):
cs = client.Client("username", "password",
auth_url="http://localhost:8776/v2",
tenant_id='tenant_id', service_type='volumev2')
resp = {
"access": {
"token": {
"expires": "12345",
"id": "FAKE_ID",
"tenant": {
"description": None,
"enabled": True,
"id": "tenant_id",
"name": "demo"
} # tenant associated with token
},
"serviceCatalog": [
{
"type": 'volumev2',
"endpoints": [
{
"region": "RegionOne",
"adminURL": "http://localhost:8776/v2",
"internalURL": "http://localhost:8776/v2",
"publicURL": "http://localhost:8776/v2",
},
],
},
],
},
}
auth_response = utils.TestResponse({
"status_code": 200,
"text": json.dumps(resp),
})
mock_request = mock.Mock(return_value=(auth_response))
@mock.patch.object(requests, "request", mock_request)
def test_auth_call():
cs.client.authenticate()
headers = {
'User-Agent': cs.client.USER_AGENT,
'Content-Type': 'application/json',
'Accept': 'application/json',
}
body = {
'auth': {
'passwordCredentials': {
'username': cs.client.user,
'password': cs.client.password,
},
'tenantId': cs.client.tenant_id,
},
}
token_url = cs.client.auth_url + "/tokens"
mock_request.assert_called_with(
"POST",
token_url,
headers=headers,
data=json.dumps(body),
allow_redirects=True,
**self.TEST_REQUEST_BASE)
endpoints = resp["access"]["serviceCatalog"][0]['endpoints']
public_url = endpoints[0]["publicURL"].rstrip('/')
self.assertEqual(cs.client.management_url, public_url)
token_id = resp["access"]["token"]["id"]
self.assertEqual(cs.client.auth_token, token_id)
tenant_id = resp["access"]["token"]["tenant"]["id"]
self.assertEqual(cs.client.tenant_id, tenant_id)
test_auth_call()
def test_authenticate_failure(self):
cs = client.Client("username", "password", "project_id",
"http://localhost:8776/v2")
resp = {"unauthorized": {"message": "Unauthorized", "code": "401"}}
auth_response = utils.TestResponse({
"status_code": 401,
"text": json.dumps(resp),
})
mock_request = mock.Mock(return_value=(auth_response))
@mock.patch.object(requests, "request", mock_request)
def test_auth_call():
self.assertRaises(exceptions.Unauthorized, cs.client.authenticate)
test_auth_call()
def test_auth_redirect(self):
cs = client.Client("username", "password", "project_id",
"http://localhost:8776/v2", service_type='volumev2')
dict_correct_response = {
"access": {
"token": {
"expires": "12345",
"id": "FAKE_ID",
},
"serviceCatalog": [
{
"type": "volumev2",
"endpoints": [
{
"adminURL": "http://localhost:8776/v2",
"region": "RegionOne",
"internalURL": "http://localhost:8776/v2",
"publicURL": "http://localhost:8776/v2/",
},
],
},
],
},
}
correct_response = json.dumps(dict_correct_response)
dict_responses = [
{"headers": {'location': 'http://127.0.0.1:5001'},
"status_code": 305,
"text": "Use proxy"},
# Configured on admin port, cinder redirects to v2.0 port.
# When trying to connect on it, keystone auth succeed by v1.0
# protocol (through headers) but tokens are being returned in
# body (looks like keystone bug). Leaved for compatibility.
{"headers": {},
"status_code": 200,
"text": correct_response},
{"headers": {},
"status_code": 200,
"text": correct_response}
]
responses = [(utils.TestResponse(resp)) for resp in dict_responses]
def side_effect(*args, **kwargs):
return responses.pop(0)
mock_request = mock.Mock(side_effect=side_effect)
@mock.patch.object(requests, "request", mock_request)
def test_auth_call():
cs.client.authenticate()
headers = {
'User-Agent': cs.client.USER_AGENT,
'Content-Type': 'application/json',
'Accept': 'application/json',
}
body = {
'auth': {
'passwordCredentials': {
'username': cs.client.user,
'password': cs.client.password,
},
'tenantName': cs.client.projectid,
},
}
token_url = cs.client.auth_url + "/tokens"
mock_request.assert_called_with(
"POST",
token_url,
headers=headers,
data=json.dumps(body),
allow_redirects=True,
**self.TEST_REQUEST_BASE)
resp = dict_correct_response
endpoints = resp["access"]["serviceCatalog"][0]['endpoints']
public_url = endpoints[0]["publicURL"].rstrip('/')
self.assertEqual(cs.client.management_url, public_url)
token_id = resp["access"]["token"]["id"]
self.assertEqual(cs.client.auth_token, token_id)
test_auth_call()
def test_ambiguous_endpoints(self):
cs = client.Client("username", "password", "project_id",
"http://localhost:8776/v2", service_type='volumev2')
resp = {
"access": {
"token": {
"expires": "12345",
"id": "FAKE_ID",
},
"serviceCatalog": [
{
"adminURL": "http://localhost:8776/v1",
"type": "volumev2",
"name": "Cinder Volume Service",
"endpoints": [
{
"region": "RegionOne",
"internalURL": "http://localhost:8776/v1",
"publicURL": "http://localhost:8776/v1",
},
],
},
{
"adminURL": "http://localhost:8776/v2",
"type": "volumev2",
"name": "Cinder Volume V2",
"endpoints": [
{
"internalURL": "http://localhost:8776/v2",
"publicURL": "http://localhost:8776/v2",
},
],
},
],
},
}
auth_response = utils.TestResponse({
"status_code": 200,
"text": json.dumps(resp),
})
mock_request = mock.Mock(return_value=(auth_response))
@mock.patch.object(requests, "request", mock_request)
def test_auth_call():
self.assertRaises(exceptions.AmbiguousEndpoints,
cs.client.authenticate)
test_auth_call()
class AuthenticationTests(utils.TestCase):
def test_authenticate_success(self):
cs = client.Client("username", "password", "project_id", "auth_url")
management_url = 'https://localhost/v2.1/443470'
auth_response = utils.TestResponse({
'status_code': 204,
'headers': {
'x-server-management-url': management_url,
'x-auth-token': '1b751d74-de0c-46ae-84f0-915744b582d1',
},
})
mock_request = mock.Mock(return_value=(auth_response))
@mock.patch.object(requests, "request", mock_request)
def test_auth_call():
cs.client.authenticate()
headers = {
'Accept': 'application/json',
'X-Auth-User': 'username',
'X-Auth-Key': 'password',
'X-Auth-Project-Id': 'project_id',
'User-Agent': cs.client.USER_AGENT
}
mock_request.assert_called_with(
"GET",
cs.client.auth_url,
headers=headers,
**self.TEST_REQUEST_BASE)
self.assertEqual(cs.client.management_url,
auth_response.headers['x-server-management-url'])
self.assertEqual(cs.client.auth_token,
auth_response.headers['x-auth-token'])
test_auth_call()
def test_authenticate_failure(self):
cs = client.Client("username", "password", "project_id", "auth_url")
auth_response = utils.TestResponse({"status_code": 401})
mock_request = mock.Mock(return_value=(auth_response))
@mock.patch.object(requests, "request", mock_request)
def test_auth_call():
self.assertRaises(exceptions.Unauthorized, cs.client.authenticate)
test_auth_call()
def test_auth_automatic(self):
cs = client.Client("username", "password", "project_id", "auth_url")
http_client = cs.client
http_client.management_url = ''
mock_request = mock.Mock(return_value=(None, None))
@mock.patch.object(http_client, 'request', mock_request)
@mock.patch.object(http_client, 'authenticate')
def test_auth_call(m):
http_client.get('/')
m.assert_called()
mock_request.assert_called()
test_auth_call()
def test_auth_manual(self):
cs = client.Client("username", "password", "project_id", "auth_url")
@mock.patch.object(cs.client, 'authenticate')
def test_auth_call(m):
cs.authenticate()
m.assert_called()
test_auth_call()
| apache-2.0 | -4,373,985,867,180,317,700 | 35.815857 | 79 | 0.46009 | false |
alff0x1f/Misago | misago/threads/tests/test_moderatedcontent_view.py | 8 | 2636 | from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from misago.acl.testutils import override_acl
from misago.forums.models import Forum
from misago.users.testutils import UserTestCase, AuthenticatedUserTestCase
from misago.threads import testutils
class AuthenticatedTests(AuthenticatedUserTestCase):
def override_acl(self):
new_acl = {
'can_see': True,
'can_browse': True,
'can_see_all_threads': True,
'can_review_moderated_content': True,
}
forums_acl = self.user.acl
for forum in Forum.objects.all():
forums_acl['visible_forums'].append(forum.pk)
forums_acl['can_review_moderated_content'].append(forum.pk)
forums_acl['forums'][forum.pk] = new_acl
override_acl(self.user, forums_acl)
def test_cant_see_threads_list(self):
"""user has no permission to see moderated list"""
response = self.client.get(reverse('misago:moderated_content'))
self.assertEqual(response.status_code, 403)
self.assertIn("review moderated content.", response.content)
def test_empty_threads_list(self):
"""empty threads list is rendered"""
forum = Forum.objects.all_forums().filter(role="forum")[:1][0]
[testutils.post_thread(forum) for t in xrange(10)]
self.override_acl();
response = self.client.get(reverse('misago:moderated_content'))
self.assertEqual(response.status_code, 200)
self.assertIn("There are no threads with moderated", response.content)
def test_filled_threads_list(self):
"""filled threads list is rendered"""
forum = Forum.objects.all_forums().filter(role="forum")[:1][0]
threads = []
for t in xrange(10):
threads.append(testutils.post_thread(forum, is_moderated=True))
for t in xrange(10):
threads.append(testutils.post_thread(forum))
testutils.reply_thread(threads[-1], is_moderated=True)
self.override_acl();
response = self.client.get(reverse('misago:moderated_content'))
self.assertEqual(response.status_code, 200)
for thread in threads:
self.assertIn(thread.get_absolute_url(), response.content)
class AnonymousTests(UserTestCase):
def test_anon_access_to_view(self):
"""anonymous user has no access to unread threads list"""
response = self.client.get(reverse('misago:moderated_content'))
self.assertEqual(response.status_code, 403)
self.assertIn("sign in to see list", response.content)
| gpl-2.0 | -4,014,043,167,597,863,400 | 37.202899 | 78 | 0.657056 | false |
A3sal0n/FalconGate | lib/config.py | 1 | 4738 | from configparser import SafeConfigParser
import threading
import os
import time
import lib.utils as utils
import netifaces
from lib.logger import *
from lib.settings import homenet, lock
class CheckConfigFileModification(threading.Thread):
def __init__(self, threadID):
threading.Thread.__init__(self)
self.threadID = threadID
self._cached_stamp_core = 0
self._cached_stamp_user = 0
self.core_conf_file = 'config.ini'
self.user_conf_file = 'html/user_config.ini'
def run(self):
counter = 0
while 1:
flag = False
stamp = os.stat(self.core_conf_file).st_mtime
if stamp != self._cached_stamp_core:
flag = True
self._cached_stamp_core = stamp
# Reading core configuration file
core_config = SafeConfigParser()
core_config.read('config.ini')
# main section
with lock:
homenet.interface = core_config.get('main', 'iface')
homenet.fg_intel_creds = core_config.get('api_urls', 'fg_intel_creds').strip('"')
homenet.fg_intel_domains = core_config.get('api_urls', 'fg_intel_domains').strip('"')
homenet.fg_intel_ip = core_config.get('api_urls', 'fg_intel_ip').strip('"')
homenet.vt_api_domain_url = core_config.get('api_urls', 'vt_api_domain_url').strip('"')
homenet.vt_api_ip_url = core_config.get('api_urls', 'vt_api_ip_url').strip('"')
homenet.vt_api_file_url = core_config.get('api_urls', 'vt_api_file_url').strip('"')
homenet.hibp_api_url = core_config.get('api_urls', 'hibp_api_url').strip('"')
for option in core_config.options('blacklists_ip'):
homenet.blacklist_sources_ip[option.capitalize()] = core_config.get('blacklists_ip', option).strip('"').split(',')
for option in core_config.options('blacklists_domain'):
homenet.blacklist_sources_domain[option.capitalize()] = core_config.get('blacklists_domain', option).strip('"').split(',')
stamp = os.stat(self.user_conf_file).st_mtime
if stamp != self._cached_stamp_user:
flag = True
self._cached_stamp_user = stamp
# Reading user configuration file
user_config = SafeConfigParser()
user_config.read('html/user_config.ini')
# main section
homenet.dst_emails = (user_config.get('main', 'dst_emails')).strip('"').split(",")
homenet.email_watchlist = (user_config.get('main', 'email_watchlist')).strip('"').split(",")
homenet.vt_api_key = user_config.get('main', 'vt_api_key').strip('"')
homenet.user_blacklist = (user_config.get('main', 'blacklist')).strip('"').split(",")
homenet.user_whitelist = (user_config.get('main', 'whitelist')).strip('"').split(",")
homenet.user_domain_blacklist = (user_config.get('main', 'domain_blacklist')).strip('"').split(",")
homenet.user_domain_whitelist = (user_config.get('main', 'domain_whitelist')).strip('"').split(",")
homenet.mailer_mode = user_config.get('main', 'mailer_mode').strip('"')
homenet.mailer_address = user_config.get('main', 'mailer_address').strip('"')
homenet.mailer_pwd = user_config.get('main', 'mailer_pwd').strip('"')
homenet.allow_tor = user_config.get('main', 'allow_tor').strip('"')
if flag:
counter += 1
if counter > 1:
utils.restart_falcongate_service()
time.sleep(5)
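# Illustrative sketch (an assumption, not a file shipped in this repository):
# the layout that the loop above expects from config.ini and html/user_config.ini.
# Section and option names match the get() calls above; all values shown are
# placeholders, and quoting matters because most options are read with .strip('"').
#
# config.ini
# [main]
# iface = eth0
# [api_urls]
# fg_intel_ip = "https://example.invalid/intel/ip"
# [blacklists_ip]
# provider1 = "https://example.invalid/ip-list-1.txt,https://example.invalid/ip-list-2.txt"
#
# html/user_config.ini
# [main]
# dst_emails = "[email protected],[email protected]"
# vt_api_key = "placeholder"
# allow_tor = "false"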
class CheckNetworkModifications(threading.Thread):
def __init__(self, threadID):
threading.Thread.__init__(self)
self.threadID = threadID
def run(self):
while 1:
gws = netifaces.gateways()
cgw = gws['default'][netifaces.AF_INET][0]
if not homenet.gateway:
homenet.gateway = cgw
else:
if homenet.gateway != cgw:
utils.reconfigure_network(homenet.gateway, cgw)
homenet.gateway = cgw
try:
with lock:
utils.save_pkl_object(homenet, "homenet.pkl")
except Exception as e:
log.debug('FG-ERROR ' + str(e.__doc__) + " - " + str(e))
utils.reboot_appliance()
else:
pass
time.sleep(10)
| gpl-3.0 | -3,961,764,385,715,233,000 | 44.557692 | 146 | 0.535458 | false |
jykntr/rest-cli-client | test/test_config.py | 1 | 2886 | import unittest
from config import Config
class TestConfig(unittest.TestCase):
def test_file_does_not_exist(self):
self.assertRaises(Exception, Config, 'missing_file.conf')
def test_verify_option(self):
config = Config('./resources/verifytrue.conf')
self.assertTrue(config.get_options().get_verify())
config = Config('./resources/verifyfalse.conf')
self.assertFalse(config.get_options().get_verify())
def test_proxies_option(self):
config = Config('./resources/proxiesempty.conf')
self.assertEqual(0, len(config.get_options().get_proxies()))
config = Config('./resources/proxiesnonempty.conf')
self.assertEqual(2, len(config.get_options().get_proxies()))
self.assertEqual('http://user:[email protected]:3128/', config.get_options().get_proxies()['http'])
self.assertEqual('http://10.10.1.10:1080', config.get_options().get_proxies()['https'])
def test_requests_section(self):
config = Config('./resources/request.conf')
self.assertEqual(1, len(config.get_requests_section()))
request = config.get_request('httpbinvars')
self.assertEqual('httpbinvars', request.name)
self.assertEqual('get', request.method)
self.assertEqual('http://httpbin.org/get', request.url)
self.assertIs(request, config.get_request('httpbinvars'))
request_list = config.get_requests()
self.assertEqual(1, len(request_list))
self.assertIs(request, config.get_requests()[0])
self.assertRaises(Exception, config.get_request, 'notthere')
self.assertRaises(Exception, Config, './resources/norequestsection.conf')
def test_profiles_section(self):
config = Config('./resources/profile.conf')
self.assertEqual(1, len(config.get_profiles()))
self.assertEqual(1, len(config.get_profiles_section()))
profile = config.get_profile('testprofile')
self.assertEqual('testprofile', profile.name)
self.assertEqual(2, len(profile.properties))
self.assertEqual('myheadervar', profile.properties['headervar'])
self.assertEqual('myparamvar', profile.properties['paramvar'])
profiles = config.get_profiles()
self.assertEqual(1, len(profiles))
self.assertIs(profile, profiles[0])
self.assertRaises(Exception, Config, './resources/noprofilesection.conf')
self.assertRaises(Exception, config.get_profile, 'notthere')
def test_default_profile(self):
config = Config('./resources/empty.conf')
self.assertIsNone(config.get_default_profile())
config = Config('./resources/defaultprofile.conf')
self.assertEqual('myprofile', config.get_default_profile())
def suite():
return unittest.TestLoader().loadTestsFromTestCase(TestConfig)
if __name__ == '__main__':
unittest.main() | mit | -6,331,840,196,142,078,000 | 37.493333 | 105 | 0.668053 | false |
fogbow/fogbow-dashboard | openstack_dashboard/test/api_tests/keystone_tests.py | 15 | 4964 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from keystoneclient.v2_0 import client as keystone_client
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
class FakeConnection(object):
pass
class ClientConnectionTests(test.TestCase):
def setUp(self):
super(ClientConnectionTests, self).setUp()
self.mox.StubOutWithMock(keystone_client, "Client")
self.internal_url = api.base.url_for(self.request,
'identity',
endpoint_type='internalURL')
self.admin_url = api.base.url_for(self.request,
'identity',
endpoint_type='adminURL')
self.conn = FakeConnection()
class RoleAPITests(test.APITestCase):
def setUp(self):
super(RoleAPITests, self).setUp()
self.role = self.roles.member
self.roles = self.roles.list()
def test_remove_tenant_user(self):
"""
Tests api.keystone.remove_tenant_user
Verifies that remove_tenant_user is called with the right arguments
after iterating the user's roles.
There are no assertions in this test because the checking is handled
by mox in the VerifyAll() call in tearDown().
"""
keystoneclient = self.stub_keystoneclient()
tenant = self.tenants.first()
keystoneclient.roles = self.mox.CreateMockAnything()
keystoneclient.roles.roles_for_user(self.user.id,
tenant.id).AndReturn(self.roles)
for role in self.roles:
keystoneclient.roles.revoke(role.id,
domain=None,
group=None,
project=tenant.id,
user=self.user.id)
self.mox.ReplayAll()
api.keystone.remove_tenant_user(self.request, tenant.id, self.user.id)
def test_get_default_role(self):
keystoneclient = self.stub_keystoneclient()
keystoneclient.roles = self.mox.CreateMockAnything()
keystoneclient.roles.list().AndReturn(self.roles)
self.mox.ReplayAll()
role = api.keystone.get_default_role(self.request)
self.assertEqual(role, self.role)
# Verify that a second call doesn't hit the API again,
# (it would show up in mox as an unexpected method call)
role = api.keystone.get_default_role(self.request)
class ServiceAPITests(test.APITestCase):
def test_service_wrapper(self):
catalog = self.service_catalog
identity_data = api.base.get_service_from_catalog(catalog, "identity")
identity_data['id'] = 1
region = identity_data["endpoints"][0]["region"]
service = api.keystone.Service(identity_data, region)
self.assertEqual(unicode(service), u"identity (native backend)")
self.assertEqual(service.region,
identity_data["endpoints"][0]["region"])
self.assertEqual(service.url,
"http://int.keystone.example.com:5000/v2.0")
self.assertEqual(service.public_url,
"http://public.keystone.example.com:5000/v2.0")
self.assertEqual(service.host, "int.keystone.example.com")
def test_service_wrapper_service_in_region(self):
catalog = self.service_catalog
compute_data = api.base.get_service_from_catalog(catalog, "compute")
compute_data['id'] = 1
region = compute_data["endpoints"][1]["region"]
service = api.keystone.Service(compute_data, region)
self.assertEqual(unicode(service), u"compute")
self.assertEqual(service.region,
compute_data["endpoints"][1]["region"])
self.assertEqual(service.url,
"http://int.nova2.example.com:8774/v2")
self.assertEqual(service.public_url,
"http://public.nova2.example.com:8774/v2")
self.assertEqual(service.host, "int.nova2.example.com")
| apache-2.0 | 3,915,623,003,545,576,000 | 41.067797 | 78 | 0.619057 | false |
sayan801/indivo_server | indivo/tests/integration/test_modules/security.py | 3 | 16915 | """
Do a bunch of security tests
Ben Adida
2010-08-27
"""
import os, cgi, uuid
import data
from lxml import etree
from django.utils import simplejson
from utils import *
PRD = 'prd'
RESP_DATA = 'response_data'
def test_client_expect_no_access(client, record_id, document_id, run_special_admin_calls=True):
"""
run tests on a client with a given record_id where the client should have zero access
to that record. The document ID is a real document inside that record, which shouldn't be allowed to be accessed of course.
"""
# commented out for now: admin apps should be able to read records that they created
#assert_403(client.read_record(record_id = record_id))
if run_special_admin_calls:
# admin client shouldn't be able to add a document, but TEMPORARILY we're allowing this
# but NOT on a record not created by the admin app
assert_403(client.post_document(record_id=record_id, data=data.doc01))
# we DO allow an admin client to check the shares on a record it has created,
# but not on another record
assert_403(client.get_shares(record_id=record_id))
# admin clients querying list of carenets... kosher?
# should not be able to list carenets, see sharing, etc..
assert_403(client.get_record_carenets(record_id=record_id))
assert_403(client.create_carenet(record_id=record_id, data={'name':'foobar'}))
assert_403(client.get_autoshare(record_id=record_id))
assert_403(client.setup_app(record_id=record_id, app_id=data.app_email))
assert_403(client.message_record(record_id=record_id, message_id= str(uuid.uuid1()), data={'subject':'foo','body':'bar', 'num_attachments': 1}))
assert_403(client.message_record_attachment(record_id=record_id, message_id= str(uuid.uuid1()), attachment_num="1", data="<foo>bar</foo>"))
assert_403(client.read_documents(record_id=record_id))
assert_403(client.post_document_ext(record_id=record_id, app_id=data.app_email, external_id= "foobar-ext-fail", data=data.doc01))
# even when the app_email matches the client, this should give no access
assert_403(client.get_recapp_documents(record_id=record_id, app_id = data.app_email))
# FIXME: test the replace call once it returns
# assert_403(client.replace_document(record_id, document_id=document_id, data=data.doc02))
assert_403(client.read_document(record_id=record_id, document_id=document_id))
assert_403(client.read_document_meta(record_id=record_id, document_id=document_id))
assert_403(client.read_document_versions(record_id=record_id, document_id=document_id))
assert_403(client.set_document_status(record_id=record_id, document_id=document_id, data='reason=void1&status=void'))
assert_403(client.read_document_status_history(record_id=record_id, document_id=document_id))
reports = ['read_equipment', 'read_procedures',
['read_measurements', {'lab_code':'HBA1C'}]]
for report in reports:
extra_params = {}
if type(report) == list:
extra_params = report[1]
report = report[0]
assert_403(getattr(client, report)(**combine_dicts({'record_id':record_id}, extra_params)))
def test_account_admin_calls(client, account_id):
"""
the following calls should be doable only by an admin app on an account
only an admin app can add auth system, set password, initialize account, and send the secret
"""
assert_403(client.add_auth_system(account_id= account_id, data={'system':'password', 'username':'foo', 'password': 'bar'}))
assert_403(client.account_set_password(account_id= account_id, data={'password': 'foo'}))
# hard to test this one since the primary secret being wrong should give 403 too, should make this better
assert_403(client.account_initialize(account_id= account_id, primary_secret='foobar'))
assert_403(client.account_secret_resend(account_id = account_id))
assert_403(client.account_set_state(account_id = account_id, data={'state': 'active'}))
assert_403(client.account_primary_secret(account_id = account_id))
assert_403(client.check_account_secrets(account_id = account_id, primary_secret='foo'))
assert_403(client.account_forgot_password(account_id = account_id))
assert_403(client.account_search(data = {'contact_email': '[email protected]'}))
assert_403(client.put_record_ext(principal_email = data.machine_app_email, external_id = 'record_ext_foobar', data=data.demographics))
assert_403(client.message_account(account_id = account_id, data = {'subject':'foo', 'body':'bar'}))
def test_chrome_session_calls(client, account_id):
"""
calls that should only be permissible to a chrome session that *is* the account in question and has full access to the record
"""
# view account inbox
assert_403(client.account_inbox(account_id = account_id))
# view account inbox message
assert_403(client.account_inbox_message(account_id = account_id, message_id='foo'))
# accept attachment into record
assert_403(client.account_inbox_message_attachment_accept(account_id = account_id, message_id='foo', attachment_num="1"))
# view account healthfeed
assert_403(client.account_notifications(account_id = account_id))
# change password (FIXME: hard to test this, need something more specific)
assert_403(client.account_change_password(account_id = account_id, data={'old':'foo', 'new':'bar'}))
def test_security(IndivoClient):
account_id = data.account['account_id']
account_id_2 = data.account02['account_id']
##
## Create a Record to see if others can access it
##
admin_client = IndivoClient(data.machine_app_email, data.machine_app_secret)
admin_client.set_app_id(data.app_email)
record_id = admin_client.create_record(data=data.demographics).response[PRD]['Record'][0]
admin_client.post_document(record_id=record_id, data=data.doc01)
admin_client.set_record_owner(data=account_id)
# create another different record and put some docs in it
chrome_client = IndivoClient(data.chrome_consumer_key, data.chrome_consumer_secret)
record2_id = chrome_client.create_record(data=data.demographics).response[PRD]['Record'][0]
chrome_client.set_record_owner(record_id = record2_id, data= account_id_2)
# put some documents into the first and second records
chrome_client.create_session(data.account)
document_id = chrome_client.post_document(record_id=record_id, data=data.allergy).response['prd']['Document'][0]
chrome_client.post_document(record_id=record_id, data=data.allergy)
chrome_client.post_document(record_id=record_id, data=data.allergy)
chrome_client = IndivoClient(data.chrome_consumer_key, data.chrome_consumer_secret)
chrome_client.create_session(data.account02)
document2_id = chrome_client.post_document(record_id=record2_id, data=data.allergy).response['prd']['Document'][0]
chrome_client.post_document(record_id=record2_id, data=data.allergy)
chrome_client.post_document(record_id=record2_id, data=data.allergy)
# other than the record owner, no one should have access
##
## A bogus client, should have access to nothing
##
bogus_client = IndivoClient("foo","bar")
assert_403(bogus_client.create_record(data=data.demographics))
test_client_expect_no_access(bogus_client, record_id, document_id)
# Creating a session should raise a 403
try:
token = bogus_client.create_session(data.account)
if token:
raise AssertionError("shouldn't be able to create a session: got a valid token.")
except IOError as e:
if e.errno != 403:
raise AssertionError("shouldn't be able to create a session: got a non 403 response.")
test_account_admin_calls(bogus_client, account_id)
test_chrome_session_calls(bogus_client, account_id)
# view account
assert_403(bogus_client.account_info(account_id = account_id))
##
## Admin Client
##
test_client_expect_no_access(admin_client, record_id, document_id, run_special_admin_calls=False)
test_client_expect_no_access(admin_client, record2_id, document2_id, run_special_admin_calls=False)
test_chrome_session_calls(admin_client, account_id)
##
## Chrome client = user
##
chrome_client = IndivoClient(data.chrome_consumer_key, data.chrome_consumer_secret)
chrome_client.create_session(data.account)
# no access to other record
test_client_expect_no_access(chrome_client, record2_id, document2_id)
# chrome client with session is NO LONGER an admin client
test_account_admin_calls(chrome_client, account_id)
# chrome client shouldn't access the other account
test_chrome_session_calls(chrome_client, account_id_2)
# view other account
assert_403(chrome_client.account_info(account_id = account_id_2))
##
## User App
##
# an app that has not been authorized
pha_client = IndivoClient(data.app_email, data.app_secret)
pha_client.set_app_id(data.app_email)
pha_client.update_token({'oauth_token': "foo", "oauth_token_secret": "bar"})
test_client_expect_no_access(pha_client, record_id, document_id)
test_client_expect_no_access(pha_client, record2_id, document2_id)
# authorize it for one record
token = admin_client.setup_app(record_id=record_id, app_id=data.app_email).response['prd']
# make sure records are still inaccessible because token not set
test_client_expect_no_access(pha_client, record2_id, document2_id)
test_client_expect_no_access(pha_client, record_id, document_id)
# no admin or chrome session calls before token
test_account_admin_calls(pha_client, account_id)
test_chrome_session_calls(pha_client, account_id)
assert_403(pha_client.account_info(account_id = account_id))
# set the token
pha_client.update_token(token)
# no admin or chrome session calls after token
test_account_admin_calls(pha_client, account_id)
test_chrome_session_calls(pha_client, account_id)
assert_403(pha_client.account_info(account_id = account_id))
# make sure other record still inaccessible
test_client_expect_no_access(pha_client, record2_id, document2_id)
##
## put a user in a carenet and see if he is successfully blocked from other carenets
##
def create_account(account_email, username, password):
# create an account
account_id = xpath(parse_xml(admin_client.create_account({'user_email' : account_email, 'primary_secret_p' : '1'})['response_data']), '/Account/@id')[0]
# get the primary secret
primary_secret_resp = admin_client.account_primary_secret(account_id = account_email)
parsed_resp = parse_xml(primary_secret_resp)
primary_secret = parsed_resp.text
# initialize it
admin_client.account_initialize(account_id= account_email, primary_secret=primary_secret)
# set password
admin_client.add_auth_system(account_id= account_email, data={'system':'password', 'username':username, 'password': password})
return account_id
def login_as(username, password):
chrome_client = IndivoClient(data.chrome_consumer_key, data.chrome_consumer_secret)
chrome_client.create_session({'username':username,'user_pass': password})
return chrome_client
owner = create_account('[email protected]', 'owner','owner-password')
friend = create_account('[email protected]', 'friend', 'friend-password')
record_id = admin_client.create_record(data=data.demographics).response[PRD]['Record'][0]
admin_client.set_record_owner(record_id = record_id, data=owner)
chrome_client = login_as('owner', 'owner-password')
# get the list of carenets
carenets = chrome_client.get_record_carenets(record_id = record_id).response[PRD]['Carenet']
carenet_id = carenets[0]
other_carenet_id = carenets[1]
# add the friend to the carenet
chrome_client.post_carenet_account(carenet_id = carenet_id, data={'account_id': friend, 'write': 'false'})
# login as the friend
chrome_client = login_as('friend', 'friend-password')
# read other carenet documents?
assert_403(chrome_client.get_carenet_documents(carenet_id = other_carenet_id))
assert_403(chrome_client.get_carenet_apps(carenet_id = other_carenet_id))
##
## make sure an app not in a given carenet is not visible to someone in that carenet
##
# authorize the app into the record
token = admin_client.setup_app(record_id=record_id, app_id=data.app_email).response['prd']
# should not be visible at this point
def check_app():
app_list = simplejson.loads(chrome_client.get_carenet_apps(carenet_id=carenet_id).response['response_data'])
assert len(app_list) == 0, "some apps are in there:\n%s" % etree.tostring(app_list[0])
# now add the app to the other carenet
chrome_client = login_as('owner', 'owner-password')
chrome_client.post_carenet_app(carenet_id = other_carenet_id, app_id = data.app_email)
# still not visible, cause friend is in main carenet, not other carenet
chrome_client = login_as('friend', 'friend-password')
check_app()
# oauth process
def do_oauth(chrome_client, app_id, app_secret, record_id=None, carenet_id=None):
"""
perform the whole oauth process up until and including the request token approve
most of the time we're checking that that one fails
"""
# get the request token
app_client = IndivoClient(app_id, app_secret)
app_client.set_app_id(app_id)
params = {'oauth_callback': 'oob'}
approve_params = {}
if record_id:
params['indivo_record_id'] = record_id
approve_params['record_id'] = record_id
if carenet_id:
params['indivo_carenet_id'] = carenet_id
approve_params['carenet_id'] = carenet_id
rt = app_client.get_request_token(data=params).response['prd']
# claim it and try to approve it, should fail
chrome_client.claim_request_token(request_token = rt['oauth_token'])
return chrome_client.approve_request_token(request_token = rt['oauth_token'], data=approve_params)
## app cannot be activated by the friend in either carenet at this point,
## since the app is not in the carenet
assert_403(do_oauth(chrome_client, data.app_email, data.app_secret, carenet_id = other_carenet_id))
assert_403(do_oauth(chrome_client, data.app_email, data.app_secret, carenet_id = carenet_id))
chrome_client = login_as('owner', 'owner-password')
## what happens if the owner themselves tries to activate in a carenet?
## right now this gives a 403, but that might not be the right thing.
assert_403(do_oauth(chrome_client, data.app_email, data.app_secret, carenet_id = carenet_id))
# put it in this carenet, still shouldn't be able to activate it in other carenet
# now add the app to the other carenet
chrome_client.post_carenet_app(carenet_id = carenet_id, app_id = data.app_email)
chrome_client = login_as('friend', 'friend-password')
assert_403(do_oauth(chrome_client, data.app_email, data.app_secret, carenet_id = other_carenet_id))
## test the oauth process for non-chrome app, request token claiming and such
## everything else should work, the only problem is that admin_client shouldn't be able to claim or approve the token
admin_client = IndivoClient(data.machine_app_email, data.machine_app_secret)
assert_403(do_oauth(admin_client, data.app_email, data.app_secret, carenet_id = carenet_id))
## test oauth process for non-user-app, should fail immediately
assert_403(admin_client.get_request_token(data={'oauth_callback':'oob'}))
## test oauth process by trying to have a different app exchange the token for the current app
approved_rt = do_oauth(chrome_client, data.app_email, data.app_secret, carenet_id = carenet_id)
## make an account anything other than "active", and make sure it's not possible to login
chrome_client = IndivoClient(data.chrome_consumer_key, data.chrome_consumer_secret)
for state in ['disabled', 'retired']:
chrome_client.account_set_state(account_id = data.account['account_id'], data={'state': state})
try:
token = chrome_client.create_session(data.account)
if token:
raise AssertionError("shouldn't be able to log in for a user in state %s: got a valid token.")
except IOError as e:
if e.errno != 403:
raise AssertionError("shouldn't be able to log in for a user in state %s: got a non 403 response.")
## test account permissions: messaging, change password
| gpl-3.0 | -5,798,742,595,480,910,000 | 42.70801 | 160 | 0.692876 | false |
Jgarcia-IAS/SAT | openerp/addons-extra/account_invoice_taxes/__openerp__.py | 3 | 1632 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2015 I.A.S. INGENIERÍA, APLICACIONES Y SOFTWARE Johan Alejandro Olano (<http:http://www.ias.com.co).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name": "Impuestos",
"version": "1.0",
"description": """
       Nomenclature following the DIAN standard (Colombian localization)
Collaborators:
- Johan Alejandro Olano <[email protected]>
""",
"author": "I.A.S. Ingenieria, Aplicaciones y Software",
"website": "http://www.ias.com.co",
"category": "Localization",
"depends": [
],
"data":[
"account_invoice_view.xml",
],
"demo_xml": [
],
"update_xml": [
],
"active": False,
"installable": True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -9,075,844,190,031,815,000 | 31.62 | 119 | 0.602085 | false |
tanmaykm/JuliaBox | container/interactive/IJulia/tornado/src/gitsync.py | 8 | 4718 | import git
import os
import datetime
import pytz
import shutil
import string
import hashlib
class GitSync:
"""
Keeps folders synchronized with a git repository.
    Not perfect yet: complex merges are problematic, and git commands can be brittle across versions.
"""
def __init__(self, loc):
self.loc = loc
self.repo = git.Repo(loc)
def repo_hash(self):
return hashlib.sha1('_'.join([self.loc, self.remote_url(), self.branch_name()])).hexdigest()
def repo_name(self):
return os.path.basename(self.loc) + ' (' + self.remote_url() + ' - ' + self.branch_name() + ')'
def remote_name(self):
return self.repo.remote().name
def remote_url(self):
return self.repo.remotes[self.remote_name()].url
def branch_name(self):
return self.repo.active_branch.name
def remote_branch_name(self):
return self.remote_name() + '/' + self.branch_name()
def local_branches(self):
return [x.split(' ')[-1] for x in self.repo.git.branch().split('\n')]
def is_dirty(self):
return self.repo.is_dirty()
def has_commits_to_sync(self, output=None):
gitresult = self.repo.git.log(self.remote_branch_name() + '..', '--oneline').strip()
if None != output:
output.append(gitresult)
return len(gitresult) > 0
def get_commits_to_sync(self):
output = []
if not self.has_commits_to_sync(output):
return []
return output.pop().split('\n')
def num_commits_to_sync(self):
return len(self.get_commits_to_sync())
def has_untracked_files(self, output=None):
status = self.repo.git.status()
if None != output:
output.append(status)
return 'Untracked files:' in status
def get_untracked_files(self):
output = []
if not self.has_untracked_files(output):
return []
untf = output.pop().split('Untracked files:')[1][1:].split("\n")
return [x[1:] for x in untf if string.strip(x) != "" and x.startswith("\t")]
def num_untracked_files(self):
return len(self.get_untracked_files())
def sync(self, msg=None):
g = self.repo.git
if self.has_untracked_files():
g.add('.')
has_changes = self.is_dirty()
if has_changes:
g.stash()
try:
g.pull()
except:
pass
has_conflicts = False
if has_changes:
try:
g.stash('pop')
except:
has_conflicts = True
diff = g.stash('show', '-p')
with open(os.path.join(self.loc, 'conflict.diff'), 'w') as f:
f.write(diff)
g.stash('drop')
g.add('.')
if None == msg:
msg = 'juliabox ' + str(datetime.datetime.now(pytz.utc))
g.commit(m=msg)
if (self.num_commits_to_sync() > 0) and (not self.remote_url().startswith('https://')):
g.push('-u', self.remote_name(), self.branch_name())
return has_conflicts
def delete_branch(self, branch, local=True, remote=False, force=False):
g = self.repo.git
if local:
if force:
g.branch('-D', branch)
else:
g.branch('--delete', branch)
if remote:
g.push(self.remote_name(), ':' + branch)
def checkout(self, branch, from_remote=False):
if self.branch_name() == branch:
return
if from_remote:
if branch in self.local_branches():
self.delete_branch(branch, local=True, remote=False)
remote_branch_name = self.remote_name() + '/' + branch
self.repo.git.checkout(remote_branch_name, b=branch)
else:
if branch in self.local_branches():
self.repo.git.checkout(branch)
else:
self.repo.git.checkout(b=branch)
@staticmethod
def clone(src, loc, overwrite=False):
if overwrite and os.path.exists(loc):
shutil.rmtree(loc)
repo = git.Repo.clone_from(src, loc)
if repo is not None:
return GitSync(loc)
return None
@staticmethod
def scan_repo_paths(dirs):
repos = []
for d in dirs:
for pth in os.listdir(d):
if pth.startswith('.'):
continue
fpth = os.path.join(d, pth)
if os.path.isdir(fpth):
git_pth = os.path.join(fpth, '.git')
if os.path.isdir(git_pth):
repos.append(fpth)
return repos | mit | 1,095,560,687,942,013,400 | 29.25 | 103 | 0.534972 | false |
lupyuen/RaspberryPiImage | home/pi/GrovePi/Software/Python/others/grove_spdt_relay.py | 9 | 2326 | #!/usr/bin/env python
#
# GrovePi Example for using the Grove SPDT Relay(30A) (http://www.seeedstudio.com/wiki/Grove_-_SPDT_Relay(30A))
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
# NOTE:
# Relay is both normally open and normally closed.
# When the coil is energised, they will both flip.
# LED will illuminate when normally open is closed (and normally closed is open).
import time
import grovepi
# Connect the Grove SPDT Relay to digital port D4
# SIG,NC,VCC,GND
relay = 4
grovepi.pinMode(relay,"OUTPUT")
while True:
try:
# switch on for 5 seconds
grovepi.digitalWrite(relay,1)
print ("on")
time.sleep(5)
# switch off for 5 seconds
grovepi.digitalWrite(relay,0)
print ("off")
time.sleep(5)
except KeyboardInterrupt:
grovepi.digitalWrite(relay,0)
break
except IOError:
print ("Error")
| apache-2.0 | -101,050,306,656,767,900 | 33.716418 | 139 | 0.742046 | false |
soarpenguin/ansible | lib/ansible/modules/cloud/docker/docker_image_facts.py | 9 | 6502 | #!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: docker_image_facts
short_description: Inspect docker images
version_added: "2.1.0"
description:
- Provide one or more image names, and the module will inspect each, returning an array of inspection results.
options:
name:
description:
- An image name or a list of image names. Name format will be name[:tag] or repository/name[:tag], where tag is
optional. If a tag is not provided, 'latest' will be used.
default: null
required: true
extends_documentation_fragment:
- docker
requirements:
- "python >= 2.6"
- "docker-py >= 1.7.0"
- "Docker API >= 1.20"
author:
- Chris Houseknecht (@chouseknecht)
- James Tanner (@jctanner)
'''
EXAMPLES = '''
- name: Inspect a single image
docker_image_facts:
name: pacur/centos-7
- name: Inspect multiple images
docker_image_facts:
name:
- pacur/centos-7
- sinatra
'''
RETURN = '''
images:
description: Facts for the selected images.
returned: always
type: dict
sample: [
{
"Architecture": "amd64",
"Author": "",
"Comment": "",
"Config": {
"AttachStderr": false,
"AttachStdin": false,
"AttachStdout": false,
"Cmd": [
"/etc/docker/registry/config.yml"
],
"Domainname": "",
"Entrypoint": [
"/bin/registry"
],
"Env": [
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
],
"ExposedPorts": {
"5000/tcp": {}
},
"Hostname": "e5c68db50333",
"Image": "c72dce2618dc8f7b794d2b2c2b1e64e0205ead5befc294f8111da23bd6a2c799",
"Labels": {},
"OnBuild": [],
"OpenStdin": false,
"StdinOnce": false,
"Tty": false,
"User": "",
"Volumes": {
"/var/lib/registry": {}
},
"WorkingDir": ""
},
"Container": "e83a452b8fb89d78a25a6739457050131ca5c863629a47639530d9ad2008d610",
"ContainerConfig": {
"AttachStderr": false,
"AttachStdin": false,
"AttachStdout": false,
"Cmd": [
"/bin/sh",
"-c",
'#(nop) CMD ["/etc/docker/registry/config.yml"]'
],
"Domainname": "",
"Entrypoint": [
"/bin/registry"
],
"Env": [
"PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
],
"ExposedPorts": {
"5000/tcp": {}
},
"Hostname": "e5c68db50333",
"Image": "c72dce2618dc8f7b794d2b2c2b1e64e0205ead5befc294f8111da23bd6a2c799",
"Labels": {},
"OnBuild": [],
"OpenStdin": false,
"StdinOnce": false,
"Tty": false,
"User": "",
"Volumes": {
"/var/lib/registry": {}
},
"WorkingDir": ""
},
"Created": "2016-03-08T21:08:15.399680378Z",
"DockerVersion": "1.9.1",
"GraphDriver": {
"Data": null,
"Name": "aufs"
},
"Id": "53773d8552f07b730f3e19979e32499519807d67b344141d965463a950a66e08",
"Name": "registry:2",
"Os": "linux",
"Parent": "f0b1f729f784b755e7bf9c8c2e65d8a0a35a533769c2588f02895f6781ac0805",
"RepoDigests": [],
"RepoTags": [
"registry:2"
],
"Size": 0,
"VirtualSize": 165808884
}
]
'''
try:
from docker import utils
except ImportError:
# missing docker-py handled in docker_common
pass
from ansible.module_utils.docker_common import AnsibleDockerClient, DockerBaseClass
class ImageManager(DockerBaseClass):
def __init__(self, client, results):
super(ImageManager, self).__init__()
self.client = client
self.results = results
self.name = self.client.module.params.get('name')
self.log("Gathering facts for images: %s" % (str(self.name)))
if self.name:
self.results['images'] = self.get_facts()
else:
self.results['images'] = self.get_all_images()
def fail(self, msg):
self.client.fail(msg)
def get_facts(self):
'''
Lookup and inspect each image name found in the names parameter.
:returns array of image dictionaries
'''
results = []
names = self.name
if not isinstance(names, list):
names = [names]
for name in names:
repository, tag = utils.parse_repository_tag(name)
if not tag:
tag = 'latest'
self.log('Fetching image %s:%s' % (repository, tag))
image = self.client.find_image(name=repository, tag=tag)
if image:
results.append(image)
return results
def get_all_images(self):
results = []
images = self.client.images()
for image in images:
try:
inspection = self.client.inspect_image(image['Id'])
except Exception as exc:
self.fail("Error inspecting image %s - %s" % (image['Id'], str(exc)))
results.append(inspection)
return results
def main():
argument_spec = dict(
name=dict(type='list'),
)
client = AnsibleDockerClient(
argument_spec=argument_spec
)
results = dict(
changed=False,
images=[]
)
ImageManager(client, results)
client.module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 | -6,833,266,105,009,640,000 | 26.550847 | 117 | 0.496924 | false |
amakropoulos/nerve-segmentation | misc.py | 1 | 9591 | import scipy.misc
import numpy as np
import glob
import config_default as c
import pickle
import os.path
import re
import math
import img_augmentation as aug
import gzip
#import skimage
import cv2
params_dir = "params"
######################## LOAD/SAVE IMAGE METHODS ########################
def save_image(filename, img):
scipy.misc.imsave(filename, img.reshape(c.height, c.width))
def load_image(filename):
img = scipy.misc.imread(filename)/ np.float32(255)
return img.transpose(0,1).reshape(1, c.height, c.width)
# load all images/labels from the data_dir and randomly split them into train/val
# if only_names==True then only the names of the files are provided,
# otherwise the images are loaded as well
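# A usage sketch (assumes the layout train/<subject>_<n>.tif with matching
# train/<subject>_<n>_mask.tif masks; the call below is illustrative):
#     X_train, y_train, X_val, y_val = load_data(fold=1, num_folds=10, datadir='train')
# With only_names=True the four values are dicts of file names; with only_names=False
# they are float32 arrays of shape (N, 1, c.height, c.width).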
def load_data(fold = 1, num_folds = 10, seed = 1234, datadir = 'train', only_names = True, autoencoder = False, image_ext = '.tif'):
mask_suffix="_mask"
if autoencoder:
mask_suffix=""
mask_names = glob.glob(datadir+'/*'+mask_suffix+image_ext)
subjects = uniq([i.split("/")[1].split("_")[0] for i in mask_names])
np.random.seed(seed)
np.random.shuffle(subjects)
num_subjects = {}
# validation subjects
if num_folds <= 1:
num_subjects[0] = 0
else:
num_subjects[0] = math.ceil(1/num_folds*len(subjects))
# train subjects
num_subjects[1] = len(subjects) - num_subjects[0]
sind = num_subjects[0] * (fold-1)
lind = sind + num_subjects[0]
if lind > len(subjects):
sub = lind - len(subjects)
sind-=sub
lind-=sub
subjects = np.hstack([subjects[sind:lind], subjects[0:sind], subjects[lind:]]).tolist()
Xs = {}
ys = {}
for d in range(2):
d_num_subjects = num_subjects[d]
if d_num_subjects == 0:
Xs[d] = None
ys[d] = None
continue;
mask_names = [];
for i in range(d_num_subjects):
s = subjects.pop(0)
mask_names = mask_names + glob.glob(datadir+'/'+s+'_*'+mask_suffix+image_ext)
num_images = len(mask_names)
if d==1:
np.random.seed(seed)
np.random.shuffle(mask_names)
else:
sort_nicely(mask_names)
if only_names:
Xs[d] = {}
ys[d] = {}
ind=0
for mask_name in mask_names:
Xs[d][ind] = mask_name.replace(mask_suffix, "")
ys[d][ind] = mask_name
ind = ind + 1
else:
Xs[d] = np.zeros((num_images, 1, c.height, c.width), dtype='float32')
ys[d] = np.zeros((num_images, 1, c.height, c.width), dtype='float32')
ind=0
for mask_name in mask_names:
image_name = mask_name.replace(mask_suffix, "")
image = load_image(image_name)
mask = load_image(mask_name)
Xs[d][ind] = image
ys[d][ind] = mask
ind = ind + 1
return Xs[1], ys[1], Xs[0], ys[0]
######################## USEFUL METHODS ########################
def uniq(input):
output = []
for x in input:
if x not in output:
output.append(x)
return output
def tryint(s):
try:
return int(s)
except:
return s
def alphanum_key(s):
return [ tryint(c) for c in re.split('([0-9]+)', s) ]
def sort_nicely(l):
l.sort(key=alphanum_key)
######################## LOAD/SAVE RESULTS METHODS ########################
def get_fold_dir(version, fold=1, num_folds=10, seed=1234):
suffix ="/fold{}_{}_seed{}".format(fold,num_folds,seed)
return os.path.join(params_dir, '{}{}'.format(version, suffix))
# load config file
def load_config(version):
# load default config
global c
import config_default as c
# merge default and model config
model_config = params_dir+"/"+str(version)+"/config.py"
if os.path.exists(model_config):
import importlib
import sys
sys.path.append(os.path.dirname(model_config))
mname = os.path.splitext(os.path.basename(model_config))[0]
c2 = importlib.import_module(mname)
c.__dict__.update(c2.__dict__)
else:
import warnings
warnings.warn("using default parameters")
# params for augmentation
c.aug_params = {
'use': c.augment,
'non_elastic': c.non_elastic,
'zoom_range': (1/(1+c.scale), 1+c.scale),
'rotation_range': (-c.rotation, c.rotation),
'shear_range': (-c.shear, c.shear),
'translation_range': (-c.shift, c.shift),
'do_flip': c.flip,
'allow_stretch': c.stretch,
'elastic': c.elastic,
'elastic_warps_dir':c.elastic_warps_dir,
'alpha': c.alpha,
'sigma': c.sigma,
'autoencoder':c.autoencoder
}
return c
def resume(model, folddir):
epoch = -1
batch = 0
fn = folddir+'/checkpoint.pickle'
# print("resume "+fn)
if os.path.isfile(fn):
with open(fn, 'rb') as re:
[param_vals, epoch, batch] = pickle.load(re)
if model is not None:
import lasagne
lasagne.layers.set_all_param_values(model, param_vals)
else:
epoch = load_last_params(model, folddir) + 1
[mve, train_error, val_error, val_accuracy] = load_results(folddir)
return [epoch, batch, mve, train_error, val_error, val_accuracy]
def save_progress(model, epoch, batch, folddir):
fn = folddir+'/checkpoint.pickle'
# print("save_progress "+fn)
if not os.path.exists(os.path.dirname(fn)):
os.makedirs(os.path.dirname(fn))
import lasagne
param_vals = lasagne.layers.get_all_param_values(model)
stuff = [param_vals, epoch, batch]
with open(fn, 'wb') as wr:
pickle.dump(stuff, wr)
def load_results(folddir):
mve = None
train_error = {}
val_error = {}
val_accuracy = {}
fn = folddir+'/results.pickle'
# print("load_results "+fn)
if os.path.isfile(fn):
with open(fn, 'rb') as re:
[mve, train_error, val_error, val_accuracy] = pickle.load(re)
return [mve, train_error, val_error, val_accuracy]
def save_results(mve, train_error, val_error, val_accuracy, folddir):
fn = folddir+'/results.pickle'
# print("save_results "+fn)
if not os.path.exists(os.path.dirname(fn)):
os.makedirs(os.path.dirname(fn))
with open(fn, 'wb') as wr:
pickle.dump([mve, train_error, val_error, val_accuracy], wr)
def load_last_params(model, folddir, best=False):
fn = folddir+'/params_e*.npz'
if best:
fn = folddir+'/params_best_e*.npz'
param_names = glob.glob( fn )
if len(param_names) == 0:
return -1
sort_nicely(param_names)
paramfile = param_names[-1]
# print("load_last_params "+paramfile)
load_params(model, paramfile)
epoch = os.path.basename(paramfile).split("_e")[-1].split('.')[0]
return tryint(epoch)
def load_params(model, fn):
# print("load_params "+fn)
with np.load(fn) as f:
param_values = [f['arr_%d' % i] for i in range(len(f.files))]
import lasagne
lasagne.layers.set_all_param_values(model, param_values)
def save_params(model, epoch, folddir, best=False):
fn = folddir+'/params_e{}.npz'.format(epoch)
if best:
fn = folddir+'/params_best_e{}.npz'.format(epoch)
if not os.path.exists(os.path.dirname(fn)):
os.makedirs(os.path.dirname(fn))
import lasagne
# print("save_params "+fn)
param_vals = lasagne.layers.get_all_param_values(model)
np.savez(fn, *param_vals)
def completed(folddir):
fn = folddir+'/completed'
# print("completed "+fn)
if os.path.isfile(fn):
with open(fn, 'r') as rf:
print("best score: "+rf.readline()+'\n')
return True
return False
def finish(mve, folddir):
fn = folddir+'/completed'
# print("finish "+fn)
with open(fn, 'w') as wf:
wf.write(str(mve))
######################## OTHER STUFF NOT CURRENTLY NEEDED ########################
def run_length(img):
# img is binary mask image, shape (r,c)
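    # Worked example (tiny hypothetical mask): a flattened pixel sequence
    # [0, 1, 1, 0, 1] encodes as "2 2 5 1" -- (1-based start position, run length)
    # pairs for each run of ones. Note the image is flattened column-wise (order='F').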
bytes = img.reshape(c.height*c.width, order='F')
runs = [] ## list of run lengths
r = 0 ## the current run length
pos = 1 ## count starts from 1 per WK
for cb in bytes:
if ( cb == 0 ):
if r != 0:
runs.append((pos, r))
pos+=r
r=0
pos+=1
else:
r+=1
#if last run is unsaved (i.e. data ends with 1)
if r != 0:
runs.append((pos, r))
pos += r
r = 0
z = ''
for rr in runs:
z+='{} {} '.format(rr[0],rr[1])
return z[:-1]
# trainmean = None
# trainstd = None
# def load_image(filename, std=False):
# img = (scipy.misc.imread(filename)/ np.float32(255)).transpose(0,1).reshape(1, c.height, c.width)
# if std:
# global trainmean, trainstd
# if trainmean is None:
# with open('meanstd.pickle', 'rb') as re:
# [trainmean, trainstd] = pickle.load(re)
# img = (img - trainmean) / trainstd
# return img
# def preprocess(datadir='train'):
# mask_names = glob.glob(datadir+'/*_mask.tif')
# X = np.zeros((len(mask_names), 1, c.height, c.width), dtype='float32')
# i = 0
# for mask_name in mask_names:
# image_name = mask_name.replace("_mask", "")
# X[i] = load_image(filename)
# i += 1
# trainmean = np.mean(X)
# trainstd = np.std(X)
# with open('meanstd.pickle', 'wb') as wr:
# pickle.dump([trainmean, trainstd], wr)
| mit | -6,661,680,963,492,816,000 | 28.785714 | 132 | 0.563132 | false |
antonsotin/vkfeedtrue | vkfeed/tools/wall_parser.py | 16 | 20243 | # -*- coding: utf-8 -*-
'''Parses VKontakte wall pages.'''
from __future__ import unicode_literals
import cgi
import datetime
import logging
import re
import urllib
from vkfeed import constants
from vkfeed.core import Error
from vkfeed.tools.html_parser import HTMLPageParser
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
class ParseError(Error):
'''Raised if we are unable to parse a gotten data.'''
def __init__(self, *args, **kwargs):
Error.__init__(self, *args, **kwargs)
class PrivateGroupError(Error):
'''Raised if the provided page is a private group.'''
def __init__(self):
Error.__init__(self, 'This is a private group.')
class ProfileNotAvailableError(Error):
'''
Raised if the provided page indicates that the user's profile has been
deleted or is available only to authorized users.
'''
def __init__(self):
Error.__init__(self, "The user's profile page is not available.")
class ServerError(Error):
'''Raised if the provided page contains a user friendly server error.'''
def __init__(self, server_error):
Error.__init__(self, 'Server returned an error.')
self.server_error = server_error
class _StopParsing(Exception):
'''Raised to stop parsing process.'''
def __init__(self):
Exception.__init__(self, 'Parsing stopped.')
class WallPageParser(HTMLPageParser):
'''Parses a vk.com wall page.
Yeah, I know that it can be parsed more easily using a number of regular
expressions, but it was just fun for me to use Python's HTML parser.
Besides, parsing using an HTML parser is more steady for the site design
changes.
'''
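    # A minimal usage sketch (html is the raw text of a vk.com wall page, fetched by
    # the caller):
    #     parser = WallPageParser()
    #     data = parser.parse(html)
    #     # data -> {'user_name': ..., 'user_photo': ..., 'posts': [
    #     #             {'id': ..., 'url': ..., 'title': ..., 'text': ..., 'date': ...}, ...]}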
__data = None
'''The page data.'''
__private_data = None
'''Various state data.'''
__show_more_regex = re.compile(r'''
(<span''' + HTMLPageParser.tag_attrs_regex + r'''\s*>)
.+?
</span>
\s*
<a''' + HTMLPageParser.tag_attrs_regex + '''>[^<]+</a>
\s*
<span''' + HTMLPageParser.tag_attrs_regex + '''
\s+style\s*=\s*
(?:
"(?:[^"]*;)?\s*display\s*:\s*none\s*(?:;[^"]*)?"|
'(?:[^']*;)?\s*display\s*:\s*none\s*(?:;[^']*)?'
)
''' + HTMLPageParser.tag_attrs_regex + '''
\s*>
''', re.DOTALL | re.IGNORECASE | re.VERBOSE)
'''A regular expression for expanding a "Show more..." link.'''
__ignore_errors = True
'''Ignore insignificant errors.'''
def __init__(self, ignore_errors = True):
HTMLPageParser.__init__(self)
self.__ignore_errors = ignore_errors
def handle_root(self, tag, attrs, empty):
'''Handles a tag inside of the root of the document.'''
if tag['name'] == 'html':
tag['new_tag_handler'] = self.handle_root
elif tag['name'] == 'head':
tag['new_tag_handler'] = self.__handle_head
elif tag['name'] == 'body':
tag['new_tag_handler'] = self.__handle_body
def handle_root_data(self, tag, data):
'''Handles data inside of the root of the document.'''
def handle_root_end(self, tag):
'''Handles end of the root of the document.'''
def parse(self, html):
'''Parses the specified HTML.'''
try:
self.__data = {}
self.__private_data = {}
try:
HTMLPageParser.parse(self, html)
except _StopParsing:
pass
if 'user_name' not in self.__data:
raise ParseError('Unable to find the user name.')
if 'posts' not in self.__data:
raise ParseError('Unable to find the wall.')
if not self.__data['posts'] and not self.__private_data.get('wall_is_empty'):
raise ParseError('Unable to find wall posts.')
for post in self.__data['posts']:
if 'title' not in post:
LOG.error('Unable to find a title for post %s.', post['url'])
post['title'] = self.__data['user_name']
return self.__data
except ParseError:
# Try to understand why we haven't found the wall on the page
class_attr_regex_template = r'''
\s+class=(?:
{name}
|
'(?:[^']*\s+)?{name}(?:\s+[^']*)?'
|
"(?:[^"]*\s+)?{name}(?:\s+[^"]*)?"
)
'''
# It may be a private group
if re.search(r'''
<h1''' +
self.tag_attrs_regex + r'''
\s+id=(?:title|'title'|"title")''' +
self.tag_attrs_regex + ur'''
\s*>
\s*Закрытая\s+группа
''', html, re.IGNORECASE | re.VERBOSE):
raise PrivateGroupError()
# User's profile may be deleted
if re.search(r'''
<div''' +
self.tag_attrs_regex +
class_attr_regex_template.format(name = 'profile_deleted') +
self.tag_attrs_regex + r'''
\s*>
''', html, re.IGNORECASE | re.VERBOSE):
raise ProfileNotAvailableError()
# The server is on maintenance or returned a user friendly error -->
match = re.search(r'''
<title''' + self.tag_attrs_regex + ur'''\s*>
\s*Ошибка\s*
</title>
.*
<div''' +
self.tag_attrs_regex +
class_attr_regex_template.format(name = 'body') +
self.tag_attrs_regex + r'''
\s*>
(.*?)
</?div
''', html, re.VERBOSE | re.DOTALL | re.IGNORECASE)
if match:
raise ServerError(
re.sub('<[^>]*>', '', match.group(1)).replace('<', '').replace('>', '').strip())
# The server is on maintenance or returned a user friendly error <--
# Other errors
raise
def __handle_head(self, tag, attrs, empty):
'''Handles a tag inside of <head>.'''
if tag['name'] == 'title':
tag['data_handler'] = self.__handle_title_data
def __handle_title_data(self, tag, data):
'''Handles data inside of <title>.'''
data = data.strip()
if not data:
raise ParseError('The title is empty.')
self.__data['user_name'] = data
def __handle_body(self, tag, attrs, empty):
'''Handles a tag inside of <body>.'''
if tag['name'] == 'div' and attrs.get('id') in ('group_avatar', 'profile_avatar', 'public_avatar'):
tag['new_tag_handler'] = self.__handle_avatar
elif tag['name'] == 'div' and attrs.get('id') == 'page_wall_posts':
tag['new_tag_handler'] = self.__handle_page_wall_posts
self.__data['posts'] = []
else:
if 'posts' in self.__data and 'user_photo' in self.__data:
# We've found all data we need, so stop parsing to save a
# little CPU.
raise _StopParsing()
tag['new_tag_handler'] = self.__handle_body
def __handle_avatar(self, tag, attrs, empty):
'''Handles a tag inside of <div id="profile_avatar|public_avatar".'''
if tag['name'] == 'img' and 'src' in attrs:
self.__data['user_photo'] = attrs['src']
elif 'user_photo' not in self.__data:
tag['new_tag_handler'] = self.__handle_avatar
def __handle_page_wall_posts(self, tag, attrs, empty):
'''Handles a tag inside of <div id="page_wall_posts">.'''
if (
tag['name'] == 'div' and
attrs.get('id', '').startswith('post') and
len(attrs['id']) > len('post') and
self.__has_class(attrs, 'post')
):
if empty:
raise ParseError('Post "{0}" div tag is empty.', attrs['id'])
self.__add_post( attrs['id'][len('post'):] )
tag['new_tag_handler'] = self.__handle_post
tag['end_tag_handler'] = self.__handle_post_end
elif tag['name'] == 'div' and attrs.get('id') == 'page_no_wall':
self.__private_data['wall_is_empty'] = True
else:
tag['new_tag_handler'] = self.__handle_page_wall_posts
def __handle_post(self, tag, attrs, empty):
'''Handles a tag inside of <div id="post...">.'''
if tag['name'] == 'div' and self.__has_class(attrs, 'post_table'):
tag['new_tag_handler'] = self.__handle_post_table
else:
if not self.__get_cur_post()['text']:
tag['new_tag_handler'] = self.__handle_post
def __handle_post_table(self, tag, attrs, empty):
'''Handles a tag inside of <div class="post_table">.'''
if tag['name'] == 'div' and self.__has_class(attrs, 'post_info'):
tag['new_tag_handler'] = self.__handle_post_table_info
def __handle_post_table_info(self, tag, attrs, empty):
'''Handles a tag inside of <div class="post_table"><div class="post_info">.'''
if tag['name'] == 'div' and self.__has_class(attrs, 'text', 'wall_text'):
tag['new_tag_handler'] = self.__handle_post_text
elif tag['name'] == 'div' and self.__has_class(attrs, 'replies'):
tag['new_tag_handler'] = self.__handle_post_replies
else:
tag['new_tag_handler'] = self.__handle_post_table_info
def __handle_post_text(self, tag, attrs, empty):
'''Handles a tag inside of <div class="post_table"><div class="post_info"><div class="wall_text">.'''
if tag['name'] == 'a' and self.__has_class(attrs, 'author'):
tag['data_handler'] = self.__handle_post_author
elif tag['name'] == 'div' or tag['name'] == 'span' and attrs.get('class') == 'explain':
self.__handle_post_data_container(tag, attrs, empty)
def __handle_post_author(self, tag, data):
'''Handles data inside of a post author tag.'''
data = data.strip()
if data:
self.__get_cur_post()['title'] = data
def __handle_post_data_container(self, tag, attrs, empty):
'''Handles a tag inside of post data tag.'''
if tag['name'] == 'div' and self.__has_class(attrs, 'page_post_queue_narrow'):
pass # Ignore image thumbnails
else:
stripped_tag = self.__strip_tag(tag['name'], attrs, empty)
if stripped_tag:
def end_tag_handler(tag):
self.__get_cur_post()['text'] += stripped_tag[1]
self.__get_cur_post()['text'] += stripped_tag[0]
tag['new_tag_handler'] = self.__handle_post_data_container
tag['data_handler'] = self.__handle_post_data
if empty:
end_tag_handler(tag)
else:
tag['end_tag_handler'] = end_tag_handler
def __handle_post_data(self, tag, data):
'''Handles data inside of post data tag.'''
self.__get_cur_post()['text'] += data
def __handle_post_replies(self, tag, attrs, empty):
'''Handles a tag inside of <div class="post_table"><div class="post_info"><div class="replies">.'''
if tag['name'] == 'div' and self.__has_class(attrs, 'reply_link_wrap'):
tag['new_tag_handler'] = self.__handle_post_link
else:
tag['new_tag_handler'] = self.__handle_post_replies
def __handle_post_link(self, tag, attrs, empty):
'''Handles a tag inside of <div class="post_table"><div class="post_info"><div class="replies"><div class="reply_link_wrap">.'''
if tag['name'] == 'span' and self.__has_class(attrs, 'rel_date'):
tag['data_handler'] = self.__handle_post_date
else:
tag['new_tag_handler'] = self.__handle_post_link
def __handle_post_date(self, tag, data):
'''Handles data inside of post replies tag.'''
replacements = (
( 'jan.', '1' ),
( 'feb.', '2' ),
( 'mar.', '3' ),
( 'apr.', '4' ),
( 'may', '5' ),
( 'jun.', '6' ),
( 'jul.', '7' ),
( 'aug.', '8' ),
( 'sep.', '9' ),
( 'oct.', '10' ),
( 'nov.', '11' ),
( 'dec.', '12' ),
( 'янв', '1' ),
( 'фев', '2' ),
( 'мар', '3' ),
( 'апр', '4' ),
( 'мая', '5' ),
( 'июн', '6' ),
( 'июл', '7' ),
( 'авг', '8' ),
( 'сен', '9' ),
( 'окт', '10' ),
( 'ноя', '11' ),
( 'дек', '12' ),
( 'два', '2' ),
( 'две', '2' ),
( 'три', '3' ),
( 'четыре', '4' ),
( 'пять', '5' ),
( 'шесть', '6' ),
( 'семь', '7' ),
( 'восемь', '8' ),
( 'девять', '9' ),
( 'десять', '10' ),
( 'two', '2' ),
( 'three', '3' ),
( 'four', '4' ),
( 'five', '5' ),
( 'six', '6' ),
( 'seven', '7' ),
( 'eight', '8' ),
( 'nine', '9' ),
( 'ten', '10' ),
( 'вчера', 'yesterday' ),
( 'сегодня', 'today' ),
( ' в ', ' at ' )
)
date_string = data.strip().lower()
is_pm = date_string.endswith(' pm')
if date_string.endswith(' am') or date_string.endswith(' pm'):
date_string = date_string[:-3]
tz_delta = datetime.timedelta(hours = 4) # MSK timezone
today = datetime.datetime.utcnow() + tz_delta
for token, replacement in replacements:
date_string = date_string.replace(token, replacement)
try:
match = re.match(ur'(\d+ ){0,1}([^ ]+) (?:назад|ago)', date_string)
if match:
value = match.group(1)
if value:
value = int(value.strip())
else:
value = 1
unit = match.group(2)
if unit in ('секунд', 'секунду', 'секунды', 'second', 'seconds'):
date = today - datetime.timedelta(seconds = value)
elif unit in ('минут', 'минуту', 'минуты', 'minute', 'minutes'):
date = today - datetime.timedelta(minutes = value)
elif unit in ('час', 'часа', 'часов', 'hour', 'hours'):
date = today - datetime.timedelta(hours = value)
elif unit in ('день', 'дня', 'дней', 'day', 'days'):
date = today - datetime.timedelta(days = value)
elif unit in ('неделю', 'недели', 'недель', 'week', 'weeks'):
date = today - datetime.timedelta(weeks = value)
else:
raise Error('Invalid time dimension: {0}.', unit)
else:
try:
date = datetime.datetime.strptime(date_string, 'today at %H:%M')
date = datetime.datetime.combine(today, date.time())
except ValueError:
try:
date = datetime.datetime.strptime(date_string, 'yesterday at %H:%M')
date = datetime.datetime.combine(today - datetime.timedelta(days = 1), date.time())
except ValueError:
try:
date = datetime.datetime.strptime('{0} {1}'.format(today.year, date_string), '%Y %d %m at %H:%M')
except ValueError:
date = datetime.datetime.strptime(date_string, '%d %m %Y')
date += tz_delta
if is_pm:
date += datetime.timedelta(hours = 12)
date -= tz_delta
if date - datetime.timedelta(minutes = 1) > today:
if date - datetime.timedelta(days = 1) <= today:
date -= datetime.timedelta(days = 1)
else:
last_year_date = datetime.datetime(date.year - 1, date.month, date.day, date.hour, date.minute, date.second, date.microsecond, date.tzinfo)
if last_year_date <= today:
date = last_year_date
self.__get_cur_post()['date'] = date
except Exception as e:
if self.__ignore_errors:
LOG.exception('Failed to parse date %s.', data)
else:
raise e
def __handle_post_end(self, tag):
'''Handles end of <div id="post...">.'''
cur_post = self.__get_cur_post()
# Expanding the post contents
cur_post['text'] = self.__show_more_regex.sub(r'\1', cur_post['text'])
# Cut off video counters which are broken due to stripping class
# attributes
cur_post['text'] = re.sub(r'<div>\s*<span>(?:\d+:)?\d+:\d+</span>\s*</div>', '', cur_post['text'])
cur_post['text'] = cur_post['text'].strip()
def __add_post(self, post_id):
'''Adds a new post to the wall.'''
self.__data['posts'].append({
'id': post_id,
'url': constants.VK_URL + 'wall' + post_id,
'text': '',
})
def __get_cur_post(self):
'''Returns current post.'''
return self.__data['posts'][-1]
def __has_class(self, attrs, *class_names):
'''
Checks whether a tag with the specified attributes has at least one of
the specified classes.
'''
tag_classes = set(attrs.get('class', '').strip().split(' '))
return bool(tag_classes.intersection(set(class_names)))
def __strip_tag(self, tag_name, attrs, empty):
'''
Returns a tuple of strings where the first is the specified tag which
have only attributes allowed for RSS and the second is a tag close
string. Returns None if the tag is not allowed in RSS at all.
Produces only a lazy check. All other work is left for RSS reader.
'''
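        # Illustrative example (the exact URL prefix depends on constants.VK_URL):
        #     self.__strip_tag('a', {'href': '/wall1_2', 'class': 'post_link'}, False)
        # would return something like ('<a href="<VK_URL>wall1_2">', '</a>'): the class
        # attribute is dropped and the relative href is made absolute.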
if tag_name in ('body', 'head', 'html', 'script'):
# Reject this tags
return
tag_data = '<' + tag_name
# Stripping the tag attributes -->
for attr, value in attrs.iteritems():
if (
tag_name == 'img' and attr == 'src' or
tag_name == 'a' and attr == 'href'
):
if not re.match('[a-z]+://', value):
value = constants.VK_URL + value[int(value.startswith('/')):]
away_to_prefix = constants.VK_URL + 'away.php?to='
if value.startswith(away_to_prefix):
value = urllib.unquote(value.split('&')[0][len(away_to_prefix):])
elif tag_name == 'a' and attr == 'onclick' and not attrs.get('href', '').strip():
if value.startswith('playAudio'):
# Ignore this link and all its contents - we'll try to get
# the link from other tags.
return
# Trying to obtain a link from the JavaScript handler
match = re.search('"(http://[^"]+)"' + '|' "'(http://[^']+)'", value)
if match:
attr = 'href'
value = match.group(1)
else:
continue
elif attr in ('id', 'class') or attr.startswith('on'):
continue
tag_data += ' {0}="{1}"'.format(attr, cgi.escape(value, quote = True))
# Stripping the tag attributes <--
return (
tag_data + (' />' if empty else '>'),
'' if empty else '</{0}>'.format(tag_name)
)
| bsd-2-clause | 3,941,412,935,544,758,000 | 32.802698 | 159 | 0.483462 | false |
benob/icsisumm | icsisumm-primary-sys34_v1/nltk/nltk-0.9.2/nltk/wordnet/cache.py | 9 | 5308 | # Natural Language Toolkit: Wordnet Interface: Cache Module
#
# Copyright (C) 2001-2008 University of Pennsylvania
# Author: Oliver Steele <[email protected]>
# David Ormiston Smith <[email protected]>>
# Steven Bird <[email protected]>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
# Some kind of cache is necessary since Sense -> Synset references are
# stored by key, and it's nice not to have to cons a new copy of a
# Synset that's been paged in each time a Sense's synset is retrieved.
# Ideally, we'd use a weak dict, but there aren't any. A strong dict
# reintroduces the problem that eliminating the Sense <-> Synset
# circularity was intended to resolve: every entity ever seen is
# preserved forever, making operations that iterate over the entire
# database prohibitive.
#
# The LRUCache approximates a weak dict in the case where temporal
# locality is good.
from util import *
DEFAULT_CACHE_CAPACITY = 1000
class _LRUCache:
"""
A cache of values such that least recently used element is flushed when
the cache fills.
This lets us retrieve the key given the timestamp, and the
timestamp given the key. (Also the value given either one.)
That's necessary so that we can reorder the history given a key,
and also manipulate the values dict given a timestamp.
I haven't tried changing history to a List. An earlier
implementation of history as a List was slower than what's here,
but the two implementations aren't directly comparable.
    @type values: C{dict}
    @param values: A dict from key -> (value, timestamp)
    @type history: C{dict}
    @param history: A dict from timestamp -> key
    @type nextTimestamp: C{int}
    @param nextTimestamp: Timestamp to use with the next value that's added.
    @type oldestTimestamp: C{int}
    @param oldestTimestamp: Timestamp of the oldest element (the next one to
        remove), or slightly lower than that.
"""
def __init__(self, capacity):
"""
Initialize a new cache
@type capacity: int
@param capacity: Size of the cache (number of Sense -> Synset mappings)
"""
self.capacity = capacity
self.clear()
def clear(self):
"""
Flush the cache
"""
self.values = {}
self.history = {}
self.oldestTimestamp = 0
self.nextTimestamp = 1
def removeOldestEntry(self):
"""
Remove the oldest entry from the cache.
"""
while self.oldestTimestamp < self.nextTimestamp:
if self.history.get(self.oldestTimestamp):
key = self.history[self.oldestTimestamp]
del self.history[self.oldestTimestamp]
del self.values[key]
return
self.oldestTimestamp = self.oldestTimestamp + 1
def setCapacity(self, capacity):
"""
Set the capacity of the cache.
@type capacity: int
@param capacity: new size of the cache
"""
if capacity == 0: self.clear()
else:
self.capacity = capacity
while len(self.values) > self.capacity:
self.removeOldestEntry()
def get(self, key, loadfn=None):
"""
Get an item from the cache.
@type key: unknown
@param key: identifier of a cache entry
@type loadfn: function reference
@param loadfn: a function used to load the cached entry
@return: a cached item
"""
value = None
# Look up the cache
if self.values:
try:
value, timestamp = self.values.get(key)
del self.history[timestamp]
except KeyError:
value = None
# Load the value if it wasn't cached
if value == None:
value = loadfn and loadfn()
# Cache the value we loaded
if self.values:
timestamp = self.nextTimestamp
self.nextTimestamp = self.nextTimestamp + 1
self.values[key] = (value, timestamp)
self.history[timestamp] = key
if len(self.values) > self.capacity:
self.removeOldestEntry()
return value
class _NullCache:
"""
A NullCache implements the Cache interface (the interface that
LRUCache implements), but doesn't store any values.
"""
def clear():
pass
def get(self, key, loadfn=None):
return loadfn and loadfn()
def disableCache():
    """Disable the entity cache."""
    global entityCache
    entityCache = _NullCache()
def enableCache():
    """Enable the entity cache."""
    global entityCache
    if not isinstance(entityCache, _LRUCache):
        entityCache = _LRUCache(DEFAULT_CACHE_CAPACITY)
def clearCache():
"""Clear the entity cache."""
entityCache.clear()
def setCacheCapacity(capacity=DEFAULT_CACHE_CAPACITY):
"""
Set the capacity of the entity cache.
@type capacity: int
@param capacity: new size of the cache.
"""
enableCache()
entityCache.setCapacity(capacity)
def buildIndexFiles():
for dict in Dictionaries:
dict._buildIndexCacheFile()
# Create a default cache
entityCache = _LRUCache(DEFAULT_CACHE_CAPACITY)
| gpl-3.0 | 7,920,866,844,520,414,000 | 28.325967 | 79 | 0.626225 | false |
DarkLuk42/hn-ias-race | app/resources/race_evaluation.py | 1 | 3008 | # coding: utf-8
from copy import deepcopy
from app.resource import Resource
from app.validator import Validator
class RaceEvaluation(Resource):
exposed = True
fields = {
"time_s": int
}
def __init__(self, application):
self.filename = "race_evaluation"
super().__init__(application)
def sortfunction(self, resource):
return resource["time_s"]
def GET(self, race_id=None, vehicle_id=None, station=None, **data):
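        # Dispatch sketch (URL shapes are illustrative; actual mounting is up to the
        # application):
        #     GET .../race_evaluation           -> all entries, sorted by time_s
        #     GET .../race_evaluation/2         -> entries for race_id 2
        #     GET .../race_evaluation/2/5       -> entries for race 2, vehicle_id 5
        #     GET .../race_evaluation/2/5/1     -> the single entry for station 1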
if station is None:
if vehicle_id is None:
if race_id is None:
return self.response(self.sort(self.findall(data)))
race_id = Validator.require_int(race_id)
data["race_id"] = race_id
return self.response(self.sort(self.findall(data)))
            race_id = Validator.require_int(race_id)
            vehicle_id = Validator.require_int(vehicle_id)
            data["race_id"] = race_id
            data["vehicle_id"] = vehicle_id
            return self.response(self.sort(self.findall(data)))
        race_id = Validator.require_int(race_id)
        vehicle_id = Validator.require_int(vehicle_id)
        station = Validator.require_int(station)
        resource = self.find({"race_id": race_id, "vehicle_id": vehicle_id, "station": station})
return self.response(resource)
def PUT(self, race_id, vehicle_id, station, **data):
race_id = Validator.require_int(race_id)
vehicle_id = Validator.require_int(vehicle_id)
station = Validator.require_int(station)
resource = self.find({"race_id": race_id, "vehicle_id": vehicle_id, "station": station})
if resource is None:
resource = {"race_id": race_id, "vehicle_id": vehicle_id, "station": station}
data = Validator.validate(data, self.__class__.fields, self.__class__.defaults, require_all=True)
self.resources.append(resource)
else:
data = Validator.validate(data, self.__class__.fields, self.__class__.defaults, require_all=False)
for field in data:
resource[field] = data[field]
self.save()
return self.response(resource)
def DELETE(self, race_id, vehicle_id, station, **data):
race_id = Validator.require_int(race_id)
vehicle_id = Validator.require_int(vehicle_id)
station = Validator.require_int(station)
filter = {"race_id": race_id, "vehicle_id": vehicle_id, "station": station}
resource = self.find(filter)
if resource is not None:
response = deepcopy(resource)
self.remove(filter)
return self.response(response)
Validator.fail_found()
def get_correct_station_order(self, resource):
position = 0
resources = self.sort(self.findall({"race_id": resource["race_id"], "vehicle_id": resource["vehicle_id"]}))
for cmp_res in resources:
if cmp_res["time_s"] < resource["time_s"]:
position += 1
return position == resource["station"]
def prepare_response(self, resource):
resource["correct_station_order"] = self.get_correct_station_order(resource)
# EOF
| gpl-2.0 | -976,043,208,546,712,700 | 36.6 | 115 | 0.605718 | false |