repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated |
---|---|---|---|---|---|---|---|---|---|---|
hexlism/xx_net | python27/1.0/lib/zipfile.py | 11 | 59383 | """
Read and write ZIP files.
"""
import struct, os, time, sys, shutil
import binascii, cStringIO, stat
import io
import re
import string
try:
import zlib # We may need its compression method
crc32 = zlib.crc32
except ImportError:
zlib = None
crc32 = binascii.crc32
__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile",
"ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ]
class BadZipfile(Exception):
pass
class LargeZipFile(Exception):
"""
Raised when writing a zipfile that requires ZIP64 extensions
and those extensions are disabled.
"""
error = BadZipfile # The exception raised by this module
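# Limits of the classic ZIP format: sizes and offsets above ZIP64_LIMIT need ZIP64
# extensions; entry count and comment length are capped at 16-bit maxima.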
ZIP64_LIMIT = (1 << 31) - 1
ZIP_FILECOUNT_LIMIT = (1 << 16) - 1
ZIP_MAX_COMMENT = (1 << 16) - 1
# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
# Other ZIP compression methods not supported
# Below are some formats and associated data for reading/writing headers using
# the struct module. The names and structures of headers/records are those used
# in the PKWARE description of the ZIP file format:
# http://www.pkware.com/documents/casestudies/APPNOTE.TXT
# (URL valid as of January 2008)
# The "end of central directory" structure, magic number, size, and indices
# (section V.I in the format document)
structEndArchive = "<4s4H2LH"
stringEndArchive = "PK\005\006"
sizeEndCentDir = struct.calcsize(structEndArchive)
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
# These last two indices are not part of the structure as defined in the
# spec, but they are used internally by this module as a convenience
_ECD_COMMENT = 8
_ECD_LOCATION = 9
# The "central directory" structure, magic number, size, and indices
# of entries in the structure (section V.F in the format document)
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = "PK\001\002"
sizeCentralDir = struct.calcsize(structCentralDir)
# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18
# The "local file header" structure, magic number, size, and indices
# (section V.A in the format document)
structFileHeader = "<4s2B4HL2L2H"
stringFileHeader = "PK\003\004"
sizeFileHeader = struct.calcsize(structFileHeader)
_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11
# The "Zip64 end of central directory locator" structure, magic number, and size
structEndArchive64Locator = "<4sLQL"
stringEndArchive64Locator = "PK\x06\x07"
sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)
# The "Zip64 end of central directory" record, magic number, size, and indices
# (section V.G in the format document)
structEndArchive64 = "<4sQ2H2L4Q"
stringEndArchive64 = "PK\x06\x06"
sizeEndCentDir64 = struct.calcsize(structEndArchive64)
_CD64_SIGNATURE = 0
_CD64_DIRECTORY_RECSIZE = 1
_CD64_CREATE_VERSION = 2
_CD64_EXTRACT_VERSION = 3
_CD64_DISK_NUMBER = 4
_CD64_DISK_NUMBER_START = 5
_CD64_NUMBER_ENTRIES_THIS_DISK = 6
_CD64_NUMBER_ENTRIES_TOTAL = 7
_CD64_DIRECTORY_SIZE = 8
_CD64_OFFSET_START_CENTDIR = 9
def _check_zipfile(fp):
try:
if _EndRecData(fp):
return True # file has correct magic number
except IOError:
pass
return False
def is_zipfile(filename):
"""Quickly see if a file is a ZIP file by checking the magic number.
The filename argument may be a file or file-like object too.
"""
result = False
try:
if hasattr(filename, "read"):
result = _check_zipfile(fp=filename)
else:
with open(filename, "rb") as fp:
result = _check_zipfile(fp)
except IOError:
pass
return result
def _EndRecData64(fpin, offset, endrec):
"""
Read the ZIP64 end-of-archive records and use that to update endrec
"""
try:
fpin.seek(offset - sizeEndCentDir64Locator, 2)
except IOError:
# If the seek fails, the file is not large enough to contain a ZIP64
# end-of-archive record, so just return the end record we were given.
return endrec
data = fpin.read(sizeEndCentDir64Locator)
if len(data) != sizeEndCentDir64Locator:
return endrec
sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
if sig != stringEndArchive64Locator:
return endrec
if diskno != 0 or disks != 1:
raise BadZipfile("zipfiles that span multiple disks are not supported")
# Assume no 'zip64 extensible data'
fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
data = fpin.read(sizeEndCentDir64)
if len(data) != sizeEndCentDir64:
return endrec
sig, sz, create_version, read_version, disk_num, disk_dir, \
dircount, dircount2, dirsize, diroffset = \
struct.unpack(structEndArchive64, data)
if sig != stringEndArchive64:
return endrec
# Update the original endrec using data from the ZIP64 record
endrec[_ECD_SIGNATURE] = sig
endrec[_ECD_DISK_NUMBER] = disk_num
endrec[_ECD_DISK_START] = disk_dir
endrec[_ECD_ENTRIES_THIS_DISK] = dircount
endrec[_ECD_ENTRIES_TOTAL] = dircount2
endrec[_ECD_SIZE] = dirsize
endrec[_ECD_OFFSET] = diroffset
return endrec
def _EndRecData(fpin):
"""Return data from the "End of Central Directory" record, or None.
The data is a list of the nine items in the ZIP "End of central dir"
record followed by a tenth item, the file seek offset of this record."""
# Determine file size
fpin.seek(0, 2)
filesize = fpin.tell()
# Check to see if this is ZIP file with no archive comment (the
# "end of central directory" structure should be the last item in the
# file if this is the case).
try:
fpin.seek(-sizeEndCentDir, 2)
except IOError:
return None
data = fpin.read()
if (len(data) == sizeEndCentDir and
data[0:4] == stringEndArchive and
data[-2:] == b"\000\000"):
# the signature is correct and there's no comment, unpack structure
endrec = struct.unpack(structEndArchive, data)
endrec=list(endrec)
# Append a blank comment and record start offset
endrec.append("")
endrec.append(filesize - sizeEndCentDir)
# Try to read the "Zip64 end of central directory" structure
return _EndRecData64(fpin, -sizeEndCentDir, endrec)
# Either this is not a ZIP file, or it is a ZIP file with an archive
# comment. Search the end of the file for the "end of central directory"
# record signature. The comment is the last item in the ZIP file and may be
# up to 64K long. It is assumed that the "end of central directory" magic
# number does not appear in the comment.
maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
fpin.seek(maxCommentStart, 0)
data = fpin.read()
start = data.rfind(stringEndArchive)
if start >= 0:
# found the magic number; attempt to unpack and interpret
recData = data[start:start+sizeEndCentDir]
if len(recData) != sizeEndCentDir:
# Zip file is corrupted.
return None
endrec = list(struct.unpack(structEndArchive, recData))
commentSize = endrec[_ECD_COMMENT_SIZE] #as claimed by the zip file
comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]
endrec.append(comment)
endrec.append(maxCommentStart + start)
# Try to read the "Zip64 end of central directory" structure
return _EndRecData64(fpin, maxCommentStart + start - filesize,
endrec)
# Unable to find a valid end of central directory structure
return None
class ZipInfo (object):
"""Class with attributes describing each file in the ZIP archive."""
__slots__ = (
'orig_filename',
'filename',
'date_time',
'compress_type',
'comment',
'extra',
'create_system',
'create_version',
'extract_version',
'reserved',
'flag_bits',
'volume',
'internal_attr',
'external_attr',
'header_offset',
'CRC',
'compress_size',
'file_size',
'_raw_time',
)
def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
self.orig_filename = filename # Original file name in archive
# Terminate the file name at the first null byte. Null bytes in file
# names are used as tricks by viruses in archives.
null_byte = filename.find(chr(0))
if null_byte >= 0:
filename = filename[0:null_byte]
# This is used to ensure paths in generated ZIP files always use
# forward slashes as the directory separator, as required by the
# ZIP format specification.
if os.sep != "/" and os.sep in filename:
filename = filename.replace(os.sep, "/")
self.filename = filename # Normalized file name
self.date_time = date_time # year, month, day, hour, min, sec
if date_time[0] < 1980:
raise ValueError('ZIP does not support timestamps before 1980')
# Standard values:
self.compress_type = ZIP_STORED # Type of compression for the file
self.comment = "" # Comment for each file
self.extra = "" # ZIP extra data
if sys.platform == 'win32':
self.create_system = 0 # System which created ZIP archive
else:
# Assume everything else is unix-y
self.create_system = 3 # System which created ZIP archive
self.create_version = 20 # Version which created ZIP archive
self.extract_version = 20 # Version needed to extract archive
self.reserved = 0 # Must be zero
self.flag_bits = 0 # ZIP flag bits
self.volume = 0 # Volume number of file header
self.internal_attr = 0 # Internal attributes
self.external_attr = 0 # External file attributes
# Other attributes are set by class ZipFile:
# header_offset Byte offset to the file header
# CRC CRC-32 of the uncompressed file
# compress_size Size of the compressed file
# file_size Size of the uncompressed file
def FileHeader(self, zip64=None):
"""Return the per-file header as a string."""
dt = self.date_time
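# Pack the timestamp into MS-DOS format: date = (year-1980)<<9 | month<<5 | day,
# time = hour<<11 | minute<<5 | second//2 (2-second resolution).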
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
if self.flag_bits & 0x08:
# Set these to zero because we write them after the file data
CRC = compress_size = file_size = 0
else:
CRC = self.CRC
compress_size = self.compress_size
file_size = self.file_size
extra = self.extra
if zip64 is None:
zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
if zip64:
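# Append the ZIP64 extra field: header ID 0x0001 followed by a 16-byte payload
# holding the 64-bit uncompressed and compressed sizes.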
fmt = '<HHQQ'
extra = extra + struct.pack(fmt,
1, struct.calcsize(fmt)-4, file_size, compress_size)
if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
if not zip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
# File is larger than what fits into a 4 byte integer,
# fall back to the ZIP64 extension
file_size = 0xffffffff
compress_size = 0xffffffff
self.extract_version = max(45, self.extract_version)
self.create_version = max(45, self.extract_version)
filename, flag_bits = self._encodeFilenameFlags()
header = struct.pack(structFileHeader, stringFileHeader,
self.extract_version, self.reserved, flag_bits,
self.compress_type, dostime, dosdate, CRC,
compress_size, file_size,
len(filename), len(extra))
return header + filename + extra
def _encodeFilenameFlags(self):
if isinstance(self.filename, unicode):
try:
return self.filename.encode('ascii'), self.flag_bits
except UnicodeEncodeError:
return self.filename.encode('utf-8'), self.flag_bits | 0x800
else:
return self.filename, self.flag_bits
def _decodeFilename(self):
if self.flag_bits & 0x800:
return self.filename.decode('utf-8')
else:
return self.filename
def _decodeExtra(self):
# Try to decode the extra field.
extra = self.extra
unpack = struct.unpack
while len(extra) >= 4:
tp, ln = unpack('<HH', extra[:4])
if tp == 1:
if ln >= 24:
counts = unpack('<QQQ', extra[4:28])
elif ln == 16:
counts = unpack('<QQ', extra[4:20])
elif ln == 8:
counts = unpack('<Q', extra[4:12])
elif ln == 0:
counts = ()
else:
raise RuntimeError, "Corrupt extra field %s"%(ln,)
idx = 0
# ZIP64 extension (large files and/or large archives)
if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
self.file_size = counts[idx]
idx += 1
if self.compress_size == 0xFFFFFFFFL:
self.compress_size = counts[idx]
idx += 1
if self.header_offset == 0xffffffffL:
old = self.header_offset
self.header_offset = counts[idx]
idx+=1
extra = extra[ln+4:]
class _ZipDecrypter:
"""Class to handle decryption of files stored within a ZIP archive.
ZIP supports a password-based form of encryption. Even though known
plaintext attacks have been found against it, it is still useful
to be able to get data out of such a file.
Usage:
zd = _ZipDecrypter(mypwd)
plain_char = zd(cypher_char)
plain_text = map(zd, cypher_text)
"""
def _GenerateCRCTable():
"""Generate a CRC-32 table.
ZIP encryption uses the CRC32 one-byte primitive for scrambling some
internal keys. We noticed that a direct implementation is faster than
relying on binascii.crc32().
"""
poly = 0xedb88320
table = [0] * 256
for i in range(256):
crc = i
for j in range(8):
if crc & 1:
crc = ((crc >> 1) & 0x7FFFFFFF) ^ poly
else:
crc = ((crc >> 1) & 0x7FFFFFFF)
table[i] = crc
return table
crctable = _GenerateCRCTable()
def _crc32(self, ch, crc):
"""Compute the CRC32 primitive on one byte."""
return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ord(ch)) & 0xff]
def __init__(self, pwd):
self.key0 = 305419896
self.key1 = 591751049
self.key2 = 878082192
for p in pwd:
self._UpdateKeys(p)
def _UpdateKeys(self, c):
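# PKWARE key schedule: CRC-32 feedback into key0 and key2, with a linear
# congruential step (constant 134775813) advancing key1.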
self.key0 = self._crc32(c, self.key0)
self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295
self.key1 = (self.key1 * 134775813 + 1) & 4294967295
self.key2 = self._crc32(chr((self.key1 >> 24) & 255), self.key2)
def __call__(self, c):
"""Decrypt a single character."""
c = ord(c)
k = self.key2 | 2
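# ((k * (k^1)) >> 8) & 255 is the keystream byte defined by the traditional
# PKWARE ("ZipCrypto") cipher.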
c = c ^ (((k * (k^1)) >> 8) & 255)
c = chr(c)
self._UpdateKeys(c)
return c
compressor_names = {
0: 'store',
1: 'shrink',
2: 'reduce',
3: 'reduce',
4: 'reduce',
5: 'reduce',
6: 'implode',
7: 'tokenize',
8: 'deflate',
9: 'deflate64',
10: 'implode',
12: 'bzip2',
14: 'lzma',
18: 'terse',
19: 'lz77',
97: 'wavpack',
98: 'ppmd',
}
class _SharedFile:
def __init__(self, file, pos, close):
self._file = file
self._pos = pos
self._close = close
def read(self, n=-1):
self._file.seek(self._pos)
data = self._file.read(n)
self._pos = self._file.tell()
return data
def close(self):
if self._file is not None:
fileobj = self._file
self._file = None
self._close(fileobj)
class ZipExtFile(io.BufferedIOBase):
"""File-like object for reading an archive member.
Is returned by ZipFile.open().
"""
# Max size supported by decompressor.
MAX_N = 1 << 31 - 1
# Read from compressed files in 4k blocks.
MIN_READ_SIZE = 4096
# Search for universal newlines or line chunks.
PATTERN = re.compile(r'^(?P<chunk>[^\r\n]+)|(?P<newline>\n|\r\n?)')
def __init__(self, fileobj, mode, zipinfo, decrypter=None,
close_fileobj=False):
self._fileobj = fileobj
self._decrypter = decrypter
self._close_fileobj = close_fileobj
self._compress_type = zipinfo.compress_type
self._compress_size = zipinfo.compress_size
self._compress_left = zipinfo.compress_size
if self._compress_type == ZIP_DEFLATED:
self._decompressor = zlib.decompressobj(-15)
elif self._compress_type != ZIP_STORED:
descr = compressor_names.get(self._compress_type)
if descr:
raise NotImplementedError("compression type %d (%s)" % (self._compress_type, descr))
else:
raise NotImplementedError("compression type %d" % (self._compress_type,))
self._unconsumed = ''
self._readbuffer = ''
self._offset = 0
self._universal = 'U' in mode
self.newlines = None
# Adjust read size for encrypted files since the first 12 bytes
# are for the encryption/password information.
if self._decrypter is not None:
self._compress_left -= 12
self.mode = mode
self.name = zipinfo.filename
if hasattr(zipinfo, 'CRC'):
self._expected_crc = zipinfo.CRC
self._running_crc = crc32(b'') & 0xffffffff
else:
self._expected_crc = None
def readline(self, limit=-1):
"""Read and return a line from the stream.
If limit is specified, at most limit bytes will be read.
"""
if not self._universal and limit < 0:
# Shortcut common case - newline found in buffer.
i = self._readbuffer.find('\n', self._offset) + 1
if i > 0:
line = self._readbuffer[self._offset: i]
self._offset = i
return line
if not self._universal:
return io.BufferedIOBase.readline(self, limit)
line = ''
while limit < 0 or len(line) < limit:
readahead = self.peek(2)
if readahead == '':
return line
#
# Search for universal newlines or line chunks.
#
# The pattern returns either a line chunk or a newline, but not
# both. Combined with peek(2), we are assured that the sequence
# '\r\n' is always retrieved completely and never split into
# separate newlines - '\r', '\n' due to coincidental readaheads.
#
match = self.PATTERN.search(readahead)
newline = match.group('newline')
if newline is not None:
if self.newlines is None:
self.newlines = []
if newline not in self.newlines:
self.newlines.append(newline)
self._offset += len(newline)
return line + '\n'
chunk = match.group('chunk')
if limit >= 0:
chunk = chunk[: limit - len(line)]
self._offset += len(chunk)
line += chunk
return line
def peek(self, n=1):
"""Returns buffered bytes without advancing the position."""
if n > len(self._readbuffer) - self._offset:
chunk = self.read(n)
if len(chunk) > self._offset:
self._readbuffer = chunk + self._readbuffer[self._offset:]
self._offset = 0
else:
self._offset -= len(chunk)
# Return up to 512 bytes to reduce allocation overhead for tight loops.
return self._readbuffer[self._offset: self._offset + 512]
def readable(self):
return True
def read(self, n=-1):
"""Read and return up to n bytes.
If the argument is omitted, None, or negative, data is read and returned until EOF is reached.
"""
buf = ''
if n is None:
n = -1
while True:
if n < 0:
data = self.read1(n)
elif n > len(buf):
data = self.read1(n - len(buf))
else:
return buf
if len(data) == 0:
return buf
buf += data
def _update_crc(self, newdata, eof):
# Update the CRC using the given data.
if self._expected_crc is None:
# No need to compute the CRC if we don't have a reference value
return
self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff
# Check the CRC if we're at the end of the file
if eof and self._running_crc != self._expected_crc:
raise BadZipfile("Bad CRC-32 for file %r" % self.name)
def read1(self, n):
"""Read up to n bytes with at most one read() system call."""
# Simplify algorithm (branching) by transforming negative n to large n.
if n < 0 or n is None:
n = self.MAX_N
# Bytes available in read buffer.
len_readbuffer = len(self._readbuffer) - self._offset
# Read from file.
if self._compress_left > 0 and n > len_readbuffer + len(self._unconsumed):
nbytes = n - len_readbuffer - len(self._unconsumed)
nbytes = max(nbytes, self.MIN_READ_SIZE)
nbytes = min(nbytes, self._compress_left)
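# Read at least MIN_READ_SIZE bytes but never past the end of this member's
# compressed data.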
data = self._fileobj.read(nbytes)
self._compress_left -= len(data)
if data and self._decrypter is not None:
data = ''.join(map(self._decrypter, data))
if self._compress_type == ZIP_STORED:
self._update_crc(data, eof=(self._compress_left==0))
self._readbuffer = self._readbuffer[self._offset:] + data
self._offset = 0
else:
# Prepare deflated bytes for decompression.
self._unconsumed += data
# Handle unconsumed data.
if (len(self._unconsumed) > 0 and n > len_readbuffer and
self._compress_type == ZIP_DEFLATED):
data = self._decompressor.decompress(
self._unconsumed,
max(n - len_readbuffer, self.MIN_READ_SIZE)
)
self._unconsumed = self._decompressor.unconsumed_tail
eof = len(self._unconsumed) == 0 and self._compress_left == 0
if eof:
data += self._decompressor.flush()
self._update_crc(data, eof=eof)
self._readbuffer = self._readbuffer[self._offset:] + data
self._offset = 0
# Read from buffer.
data = self._readbuffer[self._offset: self._offset + n]
self._offset += len(data)
return data
def close(self):
try :
if self._close_fileobj:
self._fileobj.close()
finally:
super(ZipExtFile, self).close()
class ZipFile(object):
""" Class with methods to open, read, write, close, list zip files.
z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False)
file: Either the path to the file, or a file-like object.
If it is a path, the file will be opened and closed by ZipFile.
mode: The mode can be either read "r", write "w" or append "a".
compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib).
allowZip64: if True ZipFile will create files with ZIP64 extensions when
needed, otherwise it will raise an exception when this would
be necessary.
"""
fp = None # Set here since __del__ checks it
def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False):
"""Open the ZIP file with mode read "r", write "w" or append "a"."""
if mode not in ("r", "w", "a"):
raise RuntimeError('ZipFile() requires mode "r", "w", or "a"')
if compression == ZIP_STORED:
pass
elif compression == ZIP_DEFLATED:
if not zlib:
raise RuntimeError,\
"Compression requires the (missing) zlib module"
else:
raise RuntimeError, "That compression method is not supported"
self._allowZip64 = allowZip64
self._didModify = False
self.debug = 0 # Level of printing: 0 through 3
self.NameToInfo = {} # Find file info given name
self.filelist = [] # List of ZipInfo instances for archive
self.compression = compression # Method of compression
self.mode = mode
self.pwd = None
self._comment = ''
# Check if we were passed a file-like object
if isinstance(file, basestring):
self._filePassed = 0
self.filename = file
modeDict = {'r' : 'rb', 'w': 'w+b', 'a' : 'r+b',
'r+b': 'w+b', 'w+b': 'wb'}
filemode = modeDict[mode]
while True:
try:
self.fp = io.open(file, filemode)
except IOError:
if filemode in modeDict:
filemode = modeDict[filemode]
continue
raise
break
else:
self._filePassed = 1
self.fp = file
self.filename = getattr(file, 'name', None)
self._fileRefCnt = 1
try:
if mode == 'r':
self._RealGetContents()
elif mode == 'w':
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
self.start_dir = 0
elif mode == 'a':
try:
# See if file is a zip file
self._RealGetContents()
# seek to start of directory and overwrite
self.fp.seek(self.start_dir, 0)
except BadZipfile:
# file is not a zip file, just append
self.fp.seek(0, 2)
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
self.start_dir = self.fp.tell()
else:
raise RuntimeError('Mode must be "r", "w" or "a"')
except:
fp = self.fp
self.fp = None
self._fpclose(fp)
raise
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def _RealGetContents(self):
"""Read in the table of contents for the ZIP file."""
fp = self.fp
try:
endrec = _EndRecData(fp)
except IOError:
raise BadZipfile("File is not a zip file")
if not endrec:
raise BadZipfile, "File is not a zip file"
if self.debug > 1:
print endrec
size_cd = endrec[_ECD_SIZE] # bytes in central directory
offset_cd = endrec[_ECD_OFFSET] # offset of central directory
self._comment = endrec[_ECD_COMMENT] # archive comment
# "concat" is zero, unless zip was concatenated to another file
concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
if endrec[_ECD_SIGNATURE] == stringEndArchive64:
# If Zip64 extension structures are present, account for them
concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)
if self.debug > 2:
inferred = concat + offset_cd
print "given, inferred, offset", offset_cd, inferred, concat
# self.start_dir: Position of start of central directory
self.start_dir = offset_cd + concat
fp.seek(self.start_dir, 0)
data = fp.read(size_cd)
fp = cStringIO.StringIO(data)
total = 0
while total < size_cd:
centdir = fp.read(sizeCentralDir)
if len(centdir) != sizeCentralDir:
raise BadZipfile("Truncated central directory")
centdir = struct.unpack(structCentralDir, centdir)
if centdir[_CD_SIGNATURE] != stringCentralDir:
raise BadZipfile("Bad magic number for central directory")
if self.debug > 2:
print centdir
filename = fp.read(centdir[_CD_FILENAME_LENGTH])
# Create ZipInfo instance to store file information
x = ZipInfo(filename)
x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
(x.create_version, x.create_system, x.extract_version, x.reserved,
x.flag_bits, x.compress_type, t, d,
x.CRC, x.compress_size, x.file_size) = centdir[1:12]
x.volume, x.internal_attr, x.external_attr = centdir[15:18]
# Convert date/time code to (year, month, day, hour, min, sec)
x._raw_time = t
x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )
x._decodeExtra()
x.header_offset = x.header_offset + concat
x.filename = x._decodeFilename()
self.filelist.append(x)
self.NameToInfo[x.filename] = x
# update total bytes read from central directory
total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
+ centdir[_CD_EXTRA_FIELD_LENGTH]
+ centdir[_CD_COMMENT_LENGTH])
if self.debug > 2:
print "total", total
def namelist(self):
"""Return a list of file names in the archive."""
l = []
for data in self.filelist:
l.append(data.filename)
return l
def infolist(self):
"""Return a list of class ZipInfo instances for files in the
archive."""
return self.filelist
def printdir(self):
"""Print a table of contents for the zip file."""
print "%-46s %19s %12s" % ("File Name", "Modified ", "Size")
for zinfo in self.filelist:
date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)
def testzip(self):
"""Read all the files and check the CRC."""
chunk_size = 2 ** 20
for zinfo in self.filelist:
try:
# Read by chunks, to avoid an OverflowError or a
# MemoryError with very large embedded files.
with self.open(zinfo.filename, "r") as f:
while f.read(chunk_size): # Check CRC-32
pass
except BadZipfile:
return zinfo.filename
def getinfo(self, name):
"""Return the instance of ZipInfo given 'name'."""
info = self.NameToInfo.get(name)
if info is None:
raise KeyError(
'There is no item named %r in the archive' % name)
return info
def setpassword(self, pwd):
"""Set default password for encrypted files."""
self.pwd = pwd
@property
def comment(self):
"""The comment text associated with the ZIP file."""
return self._comment
@comment.setter
def comment(self, comment):
# check for valid comment length
if len(comment) > ZIP_MAX_COMMENT:
import warnings
warnings.warn('Archive comment is too long; truncating to %d bytes'
% ZIP_MAX_COMMENT, stacklevel=2)
comment = comment[:ZIP_MAX_COMMENT]
self._comment = comment
self._didModify = True
def read(self, name, pwd=None):
"""Return file bytes (as a string) for name."""
return self.open(name, "r", pwd).read()
def open(self, name, mode="r", pwd=None):
"""Return file-like object for 'name'."""
if mode not in ("r", "U", "rU"):
raise RuntimeError, 'open() requires mode "r", "U", or "rU"'
if not self.fp:
raise RuntimeError, \
"Attempt to read ZIP archive that was already closed"
# Make sure we have an info object
if isinstance(name, ZipInfo):
# 'name' is already an info object
zinfo = name
else:
# Get info object for name
zinfo = self.getinfo(name)
self._fileRefCnt += 1
zef_file = _SharedFile(self.fp, zinfo.header_offset, self._fpclose)
try:
# Skip the file header:
fheader = zef_file.read(sizeFileHeader)
if len(fheader) != sizeFileHeader:
raise BadZipfile("Truncated file header")
fheader = struct.unpack(structFileHeader, fheader)
if fheader[_FH_SIGNATURE] != stringFileHeader:
raise BadZipfile("Bad magic number for file header")
fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
if fheader[_FH_EXTRA_FIELD_LENGTH]:
zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
if fname != zinfo.orig_filename:
raise BadZipfile, \
'File name in directory "%s" and header "%s" differ.' % (
zinfo.orig_filename, fname)
# check for encrypted flag & handle password
is_encrypted = zinfo.flag_bits & 0x1
zd = None
if is_encrypted:
if not pwd:
pwd = self.pwd
if not pwd:
raise RuntimeError, "File %s is encrypted, " \
"password required for extraction" % name
zd = _ZipDecrypter(pwd)
# The first 12 bytes in the cypher stream is an encryption header
# used to strengthen the algorithm. The first 11 bytes are
# completely random, while the 12th contains the MSB of the CRC,
# or the MSB of the file time depending on the header type
# and is used to check the correctness of the password.
bytes = zef_file.read(12)
h = map(zd, bytes[0:12])
if zinfo.flag_bits & 0x8:
# compare against the file time from the extended local headers
check_byte = (zinfo._raw_time >> 8) & 0xff
else:
# compare against the CRC otherwise
check_byte = (zinfo.CRC >> 24) & 0xff
if ord(h[11]) != check_byte:
raise RuntimeError("Bad password for file", name)
return ZipExtFile(zef_file, mode, zinfo, zd, True)
except:
zef_file.close()
raise
def extract(self, member, path=None, pwd=None):
"""Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a ZipInfo object. You can
specify a different directory using `path'.
"""
if not isinstance(member, ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
return self._extract_member(member, path, pwd)
def extractall(self, path=None, members=None, pwd=None):
"""Extract all members from the archive to the current working
directory. `path' specifies a different directory to extract to.
`members' is optional and must be a subset of the list returned
by namelist().
"""
if members is None:
members = self.namelist()
for zipinfo in members:
self.extract(zipinfo, path, pwd)
def _extract_member(self, member, targetpath, pwd):
"""Extract the ZipInfo object 'member' to a physical
file on the path targetpath.
"""
# build the destination pathname, replacing
# forward slashes to platform specific separators.
arcname = member.filename.replace('/', os.path.sep)
if os.path.altsep:
arcname = arcname.replace(os.path.altsep, os.path.sep)
# interpret absolute pathname as relative, remove drive letter or
# UNC path, redundant separators, "." and ".." components.
arcname = os.path.splitdrive(arcname)[1]
arcname = os.path.sep.join(x for x in arcname.split(os.path.sep)
if x not in ('', os.path.curdir, os.path.pardir))
if os.path.sep == '\\':
# filter illegal characters on Windows
illegal = ':<>|"?*'
if isinstance(arcname, unicode):
table = {ord(c): ord('_') for c in illegal}
else:
table = string.maketrans(illegal, '_' * len(illegal))
arcname = arcname.translate(table)
# remove trailing dots
arcname = (x.rstrip('.') for x in arcname.split(os.path.sep))
arcname = os.path.sep.join(x for x in arcname if x)
targetpath = os.path.join(targetpath, arcname)
targetpath = os.path.normpath(targetpath)
# Create all upper directories if necessary.
upperdirs = os.path.dirname(targetpath)
if upperdirs and not os.path.exists(upperdirs):
os.makedirs(upperdirs)
if member.filename[-1] == '/':
if not os.path.isdir(targetpath):
os.mkdir(targetpath)
return targetpath
with self.open(member, pwd=pwd) as source, \
file(targetpath, "wb") as target:
shutil.copyfileobj(source, target)
return targetpath
def _writecheck(self, zinfo):
"""Check for errors before writing a file to the archive."""
if zinfo.filename in self.NameToInfo:
import warnings
warnings.warn('Duplicate name: %r' % zinfo.filename, stacklevel=3)
if self.mode not in ("w", "a"):
raise RuntimeError, 'write() requires mode "w" or "a"'
if not self.fp:
raise RuntimeError, \
"Attempt to write ZIP archive that was already closed"
if zinfo.compress_type == ZIP_DEFLATED and not zlib:
raise RuntimeError, \
"Compression requires the (missing) zlib module"
if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED):
raise RuntimeError, \
"That compression method is not supported"
if not self._allowZip64:
requires_zip64 = None
if len(self.filelist) >= ZIP_FILECOUNT_LIMIT:
requires_zip64 = "Files count"
elif zinfo.file_size > ZIP64_LIMIT:
requires_zip64 = "Filesize"
elif zinfo.header_offset > ZIP64_LIMIT:
requires_zip64 = "Zipfile size"
if requires_zip64:
raise LargeZipFile(requires_zip64 +
" would require ZIP64 extensions")
def write(self, filename, arcname=None, compress_type=None):
"""Put the bytes from filename into the archive under the name
arcname."""
if not self.fp:
raise RuntimeError(
"Attempt to write to ZIP archive that was already closed")
st = os.stat(filename)
isdir = stat.S_ISDIR(st.st_mode)
mtime = time.localtime(st.st_mtime)
date_time = mtime[0:6]
# Create ZipInfo instance to store file information
if arcname is None:
arcname = filename
arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
while arcname[0] in (os.sep, os.altsep):
arcname = arcname[1:]
if isdir:
arcname += '/'
zinfo = ZipInfo(arcname, date_time)
zinfo.external_attr = (st[0] & 0xFFFF) << 16L # Unix attributes
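# The high 16 bits of external_attr hold the Unix st_mode; the low bits carry
# MS-DOS attribute flags (the directory flag 0x10 is OR-ed in below for directories).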
if compress_type is None:
zinfo.compress_type = self.compression
else:
zinfo.compress_type = compress_type
zinfo.file_size = st.st_size
zinfo.flag_bits = 0x00
self.fp.seek(self.start_dir, 0)
zinfo.header_offset = self.fp.tell() # Start of header bytes
self._writecheck(zinfo)
self._didModify = True
if isdir:
zinfo.file_size = 0
zinfo.compress_size = 0
zinfo.CRC = 0
zinfo.external_attr |= 0x10 # MS-DOS directory flag
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
self.fp.write(zinfo.FileHeader(False))
self.start_dir = self.fp.tell()
return
with open(filename, "rb") as fp:
# Must overwrite CRC and sizes with correct data later
zinfo.CRC = CRC = 0
zinfo.compress_size = compress_size = 0
# Compressed size can be larger than uncompressed size
zip64 = self._allowZip64 and \
zinfo.file_size * 1.05 > ZIP64_LIMIT
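# The 1.05 factor leaves ~5% headroom in case deflate expands incompressible
# data past the 32-bit limit.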
self.fp.write(zinfo.FileHeader(zip64))
if zinfo.compress_type == ZIP_DEFLATED:
cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -15)
else:
cmpr = None
file_size = 0
while 1:
buf = fp.read(1024 * 8)
if not buf:
break
file_size = file_size + len(buf)
CRC = crc32(buf, CRC) & 0xffffffff
if cmpr:
buf = cmpr.compress(buf)
compress_size = compress_size + len(buf)
self.fp.write(buf)
if cmpr:
buf = cmpr.flush()
compress_size = compress_size + len(buf)
self.fp.write(buf)
zinfo.compress_size = compress_size
else:
zinfo.compress_size = file_size
zinfo.CRC = CRC
zinfo.file_size = file_size
if not zip64 and self._allowZip64:
if file_size > ZIP64_LIMIT:
raise RuntimeError('File size has increased during compressing')
if compress_size > ZIP64_LIMIT:
raise RuntimeError('Compressed size larger than uncompressed size')
# Seek backwards and write file header (which will now include
# correct CRC and file sizes)
self.start_dir = self.fp.tell() # Preserve current position in file
self.fp.seek(zinfo.header_offset, 0)
self.fp.write(zinfo.FileHeader(zip64))
self.fp.seek(self.start_dir, 0)
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
def writestr(self, zinfo_or_arcname, bytes, compress_type=None):
"""Write a file into the archive. The contents is the string
'bytes'. 'zinfo_or_arcname' is either a ZipInfo instance or
the name of the file in the archive."""
if not isinstance(zinfo_or_arcname, ZipInfo):
zinfo = ZipInfo(filename=zinfo_or_arcname,
date_time=time.localtime(time.time())[:6])
zinfo.compress_type = self.compression
if zinfo.filename[-1] == '/':
zinfo.external_attr = 0o40775 << 16 # drwxrwxr-x
zinfo.external_attr |= 0x10 # MS-DOS directory flag
else:
zinfo.external_attr = 0o600 << 16 # ?rw-------
else:
zinfo = zinfo_or_arcname
if not self.fp:
raise RuntimeError(
"Attempt to write to ZIP archive that was already closed")
if compress_type is not None:
zinfo.compress_type = compress_type
zinfo.file_size = len(bytes) # Uncompressed size
self.fp.seek(self.start_dir, 0)
zinfo.header_offset = self.fp.tell() # Start of header bytes
self._writecheck(zinfo)
self._didModify = True
zinfo.CRC = crc32(bytes) & 0xffffffff # CRC-32 checksum
if zinfo.compress_type == ZIP_DEFLATED:
co = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -15)
bytes = co.compress(bytes) + co.flush()
zinfo.compress_size = len(bytes) # Compressed size
else:
zinfo.compress_size = zinfo.file_size
zip64 = zinfo.file_size > ZIP64_LIMIT or \
zinfo.compress_size > ZIP64_LIMIT
if zip64 and not self._allowZip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
self.fp.write(zinfo.FileHeader(zip64))
self.fp.write(bytes)
if zinfo.flag_bits & 0x08:
# Write CRC and file sizes after the file data
fmt = '<LQQ' if zip64 else '<LLL'
self.fp.write(struct.pack(fmt, zinfo.CRC, zinfo.compress_size,
zinfo.file_size))
self.fp.flush()
self.start_dir = self.fp.tell()
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
def __del__(self):
"""Call the "close()" method in case the user forgot."""
self.close()
def close(self):
"""Close the file, and for mode "w" and "a" write the ending
records."""
if self.fp is None:
return
try:
if self.mode in ("w", "a") and self._didModify: # write ending records
self.fp.seek(self.start_dir, 0)
for zinfo in self.filelist: # write central directory
dt = zinfo.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
extra = []
if zinfo.file_size > ZIP64_LIMIT \
or zinfo.compress_size > ZIP64_LIMIT:
extra.append(zinfo.file_size)
extra.append(zinfo.compress_size)
file_size = 0xffffffff
compress_size = 0xffffffff
else:
file_size = zinfo.file_size
compress_size = zinfo.compress_size
if zinfo.header_offset > ZIP64_LIMIT:
extra.append(zinfo.header_offset)
header_offset = 0xffffffffL
else:
header_offset = zinfo.header_offset
extra_data = zinfo.extra
if extra:
# Append a ZIP64 field to the extra's
extra_data = struct.pack(
'<HH' + 'Q'*len(extra),
1, 8*len(extra), *extra) + extra_data
extract_version = max(45, zinfo.extract_version)
create_version = max(45, zinfo.create_version)
else:
extract_version = zinfo.extract_version
create_version = zinfo.create_version
try:
filename, flag_bits = zinfo._encodeFilenameFlags()
centdir = struct.pack(structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
except DeprecationWarning:
print >>sys.stderr, (structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(zinfo.filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
raise
self.fp.write(centdir)
self.fp.write(filename)
self.fp.write(extra_data)
self.fp.write(zinfo.comment)
pos2 = self.fp.tell()
# Write end-of-zip-archive record
centDirCount = len(self.filelist)
centDirSize = pos2 - self.start_dir
centDirOffset = self.start_dir
requires_zip64 = None
if centDirCount > ZIP_FILECOUNT_LIMIT:
requires_zip64 = "Files count"
elif centDirOffset > ZIP64_LIMIT:
requires_zip64 = "Central directory offset"
elif centDirSize > ZIP64_LIMIT:
requires_zip64 = "Central directory size"
if requires_zip64:
# Need to write the ZIP64 end-of-archive records
if not self._allowZip64:
raise LargeZipFile(requires_zip64 +
" would require ZIP64 extensions")
zip64endrec = struct.pack(
structEndArchive64, stringEndArchive64,
44, 45, 45, 0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset)
self.fp.write(zip64endrec)
zip64locrec = struct.pack(
structEndArchive64Locator,
stringEndArchive64Locator, 0, pos2, 1)
self.fp.write(zip64locrec)
centDirCount = min(centDirCount, 0xFFFF)
centDirSize = min(centDirSize, 0xFFFFFFFF)
centDirOffset = min(centDirOffset, 0xFFFFFFFF)
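# The classic EOCD fields are only 16/32 bits wide; oversized values are capped
# at their all-ones sentinels so readers fall back to the ZIP64 record above.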
endrec = struct.pack(structEndArchive, stringEndArchive,
0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset, len(self._comment))
self.fp.write(endrec)
self.fp.write(self._comment)
self.fp.flush()
finally:
fp = self.fp
self.fp = None
self._fpclose(fp)
def _fpclose(self, fp):
assert self._fileRefCnt > 0
self._fileRefCnt -= 1
if not self._fileRefCnt and not self._filePassed:
fp.close()
class PyZipFile(ZipFile):
"""Class to create ZIP archives with Python library files and packages."""
def writepy(self, pathname, basename = ""):
"""Add all files from "pathname" to the ZIP archive.
If pathname is a package directory, search the directory and
all package subdirectories recursively for all *.py files and enter
the modules into the archive. If pathname is a plain
directory, add all the *.py files it contains. Otherwise, pathname
must be a Python *.py file and the module will be put into the
archive. Added modules are always module.pyo or module.pyc.
This method will compile the module.py into module.pyc if
necessary.
"""
dir, name = os.path.split(pathname)
if os.path.isdir(pathname):
initname = os.path.join(pathname, "__init__.py")
if os.path.isfile(initname):
# This is a package directory, add it
if basename:
basename = "%s/%s" % (basename, name)
else:
basename = name
if self.debug:
print "Adding package in", pathname, "as", basename
fname, arcname = self._get_codename(initname[0:-3], basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
dirlist = os.listdir(pathname)
dirlist.remove("__init__.py")
# Add all *.py files and package subdirectories
for filename in dirlist:
path = os.path.join(pathname, filename)
root, ext = os.path.splitext(filename)
if os.path.isdir(path):
if os.path.isfile(os.path.join(path, "__init__.py")):
# This is a package directory, add it
self.writepy(path, basename) # Recursive call
elif ext == ".py":
fname, arcname = self._get_codename(path[0:-3],
basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
else:
# This is NOT a package directory, add its files at top level
if self.debug:
print "Adding files from directory", pathname
for filename in os.listdir(pathname):
path = os.path.join(pathname, filename)
root, ext = os.path.splitext(filename)
if ext == ".py":
fname, arcname = self._get_codename(path[0:-3],
basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
else:
if pathname[-3:] != ".py":
raise RuntimeError, \
'Files added with writepy() must end with ".py"'
fname, arcname = self._get_codename(pathname[0:-3], basename)
if self.debug:
print "Adding file", arcname
self.write(fname, arcname)
def _get_codename(self, pathname, basename):
"""Return (filename, archivename) for the path.
Given a module name path, return the correct file path and
archive name, compiling if necessary. For example, given
/python/lib/string, return (/python/lib/string.pyc, string).
"""
file_py = pathname + ".py"
file_pyc = pathname + ".pyc"
file_pyo = pathname + ".pyo"
if os.path.isfile(file_pyo) and \
os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
fname = file_pyo # Use .pyo file
elif not os.path.isfile(file_pyc) or \
os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
import py_compile
if self.debug:
print "Compiling", file_py
try:
py_compile.compile(file_py, file_pyc, None, True)
except py_compile.PyCompileError,err:
print err.msg
fname = file_pyc
else:
fname = file_pyc
archivename = os.path.split(fname)[1]
if basename:
archivename = "%s/%s" % (basename, archivename)
return (fname, archivename)
def main(args = None):
import textwrap
USAGE=textwrap.dedent("""\
Usage:
zipfile.py -l zipfile.zip # Show listing of a zipfile
zipfile.py -t zipfile.zip # Test if a zipfile is valid
zipfile.py -e zipfile.zip target # Extract zipfile into target dir
zipfile.py -c zipfile.zip src ... # Create zipfile from sources
""")
if args is None:
args = sys.argv[1:]
if not args or args[0] not in ('-l', '-c', '-e', '-t'):
print USAGE
sys.exit(1)
if args[0] == '-l':
if len(args) != 2:
print USAGE
sys.exit(1)
with ZipFile(args[1], 'r') as zf:
zf.printdir()
elif args[0] == '-t':
if len(args) != 2:
print USAGE
sys.exit(1)
with ZipFile(args[1], 'r') as zf:
badfile = zf.testzip()
if badfile:
print("The following enclosed file is corrupted: {!r}".format(badfile))
print "Done testing"
elif args[0] == '-e':
if len(args) != 3:
print USAGE
sys.exit(1)
with ZipFile(args[1], 'r') as zf:
zf.extractall(args[2])
elif args[0] == '-c':
if len(args) < 3:
print USAGE
sys.exit(1)
def addToZip(zf, path, zippath):
if os.path.isfile(path):
zf.write(path, zippath, ZIP_DEFLATED)
elif os.path.isdir(path):
if zippath:
zf.write(path, zippath)
for nm in os.listdir(path):
addToZip(zf,
os.path.join(path, nm), os.path.join(zippath, nm))
# else: ignore
with ZipFile(args[1], 'w', allowZip64=True) as zf:
for path in args[2:]:
zippath = os.path.basename(path)
if not zippath:
zippath = os.path.basename(os.path.dirname(path))
if zippath in ('', os.curdir, os.pardir):
zippath = ''
addToZip(zf, path, zippath)
if __name__ == "__main__":
main()
| bsd-2-clause | 6,829,568,326,489,694,000 | 37.04164 | 103 | 0.548642 | false |
vfreex/procszoo | tests/test_atfork.py | 1 | 1691 | #!/usr/bin/env python
import os
import sys
from distutils.log import warn as printf
try:
from procszoo.c_functions import *
except ImportError:
this_file_absdir = os.path.dirname(os.path.abspath(__file__))
procszoo_mod_dir = os.path.abspath("%s/.." % this_file_absdir)
sys.path.append(procszoo_mod_dir)
from procszoo.c_functions import *
if __name__ == "__main__":
def procinfo(str):
if "sched_getcpu" not in show_available_c_functions():
cpu_idx = -1
else:
cpu_idx = sched_getcpu()
pid = os.getpid()
ppid = os.getppid()
uid = os.getuid()
gid = os.getgid()
euid = os.geteuid()
egid = os.getegid()
hostname = gethostname()
procs = os.listdir("/proc")
printf("""%s:
cpu: %d pid: %d ppid: %d
uid %d gid %d euid %d egid %d
hostname: %s
procs: %s"""
% (str, cpu_idx, pid, ppid, uid, gid, euid, egid,
hostname, ", ".join(procs[-4:])))
def prepare_hdr():
procinfo("prepare handler")
def parent_hdr():
procinfo("parent handler")
def child_hdr():
procinfo("child handler")
def simple_handler1():
printf(1)
def simple_handler2():
printf(2)
atfork(prepare=simple_handler1, child=simple_handler1)
atfork(parent=parent_hdr, child=child_hdr)
atfork(prepare=simple_handler2)
unregister_fork_handlers(parent=parent_hdr, strict=True)
pid = os.fork()
if pid == -1:
raise RuntimeError("do fork failed")
elif pid == 0:
printf("child")
elif pid > 0:
os.waitpid(pid, 0)
printf("parent")
| gpl-3.0 | -5,939,648,310,228,879,000 | 25.421875 | 66 | 0.564755 | false |
ubuntu-core/snapcraft | tests/integration/store/test_store_validate.py | 1 | 2124 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2016-2019 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import shutil
import fixtures
from testtools.matchers import Equals
from tests import integration
class ValidateTestCase(integration.StoreTestCase):
def setUp(self):
super().setUp()
if not self.is_store_fake():
self.skipTest(
"Right combination of snaps and IDs is not available in real stores."
)
keys_dir = os.path.join(os.path.dirname(__file__), "keys")
temp_keys_dir = os.path.join(self.path, ".snap", "gnupg")
shutil.copytree(keys_dir, temp_keys_dir)
self.useFixture(fixtures.EnvironmentVariable("SNAP_GNUPG_HOME", temp_keys_dir))
def test_validate_success(self):
self.addCleanup(self.logout)
self.login()
self.assertThat(self.validate("core", ["core=3", "test-snap=4"]), Equals(0))
def test_validate_unknown_snap_failure(self):
self.addCleanup(self.logout)
self.login()
self.assertThat(
self.validate(
"unknown",
["core=3", "test-snap=4"],
expected_error="Snap 'unknown' was not found.",
),
Equals(2),
)
def test_validate_bad_argument(self):
self.addCleanup(self.logout)
self.login()
self.assertThat(
self.validate(
"core", ["core=foo"], expected_error="format must be name=revision"
),
Equals(2),
)
| gpl-3.0 | 1,699,060,045,097,039,400 | 32.1875 | 87 | 0.629944 | false |
ideal/drummer | ez_setup.py | 4 | 10034 | #!python
"""Bootstrap setuptools installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from ez_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
"""
import sys
DEFAULT_VERSION = "0.6c11"
DEFAULT_URL = "http://pypi.python.org/packages/%s/s/setuptools/" % sys.version[:3]
md5_data = {
'setuptools-0.6b1-py2.3.egg': '8822caf901250d848b996b7f25c6e6ca',
'setuptools-0.6b1-py2.4.egg': 'b79a8a403e4502fbb85ee3f1941735cb',
'setuptools-0.6b2-py2.3.egg': '5657759d8a6d8fc44070a9d07272d99b',
'setuptools-0.6b2-py2.4.egg': '4996a8d169d2be661fa32a6e52e4f82a',
'setuptools-0.6b3-py2.3.egg': 'bb31c0fc7399a63579975cad9f5a0618',
'setuptools-0.6b3-py2.4.egg': '38a8c6b3d6ecd22247f179f7da669fac',
'setuptools-0.6b4-py2.3.egg': '62045a24ed4e1ebc77fe039aa4e6f7e5',
'setuptools-0.6b4-py2.4.egg': '4cb2a185d228dacffb2d17f103b3b1c4',
'setuptools-0.6c1-py2.3.egg': 'b3f2b5539d65cb7f74ad79127f1a908c',
'setuptools-0.6c1-py2.4.egg': 'b45adeda0667d2d2ffe14009364f2a4b',
'setuptools-0.6c2-py2.3.egg': 'f0064bf6aa2b7d0f3ba0b43f20817c27',
'setuptools-0.6c2-py2.4.egg': '616192eec35f47e8ea16cd6a122b7277',
'setuptools-0.6c3-py2.3.egg': 'f181fa125dfe85a259c9cd6f1d7b78fa',
'setuptools-0.6c3-py2.4.egg': 'e0ed74682c998bfb73bf803a50e7b71e',
'setuptools-0.6c3-py2.5.egg': 'abef16fdd61955514841c7c6bd98965e',
'setuptools-0.6c4-py2.3.egg': 'b0b9131acab32022bfac7f44c5d7971f',
'setuptools-0.6c4-py2.4.egg': '2a1f9656d4fbf3c97bf946c0a124e6e2',
'setuptools-0.6c4-py2.5.egg': '8f5a052e32cdb9c72bcf4b5526f28afc',
'setuptools-0.6c5-py2.3.egg': 'ee9fd80965da04f2f3e6b3576e9d8167',
'setuptools-0.6c5-py2.4.egg': 'afe2adf1c01701ee841761f5bcd8aa64',
'setuptools-0.6c5-py2.5.egg': 'a8d3f61494ccaa8714dfed37bccd3d5d',
'setuptools-0.6c6-py2.3.egg': '35686b78116a668847237b69d549ec20',
'setuptools-0.6c6-py2.4.egg': '3c56af57be3225019260a644430065ab',
'setuptools-0.6c6-py2.5.egg': 'b2f8a7520709a5b34f80946de5f02f53',
'setuptools-0.6c7-py2.3.egg': '209fdf9adc3a615e5115b725658e13e2',
'setuptools-0.6c7-py2.4.egg': '5a8f954807d46a0fb67cf1f26c55a82e',
'setuptools-0.6c7-py2.5.egg': '45d2ad28f9750e7434111fde831e8372',
'setuptools-0.6c8-py2.3.egg': '50759d29b349db8cfd807ba8303f1902',
'setuptools-0.6c8-py2.4.egg': 'cba38d74f7d483c06e9daa6070cce6de',
'setuptools-0.6c8-py2.5.egg': '1721747ee329dc150590a58b3e1ac95b',
'setuptools-0.6c9-py2.3.egg': 'a83c4020414807b496e4cfbe08507c03',
'setuptools-0.6c9-py2.4.egg': '260a2be2e5388d66bdaee06abec6342a',
'setuptools-0.6c9-py2.5.egg': 'fe67c3e5a17b12c0e7c541b7ea43a8e6',
'setuptools-0.6c9-py2.6.egg': 'ca37b1ff16fa2ede6e19383e7b59245a',
'setuptools-0.6c11-py2.3.egg' : '2baeac6e13d414a9d28e7ba5b5a596de',
'setuptools-0.6c11-py2.4.egg' : 'bd639f9b0eac4c42497034dec2ec0c2b',
'setuptools-0.6c11-py2.5.egg' : '64c94f3bf7a72a13ec83e0b24f2749b2',
'setuptools-0.6c11-py2.6.egg' : 'bfa92100bd772d5a213eedd356d64086',
'setuptools-0.6c11-py2.7.egg' : 'fe1f997bc722265116870bc7919059ea',
}
import sys, os
try: from hashlib import md5
except ImportError: from md5 import md5
def _validate_md5(egg_name, data):
if egg_name in md5_data:
digest = md5(data).hexdigest()
if digest != md5_data[egg_name]:
sys.stderr.write(
"md5 validation of %s failed! (Possible download problem?)\n"
% egg_name
)
sys.exit(2)
return data
def use_setuptools(
version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
download_delay=15
):
"""Automatically find/download setuptools and make it available on sys.path
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end with
a '/'). `to_dir` is the directory where setuptools will be downloaded, if
it is not already available. If `download_delay` is specified, it should
be the number of seconds that will be paused before initiating a download,
should one be required. If an older version of setuptools is installed,
this routine will print a message to ``sys.stderr`` and raise SystemExit in
an attempt to abort the calling script.
"""
was_imported = 'pkg_resources' in sys.modules or 'setuptools' in sys.modules
def do_download():
egg = download_setuptools(version, download_base, to_dir, download_delay)
sys.path.insert(0, egg)
import setuptools; setuptools.bootstrap_install_from = egg
try:
import pkg_resources
except ImportError:
return do_download()
try:
pkg_resources.require("setuptools>="+version); return
except pkg_resources.VersionConflict as e:
if was_imported:
sys.stderr.write(
"The required version of setuptools (>=%s) is not available, and\n"
"can't be installed while this script is running. Please install\n"
" a more recent version first, using 'easy_install -U setuptools'."
"\n\n(Currently using %r)\n"
% (version, e.args[0]))
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
return do_download()
except pkg_resources.DistributionNotFound:
return do_download()
def download_setuptools(
version=DEFAULT_VERSION, download_base=DEFAULT_URL, to_dir=os.curdir,
delay = 15
):
"""Download setuptools from a specified location and return its filename
`version` should be a valid setuptools version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download attempt.
"""
import urllib2, shutil
egg_name = "setuptools-%s-py%s.egg" % (version,sys.version[:3])
url = download_base + egg_name
saveto = os.path.join(to_dir, egg_name)
src = dst = None
if not os.path.exists(saveto): # Avoid repeated downloads
try:
from distutils import log
if delay:
log.warn("""
---------------------------------------------------------------------------
This script requires setuptools version %s to run (even to display
help). I will attempt to download it for you (from
%s), but
you may need to enable firewall access for this script first.
I will start the download in %d seconds.
(Note: if this machine does not have network access, please obtain the file
%s
and place it in this directory before rerunning this script.)
---------------------------------------------------------------------------""",
version, download_base, delay, url
); from time import sleep; sleep(delay)
log.warn("Downloading %s", url)
src = urllib2.urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = _validate_md5(egg_name, src.read())
dst = open(saveto,"wb"); dst.write(data)
finally:
if src: src.close()
if dst: dst.close()
return os.path.realpath(saveto)
def main(argv, version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
try:
import setuptools
except ImportError:
egg = None
try:
egg = download_setuptools(version, delay=0)
sys.path.insert(0,egg)
from setuptools.command.easy_install import main
return main(list(argv)+[egg]) # we're done here
finally:
if egg and os.path.exists(egg):
os.unlink(egg)
else:
if setuptools.__version__ == '0.0.1':
sys.stderr.write(
"You have an obsolete version of setuptools installed. Please\n"
"remove it from your system entirely before rerunning this script.\n"
)
sys.exit(2)
req = "setuptools>="+version
import pkg_resources
try:
pkg_resources.require(req)
except pkg_resources.VersionConflict:
try:
from setuptools.command.easy_install import main
except ImportError:
from easy_install import main
main(list(argv)+[download_setuptools(delay=0)])
sys.exit(0) # try to force an exit
else:
if argv:
from setuptools.command.easy_install import main
main(argv)
else:
print("Setuptools version",version,"or greater has been installed.")
print('(Run "ez_setup.py -U setuptools" to reinstall or upgrade.)')
def update_md5(filenames):
"""Update our built-in md5 registry"""
import re
for name in filenames:
base = os.path.basename(name)
f = open(name,'rb')
md5_data[base] = md5(f.read()).hexdigest()
f.close()
data = [" %r: %r,\n" % it for it in md5_data.items()]
data.sort()
repl = "".join(data)
import inspect
srcfile = inspect.getsourcefile(sys.modules[__name__])
f = open(srcfile, 'rb'); src = f.read(); f.close()
match = re.search("\nmd5_data = {\n([^}]+)}", src)
if not match:
sys.stderr.write("Internal error!\n")
sys.exit(2)
src = src[:match.start(1)] + repl + src[match.end(1):]
f = open(srcfile,'w')
f.write(src)
f.close()
if __name__=='__main__':
if len(sys.argv)>2 and sys.argv[1]=='--md5update':
update_md5(sys.argv[2:])
else:
main(sys.argv[1:])
| gpl-3.0 | 4,738,398,491,704,964,000 | 40.634855 | 86 | 0.652581 | false |
ResearchComputing/RCAMP | rcamp/mailer/models.py | 1 | 3763 | from django.db import models
from django.core import mail
from django.core.exceptions import ValidationError
from django.template import Template,Context
import time
import socket
import mailer.signals
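# Build the event choices from the signal names defined in mailer.signals,
# skipping module-level attributes that are not signals.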
MODULE_EXCLUDES = ['__builtins__', '__cached__', '__doc__', '__file__', '__loader__',
'__name__', '__package__', '__spec__', 'django']
EVENT_CHOICES = tuple((s,s) for s in dir(mailer.signals) if s not in MODULE_EXCLUDES)
class MailNotifier(models.Model):
name = models.CharField(max_length=128)
event = models.CharField(max_length=128,choices=EVENT_CHOICES)
    mailto = models.TextField(null=True, blank=True)
    cc = models.TextField(null=True, blank=True)
    bcc = models.TextField(null=True, blank=True)
    from_address = models.EmailField(null=True, blank=True)
subject = models.CharField(max_length=256)
body = models.TextField()
def save(self,*args,**kwargs):
self.validate_templates()
super(MailNotifier,self).save(*args,**kwargs)
def validate_templates(self,ctx={}):
try:
self.make_email(ctx)
except:
raise ValueError('Cannot save notifier because there is a template error.')
def send(self,context={}):
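        # Build and send the message; any failure is swallowed and recorded
        # as an error entry in MailLog instead of being raised.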
try:
m = self.make_email(context)
m.send()
mail_log = MailLog(
reference_name=self.name,
email_object=m.message().__str__(),
recipient_emails=','.join(self.make_mailto_list(context)),
from_host=socket.gethostname()
)
mail_log.save()
return m.message()
except:
mail_log = MailLog(
reference_name='Error: {}'.format(self.name),
email_object=str(context),
recipient_emails='',
from_host=socket.gethostname()
)
mail_log.save()
return ''
def make_email(self,context):
email = mail.EmailMessage(subject=self.make_subject(context),
body=self.make_body(context),
from_email=self.from_address,
to=self.make_mailto_list(context),
cc=self.make_cc_list(context),
bcc=self.make_bcc_list(context))
return email
def make_mailto_list(self,context={}):
t = Template(self.mailto)
c = Context(context)
mailto_template = t.render(c)
mailto = [ e for e in mailto_template.split(",") if '@' in e ]
return mailto
def make_cc_list(self,context={}):
t = Template(self.cc)
c = Context(context)
cc_template = t.render(c)
cc = [ e for e in cc_template.split(",") if '@' in e ]
return cc
def make_bcc_list(self,context={}):
t = Template(self.bcc)
c = Context(context)
bcc_template = t.render(c)
bcc = [ e for e in bcc_template.split(",") if '@' in e ]
return bcc
def make_subject(self,context={}):
t = Template(self.subject)
c = Context(context)
return t.render(c)
def make_body(self,context={}):
t = Template(self.body)
c = Context(context)
return t.render(c)
def __str__(self):
return self.name
class MailLog(models.Model):
date_sent = models.DateTimeField(auto_now_add=True)
from_host = models.CharField(max_length=256)
recipient_emails = models.CharField(max_length=1024)
reference_name = models.CharField(max_length=256)
email_object=models.TextField()
def __str__(self):
return str(time.strftime(self.reference_name + '_%Y/%m/%d/%H:%M:%S'))
| mit | -3,203,701,715,067,586,000 | 30.621849 | 87 | 0.563646 | false |
rfverbruggen/rachiopy | tests/test_zone.py | 1 | 7027 | """Zone object test module"""
import unittest
from unittest.mock import patch
import uuid
import random
import json
from random import randrange
from rachiopy import Zone
from rachiopy.zone import ZoneSchedule
from tests.constants import BASE_API_URL, AUTHTOKEN, RESPONSE200, RESPONSE204
class TestZoneMethods(unittest.TestCase):
"""Class containing the Zone object test cases."""
def setUp(self):
self.zone = Zone(AUTHTOKEN)
zone1id = str(uuid.uuid4())
zone2id = str(uuid.uuid4())
zone3id = str(uuid.uuid4())
duration1 = randrange(10800)
duration2 = randrange(10800)
duration3 = randrange(10800)
self.zones = []
self.zones.append((zone1id, duration1))
self.zones.append((zone2id, duration2))
self.zones.append((zone3id, duration3))
def test_init(self):
"""Test if the constructor works as expected."""
self.assertEqual(self.zone.authtoken, AUTHTOKEN)
@patch("requests.Session.request")
def test_get(self, mock):
"""Test if the get method works as expected."""
mock.return_value = RESPONSE200
zoneid = uuid.uuid4()
self.zone.get(zoneid)
args, kwargs = mock.call_args
        # Check that the mock function is called with the right args.
self.assertEqual(args[1], f"{BASE_API_URL}/zone/" f"{zoneid}")
self.assertEqual(args[0], "GET")
self.assertEqual(kwargs["data"], None)
@patch("requests.Session.request")
def test_start(self, mock):
"""Test if the start method works as expected."""
mock.return_value = RESPONSE204
zoneid = str(uuid.uuid4())
duration = randrange(10800)
self.zone.start(zoneid, duration)
args, kwargs = mock.call_args
        # Check that the mock function is called with the right args.
self.assertEqual(args[1], f"{BASE_API_URL}/zone/start")
self.assertEqual(args[0], "PUT")
self.assertEqual(
kwargs["data"], json.dumps({"id": zoneid, "duration": duration})
)
# Check that values should be within range.
self.assertRaises(AssertionError, self.zone.start, zoneid, -1)
self.assertRaises(AssertionError, self.zone.start, zoneid, 10801)
@patch("requests.Session.request")
def test_start_multiple(self, mock):
"""Test if the start multiple method works as expected."""
mock.return_value = RESPONSE204
zones = [
{"id": data[0], "duration": data[1], "sortOrder": count}
for (count, data) in enumerate(self.zones, 1)
]
self.zone.start_multiple(zones)
args, kwargs = mock.call_args
        # Check that the mock function is called with the right args.
self.assertEqual(args[1], f"{BASE_API_URL}/zone/start_multiple")
self.assertEqual(args[0], "PUT")
self.assertEqual(
kwargs["data"],
json.dumps(
{
"zones": [
{
"id": data[0],
"duration": data[1],
"sortOrder": count,
}
for (count, data) in enumerate(self.zones, 1)
]
}
),
)
@patch("requests.Session.request")
def test_set_moisture_percent(self, mock):
"""Test if the set moisture percent method works as expected."""
mock.return_value = RESPONSE204
zoneid = str(uuid.uuid4())
percent = round(random.random(), 1)
self.zone.set_moisture_percent(zoneid, percent)
args, kwargs = mock.call_args
        # Check that the mock function is called with the right args.
self.assertEqual(args[1], f"{BASE_API_URL}/zone/setMoisturePercent")
self.assertEqual(args[0], "PUT")
self.assertEqual(
kwargs["data"], json.dumps({"id": zoneid, "percent": percent})
)
# Check that values should be within range.
self.assertRaises(
AssertionError, self.zone.set_moisture_percent, zoneid, -0.1
)
self.assertRaises(
AssertionError, self.zone.set_moisture_percent, zoneid, 1.1
)
@patch("requests.Session.request")
def test_set_moisture_level(self, mock):
"""Test if the set moisture level method works as expected."""
mock.return_value = RESPONSE204
zoneid = str(uuid.uuid4())
level = round(random.uniform(0.0, 100.0), 2)
self.zone.set_moisture_level(zoneid, level)
args, kwargs = mock.call_args
        # Check that the mock function is called with the right args.
self.assertEqual(args[1], f"{BASE_API_URL}/zone/setMoistureLevel")
self.assertEqual(args[0], "PUT")
self.assertEqual(
kwargs["data"], json.dumps({"id": zoneid, "level": level})
)
@patch("requests.Session.request")
def test_zoneschedule(self, mock):
"""Test if the zoneschedule helper class works as expected."""
mock.return_value = RESPONSE204
zoneschedule = self.zone.schedule()
for zone in self.zones:
zoneschedule.enqueue(zone[0], zone[1])
zoneschedule.start()
args, kwargs = mock.call_args
        # Check that the mock function is called with the right args.
self.assertEqual(args[1], f"{BASE_API_URL}/zone/start_multiple")
self.assertEqual(args[0], "PUT")
self.assertEqual(
kwargs["data"],
json.dumps(
{
"zones": [
{
"id": data[0],
"duration": data[1],
"sortOrder": count,
}
for (count, data) in enumerate(self.zones, 1)
]
}
),
)
@patch("requests.Session.request")
def test_zoneschedule_with_statement(self, mock):
"""Test if the zoneschedule with statement works as expected."""
mock.return_value = RESPONSE204
with ZoneSchedule(self.zone) as zoneschedule:
for zone in self.zones:
zoneschedule.enqueue(zone[0], zone[1])
args, kwargs = mock.call_args
        # Check that the mock function is called with the right args.
self.assertEqual(args[1], f"{BASE_API_URL}/zone/start_multiple")
self.assertEqual(args[0], "PUT")
self.assertEqual(
kwargs["data"],
json.dumps(
{
"zones": [
{
"id": data[0],
"duration": data[1],
"sortOrder": count,
}
for (count, data) in enumerate(self.zones, 1)
]
}
),
)
| mit | -8,967,909,637,884,122,000 | 32.146226 | 77 | 0.55116 | false |
jeandet/meson | test cases/common/100 manygen/subdir/manygen.py | 8 | 2129 | #!/usr/bin/env python
from __future__ import print_function
# Generates a static library, object file, source
# file and a header file.
import sys, os
import shutil, subprocess
with open(sys.argv[1]) as f:
funcname = f.readline().strip()
outdir = sys.argv[2]
buildtype_args = sys.argv[3]
if not os.path.isdir(outdir):
print('Outdir does not exist.')
sys.exit(1)
# Emulate the environment.detect_c_compiler() logic
compiler = os.environ.get('CC', None)
if not compiler:
compiler = shutil.which('cl') or \
shutil.which('gcc') or \
shutil.which('clang') or \
shutil.which('cc')
compbase = os.path.basename(compiler)
if 'cl' in compbase and 'clang' not in compbase:
libsuffix = '.lib'
is_vs = True
compiler = 'cl'
linker = 'lib'
else:
libsuffix = '.a'
is_vs = False
linker = 'ar'
if compiler is None:
print('No known compilers found.')
sys.exit(1)
objsuffix = '.o'
outo = os.path.join(outdir, funcname + objsuffix)
outa = os.path.join(outdir, funcname + libsuffix)
outh = os.path.join(outdir, funcname + '.h')
outc = os.path.join(outdir, funcname + '.c')
tmpc = 'diibadaaba.c'
tmpo = 'diibadaaba' + objsuffix
with open(outc, 'w') as f:
f.write('''#include"%s.h"
int %s_in_src() {
return 0;
}
''' % (funcname, funcname))
with open(outh, 'w') as f:
f.write('''#pragma once
int %s_in_lib();
int %s_in_obj();
int %s_in_src();
''' % (funcname, funcname, funcname))
with open(tmpc, 'w') as f:
f.write('''int %s_in_obj() {
return 0;
}
''' % funcname)
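# Compile the object file that provides the <funcname>_in_obj symbol.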
if is_vs:
subprocess.check_call([compiler, '/nologo', '/c', buildtype_args, '/Fo' + outo, tmpc])
else:
subprocess.check_call([compiler, '-c', '-o', outo, tmpc])
with open(tmpc, 'w') as f:
f.write('''int %s_in_lib() {
return 0;
}
''' % funcname)
if is_vs:
subprocess.check_call([compiler, '/nologo', '/c', '/Fo' + tmpo, tmpc])
subprocess.check_call([linker, '/NOLOGO', '/OUT:' + outa, tmpo])
else:
subprocess.check_call([compiler, '-c', '-o', tmpo, tmpc])
subprocess.check_call([linker, 'csr', outa, tmpo])
os.unlink(tmpo)
os.unlink(tmpc)
| apache-2.0 | -3,651,456,773,026,154,500 | 22.395604 | 90 | 0.616721 | false |
edgarRd/incubator-airflow | airflow/operators/oracle_operator.py | 9 | 2132 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.hooks.oracle_hook import OracleHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class OracleOperator(BaseOperator):
"""
Executes sql code in a specific Oracle database
:param oracle_conn_id: reference to a specific Oracle database
:type oracle_conn_id: string
:param sql: the sql code to be executed. (templated)
:type sql: Can receive a str representing a sql statement,
a list of str (sql statements), or reference to a template file.
Template reference are recognized by str ending in '.sql'
"""
template_fields = ('sql',)
template_ext = ('.sql',)
ui_color = '#ededed'
@apply_defaults
def __init__(
self, sql, oracle_conn_id='oracle_default', parameters=None,
autocommit=False, *args, **kwargs):
super(OracleOperator, self).__init__(*args, **kwargs)
self.oracle_conn_id = oracle_conn_id
self.sql = sql
self.autocommit = autocommit
self.parameters = parameters
def execute(self, context):
self.log.info('Executing: %s', self.sql)
hook = OracleHook(oracle_conn_id=self.oracle_conn_id)
hook.run(
self.sql,
autocommit=self.autocommit,
parameters=self.parameters)
| apache-2.0 | 3,355,717,025,591,581,700 | 37.071429 | 72 | 0.69137 | false |
mfherbst/spack | var/spack/repos/builtin/packages/font-misc-ethiopic/package.py | 5 | 2102 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class FontMiscEthiopic(Package):
"""X.org misc-ethiopic font."""
homepage = "http://cgit.freedesktop.org/xorg/font/misc-ethiopic"
url = "https://www.x.org/archive/individual/font/font-misc-ethiopic-1.0.3.tar.gz"
version('1.0.3', '02ddea9338d9d36804ad38f3daadb55a')
depends_on('font-util')
depends_on('fontconfig', type='build')
depends_on('mkfontdir', type='build')
depends_on('mkfontscale', type='build')
depends_on('pkgconfig', type='build')
depends_on('util-macros', type='build')
def install(self, spec, prefix):
configure('--prefix={0}'.format(prefix))
make('install')
# `make install` copies the files to the font-util installation.
# Create a fake directory to convince Spack that we actually
# installed something.
mkdir(prefix.lib)
| lgpl-2.1 | 5,406,814,945,503,770,000 | 39.423077 | 90 | 0.663654 | false |
crankycoder/leaderboard_backend | leaderboard/route_endpoints/submit_contributor_observations.py | 2 | 1490 | from leaderboard.db import session_factory
from leaderboard.models.tile import Tile
from leaderboard.models.contributor import Contributor
from leaderboard.models.reportweeks import insert_or_update_reportweek
key_tile_easting_northing = 'tile_easting_northing'
key_observations = 'observations'
def add_stumbles_for_contributor(email, display_name, query_json):
# Hack
json_object = query_json
session = session_factory()
with session.begin(subtransactions=True):
contributor = session.query(Contributor).filter_by(email=email).first()
if contributor is None:
contributor = Contributor(nickname=display_name,
email=email)
session.add(contributor)
elif contributor.nickname != display_name:
# Update the displayname
contributor.nickname = display_name
# Need to add to the session to mark as dirty
session.add(contributor)
for row in json_object['items']:
tile_coord = row[key_tile_easting_northing]
east, north = tile_coord.split(",")
tile = Tile.get_tile_mercator(int(float(east)), int(float(north)))
week_per_tile = insert_or_update_reportweek(contributor, tile)
obs = row[key_observations]
if not week_per_tile.observation_count:
week_per_tile.observation_count = 0
week_per_tile.observation_count += obs
return True
| mpl-2.0 | -556,587,348,377,407,740 | 38.210526 | 79 | 0.650336 | false |
ttm/aprendizadoSemisupervisionado | trabalho-final-aplicacao/algoritmos/ssl-propagacao-de-rotulo.py | 1 | 2561 | # -*- coding: utf-8 -*-
import numpy as np, networkx as nx, pylab as p, os
a=open("opiniao.arff","rb")
vec=a.read(); a.close()
vec=vec.split("@DATA"); vec=vec[1].split()
vec=[i.split(",")[:-1] for i in vec]
v=np.float64(vec)
vec=[[float(i[0]),float(i[1])] for i in vec]
p.plot(v[0:71,0],v[0:71,1],"go")
p.plot(v[71:,0],v[71:,1],"ro"); p.show()
n=len(vec)
# Label matrix
#pos= [1,5,30,45,55]
#pos=[15, 8, 4, 29, 16]
#pos=[24, 60, 28, 2, 19]
#pos=[51,64,57,4,28] # hand-picked
foo=np.arange(72); np.random.shuffle(foo)
pos=foo[:36] # 20%
#neg = [79,90,99,120,140]
#neg = [ 93, 121, 114, 97, 96]
#neg = [ 89, 122, 140, 124, 93]
#neg=[84, 78, 79, 125, 104] ### hand-picked
foo=np.arange(72,n); np.random.shuffle(foo)
neg=foo[:36] # 20%
l=list(pos)+list(neg)
L=np.zeros((len(l),2))
L[:len(pos)][:,0]=1 # positives
L[len(pos):][:,1]=1 # negatives
n=len(vec)
##########
# Label propagation
# Fully connected graph, with weights.
# Regularization hyperparameter = alfa
alfa=10.
pesos=np.zeros((n,n))
for i in xrange(0,n-1):
for j in xrange(i+1,n):
dist=sum((v[i]-v[j])**2)**0.5
w=np.exp(-dist/alfa)
pesos[i][j]=pesos[j][i]=w
# Transition probability matrix
P=np.zeros((n,n))
for i in xrange(0,n): # linha
peso_total=pesos[i].sum()
P[i]=pesos[i]/peso_total
# correctly partitioning the matrix P
Pu=np.delete(P,l,0)
Puu=np.delete(Pu,l,1)
foo=np.arange(n)
bar=np.delete(foo,l)
Pul=np.delete(Pu,bar,1)
I= np.identity(Puu.shape[0])
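# Closed-form label propagation solution for the unlabeled nodes:
# f_u = (I - P_uu)^-1 . P_ul . Y_l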
f= np.dot( np.linalg.inv(I-Puu), np.dot(Pul,L) )
num=n-len(l)-(72 -len(neg))
pos_rec=( f[:,0]>.5)[:num].sum()
neg_rec= (f[:,1]>.5)[num:].sum()
ppos=100.*pos_rec/(n-len(l)-L[0,:].sum())
pneg=100.*neg_rec/(n-len(l)-L[1,:].sum())
p=(ppos+pneg)/2
# Precision and recall
prec_p=float(pos_rec)/(pos_rec+36-neg_rec)
cob_p=float(pos_rec)/36
prec_n=float(neg_rec)/(neg_rec+36-pos_rec)
cob_n=float(neg_rec)/36
foo="porcentagem de acerto: %.2f" % ( p )
print foo
os.system("echo '"+ foo + "' >> " + "resultado0.5.txt")
foo="positivos, precisão: %.2f, cobertura: %.2f" % ( prec_p*100, cob_p*100 )
print foo
os.system("echo '"+ foo + "' >> " + "resultado0.5.txt")
foo="negativos, precisão: %.2f, cobertura: %.2f" % ( prec_n*100, cob_n*100 )
print foo
os.system("echo '"+ foo + "' >> " + "resultado0.5.txt")
os.system("echo >> " + "resultado0.5.txt")
#print pos_rec, neg_rec
#res=str((pos_rec,neg_rec)) + " => " + str(ppos)+"% "+str(pneg)+"%" + ", acertos "+str(p) + "%"
#print res
#os.system("echo '"+ res + "' >> " + "resultado0.5.txt")
| unlicense | -2,881,063,819,277,067,000 | 21.165217 | 95 | 0.589643 | false |
DBuildService/atomic-reactor | tests/plugins/test_flatpak_create_oci.py | 1 | 35479 | """
Copyright (c) 2017, 2019 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
from flexmock import flexmock
from io import BytesIO
import json
import os
import png
import pytest
import re
import subprocess
import tarfile
from textwrap import dedent
from atomic_reactor.constants import IMAGE_TYPE_OCI, IMAGE_TYPE_OCI_TAR
from atomic_reactor.inner import DockerBuildWorkflow
from atomic_reactor.plugin import PrePublishPluginsRunner, PluginFailedException
from atomic_reactor.plugins.pre_reactor_config import (ReactorConfigPlugin,
WORKSPACE_CONF_KEY,
ReactorConfig)
from osbs.utils import ImageName
from tests.constants import MOCK_SOURCE
from tests.flatpak import (MODULEMD_AVAILABLE,
setup_flatpak_source_info, build_flatpak_test_configs)
if MODULEMD_AVAILABLE:
from atomic_reactor.plugins.prepub_flatpak_create_oci import FlatpakCreateOciPlugin
from gi.repository import Modulemd
CONTAINER_ID = 'CONTAINER-ID'
ROOT = '/var/tmp/flatpak-build'
USER_PARAMS = {'flatpak': True}
DESKTOP_FILE_CONTENTS = b"""[Desktop Entry]
Name=Image Viewer
Comment=Browse and rotate images
TryExec=eog
Exec=eog %U
Icon=eog
StartupNotify=true
Terminal=false
Type=Application
Categories=GNOME;GTK;Graphics;2DGraphics;RasterGraphics;Viewer;
MimeType=image/bmp;image/gif;image/jpeg;image/jpg;image/pjpeg;image/png;image/tiff;image/x-bmp;image/x-gray;image/x-icb;image/x-ico;image/x-png;image/x-portable-anymap;image/x-portable-bitmap;image/x-portable-graymap;image/x-portable-pixmap;image/x-xbitmap;image/x-xpixmap;image/x-pcx;image/svg+xml;image/svg+xml-compressed;image/vnd.wap.wbmp;
# Extra keywords that can be used to search for eog in GNOME Shell and Unity
Keywords=Picture;Slideshow;Graphics;"""
# The list of RPMs inherited from the runtime is abbreviated; we just need one
# (abattis-cantarell-fonts) to check that they are properly ignored.
APP_MANIFEST_CONTENTS = b"""eog;3.24.1;1.module_7b96ed10;x86_64;(none);42;sigmd5;1491914281;sigpgp;siggpg
exempi;2.4.2;4.module_7b96ed10;x86_64;(none);42;sigmd5;1491914281;sigpgp;siggpg
libexif;0.6.21;11.module_7b96ed10;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libpeas;1.20.0;5.module_7b96ed10;x86_64;1;42;sigmd5;1491914281;sigpgp;siggpg
libpeas-gtk;1.20.0;5.module_7b96ed10;x86_64;1;42;sigmd5;0;42;1491914281;sigpgp;siggpg
abattis-cantarell-fonts;0.0.25;2.module_e15740c0;noarch;(none);42;sigmd5;1491914281;sigpgp;siggpg
"""
ICON = BytesIO()
# create minimal 256x256 RGBA PNG
png.Writer(256, 256, greyscale=False, alpha=True).write(ICON,
[[0 for _ in range(4 * 256)]
for _ in range(256)])
APP_FILESYSTEM_CONTENTS = {
'/usr/bin/not_eog': b'SHOULD_IGNORE',
ROOT + '/usr/bin/also_not_eog': b'SHOULD_IGNORE',
ROOT + '/app/bin/eog': b'MY_PROGRAM',
ROOT + '/app/share/applications/eog.desktop': DESKTOP_FILE_CONTENTS,
ROOT + '/app/share/icons/hicolor/256x256/apps/eog.png': ICON.getvalue(),
'/var/tmp/flatpak-build.rpm_qf': APP_MANIFEST_CONTENTS
}
EXPECTED_APP_FLATPAK_CONTENTS = [
'/export/share/applications/org.gnome.eog.desktop',
'/export/share/icons/hicolor/256x256/apps/org.gnome.eog.png',
'/files/bin/eog',
'/files/share/applications/org.gnome.eog.desktop',
'/files/share/icons/hicolor/256x256/apps/eog.png',
'/files/share/icons/hicolor/256x256/apps/org.gnome.eog.png',
'/metadata'
]
APP_CONFIG = {
'expected_ref_name': 'app/org.gnome.eog/x86_64/stable',
'filesystem_contents': APP_FILESYSTEM_CONTENTS,
'expected_contents': EXPECTED_APP_FLATPAK_CONTENTS,
'expected_components': ['eog'],
'unexpected_components': ['abattis-cantarell-fonts'],
}
RUNTIME_MANIFEST_CONTENTS = b"""abattis-cantarell-fonts;0.0.25;2.module_e15740c0;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
acl;2.2.52;13.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
adwaita-cursor-theme;3.24.0;2.module_e15740c0;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
adwaita-gtk2-theme;3.22.3;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
adwaita-icon-theme;3.24.0;2.module_e15740c0;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
atk;2.24.0;1.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
at-spi2-atk;2.24.1;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
at-spi2-core;2.24.1;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
audit-libs;2.7.3;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
avahi-libs;0.6.32;7.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
basesystem;11;3.module_7e01f122;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
bash;4.4.11;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
bzip2-libs;1.0.6;22.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
ca-certificates;2017.2.11;5.module_7e01f122;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
cairo;1.14.10;1.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
cairo-gobject;1.14.10;1.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
chkconfig;1.9;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
colord-libs;1.3.5;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
coreutils;8.27;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
coreutils-common;8.27;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
cracklib;2.9.6;5.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
crypto-policies;20170330;3.git55b66da.module_82827beb;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
cryptsetup-libs;1.7.3;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
cups-libs;2.2.2;6.module_98c1823a;x86_64;1;42;sigmd5;1491914281;sigpgp;siggpg
dbus;1.11.10;2.module_7e01f122;x86_64;1;42;sigmd5;1491914281;sigpgp;siggpg
dbus-libs;1.11.10;2.module_7e01f122;x86_64;1;42;sigmd5;1491914281;sigpgp;siggpg
device-mapper;1.02.137;4.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
device-mapper-libs;1.02.137;4.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
elfutils-default-yama-scope;0.168;5.module_7e01f122;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
elfutils-libelf;0.168;5.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
elfutils-libs;0.168;5.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
emacs-filesystem;25.2;0.1.rc2.module_7e01f122;noarch;1;42;sigmd5;1491914281;sigpgp;siggpg
enchant;1.6.0;16.module_e15740c0;x86_64;1;42;sigmd5;1491914281;sigpgp;siggpg
expat;2.2.0;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
fedora-modular-release;26;4.module_bc43b454;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
fedora-modular-repos;26;0.1.module_bc43b454;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
filesystem;3.2;40.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
flatpak-runtime-config;27;3.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
fontconfig;2.12.1;4.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
fontpackages-filesystem;1.44;18.module_f9511cd3;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
freetype;2.7.1;9.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gawk;4.1.4;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gdbm;1.12;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gdk-pixbuf2;2.36.6;1.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gdk-pixbuf2-modules;2.36.6;1.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
glib2;2.52.2;3.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
glibc;2.25;4.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
glibc-all-langpacks;2.25;4.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
glibc-common;2.25;4.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
glib-networking;2.50.0;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gmp;6.1.2;3.module_7e01f122;x86_64;1;42;sigmd5;1491914281;sigpgp;siggpg
gnome-desktop3;3.24.2;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gnome-themes-standard;3.22.3;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gnutls;3.5.10;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gobject-introspection;1.52.1;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
graphite2;1.3.6;2.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
grep;3.0;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gsettings-desktop-schemas;3.24.0;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gtk2;2.24.31;3.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gtk3;3.22.16;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gtk-update-icon-cache;3.22.16;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gvfs-client;1.32.1;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
gzip;1.8;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
harfbuzz;1.4.4;1.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
hicolor-icon-theme;0.15;4.module_f9511cd3;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
hunspell;1.5.4;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
hunspell-en-GB;0.20140811.1;6.module_e15740c0;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
hunspell-en-US;0.20140811.1;6.module_e15740c0;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
hwdata;0.301;1.module_f9511cd3;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
info;6.3;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
iptables-libs;1.6.1;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
jasper-libs;2.0.12;1.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
jbigkit-libs;2.1;6.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
json-glib;1.2.8;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
keyutils-libs;1.5.9;9.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
kmod-libs;24;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
krb5-libs;1.15;9.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
lcms2;2.8;3.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libacl;2.2.52;13.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libappstream-glib;0.7.0;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libarchive;3.2.2;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libattr;2.4.47;18.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libblkid;2.29.1;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libcap;2.25;5.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libcap-ng;0.7.8;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libcom_err;1.43.4;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libcroco;0.6.11;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libcrypt;2.25;4.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libdatrie;0.2.9;4.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libdb;5.3.28;17.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libdrm;2.4.81;1.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libepoxy;1.4.1;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libfdisk;2.29.1;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libffi;3.1;10.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libgcab1;0.7;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libgcc;7.0.1;0.15.module_191b5bc9;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libgcrypt;1.7.6;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libglvnd;0.2.999;17.20170308git8e6e102.module_f9511cd3;x86_64;1;42;sigmd5;1491914281;sigpgp;siggpg
libglvnd-egl;0.2.999;17.20170308git8e6e102.module_f9511cd3;x86_64;1;42;sigmd5;1491914281;sigpgp;siggpg
libglvnd-glx;0.2.999;17.20170308git8e6e102.module_f9511cd3;x86_64;1;42;sigmd5;1491914281;sigpgp;siggpg
libgpg-error;1.25;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libgusb;0.2.10;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libidn;1.33;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libidn2;0.16;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libjpeg-turbo;1.5.1;2.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libmodman;2.0.1;13.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libmount;2.29.1;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libnotify;0.7.7;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libpcap;1.8.1;3.module_7e01f122;x86_64;14;42;sigmd5;1491914281;sigpgp;siggpg
libpciaccess;0.13.4;4.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libpng;1.6.28;2.module_7e01f122;x86_64;2;42;sigmd5;1491914281;sigpgp;siggpg
libproxy;0.4.15;1.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libpwquality;1.3.0;8.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
librsvg2;2.40.17;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libseccomp;2.3.2;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libselinux;2.6;6.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libsemanage;2.6;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libsepol;2.6;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libsigsegv;2.11;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libsmartcols;2.29.1;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libsoup;2.58.1;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libstdc++;7.0.1;0.15.module_191b5bc9;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libstemmer;0;5.585svn.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libtasn1;4.10;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libthai;0.1.25;2.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libtiff;4.0.8;1.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libunistring;0.9.7;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libusbx;1.0.21;2.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libutempter;1.1.6;9.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libuuid;2.29.1;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libverto;0.2.6;7.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libwayland-client;1.13.0;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libwayland-cursor;1.13.0;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libwayland-server;1.13.0;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libX11;1.6.5;2.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libX11-common;1.6.5;2.module_98c1823a;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
libXau;1.0.8;7.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libxcb;1.12;3.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXcomposite;0.4.4;9.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXcursor;1.1.14;8.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXdamage;1.1.4;9.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXext;1.3.3;5.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXfixes;5.0.3;2.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXft;2.3.2;5.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXi;1.7.9;2.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXinerama;1.1.3;7.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libxkbcommon;0.7.1;3.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libxml2;2.9.4;2.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXrandr;1.5.1;2.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXrender;0.9.10;2.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libxshmfence;1.2;4.module_98c1823a;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXtst;1.2.3;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
libXxf86vm;1.1.4;4.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
lz4-libs;1.7.5;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
lzo;2.08;9.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
mesa-libEGL;17.1.4;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
mesa-libgbm;17.1.4;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
mesa-libGL;17.1.4;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
mesa-libglapi;17.1.4;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
mesa-libwayland-egl;17.1.4;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
mpfr;3.1.5;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
ncurses;6.0;8.20170212.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
ncurses-base;6.0;8.20170212.module_7e01f122;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
ncurses-libs;6.0;8.20170212.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
nettle;3.3;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
openssl-libs;1.1.0e;1.module_7e01f122;x86_64;1;42;sigmd5;1491914281;sigpgp;siggpg
p11-kit;0.23.5;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
p11-kit-trust;0.23.5;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
pam;1.3.0;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
pango;1.40.6;1.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
pcre;8.40;5.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
pixman;0.34.0;3.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
popt;1.16;8.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
python3;3.6.0;21.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
python3-appdirs;1.4.0;10.module_7e01f122;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
python3-cairo;1.10.0;20.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
python3-gobject;3.24.1;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
python3-gobject-base;3.24.1;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
python3-libs;3.6.0;21.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
python3-packaging;16.8;4.module_7e01f122;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
python3-pip;9.0.1;7.module_7e01f122;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
python3-pyparsing;2.1.10;3.module_7e01f122;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
python3-setuptools;36.0.1;1.module_e15740c0;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
qrencode-libs;3.4.2;7.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
readline;7.0;5.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
rest;0.8.0;2.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
sed;4.4;1.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
setup;2.10.5;2.module_7e01f122;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
shadow-utils;4.3.1;3.module_7e01f122;x86_64;2;42;sigmd5;1491914281;sigpgp;siggpg
shared-mime-info;1.8;2.module_f9511cd3;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
sqlite-libs;3.17.0;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
systemd;233;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
systemd-libs;233;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
systemd-pam;233;3.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
system-python;3.6.0;21.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
system-python-libs;3.6.0;21.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
tzdata;2016j;3.module_7e01f122;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
ustr;1.0.4;22.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
util-linux;2.29.1;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
xkeyboard-config;2.21;1.module_e15740c0;noarch;0;42;sigmd5;1491914281;sigpgp;siggpg
xz-libs;5.2.3;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
zenity;3.24.0;1.module_e15740c0;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
zlib;1.2.11;2.module_7e01f122;x86_64;0;42;sigmd5;1491914281;sigpgp;siggpg
"""
RUNTIME_FILESYSTEM_CONTENTS = {
'/usr/bin/not_eog': b'SHOULD_IGNORE',
ROOT + '/etc/passwd': b'SOME_CONFIG_FILE',
ROOT + '/etc/shadow:0444': b'FUNNY_PERMISSIONS',
ROOT + '/usr/bin/bash': b'SOME_BINARY',
ROOT + '/usr/bin/mount:1755': b'SOME_SETUID_BINARY',
ROOT + '/usr/lib64/libfoo.so.1.0.0': b'SOME_LIB',
ROOT + '/usr/share/foo:0777': None, # writeable directory
'/var/tmp/flatpak-build.rpm_qf': RUNTIME_MANIFEST_CONTENTS,
}
EXPECTED_RUNTIME_FLATPAK_CONTENTS = [
'/files/bin/bash',
'/files/bin/mount',
'/files/etc/passwd',
'/files/etc/shadow',
'/files/lib64/libfoo.so.1.0.0',
'/metadata'
]
RUNTIME_CONFIG = {
'expected_ref_name': 'runtime/org.fedoraproject.Platform/x86_64/f28',
'filesystem_contents': RUNTIME_FILESYSTEM_CONTENTS,
'expected_contents': EXPECTED_RUNTIME_FLATPAK_CONTENTS,
'expected_components': ['abattis-cantarell-fonts'],
'unexpected_components': [],
}
SDK_MANIFEST_CONTENTS = b"""gcc;7.3.1;2.fc27;x86_64;(none);54142500;sigmd5;1517331292;sigpgp;siggpg
"""
SDK_FILESYSTEM_CONTENTS = {
ROOT + '/usr/bin/gcc': b'SOME_BINARY',
'/var/tmp/flatpak-build.rpm_qf': SDK_MANIFEST_CONTENTS,
}
EXPECTED_SDK_FLATPAK_CONTENTS = [
'/files/bin/gcc',
'/metadata'
]
SDK_CONFIG = {
'expected_ref_name': 'runtime/org.fedoraproject.Sdk/x86_64/f28',
'filesystem_contents': SDK_FILESYSTEM_CONTENTS,
'expected_contents': EXPECTED_SDK_FLATPAK_CONTENTS,
'expected_components': ['gcc'],
'unexpected_components': [],
}
CONFIGS = build_flatpak_test_configs({
'app': APP_CONFIG,
'runtime': RUNTIME_CONFIG,
'sdk': SDK_CONFIG,
})
class MockSource(object):
dockerfile_path = None
path = None
class MockBuilder(object):
def __init__(self):
self.image_id = "xxx"
self.source = MockSource()
self.base_image = ImageName(repo="qwe", tag="asd")
def load_labels_and_annotations(metadata):
def get_path(descriptor):
digest = descriptor["digest"]
assert digest.startswith("sha256:")
return os.path.join(metadata['path'],
"blobs/sha256",
digest[len("sha256:"):])
with open(os.path.join(metadata['path'], "index.json")) as f:
index_json = json.load(f)
with open(get_path(index_json["manifests"][0])) as f:
manifest_json = json.load(f)
with open(get_path(manifest_json["config"])) as f:
config_json = json.load(f)
return config_json["config"]["Labels"], manifest_json["annotations"]
class DefaultInspector(object):
def __init__(self, tmpdir, metadata):
# Import the OCI bundle into a ostree repository for examination
self.repodir = os.path.join(str(tmpdir), 'repo')
subprocess.check_call(['ostree', 'init', '--mode=archive-z2', '--repo=' + self.repodir])
subprocess.check_call(['flatpak', 'build-import-bundle', '--oci',
self.repodir, str(metadata['path'])])
self.ref_name = metadata['ref_name']
def list_files(self):
output = subprocess.check_output(['ostree', '--repo=' + self.repodir,
'ls', '-R', self.ref_name],
universal_newlines=True)
files = []
for line in output.split('\n'):
line = line.strip()
if line == '':
continue
perms, _, _, _, path = line.split()
if perms.startswith('d'): # A directory
continue
files.append(path)
return files
def cat_file(self, path):
return subprocess.check_output(['ostree', '--repo=' + self.repodir,
'cat', self.ref_name,
path],
universal_newlines=True)
def get_file_perms(self, path):
output = subprocess.check_output(['ostree', '--repo=' + self.repodir,
'ls', '-R', self.ref_name, path],
universal_newlines=True)
for line in output.split('\n'):
line = line.strip()
if line == '':
continue
perms = line.split()[0]
return perms
def make_and_store_reactor_config_map(workflow, flatpak_metadata):
workflow.plugin_workspace[ReactorConfigPlugin.key] = {}
reactor_map = {
'version': 1,
'flatpak': {'metadata': flatpak_metadata},
}
workflow.plugin_workspace[ReactorConfigPlugin.key] = {
WORKSPACE_CONF_KEY: ReactorConfig(reactor_map)
}
def write_docker_file(config, tmpdir):
df_path = os.path.join(tmpdir, "Dockerfile")
base_module_name = config['base_module']
base_module = config['modules'][base_module_name]
with open(df_path, "w") as f:
f.write(dedent("""\
FROM fedora:30
LABEL name="{name}"
LABEL com.redhat.component="{component}"
LABEL version="{stream}"
LABEL release="{version}"
""".format(name=config['name'],
component=config['component'],
stream=base_module['stream'],
version=base_module['version'])))
return df_path
@pytest.mark.skipif(not MODULEMD_AVAILABLE, # noqa - docker_tasker fixture
reason="libmodulemd not available")
@pytest.mark.parametrize('config_name, flatpak_metadata, breakage', [
('app', 'both', None),
('app', 'both', 'copy_error'),
('app', 'both', 'no_runtime'),
('app', 'annotations', None),
('app', 'labels', None),
('runtime', 'both', None),
('sdk', 'both', None),
])
def test_flatpak_create_oci(tmpdir, docker_tasker, user_params,
config_name, flatpak_metadata, breakage):
# Check that we actually have flatpak available
have_flatpak = False
try:
output = subprocess.check_output(['flatpak', '--version'],
universal_newlines=True)
m = re.search(r'(\d+)\.(\d+)\.(\d+)', output)
if m and (int(m.group(1)), int(m.group(2)), int(m.group(3))) >= (0, 9, 7):
have_flatpak = True
except (subprocess.CalledProcessError, OSError):
pytest.skip(msg='flatpak not available')
if not have_flatpak:
return
# Check if we have skopeo
try:
subprocess.check_output(['skopeo', '--version'])
except (subprocess.CalledProcessError, OSError):
pytest.skip(msg='skopeo not available')
config = CONFIGS[config_name]
workflow = DockerBuildWorkflow(source=MOCK_SOURCE)
workflow.user_params.update(USER_PARAMS)
setattr(workflow, 'builder', MockBuilder())
workflow.builder.df_path = write_docker_file(config, str(tmpdir))
setattr(workflow.builder, 'tasker', docker_tasker)
# Make a local copy instead of pushing oci to docker storage
workflow.storage_transport = 'oci:{}'.format(str(tmpdir))
make_and_store_reactor_config_map(workflow, flatpak_metadata)
filesystem_dir = os.path.join(str(tmpdir), 'filesystem')
os.mkdir(filesystem_dir)
filesystem_contents = config['filesystem_contents']
for path, contents in filesystem_contents.items():
parts = path.split(':', 1)
path = parts[0]
mode = parts[1] if len(parts) == 2 else None
fullpath = os.path.join(filesystem_dir, path[1:])
parent_dir = os.path.dirname(fullpath)
if not os.path.isdir(parent_dir):
os.makedirs(parent_dir)
if contents is None:
os.mkdir(fullpath)
else:
with open(fullpath, 'wb') as f:
f.write(contents)
if mode is not None:
os.chmod(fullpath, int(mode, 8))
if breakage == 'no_runtime':
# Copy the parts of the config we are going to change
config = dict(config)
config['modules'] = dict(config['modules'])
config['modules']['eog'] = dict(config['modules']['eog'])
module_config = config['modules']['eog']
mmd = Modulemd.ModuleStream.read_string(module_config['metadata'], strict=True)
mmd.clear_dependencies()
mmd.add_dependencies(Modulemd.Dependencies())
mmd_index = Modulemd.ModuleIndex.new()
mmd_index.add_module_stream(mmd)
module_config['metadata'] = mmd_index.dump_to_string()
expected_exception = 'Failed to identify runtime module'
elif breakage == 'copy_error':
workflow.storage_transport = 'idontexist'
expected_exception = 'CalledProcessError'
else:
assert breakage is None
expected_exception = None
filesystem_tar = os.path.join(filesystem_dir, 'tar')
with open(filesystem_tar, "wb") as f:
with tarfile.TarFile(fileobj=f, mode='w') as tf:
for f in os.listdir(filesystem_dir):
tf.add(os.path.join(filesystem_dir, f), f)
export_stream = open(filesystem_tar, "rb")
def stream_to_generator(s):
while True:
# Yield small chunks to test the StreamAdapter code better
buf = s.read(100)
if len(buf) == 0:
return
yield buf
export_generator = stream_to_generator(export_stream)
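    # Stub out the docker interactions so the plugin "exports" the synthetic
    # filesystem tarball instead of talking to a real docker daemon.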
(flexmock(docker_tasker.tasker)
.should_receive('export_container')
.with_args(CONTAINER_ID)
.and_return(export_generator))
(flexmock(docker_tasker.tasker.d.wrapped)
.should_receive('create_container')
.with_args(workflow.image, command=["/bin/bash"])
.and_return({'Id': CONTAINER_ID}))
(flexmock(docker_tasker.tasker.d.wrapped)
.should_receive('remove_container')
.with_args(CONTAINER_ID, force=False))
setup_flatpak_source_info(workflow, config)
runner = PrePublishPluginsRunner(
docker_tasker,
workflow,
[{
'name': FlatpakCreateOciPlugin.key,
'args': {}
}]
)
if expected_exception:
with pytest.raises(PluginFailedException) as ex:
runner.run()
assert expected_exception in str(ex.value)
else:
# Check if run replaces image_id and marks filesystem image for removal
filesystem_image_id = 'xxx'
for_removal = workflow.plugin_workspace.get(
'remove_built_image', {}).get('images_to_remove', [])
assert workflow.builder.image_id == filesystem_image_id
assert filesystem_image_id not in for_removal
runner.run()
for_removal = workflow.plugin_workspace['remove_built_image']['images_to_remove']
assert re.match(r'^sha256:\w{64}$', workflow.builder.image_id)
assert filesystem_image_id in for_removal
dir_metadata = workflow.exported_image_sequence[-2]
assert dir_metadata['type'] == IMAGE_TYPE_OCI
tar_metadata = workflow.exported_image_sequence[-1]
assert tar_metadata['type'] == IMAGE_TYPE_OCI_TAR
# Check that the correct labels and annotations were written
labels, annotations = load_labels_and_annotations(dir_metadata)
if config_name == 'app':
assert labels['name'] == 'eog'
assert labels['com.redhat.component'] == 'eog'
assert labels['version'] == 'f28'
assert labels['release'] == '20170629213428'
elif config_name == 'runtime': # runtime
assert labels['name'] == 'flatpak-runtime'
assert labels['com.redhat.component'] == 'flatpak-runtime-container'
assert labels['version'] == 'f28'
assert labels['release'] == '20170701152209'
else:
assert labels['name'] == 'flatpak-sdk'
assert labels['com.redhat.component'] == 'flatpak-sdk-container'
assert labels['version'] == 'f28'
assert labels['release'] == '20170701152209'
if flatpak_metadata == 'annotations':
assert annotations.get('org.flatpak.ref') == config['expected_ref_name']
assert 'org.flatpak.ref' not in labels
elif flatpak_metadata == 'labels':
assert 'org.flatpak.ref' not in annotations
assert labels.get('org.flatpak.ref') == config['expected_ref_name']
elif flatpak_metadata == 'both':
assert annotations.get('org.flatpak.ref') == config['expected_ref_name']
assert labels.get('org.flatpak.ref') == config['expected_ref_name']
# Check that the expected files ended up in the flatpak
# Flatpak versions before 1.6 require annotations to be present, and Flatpak
# versions 1.6 and later require labels to be present. Skip the remaining
# checks unless we have both annotations and labels.
if flatpak_metadata != 'both':
return
inspector = DefaultInspector(tmpdir, dir_metadata)
files = inspector.list_files()
assert sorted(files) == config['expected_contents']
components = {c['name'] for c in workflow.image_components} # noqa:E501; pylint: disable=not-an-iterable
for n in config['expected_components']:
assert n in components
for n in config['unexpected_components']:
assert n not in components
metadata_lines = inspector.cat_file('/metadata').split('\n')
assert any(re.match(r'runtime=org.fedoraproject.Platform/.*/f28$', l)
for l in metadata_lines)
assert any(re.match(r'sdk=org.fedoraproject.Sdk/.*/f28$', l)
for l in metadata_lines)
if config_name == 'app':
# Check that the desktop file was rewritten
output = inspector.cat_file('/export/share/applications/org.gnome.eog.desktop')
lines = output.split('\n')
assert 'Icon=org.gnome.eog' in lines
assert 'name=org.gnome.eog' in metadata_lines
assert 'tags=Viewer' in metadata_lines
assert 'command=eog2' in metadata_lines
elif config_name == 'runtime': # runtime
# Check that permissions have been normalized
assert inspector.get_file_perms('/files/etc/shadow') == '-00644'
assert inspector.get_file_perms('/files/bin/mount') == '-00755'
assert inspector.get_file_perms('/files/share/foo') == 'd00755'
assert 'name=org.fedoraproject.Platform' in metadata_lines
else: # SDK
assert 'name=org.fedoraproject.Sdk' in metadata_lines
@pytest.mark.skipif(not MODULEMD_AVAILABLE, # noqa - docker_tasker fixture
reason="libmodulemd not available")
def test_skip_plugin(caplog, docker_tasker, user_params):
workflow = DockerBuildWorkflow(source=MOCK_SOURCE)
workflow.user_params = {}
setattr(workflow, 'builder', MockBuilder())
runner = PrePublishPluginsRunner(
docker_tasker,
workflow,
[{
'name': FlatpakCreateOciPlugin.key,
'args': {}
}]
)
runner.run()
assert 'not flatpak build, skipping plugin' in caplog.text
| bsd-3-clause | -4,129,035,388,203,286,500 | 48.413649 | 343 | 0.70529 | false |
juhnowski/FishingRod | production/pygsl-0.9.5/examples/odeiv.py | 1 | 2561 | # Author : Pierre Schnizer
"""
The Python equivalent of the C example found in the GSL Reference document.
It prints the calculational output to stdout. The first column is t, the
second y[0] and the third y[1]. Plot it with your favourite program to see
the output.
"""
import sys
import time
import pygsl._numobj as numx
from pygsl import odeiv
def func(t, y, mu):
f = numx.zeros((2,), ) * 1.0
f[0] = y[1]
f[1] = -y[0] - mu * y[1] * (y[0] ** 2 -1);
return f
def jac(t, y, mu):
dfdy = numx.ones((2,2),) * 1.
dfdy[0, 0] = 0.0
dfdy[0, 1] = 1.0
dfdy[1, 0] = -2.0 * mu * y[0] * y[1] - 1.0
dfdy[1, 1] = -mu * (y[0]**2 - 1.0)
dfdt = numx.zeros((2,))
return dfdy, dfdt
def run():
mu = 10.0
dimension = 2
# The different possible steppers for the function
    # Uncomment your favourite one to test it.
#stepper = odeiv.step_rk2
#stepper = odeiv.step_rk4
stepper = odeiv.step_rkf45
#stepper = odeiv.step_rkck
#stepper = odeiv.step_rk8pd
#stepper = odeiv.step_rk2imp
#stepper = odeiv.step_rk4imp
#stepper = odeiv.step_gear1
#stepper = odeiv.step_gear2
#stepper = odeiv.step_bsimp
step = stepper(dimension, func, jac, mu)
    # All above steppers except odeiv.step_bsimp (Bulirsch-Stoer Improved
    # method) can calculate without a Jacobian ...
step = stepper(dimension, func, args=mu)
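    # Adaptive step-size control: keep the local error within the requested
    # absolute and relative tolerances (both 1e-6 here).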
control = odeiv.control_y_new(step, 1e-6, 1e-6)
evolve = odeiv.evolve(step, control, dimension)
print "# Using stepper %s with order %d" %(step.name(), step.order())
print "# Using Control ", control.name()
print "# %9s %9s %9s %9s " % ("t", "h", "y[0]", "y[1]")
hstart = 1
tstart = 0.0
t1 = (50.0)
#t1 = (500000.0,)
ystart = (1.0, 0.0)
t = tstart
y = ystart
stamp = time.time()
nsteps = 1000
#nsteps = 10000000
h = hstart
for i in xrange(nsteps):
if t >= t1:
break
t, h, y = evolve.apply(t, t1, h, y)
#print " %5d % 10.6f % 10.6f % 10.6f " %(i, t, y[0], y[1])
else:
raise ValueError, "Maximum number of steps exceeded!"
print "Needed %f seconds" %( time.time() - stamp,)
print " % 10.6f % 10.6f % 10.6f % 10.6f " %(t, h, y[0], y[1])
stamp = time.time()
t, h, y = evolve.apply_vector(tstart, t1, hstart, ystart, nsteps=1000, hmax=.1)
print "Needed %f seconds" %(time.time() - stamp,)
print " % 10.6f % 10.6f % 10.6f % 10.6f " %(t, h, y[0, 0], y[0, 1])
if __name__ == '__main__':
run()
| mit | -8,548,448,083,523,289,000 | 27.775281 | 83 | 0.565795 | false |
AWegnerGitHub/stackapi | setup.py | 1 | 1182 | from setuptools import setup, find_packages
long_description = open('README.rst').read()
setup(
name='StackAPI',
description='Library for interacting with the Stack Exchange API',
long_description=long_description,
url='https://github.com/AWegnerGitHub/StackAPI',
author='Andrew Wegner',
author_email='[email protected]',
maintainer='Andrew Wegner',
maintainer_email='[email protected]',
license='MIT',
keywords='stackexchange',
packages=find_packages(exclude=['contrib', 'docs', 'tests*', 'test']),
version='0.2.0',
install_requires=['requests', 'six'],
tests_require=['mock'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Natural Language :: English',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules'
],
test_suite='tests'
)
| mit | 8,084,246,330,976,300,000 | 34.818182 | 74 | 0.632826 | false |
yantrabuddhi/nativeclient | build/package_version/package_locations.py | 5 | 6916 | #!/usr/bin/python
# Copyright (c) 2014 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This library keeps track of all the standard locations for package files"""
import os
import posixpath
SHARED_FOLDER = 'shared'
ARCHIVE_DIR = 'package_archives'
def GetRemotePackageKey(is_shared, rev_num, package_target, package_name):
"""Returns key for package files in the google storage cloud.
Args:
is_shared: Whether or not the package is marked as shared.
rev_num: The revision identifier of when the package was built.
package_target: The package target which this package belongs to.
package_name: The name of the package.
Returns:
The google cloud storage key where the package file should be found.
"""
if is_shared:
intermediate_dir = SHARED_FOLDER
else:
intermediate_dir = package_target
return posixpath.join('builds',
str(rev_num),
intermediate_dir,
package_name + '.json')
def GetRemotePackageArchiveKey(archive_name, hash_value):
"""Returns key for package archive files in the google storage cloud.
Args:
archive_name: The name of the archive.
hash_value: The hash of the archive.
Returns:
The google cloud storage key where the package archive should be found.
"""
return posixpath.join('archives',
archive_name,
hash_value)
def GetLocalPackageFile(tar_dir, package_target, package_name):
"""Returns the local package file location.
Args:
tar_dir: The tar directory for where package archives would be found.
package_target: The package target of the package.
package_name: The name of the package.
Returns:
The standard location where local package file is found.
"""
return os.path.join(tar_dir,
package_target,
package_name + '.json')
def GetArchiveExtension(archive_name):
"""Returns the extension of an archive.
Note that the archive extension is different from how os.path.splitext splits
extensions. The standard python one splits on the last period, while this one
will split on the first period.
Args:
archive_name: The name of the archive.
Returns:
The extension of the archive.
"""
name_split = archive_name.split('.', 1)
if len(name_split) == 2:
return '.' + name_split[1]
return ''
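# Illustrative sketch, not part of the original module: the archive names are
# made-up values. Note the contrast with os.path.splitext, which splits on the
# last period rather than the first.
#
#   GetArchiveExtension('toolchain.tar.gz')  -> '.tar.gz'
#   os.path.splitext('toolchain.tar.gz')[1]  -> '.gz'
#   GetArchiveExtension('toolchain')         -> ''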
def GetLocalPackageArchiveDir(tar_dir, archive_name):
"""Returns directory where local package archive files live.
Args:
tar_dir: The tar root directory for where package archives would be found.
archive_name: The name of the archive contained within the package.
Returns:
The standard location where local package archive files are found.
"""
return os.path.join(tar_dir,
ARCHIVE_DIR,
archive_name)
def GetLocalPackageArchiveFile(tar_dir, archive_name, archive_hash):
"""Returns the local package archive file location.
Args:
tar_dir: The tar root directory for where package archives would be found.
archive_name: The name of the archive contained within the package.
archive_hash: The hash of the archive, which will be part of the final name.
Returns:
The standard location where local package archive file is found.
"""
if isinstance(archive_hash, (int, long)):
archive_hash = '%040x' % archive_hash
archive_directory = GetLocalPackageArchiveDir(tar_dir, archive_name)
# Have the file keep the extension so that extractions know the file type.
archive_filename = archive_hash + GetArchiveExtension(archive_name)
return os.path.join(archive_directory, archive_filename)
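# Illustrative sketch, not part of the original module: the tar directory,
# archive name and hash are made-up values (shown with posix-style separators;
# os.path.join uses the platform separator). An integer hash would first be
# formatted as a zero-padded 40-character hex string.
#
#   GetLocalPackageArchiveFile('/tars', 'toolchain.tar.gz', 'deadbeef')
#     -> '/tars/package_archives/toolchain.tar.gz/deadbeef.tar.gz'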
def GetLocalPackageArchiveLogFile(archive_file):
"""Returns the local package archive log file location.
Args:
archive_file: Location of the local archive file location.
Returns:
The standard location where local package archive log file is found.
"""
return os.path.splitext(archive_file)[0] + '.log'
def GetRevisionFile(revision_dir, package_name):
"""Returns the local revision file location.
Args:
revision_dir: Revision directory where revision files should be found.
package_name: The name of the package revision file represents.
Returns:
The standard location where the revision file is found.
"""
return os.path.join(revision_dir,
package_name + '.json')
def GetFullDestDir(dest_dir, package_target, package_name):
"""Returns the destination directory for a package archive.
Args:
dest_dir: Destination directory root.
package_target: Package target of the package to extract.
package_name: The package name of the package to extract.
Returns:
The package directory within the destination directory.
"""
return os.path.join(dest_dir, package_target, package_name)
def GetDestPackageFile(dest_dir, package_target, package_name):
"""Returns the package file stored in the destination directory.
The package file is normally stored inside of the tar directory, but upon
extraction a copy of the package file is also made into the extraction
directory for book keeping purposes.
Args:
dest_dir: Destination directory root.
package_target: Package target of the package to extract.
package_name: The package name of the package to extract.
Returns:
The location of the package file within the destination directory.
"""
return os.path.join(GetFullDestDir(dest_dir, package_target, package_name),
package_name + '.json')
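# Illustrative sketch, not part of the original module: made-up destination
# directory, package target and package name (posix-style separators shown).
#
#   GetDestPackageFile('/out', 'linux_x86', 'nacl_arm_newlib')
#     -> '/out/linux_x86/nacl_arm_newlib/nacl_arm_newlib.json'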
def WalkPackages(tar_dir):
"""Generator for local package target packages within a root tar directory.
Use this generator to walk through the list of package targets and their
respective packages found within a local tar directory. This function does
not guarantee that these are valid package targets or packages, so it could
yield invalid names for malformed tar directories.
Args:
tar_dir: The tar root directory where package archives would be found.
Yields:
Tuple containing (package_target, [list-of package_names]).
"""
if os.path.isdir(tar_dir):
for package_target_dir in os.listdir(tar_dir):
# Skip the package archive directory
if package_target_dir == ARCHIVE_DIR:
continue
full_package_target_dir = os.path.join(tar_dir, package_target_dir)
if os.path.isdir(full_package_target_dir):
packages = [os.path.splitext(package_name)[0]
for package_name in os.listdir(full_package_target_dir)
if package_name.endswith('.json') and
os.path.isfile(os.path.join(full_package_target_dir,
package_name))]
yield (package_target_dir, packages)
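# Illustrative sketch, not part of the original module: a typical iteration
# over a local tar directory, pairing WalkPackages with GetLocalPackageFile.
# The tar directory path is a made-up value.
#
#   for package_target, package_names in WalkPackages('/tars'):
#     for package_name in package_names:
#       package_file = GetLocalPackageFile('/tars', package_target, package_name)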
| bsd-3-clause | -5,299,187,509,450,316,000 | 33.237624 | 80 | 0.697079 | false |
mikewiebe-ansible/ansible | test/units/modules/network/fortios/test_fortios_firewall_schedule_group.py | 21 | 7727 | # Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <https://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import pytest
from mock import ANY
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
try:
from ansible.modules.network.fortios import fortios_firewall_schedule_group
except ImportError:
pytest.skip("Could not load required modules for testing", allow_module_level=True)
@pytest.fixture(autouse=True)
def connection_mock(mocker):
connection_class_mock = mocker.patch('ansible.modules.network.fortios.fortios_firewall_schedule_group.Connection')
return connection_class_mock
fos_instance = FortiOSHandler(connection_mock)
def test_firewall_schedule_group_creation(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_schedule_group': {
'color': '3',
'name': 'default_name_4'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_schedule_group.fortios_firewall_schedule(input_data, fos_instance)
expected_data = {
'color': '3',
'name': 'default_name_4'
}
set_method_mock.assert_called_with('firewall.schedule', 'group', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_firewall_schedule_group_creation_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_schedule_group': {
'color': '3',
'name': 'default_name_4'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_schedule_group.fortios_firewall_schedule(input_data, fos_instance)
expected_data = {
'color': '3',
'name': 'default_name_4'
}
set_method_mock.assert_called_with('firewall.schedule', 'group', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_firewall_schedule_group_removal(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'firewall_schedule_group': {
'color': '3',
'name': 'default_name_4'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_schedule_group.fortios_firewall_schedule(input_data, fos_instance)
delete_method_mock.assert_called_with('firewall.schedule', 'group', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
def test_firewall_schedule_group_deletion_fails(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
delete_method_result = {'status': 'error', 'http_method': 'POST', 'http_status': 500}
delete_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.delete', return_value=delete_method_result)
input_data = {
'username': 'admin',
'state': 'absent',
'firewall_schedule_group': {
'color': '3',
'name': 'default_name_4'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_schedule_group.fortios_firewall_schedule(input_data, fos_instance)
delete_method_mock.assert_called_with('firewall.schedule', 'group', mkey=ANY, vdom='root')
schema_method_mock.assert_not_called()
assert is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 500
def test_firewall_schedule_group_idempotent(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'error', 'http_method': 'DELETE', 'http_status': 404}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_schedule_group': {
'color': '3',
'name': 'default_name_4'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_schedule_group.fortios_firewall_schedule(input_data, fos_instance)
expected_data = {
'color': '3',
'name': 'default_name_4'
}
set_method_mock.assert_called_with('firewall.schedule', 'group', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert not changed
assert response['status'] == 'error'
assert response['http_status'] == 404
def test_firewall_schedule_group_filter_foreign_attributes(mocker):
schema_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.schema')
set_method_result = {'status': 'success', 'http_method': 'POST', 'http_status': 200}
set_method_mock = mocker.patch('ansible.module_utils.network.fortios.fortios.FortiOSHandler.set', return_value=set_method_result)
input_data = {
'username': 'admin',
'state': 'present',
'firewall_schedule_group': {
'random_attribute_not_valid': 'tag',
'color': '3',
'name': 'default_name_4'
},
'vdom': 'root'}
is_error, changed, response = fortios_firewall_schedule_group.fortios_firewall_schedule(input_data, fos_instance)
expected_data = {
'color': '3',
'name': 'default_name_4'
}
set_method_mock.assert_called_with('firewall.schedule', 'group', data=expected_data, vdom='root')
schema_method_mock.assert_not_called()
assert not is_error
assert changed
assert response['status'] == 'success'
assert response['http_status'] == 200
| gpl-3.0 | -380,618,400,493,725,630 | 35.971292 | 142 | 0.671153 | false |
amenonsen/ansible | lib/ansible/modules/network/fortios/fortios_firewall_address.py | 1 | 18494 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_firewall_address
short_description: Configure IPv4 addresses in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS device by allowing the
user to set and modify firewall feature and address category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
state:
description:
- Indicates whether to create or remove the object.
type: str
choices:
- present
- absent
version_added: 2.9
firewall_address:
description:
- Configure IPv4 addresses.
default: null
type: dict
suboptions:
allow_routing:
description:
- Enable/disable use of this address in the static route configuration.
type: str
choices:
- enable
- disable
associated_interface:
description:
- Network interface associated with address. Source system.interface.name system.zone.name.
type: str
cache_ttl:
description:
- Defines the minimal TTL of individual IP addresses in FQDN cache measured in seconds.
type: int
color:
description:
- Color of icon on the GUI.
type: int
comment:
description:
- Comment.
type: str
country:
description:
- IP addresses associated to a specific country.
type: str
end_ip:
description:
- Final IP address (inclusive) in the range for the address.
type: str
epg_name:
description:
- Endpoint group name.
type: str
filter:
description:
- Match criteria filter.
type: str
fqdn:
description:
- Fully Qualified Domain Name address.
type: str
list:
description:
- IP address list.
type: list
suboptions:
ip:
description:
- IP.
required: true
type: str
name:
description:
- Address name.
required: true
type: str
obj_id:
description:
- Object ID for NSX.
type: str
organization:
description:
- "Organization domain name (Syntax: organization/domain)."
type: str
policy_group:
description:
- Policy group name.
type: str
sdn:
description:
- SDN.
type: str
choices:
- aci
- aws
- azure
- gcp
- nsx
- nuage
- oci
- openstack
sdn_tag:
description:
- SDN Tag.
type: str
start_ip:
description:
- First IP address (inclusive) in the range for the address.
type: str
subnet:
description:
- IP address and subnet mask of address.
type: str
subnet_name:
description:
- Subnet name.
type: str
tagging:
description:
- Config object tagging.
type: list
suboptions:
category:
description:
- Tag category. Source system.object-tagging.category.
type: str
name:
description:
- Tagging entry name.
required: true
type: str
tags:
description:
- Tags.
type: list
suboptions:
name:
description:
- Tag name. Source system.object-tagging.tags.name.
required: true
type: str
tenant:
description:
- Tenant.
type: str
type:
description:
- Type of address.
type: str
choices:
- ipmask
- iprange
- fqdn
- geography
- wildcard
- wildcard-fqdn
- dynamic
uuid:
description:
- Universally Unique Identifier (UUID; automatically assigned but can be manually reset).
type: str
visibility:
description:
- Enable/disable address visibility in the GUI.
type: str
choices:
- enable
- disable
wildcard:
description:
- IP address and wildcard netmask.
type: str
wildcard_fqdn:
description:
- Fully Qualified Domain Name with wildcard characters.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure IPv4 addresses.
fortios_firewall_address:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
firewall_address:
allow_routing: "enable"
associated_interface: "<your_own_value> (source system.interface.name system.zone.name)"
cache_ttl: "5"
color: "6"
comment: "Comment."
country: "<your_own_value>"
end_ip: "<your_own_value>"
epg_name: "<your_own_value>"
filter: "<your_own_value>"
fqdn: "<your_own_value>"
list:
-
ip: "<your_own_value>"
name: "default_name_15"
obj_id: "<your_own_value>"
organization: "<your_own_value>"
policy_group: "<your_own_value>"
sdn: "aci"
sdn_tag: "<your_own_value>"
start_ip: "<your_own_value>"
subnet: "<your_own_value>"
subnet_name: "<your_own_value>"
tagging:
-
category: "<your_own_value> (source system.object-tagging.category)"
name: "default_name_26"
tags:
-
name: "default_name_28 (source system.object-tagging.tags.name)"
tenant: "<your_own_value>"
type: "ipmask"
uuid: "<your_own_value>"
visibility: "enable"
wildcard: "<your_own_value>"
wildcard_fqdn: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_firewall_address_data(json):
option_list = ['allow_routing', 'associated_interface', 'cache_ttl',
'color', 'comment', 'country',
'end_ip', 'epg_name', 'filter',
'fqdn', 'list', 'name',
'obj_id', 'organization', 'policy_group',
'sdn', 'sdn_tag', 'start_ip',
'subnet', 'subnet_name', 'tagging',
'tenant', 'type', 'uuid',
'visibility', 'wildcard', 'wildcard_fqdn']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
for elem in data:
elem = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
def firewall_address(data, fos):
vdom = data['vdom']
state = data['state']
firewall_address_data = data['firewall_address']
filtered_data = underscore_to_hyphen(filter_firewall_address_data(firewall_address_data))
if state == "present":
return fos.set('firewall',
'address',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('firewall',
'address',
mkey=filtered_data['name'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_firewall(data, fos):
if data['firewall_address']:
resp = firewall_address(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"firewall_address": {
"required": False, "type": "dict", "default": None,
"options": {
"allow_routing": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"associated_interface": {"required": False, "type": "str"},
"cache_ttl": {"required": False, "type": "int"},
"color": {"required": False, "type": "int"},
"comment": {"required": False, "type": "str"},
"country": {"required": False, "type": "str"},
"end_ip": {"required": False, "type": "str"},
"epg_name": {"required": False, "type": "str"},
"filter": {"required": False, "type": "str"},
"fqdn": {"required": False, "type": "str"},
"list": {"required": False, "type": "list",
"options": {
"ip": {"required": True, "type": "str"}
}},
"name": {"required": True, "type": "str"},
"obj_id": {"required": False, "type": "str"},
"organization": {"required": False, "type": "str"},
"policy_group": {"required": False, "type": "str"},
"sdn": {"required": False, "type": "str",
"choices": ["aci", "aws", "azure",
"gcp", "nsx", "nuage",
"oci", "openstack"]},
"sdn_tag": {"required": False, "type": "str"},
"start_ip": {"required": False, "type": "str"},
"subnet": {"required": False, "type": "str"},
"subnet_name": {"required": False, "type": "str"},
"tagging": {"required": False, "type": "list",
"options": {
"category": {"required": False, "type": "str"},
"name": {"required": True, "type": "str"},
"tags": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}}
}},
"tenant": {"required": False, "type": "str"},
"type": {"required": False, "type": "str",
"choices": ["ipmask", "iprange", "fqdn",
"geography", "wildcard", "wildcard-fqdn",
"dynamic"]},
"uuid": {"required": False, "type": "str"},
"visibility": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"wildcard": {"required": False, "type": "str"},
"wildcard_fqdn": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_firewall(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_firewall(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 | -1,523,008,129,345,578,000 | 32.686703 | 111 | 0.491889 | false |
Ritsyy/fjord | fjord/feedback/tests/test_api.py | 1 | 32763 | # -*- coding: utf-8 -*-
import json
import random
import time
from datetime import date, datetime, timedelta
from django.core.cache import get_cache
from django.test.client import Client
from fjord.base.tests import TestCase, reverse
from fjord.feedback import models
from fjord.feedback.api_views import PER_HOUR_LIMIT
from fjord.feedback.tests import ResponseFactory
from fjord.search.tests import ElasticTestCase
class TestPublicFeedbackAPI(ElasticTestCase):
def create_basic_data(self):
testdata = [
(True, 'en-US', 'Linux', 'Firefox', '30.0', 'desc'),
(True, 'en-US', 'Mac OSX', 'Firefox for Android', '31.0', 'desc'),
(False, 'de', 'Windows', 'Firefox', '29.0', 'banana'),
]
for happy, locale, platform, product, version, desc in testdata:
ResponseFactory(
happy=happy, locale=locale, platform=platform,
product=product, version=version, description=desc)
self.refresh()
def test_basic_root(self):
self.create_basic_data()
resp = self.client.get(reverse('feedback-api'))
# FIXME: test headers
json_data = json.loads(resp.content)
assert json_data['count'] == 3
assert len(json_data['results']) == 3
def test_id(self):
feedback = ResponseFactory()
self.refresh()
resp = self.client.get(reverse('feedback-api'), {'id': feedback.id})
json_data = json.loads(resp.content)
assert json_data['count'] == 1
assert len(json_data['results']) == 1
assert json_data['results'][0]['id'] == feedback.id
def test_multiple_ids(self):
# Create some responses that we won't ask for
for i in range(5):
ResponseFactory()
resps = []
for i in range(5):
resps.append(ResponseFactory())
self.refresh()
resp = self.client.get(
reverse('feedback-api'),
{'id': ','.join([str(int(f.id)) for f in resps])}
)
json_data = json.loads(resp.content)
assert json_data['count'] == 5
assert len(json_data['results']) == 5
assert(
sorted([item['id'] for item in json_data['results']]) ==
sorted([feedback.id for feedback in resps])
)
def test_junk_ids(self):
"""Junk ids should just get ignored"""
feedback = ResponseFactory()
self.refresh()
resp = self.client.get(
reverse('feedback-api'),
{'id': str(feedback.id) + ',foo'}
)
json_data = json.loads(resp.content)
assert json_data['count'] == 1
assert len(json_data['results']) == 1
assert json_data['results'][0]['id'] == feedback.id
def test_happy(self):
self.create_basic_data()
resp = self.client.get(reverse('feedback-api'), {'happy': '1'})
json_data = json.loads(resp.content)
assert json_data['count'] == 2
assert len(json_data['results']) == 2
def test_platforms(self):
self.create_basic_data()
resp = self.client.get(reverse('feedback-api'), {'platforms': 'Linux'})
json_data = json.loads(resp.content)
assert json_data['count'] == 1
assert len(json_data['results']) == 1
resp = self.client.get(reverse('feedback-api'),
{'platforms': 'Linux,Windows'})
json_data = json.loads(resp.content)
assert json_data['count'] == 2
assert len(json_data['results']) == 2
def test_products_and_versions(self):
self.create_basic_data()
resp = self.client.get(reverse('feedback-api'),
{'products': 'Firefox'})
json_data = json.loads(resp.content)
assert json_data['count'] == 2
assert len(json_data['results']) == 2
resp = self.client.get(reverse('feedback-api'),
{'products': 'Firefox,Firefox for Android'})
json_data = json.loads(resp.content)
assert json_data['count'] == 3
assert len(json_data['results']) == 3
# version without product gets ignored
resp = self.client.get(reverse('feedback-api'),
{'versions': '30.0'})
json_data = json.loads(resp.content)
assert json_data['count'] == 3
assert len(json_data['results']) == 3
resp = self.client.get(reverse('feedback-api'),
{'products': 'Firefox',
'versions': '30.0'})
json_data = json.loads(resp.content)
assert json_data['count'] == 1
assert len(json_data['results']) == 1
def test_locales(self):
self.create_basic_data()
resp = self.client.get(reverse('feedback-api'), {'locales': 'en-US'})
json_data = json.loads(resp.content)
assert json_data['count'] == 2
assert len(json_data['results']) == 2
resp = self.client.get(reverse('feedback-api'),
{'locales': 'en-US,de'})
json_data = json.loads(resp.content)
assert json_data['count'] == 3
assert len(json_data['results']) == 3
def test_multi_filter(self):
self.create_basic_data()
# Locale and happy
resp = self.client.get(reverse('feedback-api'), {
'locales': 'de', 'happy': 1
})
json_data = json.loads(resp.content)
assert json_data['count'] == 0
assert len(json_data['results']) == 0
def test_query(self):
self.create_basic_data()
resp = self.client.get(reverse('feedback-api'), {'q': 'desc'})
json_data = json.loads(resp.content)
assert json_data['count'] == 2
assert len(json_data['results']) == 2
def test_old_responses(self):
# Make sure we can't see responses from > 180 days ago
cutoff = datetime.today() - timedelta(days=180)
ResponseFactory(description='Young enough--Party!',
created=cutoff + timedelta(days=1))
ResponseFactory(description='Too old--Get off my lawn!',
created=cutoff - timedelta(days=1))
self.refresh()
resp = self.client.get(reverse('feedback-api'), {
'date_start': (cutoff - timedelta(days=1)).strftime('%Y-%m-%d'),
'date_end': (cutoff + timedelta(days=1)).strftime('%Y-%m-%d')
})
json_data = json.loads(resp.content)
results = json_data['results']
assert len(results) == 1
assert 'Young enough--Party!' in resp.content
assert 'Too old--Get off my lawn!' not in resp.content
def test_public_fields(self):
"""The results should only contain publicly-visible fields"""
# Note: This test might fail when we add new fields to
# ES. What happens is that if a field doesn't have data when
# the document is indexed, then there won't be a key/val in
# the json results. Easy way to fix that is to make sure it
# has a value when creating the response.
ResponseFactory(description=u'best browser ever', api=True)
self.refresh()
resp = self.client.get(reverse('feedback-api'))
json_data = json.loads(resp.content)
assert json_data['count'] == 1
assert(
sorted(json_data['results'][0].keys()) ==
sorted(models.ResponseDocType.public_fields()))
def test_max(self):
for i in range(10):
ResponseFactory(description=u'best browser ever %d' % i)
self.refresh()
resp = self.client.get(reverse('feedback-api'))
json_data = json.loads(resp.content)
assert json_data['count'] == 10
resp = self.client.get(reverse('feedback-api'), {'max': '5'})
json_data = json.loads(resp.content)
assert json_data['count'] == 5
# FIXME: For now, nonsense values get ignored.
resp = self.client.get(reverse('feedback-api'), {'max': 'foo'})
json_data = json.loads(resp.content)
assert json_data['count'] == 10
class TestPublicFeedbackAPIDate(ElasticTestCase):
# Get the YYYY-MM part of the date for last month. We use last
# month since arbitrarily appending the day will always create
# dates in the past.
last_month = str(date.today() - timedelta(days=31))[:7]
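    # For example, if today were 2014-07-15, last_month would be '2014-06'
    # (an illustrative date only, not one the tests depend on).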
def create_data(self, days):
"""Create response data for specified days
This creates the specified responses and also refreshes the
Elasticsearch index.
:arg days: List of day-of-month strings. For example
``['01', '02', '03']``
"""
for day in days:
ResponseFactory(created=self.last_month + '-' + day)
self.refresh()
def _test_date(self, params, expected):
"""Helper method for tests"""
resp = self.client.get(reverse('feedback-api'), params)
json_data = json.loads(resp.content)
results = json_data['results']
assert len(json_data['results']) == len(expected)
for result, expected in zip(results, expected):
assert result['created'] == expected + 'T00:00:00'
def test_date_start(self):
"""date_start returns responses from that day forward"""
self.create_data(['01', '02', '03', '04'])
self._test_date(
params={'date_start': self.last_month + '-02'},
expected=[
self.last_month + '-04',
self.last_month + '-03',
self.last_month + '-02'
])
def test_date_end(self):
"""date_end returns responses from before that day"""
self.create_data(['01', '02', '03', '04'])
self._test_date(
params={'date_end': self.last_month + '-03'},
expected=[
self.last_month + '-03',
self.last_month + '-02',
self.last_month + '-01'
])
def test_date_delta_with_date_end(self):
"""Test date_delta filtering when date_end exists"""
self.create_data(['01', '02', '03', '04'])
self._test_date(
params={'date_delta': '1d', 'date_end': self.last_month + '-03'},
expected=[
self.last_month + '-03',
self.last_month + '-02'
])
def test_date_delta_with_date_start(self):
"""Test date_delta filtering when date_start exists"""
self.create_data(['01', '02', '03', '04'])
self._test_date(
params={'date_delta': '1d', 'date_start': self.last_month + '-02'},
expected=[
self.last_month + '-03',
self.last_month + '-02'
])
def test_date_delta_with_date_end_and_date_start(self):
"""When all three date fields are specified ignore date_start"""
self.create_data(['01', '02', '03', '04'])
self._test_date(
params={
'date_delta': '1d',
'date_end': self.last_month + '-03',
'date_start': self.last_month + '-02'
},
expected=[
self.last_month + '-03',
self.last_month + '-02'
])
def test_date_delta_with_no_constraints(self):
"""Test date_delta filtering without date_end or date_start"""
today = str(date.today())
yesterday = str(date.today() + timedelta(days=-1))
beforeyesterday = str(date.today() + timedelta(days=-2))
for d in [beforeyesterday, yesterday, today]:
ResponseFactory(created=d)
self.refresh()
self._test_date(
params={'date_delta': '1d'},
expected=[
today,
yesterday
])
def test_both_date_end_and_date_start_with_no_date_delta(self):
self.create_data(['01', '02', '03', '04'])
self._test_date(
params={
'date_start': self.last_month + '-02',
'date_end': self.last_month + '-03'
},
expected=[
self.last_month + '-03',
self.last_month + '-02'
])
class TestFeedbackHistogramAPI(ElasticTestCase):
def generate_response(self, created, description=u'So awesome!'):
ResponseFactory(
created=datetime(created.year, created.month, created.day,
random.randint(0, 23), 0),
description=description
)
def to_date_string(self, value):
"""Takes a milliseconds since epoch int and converts to string"""
d = time.gmtime(value / 1000)
return time.strftime('%Y-%m-%d %H:%M:%S', d)
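    # Illustrative example, not part of the original tests: a value of 0
    # (the epoch in milliseconds) converts to '1970-01-01 00:00:00'.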
def test_basic(self):
"""Show last 7 days of counts"""
today = date.today()
for i in range(8):
self.generate_response(today - timedelta(days=i))
self.generate_response(today - timedelta(days=i))
self.refresh()
resp = self.client.get(reverse('feedback-histogram-api'))
assert resp.status_code == 200
json_data = json.loads(resp.content)
# Default is the last 7 days.
assert len(json_data['results']) == 7
# Last item in the list should be yesterday.
assert(
self.to_date_string(json_data['results'][-1][0]) ==
(today - timedelta(days=1)).strftime('%Y-%m-%d 00:00:00')
)
# Count is 2.
assert json_data['results'][-1][1] == 2
# First item is 7 days ago.
assert(
self.to_date_string(json_data['results'][0][0]) ==
(today - timedelta(days=7)).strftime('%Y-%m-%d 00:00:00')
)
# Count is 2.
assert json_data['results'][0][1] == 2
def test_q(self):
"""Test q argument"""
dt = date.today() - timedelta(days=1)
self.generate_response(created=dt, description='pocket pocket')
self.generate_response(created=dt, description='video video')
self.refresh()
resp = self.client.get(reverse('feedback-histogram-api'), {
'q': 'pocket'
})
assert resp.status_code == 200
json_data = json.loads(resp.content)
# Default range ends yesterday. Only one response with
# "pocket" in it yesterday, so this is 1.
assert json_data['results'][-1][1] == 1
# FIXME: Test date_start, date_end and date_delta
# FIXME: Test products, versions
# FIXME: Test locales
# FIXME: Test happy/sad
# FIXME: Test platforms
# FIXME: Test interval
class TestPostFeedbackAPI(TestCase):
def setUp(self):
super(TestPostFeedbackAPI, self).setUp()
# Make sure the unit tests aren't papering over CSRF issues.
self.client = Client(enforce_csrf_checks=True)
def test_minimal(self):
data = {
'happy': True,
'description': u'Great!',
'product': u'Firefox OS'
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
feedback = models.Response.objects.latest(field_name='id')
assert feedback.happy is True
assert feedback.description == data['description']
assert feedback.product == data['product']
# Fills in defaults
assert feedback.url == u''
assert feedback.api == 1
assert feedback.user_agent == u''
def test_maximal(self):
"""Tests an API call with all possible data"""
data = {
'happy': True,
'description': u'Great!',
'category': u'ui',
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
'email': '[email protected]',
'url': 'http://example.com/',
'manufacturer': 'OmniCorp',
'device': 'OmniCorp',
'country': 'US',
'user_agent': (
'Mozilla/5.0 (Mobile; rv:18.0) Gecko/18.0 Firefox/18.0'
),
'source': 'email',
'campaign': 'email_test',
}
# This makes sure the test is up-to-date. If we add fields
# to the serializer, then this will error out unless we've
# also added them to this test.
prs = models.PostResponseSerializer()
for field in prs.fields.keys():
assert field in data, '{0} not in data'.format(field)
# Post the data and then make sure everything is in the
# resulting Response. In most cases, the field names line up
# between PostResponseSerializer and Response with the
# exception of 'email' which is stored in a different table.
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
feedback = models.Response.objects.latest(field_name='id')
for field in prs.fields.keys():
if field == 'email':
email = models.ResponseEmail.objects.latest(field_name='id')
assert email.email == data['email']
else:
assert getattr(feedback, field) == data[field]
def test_missing_happy_defaults_to_sad(self):
# We want to require "happy" to be in the values, but for
# various reasons we can't. Instead, if it's not provided, we
# want to make sure it defaults to sad.
data = {
'description': u'Great!',
'version': u'1.1',
'platform': u'Firefox OS',
'product': u'Firefox OS',
'locale': 'en-US',
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
feedback = models.Response.objects.latest(field_name='id')
assert feedback.happy is False
def test_whitespace_description_is_invalid(self):
data = {
'happy': True,
'description': u' ',
'product': u'Firefox OS'
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 400
def test_blank_category_is_fine_we_suppose(self):
data = {
'happy': True,
'description': u'happy',
'product': u'Loop',
'category': u''
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
def test_invalid_unicode_url(self):
"""Tests an API call with invalid unicode URL"""
data = {
'happy': True,
'description': u'Great!',
'category': u'ui',
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
'email': '[email protected]',
'url': 'தமிழகம்',
'manufacturer': 'OmniCorp',
'device': 'OmniCorp',
'country': 'US',
'user_agent': (
'Mozilla/5.0 (Mobile; rv:18.0) Gecko/18.0 Firefox/18.0'
),
'source': 'email',
'campaign': 'email_test',
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 400
content = json.loads(r.content)
assert u'url' in content
assert content['url'][0].endswith(u'is not a valid url')
def test_with_email(self):
data = {
'happy': True,
'description': u'Great!',
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
'email': '[email protected]'
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
feedback = models.Response.objects.latest(field_name='id')
assert feedback.happy is True
assert feedback.description == data['description']
assert feedback.platform == data['platform']
assert feedback.product == data['product']
assert feedback.channel == data['channel']
assert feedback.version == data['version']
# Fills in defaults
assert feedback.url == u''
assert feedback.user_agent == u''
assert feedback.api == 1
email = models.ResponseEmail.objects.latest(field_name='id')
assert email.email == data['email']
def test_with_context(self):
data = {
'happy': True,
'description': u'Great!',
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
'email': '[email protected]',
'slopmenow': 'bar'
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
context = models.ResponseContext.objects.latest(field_name='id')
assert context.data == {'slopmenow': 'bar'}
def test_with_context_truncate_key(self):
data = {
'happy': True,
'description': u'Great!',
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
'email': '[email protected]',
'foo012345678901234567890': 'bar'
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
context = models.ResponseContext.objects.latest(field_name='id')
        assert context.data == {'foo01234567890123456': 'bar'}
def test_with_context_truncate_value(self):
data = {
'happy': True,
'description': u'Great!',
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
'email': '[email protected]',
'foo': ('a' * 100) + 'b'
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
context = models.ResponseContext.objects.latest(field_name='id')
assert context.data == {'foo': ('a' * 100)}
def test_with_context_20_pairs(self):
data = {
'happy': True,
'description': u'Great!',
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
'email': '[email protected]',
}
for i in range(25):
data['foo%02d' % i] = str(i)
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
context = models.ResponseContext.objects.latest(field_name='id')
data = sorted(context.data.items())
assert len(data) == 20
assert data[0] == ('foo00', '0')
assert data[-1] == ('foo19', '19')
def test_null_device_returns_400(self):
data = {
'happy': True,
'description': u'Great!',
'product': u'Firefox OS',
'device': None
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 400
assert 'device' in r.content
def test_invalid_email_address_returns_400(self):
data = {
'happy': True,
'description': u'Great!',
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
'email': 'foo@example'
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 400
assert 'email' in r.content
def test_missing_description_returns_400(self):
data = {
'happy': True,
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 400
assert 'description' in r.content
def test_missing_product_returns_400(self):
data = {
'happy': True,
'channel': u'stable',
'version': u'1.1',
'description': u'Great!',
'platform': u'Firefox OS',
'locale': 'en-US',
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 400
assert 'product' in r.content
def test_invalid_product_returns_400(self):
data = {
'happy': True,
'channel': u'stable',
'version': u'1.1',
'description': u'Great!',
'product': u'Nurse Kitty',
'platform': u'Firefox OS',
'locale': 'en-US',
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 400
assert 'product' in r.content
def test_url_max_length(self):
url_base = 'http://example.com/'
# Up to 199 characters is fine.
data = {
'happy': True,
'channel': u'stable',
'version': u'1.1',
'description': u'Great! 199',
'product': u'Firefox OS',
'platform': u'Firefox OS',
'url': url_base + ('a' * (199 - len(url_base))) + 'b',
'locale': 'en-US',
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
# 200th character is not fine.
data = {
'happy': True,
'channel': u'stable',
'version': u'1.1',
'description': u'Great! 200',
'product': u'Firefox OS',
'platform': u'Firefox OS',
'url': url_base + ('a' * (200 - len(url_base))) + 'b',
'locale': 'en-US',
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 400
def test_valid_urls(self):
test_data = [
'example.com',
'example.com:80',
'example.com:80/foo',
'http://example.com',
'http://example.com/foo',
'http://example.com:80',
'http://example.com:80/foo',
'https://example.com',
'https://example.com/foo',
'https://example.com:80',
'https://example.com:80/foo',
'ftp://example.com',
'about:mozilla',
'chrome://foo'
]
for url in test_data:
data = {
'happy': True,
'channel': u'stable',
'version': u'1.1',
'description': u'Great!',
'product': u'Firefox OS',
'platform': u'Firefox OS',
'url': url,
'locale': 'en-US',
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
get_cache('default').clear()
def test_user_agent_inferred_bits(self):
"""Tests that we infer the right bits from the user-agent"""
data = {
'happy': True,
'description': u'Great!',
'category': u'ui',
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
'email': '[email protected]',
'url': 'http://example.com/',
'manufacturer': 'OmniCorp',
'device': 'OmniCorp',
'country': 'US',
'user_agent': (
'Mozilla/5.0 (Mobile; rv:18.0) Gecko/18.0 Firefox/18.0'
),
'source': 'email',
'campaign': 'email_test',
}
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
feedback = models.Response.objects.latest(field_name='id')
assert feedback.browser == u'Firefox OS'
assert feedback.browser_version == u'1.0'
assert feedback.browser_platform == u'Firefox OS'
class TestPostFeedbackAPIThrottle(TestCase):
def setUp(self):
super(TestPostFeedbackAPIThrottle, self).setUp()
get_cache('default').clear()
def test_throttle(self):
throttle_trigger = PER_HOUR_LIMIT
# Descriptions have to be unique otherwise we hit the
# double-submit throttling. So we do this fancy thing here.
def data_generator():
while True:
yield {
'happy': True,
'description': u'Great! ' + str(time.time()),
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
}
data = data_generator()
# Now hit the api a fajillion times making sure things got
# created
for i in range(throttle_trigger):
# django-ratelimit fails the throttling if we hit the url
# a fajillion times in rapid succession. For now, we add a
# sleep which makes this test run a little longer than it
# probably needs to.
#
# FIXME: Look into this more for a better solution.
time.sleep(0.01)
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data.next()))
assert r.status_code == 201
# This one should trip the throttle trigger
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data.next()))
assert r.status_code == 429
def test_double_submit_throttle(self):
# We disallow two submits in a row of the same description
# from the same ip address.
data = {
'happy': True,
'description': u'Great!',
'product': u'Firefox OS',
'channel': u'stable',
'version': u'1.1',
'platform': u'Firefox OS',
'locale': 'en-US',
}
# First time is fine
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 201
# Second time and back off!
r = self.client.post(
reverse('feedback-api'),
content_type='application/json',
data=json.dumps(data))
assert r.status_code == 429
| bsd-3-clause | -3,848,013,333,888,951,300 | 32.761856 | 79 | 0.520718 | false |
hustlzp/zulip | analytics/management/commands/stream_stats.py | 113 | 1703 | from __future__ import absolute_import
from django.core.management.base import BaseCommand
from django.db.models import Q
from zerver.models import Realm, Stream, Message, Subscription, Recipient
class Command(BaseCommand):
help = "Generate statistics on the streams for a realm."
def add_arguments(self, parser):
parser.add_argument('realms', metavar='<realm>', type=str, nargs='*',
help="realm to generate statistics for")
def handle(self, *args, **options):
if options['realms']:
try:
realms = [Realm.objects.get(domain=domain) for domain in options['realms']]
except Realm.DoesNotExist, e:
print e
exit(1)
else:
realms = Realm.objects.all()
for realm in realms:
print realm.domain
print "------------"
print "%25s %15s %10s" % ("stream", "subscribers", "messages")
streams = Stream.objects.filter(realm=realm).exclude(Q(name__istartswith="tutorial-"))
invite_only_count = 0
for stream in streams:
if stream.invite_only:
invite_only_count += 1
continue
print "%25s" % (stream.name,),
recipient = Recipient.objects.filter(type=Recipient.STREAM, type_id=stream.id)
print "%10d" % (len(Subscription.objects.filter(recipient=recipient, active=True)),),
num_messages = len(Message.objects.filter(recipient=recipient))
print "%12d" % (num_messages,)
print "%d invite-only streams" % (invite_only_count,)
print ""
| apache-2.0 | 3,659,913,931,787,800,600 | 41.575 | 101 | 0.568996 | false |
qPCR4vir/orange | Orange/OrangeWidgets/VisualizeQt/OWMDSQt.py | 6 | 32319 | """
<name>MDS Qt</name>
<description>Multi dimensional scaling (Qt)</description>
<icon>icons/MDS.svg</icon>
<contact>Ales Erjavec (ales.erjavec(@at@)fri.uni-lj.si)</contact>
<priority>2500</priority>
"""
from OWWidget import *
import Orange
import orange
import orngMDS
import OWGUI
import numpy, sys, math, time, os
import OWColorPalette
import OWToolbars
#from OWGraph import *
#from PyQt4.Qwt5 import *
from plot.owplot import OWPlot, OWCurve
from plot.owpoint import OWPoint
from random import random
class OWMDSQt(OWWidget):
settingsList=["graph.PointSize", "graph.proportionGraphed", "graph.ColorAttr", "graph.SizeAttr",
"graph.ShapeAttr", "graph.NameAttr", "graph.ShowStress", "graph.NumStressLines",
"graph.ShowName", "graph.differentWidths", "graph.stressByTransparency", "graph.useAntialiasing"
"StressFunc", "applyLSMT", "toolbarSelection", "autoSendSelection", "selectionOptions", "computeStress",
"RefreshMode"]
contextHandlers={"":DomainContextHandler("", [ContextField("graph.ColorAttr", DomainContextHandler.Optional),
ContextField("graph.SizeAttr", DomainContextHandler.Optional),
ContextField("graph.ShapeAttr", DomainContextHandler.Optional),
ContextField("graph.NameAttr", DomainContextHandler.Optional),
ContextField("graph.ShowName", DomainContextHandler.Optional)])}
def __init__(self, parent=None, signalManager=None, name="Multi Dimensional Scaling"):
OWWidget.__init__(self, parent, signalManager, name, wantGraph=True)
self.inputs=[("Distances", orange.SymMatrix, self.cmatrix), ("Data Subset", ExampleTable, self.cselected)]
self.outputs=[("Data", ExampleTable)]
self.StressFunc=3
self.minStressDelta=5e-5
self.maxIterations=5000
self.maxImprovment=10
self.autoSendSelection=0
self.toolbarSelection=0
self.selectionOptions=0
self.computeStress=1
self.ReDraw=1
self.NumIter=1
self.RefreshMode=0
self.applyLSMT = 0
self.stressFunc=[("Kruskal stress", orngMDS.KruskalStress),
("Sammon stress", orngMDS.SammonStress),
("Signed Sammon stress", orngMDS.SgnSammonStress),
("Signed relative stress", orngMDS.SgnRelStress)]
self.graph=MDSPlot(self.mainArea)
self.mainArea.layout().addWidget(self.graph)
self.loadSettings()
tabs=OWGUI.tabWidget(self.controlArea)
mds=OWGUI.createTabPage(tabs, "MDS")
graph=OWGUI.createTabPage(tabs, "Graph")
##MDS Tab
init=OWGUI.widgetBox(mds, "Initialization")
OWGUI.button(init, self, "Randomize", self.randomize)
OWGUI.button(init, self, "Jitter", self.jitter)
OWGUI.button(init, self, "Torgerson", self.torgerson)
opt=OWGUI.widgetBox(mds, "Optimization")
self.startButton=OWGUI.button(opt, self, "Optimize", self.testStart)
OWGUI.button(opt, self, "Single Step", self.smacofStep)
box = OWGUI.widgetBox(opt, "Stress Function")
OWGUI.comboBox(box, self, "StressFunc", items=[a[0] for a in self.stressFunc], callback=self.updateStress)
OWGUI.radioButtonsInBox(opt, self, "RefreshMode", ["Every step", "Every 10 steps", "Every 100 steps"], "Refresh During Optimization", callback=lambda :1)
self.stopping=OWGUI.widgetBox(opt, "Stopping Conditions")
OWGUI.hSlider(OWGUI.widgetBox(self.stopping, "Min. stress change", flat=True),
self, "minStressDelta", minValue=5e-5, maxValue=1e-2, step=5e-5,
labelFormat="%.5f", intOnly=0)
OWGUI.hSlider(OWGUI.widgetBox(self.stopping, "Max. number of steps", flat=True),
self, "maxIterations", minValue=10, maxValue=5000, step=10,
labelFormat="%i")
##Graph Tab
OWGUI.hSlider(graph, self, "graph.PointSize", box="Point Size", minValue=1, maxValue=20, callback=self.graph.updateData)
self.colorCombo=OWGUI.comboBox(graph, self, "graph.ColorAttr", box="Color", callback=self.graph.updateData)
self.sizeCombo=OWGUI.comboBox(graph, self, "graph.SizeAttr", box="Size", callback=self.graph.updateData)
self.shapeCombo=OWGUI.comboBox(graph, self, "graph.ShapeAttr", box="Shape", callback=self.graph.updateData)
self.nameCombo=OWGUI.comboBox(graph, self, "graph.NameAttr", box="Label", callback=self.graph.updateData)
box = OWGUI.widgetBox(graph, "Distances & Stress")
OWGUI.checkBox(box, self, "graph.ShowStress", "Show similar pairs", callback = self.graph.updateLinesRepaint)
b2 = OWGUI.widgetBox(box)
OWGUI.widgetLabel(b2, "Proportion of connected pairs")
OWGUI.separator(b2, height=3)
sl = OWGUI.hSlider(b2, self, "graph.proportionGraphed", minValue=0, maxValue=20, callback=self.graph.updateLinesRepaint, tooltip="Proportion of connected pairs (Maximum of 1000 lines will be drawn")
OWGUI.checkBox(box, self, "graph.differentWidths", "Show distance by line width", callback = self.graph.updateLinesRepaint)
OWGUI.checkBox(box, self, "graph.stressByTransparency", "Show stress by transparency", callback = self.graph.updateData)
OWGUI.checkBox(box, self, "graph.stressBySize", "Show stress by symbol size", callback = self.updateStressBySize)
self.updateStressBySize(True)
OWGUI.checkBox(graph, self, "graph.antialias_points", label="Use antialiasing", box="Antialiasing", tooltip="Use antialiasing for beter quality graphics", callback=self.graph.updateData)
self.zoomToolbar=OWToolbars.ZoomSelectToolbar(self, graph, self.graph, self.autoSendSelection)
self.connect(self.zoomToolbar.buttonSendSelections, SIGNAL("clicked()"), self.sendSelections)
self.graph.autoSendSelectionCallback = lambda :self.autoSendSelection and self.sendSelections()
OWGUI.checkBox(graph, self, "autoSendSelection", "Auto send selected")
OWGUI.radioButtonsInBox(graph, self, "selectionOptions", ["Don't append", "Append coordinates", "Append coordinates as meta"], box="Append coordinates", callback=self.sendIf)
mds.setSizePolicy(QSizePolicy(QSizePolicy.Maximum, QSizePolicy.Maximum))
graph.setSizePolicy(QSizePolicy(QSizePolicy.Maximum, QSizePolicy.Maximum))
self.controlArea.setMinimumWidth(250)
OWGUI.rubber(mds)
OWGUI.rubber(self.controlArea)
infoBox=OWGUI.widgetBox(mds, "Info")
self.infoA=OWGUI.widgetLabel(infoBox, "Avg. stress:")
self.infoB=OWGUI.widgetLabel(infoBox, "Num. steps")
# OWGUI.button(self.controlArea, self, "Save", self.graph.saveToFile, debuggingEnabled = 0)
self.connect(self.graphButton, SIGNAL("clicked()"), self.graph.saveToFile)
self.resize(900,630)
self.done=True
self.data=None
self.selectedInputExamples=[]
self.selectedInput=[]
def cmatrix(self, matrix=None):
self.closeContext()
self.origMatrix=matrix
self.data=data=None
if matrix:
self.data=data=getattr(matrix, "items")
matrix.matrixType = orange.SymMatrix.Symmetric
self.graph.ColorAttr=0
self.graph.SizeAttr=0
self.graph.ShapeAttr=0
self.graph.NameAttr=0
self.graph.closestPairs = None
if isinstance(data, orange.ExampleTable):
self.setExampleTable(data)
elif isinstance(data, list):
self.setList(data)
elif isinstance(data, orange.VarList):
self.setVarList(data)
if matrix:
self.mds=orngMDS.MDS(matrix)
self.mds.points=numpy.random.random(size=[self.mds.n, self.mds.dim])
self.mds.getStress()
self.stress=self.getAvgStress(self.stressFunc[self.StressFunc][1])
if data and type(data) == orange.ExampleTable:
self.openContext("",self.data)
self.graph.setData(self.mds, self.colors, self.sizes, self.shapes, self.names, self.selectedInput)
else:
self.graph.clear()
def cselected(self, selected=[]):
self.selectedInputExamples=selected or []
if self.data and type(self.data)==orange.ExampleTable:
self.setExampleTable(self.data)
self.graph.setData(self.mds, self.colors, self.sizes, self.shapes, self.names, self.selectedInput)
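# setExampleTable() fills the color/size/shape/label combo boxes from the data
# domain and precomputes, for every example, the candidate colors, shapes,
# sizes and labels the graph can use (index 0 in each list is the
# "Same color"/"Same size"/"Same shape"/"No name" default).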
def setExampleTable(self, data):
self.colorCombo.clear()
self.sizeCombo.clear()
self.shapeCombo.clear()
self.nameCombo.clear()
attributes=[attr for attr in data.domain.variables+data.domain.getmetas().values() or [] ]
discAttributes=filter(lambda a: a.varType==orange.VarTypes.Discrete, attributes)
contAttributes=filter(lambda a: a.varType==orange.VarTypes.Continuous, attributes)
attrName=[attr.name for attr in attributes]
for name in ["Same color"]+attrName:
self.colorCombo.addItem(name)
for name in ["Same size"]+map(lambda a:a.name, contAttributes):
self.sizeCombo.addItem(name)
for name in ["Same shape"]+map(lambda a: a.name, discAttributes):
self.shapeCombo.addItem(name)
for name in ["No name"]+attrName:
self.nameCombo.addItem(name)
# if data.domain.classVar:
# if data.domain.classVar.varType == orange.VarTypes.Discrete:
# self.graph.ColorAttr = len(data.domain.variables) # index 0 is Same color!
# elif data.domain.classVar.varType == orange.VarTypes.Continuous:
# self.graph.SizeAttr = len(data.domain.variables) # index 0 is Same color!
try:
self.graph.NameAttr = 1 + [name.lower() for name in attrName].index("name")
except:
pass
self.attributes=attributes
self.discAttributes=discAttributes
self.contAttributes=contAttributes
self.colors=[[Qt.black]*(len(attributes)+1) for i in range(len(data))]
self.shapes=[[OWPoint.Ellipse]*(len(discAttributes)+1) for i in range(len(data))]
self.sizes=[[1.0]*(len(contAttributes)+1) for i in range(len(data))]
self.names=[[""]*(len(attributes)+1) for i in range(len(data))]
try:
selectedInput=self.selectedInputExamples.select(data.domain)
except Exception:
selectedInput=[]
self.selectedInput=map(lambda d: selectedInput and (d in selectedInput), data)
contI=discI=attrI=1
def check(ex, a):
try:
ex[a]
except:
return False
return not ex[a].isSpecial()
for j, attr in enumerate(attributes):
if attr.varType==orange.VarTypes.Discrete:
c=OWColorPalette.ColorPaletteHSV(len(attr.values))
for i in range(len(data)):
self.colors[i][attrI]= check(data[i],attr) and c[int(data[i][attr])] or Qt.black
## self.shapes[i][discI]= data[i][attr].isSpecial() and self.graph.shapeList[0] or self.graph.shapeList[int(data[i][attr])%len(self.graph.shapeList)]
self.shapes[i][discI]= check(data[i],attr) and self.graph.shapeList[int(data[i][attr])%len(self.graph.shapeList)] or self.graph.shapeList[0]
self.names[i][attrI]= check(data[i],attr) and " "+str(data[i][attr]) or ""
#self.sizes[i][contI]=5
attrI+=1
discI+=1
elif attr.varType==orange.VarTypes.Continuous:
c=OWColorPalette.ColorPaletteBW(-1)
#val=[e[attr] for e in data if not e[attr].isSpecial()]
val=[e[attr] for e in data if check(e, attr)]
minVal=min(val or [0])
maxVal=max(val or [1])
span = max(maxVal - minVal, 1e-6)
for i in range(len(data)):
self.colors[i][attrI]=check(data[i],attr) and c.getColor((data[i][attr] - minVal)/span) or Qt.black
#self.shapes[i][discI]=self.graph.shapeList[0]
self.names[i][attrI]=check(data[i],attr) and " "+str(data[i][attr]) or ""
self.sizes[i][contI]=check(data[i],attr) and (float(self.data[i][attr]) - minVal) / span or 1.0
contI+=1
attrI+=1
else:
for i in range(len(data)):
self.colors[i][attrI]=Qt.black
#self.shapes[i][j+1]=self.graph.shapeList[0]
self.names[i][attrI]= check(data[i],attr) and " "+str(data[i][attr]) or ""
#self.sizes[i][j+1]=5
attrI+=1
if data and data.domain.classVar:
if data.domain.classVar.varType == orange.VarTypes.Discrete:
self.graph.ColorAttr = len(self.colors[0]) - 1 # index 0 is Same color!
elif data.domain.classVar.varType == orange.VarTypes.Continuous:
self.graph.SizeAttr = len(self.sizes[0]) - 1 # index 0 is Same color!
def setList(self, data):
self.colorCombo.clear()
self.sizeCombo.clear()
self.shapeCombo.clear()
self.nameCombo.clear()
for name in ["Same color", "strain"]:
self.colorCombo.addItem(name)
for name in ["No name", "name", "strain"]:
self.nameCombo.addItem(name)
self.colors=[[Qt.black]*3 for i in range(len(data))]
self.shapes=[[OWPoint.Ellipse] for i in range(len(data))]
self.sizes=[[1.0] for i in range(len(data))]
self.selectedInput=[False]*len(data)
if type(data[0]) in [str, unicode]:
self.names = [("", di, "", "") for di in data]
else:
self.names=[[""]*4 for i in range(len(data))]
try:
strains=list(set([d.strain for d in data]))
c=OWColorPalette.ColorPaletteHSV(len(strains))
for i, d in enumerate(data):
self.colors[i][1]=c[strains.index(d.strain)]
self.names[i][1]=" "+d.name
self.names[i][2]=" "+d.strain
except Exception, val:
print val
def setVarList(self, data):
self.colorCombo.clear()
self.sizeCombo.clear()
self.shapeCombo.clear()
self.nameCombo.clear()
for name in ["Same color", "Variable"]:
self.colorCombo.addItem(name)
for name in ["No name", "Var name"]:
self.nameCombo.addItem(name)
self.colors=[[Qt.black]*3 for i in range(len(data))]
self.shapes=[[OWPoint.Ellipse] for i in range(len(data))]
self.sizes=[[1.0] for i in range(len(data))]
self.names=[[""]*4 for i in range(len(data))]
self.selectedInput=[False]*len(data)
try:
c=OWColorPalette.ColorPaletteHSV(len(data))
for i, d in enumerate(data):
self.colors[i][1]=c[i]
self.names[i][1]=" " +str(d.name)
except Exception, val:
print val
def updateStressBySize(self, noRepaint = False):
self.sizeCombo.setDisabled(self.graph.stressBySize)
if not noRepaint:
self.graph.updateData()
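# Advances the projection by NumIter SMACOF iterations, then recomputes the
# stress and redraws the graph when those options are enabled.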
def smacofStep(self):
if not getattr(self, "mds", None):
return
for i in range(self.NumIter):
self.mds.SMACOFstep()
if self.computeStress:
self.mds.getStress(self.stressFunc[self.StressFunc][1])
self.stress=self.getAvgStress(self.stressFunc[self.StressFunc][1])
#st=time.clock()
if self.ReDraw:
self.graph.updateData()
#print "Update:", time.clock()-st
## I (Janez) disabled LSMT because it is implemented in a way it never should be:
# orngMDS.LSMT transforms the distance matrix itself (the original is
# stored as well, admittedly), and from that point on there is no way the
# user can "untransform" it, except by resending the signal.
# Since the basic problem is the bad design of orngMDS, I removed the option
# from the widget. If somebody has time to fix orngMDS first, he's welcome.
def LSMT(self):
if not getattr(self, "mds", None):
return
self.mds.LSMT()
if self.computeStress:
self.mds.getStress(self.stressFunc[self.StressFunc][1])
self.stress=self.getAvgStress(self.stressFunc[self.StressFunc][1])
if self.ReDraw:
self.graph.updateData()
def torgerson(self):
if not getattr(self, "mds", None):
return
self.mds.Torgerson()
if self.computeStress:
self.mds.getStress(self.stressFunc[self.StressFunc][1])
self.stress=self.getAvgStress(self.stressFunc[self.StressFunc][1])
self.graph.updateData()
def randomize(self):
if not getattr(self, "mds", None):
return
self.mds.points = numpy.random.random(size=[self.mds.n,2])
if self.computeStress:
self.mds.getStress(self.stressFunc[self.StressFunc][1])
self.stress=self.getAvgStress(self.stressFunc[self.StressFunc][1])
self.graph.updateData()
def jitter(self):
if not getattr(self, "mds", None):
return
mi = numpy.min(self.mds.points, axis=0)
ma = numpy.max(self.mds.points, axis=0)
st = 0.05 * (ma - mi)
for i in range(self.mds.n):
for j in range(2):
self.mds.points[i][j] += st[j]*(random() - 0.5)
if self.computeStress:
self.mds.getStress(self.stressFunc[self.StressFunc][1])
self.stress=self.getAvgStress(self.stressFunc[self.StressFunc][1])
self.graph.updateData()
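# Interactive optimization loop: batches of SMACOF steps run until the average
# of the last three stress changes falls below minStressDelta (relative to the
# previous stress) or maxIterations is reached; the progress bar tracks the
# larger of the two progress measures. Calling start() again while it runs
# sets self.done and stops the loop.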
def start(self):
if not getattr(self, "mds", None):
return
if self.done==False:
self.done=True
return
self.done=False
self.startButton.setText("Stop")
numIter=0
self.progressBarInit()
pcur=0
startStress=oldStress=stress=self.getAvgStress(self.stressFunc[self.StressFunc][1])
startTime=time.clock()
hist=[stress]*3
while not self.done and numIter<self.maxIterations:
for i in range(self.NumIter):
self.mds.SMACOFstep()
qApp.processEvents()
if self.computeStress:
self.mds.getStress(self.stressFunc[self.StressFunc][1])
self.stress=stress=self.getAvgStress(self.stressFunc[self.StressFunc][1])
hist.pop(0)
hist.append(abs(oldStress-stress))
numIter+=1
self.infoB.setText("Num. steps: %i" % numIter)
qApp.processEvents()
if self.ReDraw:
self.graph.updateData()
qApp.processEvents()
if self.computeStress and abs(sum(hist)/3)<abs(self.minStressDelta*oldStress):
break
## Update progress bar
p1=abs(self.minStressDelta*oldStress)/max(sum(hist)/3, 1e-6)*100
if p1>100: p1=0
pcur=min(max([p1, float(numIter)/self.maxIterations*100, pcur]),99)
self.progressBarSet(int(pcur))
oldStress=stress
self.startButton.setText("Optimize")
self.progressBarFinished()
#if not self.ReDraw:
self.graph.updateData()
self.done=True
#print "time %i " % (time.clock()-startTime)
def testStart(self):
if not getattr(self, "mds", None):
return
if self.done==False:
self.done=True
return
self.done=False
self.startButton.setText("Stop Optimization")
self.stopping.setDisabled(1)
self.progressBarInit()
self.iterNum=0
self.mds.progress_callback=self.callback
self.mds.mds.optimize(self.maxIterations, self.stressFunc[self.StressFunc][1], self.minStressDelta)
if self.iterNum%(math.pow(10,self.RefreshMode)):
self.graph.updateData()
self.startButton.setText("Optimize")
self.stopping.setDisabled(0)
self.progressBarFinished()
self.done=True
def callback(self, a,b=None):
if not self.iterNum%(math.pow(10,self.RefreshMode)):
self.graph.updateData()
self.iterNum+=1
self.infoB.setText("Num. steps: %i" % self.iterNum)
self.infoA.setText("Avg. Stress: %f" % self.mds.avgStress)
self.progressBarSet(int(a*100))
qApp.processEvents()
if self.done:
return 0
else:
return 1
def getAvgStress(self, stressf=orngMDS.SgnRelStress):
return self.mds.avgStress
"""
self.mds.getDistance()
total=0.0
total=sum([abs(a[0]) for a in self.mds.arr])
self.infoA.setText("Avg. stress: %.7f" % (total/(self.mds.n*self.mds.n)))
return total/(self.mds.n*self.mds.n)
"""
def sendIf(self, i=-1):
if self.autoSendSelection:
self.sendSelections()
def sendSelections(self):
if not getattr(self, "mds", None):
return
points = self.graph.main_curve.points()
selectedInd = [i for i, p in enumerate(points) if p.is_selected()]
if type(self.data)==orange.ExampleTable:
self.sendExampleTable(selectedInd)
elif type(self.data)==list:
self.sendList(selectedInd)
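# Sends the selected examples downstream; depending on selectionOptions the
# projected X/Y coordinates are either omitted, added as ordinary attributes,
# or added as meta attributes.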
def sendExampleTable(self, selectedInd):
if self.selectionOptions==0:
self.send("Data", orange.ExampleTable(self.data.getitems(selectedInd)))
else:
xAttr=orange.FloatVariable("X")
yAttr=orange.FloatVariable("Y")
if self.selectionOptions==1:
domain=orange.Domain([xAttr, yAttr]+[v for v in self.data.domain.variables])
domain.addmetas(self.data.domain.getmetas())
else:
domain=orange.Domain(self.data.domain)
domain.addmeta(orange.newmetaid(), xAttr)
domain.addmeta(orange.newmetaid(), yAttr)
selection=orange.ExampleTable(domain)
selection.extend(self.data.getitems(selectedInd))
for i in range(len(selectedInd)):
selection[i][xAttr]=self.mds.points[selectedInd[i]][0]
selection[i][yAttr]=self.mds.points[selectedInd[i]][1]
self.send("Data", selection)
def sendList(self, selectedInd):
if self.data and type(self.data[0]) == str:
xAttr=orange.FloatVariable("X")
yAttr=orange.FloatVariable("Y")
nameAttr= orange.StringVariable("name")
if self.selectionOptions == 1:
domain = orange.Domain([xAttr, yAttr, nameAttr])
selection = orange.ExampleTable(domain)
for i in range(len(selectedInd)):
selection.append(list(self.mds.points[selectedInd[i]]) + [self.data[i]])
else:
domain = orange.Domain([nameAttr])
if self.selectionOptions:
domain.addmeta(orange.newmetaid(), xAttr)
domain.addmeta(orange.newmetaid(), yAttr)
selection = orange.ExampleTable(domain)
for i in range(len(selectedInd)):
selection.append([self.data[i]])
if self.selectionOptions:
selection[i][xAttr]=self.mds.points[selectedInd[i]][0]
selection[i][yAttr]=self.mds.points[selectedInd[i]][1]
self.send("Data", selection)
return
if not selectedInd:
self.send("Structured Data Files", None)
else:
datasets=[self.data[i] for i in selectedInd]
names=list(set([d.dirname for d in datasets]))
data=[(name, [d for d in filter(lambda a:a.strain==name, datasets)]) for name in names]
self.send("Structured Data Files",data)
def updateStress(self):
if not getattr(self, "mds", None):
return
self.mds.getStress(self.stressFunc[self.StressFunc][1])
self.graph.replot()
def sendReport(self):
self.reportSettings("Optimization",
[("Stress function", self.stressFunc[self.StressFunc][0]),
("Minimal stress change", self.minStressDelta),
("Maximal number of steps", self.maxIterations)])
if self.graph.ColorAttr or self.graph.stressBySize or self.graph.SizeAttr or self.graph.ShapeAttr or self.graph.NameAttr or self.graph.ShowStress:
self.reportSettings("Visual settings",
[self.graph.ColorAttr and ("Point color", self.colorCombo.currentText()),
self.graph.stressBySize and ("Point size", "<stress>")
or self.graph.SizeAttr and ("Point size", self.sizeCombo.currentText()),
self.graph.ShapeAttr and ("Point shape", self.shapeCombo.currentText()),
self.graph.NameAttr and ("Labels", self.nameCombo.currentText()),
self.graph.ShowStress and ("Proportion of connected pairs", self.graph.proportionGraphed)])
self.reportSection("Chart")
self.reportImage(self.graph.saveToFileDirect)
class MDSPlot(OWPlot):
def __init__(self, parent=None, name=None):
OWPlot.__init__(self, parent, name)
self.use_animations = False
self.animate_points = False
self.antialias_points = True
self.data = None
self.mds = None
self.PointSize = 5
self.ColorAttr = 0
self.SizeAttr = 0
self.ShapeAttr = 0
self.NameAttr = 0
self.ShowStress = False
self.differentWidths = True
self.stressByTransparency = True
self.stressBySize = False
self.NumStressLines = 10
self.proportionGraphed = 20
self.ShowName = True
#self.curveKeys=[]
self.pointKeys = []
self.points = []
self.lines = []
self.lineKeys = []
self.distanceLineCurves = []
self.colors = []
self.sizes = []
self.closestPairs = None
self.shapeList = [OWPoint.Ellipse,
OWPoint.Rect,
OWPoint.Diamond,
OWPoint.Triangle,
OWPoint.DTriangle ,
OWPoint.UTriangle,
OWPoint.LTriangle,
OWPoint.RTriangle,
OWPoint.Cross,
OWPoint.XCross ]
def setData(self, mds, colors, sizes, shapes, names, showFilled):
self.mds = mds
self.colors = colors
self.sizes = sizes
self.shapes = shapes
self.names = names
self.showFilled = showFilled
self.updateData()
def updateData(self):
self.clear()
self.distanceLineCurves = []
if self.ShowStress:
self.updateDistanceLines()
self.setPoints()
self.replot()
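# Draws grey connecting lines between the closest pairs of points (at most
# 1000, controlled by proportionGraphed); when differentWidths is set, the
# line width encodes the original distance, with shorter distances drawn
# thicker.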
def updateDistanceLines(self):
if not self.mds:
return
N = len(self.mds.points)
np = min(int(N*(N-1)/2. * self.proportionGraphed/100.), 1000) # draw maximum of 1000 closest pairs
needlines = int(math.ceil((1 + math.sqrt(1+8*np)) / 2))
if self.closestPairs is None or len(self.closestPairs) < np:
import heapq
m = self.mds.originalDistances
self.closestPairs = sorted(heapq.nsmallest(np, ((m[i, j], i, j) for i in range(m.dim) for j in range(i))))
for c in self.distanceLineCurves:
try:
c.detach()
except RuntimeError, ex: #underlying C/C++ object has been deleted
pass
self.distanceLineCurves = []
hdist = self.closestPairs[:np]
if not hdist:
return
black = QColor(192,192,192)
if self.differentWidths:
mindist = hdist[0][0]
maxdist = hdist[-1][0]
else:
mindist = maxdist = 0
if maxdist != mindist:
k = 3 / (maxdist - mindist)**2
for dist, i, j in hdist:
pti, ptj = self.mds.points[i], self.mds.points[j]
c = self.add_curve("n_lines", black, black, 2, style=OWCurve.Lines, xData=[pti[0],ptj[0]], yData=[pti[1],ptj[1]], lineWidth = max(1, (maxdist - dist)**2 * k))
c.set_in_background(True)
self.distanceLineCurves.append(c)
else:
for dist, i, j in hdist:
pti, ptj = self.mds.points[i], self.mds.points[j]
c = self.add_curve("n_lines", black, black, 2, OWCurve.Lines, xData=[pti[0],ptj[0]], yData=[pti[1],ptj[1]], lineWidth = 2)
c.set_in_background(True)
self.distanceLineCurves.append(c)
def updateLinesRepaint(self):
if self.mds:
if self.ShowStress:
self.updateDistanceLines()
else:
for c in self.distanceLineCurves:
try:
c.detach()
except RuntimeError, ex: #underlying C/C++ object has been deleted
pass
self.distanceLineCurves = []
self.replot()
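# Rebuilds the main scatter curve: colors, sizes, shapes and labels follow the
# selected attributes, and per-point stress can additionally be shown through
# transparency and/or symbol size.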
def setPoints(self):
if not self.mds:
return
x_data = [p[0] for p in self.mds.points]
y_data = [p[1] for p in self.mds.points]
if self.stressBySize or self.stressByTransparency:
stresses = map(sum, self.mds.stress)
mins, maxs = min(stresses), max(stresses)
stress_scale = 1. / max(1e-7, maxs - mins)
# stress_scale = 1. / max(1e-7, maxs - mins)
if self.ColorAttr != 0:
colors = [c[self.ColorAttr] for c in self.colors]
else:
colors = [QColor(Qt.black) for _ in self.colors] #QColor(Qt.black)
if self.stressByTransparency:
for c, s in zip(colors, stresses):
c.setAlpha(math.floor((1.0 - ((s - mins) * stress_scale)) * 255))
if self.stressBySize:
sizes = [(s - mins) * stress_scale * self.PointSize for s in stresses]
elif self.SizeAttr != 0:
sizes = [s[self.SizeAttr] * self.PointSize for s in self.sizes]
else:
sizes = [self.PointSize]
if self.ShapeAttr != 0:
shapes = [s[self.ShapeAttr] for s in self.shapes]
else:
shapes = [self.shapeList[0]]
if self.NameAttr != 0:
labels = [n[self.NameAttr] for n in self.names]
else:
labels = []
self.set_main_curve_data(x_data, y_data, colors, labels, sizes, shapes, self.showFilled)
def sendData(self, *args):
pass
if __name__=="__main__":
app=QApplication(sys.argv)
w=OWMDSQt()
w.show()
data=orange.ExampleTable("../../doc/datasets/iris.tab")
## data = orange.ExampleTable(r"E:\Development\Orange Datasets\UCI\iris.tab")
## data=orange.ExampleTable("/home/ales/src/MDSjakulin/eu_nations.txt")
matrix = orange.SymMatrix(len(data))
dist = orange.ExamplesDistanceConstructor_Euclidean(data)
matrix = orange.SymMatrix(len(data))
matrix.setattr('items', data)
for i in range(len(data)):
for j in range(i+1):
matrix[i, j] = dist(data[i], data[j])
w.cmatrix(matrix)
w.cselected(orange.ExampleTable(data[:50]))
app.exec_()
w.saveSettings()
| gpl-3.0 | 6,245,947,863,665,940,000 | 43.031335 | 206 | 0.58266 | false |
jiwanlimbu/aura | keystone/oauth1/backends/sql.py | 1 | 10594 | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import random as _random
import uuid
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from keystone.common import sql
from keystone.common import utils
from keystone import exception
from keystone.i18n import _
from keystone.oauth1.backends import base
random = _random.SystemRandom()
class Consumer(sql.ModelBase, sql.DictBase):
__tablename__ = 'consumer'
attributes = ['id', 'description', 'secret']
id = sql.Column(sql.String(64), primary_key=True, nullable=False)
description = sql.Column(sql.String(64), nullable=True)
secret = sql.Column(sql.String(64), nullable=False)
extra = sql.Column(sql.JsonBlob(), nullable=False)
class RequestToken(sql.ModelBase, sql.DictBase):
__tablename__ = 'request_token'
attributes = ['id', 'request_secret',
'verifier', 'authorizing_user_id', 'requested_project_id',
'role_ids', 'consumer_id', 'expires_at']
id = sql.Column(sql.String(64), primary_key=True, nullable=False)
request_secret = sql.Column(sql.String(64), nullable=False)
verifier = sql.Column(sql.String(64), nullable=True)
authorizing_user_id = sql.Column(sql.String(64), nullable=True)
requested_project_id = sql.Column(sql.String(64), nullable=False)
role_ids = sql.Column(sql.Text(), nullable=True)
consumer_id = sql.Column(sql.String(64), sql.ForeignKey('consumer.id'),
nullable=False, index=True)
expires_at = sql.Column(sql.String(64), nullable=True)
@classmethod
def from_dict(cls, user_dict):
return cls(**user_dict)
def to_dict(self):
return dict(self.items())
class AccessToken(sql.ModelBase, sql.DictBase):
__tablename__ = 'access_token'
attributes = ['id', 'access_secret', 'authorizing_user_id',
'project_id', 'role_ids', 'consumer_id',
'expires_at']
id = sql.Column(sql.String(64), primary_key=True, nullable=False)
access_secret = sql.Column(sql.String(64), nullable=False)
authorizing_user_id = sql.Column(sql.String(64), nullable=False,
index=True)
project_id = sql.Column(sql.String(64), nullable=False)
role_ids = sql.Column(sql.Text(), nullable=False)
consumer_id = sql.Column(sql.String(64), sql.ForeignKey('consumer.id'),
nullable=False)
expires_at = sql.Column(sql.String(64), nullable=True)
@classmethod
def from_dict(cls, user_dict):
return cls(**user_dict)
def to_dict(self):
return dict(self.items())
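# SQL implementation of the OAuth1 driver: consumers, request tokens and
# access tokens live in the three tables mapped above, and every public method
# runs inside an explicit read or write session.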
class OAuth1(base.Oauth1DriverBase):
def _get_consumer(self, session, consumer_id):
consumer_ref = session.query(Consumer).get(consumer_id)
if consumer_ref is None:
raise exception.NotFound(_('Consumer not found'))
return consumer_ref
def get_consumer_with_secret(self, consumer_id):
with sql.session_for_read() as session:
consumer_ref = self._get_consumer(session, consumer_id)
return consumer_ref.to_dict()
def get_consumer(self, consumer_id):
return base.filter_consumer(
self.get_consumer_with_secret(consumer_id))
def create_consumer(self, consumer_ref):
with sql.session_for_write() as session:
consumer = Consumer.from_dict(consumer_ref)
session.add(consumer)
return consumer.to_dict()
def _delete_consumer(self, session, consumer_id):
consumer_ref = self._get_consumer(session, consumer_id)
session.delete(consumer_ref)
def _delete_request_tokens(self, session, consumer_id):
q = session.query(RequestToken)
req_tokens = q.filter_by(consumer_id=consumer_id)
req_tokens_list = set([x.id for x in req_tokens])
for token_id in req_tokens_list:
token_ref = self._get_request_token(session, token_id)
session.delete(token_ref)
def _delete_access_tokens(self, session, consumer_id):
q = session.query(AccessToken)
acc_tokens = q.filter_by(consumer_id=consumer_id)
acc_tokens_list = set([x.id for x in acc_tokens])
for token_id in acc_tokens_list:
token_ref = self._get_access_token(session, token_id)
session.delete(token_ref)
def delete_consumer(self, consumer_id):
with sql.session_for_write() as session:
self._delete_request_tokens(session, consumer_id)
self._delete_access_tokens(session, consumer_id)
self._delete_consumer(session, consumer_id)
def list_consumers(self):
with sql.session_for_read() as session:
cons = session.query(Consumer)
return [base.filter_consumer(x.to_dict()) for x in cons]
def update_consumer(self, consumer_id, consumer_ref):
with sql.session_for_write() as session:
consumer = self._get_consumer(session, consumer_id)
old_consumer_dict = consumer.to_dict()
old_consumer_dict.update(consumer_ref)
new_consumer = Consumer.from_dict(old_consumer_dict)
consumer.description = new_consumer.description
consumer.extra = new_consumer.extra
return base.filter_consumer(consumer.to_dict())
def create_request_token(self, consumer_id, requested_project,
request_token_duration):
request_token_id = uuid.uuid4().hex
request_token_secret = uuid.uuid4().hex
expiry_date = None
if request_token_duration:
now = timeutils.utcnow()
future = now + datetime.timedelta(seconds=request_token_duration)
expiry_date = utils.isotime(future, subsecond=True)
ref = {}
ref['id'] = request_token_id
ref['request_secret'] = request_token_secret
ref['verifier'] = None
ref['authorizing_user_id'] = None
ref['requested_project_id'] = requested_project
ref['role_ids'] = None
ref['consumer_id'] = consumer_id
ref['expires_at'] = expiry_date
with sql.session_for_write() as session:
token_ref = RequestToken.from_dict(ref)
session.add(token_ref)
return token_ref.to_dict()
def _get_request_token(self, session, request_token_id):
token_ref = session.query(RequestToken).get(request_token_id)
if token_ref is None:
raise exception.NotFound(_('Request token not found'))
return token_ref
def get_request_token(self, request_token_id):
with sql.session_for_read() as session:
token_ref = self._get_request_token(session, request_token_id)
return token_ref.to_dict()
def authorize_request_token(self, request_token_id, user_id,
role_ids):
with sql.session_for_write() as session:
token_ref = self._get_request_token(session, request_token_id)
token_dict = token_ref.to_dict()
token_dict['authorizing_user_id'] = user_id
token_dict['verifier'] = ''.join(random.sample(base.VERIFIER_CHARS,
8))
token_dict['role_ids'] = jsonutils.dumps(role_ids)
new_token = RequestToken.from_dict(token_dict)
for attr in RequestToken.attributes:
if attr in ['authorizing_user_id', 'verifier', 'role_ids']:
setattr(token_ref, attr, getattr(new_token, attr))
return token_ref.to_dict()
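# Exchanging an authorized request token for an access token copies the
# authorizing user, project and role ids onto a new AccessToken row and
# deletes the consumed request token within the same write session.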
def create_access_token(self, request_id, access_token_duration):
access_token_id = uuid.uuid4().hex
access_token_secret = uuid.uuid4().hex
with sql.session_for_write() as session:
req_token_ref = self._get_request_token(session, request_id)
token_dict = req_token_ref.to_dict()
expiry_date = None
if access_token_duration:
now = timeutils.utcnow()
future = (now +
datetime.timedelta(seconds=access_token_duration))
expiry_date = utils.isotime(future, subsecond=True)
# add Access Token
ref = {}
ref['id'] = access_token_id
ref['access_secret'] = access_token_secret
ref['authorizing_user_id'] = token_dict['authorizing_user_id']
ref['project_id'] = token_dict['requested_project_id']
ref['role_ids'] = token_dict['role_ids']
ref['consumer_id'] = token_dict['consumer_id']
ref['expires_at'] = expiry_date
token_ref = AccessToken.from_dict(ref)
session.add(token_ref)
# remove request token, it's been used
session.delete(req_token_ref)
return token_ref.to_dict()
def _get_access_token(self, session, access_token_id):
token_ref = session.query(AccessToken).get(access_token_id)
if token_ref is None:
raise exception.NotFound(_('Access token not found'))
return token_ref
def get_access_token(self, access_token_id):
with sql.session_for_read() as session:
token_ref = self._get_access_token(session, access_token_id)
return token_ref.to_dict()
def list_access_tokens(self, user_id):
with sql.session_for_read() as session:
q = session.query(AccessToken)
user_auths = q.filter_by(authorizing_user_id=user_id)
return [base.filter_token(x.to_dict()) for x in user_auths]
def delete_access_token(self, user_id, access_token_id):
with sql.session_for_write() as session:
token_ref = self._get_access_token(session, access_token_id)
token_dict = token_ref.to_dict()
if token_dict['authorizing_user_id'] != user_id:
raise exception.Unauthorized(_('User IDs do not match'))
session.delete(token_ref)
| apache-2.0 | 9,211,129,164,002,423,000 | 40.22179 | 79 | 0.62054 | false |
hamasho/endojo | apps/games/vocabulary/models.py | 1 | 8032 | from datetime import timedelta
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from registration.models import Language
from core.utils import date_range, get_today
class Package(models.Model):
title = models.CharField(max_length=200, unique=True)
level = models.SmallIntegerField()
pub_date = models.DateTimeField(default=timezone.now)
class Meta:
ordering = ['-pub_date']
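# get_package_list() returns one dict per package available in the given
# language, annotated with this user's progress ('Yet'/'Learning'/'Complete')
# and with counts of how many users have tried or completed the package.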
@staticmethod
def get_package_list(user, language):
packages = Package.objects.filter(
availablepackage__language=language,
).order_by('level', 'title')
result = []
for package in packages:
try:
state = PackageState.objects.get(
user=user,
package=package,
)
state = 'Complete' if state.complete else 'Learning'
except PackageState.DoesNotExist:
state = 'Yet'
n_tried = PackageState.objects.filter(
package=package,
).count()
n_completed = PackageState.objects.filter(
package=package,
complete=True,
).count()
result.append({
'id': package.id,
'title': package.title,
'level': package.level,
'pub_date': package.pub_date,
'state': state,
'n_tried': n_tried,
'n_completed': n_completed,
})
return result
class AvailablePackage(models.Model):
package = models.ForeignKey(Package)
language = models.ForeignKey(Language)
class Meta:
unique_together = ('package', 'language')
class PackageState(models.Model):
user = models.ForeignKey(User)
package = models.ForeignKey(Package)
complete = models.BooleanField(default=True)
class Meta:
unique_together = ('user', 'package')
class Word(models.Model):
word_text = models.CharField(max_length=40, unique=True)
package = models.ForeignKey(Package)
@staticmethod
def get_translated_words(package_id, language):
"""
Return translated word objects
[{
"id": word.id,
"word_text": word.word_text,
"meaning": meaning
}, ...]
"""
t_words = TranslatedWord.objects.filter(
word__package_id=package_id,
language=language,
).prefetch_related('word')
result = []
for t_word in t_words:
result.append({
'id': t_word.word.id,
'word_text': t_word.word.word_text,
'meaning': t_word.meaning,
})
return result
class TranslatedWord(models.Model):
word = models.ForeignKey(Word)
language = models.ForeignKey(Language)
meaning = models.CharField(max_length=100)
class Meta:
unique_together = ('word', 'language')
class WordState(models.Model):
"""
Tracks the memorization state of a word for a user.
The `state` column indicates how well the user remembers the word.
Its value runs from 1 to 6, and each value determines the span from the
word's last appearance until the next vocabulary game (stored in `next_date`):
1: 0 days
2: 1 day
3: 3 days
4: 1 week
5: 2 weeks
6: Mark as memorized
If the user can answer the meaning of a word whose state is 6,
it is marked as remembered and the word disappears from this model.
"""
state = models.SmallIntegerField(default=1)
user = models.ForeignKey(User)
word = models.ForeignKey(Word)
added = models.DateField(auto_now_add=True)
next_date = models.DateField(default=get_today)
class Meta:
unique_together = ('user', 'word')
def level_up(self):
"""
Increment the state of this WordState.
If the state becomes 6, check the other words in the same package,
and if none of them has a state below 6,
change the user's package state to `complete = True`.
Finally, save the current word state.
"""
self.state += 1
if (self.state == 6):
self.save()
n_remaining_words = WordState.objects.filter(
word__package=self.word.package,
user=self.user,
).exclude(
state=6,
).count()
# If all words in the package have finished learning,
# update the package state to `complete = True`.
if n_remaining_words == 0:
package_state = PackageState.objects.get(
user=self.user,
package=self.word.package,
)
package_state.complete = True
package_state.save()
return
if (self.state == 2):
delta = timedelta(days=1)
elif (self.state == 3):
delta = timedelta(days=3)
elif (self.state == 4):
delta = timedelta(days=7)
elif (self.state == 5):
delta = timedelta(days=14)
self.next_date = get_today() + delta
self.save()
def level_reset(self):
self.state = 1
self.next_date = get_today()
self.save()
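# get_learning_words() picks up to `count` words that are due for review
# (next_date <= now and state <= 5), highest state first, and returns each
# with its meaning translated into the user's language.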
@staticmethod
def get_learning_words(user, count):
result = []
learning_words = WordState.objects.filter(
user=user,
state__lte=5,
next_date__lte=timezone.now(),
).order_by('-state', '-next_date')[:count]
for learning_word in learning_words:
result.append(dict(
id=learning_word.id,
word_text=learning_word.word.word_text,
meaning=TranslatedWord.objects.get(
language=user.userinfo.language,
word=learning_word.word,
).meaning,
state=learning_word.state,
))
return result
class History(models.Model):
user = models.ForeignKey(User, related_name='vocabulary_history_user')
n_failed = models.SmallIntegerField(default=0)
n_complete = models.SmallIntegerField(default=0)
n_levelup = models.SmallIntegerField(default=0)
n_state1 = models.IntegerField(default=0)
n_state2 = models.IntegerField(default=0)
n_state3 = models.IntegerField(default=0)
n_state4 = models.IntegerField(default=0)
n_state5 = models.IntegerField(default=0)
n_state6 = models.IntegerField(default=0)
date = models.DateField(auto_now_add=True)
class Meta:
ordering = ['date']
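# get_formatted_stats() turns the per-day History rows into six chart-ready
# series ({'x': date, 'y': count}), one per state, covering the span from the
# user's first to last recorded day.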
@staticmethod
def get_formatted_stats(user):
result = {
'state1': [],
'state2': [],
'state3': [],
'state4': [],
'state5': [],
'state6': [],
}
histories = History.objects.filter(user=user)
if len(histories) == 0:
return result
start_date = histories[0].date
end_date = histories[len(histories) - 1].date
for date in date_range(start_date, end_date + timedelta(1)):
that_day = histories.filter(date=date)
if not that_day.exists():
continue
that_day = that_day[0]
result['state1'].append({
'x': date,
'y': that_day.n_state1,
})
result['state2'].append({
'x': date,
'y': that_day.n_state2,
})
result['state3'].append({
'x': date,
'y': that_day.n_state3,
})
result['state4'].append({
'x': date,
'y': that_day.n_state4,
})
result['state5'].append({
'x': date,
'y': that_day.n_state5,
})
result['state6'].append({
'x': date,
'y': that_day.n_state6,
})
return result
| gpl-3.0 | -4,291,319,311,046,072,000 | 30.131783 | 74 | 0.541086 | false |
blueskycoco/rt-thread | bsp/gd32e230k-start/rtconfig.py | 12 | 3354 | import os
# toolchains options
ARCH='arm'
CPU='cortex-m23'
CROSS_TOOL='keil'
if os.getenv('RTT_CC'):
CROSS_TOOL = os.getenv('RTT_CC')
# cross_tool provides the cross compiler
# EXEC_PATH is the compiler execution path, for example, CodeSourcery, Keil MDK, IAR
if CROSS_TOOL == 'gcc':
PLATFORM = 'gcc'
EXEC_PATH = r'D:/toolchain/gnu_tools_arm_embedded/5.4_2016q3/bin'
elif CROSS_TOOL == 'keil':
PLATFORM = 'armcc'
EXEC_PATH = r'C:/Keil_v5'
elif CROSS_TOOL == 'iar':
PLATFORM = 'iar'
EXEC_PATH = r'C:/Program Files (x86)/IAR Systems/Embedded Workbench 8.0'
if os.getenv('RTT_EXEC_PATH'):
EXEC_PATH = os.getenv('RTT_EXEC_PATH')
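# The defaults above can be overridden from the environment; an illustrative
# invocation (scons is assumed here as the usual RT-Thread build entry point):
# RTT_CC=gcc RTT_EXEC_PATH=/opt/gcc-arm-none-eabi/bin scons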
BUILD = 'debug'
if PLATFORM == 'gcc':
# tool-chains
PREFIX = 'arm-none-eabi-'
CC = PREFIX + 'gcc'
AS = PREFIX + 'gcc'
AR = PREFIX + 'ar'
LINK = PREFIX + 'gcc'
TARGET_EXT = 'elf'
SIZE = PREFIX + 'size'
OBJDUMP = PREFIX + 'objdump'
OBJCPY = PREFIX + 'objcopy'
DEVICE = ' -mcpu=cortex-m23 -mthumb -ffunction-sections -fdata-sections'
CFLAGS = DEVICE + ' -Dgcc' # -D' + PART_TYPE
AFLAGS = ' -c' + DEVICE + ' -x assembler-with-cpp -Wa,-mimplicit-it=thumb '
LFLAGS = DEVICE + ' -Wl,--gc-sections,-Map=rtthread-gd32.map,-cref,-u,Reset_Handler -T gd32_rom.ld'
CPATH = ''
LPATH = ''
if BUILD == 'debug':
CFLAGS += ' -O0 -gdwarf-2 -g'
AFLAGS += ' -gdwarf-2'
else:
CFLAGS += ' -O2'
POST_ACTION = OBJCPY + ' -O binary $TARGET rtthread.bin\n' + SIZE + ' $TARGET \n'
elif PLATFORM == 'armcc':
# toolchains
CC = 'armcc'
AS = 'armasm'
AR = 'armar'
LINK = 'armlink'
TARGET_EXT = 'axf'
DEVICE = ' --cpu Cortex-M23'
CFLAGS = DEVICE + ' --apcs=interwork'
AFLAGS = DEVICE
LFLAGS = DEVICE + ' --info sizes --info totals --info unused --info veneers --list rtthread-gd32.map --scatter gd32_rom.sct'
LFLAGS += ' --keep *.o(.rti_fn.*) --keep *.o(FSymTab) --keep *.o(VSymTab)'
EXEC_PATH += '/ARM/ARMCC/bin'
print(EXEC_PATH)
if BUILD == 'debug':
CFLAGS += ' -g -O0'
AFLAGS += ' -g'
else:
CFLAGS += ' -O2'
POST_ACTION = 'fromelf --bin $TARGET --output rtthread.bin \nfromelf -z $TARGET'
elif PLATFORM == 'iar':
# toolchains
CC = 'iccarm'
AS = 'iasmarm'
AR = 'iarchive'
LINK = 'ilinkarm'
TARGET_EXT = 'out'
DEVICE = ' -D USE_STDPERIPH_DRIVER' + ' -D GD32F30X_HD'
CFLAGS = DEVICE
CFLAGS += ' --diag_suppress Pa050'
CFLAGS += ' --no_cse'
CFLAGS += ' --no_unroll'
CFLAGS += ' --no_inline'
CFLAGS += ' --no_code_motion'
CFLAGS += ' --no_tbaa'
CFLAGS += ' --no_clustering'
CFLAGS += ' --no_scheduling'
CFLAGS += ' --debug'
CFLAGS += ' --endian=little'
CFLAGS += ' --cpu=Cortex-M23'
CFLAGS += ' -e'
CFLAGS += ' --fpu=None'
CFLAGS += ' --dlib_config "' + EXEC_PATH + '/arm/INC/c/DLib_Config_Normal.h"'
CFLAGS += ' -Ol'
CFLAGS += ' --use_c++_inline'
AFLAGS = ''
AFLAGS += ' -s+'
AFLAGS += ' -w+'
AFLAGS += ' -r'
AFLAGS += ' --cpu Cortex-M23'
AFLAGS += ' --fpu None'
LFLAGS = ' --config gd32_rom.icf'
LFLAGS += ' --redirect _Printf=_PrintfTiny'
LFLAGS += ' --redirect _Scanf=_ScanfSmall'
LFLAGS += ' --entry __iar_program_start'
EXEC_PATH += '/arm/bin/'
POST_ACTION = ''
| gpl-2.0 | -8,796,945,031,366,751,000 | 26.048387 | 128 | 0.55963 | false |
clever-crow-consulting/otm-core | opentreemap/treemap/species/codes.py | 8 | 98503 | # flake8: noqa
def all_itree_region_codes():
return _CODES.keys()
def all_species_codes():
return species_codes_for_regions(all_itree_region_codes())
def species_codes_for_regions(region_codes):
if region_codes is None:
return None
species_codes = []
for region_code in region_codes:
species_codes.extend(_CODES[region_code])
# Converting to a set removes duplicates
return list(set(species_codes))
def get_itree_code(region_code, otm_code):
if otm_code:
if region_code in _CODES:
if otm_code in _CODES[region_code]:
return _CODES[region_code][otm_code]
return None
# The ``_CODES`` dictionary has the following format
#
# {
# itree_region_code: {
# otm_code: itree_code
# }
# }
#
# The otm_code of a species is usually the same as a
# USDA code, but this is not guaranteed.
#
# Calculating eco benefits for a tree requires a diameter
# and an itree code. The itree code for a tree is based on
# the species of the tree, but the specific code assigned
# to a species changes depending on where that species
# is located geographically.
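# A few sample lookups, with values taken from the table below ('XYZ123' is a
# made-up code used only to show the fallthrough case):
#
# get_itree_code('CaNCCoJBK', 'CICA') # -> 'CICA'
# get_itree_code('CaNCCoJBK', 'ACPL') # -> 'BDL OTHER'
# get_itree_code('CaNCCoJBK', 'XYZ123') # -> None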
_CODES = {
##################################################
'CaNCCoJBK': {
'AB': 'CEL OTHER',
'AC': 'BDM OTHER',
'ACBA2': 'ACME',
'ACBU': 'BDS OTHER',
'ACDE': 'ACME',
'ACDE2': 'ACME',
'ACLO': 'ACME',
'ACMA': 'BDL OTHER',
'ACME': 'ACME',
'ACNE': 'BDL OTHER',
'ACPA': 'ACPA',
'ACPL': 'BDL OTHER',
'ACPS': 'BDM OTHER',
'ACRU': 'BDM OTHER',
'ACSA1': 'BDL OTHER',
'ACSA2': 'BDL OTHER',
'ACVE2': 'ACME',
'AECA2': 'BES OTHER',
'AEHI': 'BES OTHER',
'AGFL': 'BES OTHER',
'AIAL': 'BDM OTHER',
'ALAR': 'PES OTHER',
'ALCO2': 'BDM OTHER',
'ALJU': 'BDM OTHER',
'ALRH': 'BDM OTHER',
'ALRU2': 'BDM OTHER',
'ARCU': 'PES OTHER',
'ARHE': 'CEL OTHER',
'ARME': 'BEM OTHER',
'ARRO': 'PES OTHER',
'ARUN': 'BES OTHER',
'BDL OTHER': 'ULAM',
'BDM OTHER': 'FRVE',
'BDS OTHER': 'PRCE',
'BEAL2': 'BDM OTHER',
'BEL OTHER': 'QUAG',
'BEM OTHER': 'CICA',
'BENI': 'BDM OTHER',
'BEPE': 'BDM OTHER',
'BES OTHER': 'PYKA',
'BR': 'PES OTHER',
'BRED': 'PES OTHER',
'BRPA': 'BDM OTHER',
'BUCA': 'PES OTHER',
'CABE': 'BDM OTHER',
'CACA3': 'BDM OTHER',
'CACI': 'BES OTHER',
'CADE2': 'CEL OTHER',
'CAEQ': 'CEL OTHER',
'CAER': 'BDM OTHER',
'CAJA9': 'BES OTHER',
'CALE': 'BES OTHER',
'CASA5': 'BES OTHER',
'CASP': 'BDL OTHER',
'CAVI': 'BES OTHER',
'CEAT': 'CEL OTHER',
'CEAU': 'BDL OTHER',
'CECA': 'BDS OTHER',
'CEDE': 'CEL OTHER',
'CEL OTHER': 'PIRA',
'CEM OTHER': 'PIBR2',
'CEOC': 'BDL OTHER',
'CEOC3': 'BDS OTHER',
'CERE': 'BDS OTHER',
'CERE2': 'BDS OTHER',
'CESI3': 'BEM OTHER',
'CESI4': 'BDL OTHER',
'CES OTHER': 'PICO5',
'CETH': 'BES OTHER',
'CHFU': 'CEL OTHER',
'CHHU': 'PES OTHER',
'CHLA2': 'CEL OTHER',
'CHSP': 'CEL OTHER',
'CIAU': 'BES OTHER',
'CICA': 'CICA',
'CILI': 'BES OTHER',
'CIPA': 'BES OTHER',
'CISI': 'BES OTHER',
'COAU': 'PES OTHER',
'COBU': 'BES OTHER',
'COCO1': 'BDS OTHER',
'CONU2': 'BDM OTHER',
'CR': 'BDS OTHER',
'CRDO': 'BDS OTHER',
'CRPH': 'BDS OTHER',
'CUAN': 'BES OTHER',
'CUAR': 'CEM OTHER',
'CULE': 'CEM OTHER',
'CUMA': 'CEL OTHER',
'CUSE': 'CEM OTHER',
'CYSC4': 'BDS OTHER',
'DAIM': 'BDS OTHER',
'DIVI': 'BDS OTHER',
'DOVI': 'BES OTHER',
'DRDR': 'PES OTHER',
'ERDE': 'BES OTHER',
'ERJA': 'BES OTHER',
'EU1': 'EUGL',
'EUCI': 'EUGL',
'EUCI2': 'EUGL',
'EUFI81': 'EUGL',
'EUGL': 'EUGL',
'EUGU': 'EUGL',
'EULE': 'EUGL',
'EULE2': 'EUGL',
'EUMA': 'EUGL',
'EUNI': 'EUGL',
'EUNY': 'BES OTHER',
'EUPA2': 'BES OTHER',
'EUPA26': 'EUGL',
'EUPO': 'EUGL',
'EURU': 'EUGL',
'EUSI': 'EUGL',
'EUSM': 'BES OTHER',
'EUTO11': 'EUGL',
'FASY': 'BDL OTHER',
'FESE': 'BES OTHER',
'FIBE': 'BEM OTHER',
'FICA': 'BDS OTHER',
'FIEL': 'BES OTHER',
'FIRE4': 'BEM OTHER',
'FR': 'FRVE',
'FRAN2': 'FRVE',
'FRCA6': 'BES OTHER',
'FRME2': 'BES OTHER',
'FRUH': 'BDL OTHER',
'FRVE': 'FRVE',
'GAEL': 'BES OTHER',
'GEPA': 'BEM OTHER',
'GIBI': 'GIBI',
'GLTR': 'ROPS',
'GRRO': 'BEL OTHER',
'HASU': 'CES OTHER',
'HEAR': 'BES OTHER',
'HISY': 'BDS OTHER',
'HYFL': 'BES OTHER',
'ILAL': 'BES OTHER',
'ILCO2': 'BES OTHER',
'JA6': 'BES OTHER',
'JAMI': 'BDM OTHER',
'JU': 'CES OTHER',
'JUCA1': 'CEM OTHER',
'JUCH': 'CES OTHER',
'JUNI': 'BDL OTHER',
'JUOC': 'CEL OTHER',
'JURE': 'BDM OTHER',
'KOBI': 'BDM OTHER',
'KOEL': 'BDM OTHER',
'LAIN': 'BDS OTHER',
'LANO': 'BEM OTHER',
'LAPA': 'BEM OTHER',
'LELA12': 'BES OTHER',
'LIDE': 'BEL OTHER',
'LIFO': 'BDM OTHER',
'LIJA': 'BES OTHER',
'LILU': 'BEM OTHER',
'LIOV': 'BES OTHER',
'LIST': 'LIST',
'LITU': 'LITU',
'LYFL': 'BEM OTHER',
'LYRA': 'BES OTHER',
'MA2': 'BDS OTHER',
'MABO': 'BEM OTHER',
'MAGR': 'MAGR',
'MAPU': 'BDS OTHER',
'MASO': 'BDS OTHER',
'MAST': 'BDS OTHER',
'MASY2': 'BDS OTHER',
'MATI': 'BDS OTHER',
'MEAZ': 'BDM OTHER',
'MEEX': 'BEM OTHER',
'MEGL': 'BDL OTHER',
'MELE': 'BEM OTHER',
'MELI7': 'BES OTHER',
'MENE': 'BES OTHER',
'MEQU': 'BEM OTHER',
'MEST': 'BEM OTHER',
'MOAL': 'BDM OTHER',
'MOAR': 'BES OTHER',
'MUPA3': 'BES OTHER',
'MYCA': 'BES OTHER',
'MYLA': 'BES OTHER',
'NEOL': 'BES OTHER',
'NIGL': 'BES OTHER',
'NYSY': 'BDM OTHER',
'OLEU': 'BEM OTHER',
'OPFI': 'CES OTHER',
'OTHER': 'BDM OTHER',
'PEAM': 'BEM OTHER',
'PEL OTHER': 'PHCA',
'PEM OTHER': 'PHDA4',
'PES OTHER': 'WARO',
'PH18': 'BES OTHER',
'PHCA': 'PEL OTHER',
'PHDA4': 'PEL OTHER',
'PHFR': 'BES OTHER',
'PHRO': 'PES OTHER',
'PI2': 'PIRA',
'PI23': 'PIUN',
'PIBR2': 'PIBR2',
'PICA': 'PIRA',
'PICE': 'PIRA',
'PICH': 'PICH',
'PICO2': 'PIRA',
'PICO5': 'PICO5',
'PICR': 'PIUN',
'PIEU': 'PIUN',
'PIHA': 'PIRA',
'PIPA4': 'PIRA',
'PIPI2': 'PIRA',
'PIPU': 'CEL OTHER',
'PIRA': 'PIRA',
'PIRH': 'PIUN',
'PITH': 'PIRA',
'PITO': 'PIUN',
'PITO2': 'PIRA',
'PIUN': 'PIUN',
'PIVI5': 'PIUN',
'PLAC': 'PLAC',
'PLRA': 'BDL OTHER',
'POAL': 'BDL OTHER',
'POBA': 'BDL OTHER',
'POFR': 'BDL OTHER',
'POGR2': 'BEL OTHER',
'POMA': 'BES OTHER',
'PONI': 'BDL OTHER',
'POTR1': 'BDM OTHER',
'PR': 'BDS OTHER',
'PRAM2': 'BDS OTHER',
'PRAR': 'BDS OTHER',
'PRBL': 'BDS OTHER',
'PRCA': 'BDS OTHER',
'PRCE': 'PRCE',
'PRDO': 'BDS OTHER',
'PRIL': 'BDS OTHER',
'PRLY': 'BDS OTHER',
'PRPE2': 'BDS OTHER',
'PRPI': 'BDS OTHER',
'PRSE2': 'BDS OTHER',
'PRYE': 'BDS OTHER',
'PSME': 'CEL OTHER',
'PTTR': 'BDS OTHER',
'PYCA': 'PYCA',
'PYCO': 'BDS OTHER',
'PYIO': 'BDS OTHER',
'PYKA': 'PYKA',
'PYSP': 'BES OTHER',
'QU': 'QUAG',
'QUAG': 'QUAG',
'QUCH': 'QUAG',
'QUCO': 'QUAG',
'QUIL2': 'QUAG',
'QUKE': 'QUAG',
'QULO': 'QUAG',
'QUPA': 'QUAG',
'QURO': 'QUAG',
'QURU': 'QUAG',
'QUSU': 'QUAG',
'QUVI': 'QUAG',
'RHIN2': 'BES OTHER',
'RHLA': 'BES OTHER',
'RHOV': 'BES OTHER',
'RHSP1': 'BES OTHER',
'ROPS': 'ROPS',
'SA': 'BDM OTHER',
'SA12': 'BDS OTHER',
'SABA': 'BDM OTHER',
'SACA': 'BDS OTHER',
'SADI': 'BDS OTHER',
'SALA1': 'BDS OTHER',
'SALU': 'BDS OTHER',
'SAMA': 'BDM OTHER',
'SC3': 'BES OTHER',
'SCMO': 'BEM OTHER',
'SCPO': 'BES OTHER',
'SCTE': 'BES OTHER',
'SEGI': 'CEL OTHER',
'SESE': 'SESE',
'SETR': 'BDS OTHER',
'STNI': 'BES OTHER',
'SYRE': 'BDS OTHER',
'TABA': 'BES OTHER',
'TACH2': 'BDS OTHER',
'TAIM': 'BES OTHER',
'THOC': 'CEL OTHER',
'THPL': 'CEL OTHER',
'TICO': 'BDM OTHER',
'TIEU': 'BDM OTHER',
'TIUR': 'BES OTHER',
'TRCO': 'BEL OTHER',
'TRFO': 'PES OTHER',
'TRLA': 'BES OTHER',
'TRSE6': 'BDM OTHER',
'ULAM': 'ULAM',
'ULPA': 'ULPA',
'ULPU': 'ULAM',
'UMCA': 'BEL OTHER',
'VIJA': 'BES OTHER',
'WAFI': 'PES OTHER',
'WARO': 'WARO',
'XYCO': 'BES OTHER',
'YUAL': 'PES OTHER',
'YUGL2': 'PES OTHER',
'YURE': 'PES OTHER',
'YUTO': 'PES OTHER',
'ZESE': 'BDL OTHER'
} # END CaNCCoJBK
,
##################################################
'CenFlaXXX': {
'ACAC2': 'PES OTHER',
'ACNE': 'BDM OTHER',
'ACPA': 'BDS OTHER',
'ACRU': 'ACRU',
'ACSP2': 'BEM OTHER',
'ACWR': 'PES OTHER',
'ALJU': 'BDS OTHER',
'ALLE': 'BDL OTHER',
'ARCU': 'PES OTHER',
'ARHE': 'CEL OTHER',
'AVCA': 'BES OTHER',
'BA': 'BES OTHER',
'BAVA': 'BEM OTHER',
'BDL OTHER': 'QULA2',
'BDM OTHER': 'ULPA',
'BDS OTHER': 'LAIN',
'BE': 'BDM OTHER',
'BEL OTHER': 'MAGR',
'BEM OTHER': 'CICA',
'BENI': 'BDM OTHER',
'BES OTHER': 'ERJA',
'BIJA': 'BEM OTHER',
'BINO': 'PES OTHER',
'BOSP': 'BES OTHER',
'BRAC': 'BEL OTHER',
'BRPA': 'BDM OTHER',
'BRSU': 'BES OTHER',
'BUCA': 'PES OTHER',
'CA1': 'BDL OTHER',
'CABI2': 'BDS OTHER',
'CACA': 'BDM OTHER',
'CAEQ': 'BEL OTHER',
'CAFI': 'BDM OTHER',
'CAGL': 'BDL OTHER',
'CAIL': 'BDL OTHER',
'CAME': 'BES OTHER',
'CAMI36': 'PES OTHER',
'CAOV': 'BDL OTHER',
'CAOV2': 'BDL OTHER',
'CAPA3': 'BES OTHER',
'CAPU13': 'BES OTHER',
'CASSI': 'BDS OTHER',
'CASU34': 'BES OTHER',
'CATO': 'BDL OTHER',
'CAVI': 'BEM OTHER',
'CECA': 'BDS OTHER',
'CEDE': 'CEL OTHER',
'CELA': 'CEL OTHER',
'CEL OTHER': 'PIEL2',
'CEM OTHER': 'JUSI',
'CES OTHER': 'THOR',
'CHHU': 'PES OTHER',
'CHLU': 'PES OTHER',
'CHSP': 'BDL OTHER',
'CICA': 'CICA',
'CISP': 'BES OTHER',
'CL6': 'BES OTHER',
'CLQU': 'BES OTHER',
'COCI': 'BEL OTHER',
'COFL': 'BDS OTHER',
'CONU': 'PEL OTHER',
'COPR': 'PES OTHER',
'COUV': 'BES OTHER',
'CUAN': 'BEM OTHER',
'CUAR': 'CEL OTHER',
'CULA': 'CEL OTHER',
'CULE': 'CEM OTHER',
'CUSE': 'CEM OTHER',
'CYCI': 'PEM OTHER',
'CYRE11': 'PES OTHER',
'DASI': 'BDL OTHER',
'DERE': 'BDS OTHER',
'DIVI': 'BDM OTHER',
'DYDE2': 'PES OTHER',
'ELDE': 'BEL OTHER',
'ENCY': 'BDL OTHER',
'ERCR': 'BEM OTHER',
'ERJA': 'ERJA',
'EU1': 'BEL OTHER',
'EUTI': 'BDS OTHER',
'EUUN2': 'BEM OTHER',
'FE': 'BES OTHER',
'FI1': 'BEM OTHER',
'FIBE2': 'BEL OTHER',
'FICA': 'BDS OTHER',
'FIEL': 'BEL OTHER',
'FILY': 'BEL OTHER',
'FRCA': 'BDM OTHER',
'FRPE': 'BDL OTHER',
'GIBI': 'BDM OTHER',
'GLTRIN': 'BDL OTHER',
'GRRO': 'BEL OTHER',
'HIMO': 'BDS OTHER',
'HITI': 'BDM OTHER',
'HYLA': 'PES OTHER',
'ILCO2': 'BES OTHER',
'ILSP': 'BES OTHER',
'ILVO': 'BES OTHER',
'JAIN': 'BES OTHER',
'JAMI': 'BDM OTHER',
'JU': 'JUSI',
'JUCH': 'CEL OTHER',
'JUSI': 'JUSI',
'KOEL': 'KOELFO',
'LAIN': 'LAIN',
'LICH': 'PEM OTHER',
'LIDE2': 'PEM OTHER',
'LIJA': 'BES OTHER',
'LILU': 'BES OTHER',
'LIOR': 'LIST',
'LIST': 'LIST',
'LITU': 'BDL OTHER',
'LOGO': 'BEL OTHER',
'MAGR': 'MAGR',
'MAIN': 'BES OTHER',
'MAPU': 'BDM OTHER',
'MASO': 'BDS OTHER',
'MAST': 'BDS OTHER',
'MATE': 'BDM OTHER',
'MAVI': 'MAGR',
'MEAZ': 'BDM OTHER',
'MEQU': 'BEL OTHER',
'MOAL': 'BDM OTHER',
'MUPA': 'BES OTHER',
'MYCE': 'BES OTHER',
'NEOL': 'BES OTHER',
'NORE': 'PES OTHER',
'NYSY': 'BDM OTHER',
'PAAC': 'BDS OTHER',
'PATO': 'BDM OTHER',
'PEAM': 'BEL OTHER',
'PEBO': 'BEL OTHER',
'PEL OTHER': 'SYRO',
'PEM OTHER': 'SAPA',
'PES OTHER': 'WARO',
'PHCA': 'PEL OTHER',
'PHDA4': 'PEM OTHER',
'PHFR': 'BES OTHER',
'PHRE': 'PEL OTHER',
'PHRO': 'PES OTHER',
'PHSP2': 'BES OTHER',
'PHSY': 'PEM OTHER',
'PI2': 'CEL OTHER',
'PICL': 'CEL OTHER',
'PIEC': 'CEL OTHER',
'PIEL': 'PIEL2',
'PIFE': 'BES OTHER',
'PIPA': 'CEL OTHER',
'PITA': 'CEL OTHER',
'PLAC1': 'BDL OTHER',
'PLOC': 'PLOC',
'PODE': 'BDL OTHER',
'POMA': 'BEM OTHER',
'PONA': 'BEL OTHER',
'POPI': 'BES OTHER',
'PR': 'BDS OTHER',
'PRAN': 'BDS OTHER',
'PRCA': 'PRCA',
'PRPE2': 'BDS OTHER',
'PRSE1': 'BDL OTHER',
'PYCA': 'BDS OTHER',
'PYCO': 'BDM OTHER',
'QU': 'QULA2',
'QUAC2': 'QULA2',
'QUAL': 'QULA2',
'QUFA': 'QULA2',
'QUGE': 'QUVI',
'QULA': 'BDS OTHER',
'QULA2': 'QULA2',
'QUMI': 'QULA2',
'QUNI': 'QULA2',
'QUNU': 'QULA2',
'QUPH': 'QULA2',
'QUSH': 'QUSH',
'QUVI': 'QUVI',
'RAGL': 'PES OTHER',
'RHIN62': 'BES OTHER',
'SABA': 'BDM OTHER',
'SANI4': 'BDS OTHER',
'SAPA': 'SAPA',
'SCTE': 'BDS OTHER',
'SEPU7': 'BDS OTHER',
'SIGL': 'BEL OTHER',
'SWMA': 'BDL OTHER',
'SYRO': 'SYRO',
'TAAS': 'BDL OTHER',
'TACA': 'BEL OTHER',
'TACH': 'BEL OTHER',
'TADI': 'BDL OTHER',
'TAIN': 'BEL OTHER',
'TAPA': 'BEL OTHER',
'TASP': 'BDL OTHER',
'TH9': 'CEL OTHER',
'THOR': 'THOR',
'TIGR3': 'BES OTHER',
'TITI2': 'BDL OTHER',
'TRSE6': 'TRSE6',
'ULAL': 'BDL OTHER',
'ULAM': 'BDL OTHER',
'ULPA': 'ULPA',
'ULPU': 'BDL OTHER',
'ULRU': 'BDM OTHER',
'ULS': 'BDL OTHER',
'ULX': 'BDL OTHER',
'UNP': 'PES OTHER',
'UNS': 'BDS OTHER',
'VEME': 'PES OTHER',
'VIOD': 'BES OTHER',
'VISP2': 'BES OTHER',
'WAFI': 'PES OTHER',
'WARO': 'WARO',
'WOBI2': 'PEM OTHER',
'YU1': 'PES OTHER'
} # END CenFlaXXX
,
##################################################
'GulfCoCHS' : {
'ACBU': 'ACRU',
'ACGI': 'ACRU',
'ACNE': 'ACRU',
'ACPA': 'ACRU',
'ACRU': 'ACRU',
'ACRU_O': 'ACRU',
'ACSA1': 'ACRU',
'ACSA2': 'ACRU',
'AEGL': 'BDL OTHER',
'ALJU': 'BDS OTHER',
'BDL OTHER': 'CAIL',
'BDM OTHER': 'PYCA',
'BDS OTHER': 'LAIN',
'BEL OTHER': 'QUVI',
'BEM OTHER': 'MAGR',
'BENI': 'BDM OTHER',
'BES OTHER': 'ILOP',
'BRPA': 'BDM OTHER',
'BUCA': 'BUCA',
'CA1': 'CAIL',
'CABI': 'BDM OTHER',
'CABI2': 'BDS OTHER',
'CACA': 'BDM OTHER',
'CACO': 'CAIL',
'CAGL': 'CAIL',
'CAIL': 'CAIL',
'CASA': 'BES OTHER',
'CATO': 'CAIL',
'CECA': 'BDS OTHER',
'CELA': 'CELA',
'CEL OTHER': 'PITA',
'CEM OTHER': 'JUVI',
'CES OTHER': 'PICO5',
'CHHU': 'PES OTHER',
'CHTH': 'CEL OTHER',
'CIAU2': 'BES OTHER',
'CICA': 'BEM OTHER',
'CLLU': 'BDM OTHER',
'COFL': 'COFL',
'COKO': 'COFL',
'CRPH': 'BDS OTHER',
'CULE': 'CEL OTHER',
'CUSE': 'CEM OTHER',
'CYRE11': 'PES OTHER',
'DIVI': 'BDM OTHER',
'ERJA': 'BES OTHER',
'EUCI': 'BEM OTHER',
'EUSA': 'BEL OTHER',
'FISI': 'BDM OTHER',
'FOIN3': 'BDS OTHER',
'FRAM': 'BDL OTHER',
'FRPE': 'BDL OTHER',
'GIBI': 'BDL OTHER',
'GLTR': 'GLTR',
'HIMU3': 'BDS OTHER',
'HISY': 'BDS OTHER',
'ILAT': 'ILOP',
'ILCA': 'ILOP',
'ILCO2': 'ILOP',
'ILMY': 'ILOP',
'ILOP': 'ILOP',
'ILOP_S': 'ILOP',
'ILSP': 'ILOP',
'ILVO': 'ILOP',
'JU': 'JUVI',
'JUNI': 'BDL OTHER',
'JUVI': 'JUVI',
'KOBI': 'BDM OTHER',
'KOPA': 'BDS OTHER',
'LA6_M': 'LAIN',
'LA6_N': 'LAIN',
'LA6_T1': 'LAIN',
'LA6_T2': 'LAIN',
'LAIN': 'LAIN',
'LIJA': 'BES OTHER',
'LISI': 'BES OTHER',
'LIST': 'LIST',
'LITU': 'BDL OTHER',
'MA2': 'BDS OTHER',
'MAGR': 'MAGR',
'MAGR_L': 'BEM OTHER',
'MASO': 'BDS OTHER',
'MAST': 'BDS OTHER',
'MATR': 'BDS OTHER',
'MEAZ': 'BDM OTHER',
'MEGL': 'CEL OTHER',
'MORU': 'BDM OTHER',
'MYCE2': 'BES OTHER',
'NEOL': 'BES OTHER',
'NYSY': 'BDL OTHER',
'OSFR': 'BES OTHER',
'PAAC': 'BDS OTHER',
'PEBO': 'BEL OTHER',
'PEL OTHER': 'PHCA',
'PEM OTHER': 'SAPA',
'PES OTHER': 'BUCA',
'PHCA': 'PHCA',
'PHFR': 'BES OTHER',
'PHSE': 'BES OTHER',
'PHSP2': 'BES OTHER',
'PICH': 'BDM OTHER',
'PICO5': 'PICO5',
'PIEC': 'PITA',
'PIEL': 'PITA',
'PINI': 'JUVI',
'PIPA': 'PITA',
'PIPU': 'PITA',
'PIST': 'PITA',
'PITA': 'PITA',
'PIVI': 'JUVI',
'PLAC': 'PLOC',
'PLAC_B': 'PLOC',
'PLOC': 'PLOC',
'POAL': 'BDL OTHER',
'PODE': 'BDL OTHER',
'POMA': 'BEM OTHER',
'PR': 'BDS OTHER',
'PRAM': 'BDS OTHER',
'PRCA': 'BEM OTHER',
'PRCE': 'BDS OTHER',
'PRPE2': 'BDS OTHER',
'PRSE1': 'BDL OTHER',
'PRSE2': 'BDS OTHER',
'PYAN': 'BDS OTHER',
'PYCA': 'PYCA',
'PYCA_B': 'PYCA',
'PYCO': 'PYCA',
'PYCO2': 'BES OTHER',
'QU': 'BDL OTHER',
'QUAC': 'BDM OTHER',
'QUAL': 'BDL OTHER',
'QUCI': 'BES OTHER',
'QUCO': 'BDL OTHER',
'QUFA': 'BDL OTHER',
'QUHE': 'BEM OTHER',
'QULA1': 'BDM OTHER',
'QULA2': 'QULA2',
'QUMI': 'BDL OTHER',
'QUNI': 'QUNI',
'QUPA': 'BDL OTHER',
'QUPH': 'QUPH',
'QURU': 'BDL OTHER',
'QUSH': 'BDL OTHER',
'QUST': 'BDL OTHER',
'QUVE': 'BDL OTHER',
'QUVI': 'QUVI',
'ROPS': 'BDM OTHER',
'SA': 'BDM OTHER',
'SAAL': 'BDL OTHER',
'SAMA_T': 'BDM OTHER',
'SAMI8': 'PES OTHER',
'SANI': 'BDM OTHER',
'SAPA': 'SAPA',
'SAPE12': 'BDM OTHER',
'TADI': 'CEL OTHER',
'THOC': 'CEM OTHER',
'TIAM': 'BDL OTHER',
'TICO': 'BDM OTHER',
'TRFO': 'PEM OTHER',
'TRSE6': 'BDM OTHER',
'ULAL': 'BDM OTHER',
'ULAM': 'BDL OTHER',
'ULPA': 'BDM OTHER',
'ULPA_D': 'BDM OTHER',
'ULPA_E': 'BDM OTHER',
'ULPU': 'BDM OTHER',
'ULRU': 'BDL OTHER',
'ULS': 'BDL OTHER',
'VIAG': 'BDS OTHER',
'VIPR': 'BDS OTHER',
'WARO': 'PES OTHER',
'WIFL': 'BDS OTHER',
'YUGL2': 'PES OTHER',
'ZESE': 'BDL OTHER'
} # END GulfCoCHS
,
##################################################
'InlEmpCLM': {
'ABPR': 'CEL OTHER',
'ACBA2': 'BEM OTHER',
'ACDE': 'BEM OTHER',
'ACLO': 'BEM OTHER',
'ACMA': 'BDL OTHER',
'ACME': 'BEM OTHER',
'ACOB': 'BEM OTHER',
'ACPA': 'BDL OTHER',
'ACRU': 'BDL OTHER',
'ACSA1': 'BDL OTHER',
'AECA2': 'BES OTHER',
'AECA3': 'BDS OTHER',
'AGFL': 'BES OTHER',
'AIAL': 'BDM OTHER',
'ALCO2': 'BDM OTHER',
'ALGL': 'BDL OTHER',
'ALJU': 'BDS OTHER',
'ALRH': 'BDM OTHER',
'ARCU': 'PES OTHER',
'ARHE': 'CEL OTHER',
'ARMA2': 'BES OTHER',
'ARRO': 'PES OTHER',
'ARUN': 'BES OTHER',
'BAFO': 'BDS OTHER',
'BAVA': 'BDS OTHER',
'BDL OTHER': 'FRUH',
'BDM OTHER': 'PYCA',
'BDS OTHER': 'LAIN',
'BEL OTHER': 'CICA',
'BEM OTHER': 'MAGR',
'BEPE': 'BDM OTHER',
'BES OTHER': 'SCTE',
'BRAC2': 'BDM OTHER',
'BRAR': 'PES OTHER',
'BRBR': 'PES OTHER',
'BRDI9': 'BDM OTHER',
'BRED': 'PES OTHER',
'BRPO': 'BRPO',
'BUCA': 'PES OTHER',
'CACA3': 'BEM OTHER',
'CACI': 'BES OTHER',
'CADE': 'BDL OTHER',
'CADE2': 'CEL OTHER',
'CAED': 'BEL OTHER',
'CAIL': 'BDL OTHER',
'CALE': 'BES OTHER',
'CASP': 'BDL OTHER',
'CATW': 'BES OTHER',
'CAVI': 'BES OTHER',
'CEAT': 'CEL OTHER',
'CEAU': 'BDL OTHER',
'CECA': 'BDS OTHER',
'CEDE': 'CEL OTHER',
'CEFL': 'BDM OTHER',
'CEL OTHER': 'PICA',
'CEM OTHER': 'PIBR2',
'CEOC': 'BDL OTHER',
'CEOC3': 'BDS OTHER',
'CESI3': 'BEM OTHER',
'CESI4': 'BDL OTHER',
'CES OTHER': 'PICO5',
'CHHU': 'PES OTHER',
'CHLI': 'BDS OTHER',
'CHRE': 'BDS OTHER',
'CHSP': 'BEM OTHER',
'CHTA': 'BDS OTHER',
'CICA': 'CICA',
'CILI': 'BES OTHER',
'CISI': 'BES OTHER',
'COAU': 'PES OTHER',
'COLA': 'BES OTHER',
'CRPA': 'BES OTHER',
'CRRU': 'BES OTHER',
'CUAN': 'BES OTHER',
'CUAR': 'CEM OTHER',
'CULE': 'CES OTHER',
'CUSE': 'CEL OTHER',
'DIVI': 'BDS OTHER',
'DOVI': 'BES OTHER',
'ELDE': 'BDS OTHER',
'ERCA': 'BEL OTHER',
'ERDE': 'BES OTHER',
'ERJA': 'BES OTHER',
'EUCA1': 'EUSI',
'EUCI': 'EUSI',
'EUCI2': 'EUSI',
'EUCL': 'EUSI',
'EUCO3': 'EUSI',
'EUCR': 'EUSI',
'EUFI81': 'EUSI',
'EUGL': 'EUSI',
'EUGLCO': 'EUSI',
'EUGR': 'EUSI',
'EULE': 'EUSI',
'EULE2': 'EUSI',
'EUMA23': 'EUSI',
'EUNI': 'EUSI',
'EUPO': 'EUSI',
'EURO': 'EUSI',
'EURU': 'EUSI',
'EUSI': 'EUSI',
'EUTE': 'EUSI',
'EUTO11': 'EUSI',
'EUVI': 'EUSI',
'FICA': 'BDS OTHER',
'FIMA2': 'BEL OTHER',
'FIMINI': 'BES OTHER',
'FISI': 'BDS OTHER',
'FRANR': 'FRUH',
'FROR2': 'FRUH',
'FRUH': 'FRUH',
'FRVE': 'FRUH',
'FRVEG': 'FRVEG',
'GEPA': 'BES OTHER',
'GIBI': 'GIBI',
'GLTR': 'BDM OTHER',
'GRRO': 'BEL OTHER',
'HASU': 'CES OTHER',
'HEAR': 'BES OTHER',
'HYFL': 'BES OTHER',
'ILAL': 'BES OTHER',
'ILVO': 'BES OTHER',
'JAMI': 'JAMI',
'JUCA2': 'BDL OTHER',
'JUCH': 'CES OTHER',
'JURE': 'BDM OTHER',
'KOBI': 'BDM OTHER',
'KOEL': 'BDM OTHER',
'KOPA': 'BDS OTHER',
'LAIN': 'LAIN',
'LANO': 'BEM OTHER',
'LELA12': 'BES OTHER',
'LIFO': 'LIST',
'LILU': 'BEM OTHER',
'LIOV': 'BES OTHER',
'LIST': 'LIST',
'LITU': 'LITU',
'LYAS': 'BEM OTHER',
'MA2': 'BDS OTHER',
'MABO': 'BEM OTHER',
'MAFL80': 'BDS OTHER',
'MAGR': 'MAGR',
'MALA6': 'BDS OTHER',
'MASO': 'MAGR',
'MASY2': 'BDS OTHER',
'MATE': 'BES OTHER',
'MATI': 'BDS OTHER',
'MEAZ': 'BDM OTHER',
'MEGL': 'BDL OTHER',
'MELI7': 'BES OTHER',
'MEQU': 'BEM OTHER',
'MOAL': 'BDL OTHER',
'MORU': 'BDM OTHER',
'NEOL': 'BES OTHER',
'OLEU': 'BES OTHER',
'OLEU2': 'BES OTHER',
'PAAC': 'BDM OTHER',
'PEAM': 'BDS OTHER',
'PEL OTHER': 'PHCA',
'PEM OTHER': 'PHDA4',
'PES OTHER': 'WARO',
'PHCA': 'PEL OTHER',
'PHDA4': 'PEM OTHER',
'PHFR': 'BES OTHER',
'PHRO': 'PEM OTHER',
'PIAT': 'CEL OTHER',
'PIBR2': 'PIBR2',
'PICA': 'PICA',
'PICH': 'PICH',
'PICO2': 'CEL OTHER',
'PICO5': 'PICO5',
'PIED': 'CES OTHER',
'PIEL2': 'CEL OTHER',
'PIHA': 'CEL OTHER',
'PIMU2': 'CEM OTHER',
'PINI': 'CEL OTHER',
'PIPH2': 'BES OTHER',
'PIPI2': 'CEL OTHER',
'PIPU': 'CEL OTHER',
'PIRA': 'CEL OTHER',
'PIRH': 'BEM OTHER',
'PIRO': 'CEL OTHER',
'PITA': 'CEL OTHER',
'PITH': 'CES OTHER',
'PITO': 'BEM OTHER',
'PITO2': 'CEL OTHER',
'PIUN': 'BEM OTHER',
'PIVI5': 'BEM OTHER',
'PLAC': 'PLAC',
'PLOC': 'PLRA',
'PLRA': 'PLRA',
'POCA2': 'BDL OTHER',
'POGR2': 'BEL OTHER',
'POMA': 'BES OTHER',
'PONI': 'BDL OTHER',
'PRAR': 'BDS OTHER',
'PRBL': 'BDS OTHER',
'PRCA': 'BES OTHER',
'PRCE': 'BDS OTHER',
'PRCEKV': 'BDS OTHER',
'PRCH': 'BDM OTHER',
'PRDO': 'BDS OTHER',
'PRDU': 'BDS OTHER',
'PRIL': 'BDS OTHER',
'PRLY': 'BDS OTHER',
'PRPE2': 'BDS OTHER',
'PRSE2': 'BDS OTHER',
'PSME': 'CEL OTHER',
'PTST': 'BDL OTHER',
'PUGR': 'BDS OTHER',
'PYCA': 'PYCA',
'PYCAA': 'PYCA',
'PYCA_B': 'PYCA',
'PYKA': 'BES OTHER',
'QUAG': 'QUAG',
'QUCO': 'QUAG',
'QUEN': 'QUAG',
'QUIL2': 'QUIL2',
'QUKE': 'QUAG',
'QULO': 'QUAG',
'QUPA': 'QUAG',
'QURO': 'QUAG',
'QURU': 'QUAG',
'QUSU': 'QUAG',
'QUVI': 'QUAG',
'RHIN': 'BDS OTHER',
'ROAMI': 'BDM OTHER',
'ROPS': 'BDL OTHER',
'SAALT': 'BDL OTHER',
'SACANE': 'BDS OTHER',
'SAMA': 'BDL OTHER',
'SCMO': 'SCMO',
'SCPO': 'SCMO',
'SCTE': 'SCTE',
'SECO9': 'BES OTHER',
'SEGI': 'CEL OTHER',
'SESE': 'CEL OTHER',
'SOJA': 'BDM OTHER',
'STSI': 'BES OTHER',
'SYPA2': 'BEM OTHER',
'TAAV': 'BES OTHER',
'TACH3': 'BES OTHER',
'THOC': 'CEL OTHER',
'THOR': 'CES OTHER',
'TRAC': 'PES OTHER',
'TRCO': 'BES OTHER',
'TRFO': 'PES OTHER',
'TRSE6': 'BDM OTHER',
'ULAM': 'BDL OTHER',
'ULPA': 'BEL OTHER',
'ULPU': 'BEL OTHER',
'UMCA': 'BEL OTHER',
'WAFI': 'PES OTHER',
'WARO': 'WARO',
'XYCO': 'PES OTHER',
'YUGL2': 'PES OTHER',
'YUGU': 'PES OTHER',
'ZESE': 'BDL OTHER',
'ZESE_V': 'BDL OTHER'
} # END InlEmpCLM
,
##################################################
'InlValMOD': {
'AC': 'ACSA1',
'ACBU': 'ACSA1',
'ACNE': 'ACSA1',
'ACPA': 'ACSA1',
'ACPL': 'ACSA1',
'ACPL_CK': 'ACSA1',
'ACPS_S': 'ACSA1',
'ACRU': 'ACSA1',
'ACSA1': 'ACSA1',
'ACSP2': 'BEL OTHER',
'AECA3_B': 'BDM OTHER',
'AECA3_S': 'BDM OTHER',
'AIAL': 'BDM OTHER',
'ALJU': 'BDM OTHER',
'ALRH': 'BDM OTHER',
'ARRO': 'PES OTHER',
'BDL OTHER': 'CESI4',
'BDM OTHER': 'PYCA_B',
'BDS OTHER': 'LAIN',
'BEL OTHER': 'QUIL2',
'BEM OTHER': 'CICA',
'BEPE': 'BEPE',
'BES OTHER': 'PYKA',
'BRED': 'PEM OTHER',
'BRPA': 'BDS OTHER',
'CABE': 'BDM OTHER',
'CABE_F': 'BDM OTHER',
'CACI': 'BES OTHER',
'CADE2': 'CEL OTHER',
'CAIL': 'BDL OTHER',
'CE2': 'BDL OTHER',
'CEAT': 'CEL OTHER',
'CECA': 'BDS OTHER',
'CEDE': 'CEL OTHER',
'CEL OTHER': 'PIRA',
'CEM OTHER': 'PIBR2',
'CEOC': 'BDL OTHER',
'CEOC3': 'BDS OTHER',
'CESI2': 'BDS OTHER',
'CESI3': 'BDL OTHER',
'CESI4': 'CESI4',
'CES OTHER': 'PICO5',
'CHLI': 'BDS OTHER',
'CICA': 'CICA',
'CISP': 'BES OTHER',
'COFL': 'BES OTHER',
'CR': 'BDS OTHER',
'CRLA80': 'BDS OTHER',
'CRPH': 'BDS OTHER',
'CU': 'CEL OTHER',
'CULE': 'CEL OTHER',
'CUMA': 'CEL OTHER',
'CUSE': 'CEL OTHER',
'DIKA': 'BDM OTHER',
'ELAN': 'BDS OTHER',
'ERDE': 'BES OTHER',
'ERJA': 'BES OTHER',
'EU1': 'BEL OTHER',
'EUPO': 'BEL OTHER',
'FASY': 'BDL OTHER',
'FICA': 'BDS OTHER',
'FRAM': 'FRVE_G',
'FRAM_A': 'FRVE_G',
'FRAM_R': 'FRVE_G',
'FRAN_R': 'FRAN_R',
'FREX': 'FRVE_G',
'FREXH': 'FREX_H',
'FREX_K': 'FREX_H',
'FRHO': 'FRHO',
'FROX_F': 'FRAN_R',
'FRPE': 'FRPE_M',
'FRPEM': 'FRPE_M',
'FRPES': 'FRPE_M',
'FRPE2': 'FRPE_M',
'FRUH': 'FRPE_M',
'FRVE': 'FRVE_G',
'FRVE_FW': 'FRVE_G',
'FRVEG': 'FRVE_G',
'GIBI': 'GIBI',
'GIBI_AG': 'GIBI',
'GIBI_F': 'GIBI',
'GL3': 'GLTR',
'GLTR': 'GLTR',
'GLTRS1': 'GLTR',
'GRRO': 'BEL OTHER',
'HISY': 'BDS OTHER',
'JU1': 'BDL OTHER',
'JUHI': 'BDL OTHER',
'JUNI': 'BDL OTHER',
'KOPA': 'KOPA',
'KOPA_F': 'KOPA',
'LAIN': 'LAIN',
'LANO': 'BEM OTHER',
'LIFO': 'LIST',
'LILU': 'BES OTHER',
'LIST': 'LIST',
'LITU': 'LIST',
'MA1': 'MAGR',
'MA2': 'BDS OTHER',
'MABO': 'BEM OTHER',
'MAFL80': 'BDS OTHER',
'MAGR': 'MAGR',
'MASO': 'MAGR',
'MASO_G': 'MAGR',
'MEAZ': 'BDM OTHER',
'MOAL': 'BDM OTHER',
'NEOL': 'BES OTHER',
'NYSY': 'BDM OTHER',
'OLEU': 'BEM OTHER',
'OSVI': 'BDM OTHER',
'PEL OTHER': 'PHCA',
'PEM OTHER': 'PHDA4',
'PES OTHER': 'WARO',
'PHCA': 'PHCA',
'PHDA4': 'PHDA4',
'PHFR': 'BES OTHER',
'PHSE': 'BES OTHER',
'PHSP2': 'BES OTHER',
'PI1': 'CEL OTHER',
'PI2': 'CEL OTHER',
'PI23': 'BES OTHER',
'PIAT4': 'PICH',
'PIBR2': 'PIBR2',
'PICH': 'PICH',
'PICH_PS': 'PICH',
'PICO5': 'PICO5',
'PINI': 'CEM OTHER',
'PIPA4': 'CEL OTHER',
'PIPI2': 'CEL OTHER',
'PIPU': 'CEL OTHER',
'PIRA': 'PIRA',
'PISY': 'CEL OTHER',
'PITH': 'PITH',
'PIUN': 'BES OTHER',
'PLAC': 'PLAC',
'PLAC_B': 'PLAC',
'PLAC_C': 'PLAC',
'PLOC': 'PLAC',
'PLOR': 'PLAC',
'PO': 'BDL OTHER',
'POGR2': 'BEL OTHER',
'PR': 'BDS OTHER',
'PRAM2': 'BDS OTHER',
'PRAR': 'BDM OTHER',
'PRAV': 'BDM OTHER',
'PRBL': 'BDS OTHER',
'PRCE': 'BDS OTHER',
'PRPE2': 'BDS OTHER',
'PRSU': 'BDS OTHER',
'PUGR': 'BDS OTHER',
'PY': 'BDM OTHER',
'PYCAA': 'BDM OTHER',
'PYCA_B': 'PYCA_B',
'PYCA_C': 'BDM OTHER',
'PYCA_CH': 'BDM OTHER',
'PYCA_R': 'BDM OTHER',
'PYCA_T': 'BDM OTHER',
'PYCA_W': 'BDM OTHER',
'PYKA': 'PYKA',
'QU': 'BEL OTHER',
'QUAG': 'BEL OTHER',
'QUCO': 'BEL OTHER',
'QUIL2': 'QUIL2',
'QULO': 'BEL OTHER',
'QUPA': 'BEL OTHER',
'QURU': 'BEL OTHER',
'QUSU': 'BEL OTHER',
'QUWI': 'BEL OTHER',
'RHLA': 'BES OTHER',
'ROPS': 'GLTR',
'ROPS_PR': 'GLTR',
'SA': 'BDM OTHER',
'SAMA': 'BDS OTHER',
'SAPE12': 'BDL OTHER',
'SCMO': 'BEM OTHER',
'SEGI': 'CEL OTHER',
'SESE': 'CEL OTHER',
'SOHUCF': 'BDS OTHER',
'SOHUCQ': 'BDS OTHER',
'SOJA': 'PICH',
'TADI': 'CEL OTHER',
'TI': 'BDM OTHER',
'TRFO': 'PEM OTHER',
'TRLA': 'BES OTHER',
'TRLA_E': 'BES OTHER',
'TRSE6': 'BDM OTHER',
'ULPA': 'ZESE',
'ULS': 'ZESE',
'UMCA': 'BEL OTHER',
'WAFI': 'PES OTHER',
'WARO': 'WARO',
'ZESE': 'ZESE',
'ZESE_V': 'ZESE'
} # END InlValMOD
,
##################################################
'InterWABQ': {
'ACNE': 'BDL OTHER',
'ACPL': 'BDL OTHER',
'ACSA1': 'BDL OTHER',
'AIAL': 'BDL OTHER',
'ALJU': 'BDS OTHER',
'BDL OTHER': 'FRAM',
'BDM OTHER': 'FRAN2',
'BDS OTHER': 'KOPA',
'BEL OTHER': 'EUGL',
'BEM OTHER': 'EUMI2',
'BES OTHER': 'ILOP',
'CA3': 'BDL OTHER',
'CASP': 'BDL OTHER',
'CECA': 'BDS OTHER',
'CEDE': 'CEL OTHER',
'CEL OTHER': 'PIPO',
'CEM OTHER': 'PINI',
'CERE2': 'BDS OTHER',
'CESI4': 'BDM OTHER',
'CES OTHER': 'PIED',
'CH31': 'BDS OTHER',
'CHLI': 'CHLI',
'CR': 'BDS OTHER',
'CUAR': 'CEM OTHER',
'CULE': 'CEL OTHER',
'ELAN': 'ELAN',
'EUGL': 'EUGL',
'EUMI2': 'EUMI2',
'FOPU2': 'BDS OTHER',
'FRAM': 'FRAM',
'FRAN2': 'FRAN2',
'FRBE': 'FRVE',
'FRPE': 'FRPE',
'FRPE2': 'FRPE',
'FRPEM': 'FRPE',
'FRVE': 'FRVE',
'FRVEG': 'FRVE',
'GLTR': 'GLTR',
'GYDI': 'BDL OTHER',
'ILOP': 'ILOP',
'JU': 'CEM OTHER',
'JUSC': 'CES OTHER',
'KOPA': 'KOPA',
'MA1': 'BES OTHER',
'MA2': 'BDS OTHER',
'MAPO': 'BDM OTHER',
'MO': 'BDM OTHER',
'OTHER': 'BDM OTHER',
'PEL OTHER': 'PHCA',
'PEM OTHER': 'PHDA4',
'PES OTHER': 'WARO',
'PHCA': 'PHCA',
'PHDA4': 'PHDA4',
'PI1': 'CEL OTHER',
'PICH': 'PICH',
'PIED': 'PIED',
'PIEL2': 'CEL OTHER',
'PIFL': 'CEM OTHER',
'PINI': 'PINI',
'PIPO': 'PIPO',
'PIST2': 'CEL OTHER',
'PISY': 'PISY',
'PLAC': 'PLAC',
'PLWR': 'BDL OTHER',
'PO': 'POAN',
'POAN': 'POAN',
'POFR': 'POFR',
'PR': 'PRCE',
'PRCE': 'PRCE',
'PY': 'PYCA',
'PYCA': 'PYCA',
'QU': 'BDL OTHER',
'QUMA1': 'BDL OTHER',
'QUSH': 'BDL OTHER',
'ROPS': 'BDL OTHER',
'SAMA': 'BDL OTHER',
'SO': 'BDS OTHER',
'SOJA': 'BDL OTHER',
'TI': 'BDL OTHER',
'ULAM': 'ULPU',
'ULPA': 'ULPU',
'ULPU': 'ULPU',
'VI5': 'BDS OTHER',
'WARO': 'WARO',
'ZESE': 'ULPU'
} # END InterWABQ
,
##################################################
'LoMidWXXX': {
'AB': 'CEL OTHER',
'ABBA': 'CEL OTHER',
'ABCO': 'CEL OTHER',
'ABFR': 'CEL OTHER',
'AC': 'ACRU',
'ACCA': 'ACRU',
'ACCAQE': 'ACRU',
'ACGI': 'ACRU',
'ACNE': 'ACRU',
'ACNI': 'ACPL',
'ACPA': 'ACRU',
'ACPL': 'ACPL',
'ACPLC': 'ACPL',
'ACPLCK': 'ACPL',
'ACPS': 'ACPL',
'ACRU': 'ACRU',
'ACRUA': 'ACRU',
'ACRUG': 'ACRU',
'ACRUOG': 'ACRU',
'ACRURS': 'ACRU',
'ACSA1': 'ACSA1',
'ACSA2': 'ACSA2',
'AE': 'BDM OTHER',
'AEGL': 'BDM OTHER',
'AEHI': 'BDL OTHER',
'AIAL': 'BDL OTHER',
'ALGL': 'BDL OTHER',
'ALJU': 'BDS OTHER',
'AMCA': 'BDS OTHER',
'AMUT': 'BDS OTHER',
'ARSP': 'BDS OTHER',
'ASTR': 'BDS OTHER',
'BDL OTHER': 'FRPE',
'BDM OTHER': 'TICO',
'BDS OTHER': 'CECA',
'BE': 'BDM OTHER',
'BEAL': 'BDL OTHER',
'BEL OTHER': 'QUIL2',
'BEM OTHER': 'MAGR',
'BENI': 'BDL OTHER',
'BEPA': 'BDL OTHER',
'BES OTHER': 'ILOP',
'BUSP': 'BES OTHER',
'CA40': 'BDM OTHER',
'CABEF': 'BDM OTHER',
'CACA': 'BDM OTHER',
'CACO': 'BDL OTHER',
'CAGL': 'BDL OTHER',
'CALA': 'BDL OTHER',
'CAMO': 'BDM OTHER',
'CAOV': 'BDL OTHER',
'CASP': 'CASP',
'CECA': 'CECA',
'CEJA': 'BDL OTHER',
'CEL OTHER': 'PIST',
'CEM OTHER': 'PINI',
'CEOC': 'CEOC',
'CES OTHER': 'PICO5',
'CLLU': 'BDM OTHER',
'CO1': 'BDS OTHER',
'COAM': 'BDS OTHER',
'COCO1': 'BDS OTHER',
'COFL': 'BDS OTHER',
'CORA': 'BDS OTHER',
'CR': 'BDS OTHER',
'CRCRI': 'BDS OTHER',
'CRLA': 'BDS OTHER',
'CRPH': 'BDS OTHER',
'CRVI2': 'BDS OTHER',
'DIVI': 'BDM OTHER',
'EL1': 'BDS OTHER',
'ELAN': 'BDS OTHER',
'ELUM': 'BES OTHER',
'EUSP': 'BDS OTHER',
'EUUL': 'BDM OTHER',
'FA': 'BDL OTHER',
'FAGR': 'BDL OTHER',
'FASY': 'BDL OTHER',
'FASYP': 'BDL OTHER',
'FR': 'FRPE',
'FRAM': 'FRAM',
'FRAMAA': 'FRAM',
'FRAMCC': 'FRAM',
'FREXH': 'FRAM',
'FRNI': 'FRAM',
'FROR': 'FRPE',
'FROXA': 'FRPE',
'FRPE': 'FRPE',
'FRPES': 'FRPE',
'FRQU': 'FRPE',
'GIBI': 'BDL OTHER',
'GIBIF2': 'BDL OTHER',
'GLTR': 'GLTR',
'GLTRI': 'GLTR',
'GLTRS': 'GLTR',
'GLTRS1': 'GLTR',
'GYDI': 'BDL OTHER',
'HISP': 'BDS OTHER',
'ILOP': 'ILOP',
'JU': 'CES OTHER',
'JU1': 'JUNI',
'JUCI': 'JUNI',
'JUCO3': 'CES OTHER',
'JUNI': 'JUNI',
'JUPR': 'CES OTHER',
'JURE': 'JUNI',
'JUVI': 'CEM OTHER',
'KOPA': 'BDS OTHER',
'LA10': 'BDL OTHER',
'LADE': 'BDL OTHER',
'LISP': 'BES OTHER',
'LIST': 'BDL OTHER',
'LITU': 'BDL OTHER',
'LOSP': 'BDS OTHER',
'MA1': 'BDS OTHER',
'MA2': 'MA2',
'MAAC': 'BDL OTHER',
'MAGR': 'MAGR',
'MAPO': 'BDL OTHER',
'MAPY': 'BDS OTHER',
'MAST': 'BDS OTHER',
'MEGL': 'BDL OTHER',
'MO': 'MO',
'NYSY': 'BDM OTHER',
'OSVI': 'BDM OTHER',
'OXAR': 'BDL OTHER',
'PA19': 'BDL OTHER',
'PATO': 'BDM OTHER',
'PEL OTHER': 'PHCA',
'PEM OTHER': 'PHDA4',
'PES OTHER': 'WARO',
'PHAM': 'BDM OTHER',
'PHCA': 'PHCA',
'PHDA4': 'PHDA4',
'PI1': 'PIPU',
'PI2': 'CEL OTHER',
'PIAB': 'PIPU',
'PIBA': 'CEL OTHER',
'PICO5': 'PICO5',
'PIGL1': 'PIPU',
'PIMA': 'PIPU',
'PIMU': 'CES OTHER',
'PINI': 'PINI',
'PIPO': 'CEL OTHER',
'PIPU': 'PIPU',
'PIRE': 'CEL OTHER',
'PIRU': 'PIPU',
'PIST': 'PIST',
'PISY': 'CEL OTHER',
'PIVI': 'CEL OTHER',
'PL3': 'BDL OTHER',
'PLAC': 'BDL OTHER',
'PLOC': 'BDL OTHER',
'PO': 'PODE',
'POAL': 'PODE',
'PODE': 'PODE',
'PONI': 'PODE',
'PR': 'BDS OTHER',
'PRHAJO': 'BDS OTHER',
'PRPE1': 'BDS OTHER',
'PRSE1': 'BDL OTHER',
'PRSE2': 'BDS OTHER',
'PRSU': 'BDS OTHER',
'PSME': 'CEL OTHER',
'PY': 'PYCA',
'PYCA': 'PYCA',
'PYCAA': 'PYCA',
'PYCA_B': 'PYCA',
'QU': 'QURU',
'QUAL': 'QURU',
'QUBI': 'QURU',
'QUCO': 'QURU',
'QUIL2': 'QUIL2',
'QUIM': 'QURU',
'QUMA1': 'QURU',
'QUMU': 'QURU',
'QUPA': 'QURU',
'QUPR': 'QURU',
'QURO': 'QURU',
'QUROF': 'QURU',
'QURU': 'QURU',
'QUVE': 'QURU',
'RHCA': 'BDS OTHER',
'RHFR': 'BDS OTHER',
'RHTR': 'BDS OTHER',
'ROPS': 'BDL OTHER',
'ROVI': 'BDS OTHER',
'SA': 'BDM OTHER',
'SAAL': 'BDM OTHER',
'SOAL': 'BDM OTHER',
'SOJA': 'BDM OTHER',
'SPVA2': 'BDS OTHER',
'SYSP': 'BDS OTHER',
'TA': 'CES OTHER',
'TACA': 'CES OTHER',
'TADI': 'BDL OTHER',
'THOC': 'CEM OTHER',
'TI': 'TICO',
'TIAM': 'BDL OTHER',
'TICO': 'TICO',
'TICOG': 'TICO',
'TITO': 'TICO',
'TITOSS': 'TICO',
'TSCA': 'CEM OTHER',
'ULAM': 'ULPU',
'ULPA': 'ULPU',
'ULPU': 'ULPU',
'ULRU': 'ULPU',
'ULS': 'ULPU',
'VISP2': 'BDS OTHER',
'WARO': 'WARO',
'ZESE': 'BDL OTHER'
} # END LoMidWXXX
,
##################################################
    'MidWstMSP': {
'AC': 'ACRU',
'ACGI': 'MA2',
'ACNE': 'ACNE',
'ACNI': 'ACRU',
'ACPA': 'BDS OTHER',
'ACPL': 'ACPL',
'ACRU': 'ACRU',
'ACSA1': 'ACSA1',
'ACSA2': 'ACSA2',
'AEGL': 'ACPL',
'AL': 'MA2',
'ALJU': 'BDS OTHER',
'BDL OTHER': 'FRPE',
'BDM OTHER': 'ACPL',
'BDS OTHER': 'MA2',
'BE': 'ACPL',
'BEL OTHER': 'QUNI',
'BEM OTHER': 'MAGR',
'BENI': 'ACPL',
'BEPA': 'FRPE',
'BES OTHER': 'ILOP',
'CA1': 'FRPE',
'CA3': 'FRPE',
'CADE': 'BDL OTHER',
'CASP': 'BDL OTHER',
'CECA': 'BDS OTHER',
'CEL OTHER': 'PIPO',
'CEM OTHER': 'PINI',
'CEOC': 'CEOC',
'CES OTHER': 'PICO5',
'CLLU': 'BDM OTHER',
'CO1': 'BDS OTHER',
'COFL': 'BDS OTHER',
'FR': 'ACPL',
'FRAM': 'FRAM',
'FRNI': 'ACPL',
'FRPE': 'FRPE',
'GIBI': 'GIBI',
'GLTR': 'GLTR',
'GYDI': 'FRPE',
'HISY': 'BDS OTHER',
'ILOP': 'BES OTHER',
'ILSP': 'BES OTHER',
'JU': 'PICO5',
'JUNI': 'FRPE',
'JUVI': 'CES OTHER',
'LIST': 'BDL OTHER',
'LITU': 'BDL OTHER',
'MA2': 'MA2',
'MAGR': 'BEM OTHER',
'MAVI': 'BES OTHER',
'MO': 'MA2',
'MOAL': 'BDS OTHER',
'OSVI': 'MA2',
'PHAM': 'ACPL',
'PI1': 'PIPO',
'PIAB': 'CEL OTHER',
'PICO': 'PICO5',
'PIMA': 'PINI',
'PINI': 'PINI',
'PIPO': 'PIPO',
'PIPU': 'PINI',
'PIRE': 'PIPO',
'PIST': 'PIPO',
'PISY': 'PIPO',
'PIVI': 'CEL OTHER',
'PLOC': 'BDL OTHER',
'PO': 'FRPE',
'PODE': 'FRPE',
'PONI': 'BDL OTHER',
'POTR1': 'FRPE',
'PR': 'MA2',
'PRCE': 'BDS OTHER',
'PRSE1': 'BDS OTHER',
'PRSE2': 'BDS OTHER',
'PRVI': 'MA2',
'PY': 'MA2',
'PYCA': 'BDM OTHER',
'QU': 'FRPE',
'QUAL': 'FRPE',
'QUBI': 'ACPL',
'QUCO': 'BDL OTHER',
'QUEL': 'ACPL',
'QUMA1': 'FRPE',
'QUNI': 'QUNI',
'QUPA': 'QUPA',
'QURU': 'QURU',
'RHSP': 'MA2',
'RHSP2': 'MA2',
'ROPS': 'ACPL',
'SA': 'ACPL',
'SADI': 'BDS OTHER',
'SO': 'MA2',
'SYRE': 'MA2',
'SYSP': 'MA2',
'THOC': 'CEL OTHER',
'TI': 'FRPE',
'TIAM': 'TIAM',
'TICO': 'TICO',
'TSCA': 'CEL OTHER',
'ULAM': 'ULAM',
'ULPA': 'BDL OTHER',
'ULPU': 'ULPU',
'ULS': 'FRPE',
} # END MidWstMSP
,
##################################################
'NMtnPrFNL': {
'ABBA': 'CEL OTHER',
'ABCO': 'PIPU',
'AC': 'FRPE',
'ACFR': 'FRPE',
'ACGI': 'BDS OTHER',
'ACGL': 'FRPE',
'ACNE': 'FRPE',
'ACPL': 'ACPL',
'ACRU': 'BDL OTHER',
'ACSA1': 'ACSA1',
'ACSA2': 'ACSA2',
'ACTA': 'BDS OTHER',
'AEGL': 'ACPL',
'AEHI': 'ACPL',
'AM': 'MA2',
'BDL OTHER': 'FRPE',
'BDM OTHER': 'ACPL',
'BDS OTHER': 'MA2',
'BE': 'ACPL',
'BEL OTHER': 'QUNI',
'BEM OTHER': 'MAGR',
'BENI': 'BDL OTHER',
'BEPA': 'BDL OTHER',
'BEPE': 'BDL OTHER',
'BES OTHER': 'ILOP',
'CA1': 'FRPE',
'CA3': 'FRPE',
'CABE': 'ACPL',
'CAOV': 'FRPE',
'CASP': 'BDL OTHER',
'CECA': 'MA2',
'CEL OTHER': 'PIPU',
'CEM OTHER': 'PINI',
'CEOC': 'CEOC',
'CES OTHER': 'PICO5',
'CO1': 'MA2',
'COCO2': 'ACPL',
'CR': 'MA2',
'ELAN': 'MA2',
'FA': 'FRPE',
'FASY': 'FRPE',
'FR': 'FRPE',
'FRAM': 'FRAM',
'FRPE': 'FRPE',
'GIBI': 'FRPE',
'GLTR': 'GLTR',
'GYDI': 'GYDI',
'ILOP': 'ILOP',
'JU': 'PICO5',
'JUCI': 'FRPE',
'JUCO1': 'PICO5',
'JUMO': 'PICO5',
'JUNI': 'FRPE',
'JUSC': 'PICO5',
'JUVI': 'PICO5',
'LIST': 'FRPE',
'LITU': 'FRPE',
'LOSP': 'MA2',
'MA2': 'MA2',
'MAGR': 'MAGR',
'MAPU': 'BDM OTHER',
'MEGL': 'FRPE',
'MORU': 'BDL OTHER',
'PI1': 'PIPU',
'PI2': 'CEM OTHER',
'PIAB': 'CEL OTHER',
'PICE': 'PICO5',
'PICO': 'PINI',
'PICO5': 'PICO5',
'PIED': 'PICO5',
'PIEN': 'PIPU',
'PIFL': 'CEL OTHER',
'PIGL1': 'PIPU',
'PIMA': 'PIPU',
'PIMU': 'PICO5',
'PINI': 'PINI',
'PIPO': 'PIPO',
'PIPU': 'PIPU',
'PIRU': 'PIPU',
'PIST': 'CEL OTHER',
'PIST2': 'PICO5',
'PISY': 'PINI',
'PO': 'FRPE',
'POAC5': 'FRPE',
'POAL': 'FRPE',
'POAN': 'FRPE',
'POBA': 'BDL OTHER',
'POCA2': 'FRPE',
'POFR': 'BDL OTHER',
'PONI': 'BDL OTHER',
'POSA': 'POSA',
'POTR1': 'ACPL',
'PR': 'PR',
'PRCE': 'BDS OTHER',
'PRPA': 'MA2',
'PRVI': 'MA2',
'PSME': 'PIPU',
'PY': 'PY',
'QU': 'FRPE',
'QUBI': 'FRPE',
'QUCO': 'FRPE',
'QUMA1': 'QUMA1',
'QUMU': 'FRPE',
'QUNI': 'QUNI',
'QUPA': 'FRPE',
'QURO': 'FRPE',
'QURU': 'FRPE',
'QUSH': 'FRPE',
'RHCA': 'BDS OTHER',
'RHGL': 'BDS OTHER',
'RHSP': 'MA2',
'RHTY': 'MA2',
'ROPS': 'FRPE',
'SA': 'FRPE',
'SAAL4': 'FRPE',
'SADI': 'MA2',
'SAFR': 'FRPE',
'SO': 'MA2',
'SOAM': 'BDS OTHER',
'SOAU': 'BDS OTHER',
'SYRE': 'MA2',
'SYSP': 'MA2',
'TH9': 'PIPU',
'THPL': 'PIPU',
'TI': 'FRPE',
'TIAM': 'TIAM',
'TICO': 'TICO',
'ULAM': 'ULAM',
'ULPA': 'BDL OTHER',
'ULPU': 'ULPU',
'ULS': 'FRPE',
'WISI': 'MA2'
} # END NMtnPrFNL
,
##################################################
'NoEastXXX': {
'AB': 'CEL OTHER',
'ABBA': 'CEL OTHER',
'ABCO': 'CEL OTHER',
'ABHO': 'CEL OTHER',
'AC': 'ACPL',
'ACBU': 'ACPL',
'ACCA': 'ACPL',
'ACGI': 'ACPL',
'ACNE': 'ACPL',
'ACPA': 'ACPL',
'ACPE': 'ACPL',
'ACPL': 'ACPL',
'ACPLCO': 'ACPL',
'ACPLCR': 'ACPL',
'ACPLSC': 'ACPL',
'ACPS': 'ACPL',
'ACRU': 'ACRU',
'ACRUAR': 'ACRU',
'ACRUOC': 'ACRU',
'ACSA1': 'ACSA1',
'ACSA2': 'ACSA2',
'ACSA2GR': 'ACSA2',
'AECA': 'AEHI',
'AEHI': 'AEHI',
'AEOC': 'AEHI',
'AIAL': 'BDL OTHER',
'ALJU': 'BDS OTHER',
'AM': 'BDS OTHER',
'AMAR': 'BDS OTHER',
'AMCA': 'BDS OTHER',
'ARAR': 'CEL OTHER',
'AREX': 'CEL OTHER',
'ASTR': 'BDS OTHER',
'BDL OTHER': 'ZESE',
'BDM OTHER': 'ACRU',
'BDS OTHER': 'PRSE2',
'BE': 'BDM OTHER',
'BEAL': 'BDL OTHER',
'BELE': 'BDL OTHER',
'BEL OTHER': 'ULPA99',
'BEM OTHER': 'MAGR',
'BENI': 'BDL OTHER',
'BEPA': 'BDL OTHER',
'BEPE': 'BDM OTHER',
'BEPEGR': 'BDM OTHER',
'BEPO': 'BDM OTHER',
'BES OTHER': 'ILOP',
'BRPA': 'BDM OTHER',
'BUCA': 'BUCA',
'CA1': 'BDL OTHER',
'CABE': 'BDM OTHER',
'CABI': 'BDM OTHER',
'CACA': 'BDM OTHER',
'CACO': 'BDL OTHER',
'CADE': 'BDL OTHER',
'CAEQ': 'CEL OTHER',
'CAGL': 'BDL OTHER',
'CAJA': 'BDM OTHER',
'CAMO': 'BDM OTHER',
'CAOV': 'BDL OTHER',
'CAPU': 'BDS OTHER',
'CASP': 'BDM OTHER',
'CATE': 'BDL OTHER',
'CATO': 'BDL OTHER',
'CEAT': 'CEM OTHER',
'CECA': 'BDS OTHER',
'CEDE': 'CEL OTHER',
'CEJA': 'BDM OTHER',
'CEL OTHER': 'PIST',
'CEM OTHER': 'JUVI',
'CEOC': 'BDL OTHER',
'CES OTHER': 'PICO5',
'CHTH': 'CEM OTHER',
'CISP': 'BES OTHER',
'CLLU': 'BDM OTHER',
'CO1': 'BDS OTHER',
'CO2': 'BDS OTHER',
'COCO2': 'BDL OTHER',
'COFL': 'BDS OTHER',
'COKO': 'BDS OTHER',
'COMA': 'BDS OTHER',
'CR': 'BDS OTHER',
'CRCR': 'BDS OTHER',
'CRJA': 'CEL OTHER',
'CRMO2': 'BDS OTHER',
'CRPH': 'BDS OTHER',
'CRVI': 'BDS OTHER',
'CULE': 'CEL OTHER',
'DIVI': 'BDM OTHER',
'ELAN': 'BDS OTHER',
'ELUM': 'BES OTHER',
'EUUL': 'BDL OTHER',
'FA': 'BDL OTHER',
'FAGR': 'BDL OTHER',
'FASY': 'BDL OTHER',
'FR': 'FRPE',
'FRAM': 'FRPE',
'FRCA': 'FRPE',
'FRNI': 'FRPE',
'FRPE': 'FRPE',
'FRPES': 'FRPE',
'GIBI': 'GIBI',
'GIBI(F)': 'GIBI',
'GLTR': 'GLTR',
'GYDI': 'BDL OTHER',
'HADI': 'BDS OTHER',
'HAVI': 'BDS OTHER',
'HISY': 'BDS OTHER',
'ILCA': 'BES OTHER',
'ILOP': 'ILOP',
'ILSP': 'BES OTHER',
'JUCI': 'BDL OTHER',
'JUNI': 'BDL OTHER',
'JURE': 'BDL OTHER',
'JUVI': 'JUVI',
'KOEL': 'BDS OTHER',
'KOPA': 'BDS OTHER',
'LADE': 'BDL OTHER',
'LISP': 'BES OTHER',
'LIST': 'LIST',
'LITU': 'BDL OTHER',
'MA1': 'BEM OTHER',
'MA2': 'MA2',
'MAAM': 'BDM OTHER',
'MADE': 'BDS OTHER',
'MAGR': 'MAGR',
'MAHA': 'MA2',
'MAIS': 'MA2',
'MAPU': 'MA2',
'MASO': 'BDS OTHER',
'MAST': 'BDS OTHER',
'MEGL': 'BDL OTHER',
'MOAL': 'BDM OTHER',
'MORU': 'BDM OTHER',
'MYCE': 'BES OTHER',
'NYSY': 'BDM OTHER',
'OSVI': 'BDM OTHER',
'PATO': 'BDM OTHER',
'PEL OTHER': 'PHCA',
'PEM OTHER': 'SAPA',
'PES OTHER': 'BUCA',
'PHAM': 'BDM OTHER',
'PHCA': 'PHCA',
'PI1': 'CEL OTHER',
'PI2': 'CEL OTHER',
'PIAB': 'CEL OTHER',
'PIBA': 'CEM OTHER',
'PICL': 'CEM OTHER',
'PICO5': 'PICO5',
'PIEC': 'CEL OTHER',
'PIGL1': 'CEL OTHER',
'PIGL2': 'CEL OTHER',
'PIMA': 'CEL OTHER',
'PINI': 'CEL OTHER',
'PIPUGL': 'CEL OTHER',
'PIRI': 'CEL OTHER',
'PISE': 'CEL OTHER',
'PIST': 'PIST',
'PISY': 'CEL OTHER',
'PITA': 'CEL OTHER',
'PITH': 'CEL OTHER',
'PLAC': 'PLAC',
'PLOC': 'PLAC',
'PO': 'BDL OTHER',
'POAL': 'BDL OTHER',
'POBA': 'BDL OTHER',
'POCA': 'BDL OTHER',
'PODE': 'BDL OTHER',
'POGR': 'BDL OTHER',
'PONI': 'BDL OTHER',
'PONIIT': 'BDL OTHER',
'POTR1': 'BDL OTHER',
'POTR2': 'BDL OTHER',
'PR': 'PRSE2',
'PR2': 'PES OTHER',
'PRAM': 'PRSE2',
'PRAV': 'PRSE2',
'PRCA': 'PRSE2',
'PRCE': 'PRSE2',
'PRCI': 'PRSE2',
'PRMA': 'PRSE2',
'PRPE1': 'PRSE2',
'PRPE2': 'PRSE2',
'PRSA': 'PRSE2',
'PRSE1': 'PRSE2',
'PRSE2': 'PRSE2',
'PRSU': 'PRSE2',
'PRTR': 'PRSE2',
'PRVI': 'PRSE2',
'PRVISH': 'PRSE2',
'PRYE': 'PRSE2',
'PSME': 'CEL OTHER',
'PTTR': 'BDS OTHER',
'PYCA': 'PYCA',
'PYCAA': 'PYCA',
'PYCO': 'PYCA',
'QU': 'QUPA',
'QUAC': 'QUPA',
'QUAL': 'QUPA',
'QUAU': 'QUPA',
'QUBI': 'QUPA',
'QUCO': 'QUPA',
'QUEL': 'QUPA',
'QUFA': 'QUPA',
'QUIM': 'QUPA',
'QULY': 'QUPA',
'QUMA1': 'QUPA',
'QUMA2': 'QUPA',
'QUMU': 'QUPA',
'QUNI': 'QUPA',
'QUPA': 'QUPA',
'QUPAFA': 'QUPA',
'QUPH': 'QUPH',
'QUPR': 'QURU',
'QURO': 'QURU',
'QURU': 'QURU',
'QUSH': 'QURU',
'QUST': 'QURU',
'QUVE': 'QURU',
'QUVI': 'QURU',
'RHGL': 'BDS OTHER',
'RHTY': 'BDS OTHER',
'ROPS': 'BDL OTHER',
'SA': 'BDM OTHER',
'SAAL': 'BDM OTHER',
'SABA': 'BDM OTHER',
'SAMA': 'BDM OTHER',
'SANI': 'BDM OTHER',
'SAPA': 'SAPA',
'SESE': 'CEL OTHER',
'SOAM': 'BDS OTHER',
'SOAU': 'BDS OTHER',
'SOJA': 'BDM OTHER',
'STJA': 'BDM OTHER',
'SYRE': 'BDS OTHER',
'TA': 'CES OTHER',
'TADI': 'BDL OTHER',
'THOC': 'CEM OTHER',
'TI': 'TITO',
'TIAM': 'TITO',
'TICO': 'TICO',
'TICOG': 'TICO',
'TIPL': 'TITO',
'TITO': 'TITO',
'TSCA': 'CEM OTHER',
'ULAL': 'ULAM',
'ULAM': 'ULAM',
'ULCAHO': 'ULAM',
'ULPA': 'ULAM',
'ULPA99': 'ULPA99',
'ULPR': 'ULAM',
'ULPU': 'ULAM',
'ULRU': 'ULAM',
'ULS': 'ULAM',
'ULSE': 'ULAM',
'ULTH': 'ULAM',
'ZE': 'ZESE',
'ZESE': 'ZESE',
} # END NoEastXXX
,
##################################################
'PacfNWLOG': {
'ABGR': 'CEL OTHER',
'ABLA': 'CEL OTHER',
'ABMA': 'CEL OTHER',
'ABPI': 'CES OTHER',
'ABPR': 'CEL OTHER',
'AC': 'ACPL',
'ACBU': 'ACPL',
'ACCI': 'ACPL',
'ACMA': 'ACMA',
'ACNE': 'ACMA',
'ACPA': 'ACPL',
'ACPADI': 'ACPL',
'ACPL': 'ACPL',
'ACPLFA': 'ACPL',
'ACPLQE': 'ACPL',
'ACPLSC': 'ACPL',
'ACPS': 'ACPL',
'ACRU': 'ACRU',
'ACRUMO': 'ACRU',
'ACSA1': 'ACPL',
'ACSA2': 'ACSA2',
'AEHI': 'BDL OTHER',
'AIAL': 'BDM OTHER',
'ALJU': 'BDM OTHER',
'ALRU2': 'BDM OTHER',
'BDL OTHER': 'ACMA',
'BDM OTHER': 'TICO',
'BDS OTHER': 'PRSE2',
'BEL OTHER': 'QUAG',
'BEM OTHER': 'ILOP',
'BENI': 'BEPE',
'BEPE': 'BEPE',
'BES OTHER': 'PYKA',
'CABEF': 'CABEF',
'CACA3': 'BDM OTHER',
'CADE2': 'CADE2',
'CAIL': 'BDL OTHER',
'CASP': 'BDL OTHER',
'CEAT': 'CEL OTHER',
'CEDE': 'CEL OTHER',
'CEJA': 'BDM OTHER',
'CEL OTHER': 'PSME',
'CEM OTHER': 'CADE2',
'CEOC': 'BDL OTHER',
'CESI4': 'BDM OTHER',
'CES OTHER': 'PICO5',
'CHLA2': 'CEL OTHER',
'CHNO': 'CEL OTHER',
'CHOB': 'CES OTHER',
'CHPI': 'CES OTHER',
'CHTH': 'CES OTHER',
'COFL': 'BDS OTHER',
'COMA2': 'BDS OTHER',
'CONU2': 'BDM OTHER',
'CRDO': 'CRLA80',
'CRLA': 'CRLA80',
'CRLA80': 'CRLA80',
'CRPH': 'CRLA80',
'CULA': 'CEM OTHER',
'FASY': 'FASYAT',
'FASYAT': 'FASYAT',
'FRAM': 'FRLA',
'FRHO': 'FRLA',
'FRLA': 'FRLA',
'FROX': 'FRLA',
'FRPEM': 'FRLA',
'FRPESG': 'FRLA',
'FRVE': 'FRLA',
'GIBI': 'BDL OTHER',
'GLTR': 'BDL OTHER',
'ILAQ': 'ILOP',
'ILOP': 'ILOP',
'JUCH': 'CES OTHER',
'JURE': 'BDM OTHER',
'KOPA': 'BDM OTHER',
'LAAN2': 'BDS OTHER',
'LADE': 'BDM OTHER',
'LADEWPE': 'BDS OTHER',
'LIOR': 'BDM OTHER',
'LIST': 'BDM OTHER',
'LITU': 'BDL OTHER',
'MAFL80': 'PYAN',
'MAGR': 'BES OTHER',
'MAIO': 'PYAN',
'MAMA': 'BDM OTHER',
'MAPUEL': 'PYAN',
'MASO': 'BDS OTHER',
'MASY2': 'PYAN',
'MOAL': 'MOAL',
'PAPE': 'BDM OTHER',
'PATO': 'BDM OTHER',
'PEL OTHER': 'PHCA',
'PEM OTHER': 'PHDA4',
'PES OTHER': 'WARO',
'PHCA': 'PHCA',
'PHDA4': 'PHDA4',
'PHFR': 'BES OTHER',
'PIAB': 'CEL OTHER',
'PIAR': 'CES OTHER',
'PICO': 'PICO5',
'PICO5': 'PICO5',
'PICO6': 'PICO5',
'PIDE': 'CEL OTHER',
'PIGL1': 'CEL OTHER',
'PIMU': 'CADE2',
'PIPI2': 'CADE2',
'PIPO': 'CEL OTHER',
'PIPU': 'CEL OTHER',
'PISI': 'CEL OTHER',
'PISY': 'CEL OTHER',
'PITH': 'CEL OTHER',
'PLAC': 'ACMA',
'POAL': 'POTR2',
'POALPY': 'POTR2',
'PONI': 'POTR2',
'POTR1': 'POTR2',
'POTR2': 'POTR2',
'PRAV': 'PRSE2',
'PRBL': 'PRSE2',
'PRCE': 'PRCEKW',
'PRCEKW': 'PRCEKW',
'PRDO': 'PRSE2',
'PRLA': 'PRSE2',
'PRPE2': 'PRSE2',
'PRSE2': 'PRSE2',
'PRSEAM': 'PRSE2',
'PRSESH': 'PRSE2',
'PRSESO': 'PRSE2',
'PRSU': 'PRSE2',
'PRYE': 'BDM OTHER',
'PSME': 'PSME',
'PYAN': 'PYAN',
'PYCACL': 'PYKA',
'PYCO': 'PYKA',
'PYKA': 'PYKA',
'QUAG': 'QUAG',
'QUAL': 'QURU',
'QUCO': 'QURU',
'QUMA1': 'QURU',
'QUPA': 'QURU',
'QURU': 'QURU',
'RHGL': 'BDS OTHER',
'RHTY': 'BDS OTHER',
'ROPS': 'BDM OTHER',
'SAAM': 'BDM OTHER',
'SABA': 'BDM OTHER',
'SACANE': 'BDS OTHER',
'SAMA': 'BDM OTHER',
'SASC': 'BDM OTHER',
'SCVE': 'PSME',
'SEGI': 'PSME',
'SESE': 'PSME',
'SOAU': 'BDS OTHER',
'SYRE': 'BDS OTHER',
'SYVU': 'BDS OTHER',
'TABA': 'CES OTHER',
'TABR': 'PSME',
'TADI': 'BDL OTHER',
'THOC': 'PSME',
'THPL': 'PSME',
'TIAM': 'TIAM',
'TICO': 'TICO',
'TIHE': 'TIAM',
'TSHE': 'PSME',
'TSME': 'CES OTHER',
'ULAM': 'ULAM',
'ULAMLI': 'ULAM',
'ULPR': 'ULAM',
'ULPU': 'ULAM',
'WARO': 'WARO',
'WISI': 'BDS OTHER'
} # END PacfNWLOG
,
##################################################
'PiedmtCLT': {
'AB': 'CEL OTHER',
'ABCO': 'CEL OTHER',
'ACBA2': 'BES OTHER',
'ACBU': 'BDS OTHER',
'ACCA': 'BDM OTHER',
'ACFR': 'BDL OTHER',
'ACGI': 'BDS OTHER',
'ACGR': 'BDS OTHER',
'ACMA': 'BDL OTHER',
'ACNE': 'BDM OTHER',
'ACNI': 'BDL OTHER',
'ACPA': 'BDS OTHER',
'ACPL': 'BDL OTHER',
'ACRU': 'ACRU',
'ACSA1': 'ACSA1',
'ACSA2': 'ACSA2',
'ACTR': 'BDS OTHER',
'AEGL': 'BDL OTHER',
'AEHI': 'BDL OTHER',
'AEOC': 'BDL OTHER',
'AEPA': 'BDS OTHER',
'AIAL': 'BDL OTHER',
'ALJU': 'BDS OTHER',
'AM': 'BDS OTHER',
'AMAR': 'BDS OTHER',
'ARAR': 'CEL OTHER',
'ASTR': 'BDS OTHER',
'AU1': 'BES OTHER',
'BDL OTHER': 'ACSA2',
'BDM OTHER': 'BENI',
'BDS OTHER': 'COFL',
'BELE': 'BDM OTHER',
'BEL OTHER': 'QUNI',
'BEM OTHER': 'MAGR',
'BENI': 'BENI',
'BEPA': 'BDM OTHER',
'BEPE': 'BDM OTHER',
'BEPL2': 'BDM OTHER',
'BES OTHER': 'ILOP',
'BEUT2': 'BDM OTHER',
'BRPA': 'BDM OTHER',
'BUDA2': 'BDS OTHER',
'BUSP': 'BES OTHER',
'CA1': 'BDL OTHER',
'CABE': 'BDM OTHER',
'CACA': 'BDM OTHER',
'CACO': 'BDL OTHER',
'CADE': 'BDL OTHER',
'CAGL': 'BDL OTHER',
'CAIL': 'BDL OTHER',
'CAJA9': 'BES OTHER',
'CAMO': 'BDM OTHER',
'CAOV': 'BDL OTHER',
'CASP': 'BDM OTHER',
'CATO': 'BDL OTHER',
'CE2': 'BDL OTHER',
'CEAT': 'CEM OTHER',
'CECA': 'BDS OTHER',
'CEDE': 'CEL OTHER',
'CEJA': 'BDL OTHER',
'CELA': 'CEL OTHER',
'CEL OTHER': 'PIEC',
'CEM OTHER': 'JUVI',
'CEOC': 'BDL OTHER',
'CES OTHER': 'PICO5',
'CHLA2': 'CEL OTHER',
'CHPI': 'CES OTHER',
'CHRE': 'BDS OTHER',
'CHTH': 'CEM OTHER',
'CHVI': 'BDS OTHER',
'CLLU': 'BDM OTHER',
'CLTR': 'BDS OTHER',
'CO1': 'COFL',
'COAL': 'BDS OTHER',
'COCO1': 'BDS OTHER',
'COFL': 'COFL',
'COKO': 'BDS OTHER',
'COMA': 'BDS OTHER',
'CR': 'BDS OTHER',
'CRJA': 'CEL OTHER',
'CRPH': 'BDS OTHER',
'CRVI': 'BDS OTHER',
'CULA': 'CEL OTHER',
'CULE': 'CEL OTHER',
'DIVI': 'BDM OTHER',
'ELUM': 'BES OTHER',
'EU1': 'BEL OTHER',
'FAGR': 'BDL OTHER',
'FASY': 'BDL OTHER',
'FICA': 'BDS OTHER',
'FISI': 'BDM OTHER',
'FORS': 'BDS OTHER',
'FRAM': 'BDL OTHER',
'FRNI': 'BDM OTHER',
'FRPE': 'BDL OTHER',
'FRQU': 'BDL OTHER',
'GIBI': 'BDM OTHER',
'GLTR': 'BDL OTHER',
'GYDI': 'BDL OTHER',
'HA4': 'BES OTHER',
'HACA': 'BDM OTHER',
'HAVI': 'BDS OTHER',
'HISY': 'BDS OTHER',
'ILAQ': 'BES OTHER',
'ILCA': 'BES OTHER',
'ILCO2': 'BES OTHER',
'ILOP': 'ILOP',
'ILSP': 'BES OTHER',
'JU': 'CEM OTHER',
'JUNI': 'BDL OTHER',
'JURE': 'BDM OTHER',
'JUVI': 'JUVI',
'KOPA': 'BDS OTHER',
'LA6': 'LA6',
'LADE': 'BDL OTHER',
'LISP': 'ILOP',
'LIST': 'LIST',
'LITU': 'BDL OTHER',
'MA1': 'BDM OTHER',
'MA2': 'MA2',
'MAAC': 'BDL OTHER',
'MABE': 'BES OTHER',
'MAGR': 'MAGR',
'MAPO': 'BDM OTHER',
'MASO': 'BDS OTHER',
'MAST': 'BDS OTHER',
'MASY2': 'BDS OTHER',
'MATS': 'BDS OTHER',
'MAVI': 'BEM OTHER',
'MEAZ': 'BDM OTHER',
'MEGL': 'BDL OTHER',
'MO': 'BDM OTHER',
'MORU': 'BDL OTHER',
'MYCE2': 'BES OTHER',
'NYSY': 'BDM OTHER',
'OSVI': 'BDM OTHER',
'PATO': 'BDM OTHER',
'PEL OTHER': 'PHDA4',
'PEM OTHER': 'WARO',
'PES OTHER': 'WAFI',
'PHAM': 'BDM OTHER',
'PHDA4': 'PHDA4',
'PHFR': 'BES OTHER',
'PHSP2': 'BES OTHER',
'PI1': 'CEM OTHER',
'PI2': 'CEL OTHER',
'PIAB': 'CEL OTHER',
'PICH': 'BDM OTHER',
'PICO5': 'PICO5',
'PICO6': 'CEL OTHER',
'PIEC': 'PIEC',
'PIGL1': 'CEM OTHER',
'PIMU': 'CES OTHER',
'PINI': 'CEM OTHER',
'PIPA': 'CEL OTHER',
'PIPU': 'CEM OTHER',
'PIRE': 'CEL OTHER',
'PIST': 'CEL OTHER',
'PISY': 'CEM OTHER',
'PITA': 'PITA',
'PIVI': 'CEM OTHER',
'PLAC': 'BDL OTHER',
'PLOC': 'BDL OTHER',
'POAL': 'BDL OTHER',
'POBA': 'BDL OTHER',
'PODE': 'BDL OTHER',
'PONI': 'BDM OTHER',
'PR': 'PR',
'PRCA': 'BES OTHER',
'PRCA2': 'BDS OTHER',
'PRCE': 'BDS OTHER',
'PRPA': 'BDM OTHER',
'PRPE2': 'BDS OTHER',
'PRSE1': 'BDL OTHER',
'PRSE2': 'BDS OTHER',
'PRSU': 'BDS OTHER',
'PRTO': 'BDS OTHER',
'PRYE': 'PRYE',
'PSME': 'CEL OTHER',
'PY': 'PYCA',
'PYCA': 'PYCA',
'PYCO': 'BDM OTHER',
'PYKO': 'BES OTHER',
'PYSP': 'BES OTHER',
'QUAC': 'BDM OTHER',
'QUAL': 'QUAL',
'QUBI': 'BDL OTHER',
'QUCO': 'BDL OTHER',
'QUEL': 'BDL OTHER',
'QUFA': 'BDL OTHER',
'QUHE': 'BEM OTHER',
'QUIM': 'BDL OTHER',
'QULY': 'BDM OTHER',
'QUMA1': 'BDL OTHER',
'QUMA2': 'BDM OTHER',
'QUMI': 'BDL OTHER',
'QUMU': 'BDM OTHER',
'QUNI': 'QUNI',
'QUPA': 'BDL OTHER',
'QUPH': 'QUPH',
'QURO': 'BDL OTHER',
'QURU': 'QURU',
'QUSH': 'BDL OTHER',
'QUST': 'BDL OTHER',
'QUVE': 'BDL OTHER',
'QUVI': 'BEL OTHER',
'RHSP': 'BDS OTHER',
'RHSP2': 'BDS OTHER',
'ROBA': 'BDS OTHER',
'ROPS': 'BDM OTHER',
'SA': 'BDM OTHER',
'SAAL': 'BDM OTHER',
'SAGR': 'BDS OTHER',
'SAMA': 'BDM OTHER',
'SANI': 'BDM OTHER',
'SAPA': 'WARO',
'SAPE12': 'BDM OTHER',
'SERE2': 'PES OTHER',
'SOAU': 'BDS OTHER',
'SOJA': 'BDM OTHER',
'STJA': 'BDS OTHER',
'SYRE': 'BDS OTHER',
'SYSP': 'BDS OTHER',
'TADI': 'BDL OTHER',
'THOC': 'CEM OTHER',
'THPL': 'CEL OTHER',
'TIAM': 'BDL OTHER',
'TICO': 'BDM OTHER',
'TOTA': 'CES OTHER',
'TRSE6': 'BDS OTHER',
'TSCA': 'CEM OTHER',
'ULAL': 'ULAL',
'ULAM': 'BDL OTHER',
'ULPA': 'BDM OTHER',
'ULPU': 'BDL OTHER',
'ULRU': 'BDM OTHER',
'ULS': 'BDL OTHER',
'VIAG': 'BDS OTHER',
'VIPR': 'BDS OTHER',
'VISP2': 'BDS OTHER',
'WAFI': 'WAFI',
'WARO': 'WARO',
'YU1': 'PES OTHER',
'ZESE': 'BDL OTHER',
} # END PiedmtCLT
,
##################################################
'SoCalCSMA': {
'AC': 'BDL OTHER',
'ACBA2': 'BEM OTHER',
'ACBU': 'BDS OTHER',
'ACDE': 'BEM OTHER',
'ACLO': 'BEM OTHER',
'ACME': 'BEL OTHER',
'ACNE': 'BDL OTHER',
'ACOB': 'BDS OTHER',
'ACPA': 'BDL OTHER',
'ACPE1': 'BDL OTHER',
'ACRU': 'BDL OTHER',
'ACSA1': 'BDL OTHER',
'AGFL': 'BES OTHER',
'ALCO2': 'BDM OTHER',
'ALEX': 'BES OTHER',
'ALJU': 'BDS OTHER',
'ALRH': 'BDM OTHER',
'ANCH4': 'BES OTHER',
'ARBI': 'CEL OTHER',
'ARCO24': 'CEL OTHER',
'ARCU': 'PES OTHER',
'ARHE': 'CEL OTHER',
'ARRO': 'WARO',
'ARUN': 'BES OTHER',
'BABL': 'BES OTHER',
'BAPU': 'BES OTHER',
'BAVA': 'BDS OTHER',
'BDL OTHER': 'PLAC',
'BDM OTHER': 'LIST',
'BDS OTHER': 'JAMI',
'BE': 'BDM OTHER',
'BEL OTHER': 'EUFI81',
'BEM OTHER': 'CICA',
'BENI': 'BDM OTHER',
'BEPE': 'BDM OTHER',
'BERE': 'PES OTHER',
'BES OTHER': 'POMA',
'BIJA': 'BEL OTHER',
'BR': 'PES OTHER',
'BR1': 'BDM OTHER',
'BRAC': 'BES OTHER',
'BRAC2': 'BDM OTHER',
'BRAR': 'PES OTHER',
'BRED': 'PES OTHER',
'BRPO': 'BEM OTHER',
'BRPA': 'BDM OTHER',
'BRPO': 'CICA',
'BUCA': 'PES OTHER',
'CA4': 'BDL OTHER',
'CA52': 'CEL OTHER',
'CACA3': 'BEM OTHER',
'CACI': 'CACI',
'CACU8': 'CEDE',
'CADE2': 'CEL OTHER',
'CAED': 'BEL OTHER',
'CAEX': 'BES OTHER',
'CALE': 'BES OTHER',
'CASA5': 'BES OTHER',
'CASP11': 'BES OTHER',
'CAST': 'CEL OTHER',
'CATW': 'BES OTHER',
'CAVI': 'BES OTHER',
'CE2': 'BDL OTHER',
'CEAT': 'CEDE',
'CECA': 'BDS OTHER',
'CEDE': 'CEDE',
'CEFI2': 'BDM OTHER',
'CEL OTHER': 'CEDE',
'CEM OTHER': 'PIBR2',
'CEOC2': 'BDS OTHER',
'CESI3': 'CESI3',
'CES OTHER': 'PICO5',
'CH': 'CEL OTHER',
'CHHU': 'PES OTHER',
'CHSP': 'BEM OTHER',
'CICA': 'CICA',
'CILI': 'BES OTHER',
'CISI': 'BES OTHER',
'COAU': 'PES OTHER',
'COLA': 'BES OTHER',
'COLA18': 'BES OTHER',
'CRJA': 'CEL OTHER',
'CRPA': 'BDS OTHER',
'CRRU': 'BES OTHER',
'CU': 'CEL OTHER',
'CUAN': 'CUAN',
'CULE': 'CES OTHER',
'CUMA': 'CEL OTHER',
'CUSE': 'CEL OTHER',
'CYRE11': 'CES OTHER',
'DIVI': 'BDS OTHER',
'DOVI': 'BES OTHER',
'DRDR': 'PES OTHER',
'DUER': 'BES OTHER',
'ERBI': 'BEL OTHER',
'ERCA': 'BEL OTHER',
'ERCO': 'BEL OTHER',
'ERDE': 'BES OTHER',
'ERFA': 'BEM OTHER',
'ERHU4': 'BDS OTHER',
'ERJA': 'BES OTHER',
'ERLY': 'BEL OTHER',
'EU1': 'BEL OTHER',
'EUCA': 'BEL OTHER',
'EUCA1': 'BEL OTHER',
'EUCI': 'BEM OTHER',
'EUCI2': 'BEL OTHER',
'EUCL': 'BEL OTHER',
'EUCO24': 'BES OTHER',
'EUCO3': 'BEL OTHER',
'EUER': 'BEM OTHER',
'EUFI81': 'EUFI81',
'EUGL': 'BEL OTHER',
'EUGR': 'BEL OTHER',
'EULE': 'BEM OTHER',
'EULE2': 'BES OTHER',
'EUMA23': 'BEL OTHER',
'EUNI': 'BEM OTHER',
'EUOC9': 'BEL OTHER',
'EUPO': 'BEL OTHER',
'EURU': 'BEL OTHER',
'EUSI': 'BEL OTHER',
'EUSM': 'BES OTHER',
'EUTO11': 'BEM OTHER',
'EUVI': 'BEL OTHER',
'FESE': 'BES OTHER',
'FI1': 'BEM OTHER',
'FIAL5': 'BES OTHER',
'FIBE': 'BEM OTHER',
'FICA': 'BDS OTHER',
'FIEL': 'BEM OTHER',
'FILY': 'BES OTHER',
'FIMA2': 'BEL OTHER',
'FIMI': 'FIMI',
'FIRE4': 'BEM OTHER',
'FIRU': 'BEM OTHER',
'FISI': 'BDM OTHER',
'FIWA': 'BEL OTHER',
'FRMA6': 'BDM OTHER',
'FRUH': 'BEL OTHER',
'FRVE': 'BEL OTHER',
'GEPA': 'BES OTHER',
'GIBI': 'BDM OTHER',
'GRRO': 'BEL OTHER',
'HACA3': 'BES OTHER',
'HALA': 'BES OTHER',
'HASU': 'CES OTHER',
'HEAR': 'BES OTHER',
'HIRO-SI': 'BES OTHER',
'HOFO': 'PEM OTHER',
'HYFL': 'BES OTHER',
'ILAL': 'BES OTHER',
'JAMI': 'JAMI',
'JUCH': 'CES OTHER',
'JUHI': 'BDL OTHER',
'JURE': 'BDM OTHER',
'KOBI': 'BDM OTHER',
'KOPA': 'BDM OTHER',
'LAIN': 'BDS OTHER',
'LAPA': 'BEM OTHER',
'LE14': 'BES OTHER',
'LELA12': 'BES OTHER',
'LIAU9': 'BDM OTHER',
'LILU': 'BEM OTHER',
'LIOR': 'BDM OTHER',
'LIST': 'LIST',
'LITU': 'BDL OTHER',
'LYRA': 'BES OTHER',
'MABO': 'BEM OTHER',
'MAFL80': 'BDS OTHER',
'MAGR': 'MAGR',
'MAIN8': 'BES OTHER',
'MALU4': 'BDS OTHER',
'MAPU': 'BDS OTHER',
'MASO': 'BDS OTHER',
'MASY2': 'BDS OTHER',
'MATE': 'BES OTHER',
'MATI': 'BDS OTHER',
'MEAR': 'BES OTHER',
'MEBR9': 'BES OTHER',
'MEEX': 'MEEX',
'MENE': 'BES OTHER',
'MEQU': 'MEQU',
'MEST': 'BEM OTHER',
'MOAL': 'BDL OTHER',
'MU5': 'BES OTHER',
'MYCO': 'BES OTHER',
'MYLA': 'BES OTHER',
'NEOL': 'CACI',
'OLEU': 'BES OTHER',
'PALO8': 'BDS OTHER',
'PEAM': 'BDS OTHER',
'PEBO': 'BDS OTHER',
'PEIN17': 'BES OTHER',
'PEL OTHER': 'PHCA',
'PEM OTHER': 'PHDA4',
'PES OTHER': 'WARO',
'PHCA': 'PHCA',
'PHDA4': 'PHDA4',
'PHFR': 'BES OTHER',
'PHRE': 'PEM OTHER',
'PHRO': 'PEM OTHER',
'PI1': 'CEL OTHER',
'PIBR2': 'PIBR2',
'PICA': 'PICA',
'PICH': 'BDM OTHER',
'PICO5': 'PICO5',
'PICR': 'BEM OTHER',
'PIDE': 'CEL OTHER',
'PIED': 'CES OTHER',
'PIHA': 'CEL OTHER',
'PIPI2': 'CEL OTHER',
'PIPI6': 'CEL OTHER',
'PIRA': 'CEL OTHER',
'PIRH': 'BEM OTHER',
'PIRO': 'BES OTHER',
'PITH': 'CES OTHER',
'PITO': 'BEM OTHER',
'PITO2': 'CEL OTHER',
'PIUN': 'PIUN',
'PIVI5': 'BEM OTHER',
'PLAC': 'PLAC',
'PLRA': 'BDL OTHER',
'POAL': 'BDL OTHER',
'POGR2': 'POMA',
'POHE2': 'BES OTHER',
'POMA': 'POMA',
'PONI': 'BDL OTHER',
'PR': 'BDS OTHER',
'PRAM2': 'BDS OTHER',
'PRAR': 'BDS OTHER',
'PRCA': 'BES OTHER',
'PRCA2': 'BDS OTHER',
'PRCE': 'BDS OTHER',
'PRDO': 'BDS OTHER',
'PRDU': 'BDS OTHER',
'PRLY': 'BDS OTHER',
'PRPE2': 'BDS OTHER',
'PRSE2': 'BDS OTHER',
'PSCA': 'BES OTHER',
'PSME': 'CEL OTHER',
'PYCA': 'BES OTHER',
'PYCO': 'BES OTHER',
'PYKA': 'BES OTHER',
'QUAG': 'BEL OTHER',
'QUIL2': 'BEL OTHER',
'QURU': 'BDL OTHER',
'QUSA2': 'BES OTHER',
'QUSU': 'BEL OTHER',
'QUVI': 'BEL OTHER',
'RARI': 'PES OTHER',
'RASA5': 'BEL OTHER',
'RHBA7': 'PES OTHER',
'RHLA': 'BES OTHER',
'ROPS': 'BDL OTHER',
'SAMA': 'BDL OTHER',
'SAPE12': 'BDL OTHER',
'SASA': 'BES OTHER',
'SCMO': 'BEM OTHER',
'SCPO': 'BES OTHER',
'SCTE': 'SCTE',
'SESE': 'CEL OTHER',
'SPCA': 'BEL OTHER',
'STNI': 'BES OTHER',
'STSI': 'BES OTHER',
'SYPA2': 'BEM OTHER',
'TACH3': 'BDM OTHER',
'TAMU': 'BDL OTHER',
'THOR': 'CES OTHER',
'THPE3': 'BES OTHER',
'TRCO': 'TRCO',
'TRFO': 'PES OTHER',
'TRLA': 'BES OTHER',
'TUCA': 'BES OTHER',
'ULAM': 'BDL OTHER',
'ULPA': 'BEL OTHER',
'ULPU': 'BEL OTHER',
'UMCA': 'BEL OTHER',
'VILU6': 'BEL OTHER',
'VITI2': 'BES OTHER',
'WAFI': 'WARO',
'WARO': 'WARO',
'XYCO': 'PES OTHER',
'YU1': 'PES OTHER',
'YUGL2': 'PES OTHER',
'YUGU': 'PES OTHER',
} # END SoCalCSMA
,
##################################################
'SWDsrtGDL': {
'ACAN': 'ACSA3',
'ACFA': 'ACFA',
'ACMI': 'ACSA3',
'ACSA': 'ACSA3',
'ACSA3': 'ACSA3',
'ACSP2': 'ACSA3',
'ACST': 'BES OTHER',
'ALJU': 'BDM OTHER',
'ARRO': 'PES OTHER',
'BAVA': 'BDS OTHER',
'BDL OTHER': 'FRUH',
'BDM OTHER': 'PRCH',
'BDS OTHER': 'ACFA',
'BEL OTHER': 'EUMI2',
'BEM OTHER': 'BRPO',
'BES OTHER': 'RHLA',
'BRAR': 'PES OTHER',
'BRPO': 'BRPO',
'CAEQ': 'CEL OTHER',
'CAIL': 'BDL OTHER',
'CAVI': 'BES OTHER',
'CEFL': 'CEFL',
'CEL OTHER': 'PIHA',
'CEMI': 'CEFL',
'CEM OTHER': 'PIBR2',
'CEPR': 'CEFL',
'CESI3': 'BEM OTHER',
'CES OTHER': 'PICO5',
'CHHU': 'BES OTHER',
'CHLI': 'CHLI',
'CISP': 'BES OTHER',
'CUGU': 'CEL OTHER',
'CUSE': 'CEL OTHER',
'CYOB': 'BDS OTHER',
'DASI': 'BEM OTHER',
'EBEB': 'BES OTHER',
'EU1': 'EUMI2',
'EUCA1': 'EUMI2',
'EULE': 'EUMI2',
'EUMI2': 'EUMI2',
'EUPO': 'BEL OTHER',
'EURU': 'EUMI2',
'EUSI': 'EUMI2',
'EUSP': 'EUMI2',
'EUTO11': 'EUMI2',
'FIBE': 'BES OTHER',
'FICA': 'BDS OTHER',
'FIRE4': 'BEM OTHER',
'FRUH': 'FRUH',
'FRVE': 'FRVE',
'GEPA': 'BES OTHER',
'GLTR': 'BDL OTHER',
'GRRO': 'BEL OTHER',
'JAMI': 'BDM OTHER',
'JU': 'CES OTHER',
'LAIN': 'BDS OTHER',
'LILU': 'BES OTHER',
'LYMI': 'PRCH',
'MA2': 'BDS OTHER',
'MAGR': 'BEM OTHER',
'MATI': 'BDM OTHER',
'MEAZ': 'BDM OTHER',
'MOAL': 'MOAL',
'MYCO': 'BES OTHER',
'NEOL': 'BES OTHER',
'OLEU': 'OLEU',
'OLTE': 'BES OTHER',
'OTHER': 'BES OTHER',
'PAAC': 'PAAC',
'PEL OTHER': 'PHCA',
'PEM OTHER': 'PHDA4',
'PES OTHER': 'WARO',
'PHCA': 'PHCA',
'PHDA4': 'PHDA4',
'PI2': 'CEL OTHER',
'PIBR2': 'PIBR2',
'PICA': 'CEL OTHER',
'PICH': 'PICH',
'PICO5': 'PICO5',
'PIEL2': 'PIEL2',
'PIHA': 'PIHA',
'PIRO': 'CEL OTHER',
'PLRA': 'BDL OTHER',
'PLWR': 'BDL OTHER',
'PO': 'BDL OTHER',
'POBAB2': 'BDL OTHER',
'POFR': 'BDL OTHER',
'PR': 'BDS OTHER',
'PR6': 'PRCH',
'PRAL2': 'BEM OTHER',
'PRAR': 'BDS OTHER',
'PRCE': 'BDS OTHER',
'PRCH': 'PRCH',
'PRDU': 'BDS OTHER',
'PRGL2': 'PRCH',
'PRPE2': 'BDS OTHER',
'PRPU2': 'PRCH',
'PRVE': 'PRCH',
'PYCO': 'BDM OTHER',
'PYKA': 'BES OTHER',
'QUMU': 'BDL OTHER',
'QUSU': 'BEL OTHER',
'QUVI': 'QUVI',
'RHLA': 'RHLA',
'SA': 'BDS OTHER',
'SABA': 'BDM OTHER',
'SCMO': 'BEM OTHER',
'SOSE': 'BES OTHER',
'TACH2': 'BDS OTHER',
'TAMU': 'CEL OTHER',
'THOR': 'BES OTHER',
'THPE3': 'BES OTHER',
'ULPA': 'ULPA',
'VIAG': 'BDS OTHER',
'WAFI': 'WAFI',
'WARO': 'WARO'
} # END SWDsrtGDL
,
##################################################
'TpIntWBOI': {
'AB': 'CEL OTHER',
'ABAL': 'CEL OTHER',
'ABCO': 'CEL OTHER',
'ABHO': 'CEL OTHER',
'ABHO2': 'CEL OTHER',
'AC': 'ACSA1',
'ACCA': 'ACPL',
'ACGI': 'ACPL',
'ACGR': 'ACPL',
'ACGR3': 'ACPL',
'ACNE': 'ACSA1',
'ACNI': 'ACSA1',
'ACPL': 'ACPL',
'ACPS': 'ACPL',
'ACRU': 'ACPL',
'ACSA1': 'ACSA1',
'ACSA2': 'ACSA2',
'ACTR': 'ACPL',
'AEGL': 'BDM OTHER',
'AEHI': 'BDM OTHER',
'AIAL': 'BDL OTHER',
'ALCO2': 'BDM OTHER',
'ALGL': 'BDM OTHER',
'ALJU': 'BDS OTHER',
'AMAR': 'BDS OTHER',
'BDL OTHER': 'FRPE',
'BDM OTHER': 'ACPL',
'BDS OTHER': 'MA2',
'BE': 'BDM OTHER',
'BENI': 'BDL OTHER',
'BEPA': 'BDL OTHER',
'BEPE': 'BDM OTHER',
'BES OTHER': 'ILOP',
'CA1': 'BDL OTHER',
'CABE': 'BDM OTHER',
'CABI': 'BDL OTHER',
'CACA': 'BDS OTHER',
'CADE': 'BDL OTHER',
'CASP': 'CASP',
'CE7': 'CEM OTHER',
'CEAT': 'CEM OTHER',
'CECA': 'BDS OTHER',
'CEDE': 'CEL OTHER',
'CEJA': 'BDM OTHER',
'CEL OTHER': 'PIPU',
'CEM OTHER': 'PISY',
'CEOC': 'BDM OTHER',
'CES OTHER': 'PIED',
'CLLU': 'BDS OTHER',
'CO1': 'BDS OTHER',
'CO2': 'BDM OTHER',
'COCO1': 'BDS OTHER',
'COCO2': 'BDM OTHER',
'COFL': 'BDS OTHER',
'COOB': 'BDS OTHER',
'CR': 'CR',
'CRLA80': 'CR',
'CROX': 'CR',
'CRPH': 'CR',
'CRVI': 'CR',
'CU': 'CEM OTHER',
'DIVI': 'BDS OTHER',
'ELAN': 'BDS OTHER',
'EUUL': 'BDM OTHER',
'FA': 'BDM OTHER',
'FASY': 'BDM OTHER',
'FR': 'FRPE',
'FRAM': 'FRAM',
'FREX': 'FRPE',
'FRMA': 'FRPE',
'FRNI': 'FRPE',
'FROR': 'FRPE',
'FROX': 'FRPE',
'FRPE': 'FRPE',
'FRQU': 'FRPE',
'GIBI': 'BDM OTHER',
'GLCA': 'GLTR',
'GLTR': 'GLTR',
'GYDI': 'BDL OTHER',
'ILOP': 'ILOP',
'JU': 'CEM OTHER',
'JUNI': 'JUNI',
'JURE': 'JUNI',
'JUSC': 'CES OTHER',
'JUVI': 'CEM OTHER',
'KOPA': 'BDS OTHER',
'LADE': 'BDL OTHER',
'LAWA': 'BDS OTHER',
'LIST': 'LIST',
'LITU': 'BDL OTHER',
'MA1': 'BDS OTHER',
'MA2': 'MA2',
'MAAC': 'BDL OTHER',
'MAAM': 'BDS OTHER',
'MAPU': 'MA2',
'MOAL': 'BDM OTHER',
'MORU': 'BDM OTHER',
'PHAM': 'BDM OTHER',
'PI1': 'PIPU',
'PI2': 'CEL OTHER',
'PIAB': 'CEL OTHER',
'PIAS': 'PIPU',
'PIAT': 'CEM OTHER',
'PICE2': 'CEM OTHER',
'PICO2': 'CEM OTHER',
'PIED': 'PIED',
'PIEN': 'CEL OTHER',
'PIGL1': 'PIPU',
'PIGL2': 'CEM OTHER',
'PIMA': 'CEL OTHER',
'PIMO3': 'CEM OTHER',
'PIMU': 'CES OTHER',
'PINI': 'CEM OTHER',
'PIOM': 'PIPU',
'PIOR': 'PIPU',
'PIPO': 'CEL OTHER',
'PIPU': 'PIPU',
'PISE': 'CEL OTHER',
'PIST': 'CEL OTHER',
'PISY': 'PISY',
'PIWA3': 'CEM OTHER',
'PLAC': 'PLAC',
'PLOC': 'PLOC',
'PO': 'BDL OTHER',
'POAL': 'BDL OTHER',
'POBA': 'BDL OTHER',
'POCA2': 'BDL OTHER',
'PODE': 'BDL OTHER',
'POGR': 'BDS OTHER',
'PONI': 'BDL OTHER',
'POTR1': 'BDM OTHER',
'POTR2': 'BDL OTHER',
'PR': 'MA2',
'PRAR': 'MA2',
'PRBL': 'MA2',
'PRCE': 'MA2',
'PRPE2': 'MA2',
'PRSA': 'MA2',
'PRSE2': 'MA2',
'PRTR': 'MA2',
'PRVI': 'MA2',
'PSME': 'CEL OTHER',
'PTCO': 'BDS OTHER',
'PTTR': 'BDS OTHER',
'PYCA': 'PYCA',
'PYCO': 'PYCA',
'PYFA': 'PYCA',
'PYIO': 'MA2',
'QU': 'QURU',
'QUAC': 'QURU',
'QUAL': 'QURU',
'QUAL3': 'QURU',
'QUBI': 'QURU',
'QUCO': 'QURU',
'QUFA': 'QURU',
'QUKE': 'QURU',
'QULA2': 'QURU',
'QUMA1': 'QURU',
'QUMU': 'QURU',
'QUPA': 'QURU',
'QUPH': 'QURU',
'QURO': 'QURU',
'QURU': 'QURU',
'QUSE': 'QURU',
'QUVE': 'QURU',
'QUVI': 'QURU',
'RHSP': 'BDS OTHER',
'RHTY': 'BDS OTHER',
'ROPS': 'ROPS',
'SA': 'BDM OTHER',
'SAMA': 'BDS OTHER',
'SANI': 'BDM OTHER',
'SCVE': 'CEM OTHER',
'SEGI': 'CEL OTHER',
'SOAM': 'BDS OTHER',
'SOAU': 'BDS OTHER',
'SOJA': 'BDM OTHER',
'SYRE': 'BDS OTHER',
'TADI': 'BDL OTHER',
'THOC': 'CEL OTHER',
'THPL': 'CEL OTHER',
'TI': 'TIAM',
'TIAM': 'TIAM',
'TICO': 'TIAM',
'TITO': 'TIAM',
'TSCA': 'CEL OTHER',
'ULAM': 'ULPU',
'ULGL': 'ULPU',
'ULPA': 'ULPU',
'ULPR': 'ULPU',
'ULPU': 'ULPU',
'ULS': 'ULPU'
} # END TpIntWBOI
,
##################################################
'TropicPacXXX': {
'ACCO': 'BEM OTHER',
'ACKO': 'BEL OTHER',
'ACWR': 'PES OTHER',
'AGVI14': 'CEL OTHER',
'ALFA': 'BEL OTHER',
'ALJU': 'BDM OTHER',
'ALLE': 'BDL OTHER',
'ALMO': 'BEL OTHER',
'AMNO4': 'BEM OTHER',
'ANIM': 'BEL OTHER',
'ANMU': 'BES OTHER',
'ANRE': 'BDM OTHER',
'ANSQ': 'BDS OTHER',
'ARAL': 'PES OTHER',
'ARAL2': 'BEL OTHER',
'ARCO24': 'CEL OTHER',
'ARCU': 'PES OTHER',
'ARHE': 'CEL OTHER',
'ARHE2': 'BEL OTHER',
'AVBI': 'BEL OTHER',
'AVCA': 'BES OTHER',
'AZIN2': 'BEL OTHER',
'BA13': 'BEM OTHER',
'BABL': 'BABL',
'BAHO3': 'BES OTHER',
'BAPU': 'BEM OTHER',
'BAVA': 'BEM OTHER',
'BDL OTHER': 'PISA2',
'BDM OTHER': 'CANE33',
'BDS OTHER': 'DERE',
'BEL OTHER': 'FIBE',
'BEM OTHER': 'CISP2',
'BERE': 'BEM OTHER',
'BES OTHER': 'BABL',
'BIOR': 'BEM OTHER',
'BO9': 'BES OTHER',
'BOSP8': 'BES OTHER',
'BRAC': 'BEL OTHER',
'BUBU': 'BEL OTHER',
'BUCA': 'PEM OTHER',
'CACA73': 'BDL OTHER',
'CACI': 'BES OTHER',
'CAEQ': 'CAEQ',
'CAFI': 'CANE33',
'CAGR11': 'BDL OTHER',
'CAIN4': 'CAIN4',
'CALO': 'BEL OTHER',
'CAMA37': 'BES OTHER',
'CAMI36': 'PES OTHER',
'CANE33': 'CANE33',
'CAPA3': 'BES OTHER',
'CARI9': 'BES OTHER',
'CARO': 'CANE33',
'CAVI': 'BEM OTHER',
'CEL OTHER': 'PIRA',
'CEM OTHER': 'PIBR2',
'CESI3': 'BEL OTHER',
'CES OTHER': 'PICO5',
'CHHU': 'PES OTHER',
'CHLU': 'PES OTHER',
'CHOL': 'BEM OTHER',
'CHSP': 'BDL OTHER',
'CICA': 'BEL OTHER',
'CIGR': 'BES OTHER',
'CILI': 'BES OTHER',
'CIPA': 'BEL OTHER',
'CIRE3': 'BES OTHER',
'CISI': 'BEM OTHER',
'CISP': 'BES OTHER',
'CISP2': 'CISP2',
'CIVE2': 'BEM OTHER',
'CLRO': 'BES OTHER',
'COCO1': 'BDS OTHER',
'COERA2': 'COERA2',
'CONU': 'CONU',
'CORA13': 'BEM OTHER',
'COSE2': 'COSU2',
'COSU2': 'COSU2',
'COUT': 'PEL OTHER',
'COUV': 'BES OTHER',
'COVI': 'BDL OTHER',
'CRCU': 'BES OTHER',
'CU': 'CEL OTHER',
'CUAN': 'BEM OTHER',
'CUSE': 'CEL OTHER',
'CYRE11': 'PES OTHER',
'DA2': 'BDL OTHER',
'DERE': 'DERE',
'DR': 'BES OTHER',
'DYDE2': 'PES OTHER',
'ELOR2': 'ELOR2',
'ENCY': 'BDL OTHER',
'ER15': 'BDL OTHER',
'ERCR': 'BEM OTHER',
'ERJA': 'BES OTHER',
'ERSA11': 'BDL OTHER',
'ERVA7': 'BDL OTHER',
'ERVAO': 'BDL OTHER',
'EU1': 'BEL OTHER',
'EUCI2': 'BEL OTHER',
'EUDE': 'BEL OTHER',
'EURO': 'BEL OTHER',
'EUTI': 'BDS OTHER',
'EUUN2': 'BEM OTHER',
'FABE': 'BEL OTHER',
'FI1': 'FIBE',
'FIBE': 'FIBE',
'FIBE2': 'FIBE',
'FICA': 'FIBE',
'FIDE6': 'FIDE6',
'FIEL': 'FIBE',
'FILY': 'FIBE',
'FIMA2': 'FIBE',
'FIMI2': 'FIBE',
'FIRE3': 'FIBE',
'FIVI3': 'FIBE',
'FRUH': 'BDL OTHER',
'GA2': 'BES OTHER',
'GRRO': 'BEL OTHER',
'GUOF': 'BES OTHER',
'HACA3': 'BEM OTHER',
'HAPE7': 'BES OTHER',
'HELI9': 'BEL OTHER',
'HENY': 'BEL OTHER',
'HISP': 'BES OTHER',
'HITI': 'BDM OTHER',
'HUCR': 'BDL OTHER',
'HYLA': 'PES OTHER',
'HYVE9': 'PES OTHER',
'ILPA2': 'ILPA2',
'JAIN': 'BES OTHER',
'JAMI': 'BDM OTHER',
'JUCH': 'CEL OTHER',
'JUCHS6': 'CEL OTHER',
'KOEL': 'BDM OTHER',
'LAIN': 'BDS OTHER',
'LAPA': 'BEL OTHER',
'LASP': 'LASP',
'LELE': 'BES OTHER',
'LICH': 'PES OTHER',
'LICH4': 'BEL OTHER',
'LIJA': 'BES OTHER',
'MAGR': 'BES OTHER',
'MAIN': 'BEL OTHER',
'MAIN8': 'BEL OTHER',
'MAZA': 'BEL OTHER',
'MEAZ': 'BDL OTHER',
'MEPO5': 'BEL OTHER',
'MEQU': 'MEQU',
'MICA21': 'BEM OTHER',
'MO': 'BDS OTHER',
'MOCI3': 'BES OTHER',
'MONI': 'BDS OTHER',
'MOOL': 'BEM OTHER',
'MU5': 'BES OTHER',
'MUPA4': 'BES OTHER',
'NEOL': 'BES OTHER',
'NOEM': 'BEL OTHER',
'OCEL': 'BES OTHER',
'OCSE2': 'BES OTHER',
'OLEU': 'BEL OTHER',
'ORCO9': 'PEL OTHER',
'PACE8': 'BES OTHER',
'PASP': 'PEM OTHER',
'PATE2': 'BEM OTHER',
'PEAM': 'BEL OTHER',
'PEL OTHER': 'CONU',
'PEM OTHER': 'PHDA4',
'PEPT': 'BEL OTHER',
'PES OTHER': 'VEME',
'PH7': 'PEM OTHER',
'PHCA': 'PEL OTHER',
'PHDA4': 'PHDA4',
'PHRO': 'PES OTHER',
'PI2': 'CEL OTHER',
'PI23': 'BEM OTHER',
'PIAR9': 'BEM OTHER',
'PIBR2': 'PIBR2',
'PICO5': 'PICO5',
'PIDI3': 'BEL OTHER',
'PIDU': 'BEL OTHER',
'PIPE8': 'BEM OTHER',
'PIPI2': 'CEL OTHER',
'PIRA': 'PIRA',
'PIRA2': 'BEL OTHER',
'PISA2': 'PISA2',
'PITH': 'CEL OTHER',
'PL13': 'BES OTHER',
'PLPI4': 'BDL OTHER',
'PO3': 'CEL OTHER',
'POLO21': 'BDL OTHER',
'PONE21': 'CEL OTHER',
'POUS2': 'CEL OTHER',
'PRPA11': 'PES OTHER',
'PRPA2': 'BEL OTHER',
'PSCA': 'BES OTHER',
'PSEL5': 'BDL OTHER',
'PSGU': 'BEM OTHER',
'PTIN': 'BDL OTHER',
'PTMA8': 'PES OTHER',
'RAMA': 'BEL OTHER',
'RORE2': 'PEM OTHER',
'SAMA': 'BEM OTHER',
'SCMO': 'BEM OTHER',
'SCPU18': 'BES OTHER',
'SCTE': 'BDS OTHER',
'SEGR5': 'BEL OTHER',
'SESU4': 'BES OTHER',
'SPCA': 'BEL OTHER',
'SWMA': 'SWMA',
'SYCO': 'PEM OTHER',
'SYJA': 'BEM OTHER',
'SYRO': 'PEL OTHER',
'TAAR': 'TAAR',
'TABA2': 'TACH',
'TACH4': 'TACH',
'TADO2': 'TACH',
'TAIM': 'TAAR',
'TAIN': 'BEL OTHER',
'TAPA': 'TAPA',
'TAPA13': 'BES OTHER',
'TASP': 'TAAR',
'TECA': 'BDL OTHER',
'THOR': 'CEL OTHER',
'THPE3': 'BES OTHER',
'THPU': 'BEM OTHER',
'TITI2': 'BDL OTHER',
'TOAR2': 'BEM OTHER',
'VEME': 'VEME',
'VIPA6': 'BEL OTHER',
'WARO': 'PES OTHER'
} # END TropicPacXXX
}
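# Minimal lookup sketch (illustrative only; the name bound to the mapping
# defined above lies outside this excerpt, so it is passed in explicitly).
# Each region appears to map local species codes to an i-Tree species code,
# with an 'OTHER' entry in some regions serving as a fallback; the helper
# name below is hypothetical.
def _itree_code_for(species_conversions, region_code, species_code):
    region_map = species_conversions.get(region_code, {})
    return region_map.get(species_code, region_map.get('OTHER'))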
# The currency conversions are in the following units:
# electricity: $/kWh
# natural_gas: $/kBTU
# h20: $/gal
# co2, o3, nox, pm10, sox, voc: $/lb
ITREE_REGIONS = {
'CaNCCoJBK': {
'name': 'Northern California Coast',
'currency_conversion': {
'electricity_kwh_to_currency': 0.1323,
'natural_gas_kbtu_to_currency': 0.013048,
'h20_gal_to_currency': 0.004,
'co2_lb_to_currency': 0.0075,
'o3_lb_to_currency': 10.3101,
'nox_lb_to_currency': 10.3101,
'pm10_lb_to_currency': 11.7901,
'sox_lb_to_currency': 3.67,
'voc_lb_to_currency': 7.22
}
},
'CenFlaXXX': {
'name': 'Central Florida',
'currency_conversion': {
'electricity_kwh_to_currency': 0.13182,
'natural_gas_kbtu_to_currency': 0.002678,
'h20_gal_to_currency': 0.003,
'co2_lb_to_currency': 0.0034,
'o3_lb_to_currency': 2.2,
'nox_lb_to_currency': 2.2,
'pm10_lb_to_currency': 2.1,
'sox_lb_to_currency': 2.01,
'voc_lb_to_currency': 1.03
}
},
'GulfCoCHS': {
'name': 'Coastal Plain',
'currency_conversion': {
'electricity_kwh_to_currency': 0.09339,
'natural_gas_kbtu_to_currency': 0.019742,
'h20_gal_to_currency': 0.00605,
'co2_lb_to_currency': 0.0075,
'o3_lb_to_currency': 1.04,
'nox_lb_to_currency': 1.04,
'pm10_lb_to_currency': 0.76,
'sox_lb_to_currency': 1.28,
'voc_lb_to_currency': 1.48
}
},
'InlEmpCLM': {
'name': 'Inland Empire',
'currency_conversion': {
'electricity_kwh_to_currency': 0.20148,
'natural_gas_kbtu_to_currency': 0.0066871,
'h20_gal_to_currency': 0.0055,
'co2_lb_to_currency': 0.00334,
'o3_lb_to_currency': 3.84,
'nox_lb_to_currency': 3.84,
'pm10_lb_to_currency': 4.63,
'sox_lb_to_currency': 2.43,
'voc_lb_to_currency': 1.92
}
},
'InlValMOD': {
'name': 'Inland Valleys',
'currency_conversion': {
'electricity_kwh_to_currency': 0.1166,
'natural_gas_kbtu_to_currency': 0.0125278,
'h20_gal_to_currency': 0.0078,
'co2_lb_to_currency': 0.0075,
'o3_lb_to_currency': 5.0032,
'nox_lb_to_currency': 12.79,
'pm10_lb_to_currency': 9.41,
'sox_lb_to_currency': 3.72,
'voc_lb_to_currency': 4.69
}
},
'InterWABQ': {
'name': 'Interior West',
'currency_conversion': {
'electricity_kwh_to_currency': 0.078844,
'natural_gas_kbtu_to_currency': 0.011,
'h20_gal_to_currency': 0.005,
'co2_lb_to_currency': 0.00334,
'o3_lb_to_currency': 0.61,
'nox_lb_to_currency': 0.61,
'pm10_lb_to_currency': 1.14,
'sox_lb_to_currency': 1.42,
'voc_lb_to_currency': 0.19
}
},
'LoMidWXXX': {
'name': 'Lower Midwest',
'currency_conversion': {
'electricity_kwh_to_currency': 0.068,
'natural_gas_kbtu_to_currency': 0.00973,
'h20_gal_to_currency': 0.0062,
'co2_lb_to_currency': 0.00334,
'o3_lb_to_currency': 0.82,
'nox_lb_to_currency': 0.82,
'pm10_lb_to_currency': 0.99,
'sox_lb_to_currency': 1.50,
'voc_lb_to_currency': 0.3
}
},
'MidWstMSP': {
'name': 'Midwest',
'currency_conversion': {
'electricity_kwh_to_currency': 0.0759,
'natural_gas_kbtu_to_currency': 0.0098,
'h20_gal_to_currency': 0.0271,
'co2_lb_to_currency': 0.0075,
'o3_lb_to_currency': 3.334,
'nox_lb_to_currency': 3.334,
'pm10_lb_to_currency': 2.84,
'sox_lb_to_currency': 2.06,
'voc_lb_to_currency': 3.75
}
},
'NMtnPrFNL': {
'name': 'North',
'currency_conversion': {
'electricity_kwh_to_currency': 0.636,
'natural_gas_kbtu_to_currency': 0.009066,
'h20_gal_to_currency': 0.0108,
'co2_lb_to_currency': 0.0075,
'o3_lb_to_currency': 2.77,
'nox_lb_to_currency': 2.77,
'pm10_lb_to_currency': 0.49,
'sox_lb_to_currency': 0.98,
'voc_lb_to_currency': 2.61
}
},
'NoEastXXX': {
'name': 'Northeast',
'currency_conversion': {
'electricity_kwh_to_currency': 0.1401,
'natural_gas_kbtu_to_currency': 0.01408,
'h20_gal_to_currency': 0.0008,
'co2_lb_to_currency': 0.00334,
'o3_lb_to_currency': 4.59,
'nox_lb_to_currency': 4.59,
'pm10_lb_to_currency': 8.31,
'sox_lb_to_currency': 3.48,
'voc_lb_to_currency': 2.31
}
},
'PacfNWLOG': {
'name': 'Pacific Northwest',
'currency_conversion': {
'electricity_kwh_to_currency': 0.0512,
'natural_gas_kbtu_to_currency': 0.01146,
'h20_gal_to_currency': 0.02779,
'co2_lb_to_currency': 0.00334,
'o3_lb_to_currency': 0.94,
'nox_lb_to_currency': 0.94,
'pm10_lb_to_currency': 1.67,
'sox_lb_to_currency': 1.88,
'voc_lb_to_currency': 0.35
}
},
'PiedmtCLT': {
'name': 'South',
'currency_conversion': {
'electricity_kwh_to_currency': 0.0759,
'natural_gas_kbtu_to_currency': 0.01046,
'h20_gal_to_currency': 0.0099,
'co2_lb_to_currency': 0.0075,
'o3_lb_to_currency': 6.55,
'nox_lb_to_currency': 6.55,
'pm10_lb_to_currency': 2.49,
'sox_lb_to_currency': 1.97,
'voc_lb_to_currency': 6.26
}
},
'SoCalCSMA': {
'name': 'Southern California Coast',
'currency_conversion': {
'electricity_kwh_to_currency': 0.16281,
'natural_gas_kbtu_to_currency': 0.0131,
'h20_gal_to_currency': 0.00183,
'co2_lb_to_currency': 0.0150,
'o3_lb_to_currency': 16.44,
'nox_lb_to_currency': 16.44,
'pm10_lb_to_currency': 51.32,
'sox_lb_to_currency': 21.47,
'voc_lb_to_currency': 3.34
}
},
'SWDsrtGDL': {
'name': 'Southwest Desert',
'currency_conversion': {
'electricity_kwh_to_currency': 0.1208,
'natural_gas_kbtu_to_currency': 0.009409,
'h20_gal_to_currency': 0.0048,
'co2_lb_to_currency': 0.0075,
'o3_lb_to_currency': 4.0,
'nox_lb_to_currency': 4.0,
'pm10_lb_to_currency': 6.0,
'sox_lb_to_currency': 15.7,
'voc_lb_to_currency': 4.0
}
},
'TpIntWBOI': {
'name': 'Temperate Interior West',
'currency_conversion': {
'electricity_kwh_to_currency': 0.060936,
'natural_gas_kbtu_to_currency': 0.0114245,
'h20_gal_to_currency': 0.005,
'co2_lb_to_currency': 0.00334,
'o3_lb_to_currency': 0.51,
'nox_lb_to_currency': 0.51,
'pm10_lb_to_currency': 0.92,
'sox_lb_to_currency': 0.06,
'voc_lb_to_currency': 0.14
}
},
'TropicPacXXX': {
'name': 'Tropical',
'currency_conversion': {
'electricity_kwh_to_currency': 0.122,
'natural_gas_kbtu_to_currency': 0.0235,
'h20_gal_to_currency': 0.01,
'co2_lb_to_currency': 0.00334,
'o3_lb_to_currency': 1.47,
'nox_lb_to_currency': 1.47,
'pm10_lb_to_currency': 1.34,
'sox_lb_to_currency': 1.52,
'voc_lb_to_currency': 0.60
}
}
}
ITREE_REGION_CHOICES = [(code, conf['name']) for code, conf in ITREE_REGIONS.items()]
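# Minimal usage sketch (illustrative only; _benefits_to_currency is a
# hypothetical helper, not part of this module): the factors in ITREE_REGIONS
# turn annual eco-benefit totals, expressed in the physical units listed in
# the comment above, into dollar values.
def _benefits_to_currency(region_code, benefits):
    # benefits maps factor prefixes such as 'electricity_kwh' or 'co2_lb'
    # to annual quantities in the matching physical unit
    factors = ITREE_REGIONS[region_code]['currency_conversion']
    totals = {}
    for factor_name, rate in factors.items():
        # 'electricity_kwh_to_currency' -> benefit key 'electricity_kwh'
        benefit_key = factor_name[:-len('_to_currency')]
        if benefit_key in benefits:
            totals[benefit_key] = benefits[benefit_key] * rate
    return totals
# e.g. _benefits_to_currency('NoEastXXX', {'electricity_kwh': 120.0, 'co2_lb': 35.0})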
| agpl-3.0 | -7,224,161,547,193,751,000 | 26.991759 | 85 | 0.402607 | false |
dga4654dan/UTM-Demo | V_1_0_2_1/UtmDemo_Sfs_2.9.0/UtmDemo_Sfs_2.9.0_Server/lib/Lib/unittest.py | 11 | 25328 | #!/usr/bin/env python
'''
Python unit testing framework, based on Erich Gamma's JUnit and Kent Beck's
Smalltalk testing framework.
This module contains the core framework classes that form the basis of
specific test cases and suites (TestCase, TestSuite etc.), and also a
text-based utility class for running the tests and reporting the results
(TextTestRunner).
Simple usage:
import unittest
class IntegerArithmenticTestCase(unittest.TestCase):
def testAdd(self): ## test method names begin 'test*'
self.assertEquals((1 + 2), 3)
self.assertEquals(0 + 1, 1)
def testMultiply(self):
self.assertEquals((0 * 10), 0)
self.assertEquals((5 * 8), 40)
if __name__ == '__main__':
unittest.main()
Further information is available in the bundled documentation, and from
http://pyunit.sourceforge.net/
Copyright (c) 1999, 2000, 2001 Steve Purcell
This module is free software, and you may redistribute it and/or modify
it under the same terms as Python itself, so long as this copyright message
and disclaimer are retained in their original form.
IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
DAMAGE.
THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE. THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
'''
__author__ = "Steve Purcell"
__email__ = "stephen_purcell at yahoo dot com"
__version__ = "#Revision: 1.43 $"[11:-2]
import time
import sys
import traceback
import string
import os
import types
##############################################################################
# Test framework core
##############################################################################
class TestResult:
"""Holder for test result information.
Test results are automatically managed by the TestCase and TestSuite
classes, and do not need to be explicitly manipulated by writers of tests.
Each instance holds the total number of tests run, and collections of
failures and errors that occurred among those test runs. The collections
contain tuples of (testcase, exceptioninfo), where exceptioninfo is the
formatted traceback of the error that occurred.
"""
def __init__(self):
self.failures = []
self.errors = []
self.testsRun = 0
self.shouldStop = 0
def startTest(self, test):
"Called when the given test is about to be run"
self.testsRun = self.testsRun + 1
def stopTest(self, test):
"Called when the given test has been run"
pass
def addError(self, test, err):
"""Called when an error has occurred. 'err' is a tuple of values as
returned by sys.exc_info().
"""
self.errors.append((test, self._exc_info_to_string(err)))
def addFailure(self, test, err):
"""Called when an error has occurred. 'err' is a tuple of values as
returned by sys.exc_info()."""
self.failures.append((test, self._exc_info_to_string(err)))
def addSuccess(self, test):
"Called when a test has completed successfully"
pass
def wasSuccessful(self):
"Tells whether or not this result was a success"
return len(self.failures) == len(self.errors) == 0
def stop(self):
"Indicates that the tests should be aborted"
self.shouldStop = 1
def _exc_info_to_string(self, err):
"""Converts a sys.exc_info()-style tuple of values into a string."""
return string.join(apply(traceback.format_exception, err), '')
def __repr__(self):
return "<%s run=%i errors=%i failures=%i>" % \
(self.__class__, self.testsRun, len(self.errors),
len(self.failures))
class TestCase:
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
"""
# This attribute determines which exception will be raised when
# the instance's assertion methods fail; test methods raising this
# exception will be deemed to have 'failed' rather than 'errored'
failureException = AssertionError
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
not have a method with the specified name.
"""
try:
self.__testMethodName = methodName
testMethod = getattr(self, methodName)
self.__testMethodDoc = testMethod.__doc__
except AttributeError:
raise ValueError, "no such test method in %s: %s" % \
(self.__class__, methodName)
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
pass
def tearDown(self):
"Hook method for deconstructing the test fixture after testing it."
pass
def countTestCases(self):
return 1
def defaultTestResult(self):
return TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self.__testMethodDoc
return doc and string.strip(string.split(doc, "\n")[0]) or None
def id(self):
return "%s.%s" % (self.__class__, self.__testMethodName)
def __str__(self):
return "%s (%s)" % (self.__testMethodName, self.__class__)
def __repr__(self):
return "<%s testMethod=%s>" % \
(self.__class__, self.__testMethodName)
def run(self, result=None):
return self(result)
def __call__(self, result=None):
if result is None: result = self.defaultTestResult()
result.startTest(self)
testMethod = getattr(self, self.__testMethodName)
try:
try:
self.setUp()
except KeyboardInterrupt:
raise
except:
result.addError(self, self.__exc_info())
return
ok = 0
try:
testMethod()
ok = 1
except self.failureException, e:
result.addFailure(self, self.__exc_info())
except KeyboardInterrupt:
raise
except:
result.addError(self, self.__exc_info())
try:
self.tearDown()
except KeyboardInterrupt:
raise
except:
result.addError(self, self.__exc_info())
ok = 0
if ok: result.addSuccess(self)
finally:
result.stopTest(self)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
self.setUp()
getattr(self, self.__testMethodName)()
self.tearDown()
def __exc_info(self):
"""Return a version of sys.exc_info() with the traceback frame
minimised; usually the top level of the traceback frame is not
needed.
"""
exctype, excvalue, tb = sys.exc_info()
if sys.platform[:4] == 'java': ## tracebacks look different in Jython
return (exctype, excvalue, tb)
newtb = tb.tb_next
if newtb is None:
return (exctype, excvalue, tb)
return (exctype, excvalue, newtb)
def fail(self, msg=None):
"""Fail immediately, with the given message."""
raise self.failureException, msg
def failIf(self, expr, msg=None):
"Fail the test if the expression is true."
if expr: raise self.failureException, msg
def failUnless(self, expr, msg=None):
"""Fail the test unless the expression is true."""
if not expr: raise self.failureException, msg
def failUnlessRaises(self, excClass, callableObj, *args, **kwargs):
"""Fail unless an exception of class excClass is thrown
by callableObj when invoked with arguments args and keyword
arguments kwargs. If a different type of exception is
thrown, it will not be caught, and the test case will be
deemed to have suffered an error, exactly as for an
unexpected exception.
"""
try:
apply(callableObj, args, kwargs)
except excClass:
return
else:
if hasattr(excClass,'__name__'): excName = excClass.__name__
else: excName = str(excClass)
raise self.failureException, excName
def failUnlessEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '!='
operator.
"""
if first != second:
raise self.failureException, \
(msg or '%s != %s' % (`first`, `second`))
def failIfEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '=='
operator.
"""
if first == second:
raise self.failureException, \
(msg or '%s == %s' % (`first`, `second`))
assertEqual = assertEquals = failUnlessEqual
assertNotEqual = assertNotEquals = failIfEqual
assertRaises = failUnlessRaises
assert_ = failUnless
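# Illustrative sketch: a minimal, hypothetical TestCase subclass following the
# pattern described in the class docstring above (setUp/tearDown plus a test
# method).  The widget fixture and the method names are made up.
def _example_testcase_usage():
    class WidgetTestCase(TestCase):
        def setUp(self):
            self.widget = []                      # build the fixture
        def tearDown(self):
            self.widget = None                    # deconstruct the fixture
        def testDefaultSize(self):
            self.assertEqual(len(self.widget), 0)
    case = WidgetTestCase('testDefaultSize')      # name of the method to run
    result = case.defaultTestResult()
    case.run(result)
    return result.wasSuccessful()                 # True if the assertion held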
class TestSuite:
"""A test suite is a composite test consisting of a number of TestCases.
For use, create an instance of TestSuite, then add test case instances.
When all tests have been added, the suite can be passed to a test
runner, such as TextTestRunner. It will run the individual test cases
in the order in which they were added, aggregating the results. When
subclassing, do not forget to call the base class constructor.
"""
def __init__(self, tests=()):
self._tests = []
self.addTests(tests)
def __repr__(self):
return "<%s tests=%s>" % (self.__class__, self._tests)
__str__ = __repr__
def countTestCases(self):
cases = 0
for test in self._tests:
cases = cases + test.countTestCases()
return cases
def addTest(self, test):
self._tests.append(test)
def addTests(self, tests):
for test in tests:
self.addTest(test)
def run(self, result):
return self(result)
def __call__(self, result):
for test in self._tests:
if result.shouldStop:
break
test(result)
return result
def debug(self):
"""Run the tests without collecting errors in a TestResult"""
for test in self._tests: test.debug()
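# Illustrative sketch: composing a TestSuite as described in its docstring
# above; the trivial passing case defined here is hypothetical.
def _example_suite_usage():
    class _PassingCase(TestCase):
        def runTest(self):
            self.failUnless(True)
    suite = TestSuite()
    suite.addTest(_PassingCase())                       # add a single case
    suite.addTests([_PassingCase(), _PassingCase()])    # or several at once
    result = TestResult()
    suite.run(result)                                   # aggregate into one result
    return result.testsRun, result.wasSuccessful()      # (3, True)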
class FunctionTestCase(TestCase):
"""A test case that wraps a test function.
This is useful for slipping pre-existing test functions into the
PyUnit framework. Optionally, set-up and tidy-up functions can be
supplied. As with TestCase, the tidy-up ('tearDown') function will
always be called if the set-up ('setUp') function ran successfully.
"""
def __init__(self, testFunc, setUp=None, tearDown=None,
description=None):
TestCase.__init__(self)
self.__setUpFunc = setUp
self.__tearDownFunc = tearDown
self.__testFunc = testFunc
self.__description = description
def setUp(self):
if self.__setUpFunc is not None:
self.__setUpFunc()
def tearDown(self):
if self.__tearDownFunc is not None:
self.__tearDownFunc()
def runTest(self):
self.__testFunc()
def id(self):
return self.__testFunc.__name__
def __str__(self):
return "%s (%s)" % (self.__class__, self.__testFunc.__name__)
def __repr__(self):
return "<%s testFunc=%s>" % (self.__class__, self.__testFunc)
def shortDescription(self):
if self.__description is not None: return self.__description
doc = self.__testFunc.__doc__
return doc and string.strip(string.split(doc, "\n")[0]) or None
##############################################################################
# Locating and loading tests
##############################################################################
class TestLoader:
"""This class is responsible for loading tests according to various
criteria and returning them wrapped in a Test
"""
testMethodPrefix = 'test'
sortTestMethodsUsing = cmp
suiteClass = TestSuite
def loadTestsFromTestCase(self, testCaseClass):
"""Return a suite of all tests cases contained in testCaseClass"""
return self.suiteClass(map(testCaseClass,
self.getTestCaseNames(testCaseClass)))
def loadTestsFromModule(self, module):
"""Return a suite of all tests cases contained in the given module"""
tests = []
for name in dir(module):
obj = getattr(module, name)
if type(obj) == types.ClassType and issubclass(obj, TestCase):
tests.append(self.loadTestsFromTestCase(obj))
return self.suiteClass(tests)
def loadTestsFromName(self, name, module=None):
"""Return a suite of all tests cases given a string specifier.
The name may resolve either to a module, a test case class, a
test method within a test case class, or a callable object which
returns a TestCase or TestSuite instance.
The method optionally resolves the names relative to a given module.
"""
parts = string.split(name, '.')
if module is None:
if not parts:
raise ValueError, "incomplete test name: %s" % name
else:
parts_copy = parts[:]
while parts_copy:
try:
module = __import__(string.join(parts_copy,'.'))
break
except ImportError:
del parts_copy[-1]
if not parts_copy: raise
parts = parts[1:]
obj = module
for part in parts:
obj = getattr(obj, part)
import unittest
if type(obj) == types.ModuleType:
return self.loadTestsFromModule(obj)
elif type(obj) == types.ClassType and issubclass(obj, unittest.TestCase):
return self.loadTestsFromTestCase(obj)
elif type(obj) == types.UnboundMethodType:
return obj.im_class(obj.__name__)
elif callable(obj):
test = obj()
if not isinstance(test, unittest.TestCase) and \
not isinstance(test, unittest.TestSuite):
raise ValueError, \
"calling %s returned %s, not a test" % (obj,test)
return test
else:
raise ValueError, "don't know how to make test from: %s" % obj
def loadTestsFromNames(self, names, module=None):
"""Return a suite of all tests cases found using the given sequence
of string specifiers. See 'loadTestsFromName()'.
"""
suites = []
for name in names:
suites.append(self.loadTestsFromName(name, module))
return self.suiteClass(suites)
def getTestCaseNames(self, testCaseClass):
"""Return a sorted sequence of method names found within testCaseClass
"""
testFnNames = filter(lambda n,p=self.testMethodPrefix: n[:len(p)] == p,
dir(testCaseClass))
for baseclass in testCaseClass.__bases__:
for testFnName in self.getTestCaseNames(baseclass):
if testFnName not in testFnNames: # handle overridden methods
testFnNames.append(testFnName)
if self.sortTestMethodsUsing:
testFnNames.sort(self.sortTestMethodsUsing)
return testFnNames
defaultTestLoader = TestLoader()
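# Illustrative sketch: the TestLoader docstrings above describe collecting
# 'test*' methods and resolving dotted-name specifiers; the sample class here
# and the dotted names mentioned in the comments are hypothetical.
def _example_loader_usage():
    class _SampleCase(TestCase):
        def test_one(self):
            pass
        def test_two(self):
            pass
    names = defaultTestLoader.getTestCaseNames(_SampleCase)    # ['test_one', 'test_two']
    suite = defaultTestLoader.loadTestsFromTestCase(_SampleCase)
    # loadTestsFromName() also accepts dotted specifiers such as
    # 'mypackage.tests', 'mypackage.tests.SomeCase' or
    # 'mypackage.tests.SomeCase.test_something'.
    return names, suite.countTestCases()                       # (names, 2)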
##############################################################################
# Patches for old functions: these functions should be considered obsolete
##############################################################################
def _makeLoader(prefix, sortUsing, suiteClass=None):
loader = TestLoader()
loader.sortTestMethodsUsing = sortUsing
loader.testMethodPrefix = prefix
if suiteClass: loader.suiteClass = suiteClass
return loader
def getTestCaseNames(testCaseClass, prefix, sortUsing=cmp):
return _makeLoader(prefix, sortUsing).getTestCaseNames(testCaseClass)
def makeSuite(testCaseClass, prefix='test', sortUsing=cmp, suiteClass=TestSuite):
return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromTestCase(testCaseClass)
def findTestCases(module, prefix='test', sortUsing=cmp, suiteClass=TestSuite):
return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromModule(module)
##############################################################################
# Text UI
##############################################################################
class _WritelnDecorator:
"""Used to decorate file-like objects with a handy 'writeln' method"""
def __init__(self,stream):
self.stream = stream
def __getattr__(self, attr):
return getattr(self.stream,attr)
def writeln(self, *args):
if args: apply(self.write, args)
self.write('\n') # text-mode streams translate to \r\n if needed
class _TextTestResult(TestResult):
"""A test result class that can print formatted text results to a stream.
Used by TextTestRunner.
"""
separator1 = '=' * 70
separator2 = '-' * 70
def __init__(self, stream, descriptions, verbosity):
TestResult.__init__(self)
self.stream = stream
self.showAll = verbosity > 1
self.dots = verbosity == 1
self.descriptions = descriptions
def getDescription(self, test):
if self.descriptions:
return test.shortDescription() or str(test)
else:
return str(test)
def startTest(self, test):
TestResult.startTest(self, test)
if self.showAll:
self.stream.write(self.getDescription(test))
self.stream.write(" ... ")
def addSuccess(self, test):
TestResult.addSuccess(self, test)
if self.showAll:
self.stream.writeln("ok")
elif self.dots:
self.stream.write('.')
def addError(self, test, err):
TestResult.addError(self, test, err)
if self.showAll:
self.stream.writeln("ERROR")
elif self.dots:
self.stream.write('E')
def addFailure(self, test, err):
TestResult.addFailure(self, test, err)
if self.showAll:
self.stream.writeln("FAIL")
elif self.dots:
self.stream.write('F')
def printErrors(self):
if self.dots or self.showAll:
self.stream.writeln()
self.printErrorList('ERROR', self.errors)
self.printErrorList('FAIL', self.failures)
def printErrorList(self, flavour, errors):
for test, err in errors:
self.stream.writeln(self.separator1)
self.stream.writeln("%s: %s" % (flavour,self.getDescription(test)))
self.stream.writeln(self.separator2)
self.stream.writeln("%s" % err)
class TextTestRunner:
"""A test runner class that displays results in textual form.
It prints out the names of tests as they are run, errors as they
occur, and a summary of the results at the end of the test run.
"""
def __init__(self, stream=sys.stderr, descriptions=1, verbosity=1):
self.stream = _WritelnDecorator(stream)
self.descriptions = descriptions
self.verbosity = verbosity
def _makeResult(self):
return _TextTestResult(self.stream, self.descriptions, self.verbosity)
def run(self, test):
"Run the given test case or test suite."
result = self._makeResult()
startTime = time.time()
test(result)
stopTime = time.time()
timeTaken = float(stopTime - startTime)
result.printErrors()
self.stream.writeln(result.separator2)
run = result.testsRun
self.stream.writeln("Ran %d test%s in %.3fs" %
(run, run != 1 and "s" or "", timeTaken))
self.stream.writeln()
if not result.wasSuccessful():
self.stream.write("FAILED (")
failed, errored = map(len, (result.failures, result.errors))
if failed:
self.stream.write("failures=%d" % failed)
if errored:
if failed: self.stream.write(", ")
self.stream.write("errors=%d" % errored)
self.stream.writeln(")")
else:
self.stream.writeln("OK")
return result
##############################################################################
# Facilities for running tests from the command line
##############################################################################
class TestProgram:
"""A command-line program that runs a set of tests; this is primarily
for making test modules conveniently executable.
"""
USAGE = """\
Usage: %(progName)s [options] [test] [...]
Options:
-h, --help Show this message
-v, --verbose Verbose output
-q, --quiet Minimal output
Examples:
%(progName)s - run default set of tests
%(progName)s MyTestSuite - run suite 'MyTestSuite'
%(progName)s MyTestCase.testSomething - run MyTestCase.testSomething
%(progName)s MyTestCase - run all 'test*' test methods
in MyTestCase
"""
def __init__(self, module='__main__', defaultTest=None,
argv=None, testRunner=None, testLoader=defaultTestLoader):
if type(module) == type(''):
self.module = __import__(module)
for part in string.split(module,'.')[1:]:
self.module = getattr(self.module, part)
else:
self.module = module
if argv is None:
argv = sys.argv
self.verbosity = 1
self.defaultTest = defaultTest
self.testRunner = testRunner
self.testLoader = testLoader
self.progName = os.path.basename(argv[0])
self.parseArgs(argv)
self.runTests()
def usageExit(self, msg=None):
if msg: print msg
print self.USAGE % self.__dict__
sys.exit(2)
def parseArgs(self, argv):
import getopt
try:
options, args = getopt.getopt(argv[1:], 'hHvq',
['help','verbose','quiet'])
for opt, value in options:
if opt in ('-h','-H','--help'):
self.usageExit()
if opt in ('-q','--quiet'):
self.verbosity = 0
if opt in ('-v','--verbose'):
self.verbosity = 2
if len(args) == 0 and self.defaultTest is None:
self.test = self.testLoader.loadTestsFromModule(self.module)
return
if len(args) > 0:
self.testNames = args
else:
self.testNames = (self.defaultTest,)
self.createTests()
except getopt.error, msg:
self.usageExit(msg)
def createTests(self):
self.test = self.testLoader.loadTestsFromNames(self.testNames,
self.module)
def runTests(self):
if self.testRunner is None:
self.testRunner = TextTestRunner(verbosity=self.verbosity)
result = self.testRunner.run(self.test)
sys.exit(not result.wasSuccessful())
main = TestProgram
##############################################################################
# Executing this module from the command line
##############################################################################
if __name__ == "__main__":
main(module=None)
| gpl-2.0 | 5,509,492,323,495,381,000 | 34.031812 | 90 | 0.583504 | false |
noobcoderT/ryu-3.21 | ryu/lib/packet/bmp.py | 22 | 29351 | # Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
BGP Monitoring Protocol draft-ietf-grow-bmp-07
"""
from ryu.lib.packet import packet_base
from ryu.lib.packet import stream_parser
from ryu.lib.packet.bgp import BGPMessage
from ryu.lib import addrconv
import struct
VERSION = 3
BMP_MSG_ROUTE_MONITORING = 0
BMP_MSG_STATISTICS_REPORT = 1
BMP_MSG_PEER_DOWN_NOTIFICATION = 2
BMP_MSG_PEER_UP_NOTIFICATION = 3
BMP_MSG_INITIATION = 4
BMP_MSG_TERMINATION = 5
BMP_PEER_TYPE_GLOBAL = 0
BMP_PEER_TYPE_L3VPN = 1
BMP_INIT_TYPE_STRING = 0
BMP_INIT_TYPE_SYSDESCR = 1
BMP_INIT_TYPE_SYSNAME = 2
BMP_TERM_TYPE_STRING = 0
BMP_TERM_TYPE_REASON = 1
BMP_TERM_REASON_ADMIN = 0
BMP_TERM_REASON_UNSPEC = 1
BMP_TERM_REASON_OUT_OF_RESOURCE = 2
BMP_TERM_REASON_REDUNDANT_CONNECTION = 3
BMP_STAT_TYPE_REJECTED = 0
BMP_STAT_TYPE_DUPLICATE_PREFIX = 1
BMP_STAT_TYPE_DUPLICATE_WITHDRAW = 2
BMP_STAT_TYPE_INV_UPDATE_DUE_TO_CLUSTER_LIST_LOOP = 3
BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_PATH_LOOP = 4
BMP_STAT_TYPE_INV_UPDATE_DUE_TO_ORIGINATOR_ID = 5
BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_CONFED_LOOP = 6
BMP_STAT_TYPE_ADJ_RIB_IN = 7
BMP_STAT_TYPE_LOC_RIB = 8
BMP_PEER_DOWN_REASON_UNKNOWN = 0
BMP_PEER_DOWN_REASON_LOCAL_BGP_NOTIFICATION = 1
BMP_PEER_DOWN_REASON_LOCAL_NO_NOTIFICATION = 2
BMP_PEER_DOWN_REASON_REMOTE_BGP_NOTIFICATION = 3
BMP_PEER_DOWN_REASON_REMOTE_NO_NOTIFICATION = 4
class _TypeDisp(object):
_TYPES = {}
_REV_TYPES = None
_UNKNOWN_TYPE = None
@classmethod
def register_unknown_type(cls):
def _register_type(subcls):
cls._UNKNOWN_TYPE = subcls
return subcls
return _register_type
@classmethod
def register_type(cls, type_):
cls._TYPES = cls._TYPES.copy()
def _register_type(subcls):
cls._TYPES[type_] = subcls
cls._REV_TYPES = None
return subcls
return _register_type
@classmethod
def _lookup_type(cls, type_):
try:
return cls._TYPES[type_]
except KeyError:
return cls._UNKNOWN_TYPE
@classmethod
def _rev_lookup_type(cls, targ_cls):
if cls._REV_TYPES is None:
rev = dict((v, k) for k, v in cls._TYPES.iteritems())
cls._REV_TYPES = rev
return cls._REV_TYPES[targ_cls]
class BMPMessage(packet_base.PacketBase, _TypeDisp):
"""Base class for BGP Monitoring Protocol messages.
An instance has the following attributes at least.
Most of them are same to the on-wire counterparts but in host byte
order.
__init__ takes the corresponding args in this order.
========================== ===============================================
Attribute Description
========================== ===============================================
version Version. this packet lib defines BMP ver. 3
len Length field. Ignored when encoding.
type Type field. one of BMP\_MSG\_ constants.
========================== ===============================================
"""
    _HDR_PACK_STR = '!BIB'  # version (1 byte), len (4 bytes), type (1 byte)
_HDR_LEN = struct.calcsize(_HDR_PACK_STR)
def __init__(self, type_, len_=None, version=VERSION):
self.version = version
self.len = len_
self.type = type_
@classmethod
def parse_header(cls, buf):
if len(buf) < cls._HDR_LEN:
raise stream_parser.StreamParser.TooSmallException(
'%d < %d' % (len(buf), cls._HDR_LEN))
(version, len_, type_) = struct.unpack_from(cls._HDR_PACK_STR,
buffer(buf))
return version, len_, type_
@classmethod
def parser(cls, buf):
version, msglen, type_ = cls.parse_header(buf)
if version != VERSION:
raise ValueError("not supportted bmp version: %d" % version)
if len(buf) < msglen:
raise stream_parser.StreamParser.TooSmallException(
'%d < %d' % (len(buf), msglen))
binmsg = buf[cls._HDR_LEN:msglen]
rest = buf[msglen:]
subcls = cls._lookup_type(type_)
if subcls == cls._UNKNOWN_TYPE:
raise ValueError("unknown bmp type: %d" % type_)
kwargs = subcls.parser(binmsg)
return subcls(len_=msglen,
type_=type_, version=version, **kwargs), rest
def serialize(self):
# fixup
tail = self.serialize_tail()
self.len = self._HDR_LEN + len(tail)
hdr = bytearray(struct.pack(self._HDR_PACK_STR, self.version,
self.len, self.type))
return hdr + tail
def __len__(self):
# XXX destructive
buf = self.serialize()
return len(buf)
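# Illustrative sketch: a minimal encode/parse round trip through the common
# header logic above.  It relies on the BMPInitiation subclass registered
# further below, and the information string is made up.
def _example_bmp_roundtrip():
    init = BMPInitiation(info=[{'type': BMP_INIT_TYPE_STRING,
                                'value': u'monitored router'}])
    wire = init.serialize()                  # common header + TLV body
    msg, rest = BMPMessage.parser(wire)      # dispatches on the type field
    return msg.info, rest                    # (parsed TLVs, leftover bytes)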
class BMPPeerMessage(BMPMessage):
"""BMP Message with Per Peer Header
    The following BMP messages carry the Per-Peer Header after the common BMP header.
    - BMP_MSG_ROUTE_MONITORING
    - BMP_MSG_STATISTICS_REPORT
    - BMP_MSG_PEER_UP_NOTIFICATION
========================== ===============================================
Attribute Description
========================== ===============================================
version Version. this packet lib defines BMP ver. 3
len Length field. Ignored when encoding.
type Type field. one of BMP\_MSG\_ constants.
peer_type The type of the peer.
is_post_policy Indicate the message reflects the post-policy
Adj-RIB-In
peer_distinguisher Use for L3VPN router which can have multiple
instance.
peer_address The remote IP address associated with the TCP
session.
peer_as The Autonomous System number of the peer.
peer_bgp_id The BGP Identifier of the peer
timestamp The time when the encapsulated routes were
received.
========================== ===============================================
"""
_PEER_HDR_PACK_STR = '!BBQ16sI4sII'
_TYPE = {
'ascii': [
'peer_address',
'peer_bgp_id'
]
}
def __init__(self, peer_type, is_post_policy, peer_distinguisher,
peer_address, peer_as, peer_bgp_id, timestamp,
version=VERSION, type_=None, len_=None):
super(BMPPeerMessage, self).__init__(version=version,
len_=len_,
type_=type_)
self.peer_type = peer_type
self.is_post_policy = is_post_policy
self.peer_distinguisher = peer_distinguisher
self.peer_address = peer_address
self.peer_as = peer_as
self.peer_bgp_id = peer_bgp_id
self.timestamp = timestamp
@classmethod
def parser(cls, buf):
(peer_type, peer_flags, peer_distinguisher,
peer_address, peer_as, peer_bgp_id,
timestamp1, timestamp2) = struct.unpack_from(cls._PEER_HDR_PACK_STR,
buffer(buf))
rest = buf[struct.calcsize(cls._PEER_HDR_PACK_STR):]
if peer_flags & (1 << 6):
is_post_policy = True
else:
is_post_policy = False
if peer_flags & (1 << 7):
peer_address = addrconv.ipv6.bin_to_text(buffer(peer_address))
else:
peer_address = addrconv.ipv4.bin_to_text(buffer(peer_address[:4]))
peer_bgp_id = addrconv.ipv4.bin_to_text(buffer(peer_bgp_id))
timestamp = float(timestamp1) + timestamp2 * (10 ** -6)
return {
"peer_type": peer_type,
"is_post_policy": is_post_policy,
"peer_distinguisher": peer_distinguisher,
"peer_address": peer_address,
"peer_as": peer_as,
"peer_bgp_id": peer_bgp_id,
"timestamp": timestamp
}, rest
def serialize_tail(self):
flags = 0
if self.is_post_policy:
flags |= (1 << 6)
if ':' in self.peer_address:
flags |= (1 << 7)
peer_address = addrconv.ipv6.text_to_bin(self.peer_address)
else:
peer_address = addrconv.ipv4.text_to_bin(self.peer_address)
peer_bgp_id = addrconv.ipv4.text_to_bin(self.peer_bgp_id)
t1, t2 = [int(t) for t in ("%.6f" % self.timestamp).split('.')]
msg = bytearray(struct.pack(self._PEER_HDR_PACK_STR, self.peer_type,
flags, self.peer_distinguisher,
peer_address, self.peer_as,
peer_bgp_id, t1, t2))
return msg
@BMPMessage.register_type(BMP_MSG_ROUTE_MONITORING)
class BMPRouteMonitoring(BMPPeerMessage):
"""BMP Route Monitoring Message
========================== ===============================================
Attribute Description
========================== ===============================================
version Version. this packet lib defines BMP ver. 3
len Length field. Ignored when encoding.
type Type field. one of BMP\_MSG\_ constants.
peer_type The type of the peer.
peer_flags Provide more information about the peer.
peer_distinguisher Use for L3VPN router which can have multiple
instance.
peer_address The remote IP address associated with the TCP
session.
peer_as The Autonomous System number of the peer.
peer_bgp_id The BGP Identifier of the peer
timestamp The time when the encapsulated routes were
received.
bgp_update BGP Update PDU
========================== ===============================================
"""
def __init__(self, bgp_update, peer_type, is_post_policy,
peer_distinguisher, peer_address, peer_as, peer_bgp_id,
timestamp, version=VERSION, type_=BMP_MSG_ROUTE_MONITORING,
len_=None):
super(BMPRouteMonitoring,
self).__init__(peer_type=peer_type,
is_post_policy=is_post_policy,
peer_distinguisher=peer_distinguisher,
peer_address=peer_address,
peer_as=peer_as,
peer_bgp_id=peer_bgp_id,
timestamp=timestamp,
len_=len_,
type_=type_,
version=version)
self.bgp_update = bgp_update
@classmethod
def parser(cls, buf):
kwargs, buf = super(BMPRouteMonitoring, cls).parser(buf)
bgp_update, buf = BGPMessage.parser(buf)
kwargs['bgp_update'] = bgp_update
return kwargs
def serialize_tail(self):
msg = super(BMPRouteMonitoring, self).serialize_tail()
msg += self.bgp_update.serialize()
return msg
@BMPMessage.register_type(BMP_MSG_STATISTICS_REPORT)
class BMPStatisticsReport(BMPPeerMessage):
"""BMP Statistics Report Message
========================== ===============================================
Attribute Description
========================== ===============================================
version Version. this packet lib defines BMP ver. 3
len Length field. Ignored when encoding.
type Type field. one of BMP\_MSG\_ constants.
peer_type The type of the peer.
peer_flags Provide more information about the peer.
peer_distinguisher Use for L3VPN router which can have multiple
instance.
peer_address The remote IP address associated with the TCP
session.
peer_as The Autonomous System number of the peer.
peer_bgp_id The BGP Identifier of the peer
timestamp The time when the encapsulated routes were
received.
stats Statistics (one or more stats encoded as a TLV)
========================== ===============================================
"""
_TLV_PACK_STR = '!HH'
_MIN_LEN = struct.calcsize(_TLV_PACK_STR)
def __init__(self, stats, peer_type, is_post_policy, peer_distinguisher,
peer_address, peer_as, peer_bgp_id, timestamp,
version=VERSION, type_=BMP_MSG_STATISTICS_REPORT, len_=None):
super(BMPStatisticsReport,
self).__init__(peer_type=peer_type,
is_post_policy=is_post_policy,
peer_distinguisher=peer_distinguisher,
peer_address=peer_address,
peer_as=peer_as,
peer_bgp_id=peer_bgp_id,
timestamp=timestamp,
len_=len_,
type_=type_,
version=version)
self.stats = stats
@classmethod
def parser(cls, buf):
kwargs, rest = super(BMPStatisticsReport, cls).parser(buf)
stats_count, = struct.unpack_from('!I', buffer(rest))
buf = rest[struct.calcsize('!I'):]
stats = []
while len(buf):
if len(buf) < cls._MIN_LEN:
raise stream_parser.StreamParser.TooSmallException(
'%d < %d' % (len(buf), cls._MIN_LEN))
(type_, len_) = struct.unpack_from(cls._TLV_PACK_STR, buffer(buf))
if len(buf) < (cls._MIN_LEN + len_):
raise stream_parser.StreamParser.TooSmallException(
'%d < %d' % (len(buf), cls._MIN_LEN + len_))
value = buf[cls._MIN_LEN:cls._MIN_LEN + len_]
if type_ == BMP_STAT_TYPE_REJECTED or \
type_ == BMP_STAT_TYPE_DUPLICATE_PREFIX or \
type_ == BMP_STAT_TYPE_DUPLICATE_WITHDRAW or \
type_ == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_CLUSTER_LIST_LOOP or \
type_ == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_PATH_LOOP or \
type_ == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_ORIGINATOR_ID or \
type_ == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_CONFED_LOOP:
value, = struct.unpack_from('!I', buffer(value))
elif type_ == BMP_STAT_TYPE_ADJ_RIB_IN or \
type_ == BMP_STAT_TYPE_LOC_RIB:
value, = struct.unpack_from('!Q', buffer(value))
buf = buf[cls._MIN_LEN + len_:]
stats.append({'type': type_, 'len': len_, 'value': value})
kwargs['stats'] = stats
return kwargs
def serialize_tail(self):
msg = super(BMPStatisticsReport, self).serialize_tail()
stats_count = len(self.stats)
msg += bytearray(struct.pack('!I', stats_count))
for v in self.stats:
t = v['type']
if t == BMP_STAT_TYPE_REJECTED or \
t == BMP_STAT_TYPE_DUPLICATE_PREFIX or \
t == BMP_STAT_TYPE_DUPLICATE_WITHDRAW or \
t == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_CLUSTER_LIST_LOOP or \
t == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_PATH_LOOP or \
t == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_ORIGINATOR_ID or \
t == BMP_STAT_TYPE_INV_UPDATE_DUE_TO_AS_CONFED_LOOP:
valuepackstr = 'I'
elif t == BMP_STAT_TYPE_ADJ_RIB_IN or \
t == BMP_STAT_TYPE_LOC_RIB:
valuepackstr = 'Q'
else:
continue
v['len'] = struct.calcsize(valuepackstr)
msg += bytearray(struct.pack(self._TLV_PACK_STR + valuepackstr,
t, v['len'], v['value']))
return msg
@BMPMessage.register_type(BMP_MSG_PEER_DOWN_NOTIFICATION)
class BMPPeerDownNotification(BMPPeerMessage):
"""BMP Peer Down Notification Message
========================== ===============================================
Attribute Description
========================== ===============================================
version Version. this packet lib defines BMP ver. 3
len Length field. Ignored when encoding.
type Type field. one of BMP\_MSG\_ constants.
reason Reason indicates why the session was closed.
data vary by the reason.
========================== ===============================================
"""
def __init__(self, reason, data, peer_type, is_post_policy,
peer_distinguisher, peer_address, peer_as, peer_bgp_id,
timestamp, version=VERSION,
type_=BMP_MSG_PEER_DOWN_NOTIFICATION, len_=None):
super(BMPPeerDownNotification,
self).__init__(peer_type=peer_type,
is_post_policy=is_post_policy,
peer_distinguisher=peer_distinguisher,
peer_address=peer_address,
peer_as=peer_as,
peer_bgp_id=peer_bgp_id,
timestamp=timestamp,
len_=len_,
type_=type_,
version=version)
self.reason = reason
self.data = data
@classmethod
def parser(cls, buf):
kwargs, buf = super(BMPPeerDownNotification, cls).parser(buf)
reason, = struct.unpack_from('!B', buffer(buf))
buf = buf[struct.calcsize('!B'):]
if reason == BMP_PEER_DOWN_REASON_LOCAL_BGP_NOTIFICATION:
data, rest = BGPMessage.parser(buf)
elif reason == BMP_PEER_DOWN_REASON_LOCAL_NO_NOTIFICATION:
data = struct.unpack_from('!H', buffer(buf))
elif reason == BMP_PEER_DOWN_REASON_REMOTE_BGP_NOTIFICATION:
data, rest = BGPMessage.parser(buf)
elif reason == BMP_PEER_DOWN_REASON_REMOTE_NO_NOTIFICATION:
data = None
else:
reason = BMP_PEER_DOWN_REASON_UNKNOWN
data = buf
kwargs['reason'] = reason
kwargs['data'] = data
return kwargs
def serialize_tail(self):
msg = super(BMPPeerDownNotification, self).serialize_tail()
msg += struct.pack('!B', self.reason)
if self.reason == BMP_PEER_DOWN_REASON_LOCAL_BGP_NOTIFICATION:
msg += self.data.serialize()
elif self.reason == BMP_PEER_DOWN_REASON_LOCAL_NO_NOTIFICATION:
msg += struct.pack('!H', self.data)
elif self.reason == BMP_PEER_DOWN_REASON_REMOTE_BGP_NOTIFICATION:
msg += self.data.serialize()
elif self.reason == BMP_PEER_DOWN_REASON_UNKNOWN:
msg += str(self.data)
return msg
@BMPMessage.register_type(BMP_MSG_PEER_UP_NOTIFICATION)
class BMPPeerUpNotification(BMPPeerMessage):
"""BMP Peer Up Notification Message
========================== ===============================================
Attribute Description
========================== ===============================================
version Version. this packet lib defines BMP ver. 3
len Length field. Ignored when encoding.
type Type field. one of BMP\_MSG\_ constants.
peer_type The type of the peer.
peer_flags Provide more information about the peer.
peer_distinguisher Use for L3VPN router which can have multiple
instance.
peer_address The remote IP address associated with the TCP
session.
peer_as The Autonomous System number of the peer.
peer_bgp_id The BGP Identifier of the peer
timestamp The time when the encapsulated routes were
received.
local_address The local IP address associated with the
peering TCP session.
local_port The local port number associated with the
peering TCP session.
remote_port The remote port number associated with the
peering TCP session.
sent_open_message The full OPEN message transmitted by the
monitored router to its peer.
received_open_message The full OPEN message received by the monitored
router from its peer.
========================== ===============================================
"""
_PACK_STR = '!16sHH'
_MIN_LEN = struct.calcsize(_PACK_STR)
def __init__(self, local_address, local_port, remote_port,
sent_open_message, received_open_message,
peer_type, is_post_policy, peer_distinguisher,
peer_address, peer_as, peer_bgp_id, timestamp,
version=VERSION, type_=BMP_MSG_PEER_UP_NOTIFICATION,
len_=None):
super(BMPPeerUpNotification,
self).__init__(peer_type=peer_type,
is_post_policy=is_post_policy,
peer_distinguisher=peer_distinguisher,
peer_address=peer_address,
peer_as=peer_as,
peer_bgp_id=peer_bgp_id,
timestamp=timestamp,
len_=len_,
type_=type_,
version=version)
self.local_address = local_address
self.local_port = local_port
self.remote_port = remote_port
self.sent_open_message = sent_open_message
self.received_open_message = received_open_message
@classmethod
def parser(cls, buf):
kwargs, rest = super(BMPPeerUpNotification, cls).parser(buf)
(local_address, local_port,
remote_port) = struct.unpack_from(cls._PACK_STR, buffer(rest))
local_address = buffer(local_address)
if '.' in kwargs['peer_address']:
local_address = addrconv.ipv4.bin_to_text(local_address[:4])
elif ':' in kwargs['peer_address']:
local_address = addrconv.ipv6.bin_to_text(local_address)
else:
raise ValueError("invalid local_address: %s" % local_address)
kwargs['local_address'] = local_address
kwargs['local_port'] = local_port
kwargs['remote_port'] = remote_port
rest = rest[cls._MIN_LEN:]
sent_open_msg, rest = BGPMessage.parser(rest)
received_open_msg, rest = BGPMessage.parser(rest)
kwargs['sent_open_message'] = sent_open_msg
kwargs['received_open_message'] = received_open_msg
return kwargs
def serialize_tail(self):
msg = super(BMPPeerUpNotification, self).serialize_tail()
if '.' in self.local_address:
local_address = addrconv.ipv4.text_to_bin(self.local_address)
elif ':' in self.local_address:
local_address = addrconv.ipv6.text_to_bin(self.local_address)
else:
raise ValueError("invalid local_address: %s" % self.local_address)
msg += struct.pack(self._PACK_STR, local_address,
self.local_port, self.remote_port)
msg += self.sent_open_message.serialize()
msg += self.received_open_message.serialize()
return msg
@BMPMessage.register_type(BMP_MSG_INITIATION)
class BMPInitiation(BMPMessage):
"""BMP Initiation Message
========================== ===============================================
Attribute Description
========================== ===============================================
version Version. this packet lib defines BMP ver. 3
len Length field. Ignored when encoding.
type Type field. one of BMP\_MSG\_ constants.
info One or more piece of information encoded as a
TLV
========================== ===============================================
"""
_TLV_PACK_STR = '!HH'
_MIN_LEN = struct.calcsize(_TLV_PACK_STR)
def __init__(self, info, type_=BMP_MSG_INITIATION, len_=None,
version=VERSION):
super(BMPInitiation, self).__init__(type_, len_, version)
self.info = info
@classmethod
def parser(cls, buf):
info = []
while len(buf):
if len(buf) < cls._MIN_LEN:
raise stream_parser.StreamParser.TooSmallException(
'%d < %d' % (len(buf), cls._MIN_LEN))
(type_, len_) = struct.unpack_from(cls._TLV_PACK_STR, buffer(buf))
if len(buf) < (cls._MIN_LEN + len_):
raise stream_parser.StreamParser.TooSmallException(
'%d < %d' % (len(buf), cls._MIN_LEN + len_))
value = buf[cls._MIN_LEN:cls._MIN_LEN + len_]
if type_ == BMP_INIT_TYPE_STRING:
value = value.decode('utf-8')
buf = buf[cls._MIN_LEN + len_:]
info.append({'type': type_, 'len': len_, 'value': value})
return {'info': info}
def serialize_tail(self):
msg = bytearray()
for v in self.info:
if v['type'] == BMP_INIT_TYPE_STRING:
value = v['value'].encode('utf-8')
else:
value = v['value']
v['len'] = len(value)
msg += struct.pack(self._TLV_PACK_STR, v['type'], v['len'])
msg += value
return msg
@BMPMessage.register_type(BMP_MSG_TERMINATION)
class BMPTermination(BMPMessage):
"""BMP Termination Message
========================== ===============================================
Attribute Description
========================== ===============================================
version Version. this packet lib defines BMP ver. 3
len Length field. Ignored when encoding.
type Type field. one of BMP\_MSG\_ constants.
info One or more piece of information encoded as a
TLV
========================== ===============================================
"""
_TLV_PACK_STR = '!HH'
_MIN_LEN = struct.calcsize(_TLV_PACK_STR)
def __init__(self, info, type_=BMP_MSG_TERMINATION, len_=None,
version=VERSION):
super(BMPTermination, self).__init__(type_, len_, version)
self.info = info
@classmethod
def parser(cls, buf):
info = []
while len(buf):
if len(buf) < cls._MIN_LEN:
raise stream_parser.StreamParser.TooSmallException(
'%d < %d' % (len(buf), cls._MIN_LEN))
(type_, len_) = struct.unpack_from(cls._TLV_PACK_STR, buffer(buf))
if len(buf) < (cls._MIN_LEN + len_):
raise stream_parser.StreamParser.TooSmallException(
'%d < %d' % (len(buf), cls._MIN_LEN + len_))
value = buf[cls._MIN_LEN:cls._MIN_LEN + len_]
if type_ == BMP_TERM_TYPE_STRING:
value = value.decode('utf-8')
elif type_ == BMP_TERM_TYPE_REASON:
value, = struct.unpack_from('!H', buffer(value))
buf = buf[cls._MIN_LEN + len_:]
info.append({'type': type_, 'len': len_, 'value': value})
return {'info': info}
def serialize_tail(self):
msg = bytearray()
for v in self.info:
if v['type'] == BMP_TERM_TYPE_STRING:
value = v['value'].encode('utf-8')
elif v['type'] == BMP_TERM_TYPE_REASON:
value = struct.pack('!H', v['value'])
v['len'] = len(value)
msg += struct.pack(self._TLV_PACK_STR, v['type'], v['len'])
msg += value
return msg
| apache-2.0 | 8,440,368,976,161,723,000 | 37.568988 | 78 | 0.506013 | false |
GerryPy/GerryPy | gerrypy/scripts/fish_scales.py | 1 | 12945 | """
Pull tract information from database,
compute new congressional districts based on criteria in request,
and update the database.
"""
from gerrypy.models.mymodel import Tract, Edge
import networkx as nx
from gerrypy.scripts.assigndistrict import assign_district
def fill_graph(request):
"""Build state graph from tract and edge databases."""
graph = nx.Graph()
tracts = request.dbsession.query(Tract).all()
edges = request.dbsession.query(Edge).all()
for tract in tracts:
graph.add_node(tract)
for edge in edges:
source = request.dbsession.query(Tract).get(edge.tract_source)
target = request.dbsession.query(Tract).get(edge.tract_target)
graph.add_edge(source, target)
return graph
class OccupiedDist(object):
"""A stucture to contain and separate tracts in a State object.
add_node(self, node): adds node to nodes and updates district
properties accordingly
rem_node(self, node): removes node from nodes and updates district
properties accordingly
"""
def __init__(self, districtID, state_graph, tracts=None):
"""Initialize the OccupiedDist Object."""
self.nodes = nx.Graph()
self.perimeter = []
self.population = 0
self.area = 0
self.districtID = districtID
if tracts:
try:
for tract in tracts:
self.add_node(tract, state_graph)
except TypeError:
raise TypeError('Tracts must be iterable.')
def add_node(self, node, state_graph):
"""Add node to nodes and updates district properties."""
node.districtid = self.districtID
self.nodes.add_node(node)
neighbors = state_graph.neighbors(node)
if node in self.perimeter:
self.perimeter.remove(node)
for neighbor in neighbors: # After node is added, make the edge connections within the occupied district.
if neighbor in self.nodes.nodes():
self.nodes.add_edge(neighbor, node)
if neighbor not in self.nodes.nodes() and neighbor not in self.perimeter:
self.perimeter.append(neighbor)
self.population += node.tract_pop
self.area += node.shape_area
def rem_node(self, node, state_graph):
"""Remove node from nodes and updates district properties."""
self.population -= node.tract_pop
self.nodes.remove_node(node)
self.area -= node.shape_area
neighbors = state_graph.neighbors(node)
to_perimeter = False
for neighbor in neighbors: # Decide whether to remove nodes from the district perimeter.
takeout = True
if neighbor in self.perimeter: # if its a perimeter node,
neighborneighbors = state_graph.neighbors(neighbor)
for neighborneighbor in neighborneighbors: # check its neighbors
if neighborneighbor in self.nodes.nodes(): # if it has a neighbor in the district
takeout = False # it should remain in the perimeter list.
if takeout: # If it should be removed,
self.perimeter.remove(neighbor) # Remove it!
elif neighbor in self.nodes.nodes(): # If the removed node neighbors the district (which it should)
to_perimeter = True # mark it to be added to the perimeter
if to_perimeter: # If its marked,
self.perimeter.append(node) # add it to the perimeter
class UnoccupiedDist(OccupiedDist):
"""A structure to contain tracts that haven't been claimed by a district.
add_node(self, node): adds node to nodes and updates district
properties accordingly
rem_node(self, node): removes node from nodes and updates district
properties accordingly
"""
def __init__(self, districtID, state_graph, tracts=None):
"""Initialize the UnoccupiedDist Object."""
self.nodes = nx.Graph()
self.perimeter = []
self.population = 0
self.area = 0
self.districtID = districtID
if tracts:
try:
for tract in tracts:
self.add_node(tract, state_graph)
except TypeError:
raise TypeError('Tracts must be iterable.')
def add_node(self, node, state_graph):
"""Add node to nodes and updates district properties accordingly."""
node.districtid = None
self.nodes.add_node(node)
for neighbor in state_graph.neighbors(node):
if neighbor in self.nodes:
self.nodes.add_edge(neighbor, node)
self.population += node.tract_pop
self.area += node.shape_area
neighbors = state_graph.neighbors(node)
to_add = False
for neighbor in neighbors: # Handling which nodes to add or remove from the perimeter.
takeout = True
if neighbor in self.perimeter:
neighborneighbors = state_graph.neighbors(neighbor)
for neighborneighbor in neighborneighbors:
if neighborneighbor not in self.nodes:
takeout = False
if takeout:
self.perimeter.remove(neighbor)
if neighbor not in self.nodes:
to_add = True
if to_add:
self.perimeter.append(node)
def rem_node(self, node, state_graph):
"""Remove node from nodes and updates district properties accordingly."""
self.population -= node.tract_pop
self.area -= node.shape_area
if node in self.perimeter:
self.perimeter.remove(node)
neighbors = self.nodes.neighbors(node)
for neighbor in neighbors:
if neighbor not in self.perimeter:
self.perimeter.append(neighbor)
self.nodes.remove_node(node)
class State(object):
"""Manages how tracts are distributed into districts in a particular state.
    build_district(self, tgt_population, dist_num, criteria):
    creates a new district with the given target population, growing it from the best available start tract
    fill_state(self, criteria): continues to build districts until all unoccupied tracts are claimed
"""
def __init__(self, request, num_dst):
"""Initialize the State Object."""
self.unoccupied = []
self.districts = []
self.population = 0
self.area = 0
self.num_dst = num_dst # The Number of districts alotted for that state (7 for Colorado)
self.request = request
self.state_graph = fill_graph(self.request)
landmass = nx.connected_components(self.state_graph) # Returns all of the connected/contiguous areas of land for a state.
for island in landmass:
unoc = UnoccupiedDist(None, self.state_graph, tracts=island) # needs the state graph for its edges
for tract in unoc.nodes.nodes():
if tract.isborder == 1: # This is a hardcoded field for Colorado. A challenge of adding more states is finding these automatically.
unoc.perimeter.append(tract) # begin with all border tracts in the perimeter.
self.population += unoc.population
self.unoccupied.append(unoc)
self.area += unoc.area
def fill_state(self, criteria):
"""Build districts until all unoccupied tracts are claimed."""
for num in range(self.num_dst):
rem_pop = 0
for unoc in self.unoccupied:
rem_pop += unoc.population
rem_dist = self.num_dst - len(self.districts)
tgt_population = rem_pop / rem_dist # Average available population is the target population. It helps ensure the State gets totally filled.
self.build_district(tgt_population, num + 1, criteria)
assign_district(self.request, self.state_graph)
def build_district(self, tgt_population, dist_num, criteria):
"""Create a new district stemming from the start node with a given population."""
dst = OccupiedDist(dist_num, self.state_graph)
self.districts.append(dst)
start = self.find_start()
        self.swap(dst, start) # if the state is already full, this won't work
while True:
new_tract = self.select_next(dst, criteria)
if new_tract is None: # If there are no more nodes in unoccupied, this will be None
for unoc in self.unoccupied: # This ends the building process
if not len(unoc.nodes.nodes()):
self.unoccupied.remove(unoc)
break
high_pop = (new_tract.tract_pop + dst.population) # Population including the next tract.
if abs(high_pop - tgt_population) > abs(dst.population - tgt_population): # If the population including the next district is further from the goal,
break # We stop building that district
else:
self.swap(dst, new_tract) # Swap removes the tract from its unoccupied district and adds it to the occupied district.
neighbors = self.state_graph.neighbors(new_tract)
unassigned_neighbors = [neighbor for neighbor in neighbors if neighbor in self.unoccupied[0].nodes] # Grab the new nodes unassigned neighbors
if len(unassigned_neighbors) > 1: # If there is more than one, than a split is possible.
for i in range(len(unassigned_neighbors)):
if not nx.has_path( # We check each node and its previous neighbor to ensure they're connected. (thanks, nx)
self.unoccupied[0].nodes,
unassigned_neighbors[i],
unassigned_neighbors[i - 1]
): # If there is a split in the unoccupied district...
unoc_neighbors = [x for x in nx.connected_components(self.unoccupied[0].nodes)] # Identify each of the distinct unoccupied districts.
biggest = max(unoc_neighbors, key=lambda x: len(x))
unoc_neighbors.remove(biggest) # Ignore the largest (This should be highest pop, fixed in different version)
# All unoccupied districts will be bordering, because as soon as there is a split, we do this.
for neigh in unoc_neighbors: # Consume all of the rest (usually one small one)
for tract in neigh: # This sometimes gives us a district that is too large, and is the major focus of improving the algorithm.
self.swap(dst, tract)
break
def swap(self, dst, new_tract):
"""Exchange tract from unoccupied district to district."""
self.unoccupied[0].rem_node(new_tract, self.state_graph)
dst.add_node(new_tract, self.state_graph)
def select_next(self, dst, criteria):
"""Choose the next best tract to add to growing district."""
best_rating = 0
best = None # We're building a score for each node based on our criteria, and saving the best.
for perimeter_tract in dst.perimeter: # dst.perimeter is every node bordering that district.
if perimeter_tract.districtid is None: # Grab those without a district assigned
count = 0 # Check how many tracts that tract borders that are ALREADY in the district. More borders gets more points
for neighbor in self.state_graph.neighbors(perimeter_tract):
if neighbor.districtid == dst.districtID:
count += 1
counties = set() # If the tracts county is in the district already, it gets a point.
for node in dst.nodes:
counties.add(node.county)
same_county = 0
if perimeter_tract.county in counties:
same_county = 1
rating = count * int(criteria['compactness']) + same_county * int(criteria['county']) # Calculate score based on criteria.
if rating > best_rating:
best_rating = rating
best = perimeter_tract
return best
def find_start(self):
"""
Choose best starting tract for a new district.
Based on number of bordering districts.
"""
best_set = set()
best = None
for tract in self.unoccupied[0].perimeter:
unique_dists = set()
for neighbor in self.state_graph.neighbors(tract):
for dst in self.districts:
if neighbor in dst.nodes.nodes():
unique_dists.add(dst)
if len(unique_dists) > len(best_set) or len(unique_dists) == 0:
best_set = unique_dists
best = tract
return best
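# Illustrative sketch: the State docstring above describes the intended call
# sequence.  'request' is assumed to be a Pyramid-style request whose dbsession
# exposes the Tract and Edge tables; the criteria weights and the district
# count (7, as for Colorado) are illustrative values only.
def _example_redistricting(request):
    state = State(request, num_dst=7)
    criteria = {'compactness': 3, 'county': 1}    # hypothetical weighting
    state.fill_state(criteria)                    # builds and assigns districts
    return state.districts                        # list of OccupiedDist objects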
| mit | -5,427,844,483,483,254,000 | 47.483146 | 162 | 0.60757 | false |
mnieber/shared-goals | django/goal/templates/goal/edit_goal_html.py | 2 | 2302 | from django_dominate.django_tags import *
from dominate.tags import *
from dominate.util import text
from goal.templates.dominate_tags import *
@form(
method="post",
enctype="multipart/form-data",
)
def goal_form():
django_csrf_token()
with p():
with django_if("show_errors"):
text("{{ form.title.errors }}")
with label(
_for="{{ form.title.id_for_label }}",
_class="form-label"
):
text("Title")
input_(
id="id_title",
type="text",
name="title",
maxlength="100",
value="{{ form.title.value }}",
_class="form-field"
)
with django_if("show_image_form"):
with div(_class="goal-form--image"):
text("{{ crop_settings|django_jcrop_widget }}")
with p():
with django_if("show_errors"):
text("{{ form.image.errors }}")
with label(
_for="{{ form.image.id_for_label }}",
_class="form-label"
):
text(
"Upload an image to illustrate your goal")
text("{{ form.image }}")
button(
"Upload",
id="upload-submit",
name="submit",
value="upload"
)
with div():
with div(_class="small-gap-above small-gap-below"):
with label(
_class="form-label"
):
text("{{ submit_button_header }}")
button(
"{{ post_button_label }}",
name="submit",
value="save"
)
button(
"Cancel",
name="submit",
value="cancel"
)
def result():
with django_block("head") as head:
text("{{ form.media }}")
with django_block("content") as content:
with div(_class="row"):
column(2)
with column(8):
goal_form()
text("{% init_django_jcrop %}")
return (
"{% extends 'base.html' %}",
"{% load django_jcrop_tags %}",
head,
content,
)
| apache-2.0 | -7,100,782,973,196,199,000 | 24.577778 | 66 | 0.425717 | false |
girke-lab/chemmine-ei | src/eutils/fpdbcompare.py | 1 | 1474 | """
Calculating distances to all reference compounds
"""
import os
import sys
from pexpect import spawn, TIMEOUT
bin = 'ei-fp_db_compare_server'
exchange_fp = "/tmp/__db_compare.in"
from time import time
from signal import SIGINT
class DBComparerStartupError(Exception):
pass
class ComparerError(Exception):
pass
class DBComparer(object):
"""The db comparer wrapper class"""
def __init__(self, reference):
"""start the process"""
self.cmd = bin + " " + reference
self.start()
def start(self):
try:
self.child = spawn(self.cmd)
self.child.logfile_read = sys.stderr
self.child.expect_exact('ready', timeout=4)
except TIMEOUT:
try: self.child.terminate(force=True)
except: pass
sys.stderr.write("Error: Cannot start fp_db_compare_server\n")
raise DBComparerStartupError
def close(self):
self.child.close(force=True)
def tell(self, line):
f = file(exchange_fp, 'w')
if not line.endswith('\n'):
line += '\n'
f.write(line)
f.close()
self.child.kill(SIGINT)
def compare(self, db_in, out):
if not self.child.isalive():
self.start()
start = time()
self.tell('%s\n%s\n' % (db_in, out))
start = time()
index = self.child.expect_exact(['OK:', 'Input:', TIMEOUT])
if index == 0:
fp = self.child.readline()
return fp
elif index == 1:
raise ComparerError
elif index == 2:
self.close()
self.start()
            raise ComparerError
else:
self.close()
self.start()
            raise ComparerError
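# Illustrative sketch: driving the wrapper above.  The database and output file
# names are hypothetical, and the 'ei-fp_db_compare_server' binary is assumed to
# be on the PATH together with the given reference database.
def _example_compare():
    comparer = DBComparer('reference.cdb')            # spawns the server process
    try:
        line = comparer.compare('queries.cdb', 'distances.out')
    finally:
        comparer.close()
    return line                                       # text following 'OK:'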
| gpl-3.0 | 2,112,102,640,835,268,600 | 20.362319 | 65 | 0.66825 | false |
AlphaX2/FotoShareN9 | 0.9.7/fotoshare_deb/opt/FotoShareN9/paramiko/auth_handler.py | 26 | 16421 | # Copyright (C) 2003-2007 Robey Pointer <[email protected]>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
L{AuthHandler}
"""
import threading
import weakref
# this helps freezing utils
import encodings.utf_8
from paramiko.common import *
from paramiko import util
from paramiko.message import Message
from paramiko.ssh_exception import SSHException, AuthenticationException, \
BadAuthenticationType, PartialAuthentication
from paramiko.server import InteractiveQuery
class AuthHandler (object):
"""
Internal class to handle the mechanics of authentication.
"""
def __init__(self, transport):
self.transport = weakref.proxy(transport)
self.username = None
self.authenticated = False
self.auth_event = None
self.auth_method = ''
self.password = None
self.private_key = None
self.interactive_handler = None
self.submethods = None
# for server mode:
self.auth_username = None
self.auth_fail_count = 0
def is_authenticated(self):
return self.authenticated
def get_username(self):
if self.transport.server_mode:
return self.auth_username
else:
return self.username
def auth_none(self, username, event):
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = 'none'
self.username = username
self._request_auth()
finally:
self.transport.lock.release()
def auth_publickey(self, username, key, event):
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = 'publickey'
self.username = username
self.private_key = key
self._request_auth()
finally:
self.transport.lock.release()
def auth_password(self, username, password, event):
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = 'password'
self.username = username
self.password = password
self._request_auth()
finally:
self.transport.lock.release()
def auth_interactive(self, username, handler, event, submethods=''):
"""
response_list = handler(title, instructions, prompt_list)
"""
self.transport.lock.acquire()
try:
self.auth_event = event
self.auth_method = 'keyboard-interactive'
self.username = username
self.interactive_handler = handler
self.submethods = submethods
self._request_auth()
finally:
self.transport.lock.release()
def abort(self):
if self.auth_event is not None:
self.auth_event.set()
### internals...
def _request_auth(self):
m = Message()
m.add_byte(chr(MSG_SERVICE_REQUEST))
m.add_string('ssh-userauth')
self.transport._send_message(m)
def _disconnect_service_not_available(self):
m = Message()
m.add_byte(chr(MSG_DISCONNECT))
m.add_int(DISCONNECT_SERVICE_NOT_AVAILABLE)
m.add_string('Service not available')
m.add_string('en')
self.transport._send_message(m)
self.transport.close()
def _disconnect_no_more_auth(self):
m = Message()
m.add_byte(chr(MSG_DISCONNECT))
m.add_int(DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE)
m.add_string('No more auth methods available')
m.add_string('en')
self.transport._send_message(m)
self.transport.close()
def _get_session_blob(self, key, service, username):
m = Message()
m.add_string(self.transport.session_id)
m.add_byte(chr(MSG_USERAUTH_REQUEST))
m.add_string(username)
m.add_string(service)
m.add_string('publickey')
m.add_boolean(1)
m.add_string(key.get_name())
m.add_string(str(key))
return str(m)
def wait_for_response(self, event):
while True:
event.wait(0.1)
if not self.transport.is_active():
e = self.transport.get_exception()
if (e is None) or issubclass(e.__class__, EOFError):
e = AuthenticationException('Authentication failed.')
raise e
if event.isSet():
break
if not self.is_authenticated():
e = self.transport.get_exception()
if e is None:
e = AuthenticationException('Authentication failed.')
# this is horrible. python Exception isn't yet descended from
# object, so type(e) won't work. :(
if issubclass(e.__class__, PartialAuthentication):
return e.allowed_types
raise e
return []
def _parse_service_request(self, m):
service = m.get_string()
if self.transport.server_mode and (service == 'ssh-userauth'):
# accepted
m = Message()
m.add_byte(chr(MSG_SERVICE_ACCEPT))
m.add_string(service)
self.transport._send_message(m)
return
# dunno this one
self._disconnect_service_not_available()
def _parse_service_accept(self, m):
service = m.get_string()
if service == 'ssh-userauth':
self.transport._log(DEBUG, 'userauth is OK')
m = Message()
m.add_byte(chr(MSG_USERAUTH_REQUEST))
m.add_string(self.username)
m.add_string('ssh-connection')
m.add_string(self.auth_method)
if self.auth_method == 'password':
m.add_boolean(False)
password = self.password
if isinstance(password, unicode):
password = password.encode('UTF-8')
m.add_string(password)
elif self.auth_method == 'publickey':
m.add_boolean(True)
m.add_string(self.private_key.get_name())
m.add_string(str(self.private_key))
blob = self._get_session_blob(self.private_key, 'ssh-connection', self.username)
sig = self.private_key.sign_ssh_data(self.transport.rng, blob)
m.add_string(str(sig))
elif self.auth_method == 'keyboard-interactive':
m.add_string('')
m.add_string(self.submethods)
elif self.auth_method == 'none':
pass
else:
raise SSHException('Unknown auth method "%s"' % self.auth_method)
self.transport._send_message(m)
else:
self.transport._log(DEBUG, 'Service request "%s" accepted (?)' % service)
def _send_auth_result(self, username, method, result):
# okay, send result
m = Message()
if result == AUTH_SUCCESSFUL:
self.transport._log(INFO, 'Auth granted (%s).' % method)
m.add_byte(chr(MSG_USERAUTH_SUCCESS))
self.authenticated = True
else:
self.transport._log(INFO, 'Auth rejected (%s).' % method)
m.add_byte(chr(MSG_USERAUTH_FAILURE))
m.add_string(self.transport.server_object.get_allowed_auths(username))
if result == AUTH_PARTIALLY_SUCCESSFUL:
m.add_boolean(1)
else:
m.add_boolean(0)
self.auth_fail_count += 1
self.transport._send_message(m)
if self.auth_fail_count >= 10:
self._disconnect_no_more_auth()
if result == AUTH_SUCCESSFUL:
self.transport._auth_trigger()
def _interactive_query(self, q):
# make interactive query instead of response
m = Message()
m.add_byte(chr(MSG_USERAUTH_INFO_REQUEST))
m.add_string(q.name)
m.add_string(q.instructions)
m.add_string('')
m.add_int(len(q.prompts))
for p in q.prompts:
m.add_string(p[0])
m.add_boolean(p[1])
self.transport._send_message(m)
def _parse_userauth_request(self, m):
if not self.transport.server_mode:
# er, uh... what?
m = Message()
m.add_byte(chr(MSG_USERAUTH_FAILURE))
m.add_string('none')
m.add_boolean(0)
self.transport._send_message(m)
return
if self.authenticated:
# ignore
return
username = m.get_string()
service = m.get_string()
method = m.get_string()
self.transport._log(DEBUG, 'Auth request (type=%s) service=%s, username=%s' % (method, service, username))
if service != 'ssh-connection':
self._disconnect_service_not_available()
return
if (self.auth_username is not None) and (self.auth_username != username):
self.transport._log(WARNING, 'Auth rejected because the client attempted to change username in mid-flight')
self._disconnect_no_more_auth()
return
self.auth_username = username
if method == 'none':
result = self.transport.server_object.check_auth_none(username)
elif method == 'password':
changereq = m.get_boolean()
password = m.get_string()
try:
password = password.decode('UTF-8')
except UnicodeError:
# some clients/servers expect non-utf-8 passwords!
# in this case, just return the raw byte string.
pass
if changereq:
# always treated as failure, since we don't support changing passwords, but collect
# the list of valid auth types from the callback anyway
self.transport._log(DEBUG, 'Auth request to change passwords (rejected)')
newpassword = m.get_string()
try:
newpassword = newpassword.decode('UTF-8', 'replace')
except UnicodeError:
pass
result = AUTH_FAILED
else:
result = self.transport.server_object.check_auth_password(username, password)
elif method == 'publickey':
sig_attached = m.get_boolean()
keytype = m.get_string()
keyblob = m.get_string()
try:
key = self.transport._key_info[keytype](Message(keyblob))
except SSHException, e:
self.transport._log(INFO, 'Auth rejected: public key: %s' % str(e))
key = None
except:
self.transport._log(INFO, 'Auth rejected: unsupported or mangled public key')
key = None
if key is None:
self._disconnect_no_more_auth()
return
# first check if this key is okay... if not, we can skip the verify
result = self.transport.server_object.check_auth_publickey(username, key)
if result != AUTH_FAILED:
# key is okay, verify it
if not sig_attached:
# client wants to know if this key is acceptable, before it
# signs anything... send special "ok" message
m = Message()
m.add_byte(chr(MSG_USERAUTH_PK_OK))
m.add_string(keytype)
m.add_string(keyblob)
self.transport._send_message(m)
return
sig = Message(m.get_string())
blob = self._get_session_blob(key, service, username)
if not key.verify_ssh_sig(blob, sig):
self.transport._log(INFO, 'Auth rejected: invalid signature')
result = AUTH_FAILED
elif method == 'keyboard-interactive':
lang = m.get_string()
submethods = m.get_string()
result = self.transport.server_object.check_auth_interactive(username, submethods)
if isinstance(result, InteractiveQuery):
# make interactive query instead of response
self._interactive_query(result)
return
else:
result = self.transport.server_object.check_auth_none(username)
# okay, send result
self._send_auth_result(username, method, result)
def _parse_userauth_success(self, m):
self.transport._log(INFO, 'Authentication (%s) successful!' % self.auth_method)
self.authenticated = True
self.transport._auth_trigger()
if self.auth_event != None:
self.auth_event.set()
def _parse_userauth_failure(self, m):
authlist = m.get_list()
partial = m.get_boolean()
if partial:
self.transport._log(INFO, 'Authentication continues...')
self.transport._log(DEBUG, 'Methods: ' + str(authlist))
self.transport.saved_exception = PartialAuthentication(authlist)
elif self.auth_method not in authlist:
self.transport._log(DEBUG, 'Authentication type (%s) not permitted.' % self.auth_method)
self.transport._log(DEBUG, 'Allowed methods: ' + str(authlist))
self.transport.saved_exception = BadAuthenticationType('Bad authentication type', authlist)
else:
self.transport._log(INFO, 'Authentication (%s) failed.' % self.auth_method)
self.authenticated = False
self.username = None
if self.auth_event != None:
self.auth_event.set()
def _parse_userauth_banner(self, m):
banner = m.get_string()
lang = m.get_string()
self.transport._log(INFO, 'Auth banner: ' + banner)
# who cares.
def _parse_userauth_info_request(self, m):
if self.auth_method != 'keyboard-interactive':
raise SSHException('Illegal info request from server')
title = m.get_string()
instructions = m.get_string()
m.get_string() # lang
prompts = m.get_int()
prompt_list = []
for i in range(prompts):
prompt_list.append((m.get_string(), m.get_boolean()))
response_list = self.interactive_handler(title, instructions, prompt_list)
m = Message()
m.add_byte(chr(MSG_USERAUTH_INFO_RESPONSE))
m.add_int(len(response_list))
for r in response_list:
m.add_string(r)
self.transport._send_message(m)
def _parse_userauth_info_response(self, m):
if not self.transport.server_mode:
raise SSHException('Illegal info response from server')
n = m.get_int()
responses = []
for i in range(n):
responses.append(m.get_string())
result = self.transport.server_object.check_auth_interactive_response(responses)
        if isinstance(result, InteractiveQuery):
# make interactive query instead of response
self._interactive_query(result)
return
self._send_auth_result(self.auth_username, 'keyboard-interactive', result)
_handler_table = {
MSG_SERVICE_REQUEST: _parse_service_request,
MSG_SERVICE_ACCEPT: _parse_service_accept,
MSG_USERAUTH_REQUEST: _parse_userauth_request,
MSG_USERAUTH_SUCCESS: _parse_userauth_success,
MSG_USERAUTH_FAILURE: _parse_userauth_failure,
MSG_USERAUTH_BANNER: _parse_userauth_banner,
MSG_USERAUTH_INFO_REQUEST: _parse_userauth_info_request,
MSG_USERAUTH_INFO_RESPONSE: _parse_userauth_info_response,
}
| gpl-3.0 | 7,699,957,758,928,900,000 | 37.546948 | 119 | 0.574874 | false |
timlinux/inasafe | safe/gis/vector/test/test_union.py | 2 | 2114 | # coding=utf-8
import unittest
from safe.definitions.constants import INASAFE_TEST
from safe.test.utilities import (
get_qgis_app,
load_test_vector_layer)
from safe.gis.vector.clean_geometry import clean_layer
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app(qsetting=INASAFE_TEST)
from safe.gis.vector.union import union
from safe.definitions.fields import hazard_class_field, hazard_value_field
__copyright__ = "Copyright 2016, The InaSAFE Project"
__license__ = "GPL version 3"
__email__ = "[email protected]"
__revision__ = '$Format:%H$'
class TestUnionVector(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_union(self):
"""Test we can union two layers like hazard and aggregation (1)."""
union_a = load_test_vector_layer(
'gisv4', 'hazard', 'classified_vector.geojson')
union_a.keywords['inasafe_fields'][hazard_class_field['key']] = (
union_a.keywords['inasafe_fields'][hazard_value_field['key']])
union_b = load_test_vector_layer(
'gisv4', 'aggregation', 'small_grid.geojson')
layer = union(union_a, union_b)
self.assertEqual(layer.featureCount(), 11)
self.assertEqual(
union_a.fields().count() + union_b.fields().count(),
layer.fields().count()
)
@unittest.expectedFailure
def test_union_error(self):
"""Test we can union two layers like hazard and aggregation (2)."""
union_a = clean_layer(load_test_vector_layer(
'gisv4', 'hazard', 'union_check_hazard.geojson'))
union_a.keywords['inasafe_fields'][hazard_class_field['key']] = (
union_a.keywords['inasafe_fields'][hazard_value_field['key']])
union_b = clean_layer(load_test_vector_layer(
'gisv4', 'aggregation', 'union_check_aggregation.geojson'))
layer = union(union_a, union_b)
self.assertEqual(layer.featureCount(), 11)
self.assertEqual(
union_a.fields().count() + union_b.fields().count(),
layer.fields().count()
)
| gpl-3.0 | 8,934,478,622,426,080,000 | 31.030303 | 75 | 0.628193 | false |
andzaytsev/deepnav | GA3C_LSTM_NAV_D/backup/Environment_gym.py | 7 | 3447 | # Copyright (c) 2016, NVIDIA CORPORATION. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of NVIDIA CORPORATION nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
if sys.version_info >= (3,0):
from queue import Queue
else:
from Queue import Queue
import numpy as np
import scipy.misc as misc
from Config import Config
from GameManager import GameManager
class Environment:
def __init__(self):
self.game = GameManager(Config.ATARI_GAME, display=Config.PLAY_MODE)
self.nb_frames = Config.STACKED_FRAMES
self.frame_q = Queue(maxsize=self.nb_frames)
self.previous_state = None
self.current_state = None
self.total_reward = 0
self.reset()
@staticmethod
def _rgb2gray(rgb):
return np.dot(rgb[..., :3], [0.299, 0.587, 0.114])
@staticmethod
def _preprocess(image):
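        # Convert the RGB frame to grayscale, resize it to the configured
        # frame size, and rescale pixel values from [0, 255] to roughly
        # [-1, 1) before it is pushed onto the frame queue.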
image = Environment._rgb2gray(image)
image = misc.imresize(image, [Config.IMAGE_HEIGHT, Config.IMAGE_WIDTH], 'bilinear')
image = image.astype(np.float32) / 128.0 - 1.0
return image
def _get_current_state(self):
if not self.frame_q.full():
return None # frame queue is not full yet.
x_ = np.array(self.frame_q.queue)
x_ = np.transpose(x_, [1, 2, 0]) # move channels
return x_
def _update_frame_q(self, frame):
if self.frame_q.full():
self.frame_q.get()
image = Environment._preprocess(frame)
self.frame_q.put(image)
def get_num_actions(self):
return self.game.env.action_space.n
def reset(self):
self.total_reward = 0
self.frame_q.queue.clear()
self._update_frame_q(self.game.reset())
self.previous_state = self.current_state = None
def step(self, action):
observation, reward, done, _ = self.game.step(action)
self.total_reward += reward
self._update_frame_q(observation)
self.previous_state = self.current_state
self.current_state = self._get_current_state()
return reward, done
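# Rough usage sketch (illustrative only; Config values and the wrapped gym
# environment are assumed to be set up elsewhere):
#
#   env = Environment()
#   for _ in range(1000):
#       action = env.game.env.action_space.sample()
#       reward, done = env.step(action)
#       if done:
#           env.reset()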
| gpl-2.0 | 8,339,360,293,789,449,000 | 36.467391 | 91 | 0.688425 | false |
deepfield/ibis | ibis/expr/analytics.py | 1 | 4739 | # Copyright 2015 Cloudera Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import ibis.expr.rules as rlz
import ibis.expr.datatypes as dt
import ibis.expr.operations as ops
from ibis.expr.signature import Argument as Arg
class BucketLike(ops.ValueOp):
@property
def nbuckets(self):
return None
def output_type(self):
dtype = dt.Category(self.nbuckets)
return dtype.array_type()
class Bucket(BucketLike):
arg = Arg(rlz.noop)
buckets = Arg(rlz.noop)
closed = Arg(rlz.isin({'left', 'right'}), default='left')
close_extreme = Arg(bool, default=True)
include_under = Arg(bool, default=False)
include_over = Arg(bool, default=False)
def _validate(self):
if not len(self.buckets):
raise ValueError('Must be at least one bucket edge')
elif len(self.buckets) == 1:
if not self.include_under or not self.include_over:
raise ValueError(
'If one bucket edge provided, must have '
'include_under=True and include_over=True'
)
@property
def nbuckets(self):
return len(self.buckets) - 1 + self.include_over + self.include_under
class Histogram(BucketLike):
arg = Arg(rlz.noop)
nbins = Arg(rlz.noop, default=None)
binwidth = Arg(rlz.noop, default=None)
base = Arg(rlz.noop, default=None)
closed = Arg(rlz.isin({'left', 'right'}), default='left')
aux_hash = Arg(rlz.noop, default=None)
def _validate(self):
if self.nbins is None:
if self.binwidth is None:
raise ValueError('Must indicate nbins or binwidth')
elif self.binwidth is not None:
raise ValueError('nbins and binwidth are mutually exclusive')
def output_type(self):
# always undefined cardinality (for now)
return dt.category.array_type()
class CategoryLabel(ops.ValueOp):
arg = Arg(rlz.category)
labels = Arg(rlz.noop)
nulls = Arg(rlz.noop, default=None)
output_type = rlz.shape_like('arg', dt.string)
def _validate(self):
cardinality = self.arg.type().cardinality
if len(self.labels) != cardinality:
raise ValueError('Number of labels must match number of '
'categories: {}'.format(cardinality))
def bucket(arg, buckets, closed='left', close_extreme=True,
include_under=False, include_over=False):
"""
Compute a discrete binning of a numeric array
Parameters
----------
arg : numeric array expression
buckets : list
closed : {'left', 'right'}, default 'left'
Which side of each interval is closed. For example
buckets = [0, 100, 200]
closed = 'left': 100 falls in 2nd bucket
closed = 'right': 100 falls in 1st bucket
close_extreme : boolean, default True
Returns
-------
bucketed : coded value expression
"""
op = Bucket(arg, buckets, closed=closed, close_extreme=close_extreme,
include_under=include_under, include_over=include_over)
return op.to_expr()
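# Example (illustrative), assuming `t` is an ibis table with a numeric
# column `price`:
#
#   cats = bucket(t.price, [0, 100, 200], closed='left')
#
# With closed='left' a value of exactly 100 falls into the second bucket;
# with closed='right' it falls into the first, as described above.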
def histogram(arg, nbins=None, binwidth=None, base=None, closed='left',
aux_hash=None):
"""
Compute a histogram with fixed width bins
Parameters
----------
arg : numeric array expression
nbins : int, default None
If supplied, will be used to compute the binwidth
binwidth : number, default None
If not supplied, computed from the data (actual max and min values)
base : number, default None
closed : {'left', 'right'}, default 'left'
Which side of each interval is closed
Returns
-------
histogrammed : coded value expression
"""
op = Histogram(arg, nbins, binwidth, base, closed=closed,
aux_hash=aux_hash)
return op.to_expr()
def category_label(arg, labels, nulls=None):
"""
Format a known number of categories as strings
Parameters
----------
labels : list of string
nulls : string, optional
How to label any null values among the categories
Returns
-------
string_categories : string value expression
"""
op = CategoryLabel(arg, labels, nulls)
return op.to_expr()
| apache-2.0 | -5,387,108,389,572,287,000 | 29.574194 | 77 | 0.639586 | false |
SickRage/SickRage | sickrage/libs/enzyme/language.py | 3 | 15136 | # Author: echel0n <[email protected]>
# URL: https://sickrage.ca
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import re
__all__ = ['resolve']
def resolve(code):
"""
Transform the given (2- or 3-letter) language code to a human readable
language name. The return value is a 2-tuple containing the given
language code and the language name. If the language code cannot be
resolved, name will be 'Unknown (<code>)'.
"""
if not code:
return None, None
if not isinstance(code, basestring):
raise ValueError('Invalid language code specified by parser')
# Take up to 3 letters from the code.
code = re.split(r'[^a-z]', code.lower())[0][:3]
for spec in codes:
if code in spec[:-1]:
return code, spec[-1]
return code, 'Unknown (%r)' % code
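# Examples (illustrative, derived from the table below):
#
#   resolve('eng')   -> ('eng', 'English')
#   resolve('en-US') -> ('en', 'English')
#   resolve('qqq')   -> ('qqq', "Unknown ('qqq')")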
# Parsed from http://www.loc.gov/standards/iso639-2/ISO-639-2_utf-8.txt
codes = (
('aar', 'aa', 'Afar'),
('abk', 'ab', 'Abkhazian'),
('ace', 'Achinese'),
('ach', 'Acoli'),
('ada', 'Adangme'),
('ady', 'Adyghe'),
('afa', 'Afro-Asiatic '),
('afh', 'Afrihili'),
('afr', 'af', 'Afrikaans'),
('ain', 'Ainu'),
('aka', 'ak', 'Akan'),
('akk', 'Akkadian'),
('alb', 'sq', 'Albanian'),
('ale', 'Aleut'),
('alg', 'Algonquian languages'),
('alt', 'Southern Altai'),
('amh', 'am', 'Amharic'),
('ang', 'English, Old '),
('anp', 'Angika'),
('apa', 'Apache languages'),
('ara', 'ar', 'Arabic'),
('arc', 'Official Aramaic '),
('arg', 'an', 'Aragonese'),
('arm', 'hy', 'Armenian'),
('arn', 'Mapudungun'),
('arp', 'Arapaho'),
('art', 'Artificial '),
('arw', 'Arawak'),
('asm', 'as', 'Assamese'),
('ast', 'Asturian'),
('ath', 'Athapascan languages'),
('aus', 'Australian languages'),
('ava', 'av', 'Avaric'),
('ave', 'ae', 'Avestan'),
('awa', 'Awadhi'),
('aym', 'ay', 'Aymara'),
('aze', 'az', 'Azerbaijani'),
('bad', 'Banda languages'),
('bai', 'Bamileke languages'),
('bak', 'ba', 'Bashkir'),
('bal', 'Baluchi'),
('bam', 'bm', 'Bambara'),
('ban', 'Balinese'),
('baq', 'eu', 'Basque'),
('bas', 'Basa'),
('bat', 'Baltic '),
('bej', 'Beja'),
('bel', 'be', 'Belarusian'),
('bem', 'Bemba'),
('ben', 'bn', 'Bengali'),
('ber', 'Berber '),
('bho', 'Bhojpuri'),
('bih', 'bh', 'Bihari'),
('bik', 'Bikol'),
('bin', 'Bini'),
('bis', 'bi', 'Bislama'),
('bla', 'Siksika'),
('bnt', 'Bantu '),
('bos', 'bs', 'Bosnian'),
('bra', 'Braj'),
('bre', 'br', 'Breton'),
('btk', 'Batak languages'),
('bua', 'Buriat'),
('bug', 'Buginese'),
('bul', 'bg', 'Bulgarian'),
('bur', 'my', 'Burmese'),
('byn', 'Blin'),
('cad', 'Caddo'),
('cai', 'Central American Indian '),
('car', 'Galibi Carib'),
('cat', 'ca', 'Catalan'),
('cau', 'Caucasian '),
('ceb', 'Cebuano'),
('cel', 'Celtic '),
('cha', 'ch', 'Chamorro'),
('chb', 'Chibcha'),
('che', 'ce', 'Chechen'),
('chg', 'Chagatai'),
('chi', 'zh', 'Chinese'),
('chk', 'Chuukese'),
('chm', 'Mari'),
('chn', 'Chinook jargon'),
('cho', 'Choctaw'),
('chp', 'Chipewyan'),
('chr', 'Cherokee'),
('chu', 'cu', 'Church Slavic'),
('chv', 'cv', 'Chuvash'),
('chy', 'Cheyenne'),
('cmc', 'Chamic languages'),
('cop', 'Coptic'),
('cor', 'kw', 'Cornish'),
('cos', 'co', 'Corsican'),
('cpe', 'Creoles and pidgins, English based '),
('cpf', 'Creoles and pidgins, French-based '),
('cpp', 'Creoles and pidgins, Portuguese-based '),
('cre', 'cr', 'Cree'),
('crh', 'Crimean Tatar'),
('crp', 'Creoles and pidgins '),
('csb', 'Kashubian'),
('cus', 'Cushitic '),
('cze', 'cs', 'Czech'),
('dak', 'Dakota'),
('dan', 'da', 'Danish'),
('dar', 'Dargwa'),
('day', 'Land Dayak languages'),
('del', 'Delaware'),
('den', 'Slave '),
('dgr', 'Dogrib'),
('din', 'Dinka'),
('div', 'dv', 'Divehi'),
('doi', 'Dogri'),
('dra', 'Dravidian '),
('dsb', 'Lower Sorbian'),
('dua', 'Duala'),
('dum', 'Dutch, Middle '),
('dut', 'nl', 'Dutch'),
('dyu', 'Dyula'),
('dzo', 'dz', 'Dzongkha'),
('efi', 'Efik'),
('egy', 'Egyptian '),
('eka', 'Ekajuk'),
('elx', 'Elamite'),
('eng', 'en', 'English'),
('enm', 'English, Middle '),
('epo', 'eo', 'Esperanto'),
('est', 'et', 'Estonian'),
('ewe', 'ee', 'Ewe'),
('ewo', 'Ewondo'),
('fan', 'Fang'),
('fao', 'fo', 'Faroese'),
('fat', 'Fanti'),
('fij', 'fj', 'Fijian'),
('fil', 'Filipino'),
('fin', 'fi', 'Finnish'),
('fiu', 'Finno-Ugrian '),
('fon', 'Fon'),
('fre', 'fr', 'French'),
('frm', 'French, Middle '),
('fro', 'French, Old '),
('frr', 'Northern Frisian'),
('frs', 'Eastern Frisian'),
('fry', 'fy', 'Western Frisian'),
('ful', 'ff', 'Fulah'),
('fur', 'Friulian'),
('gaa', 'Ga'),
('gay', 'Gayo'),
('gba', 'Gbaya'),
('gem', 'Germanic '),
('geo', 'ka', 'Georgian'),
('ger', 'de', 'German'),
('gez', 'Geez'),
('gil', 'Gilbertese'),
('gla', 'gd', 'Gaelic'),
('gle', 'ga', 'Irish'),
('glg', 'gl', 'Galician'),
('glv', 'gv', 'Manx'),
('gmh', 'German, Middle High '),
('goh', 'German, Old High '),
('gon', 'Gondi'),
('gor', 'Gorontalo'),
('got', 'Gothic'),
('grb', 'Grebo'),
('grc', 'Greek, Ancient '),
('gre', 'el', 'Greek, Modern '),
('grn', 'gn', 'Guarani'),
('gsw', 'Swiss German'),
('guj', 'gu', 'Gujarati'),
('gwi', u"Gwich'in"),
('hai', 'Haida'),
('hat', 'ht', 'Haitian'),
('hau', 'ha', 'Hausa'),
('haw', 'Hawaiian'),
('heb', 'he', 'Hebrew'),
('her', 'hz', 'Herero'),
('hil', 'Hiligaynon'),
('him', 'Himachali'),
('hin', 'hi', 'Hindi'),
('hit', 'Hittite'),
('hmn', 'Hmong'),
('hmo', 'ho', 'Hiri Motu'),
('hsb', 'Upper Sorbian'),
('hun', 'hu', 'Hungarian'),
('hup', 'Hupa'),
('iba', 'Iban'),
('ibo', 'ig', 'Igbo'),
('ice', 'is', 'Icelandic'),
('ido', 'io', 'Ido'),
('iii', 'ii', 'Sichuan Yi'),
('ijo', 'Ijo languages'),
('iku', 'iu', 'Inuktitut'),
('ile', 'ie', 'Interlingue'),
('ilo', 'Iloko'),
('ina', 'ia', 'Interlingua '),
('inc', 'Indic '),
('ind', 'id', 'Indonesian'),
('ine', 'Indo-European '),
('inh', 'Ingush'),
('ipk', 'ik', 'Inupiaq'),
('ira', 'Iranian '),
('iro', 'Iroquoian languages'),
('ita', 'it', 'Italian'),
('jav', 'jv', 'Javanese'),
('jbo', 'Lojban'),
('jpn', 'ja', 'Japanese'),
('jpr', 'Judeo-Persian'),
('jrb', 'Judeo-Arabic'),
('kaa', 'Kara-Kalpak'),
('kab', 'Kabyle'),
('kac', 'Kachin'),
('kal', 'kl', 'Kalaallisut'),
('kam', 'Kamba'),
('kan', 'kn', 'Kannada'),
('kar', 'Karen languages'),
('kas', 'ks', 'Kashmiri'),
('kau', 'kr', 'Kanuri'),
('kaw', 'Kawi'),
('kaz', 'kk', 'Kazakh'),
('kbd', 'Kabardian'),
('kha', 'Khasi'),
('khi', 'Khoisan '),
('khm', 'km', 'Central Khmer'),
('kho', 'Khotanese'),
('kik', 'ki', 'Kikuyu'),
('kin', 'rw', 'Kinyarwanda'),
('kir', 'ky', 'Kirghiz'),
('kmb', 'Kimbundu'),
('kok', 'Konkani'),
('kom', 'kv', 'Komi'),
('kon', 'kg', 'Kongo'),
('kor', 'ko', 'Korean'),
('kos', 'Kosraean'),
('kpe', 'Kpelle'),
('krc', 'Karachay-Balkar'),
('krl', 'Karelian'),
('kro', 'Kru languages'),
('kru', 'Kurukh'),
('kua', 'kj', 'Kuanyama'),
('kum', 'Kumyk'),
('kur', 'ku', 'Kurdish'),
('kut', 'Kutenai'),
('lad', 'Ladino'),
('lah', 'Lahnda'),
('lam', 'Lamba'),
('lao', 'lo', 'Lao'),
('lat', 'la', 'Latin'),
('lav', 'lv', 'Latvian'),
('lez', 'Lezghian'),
('lim', 'li', 'Limburgan'),
('lin', 'ln', 'Lingala'),
('lit', 'lt', 'Lithuanian'),
('lol', 'Mongo'),
('loz', 'Lozi'),
('ltz', 'lb', 'Luxembourgish'),
('lua', 'Luba-Lulua'),
('lub', 'lu', 'Luba-Katanga'),
('lug', 'lg', 'Ganda'),
('lui', 'Luiseno'),
('lun', 'Lunda'),
('luo', 'Luo '),
('lus', 'Lushai'),
('mac', 'mk', 'Macedonian'),
('mad', 'Madurese'),
('mag', 'Magahi'),
('mah', 'mh', 'Marshallese'),
('mai', 'Maithili'),
('mak', 'Makasar'),
('mal', 'ml', 'Malayalam'),
('man', 'Mandingo'),
('mao', 'mi', 'Maori'),
('map', 'Austronesian '),
('mar', 'mr', 'Marathi'),
('mas', 'Masai'),
('may', 'ms', 'Malay'),
('mdf', 'Moksha'),
('mdr', 'Mandar'),
('men', 'Mende'),
('mga', 'Irish, Middle '),
('mic', u"Mi'kmaq"),
('min', 'Minangkabau'),
('mis', 'Uncoded languages'),
('mkh', 'Mon-Khmer '),
('mlg', 'mg', 'Malagasy'),
('mlt', 'mt', 'Maltese'),
('mnc', 'Manchu'),
('mni', 'Manipuri'),
('mno', 'Manobo languages'),
('moh', 'Mohawk'),
('mol', 'mo', 'Moldavian'),
('mon', 'mn', 'Mongolian'),
('mos', 'Mossi'),
('mul', 'Multiple languages'),
('mun', 'Munda languages'),
('mus', 'Creek'),
('mwl', 'Mirandese'),
('mwr', 'Marwari'),
('myn', 'Mayan languages'),
('myv', 'Erzya'),
('nah', 'Nahuatl languages'),
('nai', 'North American Indian'),
('nap', 'Neapolitan'),
('nau', 'na', 'Nauru'),
('nav', 'nv', 'Navajo'),
('nbl', 'nr', 'Ndebele, South'),
('nde', 'nd', 'Ndebele, North'),
('ndo', 'ng', 'Ndonga'),
('nds', 'Low German'),
('nep', 'ne', 'Nepali'),
('new', 'Nepal Bhasa'),
('nia', 'Nias'),
('nic', 'Niger-Kordofanian '),
('niu', 'Niuean'),
('nno', 'nn', 'Norwegian Nynorsk'),
('nob', 'nb', 'Bokm\xe5l, Norwegian'),
('nog', 'Nogai'),
('non', 'Norse, Old'),
('nor', 'no', 'Norwegian'),
('nqo', u"N'Ko"),
('nso', 'Pedi'),
('nub', 'Nubian languages'),
('nwc', 'Classical Newari'),
('nya', 'ny', 'Chichewa'),
('nym', 'Nyamwezi'),
('nyn', 'Nyankole'),
('nyo', 'Nyoro'),
('nzi', 'Nzima'),
('oci', 'oc', 'Occitan '),
('oji', 'oj', 'Ojibwa'),
('ori', 'or', 'Oriya'),
('orm', 'om', 'Oromo'),
('osa', 'Osage'),
('oss', 'os', 'Ossetian'),
('ota', 'Turkish, Ottoman '),
('oto', 'Otomian languages'),
('paa', 'Papuan '),
('pag', 'Pangasinan'),
('pal', 'Pahlavi'),
('pam', 'Pampanga'),
('pan', 'pa', 'Panjabi'),
('pap', 'Papiamento'),
('pau', 'Palauan'),
('peo', 'Persian, Old '),
('per', 'fa', 'Persian'),
('phi', 'Philippine '),
('phn', 'Phoenician'),
('pli', 'pi', 'Pali'),
('pol', 'pl', 'Polish'),
('pon', 'Pohnpeian'),
('por', 'pt', 'Portuguese'),
('pra', 'Prakrit languages'),
('pro', 'Proven\xe7al, Old '),
('pus', 'ps', 'Pushto'),
('qaa-qtz', 'Reserved for local use'),
('que', 'qu', 'Quechua'),
('raj', 'Rajasthani'),
('rap', 'Rapanui'),
('rar', 'Rarotongan'),
('roa', 'Romance '),
('roh', 'rm', 'Romansh'),
('rom', 'Romany'),
('rum', 'ro', 'Romanian'),
('run', 'rn', 'Rundi'),
('rup', 'Aromanian'),
('rus', 'ru', 'Russian'),
('sad', 'Sandawe'),
('sag', 'sg', 'Sango'),
('sah', 'Yakut'),
('sai', 'South American Indian '),
('sal', 'Salishan languages'),
('sam', 'Samaritan Aramaic'),
('san', 'sa', 'Sanskrit'),
('sas', 'Sasak'),
('sat', 'Santali'),
('scc', 'sr', 'Serbian'),
('scn', 'Sicilian'),
('sco', 'Scots'),
('scr', 'hr', 'Croatian'),
('sel', 'Selkup'),
('sem', 'Semitic '),
('sga', 'Irish, Old '),
('sgn', 'Sign Languages'),
('shn', 'Shan'),
('sid', 'Sidamo'),
('sin', 'si', 'Sinhala'),
('sio', 'Siouan languages'),
('sit', 'Sino-Tibetan '),
('sla', 'Slavic '),
('slo', 'sk', 'Slovak'),
('slv', 'sl', 'Slovenian'),
('sma', 'Southern Sami'),
('sme', 'se', 'Northern Sami'),
('smi', 'Sami languages '),
('smj', 'Lule Sami'),
('smn', 'Inari Sami'),
('smo', 'sm', 'Samoan'),
('sms', 'Skolt Sami'),
('sna', 'sn', 'Shona'),
('snd', 'sd', 'Sindhi'),
('snk', 'Soninke'),
('sog', 'Sogdian'),
('som', 'so', 'Somali'),
('son', 'Songhai languages'),
('sot', 'st', 'Sotho, Southern'),
('spa', 'es', 'Spanish'),
('srd', 'sc', 'Sardinian'),
('srn', 'Sranan Tongo'),
('srr', 'Serer'),
('ssa', 'Nilo-Saharan '),
('ssw', 'ss', 'Swati'),
('suk', 'Sukuma'),
('sun', 'su', 'Sundanese'),
('sus', 'Susu'),
('sux', 'Sumerian'),
('swa', 'sw', 'Swahili'),
('swe', 'sv', 'Swedish'),
('syc', 'Classical Syriac'),
('syr', 'Syriac'),
('tah', 'ty', 'Tahitian'),
('tai', 'Tai '),
('tam', 'ta', 'Tamil'),
('tat', 'tt', 'Tatar'),
('tel', 'te', 'Telugu'),
('tem', 'Timne'),
('ter', 'Tereno'),
('tet', 'Tetum'),
('tgk', 'tg', 'Tajik'),
('tgl', 'tl', 'Tagalog'),
('tha', 'th', 'Thai'),
('tib', 'bo', 'Tibetan'),
('tig', 'Tigre'),
('tir', 'ti', 'Tigrinya'),
('tiv', 'Tiv'),
('tkl', 'Tokelau'),
('tlh', 'Klingon'),
('tli', 'Tlingit'),
('tmh', 'Tamashek'),
('tog', 'Tonga '),
('ton', 'to', 'Tonga '),
('tpi', 'Tok Pisin'),
('tsi', 'Tsimshian'),
('tsn', 'tn', 'Tswana'),
('tso', 'ts', 'Tsonga'),
('tuk', 'tk', 'Turkmen'),
('tum', 'Tumbuka'),
('tup', 'Tupi languages'),
('tur', 'tr', 'Turkish'),
('tut', 'Altaic '),
('tvl', 'Tuvalu'),
('twi', 'tw', 'Twi'),
('tyv', 'Tuvinian'),
('udm', 'Udmurt'),
('uga', 'Ugaritic'),
('uig', 'ug', 'Uighur'),
('ukr', 'uk', 'Ukrainian'),
('umb', 'Umbundu'),
('und', 'Undetermined'),
('urd', 'ur', 'Urdu'),
('uzb', 'uz', 'Uzbek'),
('vai', 'Vai'),
('ven', 've', 'Venda'),
('vie', 'vi', 'Vietnamese'),
('vol', 'vo', 'Volap\xfck'),
('vot', 'Votic'),
('wak', 'Wakashan languages'),
('wal', 'Walamo'),
('war', 'Waray'),
('was', 'Washo'),
('wel', 'cy', 'Welsh'),
('wen', 'Sorbian languages'),
('wln', 'wa', 'Walloon'),
('wol', 'wo', 'Wolof'),
('xal', 'Kalmyk'),
('xho', 'xh', 'Xhosa'),
('yao', 'Yao'),
('yap', 'Yapese'),
('yid', 'yi', 'Yiddish'),
('yor', 'yo', 'Yoruba'),
('ypk', 'Yupik languages'),
('zap', 'Zapotec'),
('zbl', 'Blissymbols'),
('zen', 'Zenaga'),
('zha', 'za', 'Zhuang'),
('znd', 'Zande languages'),
('zul', 'zu', 'Zulu'),
('zun', 'Zuni'),
('zxx', 'No linguistic content'),
('zza', 'Zaza'),
)
| gpl-3.0 | 7,245,538,614,709,254,000 | 27.18622 | 74 | 0.455338 | false |
lewischeng-ms/pox | pox/misc/dnsspy.py | 5 | 2069 | # Copyright 2011 James McCauley
# Copyright 2008 (C) Nicira, Inc.
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
This is a port of NOX's DNSSpy component.
"""
from pox.core import core
import pox.openflow.libopenflow_01 as of
from pox.lib.revent import *
from pox.lib.packet import *
log = core.getLogger()
class DNSSpy (EventMixin):
def __init__ (self):
self.ip_records = {}
self.listenTo(core.openflow)
def _handle_ConnectionUp (self, event):
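    # Install a flow that forwards any IPv4/UDP traffic with source port 53
    # (i.e. DNS responses) to the controller so the answers can be inspected.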
msg = of.ofp_flow_mod()
msg.match = of.ofp_match()
msg.match.dl_type = ethernet.IP_TYPE
msg.match.nw_proto = ipv4.UDP_PROTOCOL
msg.match.tp_src = 53
msg.actions.append(of.ofp_action_output(port = of.OFPP_CONTROLLER))
event.connection.send(msg)
def _handle_PacketIn (self, event):
p = event.parse().find('dns')
if p is not None:
log.debug(p)
for answer in p.answers:
if answer.qtype == dns.dns.rr.A_TYPE:
          val = self.ip_records.setdefault(answer.rddata, [])
if answer.name not in val:
val.insert(0, answer.name)
log.info("add dns entry: %s %s" % (answer.rddata, answer.name))
for addition in p.additional:
if addition.qtype == dns.dns.rr.A_TYPE:
          val = self.ip_records.setdefault(addition.rddata, [])
if addition.name not in val:
val.insert(0, addition.name)
log.info("additional dns entry: %s %s" % (addition.rddata, addition.name))
def launch ():
core.registerNew(DNSSpy)
| gpl-3.0 | 1,055,570,198,829,311,400 | 29.880597 | 86 | 0.669889 | false |
AliZafar120/NetworkStimulatorSPl3 | rapidnet_visualizer/src/ns3/test/test_gui_playback.py | 1 | 3036 | import sys
import gtk
from ns3.viewer.net_view_area import NetViewArea
from ns3.viewer.time_slider import TimeSlider
from ns3.viewer.node_style import NodeStyle, NodeShape
from ns3.viewer.link_style import LinkStyle
from ns3.drivers.node_style_driver import LoadNodeStyles
from ns3.drivers.link_style_driver import LoadLinkStyles
from ns3.drivers.file_driver import LoadLog
from ns3.model.net_world_model import NetWorldModel
from ns3.viewer.animation_model import AnimationModel
def main ():
hasInterval = False
interval = 100
if len (sys.argv) <= 1:
print 'Usage: test_gui_playback decorator.log [linkstyle_file nodestyle_file interval]'
sys.exit (0)
decorator_log = sys.argv[1]
linkstyle_file = None
nodestyle_file = None
  if len (sys.argv) >= 3:
    linkstyle_file = sys.argv[2]
  if len (sys.argv) >= 4:
    nodestyle_file = sys.argv[3]
if len (sys.argv) >= 5:
hasInterval = True
interval = int (sys.argv[4])
window = gtk.Window()
window.connect ("delete-event", gtk.main_quit)
window.set_title ("py ns3-decorator")
window.set_border_width (0)
window.set_size_request (410, 440)
netView = NetViewArea ()
scrolledWindow = gtk.ScrolledWindow ()
scrolledWindow.set_policy (gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
scrolledWindow.add_with_viewport(netView)
playback = TimeSlider()
vbox = gtk.VBox ()
window.add (vbox)
vbox.pack_start (scrolledWindow, expand = True)
hbox = gtk.HBox ()
vbox.pack_start (hbox, expand = False)
pButton = gtk.Button (stock = gtk.STOCK_MEDIA_PAUSE)
label = pButton.get_children()[0].get_children ()[0].get_children()[1]
label.set_label ('')
hbox.pack_start (pButton, expand = False)
hbox.pack_start (playback, expand = True)
window.show_all()
window.present ()
#Setup some default styles
linkStyles = {}
linkStyles['week'] = LinkStyle (0, 1, 0, 1, 1)
linkStyles['strong'] = LinkStyle (10, 0, 0, 0, 1)
linkStyles["fancy"] = LinkStyle (4, 1, 0, 0.5, 1)
if linkstyle_file != None:
LoadLinkStyles(linkstyle_file, linkStyles)
nodeStyles = {}
nodeStyles["command"] = NodeStyle(lineWidth = 0.25, r = 0, g = 0, b = 1, \
a = 1, hasLineWidth = True, hasColor = True)
nodeStyles["user"] = NodeStyle (lineWidth = 4, hasLineWidth = True)
nodeStyles["client"] = NodeStyle (lineWidth = 0, r = 1, g = 0, b = 1, \
a = 1, hasLineWidth = True, hasColor = True)
if nodestyle_file != None:
LoadNodeStyles(nodestyle_file, nodeStyles)
netView.linkStyles = linkStyles
netView.nodeStyles = nodeStyles
netWorldModel = NetWorldModel ()
LoadLog (netWorldModel, decorator_log)
#for event in netWorldModel.events:
# print event, event.__dict__
playback.SetWorldModel (netWorldModel)
netView.SetWorldModel (netWorldModel)
anim = AnimationModel ()
anim.SetWorldModel (netWorldModel)
if hasInterval:
anim.SetInterval (interval)
anim.AddNotifier (netView)
anim.AddNotifier (playback)
anim.ResetTo (0)
anim.Start()
gtk.main ()
if __name__ == "__main__":
main ()
| gpl-2.0 | -1,535,345,298,507,349,800 | 28.192308 | 91 | 0.695652 | false |
rahulunair/nova | nova/virt/xenapi/fake.py | 1 | 41677 | # Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Parts of this file are based upon xmlrpclib.py, the XML-RPC client
# interface included in the Python distribution.
#
# Copyright (c) 1999-2002 by Secret Labs AB
# Copyright (c) 1999-2002 by Fredrik Lundh
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# Secret Labs AB or the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
"""
A fake XenAPI SDK.
"""
import base64
import pickle
import random
import six
from xml.sax import saxutils
import zlib
from os_xenapi.client import session as xenapi_session
from os_xenapi.client import XenAPI
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from oslo_utils import units
from oslo_utils import uuidutils
from nova import exception
from nova.i18n import _
_CLASSES = ['host', 'network', 'session', 'pool', 'SR', 'VBD',
'PBD', 'VDI', 'VIF', 'PIF', 'VM', 'VLAN', 'task',
'GPU_group', 'PGPU', 'VGPU_type']
_after_create_functions = {}
_destroy_functions = {}
_db_content = {}
LOG = logging.getLogger(__name__)
def add_to_dict(functions):
"""A decorator that adds a function to dictionary."""
def decorator(func):
functions[func.__name__] = func
return func
return decorator
def reset():
for c in _CLASSES:
_db_content[c] = {}
create_host('fake')
create_vm('fake dom 0',
'Running',
is_a_template=False,
is_control_domain=True,
domid='0')
def reset_table(table):
if table not in _CLASSES:
return
_db_content[table] = {}
def _create_pool(name_label):
return _create_object('pool',
{'name_label': name_label})
def create_host(name_label, hostname='fake_name', address='fake_addr',
software_version={'platform_name': 'fake_platform',
'platform_version': '1.0.0'}):
host_ref = _create_object('host',
{'name_label': name_label,
'hostname': hostname,
'address': address,
'software_version': software_version})
host_default_sr_ref = _create_local_srs(host_ref)
_create_local_pif(host_ref)
# Create a pool if we don't have one already
if len(_db_content['pool']) == 0:
pool_ref = _create_pool('')
_db_content['pool'][pool_ref]['master'] = host_ref
_db_content['pool'][pool_ref]['default-SR'] = host_default_sr_ref
_db_content['pool'][pool_ref]['suspend-image-SR'] = host_default_sr_ref
def create_network(name_label, bridge):
return _create_object('network',
{'name_label': name_label,
'bridge': bridge})
def create_vm(name_label, status, **kwargs):
if status == 'Running':
domid = "%d" % random.randrange(1, 1 << 16)
resident_on = list(_db_content['host'])[0]
else:
domid = "-1"
resident_on = ''
vm_rec = {'name_label': name_label,
'domid': domid,
'power_state': status,
'blocked_operations': {},
'resident_on': resident_on}
vm_rec.update(kwargs.copy())
vm_ref = _create_object('VM', vm_rec)
after_VM_create(vm_ref, vm_rec)
return vm_ref
@add_to_dict(_destroy_functions)
def destroy_vm(vm_ref):
vm_rec = _db_content['VM'][vm_ref]
vbd_refs = vm_rec['VBDs']
# NOTE(johannes): Shallow copy since destroy_vbd will remove itself
# from the list
for vbd_ref in vbd_refs[:]:
destroy_vbd(vbd_ref)
del _db_content['VM'][vm_ref]
@add_to_dict(_destroy_functions)
def destroy_vbd(vbd_ref):
vbd_rec = _db_content['VBD'][vbd_ref]
vm_ref = vbd_rec['VM']
vm_rec = _db_content['VM'][vm_ref]
vm_rec['VBDs'].remove(vbd_ref)
vdi_ref = vbd_rec['VDI']
vdi_rec = _db_content['VDI'][vdi_ref]
vdi_rec['VBDs'].remove(vbd_ref)
del _db_content['VBD'][vbd_ref]
@add_to_dict(_destroy_functions)
def destroy_vdi(vdi_ref):
vdi_rec = _db_content['VDI'][vdi_ref]
vbd_refs = vdi_rec['VBDs']
# NOTE(johannes): Shallow copy since destroy_vbd will remove itself
# from the list
for vbd_ref in vbd_refs[:]:
destroy_vbd(vbd_ref)
del _db_content['VDI'][vdi_ref]
def create_vdi(name_label, sr_ref, **kwargs):
vdi_rec = {
'SR': sr_ref,
'read_only': False,
'type': '',
'name_label': name_label,
'name_description': '',
'sharable': False,
'other_config': {},
'location': '',
'xenstore_data': {},
'sm_config': {'vhd-parent': None},
'physical_utilisation': '123',
'managed': True,
}
vdi_rec.update(kwargs)
vdi_ref = _create_object('VDI', vdi_rec)
after_VDI_create(vdi_ref, vdi_rec)
return vdi_ref
@add_to_dict(_after_create_functions)
def after_VDI_create(vdi_ref, vdi_rec):
vdi_rec.setdefault('VBDs', [])
def create_vbd(vm_ref, vdi_ref, userdevice=0, other_config=None):
if other_config is None:
other_config = {}
vbd_rec = {'VM': vm_ref,
'VDI': vdi_ref,
'userdevice': str(userdevice),
'currently_attached': False,
'other_config': other_config}
vbd_ref = _create_object('VBD', vbd_rec)
after_VBD_create(vbd_ref, vbd_rec)
return vbd_ref
@add_to_dict(_after_create_functions)
def after_VBD_create(vbd_ref, vbd_rec):
"""Create read-only fields and backref from VM and VDI to VBD when VBD
is created.
"""
vbd_rec['currently_attached'] = False
# TODO(snikitin): Find a better way for generating of device name.
# Usually 'userdevice' has numeric values like '1', '2', '3', etc.
# Ideally they should be transformed to something like 'xvda', 'xvdb',
# 'xvdx', etc. But 'userdevice' also may be 'autodetect', 'fake' or even
# unset. We should handle it in future.
vbd_rec['device'] = vbd_rec.get('userdevice', '')
vbd_rec.setdefault('other_config', {})
vm_ref = vbd_rec['VM']
vm_rec = _db_content['VM'][vm_ref]
vm_rec['VBDs'].append(vbd_ref)
vm_name_label = _db_content['VM'][vm_ref]['name_label']
vbd_rec['vm_name_label'] = vm_name_label
vdi_ref = vbd_rec['VDI']
if vdi_ref and vdi_ref != "OpaqueRef:NULL":
vdi_rec = _db_content['VDI'][vdi_ref]
vdi_rec['VBDs'].append(vbd_ref)
@add_to_dict(_after_create_functions)
def after_VIF_create(vif_ref, vif_rec):
"""Create backref from VM to VIF when VIF is created.
"""
vm_ref = vif_rec['VM']
vm_rec = _db_content['VM'][vm_ref]
vm_rec['VIFs'].append(vif_ref)
@add_to_dict(_after_create_functions)
def after_VM_create(vm_ref, vm_rec):
"""Create read-only fields in the VM record."""
vm_rec.setdefault('domid', "-1")
vm_rec.setdefault('is_control_domain', False)
vm_rec.setdefault('is_a_template', False)
vm_rec.setdefault('memory_static_max', str(8 * units.Gi))
vm_rec.setdefault('memory_dynamic_max', str(8 * units.Gi))
vm_rec.setdefault('VCPUs_max', str(4))
vm_rec.setdefault('VBDs', [])
vm_rec.setdefault('VIFs', [])
vm_rec.setdefault('resident_on', '')
def create_pbd(host_ref, sr_ref, attached):
config = {'path': '/var/run/sr-mount/%s' % sr_ref}
return _create_object('PBD',
{'device_config': config,
'host': host_ref,
'SR': sr_ref,
'currently_attached': attached})
def create_task(name_label):
return _create_object('task',
{'name_label': name_label,
'status': 'pending'})
def _create_local_srs(host_ref):
"""Create an SR that looks like the one created on the local disk by
default by the XenServer installer. Also, fake the installation of
an ISO SR.
"""
create_sr(name_label='Local storage ISO',
type='iso',
other_config={'i18n-original-value-name_label':
'Local storage ISO',
'i18n-key': 'local-storage-iso'},
physical_size=80000,
physical_utilisation=40000,
virtual_allocation=80000,
host_ref=host_ref)
return create_sr(name_label='Local storage',
type='ext',
other_config={'i18n-original-value-name_label':
'Local storage',
'i18n-key': 'local-storage'},
physical_size=40000,
physical_utilisation=20000,
virtual_allocation=10000,
host_ref=host_ref)
def create_sr(**kwargs):
sr_ref = _create_object(
'SR',
{'name_label': kwargs.get('name_label'),
'type': kwargs.get('type'),
'content_type': kwargs.get('type', 'user'),
'shared': kwargs.get('shared', False),
'physical_size': kwargs.get('physical_size', str(1 << 30)),
'physical_utilisation': str(
kwargs.get('physical_utilisation', 0)),
'virtual_allocation': str(kwargs.get('virtual_allocation', 0)),
'other_config': kwargs.get('other_config', {}),
'VDIs': kwargs.get('VDIs', [])})
pbd_ref = create_pbd(kwargs.get('host_ref'), sr_ref, True)
_db_content['SR'][sr_ref]['PBDs'] = [pbd_ref]
return sr_ref
def _create_local_pif(host_ref):
pif_ref = _create_object('PIF',
{'name-label': 'Fake PIF',
'MAC': '00:11:22:33:44:55',
'physical': True,
'VLAN': -1,
'device': 'fake0',
'host_uuid': host_ref,
'network': '',
'IP': '10.1.1.1',
'IPv6': '',
'uuid': '',
'management': 'true',
'host': 'fake_host_ref'})
_db_content['PIF'][pif_ref]['uuid'] = pif_ref
return pif_ref
def _create_object(table, obj):
ref = uuidutils.generate_uuid()
obj['uuid'] = uuidutils.generate_uuid()
obj['ref'] = ref
_db_content[table][ref] = obj
return ref
def _create_sr(table, obj):
sr_type = obj[6]
# Forces fake to support iscsi only
if sr_type != 'iscsi' and sr_type != 'nfs':
raise XenAPI.Failure(['SR_UNKNOWN_DRIVER', sr_type])
host_ref = list(_db_content['host'])[0]
sr_ref = _create_object(table, obj[2])
if sr_type == 'iscsi':
vdi_ref = create_vdi('', sr_ref)
pbd_ref = create_pbd(host_ref, sr_ref, True)
_db_content['SR'][sr_ref]['VDIs'] = [vdi_ref]
_db_content['SR'][sr_ref]['PBDs'] = [pbd_ref]
_db_content['VDI'][vdi_ref]['SR'] = sr_ref
_db_content['PBD'][pbd_ref]['SR'] = sr_ref
return sr_ref
def _create_vlan(pif_ref, vlan_num, network_ref):
pif_rec = get_record('PIF', pif_ref)
vlan_pif_ref = _create_object('PIF',
{'name-label': 'Fake VLAN PIF',
'MAC': '00:11:22:33:44:55',
'physical': True,
'VLAN': vlan_num,
'device': pif_rec['device'],
'host_uuid': pif_rec['host_uuid']})
return _create_object('VLAN',
{'tagged-pif': pif_ref,
'untagged-pif': vlan_pif_ref,
'tag': vlan_num})
def get_all(table):
return list(_db_content[table].keys())
def get_all_records(table):
return _db_content[table]
def _query_matches(record, query):
# Simple support for the XenServer query language:
# 'field "host"="<uuid>" and field "SR"="<sr uuid>"'
# Tested through existing tests (e.g. calls to find_network_with_bridge)
and_clauses = query.split(" and ")
if len(and_clauses) > 1:
matches = True
for clause in and_clauses:
matches = matches and _query_matches(record, clause)
return matches
or_clauses = query.split(" or ")
if len(or_clauses) > 1:
matches = False
for clause in or_clauses:
matches = matches or _query_matches(record, clause)
return matches
if query.startswith('not '):
return not _query_matches(record, query[4:])
# Now it must be a single field - bad queries never match
if not query.startswith('field'):
return False
(field, value) = query[6:].split('=', 1)
# Some fields (e.g. name_label, memory_overhead) have double
# underscores in the DB, but only single underscores when querying
field = field.replace("__", "_").strip(" \"'")
value = value.strip(" \"'")
# Strings should be directly compared
if isinstance(record[field], six.string_types):
return record[field] == value
# But for all other value-checks, convert to a string first
# (Notably used for booleans - which can be lower or camel
# case and are interpreted/sanitised by XAPI)
return str(record[field]).lower() == value.lower()
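# Example (illustrative) of the query subset handled above: a record such as
# {'host': 'host_ref1', 'currently_attached': True} matches
#   'field "host"="host_ref1" and field "currently_attached"="true"'
# and 'not field "host"="host_ref2"', while a query that does not start
# with 'field' never matches.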
def get_all_records_where(table_name, query):
matching_records = {}
table = _db_content[table_name]
for record in table:
if _query_matches(table[record], query):
matching_records[record] = table[record]
return matching_records
def get_record(table, ref):
if ref in _db_content[table]:
return _db_content[table].get(ref)
else:
raise XenAPI.Failure(['HANDLE_INVALID', table, ref])
def check_for_session_leaks():
if len(_db_content['session']) > 0:
raise exception.NovaException('Sessions have leaked: %s' %
_db_content['session'])
def as_value(s):
"""Helper function for simulating XenAPI plugin responses. It
escapes and wraps the given argument.
"""
return '<value>%s</value>' % saxutils.escape(s)
def as_json(*args, **kwargs):
"""Helper function for simulating XenAPI plugin responses for those
that are returning JSON. If this function is given plain arguments,
then these are rendered as a JSON list. If it's given keyword
arguments then these are rendered as a JSON dict.
"""
arg = args or kwargs
return jsonutils.dumps(arg)
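# For instance (illustrative), as_json('0', 'done') yields a JSON list like
# '["0", "done"]', while as_json(returncode='0', message='done') yields a
# JSON dict such as '{"returncode": "0", "message": "done"}' (key order may
# vary).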
class Failure(Exception):
def __init__(self, details):
self.details = details
def __str__(self):
try:
return str(self.details)
except Exception:
return "XenAPI Fake Failure: %s" % str(self.details)
class SessionBase(object):
"""Base class for Fake Sessions."""
def __init__(self, uri, user=None, passwd=None):
self._session = None
xenapi_session.apply_session_helpers(self)
if user is not None:
self.xenapi.login_with_password(user, passwd)
def pool_get_default_SR(self, _1, pool_ref):
return list(_db_content['pool'].values())[0]['default-SR']
def VBD_insert(self, _1, vbd_ref, vdi_ref):
vbd_rec = get_record('VBD', vbd_ref)
get_record('VDI', vdi_ref)
vbd_rec['empty'] = False
vbd_rec['VDI'] = vdi_ref
def VBD_plug(self, _1, ref):
rec = get_record('VBD', ref)
if rec['currently_attached']:
raise XenAPI.Failure(['DEVICE_ALREADY_ATTACHED', ref])
rec['currently_attached'] = True
rec['device'] = 'fakedev'
def VBD_unplug(self, _1, ref):
rec = get_record('VBD', ref)
if not rec['currently_attached']:
raise XenAPI.Failure(['DEVICE_ALREADY_DETACHED', ref])
rec['currently_attached'] = False
rec['device'] = ''
def VBD_add_to_other_config(self, _1, vbd_ref, key, value):
db_ref = _db_content['VBD'][vbd_ref]
if 'other_config' not in db_ref:
db_ref['other_config'] = {}
if key in db_ref['other_config']:
raise XenAPI.Failure(
['MAP_DUPLICATE_KEY', 'VBD', 'other_config', vbd_ref, key])
db_ref['other_config'][key] = value
def VBD_get_other_config(self, _1, vbd_ref):
db_ref = _db_content['VBD'][vbd_ref]
if 'other_config' not in db_ref:
return {}
return db_ref['other_config']
def PBD_create(self, _1, pbd_rec):
pbd_ref = _create_object('PBD', pbd_rec)
_db_content['PBD'][pbd_ref]['currently_attached'] = False
return pbd_ref
def PBD_plug(self, _1, pbd_ref):
rec = get_record('PBD', pbd_ref)
if rec['currently_attached']:
raise XenAPI.Failure(['DEVICE_ALREADY_ATTACHED', rec])
rec['currently_attached'] = True
sr_ref = rec['SR']
_db_content['SR'][sr_ref]['PBDs'] = [pbd_ref]
def PBD_unplug(self, _1, pbd_ref):
rec = get_record('PBD', pbd_ref)
if not rec['currently_attached']:
raise XenAPI.Failure(['DEVICE_ALREADY_DETACHED', rec])
rec['currently_attached'] = False
sr_ref = rec['SR']
_db_content['SR'][sr_ref]['PBDs'].remove(pbd_ref)
def SR_introduce(self, _1, sr_uuid, label, desc, type, content_type,
shared, sm_config):
for ref, rec in _db_content['SR'].items():
if rec.get('uuid') == sr_uuid:
# make forgotten = 0 and return ref
_db_content['SR'][ref]['forgotten'] = 0
return ref
# SR not found in db, so we create one
params = {'sr_uuid': sr_uuid,
'label': label,
'desc': desc,
'type': type,
'content_type': content_type,
'shared': shared,
'sm_config': sm_config}
sr_ref = _create_object('SR', params)
_db_content['SR'][sr_ref]['uuid'] = sr_uuid
_db_content['SR'][sr_ref]['forgotten'] = 0
vdi_per_lun = False
if type == 'iscsi':
# Just to be clear
vdi_per_lun = True
if vdi_per_lun:
# we need to create a vdi because this introduce
# is likely meant for a single vdi
vdi_ref = create_vdi('', sr_ref)
_db_content['SR'][sr_ref]['VDIs'] = [vdi_ref]
_db_content['VDI'][vdi_ref]['SR'] = sr_ref
return sr_ref
def SR_forget(self, _1, sr_ref):
_db_content['SR'][sr_ref]['forgotten'] = 1
def SR_scan(self, _1, sr_ref):
return
def VM_get_xenstore_data(self, _1, vm_ref):
return _db_content['VM'][vm_ref].get('xenstore_data', {})
def VM_remove_from_xenstore_data(self, _1, vm_ref, key):
db_ref = _db_content['VM'][vm_ref]
if 'xenstore_data' not in db_ref:
return
if key in db_ref['xenstore_data']:
del db_ref['xenstore_data'][key]
def VM_add_to_xenstore_data(self, _1, vm_ref, key, value):
db_ref = _db_content['VM'][vm_ref]
if 'xenstore_data' not in db_ref:
db_ref['xenstore_data'] = {}
db_ref['xenstore_data'][key] = value
def VM_pool_migrate(self, _1, vm_ref, host_ref, options):
pass
def VDI_remove_from_other_config(self, _1, vdi_ref, key):
db_ref = _db_content['VDI'][vdi_ref]
if 'other_config' not in db_ref:
return
if key in db_ref['other_config']:
del db_ref['other_config'][key]
def VDI_add_to_other_config(self, _1, vdi_ref, key, value):
db_ref = _db_content['VDI'][vdi_ref]
if 'other_config' not in db_ref:
db_ref['other_config'] = {}
if key in db_ref['other_config']:
raise XenAPI.Failure(
['MAP_DUPLICATE_KEY', 'VDI', 'other_config', vdi_ref, key])
db_ref['other_config'][key] = value
def VDI_copy(self, _1, vdi_to_copy_ref, sr_ref):
db_ref = _db_content['VDI'][vdi_to_copy_ref]
name_label = db_ref['name_label']
read_only = db_ref['read_only']
sharable = db_ref['sharable']
other_config = db_ref['other_config'].copy()
return create_vdi(name_label, sr_ref, sharable=sharable,
read_only=read_only, other_config=other_config)
def VDI_clone(self, _1, vdi_to_clone_ref):
db_ref = _db_content['VDI'][vdi_to_clone_ref]
sr_ref = db_ref['SR']
return self.VDI_copy(_1, vdi_to_clone_ref, sr_ref)
def host_compute_free_memory(self, _1, ref):
# Always return 12GB available
return 12 * units.Gi
def _plugin_agent_version(self, method, args):
return as_json(returncode='0', message='1.0\\r\\n')
def _plugin_agent_key_init(self, method, args):
return as_json(returncode='D0', message='1')
def _plugin_agent_password(self, method, args):
return as_json(returncode='0', message='success')
def _plugin_agent_inject_file(self, method, args):
return as_json(returncode='0', message='success')
def _plugin_agent_resetnetwork(self, method, args):
return as_json(returncode='0', message='success')
def _plugin_agent_agentupdate(self, method, args):
url = args["url"]
md5 = args["md5sum"]
message = "success with %(url)s and hash:%(md5)s" % dict(url=url,
md5=md5)
return as_json(returncode='0', message=message)
def _plugin_noop(self, method, args):
return ''
def _plugin_pickle_noop(self, method, args):
return pickle.dumps(None)
def _plugin_migration_transfer_vhd(self, method, args):
kwargs = pickle.loads(args['params'])['kwargs']
vdi_ref = self.xenapi_request('VDI.get_by_uuid',
(kwargs['vdi_uuid'], ))
assert vdi_ref
return pickle.dumps(None)
_plugin_glance_upload_vhd2 = _plugin_pickle_noop
_plugin_kernel_copy_vdi = _plugin_noop
_plugin_kernel_create_kernel_ramdisk = _plugin_noop
_plugin_kernel_remove_kernel_ramdisk = _plugin_noop
_plugin_migration_move_vhds_into_sr = _plugin_noop
def _plugin_xenhost_host_data(self, method, args):
return jsonutils.dumps({
'host_memory': {'total': 10,
'overhead': 20,
'free': 30,
'free-computed': 40},
'host_uuid': 'fb97583b-baa1-452d-850e-819d95285def',
'host_name-label': 'fake-xenhost',
'host_name-description': 'Default install of XenServer',
'host_hostname': 'fake-xenhost',
'host_ip_address': '10.219.10.24',
'enabled': 'true',
'host_capabilities': ['xen-3.0-x86_64',
'xen-3.0-x86_32p',
'hvm-3.0-x86_32',
'hvm-3.0-x86_32p',
'hvm-3.0-x86_64'],
'host_other-config': {
'agent_start_time': '1412774967.',
'iscsi_iqn': 'iqn.2014-10.org.example:39fa9ee3',
'boot_time': '1412774885.',
},
'host_cpu_info': {
'physical_features': '0098e3fd-bfebfbff-00000001-28100800',
'modelname': 'Intel(R) Xeon(R) CPU X3430 @ 2.40GHz',
'vendor': 'GenuineIntel',
'features': '0098e3fd-bfebfbff-00000001-28100800',
'family': 6,
'maskable': 'full',
'cpu_count': 4,
'socket_count': '1',
'flags': 'fpu de tsc msr pae mce cx8 apic sep mtrr mca '
'cmov pat clflush acpi mmx fxsr sse sse2 ss ht '
'nx constant_tsc nonstop_tsc aperfmperf pni vmx '
'est ssse3 sse4_1 sse4_2 popcnt hypervisor ida '
'tpr_shadow vnmi flexpriority ept vpid',
'stepping': 5,
'model': 30,
'features_after_reboot': '0098e3fd-bfebfbff-00000001-28100800',
'speed': '2394.086'
},
})
def _plugin_poweraction(self, method, args):
return jsonutils.dumps({"power_action": method[5:]})
_plugin_xenhost_host_reboot = _plugin_poweraction
_plugin_xenhost_host_startup = _plugin_poweraction
_plugin_xenhost_host_shutdown = _plugin_poweraction
def _plugin_xenhost_set_host_enabled(self, method, args):
enabled = 'enabled' if args.get('enabled') == 'true' else 'disabled'
return jsonutils.dumps({"status": enabled})
def _plugin_xenhost_host_uptime(self, method, args):
return jsonutils.dumps({"uptime": "fake uptime"})
def _plugin_xenhost_network_config(self, method, args):
return pickle.dumps({"fake_network": "fake conf"})
    def _plugin_xenhost_get_pci_device_details(self, method, args):
        """Simulate the output of three pci devices.
        Two of those devices are available for pci passthrough but
only one will match with the pci whitelist used in the
method test_pci_passthrough_devices_*().
Return a single list.
"""
# Driver is not pciback
dev_bad1 = ["Slot:\t0000:86:10.0", "Class:\t0604", "Vendor:\t10b5",
"Device:\t8747", "Rev:\tba", "Driver:\tpcieport", "\n"]
# Driver is pciback but vendor and device are bad
dev_bad2 = ["Slot:\t0000:88:00.0", "Class:\t0300", "Vendor:\t0bad",
"Device:\tcafe", "SVendor:\t10de", "SDevice:\t100d",
"Rev:\ta1", "Driver:\tpciback", "\n"]
# Driver is pciback and vendor, device are used for matching
dev_good = ["Slot:\t0000:87:00.0", "Class:\t0300", "Vendor:\t10de",
"Device:\t11bf", "SVendor:\t10de", "SDevice:\t100d",
"Rev:\ta1", "Driver:\tpciback", "\n"]
lspci_output = "\n".join(dev_bad1 + dev_bad2 + dev_good)
return pickle.dumps(lspci_output)
def _plugin_xenhost_get_pci_type(self, method, args):
return pickle.dumps("type-PCI")
def _plugin_console_get_console_log(self, method, args):
dom_id = args["dom_id"]
if dom_id == 0:
raise XenAPI.Failure('Guest does not have a console')
return base64.b64encode(
zlib.compress(("dom_id: %s" % dom_id).encode('utf-8')))
def _plugin_dom0_plugin_version_get_version(self, method, args):
return pickle.dumps(
xenapi_session.XenAPISession.PLUGIN_REQUIRED_VERSION)
def _plugin_xenhost_query_gc(self, method, args):
return pickle.dumps("False")
def _plugin_partition_utils_make_partition(self, method, args):
return pickle.dumps(None)
def host_call_plugin(self, _1, _2, plugin, method, args):
plugin = plugin.rstrip('.py')
func = getattr(self, '_plugin_%s_%s' % (plugin, method), None)
if not func:
raise Exception('No simulation in host_call_plugin for %s,%s' %
(plugin, method))
return func(method, args)
def VDI_get_virtual_size(self, *args):
return 1 * units.Gi
def VDI_resize_online(self, *args):
return 'derp'
VDI_resize = VDI_resize_online
def _VM_reboot(self, session, vm_ref):
db_ref = _db_content['VM'][vm_ref]
if db_ref['power_state'] != 'Running':
raise XenAPI.Failure(['VM_BAD_POWER_STATE', 'fake-opaque-ref',
db_ref['power_state'].lower(), 'halted'])
db_ref['power_state'] = 'Running'
db_ref['domid'] = '%d' % (random.randrange(1, 1 << 16))
def VM_clean_reboot(self, session, vm_ref):
return self._VM_reboot(session, vm_ref)
def VM_hard_reboot(self, session, vm_ref):
return self._VM_reboot(session, vm_ref)
def VM_hard_shutdown(self, session, vm_ref):
db_ref = _db_content['VM'][vm_ref]
db_ref['power_state'] = 'Halted'
db_ref['domid'] = "-1"
VM_clean_shutdown = VM_hard_shutdown
def VM_suspend(self, session, vm_ref):
db_ref = _db_content['VM'][vm_ref]
db_ref['power_state'] = 'Suspended'
def VM_pause(self, session, vm_ref):
db_ref = _db_content['VM'][vm_ref]
db_ref['power_state'] = 'Paused'
def VM_query_data_source(self, session, vm_ref, field):
vm = {'cpu0': 0.11,
'cpu1': 0.22,
'cpu2': 0.33,
'cpu3': 0.44,
'memory': 8 * units.Gi, # 8GB in bytes
'memory_internal_free': 5 * units.Mi, # 5GB in kilobytes
'vif_0_rx': 50,
'vif_0_tx': 100,
'vbd_0_read': 50,
'vbd_0_write': 100}
return vm.get(field, 0)
def pool_eject(self, session, host_ref):
pass
def pool_join(self, session, hostname, username, password):
pass
def pool_set_name_label(self, session, pool_ref, name):
pass
def host_migrate_receive(self, session, destref, nwref, options):
return {"value": "fake_migrate_data"}
def VM_assert_can_migrate(self, session, vmref, migrate_data, live,
vdi_map, vif_map, options):
pass
def VM_migrate_send(self, session, mref, migrate_data, live, vdi_map,
vif_map, options):
pass
def VM_remove_from_blocked_operations(self, session, vm_ref, key):
# operation is idempotent, XenServer doesn't care if the key exists
_db_content['VM'][vm_ref]['blocked_operations'].pop(key, None)
def xenapi_request(self, methodname, params):
if methodname.startswith('login'):
self._login(methodname, params)
return None
elif methodname == 'logout' or methodname == 'session.logout':
self._logout()
return None
else:
full_params = (self._session,) + params
meth = getattr(self, methodname, None)
if meth is None:
LOG.debug('Raising NotImplemented')
raise NotImplementedError(
_('xenapi.fake does not have an implementation for %s') %
methodname)
return meth(*full_params)
def call_xenapi(self, *args):
return self.xenapi_request(args[0], args[1:])
def get_all_refs_and_recs(self, cls):
return get_all_records(cls).items()
def get_rec(self, cls, ref):
return _db_content[cls].get(ref, None)
def _login(self, method, params):
self._session = uuidutils.generate_uuid()
_session_info = {'uuid': uuidutils.generate_uuid(),
'this_host': list(_db_content['host'])[0]}
_db_content['session'][self._session] = _session_info
self.host_ref = list(_db_content['host'])[0]
def _logout(self):
s = self._session
self._session = None
if s not in _db_content['session']:
raise exception.NovaException(
"Logging out a session that is invalid or already logged "
"out: %s" % s)
del _db_content['session'][s]
def __getattr__(self, name):
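        # Attribute access doubles as the fake XenAPI dispatcher: session
        # handles, login helpers, Async wrappers, "<class>.<method>" calls and
        # the generic get_/set_/create/destroy accessors below are all resolved
        # against the in-memory _db_content database.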
if name == 'handle':
return self._session
elif name == 'xenapi':
return _Dispatcher(self.xenapi_request, None)
elif name.startswith('login') or name.startswith('slave_local'):
return lambda *params: self._login(name, params)
elif name.startswith('Async'):
return lambda *params: self._async(name, params)
elif '.' in name:
impl = getattr(self, name.replace('.', '_'))
if impl is not None:
def callit(*params):
LOG.debug('Calling %(name)s %(impl)s',
{'name': name, 'impl': impl})
self._check_session(params)
return impl(*params)
return callit
if self._is_gettersetter(name, True):
LOG.debug('Calling getter %s', name)
return lambda *params: self._getter(name, params)
elif self._is_gettersetter(name, False):
LOG.debug('Calling setter %s', name)
return lambda *params: self._setter(name, params)
elif self._is_create(name):
return lambda *params: self._create(name, params)
elif self._is_destroy(name):
return lambda *params: self._destroy(name, params)
elif name == 'XenAPI':
return FakeXenAPI()
else:
return None
def _is_gettersetter(self, name, getter):
bits = name.split('.')
return (len(bits) == 2 and
bits[0] in _CLASSES and
bits[1].startswith(getter and 'get_' or 'set_'))
def _is_create(self, name):
return self._is_method(name, 'create')
def _is_destroy(self, name):
return self._is_method(name, 'destroy')
def _is_method(self, name, meth):
bits = name.split('.')
return (len(bits) == 2 and
bits[0] in _CLASSES and
bits[1] == meth)
def _getter(self, name, params):
self._check_session(params)
(cls, func) = name.split('.')
if func == 'get_all':
self._check_arg_count(params, 1)
return get_all(cls)
if func == 'get_all_records':
self._check_arg_count(params, 1)
return get_all_records(cls)
if func == 'get_all_records_where':
self._check_arg_count(params, 2)
return get_all_records_where(cls, params[1])
if func == 'get_record':
self._check_arg_count(params, 2)
return get_record(cls, params[1])
if func in ('get_by_name_label', 'get_by_uuid'):
self._check_arg_count(params, 2)
return_singleton = (func == 'get_by_uuid')
return self._get_by_field(
_db_content[cls], func[len('get_by_'):], params[1],
return_singleton=return_singleton)
if func == 'get_VIFs':
self._check_arg_count(params, 2)
# FIXME(mriedem): figure out how to use _get_by_field for VIFs,
# or just stop relying on this fake DB and use mock
return _db_content['VIF'].keys()
if func == 'get_bridge':
self._check_arg_count(params, 2)
# FIXME(mriedem): figure out how to use _get_by_field for bridge,
# or just stop relying on this fake DB and use mock
return 'fake_bridge'
if len(params) == 2:
field = func[len('get_'):]
ref = params[1]
if (ref in _db_content[cls]):
if (field in _db_content[cls][ref]):
return _db_content[cls][ref][field]
else:
raise XenAPI.Failure(['HANDLE_INVALID', cls, ref])
LOG.debug('Raising NotImplemented')
raise NotImplementedError(
_('xenapi.fake does not have an implementation for %s or it has '
'been called with the wrong number of arguments') % name)
def _setter(self, name, params):
self._check_session(params)
(cls, func) = name.split('.')
if len(params) == 3:
field = func[len('set_'):]
ref = params[1]
val = params[2]
if (ref in _db_content[cls] and
field in _db_content[cls][ref]):
_db_content[cls][ref][field] = val
return
LOG.debug('Raising NotImplemented')
raise NotImplementedError(
'xenapi.fake does not have an implementation for %s or it has '
'been called with the wrong number of arguments or the database '
'is missing that field' % name)
def _create(self, name, params):
self._check_session(params)
is_sr_create = name == 'SR.create'
is_vlan_create = name == 'VLAN.create'
# Storage Repositories have a different API
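        # SR.create expects 10 positional arguments and VLAN.create expects 4;
        # every other <class>.create call takes 2 (the session plus the new
        # record's fields). The and/or chain below is the pre-2.5 conditional
        # expression idiom.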
expected = is_sr_create and 10 or is_vlan_create and 4 or 2
self._check_arg_count(params, expected)
(cls, _) = name.split('.')
ref = (is_sr_create and
_create_sr(cls, params) or
is_vlan_create and
_create_vlan(params[1], params[2], params[3]) or
_create_object(cls, params[1]))
# Call hook to provide any fixups needed (ex. creating backrefs)
after_hook = 'after_%s_create' % cls
try:
func = _after_create_functions[after_hook]
except KeyError:
pass
else:
func(ref, params[1])
obj = get_record(cls, ref)
# Add RO fields
if cls == 'VM':
obj['power_state'] = 'Halted'
return ref
def _destroy(self, name, params):
self._check_session(params)
self._check_arg_count(params, 2)
table = name.split('.')[0]
ref = params[1]
if ref not in _db_content[table]:
raise XenAPI.Failure(['HANDLE_INVALID', table, ref])
# Call destroy function (if exists)
destroy_func = _destroy_functions.get('destroy_%s' % table.lower())
if destroy_func:
destroy_func(ref)
else:
del _db_content[table][ref]
def _async(self, name, params):
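        # Async.* calls are executed synchronously here: the wrapped request is
        # run immediately and its result/status stored on a fake task record,
        # which callers can poll like a real XenAPI task.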
task_ref = create_task(name)
task = _db_content['task'][task_ref]
func = name[len('Async.'):]
try:
result = self.xenapi_request(func, params[1:])
if result:
result = as_value(result)
task['result'] = result
task['status'] = 'success'
except XenAPI.Failure as exc:
task['error_info'] = exc.details
task['status'] = 'failed'
task['finished'] = timeutils.utcnow()
return task_ref
def _check_session(self, params):
if (self._session is None or
self._session not in _db_content['session']):
raise XenAPI.Failure(
['HANDLE_INVALID', 'session', self._session])
if len(params) == 0 or params[0] != self._session:
LOG.debug('Raising NotImplemented')
raise NotImplementedError('Call to XenAPI without using .xenapi')
def _check_arg_count(self, params, expected):
actual = len(params)
if actual != expected:
raise XenAPI.Failure(
['MESSAGE_PARAMETER_COUNT_MISMATCH', expected, actual])
def _get_by_field(self, recs, k, v, return_singleton):
result = []
for ref, rec in recs.items():
if rec.get(k) == v:
result.append(ref)
if return_singleton:
try:
return result[0]
except IndexError:
raise XenAPI.Failure(['UUID_INVALID', v, result, recs, k])
return result
class FakeXenAPI(object):
def __init__(self):
self.Failure = XenAPI.Failure
# Based upon _Method from xmlrpclib.
class _Dispatcher(object):
def __init__(self, send, name):
self.__send = send
self.__name = name
def __repr__(self):
if self.__name:
return '<xenapi.fake._Dispatcher for %s>' % self.__name
else:
return '<xenapi.fake._Dispatcher>'
def __getattr__(self, name):
if self.__name is None:
return _Dispatcher(self.__send, name)
else:
return _Dispatcher(self.__send, "%s.%s" % (self.__name, name))
def __call__(self, *args):
return self.__send(self.__name, args)
| apache-2.0 | -2,799,547,797,793,958,000 | 34.56058 | 79 | 0.553471 | false |
SCSSoftware/BlenderTools | addon/io_scs_tools/exp/pim/piece.py | 1 | 9610 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Copyright (C) 2013-2014: SCS Software
from collections import OrderedDict
from io_scs_tools.exp.pim.piece_stream import Stream
from io_scs_tools.internals.structure import SectionData as _SectionData
from io_scs_tools.utils.printout import lprint
class Piece:
__index = 0
__vertex_count = 0
__triangle_count = 0
__stream_count = 0
__material = None # save whole material reference to get index out of it when packing
__streams = OrderedDict() # dict of Stream class
__triangles = [] # list of Triangle class
__vertices_hash = {}
__global_piece_count = 0
__global_vertex_count = 0
__global_triangle_count = 0
@staticmethod
def reset_counters():
Piece.__global_piece_count = 0
Piece.__global_triangle_count = 0
Piece.__global_vertex_count = 0
@staticmethod
def get_global_piece_count():
return Piece.__global_piece_count
@staticmethod
def get_global_vertex_count():
return Piece.__global_vertex_count
@staticmethod
def get_global_triangle_count():
return Piece.__global_triangle_count
@staticmethod
def __calc_vertex_hash(index, uvs, rgba, tangent):
"""Calculates vertex hash from original vertex index, uvs components and vertex color.
:param index: original index from Blender mesh
:type index: int
:param uvs: list of uvs used on vertex (each uv must be in SCS coordinates)
:type uvs: list of (tuple | mathutils.Vector)
:param rgba: rgba representation of vertex color in SCS values
:type rgba: tuple | mathutils.Color
:param tangent: vertex tangent in SCS coordinates or none, if piece doesn't have tangents
:type tangent: tuple | None
:return: calculated vertex hash
:rtype: str
"""
frmt = "%.4f"
vertex_hash = str(index)
for uv in uvs:
vertex_hash = ''.join((vertex_hash, frmt % uv[0], frmt % uv[1]))
vertex_hash = ''.join((vertex_hash, frmt % rgba[0], frmt % rgba[1], frmt % rgba[2], frmt % rgba[3]))
if tangent:
vertex_hash = ''.join((vertex_hash, frmt % tangent[0], frmt % tangent[1], frmt % tangent[2], frmt % tangent[3]))
return vertex_hash
def __init__(self, index, material):
"""Constructs empty piece.
        NOTE: an empty piece starts with the mandatory streams POSITION and NORMAL
        (initially empty); UV and RGBA streams are created on demand in add_vertex()
        :param index: index of this piece
        :type index: int
        :param material: material that should be used for this piece
:type material: io_scs_tools.exp.pim.material.Material
"""
self.__vertex_count = 0
self.__triangle_count = 0
self.__stream_count = 0
self.__streams = OrderedDict()
self.__triangles = []
self.__vertices_hash = {}
self.__index = index
self.__material = material
# CONSTRUCT ALL MANDATORY STREAMS
stream = Stream(Stream.Types.POSITION, -1)
self.__streams[Stream.Types.POSITION] = stream
stream = Stream(Stream.Types.NORMAL, -1)
self.__streams[Stream.Types.NORMAL] = stream
Piece.__global_piece_count += 1
def add_triangle(self, triangle):
"""Adds new triangle to piece
        NOTE: if the length of the given triangle iterable is different from 3 it will be refused!
:param triangle: tuple of 3 integers representing vertex indices
:type triangle: tuple
:return: True if added; False otherwise
        :rtype: bool
"""
if len(triangle) != 3:
return False
else:
            # check indices integrity
for vertex in triangle:
if vertex < 0 or vertex >= self.__vertex_count:
return False
self.__triangles.append(tuple(triangle))
Piece.__global_triangle_count += 1
return True
def add_vertex(self, vert_index, position, normal, uvs, uvs_aliases, rgba, tangent):
"""Adds new vertex to position and normal streams
:param vert_index: original vertex index from Blender mesh
:type vert_index: int | str
:param position: vector or tuple of vertex position in SCS coordinates
:type position: tuple | mathutils.Vector
:param normal: vector or tuple of vertex normal in SCS coordinates
:type normal: tuple | mathutils.Vector
:param uvs: list of uvs used on vertex (each uv must be in SCS coordinates)
:type uvs: list of (tuple | mathutils.Vector)
:param uvs_aliases: list of uv aliases names per uv layer
:type uvs_aliases: list[list[str]]
:param rgba: rgba representation of vertex color in SCS values
:type rgba: tuple | mathutils.Color
:param tangent: tuple representation of vertex tangent in SCS values or None if piece doesn't have tangents
:type tangent: tuple | None
:return: vertex index inside piece streams ( use it for adding triangles )
:rtype: int
"""
vertex_hash = self.__calc_vertex_hash(vert_index, uvs, rgba, tangent)
        # save the vertex only if a vertex with the same properties doesn't exist yet in the streams
if vertex_hash not in self.__vertices_hash:
stream = self.__streams[Stream.Types.POSITION]
stream.add_entry(position)
stream = self.__streams[Stream.Types.NORMAL]
stream.add_entry(normal)
for i, uv in enumerate(uvs):
uv_type = Stream.Types.UV + str(i)
# create more uv streams on demand
if uv_type not in self.__streams:
self.__streams[uv_type] = Stream(Stream.Types.UV, i)
stream = self.__streams[uv_type]
""":type: Stream"""
stream.add_entry(uv)
for alias in uvs_aliases[i]:
stream.add_alias(alias)
if tangent:
# create tangent stream on demand
if Stream.Types.TANGENT not in self.__streams:
self.__streams[Stream.Types.TANGENT] = Stream(Stream.Types.TANGENT, -1)
stream = self.__streams[Stream.Types.TANGENT]
stream.add_entry(tangent)
if Stream.Types.RGBA not in self.__streams:
self.__streams[Stream.Types.RGBA] = Stream(Stream.Types.RGBA, -1)
stream = self.__streams[Stream.Types.RGBA]
stream.add_entry(rgba)
            vert_index_internal = stream.get_size() - 1  # streams have to be aligned, so the last index of any of them can be used
self.__vertices_hash[vertex_hash] = vert_index_internal
self.__vertex_count = vert_index_internal + 1
Piece.__global_vertex_count += 1
return self.__vertices_hash[vertex_hash]
def get_index(self):
return self.__index
def get_vertex_count(self):
return self.__streams[Stream.Types.POSITION].get_size()
def get_as_section(self):
"""Gets piece represented with SectionData structure class.
:return: packed piece as section data
:rtype: io_scs_tools.internals.structure.SectionData
"""
# UPDATE COUNTERS
self.__vertex_count = self.__streams[Stream.Types.POSITION].get_size()
self.__triangle_count = len(self.__triangles)
self.__stream_count = len(self.__streams)
section = _SectionData("Piece")
section.props.append(("Index", self.__index))
if not self.__material or self.__material.get_index() == -1:
lprint("W Piece with index %s doesn't have data about material, expect errors in game!", (self.__index,))
section.props.append(("Material", -1))
else:
section.props.append(("Material", self.__material.get_index()))
section.props.append(("VertexCount", self.__vertex_count))
section.props.append(("TriangleCount", self.__triangle_count))
section.props.append(("StreamCount", self.__stream_count))
stream_size = None
for stream_tag in self.__streams:
stream = self.__streams[stream_tag]
# CHECK SYNC OF STREAMS
if not stream_size:
stream_size = stream.get_size()
elif stream_size != stream.get_size():
lprint("W Piece with index %s has desynced stream sizes, expect errors in game!", (self.__index,))
break
# APPEND STREAMS
section.sections.append(stream.get_as_section())
# APPEND TRIANGLES
triangle_section = _SectionData("Triangles")
for triangle in self.__triangles:
triangle_section.data.append(triangle)
section.sections.append(triangle_section)
return section
| gpl-2.0 | -6,222,418,562,750,311,000 | 36.98419 | 124 | 0.615609 | false |
russellthackston/comp-chem-util | myriad/aws/lambda/pymysql/tests/thirdparty/test_MySQLdb/capabilities.py | 20 | 10191 | #!/usr/bin/env python -O
""" Script to test database capabilities and the DB-API interface
for functionality and memory leaks.
Adapted from a script by M-A Lemburg.
"""
import sys
from time import time
try:
import unittest2 as unittest
except ImportError:
import unittest
PY2 = sys.version_info[0] == 2
class DatabaseTest(unittest.TestCase):
db_module = None
connect_args = ()
connect_kwargs = dict(use_unicode=True, charset="utf8")
create_table_extra = "ENGINE=INNODB CHARACTER SET UTF8"
rows = 10
debug = False
def setUp(self):
db = self.db_module.connect(*self.connect_args, **self.connect_kwargs)
self.connection = db
self.cursor = db.cursor()
        self.BLOBText = ''.join([chr(i) for i in range(256)] * 100)
if PY2:
self.BLOBUText = unicode().join(unichr(i) for i in range(16834))
else:
self.BLOBUText = "".join(chr(i) for i in range(16834))
data = bytearray(range(256)) * 16
self.BLOBBinary = self.db_module.Binary(data)
leak_test = True
def tearDown(self):
if self.leak_test:
import gc
del self.cursor
orphans = gc.collect()
self.assertFalse(orphans, "%d orphaned objects found after deleting cursor" % orphans)
del self.connection
orphans = gc.collect()
self.assertFalse(orphans, "%d orphaned objects found after deleting connection" % orphans)
def table_exists(self, name):
try:
self.cursor.execute('select * from %s where 1=0' % name)
except Exception:
return False
else:
return True
def quote_identifier(self, ident):
return '"%s"' % ident
def new_table_name(self):
i = id(self.cursor)
while True:
name = self.quote_identifier('tb%08x' % i)
if not self.table_exists(name):
return name
i = i + 1
def create_table(self, columndefs):
""" Create a table using a list of column definitions given in
columndefs.
generator must be a function taking arguments (row_number,
col_number) returning a suitable data object for insertion
into the table.
"""
self.table = self.new_table_name()
self.cursor.execute('CREATE TABLE %s (%s) %s' %
(self.table,
',\n'.join(columndefs),
self.create_table_extra))
def check_data_integrity(self, columndefs, generator):
# insert
self.create_table(columndefs)
insert_statement = ('INSERT INTO %s VALUES (%s)' %
(self.table,
','.join(['%s'] * len(columndefs))))
data = [ [ generator(i,j) for j in range(len(columndefs)) ]
for i in range(self.rows) ]
if self.debug:
print(data)
self.cursor.executemany(insert_statement, data)
self.connection.commit()
# verify
self.cursor.execute('select * from %s' % self.table)
l = self.cursor.fetchall()
if self.debug:
print(l)
self.assertEqual(len(l), self.rows)
try:
for i in range(self.rows):
for j in range(len(columndefs)):
self.assertEqual(l[i][j], generator(i,j))
finally:
if not self.debug:
self.cursor.execute('drop table %s' % (self.table))
def test_transactions(self):
columndefs = ( 'col1 INT', 'col2 VARCHAR(255)')
def generator(row, col):
if col == 0: return row
else: return ('%i' % (row%10))*255
self.create_table(columndefs)
insert_statement = ('INSERT INTO %s VALUES (%s)' %
(self.table,
','.join(['%s'] * len(columndefs))))
data = [ [ generator(i,j) for j in range(len(columndefs)) ]
for i in range(self.rows) ]
self.cursor.executemany(insert_statement, data)
# verify
self.connection.commit()
self.cursor.execute('select * from %s' % self.table)
l = self.cursor.fetchall()
self.assertEqual(len(l), self.rows)
for i in range(self.rows):
for j in range(len(columndefs)):
self.assertEqual(l[i][j], generator(i,j))
delete_statement = 'delete from %s where col1=%%s' % self.table
self.cursor.execute(delete_statement, (0,))
self.cursor.execute('select col1 from %s where col1=%s' % \
(self.table, 0))
l = self.cursor.fetchall()
self.assertFalse(l, "DELETE didn't work")
self.connection.rollback()
self.cursor.execute('select col1 from %s where col1=%s' % \
(self.table, 0))
l = self.cursor.fetchall()
self.assertTrue(len(l) == 1, "ROLLBACK didn't work")
self.cursor.execute('drop table %s' % (self.table))
def test_truncation(self):
columndefs = ( 'col1 INT', 'col2 VARCHAR(255)')
def generator(row, col):
if col == 0: return row
else: return ('%i' % (row%10))*((255-self.rows//2)+row)
self.create_table(columndefs)
insert_statement = ('INSERT INTO %s VALUES (%s)' %
(self.table,
','.join(['%s'] * len(columndefs))))
try:
self.cursor.execute(insert_statement, (0, '0'*256))
except Warning:
if self.debug: print(self.cursor.messages)
except self.connection.DataError:
pass
else:
self.fail("Over-long column did not generate warnings/exception with single insert")
self.connection.rollback()
try:
for i in range(self.rows):
data = []
for j in range(len(columndefs)):
data.append(generator(i,j))
self.cursor.execute(insert_statement,tuple(data))
except Warning:
if self.debug: print(self.cursor.messages)
except self.connection.DataError:
pass
else:
self.fail("Over-long columns did not generate warnings/exception with execute()")
self.connection.rollback()
try:
data = [ [ generator(i,j) for j in range(len(columndefs)) ]
for i in range(self.rows) ]
self.cursor.executemany(insert_statement, data)
except Warning:
if self.debug: print(self.cursor.messages)
except self.connection.DataError:
pass
else:
self.fail("Over-long columns did not generate warnings/exception with executemany()")
self.connection.rollback()
self.cursor.execute('drop table %s' % (self.table))
def test_CHAR(self):
# Character data
def generator(row,col):
return ('%i' % ((row+col) % 10)) * 255
self.check_data_integrity(
('col1 char(255)','col2 char(255)'),
generator)
def test_INT(self):
# Number data
def generator(row,col):
return row*row
self.check_data_integrity(
('col1 INT',),
generator)
def test_DECIMAL(self):
# DECIMAL
def generator(row,col):
from decimal import Decimal
return Decimal("%d.%02d" % (row, col))
self.check_data_integrity(
('col1 DECIMAL(5,2)',),
generator)
def test_DATE(self):
ticks = time()
def generator(row,col):
return self.db_module.DateFromTicks(ticks+row*86400-col*1313)
self.check_data_integrity(
('col1 DATE',),
generator)
def test_TIME(self):
ticks = time()
def generator(row,col):
return self.db_module.TimeFromTicks(ticks+row*86400-col*1313)
self.check_data_integrity(
('col1 TIME',),
generator)
def test_DATETIME(self):
ticks = time()
def generator(row,col):
return self.db_module.TimestampFromTicks(ticks+row*86400-col*1313)
self.check_data_integrity(
('col1 DATETIME',),
generator)
def test_TIMESTAMP(self):
ticks = time()
def generator(row,col):
return self.db_module.TimestampFromTicks(ticks+row*86400-col*1313)
self.check_data_integrity(
('col1 TIMESTAMP',),
generator)
def test_fractional_TIMESTAMP(self):
ticks = time()
def generator(row,col):
return self.db_module.TimestampFromTicks(ticks+row*86400-col*1313+row*0.7*col/3.0)
self.check_data_integrity(
('col1 TIMESTAMP',),
generator)
def test_LONG(self):
def generator(row,col):
if col == 0:
return row
else:
return self.BLOBUText # 'BLOB Text ' * 1024
self.check_data_integrity(
('col1 INT', 'col2 LONG'),
generator)
def test_TEXT(self):
def generator(row,col):
if col == 0:
return row
else:
return self.BLOBUText[:5192] # 'BLOB Text ' * 1024
self.check_data_integrity(
('col1 INT', 'col2 TEXT'),
generator)
def test_LONG_BYTE(self):
def generator(row,col):
if col == 0:
return row
else:
return self.BLOBBinary # 'BLOB\000Binary ' * 1024
self.check_data_integrity(
('col1 INT','col2 LONG BYTE'),
generator)
def test_BLOB(self):
def generator(row,col):
if col == 0:
return row
else:
return self.BLOBBinary # 'BLOB\000Binary ' * 1024
self.check_data_integrity(
('col1 INT','col2 BLOB'),
generator)
| mit | 8,673,656,556,234,855,000 | 33.197987 | 102 | 0.531744 | false |
Xaxetrov/OSCAR | oscar/agent/nn/playagent.py | 1 | 6344 | import numpy
from pysc2.agents import base_agent
from pysc2.lib import actions
from pysc2.lib import features
from learning_tools.A3C_learner.neuralmodel import get_neural_network
_PLAYER_RELATIVE = features.SCREEN_FEATURES.player_relative.index
_PLAYER_FRIENDLY = 1
_PLAYER_NEUTRAL = 3 # beacon/minerals
_PLAYER_HOSTILE = 4
_NO_OP = actions.FUNCTIONS.no_op.id
_MOVE_SCREEN = actions.FUNCTIONS.Move_screen.id
_ATTACK_SCREEN = actions.FUNCTIONS.Attack_screen.id
_SELECT_ARMY = actions.FUNCTIONS.select_army.id
_NOT_QUEUED = [0]
_SELECT_ALL = [0]
_EPSILON_GREEDY = 1.0 # exploration vs exploitation criteria
class PlayAgent(base_agent.BaseAgent):
"""
Same as DQNAgent but does not learn.
Simply applies a trained model to test it.
"""
model = None
def __init__(self):
super(PlayAgent, self).__init__()
self.model = get_neural_network()
try:
with open("constants.py", mode='r') as config:
self.number_of_run = int(config.readline())
self.epsilon_step = int(config.readline()) / 100.0
self.epsilon = 0.0
self.step_by_epsilon = 240 * self.number_of_run
except OSError:
self.number_of_run = 10
self.epsilon = _EPSILON_GREEDY
self.epsilon_step = 0.0
self.step_by_epsilon = -1
def get_random_action(self, obs):
"""return an available random action
-obs: the obs parameter given to the agent for the step call
"""
number_of_possible_action = 1 # _NO_OP
if _MOVE_SCREEN in obs.observation["available_actions"]:
number_of_possible_action += 256
if _SELECT_ARMY in obs.observation["available_actions"]:
number_of_possible_action += 1
# get a random number to select an action (including _NO_OP)
selected_action_id = numpy.random.randint(0, number_of_possible_action)
if _MOVE_SCREEN in obs.observation["available_actions"] and selected_action_id < 256:
return self.get_move_action(selected_action_id)
else:
            # two cases here: either the action id is 256 or 257, or it is 0 or 1
            # in both cases, if _SELECT_ARMY is not available the following call handles it
return self.get_non_spacial_action(selected_action_id % 256)
@staticmethod
def get_move_action(linear_position):
"""
Returns a pysc2 move action and argument to get to a given position
:param linear_position: position of the move on a 16x16 grid, integer equal to y*16+x
:return: The move action
"""
x_16 = (linear_position % 16)
y_16 = (linear_position // 16)
x_64 = x_16 * 4
y_64 = y_16 * 4
action_args = [_NOT_QUEUED, [x_64, y_64]]
return _MOVE_SCREEN, action_args
@staticmethod
def get_non_spacial_action(action_id):
"""
Returns a pysc2 action corresponding to the given action id
:param action_id: 0 -> NO_OP; 1 -> Select all army
:return: an action id and its arguments
"""
if action_id == 1:
selected_action = _SELECT_ARMY
action_args = [_SELECT_ALL]
else:
selected_action = _NO_OP
action_args = []
return selected_action, action_args
def step(self, obs, locked_choice=None):
super(PlayAgent, self).step(obs)
if numpy.random.rand() < self.epsilon:
state = [obs.observation[SCREEN][features.SCREEN_FEATURES.player_relative.index],
obs.observation[SCREEN][features.SCREEN_FEATURES.selected.index]]
formatted_state = numpy.zeros(shape=(1, 64, 64, 2), dtype=float)
for formatted_row, state0_row, state1_row in zip(formatted_state[0], state[0], state[1]):
for formatted_case, state0_case, state1_case in zip(formatted_row, state0_row, state1_row):
formatted_case[0] = state0_case
formatted_case[1] = state1_case
# get reward prediction from neural network
action = self.model.predict(formatted_state, batch_size=1)
# if numpy.isnan(numpy.max(action[0])) or numpy.isnan(numpy.max(action[1])):
# print("action contain NaN !")
# if numpy.isnan(numpy.max(formatted_state)):
# print("formatted_state contain NaN too !!!")
# exit(1)
# compute best reward of the two main branch
best_reward_spacial_action = numpy.max(action[1])
best_reward_non_spacial_action = numpy.max(action[0][0][0:2])
action_vector = action[0][0]
# mask _SELECT_ARMY action if not available
if _SELECT_ARMY not in obs.observation["available_actions"]:
action_vector[1] = 0.0
# /!\ in this case the neural network will learn not to do this action -> side effect ?
# if best_reward_non_spacial_action < action_vector[2] \
if best_reward_non_spacial_action < best_reward_spacial_action \
and _MOVE_SCREEN in obs.observation["available_actions"]:
# get the best position according to reward
position_vector = action[1][0]
max_coordinate = numpy.argmax(position_vector)
selected_action, action_args = self.get_move_action(max_coordinate)
else:
# select best action according to reward
print(action_vector)
best_action_id = numpy.argmax(action_vector[0:2])
selected_action, action_args = self.get_non_spacial_action(best_action_id)
else:
selected_action, action_args = self.get_random_action(obs)
return actions.FunctionCall(selected_action, action_args)
def reset(self):
super(PlayAgent, self).reset()
# print("reward for this game:", self.reward, "(", self.steps, "steps)")
if self.steps == self.step_by_epsilon:
with open("reward.csv", mode='a') as out_file:
out_file.write(str(self.epsilon) + ", " + str(self.reward * 240.0 / self.steps) + "\n")
self.epsilon += self.epsilon_step
self.reward = 0
self.steps = 0
| apache-2.0 | -8,732,523,221,495,277,000 | 41.864865 | 107 | 0.599149 | false |
strets123/cbh_chembl_ws_extension | cbh_chembl_ws_extension/features/steps/salts.py | 2 | 3160 | from behave import given, when, then
import json
import os
from django.contrib.auth.models import User, Group
from pybel import readfile
@given('I have valid salted compound')
def step(context):
context.inchi = "InChI=1S/2CHF3O3S.Cu/c2*2-1(3,4)8(5,6)7;/h2*(H,5,6,7);/q;;+2/p-2"
context.post_data["ctab"] = """
17 14 0 0000 0 0 0 0 0999 V2000
0.0000 0.0000 0.0000 Cu 0 2 0 0 0 0 0 0 0 0 0
2.8286 -1.2375 0.0000 O 0 5 0 0 0 0 0 0 0 0 0
2.8286 -0.4125 0.0000 S 0 0 0 0 0 0 0 0 0 0 0
3.6536 -0.4125 0.0000 O 0 0 0 0 0 0 0 0 0 0 0
2.0036 -0.4125 0.0000 O 0 0 0 0 0 0 0 0 0 0 0
2.8286 0.4125 0.0000 C 0 0 0 0 0 0 0 0 0 0 0
2.8286 1.2375 0.0000 F 0 0 0 0 0 0 0 0 0 0 0
3.6536 0.4125 0.0000 F 0 0 0 0 0 0 0 0 0 0 0
2.0036 0.4125 0.0000 F 0 0 0 0 0 0 0 0 0 0 0
0.0000 -5.3625 0.0000 O 0 5 0 0 0 0 0 0 0 0 0
0.0000 -4.5375 0.0000 S 0 0 0 0 0 0 0 0 0 0 0
0.8250 -4.5375 0.0000 O 0 0 0 0 0 0 0 0 0 0 0
-0.8250 -4.5375 0.0000 O 0 0 0 0 0 0 0 0 0 0 0
0.0000 -3.7125 0.0000 C 0 0 0 0 0 0 0 0 0 0 0
0.0000 -2.8875 0.0000 F 0 0 0 0 0 0 0 0 0 0 0
0.8250 -3.7125 0.0000 F 0 0 0 0 0 0 0 0 0 0 0
-0.8250 -3.7125 0.0000 F 0 0 0 0 0 0 0 0 0 0 0
2 3 1 0
3 4 2 0
3 5 2 0
3 6 1 0
6 7 1 0
6 8 1 0
6 9 1 0
10 11 1 0
11 12 2 0
11 13 2 0
11 14 1 0
14 15 1 0
14 16 1 0
14 17 1 0
M CHG 3 1 2 2 -1 10 -1
M END"""
@given("I have valid salted compounds within a ChemDraw {format} file")
def step(context, format=None):
# pull the contents of our chemdraw test file
fp = 'files/behave-triflate.%s' % (format)
fn = os.path.join(os.path.dirname(__file__), fp)
# convert the chemdraw file contents to mol
print(fp)
print(fn)
print(format)
mols = [mol.write("smi").split("\t")[0] for mol in readfile(format, fn)]
#file_contents = "".join(mols)
print(len(mols))
context.post_data["type"] = "Smiles"
context.post_data["objects"] = mols
# also populate our inchi list
@then('retain its salt')
def step(context, action=None, projkey=None):
from cbh_chembl_model_extension.models import CBHCompoundBatch
from cbh_core_model.models import Project
from rdkit import Chem
from rdkit.Chem import AllChem, inchi
path = "/dev/cbh_compound_batches/"
resp = context.api_client.get(
path,
format='json',
data=context.post_data,
)
reg_cmpds = context.ser.deserialize(resp.content)["objects"]
# retrieve registered inchi
reg_inchi = reg_cmpds[0]['standardInchi']
# convert our ctab mol to inchi
#m = Chem.MolFromMolBlock(context.post_data["ctab"])
#mol_inchi = inchi.MolToInchi(m)
# we are now using a hard coded inchi from Chemicalize
mol_inchi = context.inchi
# assert they are equal
assert mol_inchi == reg_inchi
| mit | -4,025,316,958,847,487,500 | 32.978495 | 86 | 0.550949 | false |
josesanch/django-oscar | src/oscar/apps/catalogue/south_migrations/0010_call_update_product_ratings.py | 7 | 18993 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from oscar.core.compat import AUTH_USER_MODEL, AUTH_USER_MODEL_NAME
class Migration(DataMigration):
def forwards(self, orm):
for product in orm.Product.objects.all():
# adapted from Product.calculate_rating
result = orm['reviews.ProductReview'].objects.filter(
product_id=product.id,
status=1
).aggregate(
sum=models.Sum('score'), count=models.Count('id'))
reviews_sum = result['sum'] or 0
reviews_count = result['count'] or 0
rating = None
if reviews_count > 0:
rating = float(reviews_sum) / reviews_count
orm.Product.objects.filter(id=product.id).update(rating=rating)
def backwards(self, orm):
# rating field will be deleted anyway if migrating backwards
pass
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
AUTH_USER_MODEL: {
'Meta': {'object_name': AUTH_USER_MODEL_NAME},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.contributor': {
'Meta': {'object_name': 'Contributor'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.contributorrole': {
'Meta': {'object_name': 'ContributorRole'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'name_plural': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'rating': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalogue.productcontributor': {
'Meta': {'object_name': 'ProductContributor'},
'contributor': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Contributor']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"}),
'role': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ContributorRole']", 'null': 'True', 'blank': 'True'})
},
'catalogue.productimage': {
'Meta': {'ordering': "['display_order']", 'unique_together': "(('product', 'display_order'),)", 'object_name': 'ProductImage'},
'caption': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'display_order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'original': ('django.db.models.fields.files.ImageField', [], {'max_length': '100'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'images'", 'to': "orm['catalogue.Product']"})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'reviews.productreview': {
'Meta': {'ordering': "['-delta_votes', 'id']", 'unique_together': "(('product', 'user'),)", 'object_name': 'ProductReview'},
'body': ('django.db.models.fields.TextField', [], {}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'delta_votes': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reviews'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['catalogue.Product']"}),
'score': ('django.db.models.fields.SmallIntegerField', [], {}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'total_votes': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'reviews'", 'null': 'True', 'to': u"orm['{0}']".format(AUTH_USER_MODEL)})
},
'reviews.vote': {
'Meta': {'ordering': "['-date_created']", 'unique_together': "(('user', 'review'),)", 'object_name': 'Vote'},
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'delta': ('django.db.models.fields.SmallIntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'review': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': u"orm['reviews.ProductReview']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'review_votes'", 'to': u"orm['{0}']".format(AUTH_USER_MODEL)})
}
}
complete_apps = ['catalogue', 'reviews']
symmetrical = True
| bsd-3-clause | 2,444,137,537,843,999,000 | 79.478814 | 222 | 0.557521 | false |
mattjmorrison/ReportLab | src/reportlab/lib/extformat.py | 10 | 2164 | #Copyright ReportLab Europe Ltd. 2000-2010
#see license.txt for license details
__version__='''$Id: extformat.py 3665 2010-02-09 15:55:45Z rgbecker $'''
__doc__='''Apparently not used anywhere, purpose unknown!'''
from tokenize import tokenprog
import sys
def _matchorfail(text, pos):
match = tokenprog.match(text, pos)
if match is None: raise ValueError(text, pos)
return match, match.end()
'''
Extended dictionary formatting
We allow expressions in the parentheses instead of
just a simple variable.
'''
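# Illustrative usage (variable names invented for the example):
# dictformat("total=%(a+b)d", {'a': 1, 'b': 2}) -> "total=3"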
def dictformat(_format, L={}, G={}):
format = _format
S = {}
chunks = []
pos = 0
n = 0
while 1:
pc = format.find("%", pos)
if pc < 0: break
nextchar = format[pc+1]
if nextchar == "(":
chunks.append(format[pos:pc])
pos, level = pc+2, 1
while level:
match, pos = _matchorfail(format, pos)
tstart, tend = match.regs[3]
token = format[tstart:tend]
if token == "(": level = level+1
elif token == ")": level = level-1
vname = '__superformat_%d' % n
n += 1
S[vname] = eval(format[pc+2:pos-1],G,L)
chunks.append('%%(%s)' % vname)
else:
nc = pc+1+(nextchar=="%")
chunks.append(format[pos:nc])
pos = nc
if pos < len(format): chunks.append(format[pos:])
return (''.join(chunks)) % S
def magicformat(format):
"""Evaluate and substitute the appropriate parts of the string."""
frame = sys._getframe(1)
return dictformat(format,frame.f_locals, frame.f_globals)
if __name__=='__main__':
from reportlab.lib.formatters import DecimalFormatter
_DF={}
def df(n,dp=2,ds='.',ts=','):
try:
_df = _DF[dp,ds]
except KeyError:
_df = _DF[dp,ds] = DecimalFormatter(places=dp,decimalSep=ds,thousandSep=ts)
return _df(n)
from reportlab.lib.extformat import magicformat
Z={'abc': ('ab','c')}
x = 300000.23
percent=79.2
class dingo:
a=3
print(magicformat('''
$%%(df(x,dp=3))s --> $%(df(x,dp=3))s
$%%(df(x,dp=2,ds=',',ts='.'))s --> $%(df(x,dp=2,ds=',',ts='.'))s
%%(percent).2f%%%% --> %(percent).2f%%
%%(dingo.a)s --> %(dingo.a)s
%%(Z['abc'][0])s --> %(Z['abc'][0])s
'''))
def func0(aa=1):
def func1(bb=2):
print(magicformat('bb=%(bb)s Z=%(Z)r'))
func1('BB')
func0('AA')
| bsd-3-clause | -2,870,351,378,579,206,700 | 24.458824 | 78 | 0.615527 | false |
mtausig/RIOT | tests/gnrc_netif/tests/01-run.py | 7 | 8711 | #!/usr/bin/env python3
# Copyright (C) 2016 Kaspar Schleiser <[email protected]>
# Copyright (C) 2016 Takuo Yonezawa <[email protected]>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
def testfunc(child):
# embUnit tests
child.expect(r"OK \(\d+ tests\)")
    # output cross-checked hex data with Wireshark -> "Import from Hex Dump..."
# test_netapi_send__raw_unicast_ethernet_packet
child.expect("Sending data from Ethernet device:")
child.expect("00000000 3E E6 B5 0F 19 23 3E E6 B5 22 FD 0A FF FF 41 42")
child.expect("00000010 43 44 45 46 47 00")
# test_netapi_send__raw_broadcast_ethernet_packet
child.expect("Sending data from Ethernet device:")
child.expect("00000000 FF FF FF FF FF FF 3E E6 B5 22 FD 0A FF FF 41 42")
child.expect("00000010 43 44 45 46 47 00")
# test_netapi_send__raw_unicast_ieee802154_long_long_packet
child.expect(r"Sending data from IEEE 802\.15\.4 device:")
child.expect("00000000 41 DC 00 00 00 0B FD 22 19 0F B5 E6 3E 0A FD 22")
child.expect("00000010 19 0F B5 E6 3E 31 32 33 41 42 43 44 45 46 47 00")
# test_netapi_send__raw_unicast_ieee802154_long_short_packet
child.expect(r"Sending data from IEEE 802\.15\.4 device:")
child.expect("00000000 41 D8 01 00 00 0B FD 0A FD 22 19 0F B5 E6 3E 31")
child.expect("00000010 32 33 41 42 43 44 45 46 47 00")
# test_netapi_send__raw_unicast_ieee802154_short_long_packet1
child.expect(r"Sending data from IEEE 802\.15\.4 device:")
child.expect("00000000 41 9C 02 00 00 0B FD 22 19 0F B5 E6 3E 0A FD 31")
child.expect("00000010 32 33 41 42 43 44 45 46 47 00")
# test_netapi_send__raw_unicast_ieee802154_short_long_packet2
child.expect(r"Sending data from IEEE 802\.15\.4 device:")
child.expect("00000000 41 9C 03 00 00 0B FD 22 19 0F B5 E6 3E 0A FD 31")
child.expect("00000010 32 33 41 42 43 44 45 46 47 00")
# test_netapi_send__raw_unicast_ieee802154_short_short_packet
child.expect(r"Sending data from IEEE 802\.15\.4 device:")
child.expect("00000000 41 98 04 00 00 0B FD 0A FD 31 32 33 41 42 43 44")
child.expect("00000010 45 46 47 00")
# test_netapi_send__raw_broadcast_ieee802154_long_packet
child.expect(r"Sending data from IEEE 802\.15\.4 device:")
child.expect("00000000 41 D8 05 00 00 FF FF 0A FD 22 19 0F B5 E6 3E 31")
child.expect("00000010 32 33 41 42 43 44 45 46 47 00")
# test_netapi_send__raw_broadcast_ieee802154_short_packet
child.expect(r"Sending data from IEEE 802\.15\.4 device:")
child.expect("00000000 41 98 06 00 00 FF FF 0A FD 31 32 33 41 42 43 44")
child.expect("00000010 45 46 47 00")
# test_netapi_send__ipv6_unicast_ethernet_packet
child.expect("Sending data from Ethernet device:")
child.expect("00000000 3E E6 B5 0F 19 23 3E E6 B5 22 FD 0A 86 DD 60 00")
child.expect("00000010 00 00 00 08 3B 40 FE 80 00 00 00 00 00 00 3C E6")
child.expect("00000020 B5 FF FE 22 FD 0A FE 80 00 00 00 00 00 00 3C E6")
child.expect("00000030 B5 FF FE 0F 19 23 41 42 43 44 45 46 47 00")
# test_netapi_send__ipv6_multicast_ethernet_packet
child.expect("Sending data from Ethernet device:")
child.expect("00000000 33 33 00 00 00 01 3E E6 B5 22 FD 0A 86 DD 60 00")
child.expect("00000010 00 00 00 08 3B 40 FE 80 00 00 00 00 00 00 3C E6")
child.expect("00000020 B5 FF FE 22 FD 0A FF 02 00 00 00 00 00 00 00 00")
child.expect("00000030 00 00 00 00 00 01 41 42 43 44 45 46 47 00")
# test_netapi_send__ipv6_unicast_ieee802154_packet
child.expect("Sending data from IEEE 802.15.4 device:")
child.expect("00000000 41 DC 07 00 00 0B FD 22 19 0F B5 E6 3E 0A FD 22")
child.expect("00000010 19 0F B5 E6 3E 60 00 00 00 00 08 3B 40 FE 80 00")
child.expect("00000020 00 00 00 00 00 3C E6 B5 0F 19 22 FD 0A FE 80 00")
child.expect("00000030 00 00 00 00 00 3C E6 B5 FF FE 0F 19 23 41 42 43")
child.expect("00000040 44 45 46 47 00")
# test_netapi_send__ipv6_multicast_ieee802154_packet
child.expect("Sending data from IEEE 802.15.4 device:")
child.expect("00000000 41 D8 08 00 00 FF FF 0A FD 22 19 0F B5 E6 3E 60")
child.expect("00000010 00 00 00 00 08 3B 40 FE 80 00 00 00 00 00 00 3C")
child.expect("00000020 E6 B5 0F 19 22 FD 0A FF 02 00 00 00 00 00 00 00")
child.expect("00000030 00 00 00 00 00 00 01 41 42 43 44 45 46 47 00")
# test_netapi_recv__empty_ethernet_payload
child.expect("pktdump dumping Ethernet packet with empty payload")
child.expect("PKTDUMP: data received:")
child.expect(r"~~ SNIP 0 - size: 0 byte, type: NETTYPE_UNDEF \(0\)")
child.expect(r"00000000~~ SNIP 1 - size: \d+ byte, type: NETTYPE_NETIF \(-1\)")
child.expect(r"if_pid: (\d+) rssi: -?\d+ lqi: \d+")
assert 0 < int(child.match.group(1))
child.expect("flags: 0x0")
child.expect("src_l2addr: 3E:E6:B5:22:FD:0B")
child.expect("dst_l2addr: 3E:E6:B5:22:FD:0A")
child.expect(r"~~ PKT - 2 snips, total size: \d+ byte")
# test_netapi_recv__empty_ieee802154_payload
child.expect(r"pktdump dumping IEEE 802\.15\.4 packet with empty payload")
child.expect("PKTDUMP: data received:")
child.expect(r"~~ SNIP 0 - size: 0 byte, type: NETTYPE_UNDEF \(0\)")
child.expect(r"00000000~~ SNIP 1 - size: \d+ byte, type: NETTYPE_NETIF \(-1\)")
child.expect(r"if_pid: (\d+) rssi: -?\d+ lqi: \d+")
assert 0 < int(child.match.group(1))
child.expect("flags: 0x0")
child.expect("src_l2addr: 3E:E6:B5:0F:19:22:FD:0B")
child.expect("dst_l2addr: 3E:E6:B5:0F:19:22:FD:0A")
child.expect(r"~~ PKT - 2 snips, total size: \d+ byte")
# test_netapi_recv__raw_ethernet_payload
child.expect("pktdump dumping Ethernet packet with payload 12 34 45 56")
child.expect("PKTDUMP: data received:")
child.expect(r"~~ SNIP 0 - size: 4 byte, type: NETTYPE_UNDEF \(0\)")
child.expect("00000000 12 34 45 56")
child.expect(r"~~ SNIP 1 - size: \d+ byte, type: NETTYPE_NETIF \(-1\)")
child.expect(r"if_pid: (\d+) rssi: -?\d+ lqi: \d+")
assert 0 < int(child.match.group(1))
child.expect("flags: 0x0")
child.expect("src_l2addr: 3E:E6:B5:22:FD:0B")
child.expect("dst_l2addr: 3E:E6:B5:22:FD:0A")
child.expect(r"~~ PKT - 2 snips, total size: \d+ byte")
# test_netapi_recv__raw_ieee802154_payload
child.expect(r"pktdump dumping IEEE 802\.15\.4 packet with payload 12 34 45 56")
child.expect("PKTDUMP: data received:")
child.expect(r"~~ SNIP 0 - size: 4 byte, type: NETTYPE_UNDEF \(0\)")
child.expect("00000000 12 34 45 56")
child.expect(r"~~ SNIP 1 - size: \d+ byte, type: NETTYPE_NETIF \(-1\)")
child.expect(r"if_pid: (\d+) rssi: -?\d+ lqi: \d+")
assert 0 < int(child.match.group(1))
child.expect("flags: 0x0")
child.expect("src_l2addr: 3E:E6:B5:0F:19:22:FD:0B")
child.expect("dst_l2addr: 3E:E6:B5:0F:19:22:FD:0A")
child.expect(r"~~ PKT - 2 snips, total size: \d+ byte")
# test_netapi_recv__ipv6_ethernet_payload
child.expect("pktdump dumping IPv6 over Ethernet packet with payload 01")
child.expect("PKTDUMP: data received:")
# payload not dumped because not parsed yet, but header looks okay, so let's
# assume the payload is as well
child.expect(r"~~ SNIP 0 - size: 41 byte, type: NETTYPE_IPV6 \(2\)")
child.expect(r"traffic class: 0x00 \(ECN: 0x0, DSCP: 0x00\)")
child.expect("flow label: 0x00000")
child.expect("length: 1 next header: 59 hop limit: 64")
child.expect("source address: fe80::3fe6:b5ff:fe22:fd0a")
child.expect("destination address: fe80::3fe6:b5ff:fe22:fd0b")
child.expect(r"~~ SNIP 1 - size: \d+ byte, type: NETTYPE_NETIF \(-1\)")
child.expect(r"if_pid: (\d+) rssi: -?\d+ lqi: \d+")
assert 0 < int(child.match.group(1))
child.expect("flags: 0x0")
child.expect("src_l2addr: 3E:E6:B5:22:FD:0B")
child.expect("dst_l2addr: 3E:E6:B5:22:FD:0A")
child.expect(r"~~ PKT - 2 snips, total size: \d+ byte")
if __name__ == "__main__":
sys.exit(run(testfunc, timeout=1, traceback=True))
| lgpl-2.1 | 4,346,733,464,777,080,300 | 58.664384 | 92 | 0.623924 | false |
AsherBond/MondocosmOS | grass_trunk/scripts/v.db.droprow/v.db.droprow.py | 2 | 1441 | #!/usr/bin/env python
############################################################################
#
# MODULE: v.db.droprow
# AUTHOR(S): Markus Neteler
# Pythonized by Martin Landa
# PURPOSE: Interface to v.extract -r to drop ...
# COPYRIGHT: (C) 2009 by the GRASS Development Team
#
# This program is free software under the GNU General Public
# License (>=v2). Read the file COPYING that comes with GRASS
# for details.
#
#############################################################################
#%module
#% description: Removes a vector feature from a vector map through attribute selection.
#% keywords: vector
#% keywords: database
#% keywords: attribute table
#%end
#%option G_OPT_V_INPUT
#%end
#%option G_OPT_V_FIELD
#%end
#%option G_OPT_DB_WHERE
#% required: yes
#%end
#%option G_OPT_V_OUTPUT
#%end
import sys
import grass.script as grass
def main():
# delete vectors via reverse selection
ret = grass.run_command('v.extract',
flags = 'r',
input = options['input'], layer = options['layer'],
output = options['output'], where = options['where'])
if ret != 0:
return 1
# write cmd history:
grass.vector_history(map = options['output'])
return 0
if __name__ == "__main__":
options, flags = grass.parser()
sys.exit(main())
| agpl-3.0 | 3,355,757,292,743,143,400 | 24.280702 | 87 | 0.526024 | false |
adongy/spreads | spreads/vendor/huey/__init__.py | 7 | 2101 | from api import Huey, crontab
try:
import redis
from backends.redis_backend import RedisBlockingQueue
from backends.redis_backend import RedisDataStore
from backends.redis_backend import RedisEventEmitter
from backends.redis_backend import RedisSchedule
class RedisHuey(Huey):
def __init__(self, name='huey', store_none=False, always_eager=False,
**conn_kwargs):
queue = RedisBlockingQueue(name, **conn_kwargs)
result_store = RedisDataStore(name, **conn_kwargs)
schedule = RedisSchedule(name, **conn_kwargs)
events = RedisEventEmitter(name, **conn_kwargs)
super(RedisHuey, self).__init__(
queue=queue,
result_store=result_store,
schedule=schedule,
events=events,
store_none=store_none,
always_eager=always_eager)
except ImportError:
class RedisHuey(object):
def __init__(self, *args, **kwargs):
raise RuntimeError('Error, "redis" is not installed. Install '
'using pip: "pip install redis"')
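# Illustrative usage sketch (assumes a reachable Redis server; the queue name and
# connection kwargs are examples only):
#   huey = RedisHuey('my-app', host='localhost', port=6379)
#
#   @huey.task()
#   def add(a, b):
#       return a + b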
from backends.sqlite_backend import SqliteQueue
from backends.sqlite_backend import SqliteDataStore
from backends.sqlite_backend import SqliteEventEmitter
from backends.sqlite_backend import SqliteSchedule
class SqliteHuey(Huey):
def __init__(self, name='huey', store_none=False, always_eager=False,
location=None):
if location is None:
raise ValueError("Please specify a database file with the "
"'location' parameter")
queue = SqliteQueue(name, location)
result_store = SqliteDataStore(name, location)
schedule = SqliteSchedule(name, location)
events = SqliteEventEmitter(name, location=location)
super(SqliteHuey, self).__init__(
queue=queue,
result_store=result_store,
schedule=schedule,
events=events,
store_none=store_none,
always_eager=always_eager)
| agpl-3.0 | -8,652,692,313,373,966,000 | 38.641509 | 77 | 0.621609 | false |
gxx/andrewcrosio.com | blog/blog/conf/settings.py | 1 | 3058 | """
Django settings for blog project.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'replace_me_in_production'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = [
'andrewcrosio.com',
'www.andrewcrosio.com'
]
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'axes',
'blog',
)
MIDDLEWARE_CLASSES = (
'blog.middleware.session.UbiquitousSessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'axes.middleware.FailedLoginMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'blog.urls'
WSGI_APPLICATION = 'blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.abspath(os.path.join(BASE_DIR, 'db.sqlite3')),
}
}
# Internationalization
# https://docs.djangoproject.com/en/dev/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Sessions
SESSION_COOKIE_AGE = 3600 * 24 * 365 # One year cookie
# Templates
TEMPLATE_CONTEXT_PROCESSORS = [
"django.contrib.auth.context_processors.auth",
"django.core.context_processors.debug",
"django.core.context_processors.i18n",
"django.core.context_processors.media",
"django.core.context_processors.static",
"django.core.context_processors.tz",
"django.core.context_processors.request",
"django.contrib.messages.context_processors.messages",
]
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/dev/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
# Attempt to import local settings, if any
try:
from .local_settings import *
except ImportError:
pass
if DEBUG:
try:
# Ensure we have it, otherwise do nothing
import django_extensions
INSTALLED_APPS = INSTALLED_APPS + ('django_extensions',)
except ImportError:
pass
| gpl-2.0 | 3,182,777,056,244,373,500 | 23.861789 | 77 | 0.713538 | false |
ctuning/ck-env | module/apk/module.py | 2 | 15053 | #
# Collective Knowledge (APK entries)
#
# See CK LICENSE.txt for licensing details
# See CK COPYRIGHT.txt for copyright details
#
# Developer: cTuning foundation, [email protected], http://cTuning.org
#
cfg={} # Will be updated by CK (meta description of this module)
work={} # Will be updated by CK (temporal data)
ck=None # Will be updated by CK (initialized CK kernel)
# Local settings
import os
##############################################################################
# Initialize module
def init(i):
"""
Input: {}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
return {'return':0}
##############################################################################
# detect installed APKs
def detect(i):
"""
Input: {
(data_uoa) or (name) - get params only for this APK
(target_os) - target Android OS (ck search os --tags=android) (default = android-32)
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
o=i.get('out','')
oo=''
if o=='con': oo='con'
hos=i.get('host_os','')
tos=i.get('target_os','')
tdid=i.get('device_id','')
target=i.get('target','')
if target=='' and tos=='':
tos='android-32'
ii={'action':'shell',
'module_uoa':cfg['module_deps']['os'],
'host_os':hos,
'target_os':tos,
'device_id':tdid,
'target':target,
'split_to_list':'yes',
'should_be_remote':'yes',
'cmd':'pm list packages'}
r=ck.access(ii)
if r['return']>0: return r
tosd=r['target_os_dict']
lst=r['stdout_lst']
params={}
name=i.get('name','')
if name=='':
name=i.get('data_uoa','')
iapk=0
for package in sorted(lst):
if package.startswith('package:'):
package=package[8:]
if (name!='' and package!=name) or package=='':
continue
iapk+=1
if o=='con':
ck.out(package)
params[package]={}
# Get parameters
ii={'action':'shell',
'module_uoa':cfg['module_deps']['os'],
'host_os':hos,
'target_os':tos,
'device_id':tdid,
'target':target,
'split_to_list':'yes',
'should_be_remote':'yes',
'cmd':'dumpsys package '+package}
r=ck.access(ii)
if r['return']>0: return r
ll=r['stdout_lst']
for q in ll:
j=q.find('=')
if j>0:
j1=q.rfind(' ', 0, j)
k=q[j1+1:j]
v=q[j+1:]
j2=v.find(' targetSdk=')
if j2>0:
vv=v[j2+11:]
v=v[:j2]
kk='targetSdk'
params[package][kk]=vv
params[package][k]=v
if name!='':
if iapk==0:
return {'return':16, 'error':'APK was not found on the target device', 'target_os_dict':tosd}
if o=='con':
ck.out('')
ck.out('Parameters:')
ck.out('')
for k in sorted(params[name]):
v=params[name][k]
ck.out(' '+k+' = '+v)
return {'return':0, 'params':params, 'target_os_dict':tosd}
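# Illustrative shape of the 'params' mapping returned by detect() above (package
# name and values are examples only; the keys are whatever key=value pairs
# "dumpsys package <name>" happens to print):
#   params['org.example.app'] = {
#       'versionCode': '42',
#       'versionName': '1.0',
#       'targetSdk': '23',
#   }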
##############################################################################
# check APK
def install(i):
"""
Input: {
(host_os)
(target_os)
(device_id)
name or data_uoa - APK name
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
import os
o=i.get('out','')
oo=''
if o=='con': oo=o
name=i.get('name','')
if name=='':
name=i.get('data_uoa','')
if name=='':
return {'return':1, 'error':'APK "name" is not defined'}
hos=i.get('host_os','')
tos=i.get('target_os','')
tdid=i.get('device_id','')
target=i.get('target','')
if target=='' and tos=='':
tos='android19-arm'
i['target_os']=tos
# return {'return':1, 'error':'"target_os" or "target" is not specified'}
xtdid=''
if tdid!='': xtdid=' -s '+tdid
rr={'return':0}
# Detect if APK is installed
r=detect(i)
if r['return']>0 and r['return']!=16: return r
if r['return']==0:
rr['params']=r['params']
if r['return']==16:
# APK is not installed
tosd=r['target_os_dict']
abi=tosd.get('abi','')
if o=='con':
ck.out('Searching APK for "'+name+'" and ABI="'+abi+'" ...')
# Check if available in the CK
r=ck.access({'action':'load',
'module_uoa':work['self_module_uid'],
'data_uoa':name})
if r['return']>0 and r['return']!=16: return r
found=False
if r['return']==0:
p=r['path']
d=r['dict']
aname=''
for apk in d.get('apks',[]):
if abi in apk.get('abis',[]):
aname=apk.get('apk_name','')
break
# If the preferred abi didn't match but is 64-bit,
# look for a 32-bit binary (worst case won't install)
alt_abi = ''
if abi in ['arm64-v8a','arm64']:
alt_abi='armeabi'
elif abi=='x86-64':
alt_abi='x86'
if alt_abi!='':
for apk in d.get('apks',[]):
if alt_abi in apk.get('abis',[]):
aname=apk.get('apk_name','')
break
if aname!='':
pp=os.path.join(p, aname)
if os.path.isfile(pp):
# Trying to install
if o=='con':
ck.out(' APK found ('+aname+') - trying to install ...')
if alt_abi!='':
ck.out(' First choice ABI "'+abi+'" not found, using "'+alt_abi+'"')
ii={'action':'shell',
'module_uoa':cfg['module_deps']['os'],
'host_os':hos,
'target_os':hos,
'cmd':'adb '+xtdid+' install -r -d '+pp,
'out':oo}
r=ck.access(ii)
if r['return']>0: return r
rc=r['return_code']
if rc>0:
return {'return':1, 'error':'command may have failed (return code='+str(rc)+')'}
# Detecting params
r=detect(i)
if r['return']>0 and r['return']!=16: return r
if r['return']==0:
rr['params']=r['params']
found=True
# If not found
if not found:
if o=='con':
ck.out('')
ck.out('APK "'+name+'" with abi "'+abi+'" was not found in CK.')
ck.out('You can download it and then register in the CK via')
ck.out(' $ ck add apk:{APK name} --path={full path to downloaded APK}')
ck.out('')
return {'return':16, 'error':'APK is not installed on target device and was not found in CK'}
return rr
##############################################################################
# add apk
def add(i):
"""
Input: {
(data_uoa) - CK entry to add APK (should be official APK name, i.e. openscience.crowdsource.experiments)
(repo_uoa) - repo where to add APK
(abi) - list of ABI separated by comma (default=armeabi,armeabi-v7a,arm64-v8a)
(version) - version
(versioncode) - versioncode
(path) - path to APK on local host (apk_name will be automatically detected)
(apk_name) - force APK name
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
import os
import shutil
o=i.get('out','')
# Check APK name
apk_name=i.get('apk_name','')
path=i.get('path','')
if path!='':
if not os.path.isfile(path):
return {'return':1, 'error':'APK is not found ('+path+')'}
if apk_name=='':
apk_name=os.path.basename(path)
# Check ABI
abi=i.get('abi','')
if abi=='':
r=ck.inp({'text':'Enter list of ABI separated by comma or Enter for "armeabi,armeabi-v7a,arm64-v8a"): '})
if r['return']>0: return r
abi=r['string'].strip()
if abi=='':
abi='armeabi,armeabi-v7a,arm64-v8a'
if abi=='':
return {'return':1, 'error':'"abi" is not specified'}
abis=abi.split(',')
# Version
version=i.get('version','')
if version=='':
r=ck.inp({'text':'Enter APK version: '})
if r['return']>0: return r
version=r['string'].strip()
if version=='':
return {'return':1, 'error':'"version" is not specified'}
# VersionCode
versioncode=i.get('versioncode','')
# Check CK entry name
duoa=i.get('data_uoa','')
ruoa=i.get('repo_uoa','')
if duoa=='':
r=ck.inp({'text':'Enter CK entry name (must be official APK name): '})
if r['return']>0: return r
duoa=r['string'].strip()
# Check if already exists
r=ck.access({'action':'load',
'module_uoa':work['self_module_uid'],
'data_uoa':duoa})
if r['return']>0 and r['return']!=16: return r
if r['return']==0:
ruoa=r['repo_uid']
pp=r['path']
dd=r['dict']
else:
r=ck.access({'action':'add',
'module_uoa':work['self_module_uid'],
'common_func':'yes',
'data_uoa':duoa,
'repo_uoa':ruoa})
if r['return']>0: return r
pp=r['path']
dd={}
# Create dirs and copy files
p2=os.path.join(pp,apk_name)
shutil.copyfile(path, p2)
# Update dict
if 'apks' not in dd: dd['apks']=[]
dd['apks'].append({'abis':abis,
'apk_name':apk_name,
'version':version,
'versioncode':versioncode})
r=ck.access({'action':'update',
'module_uoa':work['self_module_uid'],
'data_uoa':duoa,
'repo_uoa':ruoa,
'dict':dd,
'sort_keys':'yes',
'substitute':'yes',
'ignore_update':'yes'})
if r['return']>0: return r
p=r['path']
if o=='con':
ck.out('')
ck.out('APK successfully registered in the CK ('+p+')')
return r
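# Illustrative CLI usage mirroring the hint printed above (package name, path and
# version values are examples only):
#   ck add apk:org.example.app --path=/tmp/org.example.app.apk \
#       --abi=armeabi-v7a,arm64-v8a --version=1.0 --versioncode=42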
##############################################################################
# uninstall APK
def uninstall(i):
"""
Input: {
(data_uoa) or (name) - get params only for this APK
(target_os) - target Android OS (ck search os --tags=android) (default = android-32)
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
# First check if exists
o=i.get('out','')
oo=''
if o=='con':
i['out']=''
oo=o
r=detect(i)
if r['return']>0: return r
hos=i.get('host_os','')
tos=i.get('target_os','')
tdid=i.get('device_id','')
target=i.get('target','')
xtdid=''
if tdid!='': xtdid=' -s '+tdid
if target=='' and tos=='':
tos='android-32'
name=i.get('name','')
if name=='':
name=i.get('data_uoa','')
ii={'action':'shell',
'module_uoa':cfg['module_deps']['os'],
'host_os':hos,
'target_os':hos,
'out':oo,
'cmd':'adb '+xtdid+' uninstall '+name}
r=ck.access(ii)
if r['return']>0: return r
rc=r['return_code']
if rc>0:
return {'return':1, 'error':'command may have failed (return code='+str(rc)+')'}
return r
##############################################################################
# List the apks installed on a target device
def list_installed(i):
"""
Input: {
(device_id)
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
# First check if exists
o=i.get('out','')
oo=''
if o=='con':
i['out']=''
oo=o
# r=detect(i)
# if r['return']>0: return r
hos=i.get('host_os','')
tos=i.get('target_os','')
tdid=i.get('device_id','')
target=i.get('target','')
xtdid=''
if tdid!='': xtdid=' -s '+tdid
if target=='' and tos=='':
tos='android-32'
ii={'action':'shell',
'module_uoa':cfg['module_deps']['os'],
'host_os':hos,
'target_os':hos,
'out':oo,
'cmd':'adb '+xtdid+' shell pm list packages -f'}
r=ck.access(ii)
if r['return']>0: return r
rc=r['return_code']
if rc>0:
return {'return':1, 'error':'command may have failed (return code='+str(rc)+')'}
output = r['stdout']
# Output format is "package:[path]=[package]"
packages = [ a.split('=')[1] for a in output.split('\n') if '=' in a ]
if o=='con':
for p in packages:
ck.out(p)
return { 'return':0, 'lst':packages}
##############################################################################
# Uninstall all applications on a target device
def uninstall_all(i):
"""
Input: {
(device_id)
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
Uninstall all applications on the device specified.
"""
# First check if exists
o=i.get('out','')
oo=''
if o=='con':
i['out']=''
oo=o
hos=i.get('host_os','')
tos=i.get('target_os','')
tdid=i.get('device_id','')
target=i.get('target','')
r=list_installed({'device_id':tdid})
if r['return']>0: return r
for apk in r['lst']:
ii={'data_uoa':apk,
'device_id':tdid}
uninstall(ii)
if r['return']>0: return r
return {'return':0}
| bsd-3-clause | -7,371,507,197,331,777,000 | 25.133681 | 119 | 0.431741 | false |
janez87/social-knowledge-extractor | test/crawler_test/tweets_chunk_tests.py | 2 | 1042 | __author__ = 'marcotagliabue'
import unittest
from dandelion import DataTXT
from utils import mongo_manager
import configuration
from Model import tweets_chunk
class ChunksTest(unittest.TestCase):
def setUp(self):
# Retrieve all tweets
tweets = list(mongo_manager.MongoManager(configuration.db_name).find("tweets", {}))[10:16]
self.datatxt = DataTXT(app_id=configuration.APP1_ID, app_key=configuration.API_KEY_DANDELION1)
self.t = tweets_chunk.TweetsChunk(tweets)
def test_chunks(self):
unique = self.t.get_unique_string()
print(unique)
response = self.datatxt.nex(self.t.get_unique_string(),
**{"include": ["types", "categories", "abstract", "alternate_labels"],
"social.hashtag": True, "social.mention": True})
print(response.annotations)
self.t.split_annotation_each_tweet(response.annotations)
print(self.t.index_tweet)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | -8,622,158,109,543,462,000 | 32.612903 | 106 | 0.631478 | false |
giserh/coordTransform_py | coordTransform_utils.py | 1 | 4994 | # -*- coding: utf-8 -*-
import json
import requests
import math
key = 'your key here'  # put your web-service API key here (the geocode() request below goes to the AMap/Gaode REST API)
x_pi = 3.14159265358979324 * 3000.0 / 180.0
pi = 3.1415926535897932384626 # π
a = 6378245.0  # semi-major axis of the Krasovsky 1940 ellipsoid (metres)
ee = 0.00669342162296594323  # first eccentricity squared of that ellipsoid
def geocode(address):
"""
利用百度geocoding服务解析地址获取位置坐标
:param address:需要解析的地址
:return:
"""
geocoding = {'s': 'rsv3',
'key': key,
'city': '全国',
'address': address}
res = requests.get(
"http://restapi.amap.com/v3/geocode/geo", params=geocoding)
if res.status_code == 200:
json = res.json()
status = json.get('status')
count = json.get('count')
if status == '1' and int(count) >= 1:
geocodes = json.get('geocodes')[0]
lng = float(geocodes.get('location').split(',')[0])
lat = float(geocodes.get('location').split(',')[1])
return [lng, lat]
else:
return None
else:
return None
def gcj02tobd09(lng, lat):
"""
火星坐标系(GCJ-02)转百度坐标系(BD-09)
谷歌、高德——>百度
:param lng:火星坐标经度
:param lat:火星坐标纬度
:return:
"""
z = math.sqrt(lng * lng + lat * lat) + 0.00002 * math.sin(lat * x_pi)
theta = math.atan2(lat, lng) + 0.000003 * math.cos(lng * x_pi)
bd_lng = z * math.cos(theta) + 0.0065
bd_lat = z * math.sin(theta) + 0.006
return [bd_lng, bd_lat]
def bd09togcj02(bd_lon, bd_lat):
"""
百度坐标系(BD-09)转火星坐标系(GCJ-02)
百度——>谷歌、高德
:param bd_lat:百度坐标纬度
:param bd_lon:百度坐标经度
:return:转换后的坐标列表形式
"""
x = bd_lon - 0.0065
y = bd_lat - 0.006
z = math.sqrt(x * x + y * y) - 0.00002 * math.sin(y * x_pi)
theta = math.atan2(y, x) - 0.000003 * math.cos(x * x_pi)
gg_lng = z * math.cos(theta)
gg_lat = z * math.sin(theta)
return [gg_lng, gg_lat]
def wgs84togcj02(lng, lat):
"""
WGS84转GCJ02(火星坐标系)
:param lng:WGS84坐标系的经度
:param lat:WGS84坐标系的纬度
:return:
"""
    if out_of_china(lng, lat):  # no offset is applied outside mainland China
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [mglng, mglat]
def gcj02towgs84(lng, lat):
"""
GCJ02(火星坐标系)转GPS84
:param lng:火星坐标系的经度
:param lat:火星坐标系纬度
:return:
"""
if out_of_china(lng, lat):
return lng, lat
dlat = transformlat(lng - 105.0, lat - 35.0)
dlng = transformlng(lng - 105.0, lat - 35.0)
radlat = lat / 180.0 * pi
magic = math.sin(radlat)
magic = 1 - ee * magic * magic
sqrtmagic = math.sqrt(magic)
dlat = (dlat * 180.0) / ((a * (1 - ee)) / (magic * sqrtmagic) * pi)
dlng = (dlng * 180.0) / (a / sqrtmagic * math.cos(radlat) * pi)
mglat = lat + dlat
mglng = lng + dlng
return [lng * 2 - mglng, lat * 2 - mglat]
def transformlat(lng, lat):
ret = -100.0 + 2.0 * lng + 3.0 * lat + 0.2 * lat * lat + \
0.1 * lng * lat + 0.2 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lat * pi) + 40.0 *
math.sin(lat / 3.0 * pi)) * 2.0 / 3.0
ret += (160.0 * math.sin(lat / 12.0 * pi) + 320 *
math.sin(lat * pi / 30.0)) * 2.0 / 3.0
return ret
def transformlng(lng, lat):
ret = 300.0 + lng + 2.0 * lat + 0.1 * lng * lng + \
0.1 * lng * lat + 0.1 * math.sqrt(math.fabs(lng))
ret += (20.0 * math.sin(6.0 * lng * pi) + 20.0 *
math.sin(2.0 * lng * pi)) * 2.0 / 3.0
ret += (20.0 * math.sin(lng * pi) + 40.0 *
math.sin(lng / 3.0 * pi)) * 2.0 / 3.0
ret += (150.0 * math.sin(lng / 12.0 * pi) + 300.0 *
math.sin(lng / 30.0 * pi)) * 2.0 / 3.0
return ret
def out_of_china(lng, lat):
"""
判断是否在国内,不在国内不做偏移
:param lng:
:param lat:
:return:
"""
if lng < 72.004 or lng > 137.8347:
return True
if lat < 0.8293 or lat > 55.8271:
return True
return False
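# Illustrative round-trip check (coordinates inside China; gcj02towgs84 is only an
# approximate inverse of wgs84togcj02, so expect agreement to within a few metres):
#   lng, lat = 116.397, 39.909
#   back_lng, back_lat = gcj02towgs84(*wgs84togcj02(lng, lat))
#   assert abs(back_lng - lng) < 1e-4 and abs(back_lat - lat) < 1e-4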
if __name__ == '__main__':
lng = 128.543
lat = 37.065
result1 = gcj02tobd09(lng, lat)
result2 = bd09togcj02(lng, lat)
result3 = wgs84togcj02(lng, lat)
result4 = gcj02towgs84(lng, lat)
result5 = geocode('北京市朝阳区朝阳公园')
print result1, result2, result3, result4, result5
| mit | -5,559,326,173,723,235,000 | 27.487654 | 73 | 0.530661 | false |
cs-au-dk/Artemis | WebKit/Tools/Scripts/webkitpy/layout_tests/port/win.py | 1 | 4847 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import re
import sys
from webkitpy.common.system.executive import ScriptError, Executive
from webkitpy.common.system.path import abspath_to_uri
from webkitpy.layout_tests.port.apple import ApplePort
_log = logging.getLogger(__name__)
class WinPort(ApplePort):
port_name = "win"
# This is a list of all supported OS-VERSION pairs for the AppleWin port
# and the order of fallback between them. Matches ORWT.
VERSION_FALLBACK_ORDER = ["win-xp", "win-vista", "win-7sp0", "win"]
def do_text_results_differ(self, expected_text, actual_text):
# Sanity was restored in WK2, so we don't need this hack there.
if self.get_option('webkit_test_runner'):
return ApplePort.do_text_results_differ(self, expected_text, actual_text)
# This is a hack (which dates back to ORWT).
# Windows does not have an EDITING DELEGATE, so we strip any EDITING DELEGATE
# messages to make more of the tests pass.
# It's possible more of the ports might want this and this could move down into WebKitPort.
delegate_regexp = re.compile("^EDITING DELEGATE: .*?\n", re.MULTILINE)
expected_text = delegate_regexp.sub("", expected_text)
actual_text = delegate_regexp.sub("", actual_text)
return expected_text != actual_text
def baseline_search_path(self):
fallback_index = self.VERSION_FALLBACK_ORDER.index(self._port_name_with_version())
fallback_names = list(self.VERSION_FALLBACK_ORDER[fallback_index:])
# FIXME: The AppleWin port falls back to AppleMac for some results. Eventually we'll have a shared 'apple' port.
if self.get_option('webkit_test_runner'):
fallback_names.insert(0, 'win-wk2')
fallback_names.append('mac-wk2')
# Note we do not add 'wk2' here, even though it's included in _skipped_search_paths().
# FIXME: Perhaps we should get this list from MacPort?
fallback_names.extend(['mac-lion', 'mac'])
return map(self._webkit_baseline_path, fallback_names)
def operating_system(self):
return 'win'
def show_results_html_file(self, results_filename):
self._run_script('run-safari', [abspath_to_uri(results_filename)])
# FIXME: webkitperl/httpd.pm installs /usr/lib/apache/libphp4.dll on cycwin automatically
# as part of running old-run-webkit-tests. That's bad design, but we may need some similar hack.
# We might use setup_environ_for_server for such a hack (or modify apache_http_server.py).
def _runtime_feature_list(self):
supported_features_command = [self._path_to_driver(), '--print-supported-features']
try:
output = self._executive.run_command(supported_features_command, error_handler=Executive.ignore_error)
except OSError, e:
_log.warn("Exception running driver: %s, %s. Driver must be built before calling WebKitPort.test_expectations()." % (supported_features_command, e))
return None
# Note: win/DumpRenderTree.cpp does not print a leading space before the features_string.
match_object = re.match("SupportedFeatures:\s*(?P<features_string>.*)\s*", output)
if not match_object:
return None
return match_object.group('features_string').split(' ')
| gpl-3.0 | 7,427,172,287,631,490,000 | 49.489583 | 161 | 0.710749 | false |
TwoFlyLiu/novel | distrib/novel-linux-i386_0.0.1/usr/bin/novel-0.0.1/log.py | 3 | 1731 | #!/usr/bin/env python3
import logging, getopt, sys
import config as cfg
from logging.handlers import RotatingFileHandler
FORMATTER = logging.Formatter("%(asctime)s [%(levelname)s]: %(message)s")
def config(*args):
if len(args) == 0:
cmd_args = sys.argv[1:]
else:
cmd_args = args[0]
    # Open the root logger's filter completely: the master switch must be at
    # DEBUG so that the handlers attached below can apply their own filtering.
    logging.getLogger('').setLevel(logging.DEBUG)
config_logging(cmd_args)
config_rotating_logging()
def config_rotating_logging():
    # Configure the local rotating file log; it records at DEBUG level even when
    # --log=debug was not given on the command line.
rotating_handler = RotatingFileHandler('%s/%s' %(cfg.config['log_dirname'], cfg.config['log_filename']), maxBytes=cfg.config['max_log_file_size'],
backupCount=cfg.config["log_backup_count"])
rotating_handler.setLevel(logging.DEBUG)
rotating_handler.setFormatter(FORMATTER)
logging.getLogger("").addHandler(rotating_handler)
def config_logging(cmd_args):
str_level = parse_cmdline(cmd_args)
    log_level = getattr(logging, str_level.upper(), logging.WARNING)  # fall back to WARNING for unknown level names
console_handler = logging.StreamHandler()
console_handler.setLevel(log_level)
console_handler.setFormatter(FORMATTER)
    logging.getLogger('').addHandler(console_handler)  # logging.getLogger('') returns the global root logger
def parse_cmdline(cmd_args):
try:
opts, args = getopt.getopt(cmd_args, "l:", ["log="])
except getopt.GetoptError:
print("Usage: %s --log=[DEBUG|INFO|WARNING|ERROR|CRITICAL]" %sys.argv[0])
sys.exit(1)
for opt, arg in opts:
if opt in ("-l", "--log"):
return arg
return "WARNING"
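# Illustrative usage (assumes this module is importable as "log" and that
# config.py defines the log_* settings used above):
#   import logging
#   import log
#   log.config(['--log=DEBUG'])   # console handler at DEBUG plus the rotating file log
#   logging.getLogger(__name__).info('started')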
| gpl-3.0 | 432,530,988,444,907,970 | 30.82 | 150 | 0.675676 | false |
dart-lang/sdk | tools/dom/scripts/database.py | 2 | 11954 | #!/usr/bin/env python3
# Copyright (c) 2011, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Module to manage IDL files."""
import copy
import pickle
import logging
import os
import os.path
import shutil
import idlnode
import idlrenderer
from generator import IsDartCollectionType, IsPureInterface
_logger = logging.getLogger('database')
class Database(object):
"""The Database class manages a collection of IDL files stored
inside a directory.
    Each IDL file describes a single interface. The IDL files are written in the
FremontCut syntax, which is derived from the Web IDL syntax and includes
annotations.
Database operations include adding, updating and removing IDL files.
"""
def __init__(self, root_dir):
"""Initializes a Database over a given directory.
Args:
root_dir -- a directory. If directory does not exist, it will
be created.
"""
self._root_dir = root_dir
if not os.path.exists(root_dir):
_logger.debug('creating root directory %s' % root_dir)
os.makedirs(root_dir)
self._all_interfaces = {}
self._interfaces_to_delete = []
self._enums = {}
self._all_dictionaries = {}
# TODO(terry): Hack to remember all typedef unions.
self._all_type_defs = {}
def Clone(self):
new_database = Database(self._root_dir)
new_database._all_interfaces = copy.deepcopy(self._all_interfaces)
new_database._interfaces_to_delete = copy.deepcopy(
self._interfaces_to_delete)
new_database._enums = copy.deepcopy(self._enums)
new_database._all_dictionaries = copy.deepcopy(self._all_dictionaries)
new_database._all_type_defs = copy.deepcopy(self._all_type_defs)
return new_database
def Delete(self):
"""Deletes the database by deleting its directory"""
if os.path.exists(self._root_dir):
shutil.rmtree(self._root_dir)
# reset in-memory constructs
self._all_interfaces = {}
def _ScanForInterfaces(self):
"""Iteratores over the database files and lists all interface names.
Return:
A list of interface names.
"""
res = []
        # os.path.walk() was removed in Python 3; walk the tree with os.walk().
        for dirname, _subdirs, names in os.walk(self._root_dir):
            for name in names:
                if os.path.isfile(os.path.join(dirname, name)):
                    root, ext = os.path.splitext(name)
                    if ext == '.idl':
                        res.append(root)
return res
def _FilePath(self, interface_name):
"""Calculates the file path that a given interface should
be saved to.
Args:
interface_name -- the name of the interface.
"""
return os.path.join(self._root_dir, '%s.idl' % interface_name)
def _LoadInterfaceFile(self, interface_name):
"""Loads an interface from the database.
Returns:
An IDLInterface instance or None if the interface is not found.
Args:
interface_name -- the name of the interface.
"""
file_name = self._FilePath(interface_name)
_logger.info('loading %s' % file_name)
if not os.path.exists(file_name):
return None
f = open(file_name, 'r')
content = f.read()
f.close()
# Parse file:
idl_file = idlnode.IDLFile(self._idlparser.parse(content), file_name)
if not idl_file.interfaces:
raise RuntimeError('No interface found in %s' % file_name)
elif len(idl_file.interfaces) > 1:
raise RuntimeError('Expected one interface in %s' % file_name)
interface = idl_file.interfaces[0]
self._all_interfaces[interface_name] = interface
return interface
def Load(self):
"""Loads all interfaces into memory.
"""
# FIXME: Speed this up by multi-threading.
for (interface_name) in self._ScanForInterfaces():
self._LoadInterfaceFile(interface_name)
self.Cache()
def Cache(self):
"""Serialize the database using pickle for faster startup in the future
"""
output_file = open(os.path.join(self._root_dir, 'cache.pickle'), 'wb')
pickle.dump(self._all_interfaces, output_file)
pickle.dump(self._interfaces_to_delete, output_file)
def LoadFromCache(self):
"""Deserialize the database using pickle for fast startup
"""
input_file_name = os.path.join(self._root_dir, 'cache.pickle')
if not os.path.isfile(input_file_name):
self.Load()
return
input_file = open(input_file_name, 'rb')
self._all_interfaces = pickle.load(input_file)
self._interfaces_to_delete = pickle.load(input_file)
input_file.close()
def Save(self):
"""Saves all in-memory interfaces into files."""
for interface in self._all_interfaces.values():
self._SaveInterfaceFile(interface)
for interface_name in self._interfaces_to_delete:
self._DeleteInterfaceFile(interface_name)
def _SaveInterfaceFile(self, interface):
"""Saves an interface into the database.
Args:
interface -- an IDLInterface instance.
"""
interface_name = interface.id
# Actual saving
file_path = self._FilePath(interface_name)
_logger.debug('writing %s' % file_path)
dir_name = os.path.dirname(file_path)
if not os.path.exists(dir_name):
_logger.debug('creating directory %s' % dir_name)
os.mkdir(dir_name)
# Render the IDLInterface object into text.
text = idlrenderer.render(interface)
f = open(file_path, 'w')
f.write(text)
f.close()
def HasInterface(self, interface_name):
"""Returns True if the interface is in memory"""
return interface_name in self._all_interfaces
def GetInterface(self, interface_name):
"""Returns an IDLInterface corresponding to the interface_name
from memory.
Args:
interface_name -- the name of the interface.
"""
if interface_name not in self._all_interfaces:
raise RuntimeError('Interface %s is not loaded' % interface_name)
return self._all_interfaces[interface_name]
def AddInterface(self, interface):
"""Returns an IDLInterface corresponding to the interface_name
from memory.
Args:
interface -- the name of the interface.
"""
interface_name = interface.id
if interface_name in self._all_interfaces:
raise RuntimeError('Interface %s already exists' % interface_name)
self._all_interfaces[interface_name] = interface
def GetInterfaces(self):
"""Returns a list of all loaded interfaces."""
res = []
for _, interface in sorted(self._all_interfaces.items()):
res.append(interface)
return res
def DeleteInterface(self, interface_name):
"""Deletes an interface from the database. File is deleted when
Save() is called.
Args:
interface_name -- the name of the interface.
"""
if interface_name not in self._all_interfaces:
raise RuntimeError('Interface %s not found' % interface_name)
self._interfaces_to_delete.append(interface_name)
del self._all_interfaces[interface_name]
def _DeleteInterfaceFile(self, interface_name):
"""Actual file deletion"""
file_path = self._FilePath(interface_name)
if os.path.exists(file_path):
_logger.debug('deleting %s' % file_path)
os.remove(file_path)
def Hierarchy(self, interface):
yield interface
for parent in interface.parents:
parent_name = parent.type.id
if not self.HasInterface(parent.type.id):
continue
for parent_interface in self.Hierarchy(
self.GetInterface(parent.type.id)):
yield parent_interface
def HasEnum(self, enum_name):
return enum_name in self._enums
def GetEnum(self, enum_name):
return self._enums[enum_name]
def AddEnum(self, enum):
self._enums[enum.id] = enum
def HasDictionary(self, dictionary_name):
"""Returns True if the dictionary is in memory"""
return dictionary_name in self._all_dictionaries
def GetDictionary(self, dictionary_name):
"""Returns an IDLDictionary corresponding to the dictionary_name
from memory.
Args:
dictionary_name -- the name of the dictionary.
"""
if dictionary_name not in self._all_dictionaries:
raise RuntimeError('Dictionary %s is not loaded' % dictionary_name)
return self._all_dictionaries[dictionary_name]
def AddDictionary(self, dictionary):
"""Returns an IDLDictionary corresponding to the dictionary_name
from memory.
Args:
dictionary -- the name of the dictionary.
"""
dictionary_name = dictionary.id
if dictionary_name in self._all_dictionaries:
raise RuntimeError('Dictionary %s already exists' % dictionary_name)
self._all_dictionaries[dictionary_name] = dictionary
def GetDictionaries(self):
"""Returns a list of all loaded dictionaries."""
res = []
for _, dictionary in sorted(self._all_dictionaries.items()):
res.append(dictionary)
return res
def HasTypeDef(self, type_def_name):
"""Returns True if the typedef is in memory"""
return type_def_name in self._all_type_defs
def GetTypeDef(self, type_def_name):
"""Returns an IDLTypeDef corresponding to the type_def_name
from memory.
Args:
type_def_name -- the name of the typedef.
"""
if type_def_name not in self._all_type_defs:
raise RuntimeError('Typedef %s is not loaded' % type_def_name)
return self._all_type_defs[type_def_name]
def AddTypeDef(self, type_def):
"""Add only a typedef that a unions they map to any (no type)."""
type_def_name = type_def.id
if type_def_name in self._all_type_defs:
raise RuntimeError('Typedef %s already exists' % type_def_name)
self._all_type_defs[type_def_name] = type_def
print(' Added typedef %s' % type_def_name)
def TransitiveSecondaryParents(self, interface, propagate_event_target):
"""Returns a list of all non-primary parents.
The list contains the interface objects for interfaces defined in the
database, and the name for undefined interfaces.
"""
def walk(parents, walk_result):
for parent in parents:
parent_name = parent.type.id
if IsDartCollectionType(parent_name):
if not (parent_name in walk_result):
walk_result.append(parent_name)
continue
if self.HasInterface(parent_name):
parent_interface = self.GetInterface(parent_name)
if not (parent_interface in walk_result):
# Interface has multi-inherited don't add interfaces more than once
# to our parent result list.
walk_result.append(parent_interface)
walk(parent_interface.parents, walk_result)
return walk_result
result = []
if interface.parents:
parent = interface.parents[0]
if (IsPureInterface(parent.type.id, self) or
(propagate_event_target and parent.type.id == 'EventTarget')):
result = walk(interface.parents, [])
else:
result = walk(interface.parents[1:], [])
return result
| bsd-3-clause | 733,561,349,666,326,000 | 33.851312 | 91 | 0.617534 | false |
afrolov1/nova | nova/api/openstack/compute/plugins/v3/migrations.py | 29 | 2172 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
from nova import compute
from nova.objects import base as obj_base
ALIAS = "os-migrations"
def authorize(context, action_name):
action = 'v3:%s:%s' % (ALIAS, action_name)
extensions.extension_authorizer('compute', action)(context)
def output(migrations_obj):
"""Returns the desired output of the API from an object.
    From a MigrationsList object this method returns a list of
    primitive objects containing only the necessary fields.
"""
objects = obj_base.obj_to_primitive(migrations_obj)
for obj in objects:
del obj['deleted']
del obj['deleted_at']
return objects
class MigrationsController(object):
"""Controller for accessing migrations in OpenStack API."""
def __init__(self):
self.compute_api = compute.API()
@extensions.expected_errors(())
def index(self, req):
"""Return all migrations in progress."""
context = req.environ['nova.context']
authorize(context, "index")
migrations = self.compute_api.get_migrations(context, req.GET)
return {'migrations': output(migrations)}
class Migrations(extensions.V3APIExtensionBase):
"""Provide data on migrations."""
name = "Migrations"
alias = ALIAS
version = 1
def get_resources(self):
resources = []
resource = extensions.ResourceExtension('os-migrations',
MigrationsController())
resources.append(resource)
return resources
def get_controller_extensions(self):
return []
| apache-2.0 | -451,041,208,208,691,900 | 31.41791 | 78 | 0.671731 | false |
Arthaey/anki | aqt/reviewer.py | 6 | 25686 | # -*- coding: utf-8 -*-
# Copyright: Damien Elmes <[email protected]>
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
from __future__ import division
import difflib
import re
import cgi
import unicodedata as ucd
import HTMLParser
from anki.lang import _, ngettext
from aqt.qt import *
from anki.utils import stripHTML, isMac, json
from anki.hooks import addHook, runHook
from anki.sound import playFromText, clearAudioQueue, play
from aqt.utils import mungeQA, getBase, openLink, tooltip, askUserDialog, \
downArrow
from aqt.sound import getAudio
import aqt
class Reviewer(object):
"Manage reviews. Maintains a separate state."
def __init__(self, mw):
self.mw = mw
self.web = mw.web
self.card = None
self.cardQueue = []
self.hadCardQueue = False
self._answeredIds = []
self._recordedAudio = None
self.typeCorrect = None # web init happens before this is set
self.state = None
self.bottom = aqt.toolbar.BottomBar(mw, mw.bottomWeb)
# qshortcut so we don't autorepeat
self.delShortcut = QShortcut(QKeySequence("Delete"), self.mw)
self.delShortcut.setAutoRepeat(False)
self.mw.connect(self.delShortcut, SIGNAL("activated()"), self.onDelete)
addHook("leech", self.onLeech)
def show(self):
self.mw.col.reset()
self.mw.keyHandler = self._keyHandler
self.web.setLinkHandler(self._linkHandler)
self.web.setKeyHandler(self._catchEsc)
if isMac:
self.bottom.web.setFixedHeight(46)
else:
self.bottom.web.setFixedHeight(52+self.mw.fontHeightDelta*4)
self.bottom.web.setLinkHandler(self._linkHandler)
self._reps = None
self.nextCard()
def lastCard(self):
if self._answeredIds:
if not self.card or self._answeredIds[-1] != self.card.id:
try:
return self.mw.col.getCard(self._answeredIds[-1])
except TypeError:
# id was deleted
return
def cleanup(self):
runHook("reviewCleanup")
# Fetching a card
##########################################################################
def nextCard(self):
elapsed = self.mw.col.timeboxReached()
if elapsed:
part1 = ngettext("%d card studied in", "%d cards studied in", elapsed[1]) % elapsed[1]
mins = int(round(elapsed[0]/60))
part2 = ngettext("%s minute.", "%s minutes.", mins) % mins
fin = _("Finish")
diag = askUserDialog("%s %s" % (part1, part2),
[_("Continue"), fin])
diag.setIcon(QMessageBox.Information)
if diag.run() == fin:
return self.mw.moveToState("deckBrowser")
self.mw.col.startTimebox()
if self.cardQueue:
# undone/edited cards to show
c = self.cardQueue.pop()
c.startTimer()
self.hadCardQueue = True
else:
if self.hadCardQueue:
# the undone/edited cards may be sitting in the regular queue;
# need to reset
self.mw.col.reset()
self.hadCardQueue = False
c = self.mw.col.sched.getCard()
self.card = c
clearAudioQueue()
if not c:
self.mw.moveToState("overview")
return
if self._reps is None or self._reps % 100 == 0:
# we recycle the webview periodically so webkit can free memory
self._initWeb()
else:
self._showQuestion()
# Audio
##########################################################################
def replayAudio(self, previewer=None):
if previewer:
state = previewer._previewState
c = previewer.card
else:
state = self.state
c = self.card
clearAudioQueue()
if state == "question":
playFromText(c.q())
elif state == "answer":
txt = ""
if self._replayq(c, previewer):
txt = c.q()
txt += c.a()
playFromText(txt)
# Initializing the webview
##########################################################################
_revHtml = """
<img src="qrc:/icons/rating.png" id=star class=marked>
<div id=qa></div>
<script>
var ankiPlatform = "desktop";
var typeans;
function _updateQA (q, answerMode, klass) {
$("#qa").html(q);
typeans = document.getElementById("typeans");
if (typeans) {
typeans.focus();
}
if (answerMode) {
var e = $("#answer");
if (e[0]) { e[0].scrollIntoView(); }
} else {
window.scrollTo(0, 0);
}
if (klass) {
document.body.className = klass;
}
// don't allow drags of images, which cause them to be deleted
$("img").attr("draggable", false);
};
function _toggleStar (show) {
if (show) {
$(".marked").show();
} else {
$(".marked").hide();
}
}
function _getTypedText () {
if (typeans) {
py.link("typeans:"+typeans.value);
}
};
function _typeAnsPress() {
if (window.event.keyCode === 13) {
py.link("ansHack");
}
}
</script>
"""
def _initWeb(self):
self._reps = 0
self._bottomReady = False
base = getBase(self.mw.col)
# main window
self.web.stdHtml(self._revHtml, self._styles(),
loadCB=lambda x: self._showQuestion(),
head=base)
# show answer / ease buttons
self.bottom.web.show()
self.bottom.web.stdHtml(
self._bottomHTML(),
self.bottom._css + self._bottomCSS,
loadCB=lambda x: self._showAnswerButton())
# Showing the question
##########################################################################
def _mungeQA(self, buf):
return self.typeAnsFilter(mungeQA(self.mw.col, buf))
def _showQuestion(self):
self._reps += 1
self.state = "question"
self.typedAnswer = None
c = self.card
# grab the question and play audio
if c.isEmpty():
q = _("""\
The front of this card is empty. Please run Tools>Empty Cards.""")
else:
q = c.q()
if self.autoplay(c):
playFromText(q)
# render & update bottom
q = self._mungeQA(q)
klass = "card card%d" % (c.ord+1)
self.web.eval("_updateQA(%s, false, '%s');" % (json.dumps(q), klass))
self._toggleStar()
if self._bottomReady:
self._showAnswerButton()
# if we have a type answer field, focus main web
if self.typeCorrect:
self.mw.web.setFocus()
# user hook
runHook('showQuestion')
def autoplay(self, card):
return self.mw.col.decks.confForDid(
card.odid or card.did)['autoplay']
def _replayq(self, card, previewer=None):
s = previewer if previewer else self
return s.mw.col.decks.confForDid(
s.card.odid or s.card.did).get('replayq', True)
def _toggleStar(self):
self.web.eval("_toggleStar(%s);" % json.dumps(
self.card.note().hasTag("marked")))
# Showing the answer
##########################################################################
def _showAnswer(self):
if self.mw.state != "review":
# showing resetRequired screen; ignore space
return
self.state = "answer"
c = self.card
a = c.a()
# play audio?
if self.autoplay(c):
playFromText(a)
# render and update bottom
a = self._mungeQA(a)
self.web.eval("_updateQA(%s, true);" % json.dumps(a))
self._showEaseButtons()
# user hook
runHook('showAnswer')
# Answering a card
############################################################
def _answerCard(self, ease):
"Reschedule card and show next."
if self.mw.state != "review":
# showing resetRequired screen; ignore key
return
if self.state != "answer":
return
if self.mw.col.sched.answerButtons(self.card) < ease:
return
self.mw.col.sched.answerCard(self.card, ease)
self._answeredIds.append(self.card.id)
self.mw.autosave()
self.nextCard()
# Handlers
############################################################
def _catchEsc(self, evt):
if evt.key() == Qt.Key_Escape:
self.web.eval("$('#typeans').blur();")
return True
def _showAnswerHack(self):
# on <qt4.8, calling _showAnswer() directly fails to show images on
# the answer side. But if we trigger it via the bottom web's python
# link, it inexplicably works.
self.bottom.web.eval("py.link('ans');")
def _keyHandler(self, evt):
key = unicode(evt.text())
if key == "e":
self.mw.onEditCurrent()
elif (key == " " or evt.key() in (Qt.Key_Return, Qt.Key_Enter)):
if self.state == "question":
self._showAnswerHack()
elif self.state == "answer":
self._answerCard(self._defaultEase())
elif key == "r" or evt.key() == Qt.Key_F5:
self.replayAudio()
elif key == "*":
self.onMark()
elif key == "=":
self.onBuryNote()
elif key == "-":
self.onBuryCard()
elif key == "!":
self.onSuspend()
elif key == "@":
self.onSuspendCard()
elif key == "V":
self.onRecordVoice()
elif key == "o":
self.onOptions()
elif key in ("1", "2", "3", "4"):
self._answerCard(int(key))
elif key == "v":
self.onReplayRecorded()
def _linkHandler(self, url):
if url == "ans":
self._showAnswer()
elif url == "ansHack":
self.mw.progress.timer(100, self._showAnswerHack, False)
elif url.startswith("ease"):
self._answerCard(int(url[4:]))
elif url == "edit":
self.mw.onEditCurrent()
elif url == "more":
self.showContextMenu()
elif url.startswith("typeans:"):
(cmd, arg) = url.split(":", 1)
self.typedAnswer = arg
else:
openLink(url)
# CSS
##########################################################################
_css = """
hr { background-color:#ccc; margin: 1em; }
body { margin:1.5em; }
img { max-width: 95%; max-height: 95%; }
.marked { position:fixed; right: 7px; top: 7px; display: none; }
#typeans { width: 100%; }
.typeGood { background: #0f0; }
.typeBad { background: #f00; }
.typeMissed { background: #ccc; }
"""
def _styles(self):
return self._css
# Type in the answer
##########################################################################
typeAnsPat = "\[\[type:(.+?)\]\]"
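    # Matches the type-answer placeholder left in the rendered card text, e.g.
    # "[[type:Back]]" (compare the typed answer against the "Back" field) or
    # "[[type:cloze:Text]]" (compare against the cloze deletion for this card's ordinal).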
def typeAnsFilter(self, buf):
if self.state == "question":
return self.typeAnsQuestionFilter(buf)
else:
return self.typeAnsAnswerFilter(buf)
def typeAnsQuestionFilter(self, buf):
self.typeCorrect = None
clozeIdx = None
m = re.search(self.typeAnsPat, buf)
if not m:
return buf
fld = m.group(1)
# if it's a cloze, extract data
if fld.startswith("cloze:"):
# get field and cloze position
clozeIdx = self.card.ord + 1
fld = fld.split(":")[1]
# loop through fields for a match
for f in self.card.model()['flds']:
if f['name'] == fld:
self.typeCorrect = self.card.note()[f['name']]
if clozeIdx:
# narrow to cloze
self.typeCorrect = self._contentForCloze(
self.typeCorrect, clozeIdx)
self.typeFont = f['font']
self.typeSize = f['size']
break
if not self.typeCorrect:
if self.typeCorrect is None:
if clozeIdx:
warn = _("""\
Please run Tools>Empty Cards""")
else:
warn = _("Type answer: unknown field %s") % fld
return re.sub(self.typeAnsPat, warn, buf)
else:
# empty field, remove type answer pattern
return re.sub(self.typeAnsPat, "", buf)
return re.sub(self.typeAnsPat, """
<center>
<input type=text id=typeans onkeypress="_typeAnsPress();"
style="font-family: '%s'; font-size: %spx;">
</center>
""" % (self.typeFont, self.typeSize), buf)
def typeAnsAnswerFilter(self, buf):
# tell webview to call us back with the input content
self.web.eval("_getTypedText();")
if not self.typeCorrect:
return re.sub(self.typeAnsPat, "", buf)
origSize = len(buf)
buf = buf.replace("<hr id=answer>", "")
hadHR = len(buf) != origSize
# munge correct value
parser = HTMLParser.HTMLParser()
cor = stripHTML(self.mw.col.media.strip(self.typeCorrect))
# ensure we don't chomp multiple whitespace
cor = cor.replace(" ", " ")
cor = parser.unescape(cor)
cor = cor.replace(u"\xa0", " ")
given = self.typedAnswer
# compare with typed answer
res = self.correct(given, cor, showBad=False)
# and update the type answer area
def repl(match):
# can't pass a string in directly, and can't use re.escape as it
# escapes too much
s = """
<span style="font-family: '%s'; font-size: %spx">%s</span>""" % (
self.typeFont, self.typeSize, res)
if hadHR:
# a hack to ensure the q/a separator falls before the answer
# comparison when user is using {{FrontSide}}
s = "<hr id=answer>" + s
return s
return re.sub(self.typeAnsPat, repl, buf)
def _contentForCloze(self, txt, idx):
matches = re.findall("\{\{c%s::(.+?)\}\}"%idx, txt)
if not matches:
return None
def noHint(txt):
if "::" in txt:
return txt.split("::")[0]
return txt
matches = [noHint(txt) for txt in matches]
uniqMatches = set(matches)
if len(uniqMatches) == 1:
txt = matches[0]
else:
txt = ", ".join(matches)
return txt
def tokenizeComparison(self, given, correct):
# compare in NFC form so accents appear correct
given = ucd.normalize("NFC", given)
correct = ucd.normalize("NFC", correct)
try:
s = difflib.SequenceMatcher(None, given, correct, autojunk=False)
except:
# autojunk was added in python 2.7.1
s = difflib.SequenceMatcher(None, given, correct)
givenElems = []
correctElems = []
givenPoint = 0
correctPoint = 0
offby = 0
def logBad(old, new, str, array):
if old != new:
array.append((False, str[old:new]))
def logGood(start, cnt, str, array):
if cnt:
array.append((True, str[start:start+cnt]))
for x, y, cnt in s.get_matching_blocks():
# if anything was missed in correct, pad given
if cnt and y-offby > x:
givenElems.append((False, "-"*(y-x-offby)))
offby = y-x
# log any proceeding bad elems
logBad(givenPoint, x, given, givenElems)
logBad(correctPoint, y, correct, correctElems)
givenPoint = x+cnt
correctPoint = y+cnt
# log the match
logGood(x, cnt, given, givenElems)
logGood(y, cnt, correct, correctElems)
return givenElems, correctElems
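    # Illustrative example (worked out by hand): for given="helo", correct="hello"
    # this returns
    #   givenElems   == [(True, "hel"), (False, "-"), (True, "o")]
    #   correctElems == [(True, "hel"), (False, "l"), (True, "o")]
    # i.e. the letter missing from the typed answer shows up as a "-" placeholder.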
def correct(self, given, correct, showBad=True):
"Diff-corrects the typed-in answer."
givenElems, correctElems = self.tokenizeComparison(given, correct)
def good(s):
return "<span class=typeGood>"+cgi.escape(s)+"</span>"
def bad(s):
return "<span class=typeBad>"+cgi.escape(s)+"</span>"
def missed(s):
return "<span class=typeMissed>"+cgi.escape(s)+"</span>"
if given == correct:
res = good(given)
else:
res = ""
for ok, txt in givenElems:
if ok:
res += good(txt)
else:
res += bad(txt)
res += "<br>↓<br>"
for ok, txt in correctElems:
if ok:
res += good(txt)
else:
res += missed(txt)
res = "<div><code id=typeans>" + res + "</code></div>"
return res
# Bottom bar
##########################################################################
_bottomCSS = """
body {
background: -webkit-gradient(linear, left top, left bottom,
from(#fff), to(#ddd));
border-bottom: 0;
border-top: 1px solid #aaa;
margin: 0;
padding: 0px;
padding-left: 5px; padding-right: 5px;
}
button {
min-width: 60px; white-space: nowrap;
}
.hitem { margin-top: 2px; }
.stat { padding-top: 5px; }
.stat2 { padding-top: 3px; font-weight: normal; }
.stattxt { padding-left: 5px; padding-right: 5px; white-space: nowrap; }
.nobold { font-weight: normal; display: inline-block; padding-top: 4px; }
.spacer { height: 18px; }
.spacer2 { height: 16px; }
"""
def _bottomHTML(self):
return """
<table width=100%% cellspacing=0 cellpadding=0>
<tr>
<td align=left width=50 valign=top class=stat>
<br>
<button title="%(editkey)s" onclick="py.link('edit');">%(edit)s</button></td>
<td align=center valign=top id=middle>
</td>
<td width=50 align=right valign=top class=stat><span id=time class=stattxt>
</span><br>
<button onclick="py.link('more');">%(more)s %(downArrow)s</button>
</td>
</tr>
</table>
<script>
var time = %(time)d;
var maxTime = 0;
$(function () {
$("#ansbut").focus();
updateTime();
setInterval(function () { time += 1; updateTime() }, 1000);
});
var updateTime = function () {
if (!maxTime) {
$("#time").text("");
return;
}
time = Math.min(maxTime, time);
var m = Math.floor(time / 60);
var s = time %% 60;
if (s < 10) {
s = "0" + s;
}
var e = $("#time");
if (maxTime == time) {
e.html("<font color=red>" + m + ":" + s + "</font>");
} else {
e.text(m + ":" + s);
}
}
function showQuestion(txt, maxTime_) {
// much faster than jquery's .html()
$("#middle")[0].innerHTML = txt;
$("#ansbut").focus();
time = 0;
maxTime = maxTime_;
}
function showAnswer(txt) {
$("#middle")[0].innerHTML = txt;
$("#defease").focus();
}
</script>
""" % dict(rem=self._remaining(), edit=_("Edit"),
editkey=_("Shortcut key: %s") % "E",
more=_("More"),
downArrow=downArrow(),
time=self.card.timeTaken() // 1000)
def _showAnswerButton(self):
self._bottomReady = True
if not self.typeCorrect:
self.bottom.web.setFocus()
middle = '''
<span class=stattxt>%s</span><br>
<button title="%s" id=ansbut onclick='py.link(\"ans\");'>%s</button>''' % (
self._remaining(), _("Shortcut key: %s") % _("Space"), _("Show Answer"))
# wrap it in a table so it has the same top margin as the ease buttons
middle = "<table cellpadding=0><tr><td class=stat2 align=center>%s</td></tr></table>" % middle
if self.card.shouldShowTimer():
maxTime = self.card.timeLimit() / 1000
else:
maxTime = 0
self.bottom.web.eval("showQuestion(%s,%d);" % (
json.dumps(middle), maxTime))
def _showEaseButtons(self):
self.bottom.web.setFocus()
middle = self._answerButtons()
self.bottom.web.eval("showAnswer(%s);" % json.dumps(middle))
def _remaining(self):
if not self.mw.col.conf['dueCounts']:
return ""
if self.hadCardQueue:
# if it's come from the undo queue, don't count it separately
counts = list(self.mw.col.sched.counts())
else:
counts = list(self.mw.col.sched.counts(self.card))
idx = self.mw.col.sched.countIdx(self.card)
counts[idx] = "<u>%s</u>" % (counts[idx])
space = " + "
ctxt = '<font color="#000099">%s</font>' % counts[0]
ctxt += space + '<font color="#C35617">%s</font>' % counts[1]
ctxt += space + '<font color="#007700">%s</font>' % counts[2]
return ctxt
def _defaultEase(self):
if self.mw.col.sched.answerButtons(self.card) == 4:
return 3
else:
return 2
def _answerButtonList(self):
l = ((1, _("Again")),)
cnt = self.mw.col.sched.answerButtons(self.card)
if cnt == 2:
return l + ((2, _("Good")),)
elif cnt == 3:
return l + ((2, _("Good")), (3, _("Easy")))
else:
return l + ((2, _("Hard")), (3, _("Good")), (4, _("Easy")))
def _answerButtons(self):
times = []
default = self._defaultEase()
def but(i, label):
if i == default:
extra = "id=defease"
else:
extra = ""
due = self._buttonTime(i)
return '''
<td align=center>%s<button %s title="%s" onclick='py.link("ease%d");'>\
%s</button></td>''' % (due, extra, _("Shortcut key: %s") % i, i, label)
buf = "<center><table cellpading=0 cellspacing=0><tr>"
for ease, label in self._answerButtonList():
buf += but(ease, label)
buf += "</tr></table>"
script = """
<script>$(function () { $("#defease").focus(); });</script>"""
return buf + script
def _buttonTime(self, i):
if not self.mw.col.conf['estTimes']:
return "<div class=spacer></div>"
txt = self.mw.col.sched.nextIvlStr(self.card, i, True) or " "
return '<span class=nobold>%s</span><br>' % txt
# Leeches
##########################################################################
def onLeech(self, card):
# for now
s = _("Card was a leech.")
if card.queue < 0:
s += " " + _("It has been suspended.")
tooltip(s)
# Context menu
##########################################################################
# note the shortcuts listed here also need to be defined above
def showContextMenu(self):
opts = [
[_("Mark Note"), "*", self.onMark],
[_("Bury Card"), "-", self.onBuryCard],
[_("Bury Note"), "=", self.onBuryNote],
[_("Suspend Card"), "@", self.onSuspendCard],
[_("Suspend Note"), "!", self.onSuspend],
[_("Delete Note"), "Delete", self.onDelete],
[_("Options"), "O", self.onOptions],
None,
[_("Replay Audio"), "R", self.replayAudio],
[_("Record Own Voice"), "Shift+V", self.onRecordVoice],
[_("Replay Own Voice"), "V", self.onReplayRecorded],
]
m = QMenu(self.mw)
for row in opts:
if not row:
m.addSeparator()
continue
label, scut, func = row
a = m.addAction(label)
a.setShortcut(QKeySequence(scut))
a.connect(a, SIGNAL("triggered()"), func)
runHook("Reviewer.contextMenuEvent",self,m)
m.exec_(QCursor.pos())
def onOptions(self):
self.mw.onDeckConf(self.mw.col.decks.get(
self.card.odid or self.card.did))
def onMark(self):
f = self.card.note()
if f.hasTag("marked"):
f.delTag("marked")
else:
f.addTag("marked")
f.flush()
self._toggleStar()
def onSuspend(self):
self.mw.checkpoint(_("Suspend"))
self.mw.col.sched.suspendCards(
[c.id for c in self.card.note().cards()])
tooltip(_("Note suspended."))
self.mw.reset()
def onSuspendCard(self):
self.mw.checkpoint(_("Suspend"))
self.mw.col.sched.suspendCards([self.card.id])
tooltip(_("Card suspended."))
self.mw.reset()
def onDelete(self):
# need to check state because the shortcut is global to the main
# window
if self.mw.state != "review" or not self.card:
return
self.mw.checkpoint(_("Delete"))
cnt = len(self.card.note().cards())
self.mw.col.remNotes([self.card.note().id])
self.mw.reset()
tooltip(ngettext(
"Note and its %d card deleted.",
"Note and its %d cards deleted.",
cnt) % cnt)
def onBuryCard(self):
self.mw.checkpoint(_("Bury"))
self.mw.col.sched.buryCards([self.card.id])
self.mw.reset()
tooltip(_("Card buried."))
def onBuryNote(self):
self.mw.checkpoint(_("Bury"))
self.mw.col.sched.buryNote(self.card.nid)
self.mw.reset()
tooltip(_("Note buried."))
def onRecordVoice(self):
self._recordedAudio = getAudio(self.mw, encode=False)
self.onReplayRecorded()
def onReplayRecorded(self):
if not self._recordedAudio:
return tooltip(_("You haven't recorded your voice yet."))
clearAudioQueue()
play(self._recordedAudio)
| agpl-3.0 | -5,684,123,282,236,373,000 | 31.888604 | 102 | 0.515806 | false |
matterker/csvkit | tests/test_convert/test_xls.py | 21 | 4335 | #!/usr/bin/env python
import datetime
try:
import unittest2 as unittest
except ImportError:
import unittest
import six
import xlrd
from xlrd.xldate import xldate_from_date_tuple as xldate, xldate_from_time_tuple as xltime, xldate_from_datetime_tuple as xldatetime
from csvkit.exceptions import XLSDataError
from csvkit.convert import xls
class TestXLS(unittest.TestCase):
def test_empty_column(self):
normal_values = xls.normalize_empty(['', '', ''])
self.assertEquals(normal_values, (None, [None, None, None]))
def test_text_column(self):
normal_values = xls.normalize_text([u'This', u'', u'text'])
self.assertEquals(normal_values, (six.text_type, [u'This', None, u'text']))
def test_numbers_column_integral(self):
normal_values = xls.normalize_numbers([1.0, 418000000, -817, 0.0, ''])
self.assertEquals(normal_values, (int, [1, 418000000, -817, 0, None]))
def test_numbers_column_float(self):
normal_values = xls.normalize_numbers([1.01, 418000000, -817.043, 0.0001, ''])
self.assertEquals(normal_values, (float, [1.01, 418000000.0, -817.043, 0.0001, None]))
def test_dates_column_dates(self):
normal_values = xls.normalize_dates([
xldate((2004, 6, 5), 0),
xldate((1984, 2, 23), 0),
xldate((1907, 12, 25), 0),
''], 0)
self.assertEquals(normal_values, (datetime.date, [datetime.date(2004, 6, 5), datetime.date(1984, 2, 23), datetime.date(1907, 12, 25), None]))
def test_dates_column_times(self):
normal_values = xls.normalize_dates([
xltime((14, 30, 0)),
xltime((4, 5, 37)),
xltime((0, 0, 0)),
''], 0)
self.assertEquals(normal_values, (datetime.time, [datetime.time(14, 30, 0), datetime.time(4, 5, 37), datetime.time(0, 0, 0), None]))
def test_dates_column_datetimes(self):
normal_values = xls.normalize_dates([
xldatetime((2004, 6, 5, 14, 30, 23), 0),
xldatetime((1984, 2, 23, 0, 0, 0), 0),
xldatetime((1907, 12, 25, 2, 0, 0), 0),
''], 0)
self.assertEquals(normal_values, (datetime.datetime, [datetime.datetime(2004, 6, 5, 14, 30, 23), datetime.datetime(1984, 2, 23, 0, 0, 0), datetime.datetime(1907, 12, 25, 2, 0, 0), None]))
def test_dates_column_dates_and_times(self):
self.assertRaises(XLSDataError, xls.normalize_dates, [
xldate((2004, 6, 5), 0),
xltime((4, 5, 37)),
''], 0)
    def test_dates_column_dates_and_datetimes(self):
normal_values = xls.normalize_dates([
xldate((2004, 6, 5), 0),
xldatetime((2001, 1, 1, 4, 5, 37), 0),
''], 0)
self.assertEquals(normal_values, (datetime.datetime, [datetime.datetime(2004, 6, 5, 0, 0, 0), datetime.datetime(2001, 1, 1, 4, 5, 37), None]))
def test_dates_column_times_and_datetimes(self):
self.assertRaises(XLSDataError, xls.normalize_dates, [
xldatetime((2004, 6, 5, 0, 30, 0), 0),
xltime((4, 5, 37)),
''], 0)
def test_determine_column_type_single(self):
column_type = xls.determine_column_type([xlrd.biffh.XL_CELL_NUMBER, xlrd.biffh.XL_CELL_NUMBER, xlrd.biffh.XL_CELL_EMPTY])
self.assertEquals(column_type, xlrd.biffh.XL_CELL_NUMBER)
def test_determine_column_type_multiple(self):
column_type = xls.determine_column_type([xlrd.biffh.XL_CELL_NUMBER, xlrd.biffh.XL_CELL_TEXT, xlrd.biffh.XL_CELL_EMPTY])
self.assertEquals(column_type, xlrd.biffh.XL_CELL_TEXT)
def test_determine_column_type_empty(self):
column_type = xls.determine_column_type([xlrd.biffh.XL_CELL_EMPTY, xlrd.biffh.XL_CELL_EMPTY, xlrd.biffh.XL_CELL_EMPTY])
self.assertEquals(column_type, xlrd.biffh.XL_CELL_EMPTY)
def test_xls(self):
with open('examples/test.xls', 'rb') as f:
output = xls.xls2csv(f)
with open('examples/testxls_converted.csv', 'r') as f:
self.assertEquals(f.read(), output)
def test_xls_with_sheet(self):
with open('examples/sheets.xls', 'rb') as f:
output = xls.xls2csv(f, sheet='Sheet2')
with open('examples/sheetsxls_converted.csv', 'r') as f:
self.assertEquals(f.read(), output)
| mit | -6,741,753,145,712,667,000 | 41.920792 | 195 | 0.61015 | false |
vikas-parashar/zulip | zerver/lib/actions.py | 1 | 159321 | from __future__ import absolute_import
from __future__ import print_function
from typing import (
    AbstractSet, Any, AnyStr, Callable, Dict, Iterable, List, Mapping, MutableMapping,
Optional, Sequence, Set, Tuple, TypeVar, Union
)
from django.utils.translation import ugettext as _
from django.conf import settings
from django.core import validators
from django.contrib.sessions.models import Session
from zerver.lib.bugdown import (
BugdownRenderingException,
version as bugdown_version
)
from zerver.lib.cache import (
to_dict_cache_key,
to_dict_cache_key_id,
)
from zerver.lib.context_managers import lockfile
from zerver.lib.message import (
access_message,
MessageDict,
message_to_dict,
render_markdown,
)
from zerver.models import Realm, RealmEmoji, Stream, UserProfile, UserActivity, RealmAlias, \
Subscription, Recipient, Message, Attachment, UserMessage, valid_stream_name, \
Client, DefaultStream, UserPresence, Referral, PushDeviceToken, MAX_SUBJECT_LENGTH, \
MAX_MESSAGE_LENGTH, get_client, get_stream, get_recipient, get_huddle, \
get_user_profile_by_id, PreregistrationUser, get_display_recipient, \
get_realm, get_realm_by_string_id, bulk_get_recipients, \
email_allowed_for_realm, email_to_username, display_recipient_cache_key, \
get_user_profile_by_email, get_stream_cache_key, \
UserActivityInterval, get_active_user_dicts_in_realm, get_active_streams, \
realm_filters_for_domain, RealmFilter, receives_offline_notifications, \
ScheduledJob, realm_filters_for_domain, get_owned_bot_dicts, \
get_old_unclaimed_attachments, get_cross_realm_emails, receives_online_notifications
from zerver.lib.alert_words import alert_words_in_realm
from zerver.lib.avatar import get_avatar_url, avatar_url
from django.db import transaction, IntegrityError, connection
from django.db.models import F, Q
from django.db.models.query import QuerySet
from django.core.exceptions import ValidationError
from importlib import import_module
from django.core.mail import EmailMessage
from django.utils.timezone import now
from confirmation.models import Confirmation
import six
from six import text_type
from six.moves import filter
from six.moves import map
from six.moves import range
from six import unichr
session_engine = import_module(settings.SESSION_ENGINE)
from zerver.lib.create_user import random_api_key
from zerver.lib.timestamp import timestamp_to_datetime, datetime_to_timestamp
from zerver.lib.queue import queue_json_publish
from django.utils import timezone
from zerver.lib.create_user import create_user
from zerver.lib import bugdown
from zerver.lib.cache import cache_with_key, cache_set, \
user_profile_by_email_cache_key, cache_set_many, \
cache_delete, cache_delete_many
from zerver.decorator import statsd_increment
from zerver.lib.event_queue import request_event_queue, get_user_events, send_event
from zerver.lib.utils import log_statsd_event, statsd
from zerver.lib.html_diff import highlight_html_differences
from zerver.lib.alert_words import user_alert_words, add_user_alert_words, \
remove_user_alert_words, set_user_alert_words
from zerver.lib.push_notifications import num_push_devices_for_user, \
send_apple_push_notification, send_android_push_notification
from zerver.lib.notifications import clear_followup_emails_queue
from zerver.lib.narrow import check_supported_events_narrow_filter
from zerver.lib.request import JsonableError
from zerver.lib.session_user import get_session_user
from zerver.lib.upload import attachment_url_re, attachment_url_to_path_id, \
claim_attachment, delete_message_image
from zerver.lib.str_utils import NonBinaryStr, force_str
import DNS
import ujson
import time
import traceback
import re
import datetime
import os
import platform
import logging
import itertools
from collections import defaultdict
import copy
# This will be used to type annotate parameters in a function if the function
# works on both str and unicode in python 2 but in python 3 it only works on str.
SizedTextIterable = Union[Sequence[text_type], AbstractSet[text_type]]
STREAM_ASSIGNMENT_COLORS = [
"#76ce90", "#fae589", "#a6c7e5", "#e79ab5",
"#bfd56f", "#f4ae55", "#b0a5fd", "#addfe5",
"#f5ce6e", "#c2726a", "#94c849", "#bd86e5",
"#ee7e4a", "#a6dcbf", "#95a5fd", "#53a063",
"#9987e1", "#e4523d", "#c2c2c2", "#4f8de4",
"#c6a8ad", "#e7cc4d", "#c8bebf", "#a47462"]
# Store an event in the log for re-importing messages
def log_event(event):
# type: (MutableMapping[str, Any]) -> None
if settings.EVENT_LOG_DIR is None:
return
if "timestamp" not in event:
event["timestamp"] = time.time()
if not os.path.exists(settings.EVENT_LOG_DIR):
os.mkdir(settings.EVENT_LOG_DIR)
template = os.path.join(settings.EVENT_LOG_DIR,
'%s.' + platform.node()
+ datetime.datetime.now().strftime('.%Y-%m-%d'))
with lockfile(template % ('lock',)):
with open(template % ('events',), 'a') as log:
log.write(force_str(ujson.dumps(event) + u'\n'))
def active_user_ids(realm):
# type: (Realm) -> List[int]
return [userdict['id'] for userdict in get_active_user_dicts_in_realm(realm)]
def can_access_stream_user_ids(stream):
# type: (Stream) -> Set[int]
# return user ids of users who can access the attributes of
# a stream, such as its name/description
if stream.is_public():
return set(active_user_ids(stream.realm))
else:
return private_stream_user_ids(stream)
def private_stream_user_ids(stream):
# type: (Stream) -> Set[int]
# TODO: Find similar queries elsewhere and de-duplicate this code.
subscriptions = Subscription.objects.filter(
recipient__type=Recipient.STREAM,
recipient__type_id=stream.id,
active=True)
return {sub['user_profile_id'] for sub in subscriptions.values('user_profile_id')}
def bot_owner_userids(user_profile):
# type: (UserProfile) -> Sequence[int]
is_private_bot = (
user_profile.default_sending_stream and user_profile.default_sending_stream.invite_only or
user_profile.default_events_register_stream and user_profile.default_events_register_stream.invite_only)
if is_private_bot:
return (user_profile.bot_owner_id,) # TODO: change this to list instead of tuple
else:
return active_user_ids(user_profile.realm)
def realm_user_count(realm):
# type: (Realm) -> int
return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False).count()
def get_topic_history_for_stream(user_profile, recipient):
# type: (UserProfile, Recipient) -> List[Tuple[str, int]]
# We tested the below query on some large prod datasets, and we never
# saw more than 50ms to execute it, so we think that's acceptable,
# but we will monitor it, and we may later optimize it further.
query = '''
SELECT topic, read, count(*)
FROM (
SELECT
("zerver_usermessage"."flags" & 1) as read,
"zerver_message"."subject" as topic,
"zerver_message"."id" as message_id
FROM "zerver_usermessage"
INNER JOIN "zerver_message" ON (
"zerver_usermessage"."message_id" = "zerver_message"."id"
) WHERE (
"zerver_usermessage"."user_profile_id" = %s AND
"zerver_message"."recipient_id" = %s
) ORDER BY "zerver_usermessage"."message_id" DESC
) messages_for_stream
GROUP BY topic, read
ORDER BY max(message_id) desc
'''
cursor = connection.cursor()
cursor.execute(query, [user_profile.id, recipient.id])
rows = cursor.fetchall()
cursor.close()
topic_names = dict() # type: Dict[str, str]
topic_counts = dict() # type: Dict[str, int]
topics = []
for row in rows:
topic_name, read, count = row
if topic_name.lower() not in topic_names:
topic_names[topic_name.lower()] = topic_name
topic_name = topic_names[topic_name.lower()]
if topic_name not in topic_counts:
topic_counts[topic_name] = 0
topics.append(topic_name)
if not read:
topic_counts[topic_name] += count
history = [(topic, topic_counts[topic]) for topic in topics]
return history
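# Illustrative note on get_topic_history_for_stream (example values are
# hypothetical, not from the original source): the returned list pairs each
# topic with its unread message count, ordered by most recent activity, e.g.
#   [(u'daily standup', 3), (u'lunch', 0)]
# where 3 messages in "daily standup" lack the read flag for this user.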
def send_signup_message(sender, signups_stream, user_profile,
internal=False, realm=None):
# type: (UserProfile, text_type, UserProfile, bool, Optional[Realm]) -> None
if internal:
# When this is done using manage.py vs. the web interface
internal_blurb = " **INTERNAL SIGNUP** "
else:
internal_blurb = " "
user_count = realm_user_count(user_profile.realm)
# Send notification to realm notifications stream if it exists
# Don't send notification for the first user in a realm
if user_profile.realm.notifications_stream is not None and user_count > 1:
internal_send_message(sender, "stream",
user_profile.realm.notifications_stream.name,
"New users", "%s just signed up for Zulip. Say hello!" % \
(user_profile.full_name,),
realm=user_profile.realm)
internal_send_message(sender,
"stream", signups_stream, user_profile.realm.domain,
"%s <`%s`> just signed up for Zulip!%s(total: **%i**)" % (
user_profile.full_name,
user_profile.email,
internal_blurb,
user_count,
)
)
def notify_new_user(user_profile, internal=False):
# type: (UserProfile, bool) -> None
if settings.NEW_USER_BOT is not None:
send_signup_message(settings.NEW_USER_BOT, "signups", user_profile, internal)
statsd.gauge("users.signups.%s" % (user_profile.realm.domain.replace('.', '_')), 1, delta=True)
def add_new_user_history(user_profile, streams):
# type: (UserProfile, Iterable[Stream]) -> None
"""Give you the last 100 messages on your public streams, so you have
something to look at in your home view once you finish the
tutorial."""
one_week_ago = now() - datetime.timedelta(weeks=1)
recipients = Recipient.objects.filter(type=Recipient.STREAM,
type_id__in=[stream.id for stream in streams
if not stream.invite_only])
recent_messages = Message.objects.filter(recipient_id__in=recipients,
pub_date__gt=one_week_ago).order_by("-id")
message_ids_to_use = list(reversed(recent_messages.values_list('id', flat=True)[0:100]))
if len(message_ids_to_use) == 0:
return
# Handle the race condition where a message arrives between
# bulk_add_subscriptions above and the Message query just above
already_ids = set(UserMessage.objects.filter(message_id__in=message_ids_to_use,
user_profile=user_profile).values_list("message_id", flat=True))
ums_to_create = [UserMessage(user_profile=user_profile, message_id=message_id,
flags=UserMessage.flags.read)
for message_id in message_ids_to_use
if message_id not in already_ids]
UserMessage.objects.bulk_create(ums_to_create)
# Does the processing for a new user account:
# * Subscribes to default/invitation streams
# * Fills in some recent historical messages
# * Notifies other users in realm and Zulip about the signup
# * Deactivates PreregistrationUser objects
# * subscribe the user to newsletter if newsletter_data is specified
def process_new_human_user(user_profile, prereg_user=None, newsletter_data=None):
# type: (UserProfile, Optional[PreregistrationUser], Optional[Dict[str, str]]) -> None
mit_beta_user = user_profile.realm.is_zephyr_mirror_realm
try:
streams = prereg_user.streams.all()
except AttributeError:
# This will catch both the case where prereg_user is None and where it
# is a MitUser.
streams = []
# If the user's invitation didn't explicitly list some streams, we
# add the default streams
if len(streams) == 0:
streams = get_default_subs(user_profile)
bulk_add_subscriptions(streams, [user_profile])
add_new_user_history(user_profile, streams)
# mit_beta_users don't have a referred_by field
if not mit_beta_user and prereg_user is not None and prereg_user.referred_by is not None \
and settings.NOTIFICATION_BOT is not None:
# This is a cross-realm private message.
internal_send_message(settings.NOTIFICATION_BOT,
"private", prereg_user.referred_by.email, user_profile.realm.domain,
"%s <`%s`> accepted your invitation to join Zulip!" % (
user_profile.full_name,
user_profile.email,
)
)
# Mark any other PreregistrationUsers that are STATUS_ACTIVE as
# inactive so we can keep track of the PreregistrationUser we
# actually used for analytics
if prereg_user is not None:
PreregistrationUser.objects.filter(email__iexact=user_profile.email).exclude(
id=prereg_user.id).update(status=0)
else:
PreregistrationUser.objects.filter(email__iexact=user_profile.email).update(status=0)
notify_new_user(user_profile)
if newsletter_data is not None:
# If the user was created automatically via the API, we may
# not want to register them for the newsletter
queue_json_publish(
"signups",
{
'EMAIL': user_profile.email,
'merge_vars': {
'NAME': user_profile.full_name,
'REALM_ID': user_profile.realm.id,
'OPTIN_IP': newsletter_data["IP"],
'OPTIN_TIME': datetime.datetime.isoformat(now().replace(microsecond=0)),
},
},
lambda event: None)
def notify_created_user(user_profile):
# type: (UserProfile) -> None
event = dict(type="realm_user", op="add",
person=dict(email=user_profile.email,
user_id=user_profile.id,
is_admin=user_profile.is_realm_admin,
full_name=user_profile.full_name,
is_bot=user_profile.is_bot))
send_event(event, active_user_ids(user_profile.realm))
def notify_created_bot(user_profile):
# type: (UserProfile) -> None
def stream_name(stream):
# type: (Stream) -> Optional[text_type]
if not stream:
return None
return stream.name
default_sending_stream_name = stream_name(user_profile.default_sending_stream)
default_events_register_stream_name = stream_name(user_profile.default_events_register_stream)
event = dict(type="realm_bot", op="add",
bot=dict(email=user_profile.email,
user_id=user_profile.id,
full_name=user_profile.full_name,
api_key=user_profile.api_key,
default_sending_stream=default_sending_stream_name,
default_events_register_stream=default_events_register_stream_name,
default_all_public_streams=user_profile.default_all_public_streams,
avatar_url=avatar_url(user_profile),
owner=user_profile.bot_owner.email,
))
send_event(event, bot_owner_userids(user_profile))
def do_create_user(email, password, realm, full_name, short_name,
active=True, bot_type=None, bot_owner=None, tos_version=None,
avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
default_sending_stream=None, default_events_register_stream=None,
default_all_public_streams=None, prereg_user=None,
newsletter_data=None):
# type: (text_type, text_type, Realm, text_type, text_type, bool, Optional[int], Optional[UserProfile], Optional[text_type], text_type, Optional[Stream], Optional[Stream], bool, Optional[PreregistrationUser], Optional[Dict[str, str]]) -> UserProfile
event = {'type': 'user_created',
'timestamp': time.time(),
'full_name': full_name,
'short_name': short_name,
'user': email,
'domain': realm.domain,
'bot': bool(bot_type)}
if bot_type:
event['bot_owner'] = bot_owner.email
log_event(event)
user_profile = create_user(email=email, password=password, realm=realm,
full_name=full_name, short_name=short_name,
active=active, bot_type=bot_type, bot_owner=bot_owner,
tos_version=tos_version, avatar_source=avatar_source,
default_sending_stream=default_sending_stream,
default_events_register_stream=default_events_register_stream,
default_all_public_streams=default_all_public_streams)
notify_created_user(user_profile)
if bot_type:
notify_created_bot(user_profile)
else:
process_new_human_user(user_profile, prereg_user=prereg_user,
newsletter_data=newsletter_data)
return user_profile
def user_sessions(user_profile):
# type: (UserProfile) -> List[Session]
return [s for s in Session.objects.all()
if get_session_user(s) == user_profile.id]
def delete_session(session):
# type: (Session) -> None
session_engine.SessionStore(session.session_key).delete() # type: ignore # import_module
def delete_user_sessions(user_profile):
# type: (UserProfile) -> None
for session in Session.objects.all():
if get_session_user(session) == user_profile.id:
delete_session(session)
def delete_realm_user_sessions(realm):
# type: (Realm) -> None
realm_user_ids = [user_profile.id for user_profile in
UserProfile.objects.filter(realm=realm)]
for session in Session.objects.filter(expire_date__gte=datetime.datetime.now()):
if get_session_user(session) in realm_user_ids:
delete_session(session)
def delete_all_user_sessions():
# type: () -> None
for session in Session.objects.all():
delete_session(session)
def delete_all_deactivated_user_sessions():
# type: () -> None
for session in Session.objects.all():
user_profile_id = get_session_user(session)
if user_profile_id is None:
continue
user_profile = get_user_profile_by_id(user_profile_id)
if not user_profile.is_active or user_profile.realm.deactivated:
logging.info("Deactivating session for deactivated user %s" % (user_profile.email,))
delete_session(session)
def active_humans_in_realm(realm):
# type: (Realm) -> Sequence[UserProfile]
return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False)
def do_set_realm_name(realm, name):
# type: (Realm, text_type) -> None
realm.name = name
realm.save(update_fields=['name'])
event = dict(
type="realm",
op="update",
property='name',
value=name,
)
send_event(event, active_user_ids(realm))
def do_set_realm_restricted_to_domain(realm, restricted):
# type: (Realm, bool) -> None
realm.restricted_to_domain = restricted
realm.save(update_fields=['restricted_to_domain'])
event = dict(
type="realm",
op="update",
property='restricted_to_domain',
value=restricted,
)
send_event(event, active_user_ids(realm))
def do_set_realm_invite_required(realm, invite_required):
# type: (Realm, bool) -> None
realm.invite_required = invite_required
realm.save(update_fields=['invite_required'])
event = dict(
type="realm",
op="update",
property='invite_required',
value=invite_required,
)
send_event(event, active_user_ids(realm))
def do_set_realm_invite_by_admins_only(realm, invite_by_admins_only):
# type: (Realm, bool) -> None
realm.invite_by_admins_only = invite_by_admins_only
realm.save(update_fields=['invite_by_admins_only'])
event = dict(
type="realm",
op="update",
property='invite_by_admins_only',
value=invite_by_admins_only,
)
send_event(event, active_user_ids(realm))
def do_set_realm_authentication_methods(realm, authentication_methods):
# type: (Realm, Dict[str, bool]) -> None
for key, value in list(authentication_methods.items()):
index = getattr(realm.authentication_methods, key).number
realm.authentication_methods.set_bit(index, int(value))
realm.save(update_fields=['authentication_methods'])
event = dict(
type="realm",
op="update_dict",
property='default',
data=dict(authentication_methods=realm.authentication_methods_dict())
)
send_event(event, active_user_ids(realm))
def do_set_realm_create_stream_by_admins_only(realm, create_stream_by_admins_only):
# type: (Realm, bool) -> None
realm.create_stream_by_admins_only = create_stream_by_admins_only
realm.save(update_fields=['create_stream_by_admins_only'])
event = dict(
type="realm",
op="update",
property='create_stream_by_admins_only',
value=create_stream_by_admins_only,
)
send_event(event, active_user_ids(realm))
def do_set_realm_message_editing(realm, allow_message_editing, message_content_edit_limit_seconds):
# type: (Realm, bool, int) -> None
realm.allow_message_editing = allow_message_editing
realm.message_content_edit_limit_seconds = message_content_edit_limit_seconds
realm.save(update_fields=['allow_message_editing', 'message_content_edit_limit_seconds'])
event = dict(
type="realm",
op="update_dict",
property="default",
data=dict(allow_message_editing=allow_message_editing,
message_content_edit_limit_seconds=message_content_edit_limit_seconds),
)
send_event(event, active_user_ids(realm))
def do_set_realm_default_language(realm, default_language):
# type: (Realm, text_type) -> None
if default_language == 'zh_CN':
# NB: remove this once we upgrade to Django 1.9
# zh-cn and zh-tw will be replaced by zh-hans and zh-hant in
# Django 1.9
        default_language = 'zh_HANS'
realm.default_language = default_language
realm.save(update_fields=['default_language'])
event = dict(
type="realm",
op="update",
property="default_language",
value=default_language
)
send_event(event, active_user_ids(realm))
def do_deactivate_realm(realm):
# type: (Realm) -> None
"""
Deactivate this realm. Do NOT deactivate the users -- we need to be able to
tell the difference between users that were intentionally deactivated,
e.g. by a realm admin, and users who can't currently use Zulip because their
realm has been deactivated.
"""
if realm.deactivated:
return
realm.deactivated = True
realm.save(update_fields=["deactivated"])
for user in active_humans_in_realm(realm):
# Don't deactivate the users, but do delete their sessions so they get
# bumped to the login screen, where they'll get a realm deactivation
# notice when they try to log in.
delete_user_sessions(user)
def do_reactivate_realm(realm):
# type: (Realm) -> None
realm.deactivated = False
realm.save(update_fields=["deactivated"])
def do_deactivate_user(user_profile, log=True, _cascade=True):
# type: (UserProfile, bool, bool) -> None
if not user_profile.is_active:
return
user_profile.is_active = False
user_profile.save(update_fields=["is_active"])
delete_user_sessions(user_profile)
if log:
log_event({'type': 'user_deactivated',
'timestamp': time.time(),
'user': user_profile.email,
'domain': user_profile.realm.domain})
event = dict(type="realm_user", op="remove",
person=dict(email=user_profile.email,
user_id=user_profile.id,
full_name=user_profile.full_name))
send_event(event, active_user_ids(user_profile.realm))
if user_profile.is_bot:
event = dict(type="realm_bot", op="remove",
bot=dict(email=user_profile.email,
user_id=user_profile.id,
full_name=user_profile.full_name))
send_event(event, bot_owner_userids(user_profile))
if _cascade:
bot_profiles = UserProfile.objects.filter(is_bot=True, is_active=True,
bot_owner=user_profile)
for profile in bot_profiles:
do_deactivate_user(profile, _cascade=False)
def do_deactivate_stream(stream, log=True):
# type: (Stream, bool) -> None
user_profiles = UserProfile.objects.filter(realm=stream.realm)
for user_profile in user_profiles:
bulk_remove_subscriptions([user_profile], [stream])
was_invite_only = stream.invite_only
stream.deactivated = True
stream.invite_only = True
# Preserve as much as possible the original stream name while giving it a
# special prefix that both indicates that the stream is deactivated and
# frees up the original name for reuse.
old_name = stream.name
new_name = ("!DEACTIVATED:" + old_name)[:Stream.MAX_NAME_LENGTH]
for i in range(20):
existing_deactivated_stream = get_stream(new_name, stream.realm)
if existing_deactivated_stream:
            # This stream has already been deactivated, keep prepending !s until
            # we have a unique stream name or we've hit a rename limit.
new_name = ("!" + new_name)[:Stream.MAX_NAME_LENGTH]
else:
break
# If you don't have a unique name at this point, this will fail later in the
# code path.
stream.name = new_name[:Stream.MAX_NAME_LENGTH]
stream.save()
# Remove the old stream information from remote cache.
old_cache_key = get_stream_cache_key(old_name, stream.realm)
cache_delete(old_cache_key)
if not was_invite_only:
stream_dict = stream.to_dict()
stream_dict.update(dict(name=old_name, invite_only=was_invite_only))
event = dict(type="stream", op="delete",
streams=[stream_dict])
send_event(event, active_user_ids(stream.realm))
def do_change_user_email(user_profile, new_email):
# type: (UserProfile, text_type) -> None
old_email = user_profile.email
user_profile.email = new_email
user_profile.save(update_fields=["email"])
log_event({'type': 'user_email_changed',
'old_email': old_email,
'new_email': new_email})
def compute_irc_user_fullname(email):
# type: (NonBinaryStr) -> NonBinaryStr
return email.split("@")[0] + " (IRC)"
def compute_jabber_user_fullname(email):
# type: (NonBinaryStr) -> NonBinaryStr
return email.split("@")[0] + " (XMPP)"
def compute_mit_user_fullname(email):
# type: (NonBinaryStr) -> NonBinaryStr
try:
# Input is either e.g. [email protected] or user|[email protected]
match_user = re.match(r'^([a-zA-Z0-9_.-]+)(\|.+)?@mit\.edu$', email.lower())
if match_user and match_user.group(2) is None:
answer = DNS.dnslookup(
"%s.passwd.ns.athena.mit.edu" % (match_user.group(1),),
DNS.Type.TXT)
hesiod_name = force_str(answer[0][0]).split(':')[4].split(',')[0].strip()
if hesiod_name != "":
return hesiod_name
elif match_user:
return match_user.group(1).lower() + "@" + match_user.group(2).upper()[1:]
except DNS.Base.ServerError:
pass
except:
print ("Error getting fullname for %s:" % (email,))
traceback.print_exc()
return email.lower()
@cache_with_key(lambda realm, email, f: user_profile_by_email_cache_key(email),
timeout=3600*24*7)
def create_mirror_user_if_needed(realm, email, email_to_fullname):
# type: (Realm, text_type, Callable[[text_type], text_type]) -> UserProfile
try:
return get_user_profile_by_email(email)
except UserProfile.DoesNotExist:
try:
# Forge a user for this person
return create_user(email, None, realm,
email_to_fullname(email), email_to_username(email),
active=False, is_mirror_dummy=True)
except IntegrityError:
return get_user_profile_by_email(email)
def log_message(message):
# type: (Message) -> None
if not message.sending_client.name.startswith("test:"):
log_event(message.to_log_dict())
# Helper function. Defaults here are overriden by those set in do_send_messages
def do_send_message(message, rendered_content = None, no_log = False, stream = None, local_id = None):
# type: (Union[int, Message], Optional[text_type], bool, Optional[Stream], Optional[int]) -> int
return do_send_messages([{'message': message,
'rendered_content': rendered_content,
'no_log': no_log,
'stream': stream,
'local_id': local_id}])[0]
def render_incoming_message(message, content, message_users):
# type: (Message, text_type, Set[UserProfile]) -> text_type
realm_alert_words = alert_words_in_realm(message.get_realm())
try:
rendered_content = render_markdown(
message=message,
content=content,
realm_alert_words=realm_alert_words,
message_users=message_users,
)
except BugdownRenderingException:
raise JsonableError(_('Unable to render message'))
return rendered_content
def get_recipient_user_profiles(recipient, sender_id):
    # type: (Recipient, int) -> List[UserProfile]
if recipient.type == Recipient.PERSONAL:
recipients = list(set([get_user_profile_by_id(recipient.type_id),
get_user_profile_by_id(sender_id)]))
# For personals, you send out either 1 or 2 copies, for
# personals to yourself or to someone else, respectively.
assert((len(recipients) == 1) or (len(recipients) == 2))
elif (recipient.type == Recipient.STREAM or recipient.type == Recipient.HUDDLE):
# We use select_related()/only() here, while the PERSONAL case above uses
# get_user_profile_by_id() to get UserProfile objects from cache. Streams will
# typically have more recipients than PMs, so get_user_profile_by_id() would be
# a bit more expensive here, given that we need to hit the DB anyway and only
# care about the email from the user profile.
fields = [
'user_profile__id',
'user_profile__email',
'user_profile__enable_online_push_notifications',
'user_profile__is_active',
'user_profile__realm__domain'
]
query = Subscription.objects.select_related("user_profile", "user_profile__realm").only(*fields).filter(
recipient=recipient, active=True)
recipients = [s.user_profile for s in query]
else:
raise ValueError('Bad recipient type')
return recipients
def do_send_messages(messages):
# type: (Sequence[Optional[MutableMapping[str, Any]]]) -> List[int]
# Filter out messages which didn't pass internal_prep_message properly
messages = [message for message in messages if message is not None]
# Filter out zephyr mirror anomalies where the message was already sent
already_sent_ids = [] # type: List[int]
new_messages = [] # type: List[MutableMapping[str, Any]]
for message in messages:
if isinstance(message['message'], int):
already_sent_ids.append(message['message'])
else:
new_messages.append(message)
messages = new_messages
# For consistency, changes to the default values for these gets should also be applied
# to the default args in do_send_message
for message in messages:
message['rendered_content'] = message.get('rendered_content', None)
message['no_log'] = message.get('no_log', False)
message['stream'] = message.get('stream', None)
message['local_id'] = message.get('local_id', None)
message['sender_queue_id'] = message.get('sender_queue_id', None)
# Log the message to our message log for populate_db to refill
for message in messages:
if not message['no_log']:
log_message(message['message'])
for message in messages:
message['recipients'] = get_recipient_user_profiles(message['message'].recipient,
message['message'].sender_id)
# Only deliver the message to active user recipients
message['active_recipients'] = [user_profile for user_profile in message['recipients']
if user_profile.is_active]
# Render our messages.
for message in messages:
assert message['message'].rendered_content is None
rendered_content = render_incoming_message(
message['message'],
message['message'].content,
message_users=message['active_recipients'])
message['message'].rendered_content = rendered_content
message['message'].rendered_content_version = bugdown_version
for message in messages:
message['message'].update_calculated_fields()
# Save the message receipts in the database
user_message_flags = defaultdict(dict) # type: Dict[int, Dict[int, List[str]]]
with transaction.atomic():
Message.objects.bulk_create([message['message'] for message in messages])
ums = [] # type: List[UserMessage]
for message in messages:
ums_to_create = [UserMessage(user_profile=user_profile, message=message['message'])
for user_profile in message['active_recipients']]
# These properties on the Message are set via
# render_markdown by code in the bugdown inline patterns
wildcard = message['message'].mentions_wildcard
mentioned_ids = message['message'].mentions_user_ids
ids_with_alert_words = message['message'].user_ids_with_alert_words
is_me_message = message['message'].is_me_message
for um in ums_to_create:
if um.user_profile.id == message['message'].sender.id and \
message['message'].sent_by_human():
um.flags |= UserMessage.flags.read
if wildcard:
um.flags |= UserMessage.flags.wildcard_mentioned
if um.user_profile_id in mentioned_ids:
um.flags |= UserMessage.flags.mentioned
if um.user_profile_id in ids_with_alert_words:
um.flags |= UserMessage.flags.has_alert_word
if is_me_message:
um.flags |= UserMessage.flags.is_me_message
user_message_flags[message['message'].id][um.user_profile_id] = um.flags_list()
ums.extend(ums_to_create)
UserMessage.objects.bulk_create(ums)
# Claim attachments in message
for message in messages:
if Message.content_has_attachment(message['message'].content):
do_claim_attachments(message['message'])
for message in messages:
# Render Markdown etc. here and store (automatically) in
# remote cache, so that the single-threaded Tornado server
# doesn't have to.
user_flags = user_message_flags.get(message['message'].id, {})
sender = message['message'].sender
user_presences = get_status_dict(sender)
presences = {}
for user_profile in message['active_recipients']:
if user_profile.email in user_presences:
presences[user_profile.id] = user_presences[user_profile.email]
event = dict(
type = 'message',
message = message['message'].id,
message_dict_markdown = message_to_dict(message['message'], apply_markdown=True),
message_dict_no_markdown = message_to_dict(message['message'], apply_markdown=False),
presences = presences)
users = [{'id': user.id,
'flags': user_flags.get(user.id, []),
'always_push_notify': user.enable_online_push_notifications}
for user in message['active_recipients']]
if message['message'].recipient.type == Recipient.STREAM:
# Note: This is where authorization for single-stream
# get_updates happens! We only attach stream data to the
# notify new_message request if it's a public stream,
# ensuring that in the tornado server, non-public stream
# messages are only associated to their subscribed users.
if message['stream'] is None:
message['stream'] = Stream.objects.select_related("realm").get(id=message['message'].recipient.type_id)
if message['stream'].is_public():
event['realm_id'] = message['stream'].realm.id
event['stream_name'] = message['stream'].name
if message['stream'].invite_only:
event['invite_only'] = True
if message['local_id'] is not None:
event['local_id'] = message['local_id']
if message['sender_queue_id'] is not None:
event['sender_queue_id'] = message['sender_queue_id']
send_event(event, users)
if (settings.ENABLE_FEEDBACK and
message['message'].recipient.type == Recipient.PERSONAL and
settings.FEEDBACK_BOT in [up.email for up in message['recipients']]):
queue_json_publish(
'feedback_messages',
message_to_dict(message['message'], apply_markdown=False),
lambda x: None
)
# Note that this does not preserve the order of message ids
# returned. In practice, this shouldn't matter, as we only
# mirror single zephyr messages at a time and don't otherwise
# intermingle sending zephyr messages with other messages.
return already_sent_ids + [message['message'].id for message in messages]
def do_send_typing_notification(notification):
# type: (Dict[str, Any]) -> None
recipient_user_profiles = get_recipient_user_profiles(notification['recipient'],
notification['sender'].id)
# Only deliver the notification to active user recipients
user_ids_to_notify = [profile.id for profile in recipient_user_profiles if profile.is_active]
sender_dict = {'user_id': notification['sender'].id, 'email': notification['sender'].email}
# Include a list of recipients in the event body to help identify where the typing is happening
recipient_dicts = [{'user_id': profile.id, 'email': profile.email} for profile in recipient_user_profiles]
event = dict(
type = 'typing',
op = notification['op'],
sender = sender_dict,
recipients = recipient_dicts)
send_event(event, user_ids_to_notify)
# check_send_typing_notification:
# Checks the typing notification and sends it
def check_send_typing_notification(sender, notification_to, operator):
# type: (UserProfile, Sequence[text_type], text_type) -> None
typing_notification = check_typing_notification(sender, notification_to, operator)
do_send_typing_notification(typing_notification)
# check_typing_notification:
# Returns typing notification ready for sending with do_send_typing_notification on success
# or the error message (string) on error.
def check_typing_notification(sender, notification_to, operator):
# type: (UserProfile, Sequence[text_type], text_type) -> Dict[str, Any]
if len(notification_to) == 0:
raise JsonableError(_('Missing parameter: \'to\' (recipient)'))
elif operator not in ('start', 'stop'):
raise JsonableError(_('Invalid \'op\' value (should be start or stop)'))
else:
try:
recipient = recipient_for_emails(notification_to, False,
sender, sender)
except ValidationError as e:
assert isinstance(e.messages[0], six.string_types)
raise JsonableError(e.messages[0])
if recipient.type == Recipient.STREAM:
raise ValueError('Forbidden recipient type')
return {'sender': sender, 'recipient': recipient, 'op': operator}
def do_create_stream(realm, stream_name):
# type: (Realm, text_type) -> None
# This is used by a management command now, mostly to facilitate testing. It
# doesn't simulate every single aspect of creating a subscription; for example,
# we don't send Zulips to users to tell them they have been subscribed.
stream = Stream()
stream.realm = realm
stream.name = stream_name
stream.save()
Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
subscribers = UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False)
bulk_add_subscriptions([stream], subscribers)
def create_stream_if_needed(realm, stream_name, invite_only=False):
# type: (Realm, text_type, bool) -> Tuple[Stream, bool]
(stream, created) = Stream.objects.get_or_create(
realm=realm, name__iexact=stream_name,
defaults={'name': stream_name, 'invite_only': invite_only})
if created:
Recipient.objects.create(type_id=stream.id, type=Recipient.STREAM)
if not invite_only:
event = dict(type="stream", op="create",
streams=[stream.to_dict()])
send_event(event, active_user_ids(realm))
return stream, created
def create_streams_if_needed(realm, stream_names, invite_only):
# type: (Realm, List[text_type], bool) -> Tuple[List[Stream], List[Stream]]
added_streams = [] # type: List[Stream]
existing_streams = [] # type: List[Stream]
for stream_name in stream_names:
stream, created = create_stream_if_needed(realm,
stream_name,
invite_only=invite_only)
if created:
added_streams.append(stream)
else:
existing_streams.append(stream)
return added_streams, existing_streams
def recipient_for_emails(emails, not_forged_mirror_message,
user_profile, sender):
# type: (Iterable[text_type], bool, UserProfile, UserProfile) -> Recipient
recipient_profile_ids = set()
# We exempt cross-realm bots from the check that all the recipients
# are in the same domain.
realm_domains = set()
exempt_emails = get_cross_realm_emails()
if sender.email not in exempt_emails:
realm_domains.add(sender.realm.domain)
for email in emails:
try:
user_profile = get_user_profile_by_email(email)
except UserProfile.DoesNotExist:
raise ValidationError(_("Invalid email '%s'") % (email,))
if (not user_profile.is_active and not user_profile.is_mirror_dummy) or \
user_profile.realm.deactivated:
raise ValidationError(_("'%s' is no longer using Zulip.") % (email,))
recipient_profile_ids.add(user_profile.id)
if email not in exempt_emails:
realm_domains.add(user_profile.realm.domain)
if not_forged_mirror_message and user_profile.id not in recipient_profile_ids:
raise ValidationError(_("User not authorized for this query"))
if len(realm_domains) > 1:
raise ValidationError(_("You can't send private messages outside of your organization."))
# If the private message is just between the sender and
# another person, force it to be a personal internally
if (len(recipient_profile_ids) == 2
and sender.id in recipient_profile_ids):
recipient_profile_ids.remove(sender.id)
if len(recipient_profile_ids) > 1:
# Make sure the sender is included in huddle messages
recipient_profile_ids.add(sender.id)
huddle = get_huddle(list(recipient_profile_ids))
return get_recipient(Recipient.HUDDLE, huddle.id)
else:
return get_recipient(Recipient.PERSONAL, list(recipient_profile_ids)[0])
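# A minimal sketch of how recipient_for_emails resolves recipients (addresses
# are hypothetical): if alice sends to ['[email protected]', '[email protected]'], her own id
# is dropped again and a PERSONAL recipient for bob is returned; if she sends
# to ['[email protected]', '[email protected]'], her id is added back and a HUDDLE recipient
# covering all three users is returned.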
def already_sent_mirrored_message_id(message):
# type: (Message) -> Optional[int]
if message.recipient.type == Recipient.HUDDLE:
# For huddle messages, we use a 10-second window because the
# timestamps aren't guaranteed to actually match between two
# copies of the same message.
time_window = datetime.timedelta(seconds=10)
else:
time_window = datetime.timedelta(seconds=0)
messages = Message.objects.filter(
sender=message.sender,
recipient=message.recipient,
content=message.content,
subject=message.subject,
sending_client=message.sending_client,
pub_date__gte=message.pub_date - time_window,
pub_date__lte=message.pub_date + time_window)
if messages.exists():
return messages[0].id
return None
def extract_recipients(s):
# type: (Union[str, Iterable[text_type]]) -> List[text_type]
# We try to accept multiple incoming formats for recipients.
# See test_extract_recipients() for examples of what we allow.
try:
data = ujson.loads(s) # type: ignore # This function has a super weird union argument.
except ValueError:
data = s
if isinstance(data, six.string_types):
data = data.split(',') # type: ignore # https://github.com/python/typeshed/pull/138
if not isinstance(data, list):
raise ValueError("Invalid data type for recipients")
recipients = data
# Strip recipients, and then remove any duplicates and any that
# are the empty string after being stripped.
recipients = [recipient.strip() for recipient in recipients]
return list(set(recipient for recipient in recipients if recipient))
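# Rough usage sketch for extract_recipients (addresses are hypothetical):
#   extract_recipients('[email protected], [email protected]')       -> both addresses
#   extract_recipients('["[email protected]", "[email protected]"]')   -> both addresses
#   extract_recipients([' [email protected] ', '', '[email protected]'])  -> ['[email protected]'] (stripped,
#                                                     deduplicated, empties dropped)
# Because the values pass through a set, the order of the result is not
# guaranteed.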
# check_send_message:
# Returns the id of the sent message. Has same argspec as check_message.
def check_send_message(sender, client, message_type_name, message_to,
subject_name, message_content, realm=None, forged=False,
forged_timestamp=None, forwarder_user_profile=None, local_id=None,
sender_queue_id=None):
# type: (UserProfile, Client, text_type, Sequence[text_type], text_type, text_type, Optional[Realm], bool, Optional[float], Optional[UserProfile], Optional[text_type], Optional[text_type]) -> int
message = check_message(sender, client, message_type_name, message_to,
subject_name, message_content, realm, forged, forged_timestamp,
forwarder_user_profile, local_id, sender_queue_id)
return do_send_messages([message])[0]
def check_stream_name(stream_name):
# type: (text_type) -> None
if stream_name == "":
raise JsonableError(_("Stream can't be empty"))
if len(stream_name) > Stream.MAX_NAME_LENGTH:
raise JsonableError(_("Stream name too long"))
if not valid_stream_name(stream_name):
raise JsonableError(_("Invalid stream name"))
def send_pm_if_empty_stream(sender, stream, stream_name, realm):
# type: (UserProfile, Stream, text_type, Realm) -> None
"""If a bot sends a message to a stream that doesn't exist or has no
subscribers, sends a notification to the bot owner (if not a
cross-realm bot) so that the owner can correct the issue."""
if sender.realm.is_zephyr_mirror_realm or sender.realm.deactivated:
return
if not sender.is_bot or sender.bot_owner is None:
return
# Don't send these notifications for cross-realm bot messages
# (e.g. from EMAIL_GATEWAY_BOT) since the owner for
# EMAIL_GATEWAY_BOT is probably the server administrator, not
# the owner of the bot who could potentially fix the problem.
if sender.realm != realm:
return
if stream is not None:
num_subscribers = stream.num_subscribers()
if num_subscribers > 0:
return
# We warn the user once every 5 minutes to avoid a flood of
# PMs on a misconfigured integration, re-using the
# UserProfile.last_reminder field, which is not used for bots.
last_reminder = sender.last_reminder
waitperiod = datetime.timedelta(minutes=UserProfile.BOT_OWNER_STREAM_ALERT_WAITPERIOD)
if last_reminder and timezone.now() - last_reminder <= waitperiod:
return
if stream is None:
error_msg = "that stream does not yet exist. To create it, "
else:
# num_subscribers == 0
error_msg = "there are no subscribers to that stream. To join it, "
content = ("Hi there! We thought you'd like to know that your bot **%s** just "
"tried to send a message to stream `%s`, but %s"
"click the gear in the left-side stream list." %
(sender.full_name, stream_name, error_msg))
message = internal_prep_message(settings.NOTIFICATION_BOT, "private",
sender.bot_owner.email, "", content)
do_send_messages([message])
sender.last_reminder = timezone.now()
sender.save(update_fields=['last_reminder'])
# check_message:
# Returns message ready for sending with do_send_message on success or the error message (string) on error.
def check_message(sender, client, message_type_name, message_to,
subject_name, message_content, realm=None, forged=False,
forged_timestamp=None, forwarder_user_profile=None, local_id=None,
sender_queue_id=None):
# type: (UserProfile, Client, text_type, Sequence[text_type], text_type, text_type, Optional[Realm], bool, Optional[float], Optional[UserProfile], Optional[text_type], Optional[text_type]) -> Dict[str, Any]
stream = None
if not message_to and message_type_name == 'stream' and sender.default_sending_stream:
# Use the users default stream
message_to = [sender.default_sending_stream.name]
elif len(message_to) == 0:
raise JsonableError(_("Message must have recipients"))
if len(message_content.strip()) == 0:
raise JsonableError(_("Message must not be empty"))
message_content = truncate_body(message_content)
if realm is None:
realm = sender.realm
if message_type_name == 'stream':
if len(message_to) > 1:
raise JsonableError(_("Cannot send to multiple streams"))
stream_name = message_to[0].strip()
check_stream_name(stream_name)
if subject_name is None:
raise JsonableError(_("Missing topic"))
subject = subject_name.strip()
if subject == "":
raise JsonableError(_("Topic can't be empty"))
subject = truncate_topic(subject)
## FIXME: Commented out temporarily while we figure out what we want
# if not valid_stream_name(subject):
# return json_error(_("Invalid subject name"))
stream = get_stream(stream_name, realm)
send_pm_if_empty_stream(sender, stream, stream_name, realm)
if stream is None:
raise JsonableError(_("Stream does not exist"))
recipient = get_recipient(Recipient.STREAM, stream.id)
if not stream.invite_only:
# This is a public stream
pass
elif subscribed_to_stream(sender, stream):
# Or it is private, but your are subscribed
pass
elif sender.is_api_super_user or (forwarder_user_profile is not None and
forwarder_user_profile.is_api_super_user):
# Or this request is being done on behalf of a super user
pass
elif sender.is_bot and subscribed_to_stream(sender.bot_owner, stream):
# Or you're a bot and your owner is subscribed.
pass
else:
# All other cases are an error.
raise JsonableError(_("Not authorized to send to stream '%s'") % (stream.name,))
elif message_type_name == 'private':
mirror_message = client and client.name in ["zephyr_mirror", "irc_mirror", "jabber_mirror", "JabberMirror"]
not_forged_mirror_message = mirror_message and not forged
try:
recipient = recipient_for_emails(message_to, not_forged_mirror_message,
forwarder_user_profile, sender)
except ValidationError as e:
assert isinstance(e.messages[0], six.string_types)
raise JsonableError(e.messages[0])
else:
raise JsonableError(_("Invalid message type"))
message = Message()
message.sender = sender
message.content = message_content
message.recipient = recipient
if message_type_name == 'stream':
message.subject = subject
if forged and forged_timestamp is not None:
# Forged messages come with a timestamp
message.pub_date = timestamp_to_datetime(forged_timestamp)
else:
message.pub_date = timezone.now()
message.sending_client = client
# We render messages later in the process.
assert message.rendered_content is None
if client.name == "zephyr_mirror":
id = already_sent_mirrored_message_id(message)
if id is not None:
return {'message': id}
return {'message': message, 'stream': stream, 'local_id': local_id, 'sender_queue_id': sender_queue_id}
def internal_prep_message(sender_email, recipient_type_name, recipients,
subject, content, realm=None):
# type: (text_type, str, text_type, text_type, text_type, Optional[Realm]) -> Optional[Dict[str, Any]]
"""
Create a message object and checks it, but doesn't send it or save it to the database.
The internal function that calls this can therefore batch send a bunch of created
messages together as one database query.
Call do_send_messages with a list of the return values of this method.
"""
if len(content) > MAX_MESSAGE_LENGTH:
content = content[0:3900] + "\n\n[message was too long and has been truncated]"
sender = get_user_profile_by_email(sender_email)
if realm is None:
realm = sender.realm
parsed_recipients = extract_recipients(recipients)
if recipient_type_name == "stream":
stream, _ = create_stream_if_needed(realm, parsed_recipients[0])
try:
return check_message(sender, get_client("Internal"), recipient_type_name,
parsed_recipients, subject, content, realm)
except JsonableError as e:
logging.error("Error queueing internal message by %s: %s" % (sender_email, str(e)))
return None
def internal_send_message(sender_email, recipient_type_name, recipients,
subject, content, realm=None):
# type: (text_type, str, text_type, text_type, text_type, Optional[Realm]) -> None
msg = internal_prep_message(sender_email, recipient_type_name, recipients,
subject, content, realm)
# internal_prep_message encountered an error
if msg is None:
return
do_send_messages([msg])
def pick_color(user_profile):
# type: (UserProfile) -> text_type
subs = Subscription.objects.filter(user_profile=user_profile,
active=True,
recipient__type=Recipient.STREAM)
return pick_color_helper(user_profile, subs)
def pick_color_helper(user_profile, subs):
# type: (UserProfile, Iterable[Subscription]) -> text_type
# These colors are shared with the palette in subs.js.
used_colors = [sub.color for sub in subs if sub.active]
available_colors = [s for s in STREAM_ASSIGNMENT_COLORS if s not in used_colors]
if available_colors:
return available_colors[0]
else:
return STREAM_ASSIGNMENT_COLORS[len(used_colors) % len(STREAM_ASSIGNMENT_COLORS)]
def get_subscription(stream_name, user_profile):
# type: (text_type, UserProfile) -> Subscription
stream = get_stream(stream_name, user_profile.realm)
recipient = get_recipient(Recipient.STREAM, stream.id)
return Subscription.objects.get(user_profile=user_profile,
recipient=recipient, active=True)
def validate_user_access_to_subscribers(user_profile, stream):
# type: (Optional[UserProfile], Stream) -> None
""" Validates whether the user can view the subscribers of a stream. Raises a JsonableError if:
* The user and the stream are in different realms
* The realm is MIT and the stream is not invite only.
* The stream is invite only, requesting_user is passed, and that user
does not subscribe to the stream.
"""
validate_user_access_to_subscribers_helper(
user_profile,
{"realm__domain": stream.realm.domain,
"realm_id": stream.realm_id,
"invite_only": stream.invite_only},
# We use a lambda here so that we only compute whether the
# user is subscribed if we have to
lambda: subscribed_to_stream(user_profile, stream))
def validate_user_access_to_subscribers_helper(user_profile, stream_dict, check_user_subscribed):
# type: (Optional[UserProfile], Mapping[str, Any], Callable[[], bool]) -> None
""" Helper for validate_user_access_to_subscribers that doesn't require a full stream object
* check_user_subscribed is a function that when called with no
arguments, will report whether the user is subscribed to the stream
"""
if user_profile is None:
raise ValidationError("Missing user to validate access for")
if user_profile.realm_id != stream_dict["realm_id"]:
raise ValidationError("Requesting user not in given realm")
if user_profile.realm.is_zephyr_mirror_realm and not stream_dict["invite_only"]:
raise JsonableError(_("You cannot get subscribers for public streams in this realm"))
if (stream_dict["invite_only"] and not check_user_subscribed()):
raise JsonableError(_("Unable to retrieve subscribers for invite-only stream"))
# sub_dict is a dictionary mapping stream_id => whether the user is subscribed to that stream
def bulk_get_subscriber_user_ids(stream_dicts, user_profile, sub_dict):
# type: (Iterable[Mapping[str, Any]], UserProfile, Mapping[int, bool]) -> Dict[int, List[int]]
target_stream_dicts = []
for stream_dict in stream_dicts:
try:
validate_user_access_to_subscribers_helper(user_profile, stream_dict,
lambda: sub_dict[stream_dict["id"]])
except JsonableError:
continue
target_stream_dicts.append(stream_dict)
subscriptions = Subscription.objects.select_related("recipient").filter(
recipient__type=Recipient.STREAM,
recipient__type_id__in=[stream["id"] for stream in target_stream_dicts],
user_profile__is_active=True,
active=True).values("user_profile_id", "recipient__type_id")
result = dict((stream["id"], []) for stream in stream_dicts) # type: Dict[int, List[int]]
for sub in subscriptions:
result[sub["recipient__type_id"]].append(sub["user_profile_id"])
return result
def get_subscribers_query(stream, requesting_user):
# type: (Stream, UserProfile) -> QuerySet
# TODO: Make a generic stub for QuerySet
""" Build a query to get the subscribers list for a stream, raising a JsonableError if:
'realm' is optional in stream.
The caller can refine this query with select_related(), values(), etc. depending
on whether it wants objects or just certain fields
"""
validate_user_access_to_subscribers(requesting_user, stream)
# Note that non-active users may still have "active" subscriptions, because we
# want to be able to easily reactivate them with their old subscriptions. This
# is why the query here has to look at the UserProfile.is_active flag.
subscriptions = Subscription.objects.filter(recipient__type=Recipient.STREAM,
recipient__type_id=stream.id,
user_profile__is_active=True,
active=True)
return subscriptions
def get_subscribers(stream, requesting_user=None):
# type: (Stream, Optional[UserProfile]) -> List[UserProfile]
subscriptions = get_subscribers_query(stream, requesting_user).select_related()
return [subscription.user_profile for subscription in subscriptions]
def get_subscriber_emails(stream, requesting_user=None):
# type: (Stream, Optional[UserProfile]) -> List[text_type]
subscriptions_query = get_subscribers_query(stream, requesting_user)
subscriptions = subscriptions_query.values('user_profile__email')
return [subscription['user_profile__email'] for subscription in subscriptions]
def maybe_get_subscriber_emails(stream, user_profile):
# type: (Stream, UserProfile) -> List[text_type]
""" Alternate version of get_subscriber_emails that takes a Stream object only
(not a name), and simply returns an empty list if unable to get a real
subscriber list (because we're on the MIT realm). """
try:
subscribers = get_subscriber_emails(stream, requesting_user=user_profile)
except JsonableError:
subscribers = []
return subscribers
def set_stream_color(user_profile, stream_name, color=None):
# type: (UserProfile, text_type, Optional[text_type]) -> text_type
subscription = get_subscription(stream_name, user_profile)
if not color:
color = pick_color(user_profile)
subscription.color = color
subscription.save(update_fields=["color"])
return color
def notify_subscriptions_added(user_profile, sub_pairs, stream_emails, no_log=False):
# type: (UserProfile, Iterable[Tuple[Subscription, Stream]], Callable[[Stream], List[text_type]], bool) -> None
if not no_log:
log_event({'type': 'subscription_added',
'user': user_profile.email,
'names': [stream.name for sub, stream in sub_pairs],
'domain': user_profile.realm.domain})
# Send a notification to the user who subscribed.
payload = [dict(name=stream.name,
stream_id=stream.id,
in_home_view=subscription.in_home_view,
invite_only=stream.invite_only,
color=subscription.color,
email_address=encode_email_address(stream),
desktop_notifications=subscription.desktop_notifications,
audible_notifications=subscription.audible_notifications,
description=stream.description,
pin_to_top=subscription.pin_to_top,
subscribers=stream_emails(stream))
for (subscription, stream) in sub_pairs]
event = dict(type="subscription", op="add",
subscriptions=payload)
send_event(event, [user_profile.id])
def get_peer_user_ids_for_stream_change(stream, altered_users, subscribed_users):
# type: (Stream, Iterable[UserProfile], Iterable[UserProfile]) -> Set[int]
'''
altered_users is a list of users that we are adding/removing
subscribed_users is the list of already subscribed users
Based on stream policy, we notify the correct bystanders, while
not notifying altered_users (who get subscribers via another event)
'''
altered_user_ids = [user.id for user in altered_users]
if stream.invite_only:
# PRIVATE STREAMS
all_subscribed_ids = [user.id for user in subscribed_users]
return set(all_subscribed_ids) - set(altered_user_ids)
else:
# PUBLIC STREAMS
# We now do "peer_add" or "peer_remove" events even for streams
# users were never subscribed to, in order for the neversubscribed
# structure to stay up-to-date.
return set(active_user_ids(stream.realm)) - set(altered_user_ids)
def query_all_subs_by_stream(streams):
# type: (Iterable[Stream]) -> Dict[int, List[UserProfile]]
all_subs = Subscription.objects.filter(recipient__type=Recipient.STREAM,
recipient__type_id__in=[stream.id for stream in streams],
user_profile__is_active=True,
active=True).select_related('recipient', 'user_profile')
all_subs_by_stream = defaultdict(list) # type: Dict[int, List[UserProfile]]
for sub in all_subs:
all_subs_by_stream[sub.recipient.type_id].append(sub.user_profile)
return all_subs_by_stream
def bulk_add_subscriptions(streams, users):
# type: (Iterable[Stream], Iterable[UserProfile]) -> Tuple[List[Tuple[UserProfile, Stream]], List[Tuple[UserProfile, Stream]]]
recipients_map = bulk_get_recipients(Recipient.STREAM, [stream.id for stream in streams]) # type: Mapping[int, Recipient]
recipients = [recipient.id for recipient in recipients_map.values()] # type: List[int]
stream_map = {} # type: Dict[int, Stream]
for stream in streams:
stream_map[recipients_map[stream.id].id] = stream
subs_by_user = defaultdict(list) # type: Dict[int, List[Subscription]]
all_subs_query = Subscription.objects.select_related("user_profile")
for sub in all_subs_query.filter(user_profile__in=users,
recipient__type=Recipient.STREAM):
subs_by_user[sub.user_profile_id].append(sub)
already_subscribed = [] # type: List[Tuple[UserProfile, Stream]]
subs_to_activate = [] # type: List[Tuple[Subscription, Stream]]
new_subs = [] # type: List[Tuple[UserProfile, int, Stream]]
for user_profile in users:
needs_new_sub = set(recipients) # type: Set[int]
for sub in subs_by_user[user_profile.id]:
if sub.recipient_id in needs_new_sub:
needs_new_sub.remove(sub.recipient_id)
if sub.active:
already_subscribed.append((user_profile, stream_map[sub.recipient_id]))
else:
subs_to_activate.append((sub, stream_map[sub.recipient_id]))
# Mark the sub as active, without saving, so that
# pick_color will consider this to be an active
# subscription when picking colors
sub.active = True
for recipient_id in needs_new_sub:
new_subs.append((user_profile, recipient_id, stream_map[recipient_id]))
subs_to_add = [] # type: List[Tuple[Subscription, Stream]]
for (user_profile, recipient_id, stream) in new_subs:
color = pick_color_helper(user_profile, subs_by_user[user_profile.id])
sub_to_add = Subscription(user_profile=user_profile, active=True,
color=color, recipient_id=recipient_id,
desktop_notifications=user_profile.enable_stream_desktop_notifications,
audible_notifications=user_profile.enable_stream_sounds)
subs_by_user[user_profile.id].append(sub_to_add)
subs_to_add.append((sub_to_add, stream))
    # TODO: XXX: This transaction really needs to be done at the serializable
# transaction isolation level.
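    # (Under weaker isolation, a concurrent subscription change committed
    # between the two get_occupied_streams() snapshots below could yield
    # spurious or missing "occupy" events.)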
with transaction.atomic():
occupied_streams_before = list(get_occupied_streams(user_profile.realm))
Subscription.objects.bulk_create([sub for (sub, stream) in subs_to_add])
Subscription.objects.filter(id__in=[sub.id for (sub, stream) in subs_to_activate]).update(active=True)
occupied_streams_after = list(get_occupied_streams(user_profile.realm))
new_occupied_streams = [stream for stream in
set(occupied_streams_after) - set(occupied_streams_before)
if not stream.invite_only]
if new_occupied_streams:
event = dict(type="stream", op="occupy",
streams=[stream.to_dict()
for stream in new_occupied_streams])
send_event(event, active_user_ids(user_profile.realm))
# Notify all existing users on streams that users have joined
# First, get all users subscribed to the streams that we care about
# We fetch all subscription information upfront, as it's used throughout
    # the following code and we want to minimize DB queries
all_subs_by_stream = query_all_subs_by_stream(streams=streams)
def fetch_stream_subscriber_emails(stream):
# type: (Stream) -> List[text_type]
if stream.realm.is_zephyr_mirror_realm and not stream.invite_only:
return []
users = all_subs_by_stream[stream.id]
return [u.email for u in users]
sub_tuples_by_user = defaultdict(list) # type: Dict[int, List[Tuple[Subscription, Stream]]]
new_streams = set() # type: Set[Tuple[int, int]]
for (sub, stream) in subs_to_add + subs_to_activate:
sub_tuples_by_user[sub.user_profile.id].append((sub, stream))
new_streams.add((sub.user_profile.id, stream.id))
for user_profile in users:
if len(sub_tuples_by_user[user_profile.id]) == 0:
continue
sub_pairs = sub_tuples_by_user[user_profile.id]
notify_subscriptions_added(user_profile, sub_pairs, fetch_stream_subscriber_emails)
for stream in streams:
if stream.realm.is_zephyr_mirror_realm and not stream.invite_only:
continue
new_users = [user for user in users if (user.id, stream.id) in new_streams]
peer_user_ids = get_peer_user_ids_for_stream_change(
stream=stream,
altered_users=new_users,
subscribed_users=all_subs_by_stream[stream.id]
)
if peer_user_ids:
for added_user in new_users:
event = dict(type="subscription", op="peer_add",
subscriptions=[stream.name],
user_id=added_user.id)
send_event(event, peer_user_ids)
return ([(user_profile, stream) for (user_profile, recipient_id, stream) in new_subs] +
[(sub.user_profile, stream) for (sub, stream) in subs_to_activate],
already_subscribed)
def notify_subscriptions_removed(user_profile, streams, no_log=False):
# type: (UserProfile, Iterable[Stream], bool) -> None
if not no_log:
log_event({'type': 'subscription_removed',
'user': user_profile.email,
'names': [stream.name for stream in streams],
'domain': user_profile.realm.domain})
payload = [dict(name=stream.name, stream_id=stream.id) for stream in streams]
event = dict(type="subscription", op="remove",
subscriptions=payload)
send_event(event, [user_profile.id])
def bulk_remove_subscriptions(users, streams):
# type: (Iterable[UserProfile], Iterable[Stream]) -> Tuple[List[Tuple[UserProfile, Stream]], List[Tuple[UserProfile, Stream]]]
recipients_map = bulk_get_recipients(Recipient.STREAM,
[stream.id for stream in streams]) # type: Mapping[int, Recipient]
stream_map = {} # type: Dict[int, Stream]
for stream in streams:
stream_map[recipients_map[stream.id].id] = stream
subs_by_user = dict((user_profile.id, []) for user_profile in users) # type: Dict[int, List[Subscription]]
for sub in Subscription.objects.select_related("user_profile").filter(user_profile__in=users,
recipient__in=list(recipients_map.values()),
active=True):
subs_by_user[sub.user_profile_id].append(sub)
subs_to_deactivate = [] # type: List[Tuple[Subscription, Stream]]
not_subscribed = [] # type: List[Tuple[UserProfile, Stream]]
for user_profile in users:
recipients_to_unsub = set([recipient.id for recipient in recipients_map.values()])
for sub in subs_by_user[user_profile.id]:
recipients_to_unsub.remove(sub.recipient_id)
subs_to_deactivate.append((sub, stream_map[sub.recipient_id]))
for recipient_id in recipients_to_unsub:
not_subscribed.append((user_profile, stream_map[recipient_id]))
    # TODO: XXX: This transaction really needs to be done at the serializable
# transaction isolation level.
with transaction.atomic():
occupied_streams_before = list(get_occupied_streams(user_profile.realm))
Subscription.objects.filter(id__in=[sub.id for (sub, stream_name) in
subs_to_deactivate]).update(active=False)
occupied_streams_after = list(get_occupied_streams(user_profile.realm))
new_vacant_streams = [stream for stream in
set(occupied_streams_before) - set(occupied_streams_after)
if not stream.invite_only]
if new_vacant_streams:
event = dict(type="stream", op="vacate",
streams=[stream.to_dict()
for stream in new_vacant_streams])
send_event(event, active_user_ids(user_profile.realm))
altered_user_dict = defaultdict(list) # type: Dict[int, List[UserProfile]]
streams_by_user = defaultdict(list) # type: Dict[int, List[Stream]]
for (sub, stream) in subs_to_deactivate:
streams_by_user[sub.user_profile_id].append(stream)
altered_user_dict[stream.id].append(sub.user_profile)
for user_profile in users:
if len(streams_by_user[user_profile.id]) == 0:
continue
notify_subscriptions_removed(user_profile, streams_by_user[user_profile.id])
all_subs_by_stream = query_all_subs_by_stream(streams=streams)
for stream in streams:
if stream.realm.is_zephyr_mirror_realm and not stream.invite_only:
continue
altered_users = altered_user_dict[stream.id]
peer_user_ids = get_peer_user_ids_for_stream_change(
stream=stream,
altered_users=altered_users,
subscribed_users=all_subs_by_stream[stream.id]
)
if peer_user_ids:
for removed_user in altered_users:
event = dict(type="subscription",
op="peer_remove",
subscriptions=[stream.name],
user_id=removed_user.id)
send_event(event, peer_user_ids)
return ([(sub.user_profile, stream) for (sub, stream) in subs_to_deactivate],
not_subscribed)
def log_subscription_property_change(user_email, stream_name, property, value):
# type: (text_type, text_type, text_type, Any) -> None
event = {'type': 'subscription_property',
'property': property,
'user': user_email,
'stream_name': stream_name,
'value': value}
log_event(event)
def do_change_subscription_property(user_profile, sub, stream_name,
property_name, value):
# type: (UserProfile, Subscription, text_type, text_type, Any) -> None
setattr(sub, property_name, value)
sub.save(update_fields=[property_name])
log_subscription_property_change(user_profile.email, stream_name,
property_name, value)
event = dict(type="subscription",
op="update",
email=user_profile.email,
property=property_name,
value=value,
name=stream_name)
send_event(event, [user_profile.id])
def do_activate_user(user_profile, log=True, join_date=None):
    # type: (UserProfile, bool, Optional[datetime.datetime]) -> None
    if join_date is None:
        # Compute the default at call time, not once at module import time.
        join_date = timezone.now()
    user_profile.is_active = True
user_profile.is_mirror_dummy = False
user_profile.set_unusable_password()
user_profile.date_joined = join_date
user_profile.tos_version = settings.TOS_VERSION
user_profile.save(update_fields=["is_active", "date_joined", "password",
"is_mirror_dummy", "tos_version"])
if log:
domain = user_profile.realm.domain
log_event({'type': 'user_activated',
'user': user_profile.email,
'domain': domain})
notify_created_user(user_profile)
def do_reactivate_user(user_profile):
# type: (UserProfile) -> None
# Unlike do_activate_user, this is meant for re-activating existing users,
# so it doesn't reset their password, etc.
user_profile.is_active = True
user_profile.save(update_fields=["is_active"])
domain = user_profile.realm.domain
log_event({'type': 'user_reactivated',
'user': user_profile.email,
'domain': domain})
notify_created_user(user_profile)
def do_change_password(user_profile, password, log=True, commit=True,
hashed_password=False):
# type: (UserProfile, text_type, bool, bool, bool) -> None
    if hashed_password:
        # This is a hashed password, not the password itself; store it
        # directly rather than hashing it a second time via set_password().
        user_profile.password = password
    else:
        user_profile.set_password(password)
if commit:
user_profile.save(update_fields=["password"])
if log:
log_event({'type': 'user_change_password',
'user': user_profile.email,
'pwhash': user_profile.password})
def do_change_full_name(user_profile, full_name, log=True):
# type: (UserProfile, text_type, bool) -> None
user_profile.full_name = full_name
user_profile.save(update_fields=["full_name"])
if log:
log_event({'type': 'user_change_full_name',
'user': user_profile.email,
'full_name': full_name})
payload = dict(email=user_profile.email,
user_id=user_profile.id,
full_name=user_profile.full_name)
send_event(dict(type='realm_user', op='update', person=payload),
active_user_ids(user_profile.realm))
if user_profile.is_bot:
send_event(dict(type='realm_bot', op='update', bot=payload),
bot_owner_userids(user_profile))
def do_change_tos_version(user_profile, tos_version, log=True):
# type: (UserProfile, text_type, bool) -> None
user_profile.tos_version = tos_version
user_profile.save(update_fields=["tos_version"])
if log:
log_event({'type': 'user_change_tos_version',
'user': user_profile.email,
'tos_version': tos_version})
def do_regenerate_api_key(user_profile, log=True):
# type: (UserProfile, bool) -> None
user_profile.api_key = random_api_key()
user_profile.save(update_fields=["api_key"])
if log:
log_event({'type': 'user_change_api_key',
'user': user_profile.email})
if user_profile.is_bot:
send_event(dict(type='realm_bot',
op='update',
bot=dict(email=user_profile.email,
user_id=user_profile.id,
api_key=user_profile.api_key,
)),
bot_owner_userids(user_profile))
def do_change_avatar_source(user_profile, avatar_source, log=True):
# type: (UserProfile, text_type, bool) -> None
user_profile.avatar_source = avatar_source
user_profile.save(update_fields=["avatar_source"])
if log:
log_event({'type': 'user_change_avatar_source',
'user': user_profile.email,
'avatar_source': avatar_source})
if user_profile.is_bot:
send_event(dict(type='realm_bot',
op='update',
bot=dict(email=user_profile.email,
user_id=user_profile.id,
avatar_url=avatar_url(user_profile),
)),
bot_owner_userids(user_profile))
else:
payload = dict(
email=user_profile.email,
avatar_url=avatar_url(user_profile),
user_id=user_profile.id
)
send_event(dict(type='realm_user',
op='update',
person=payload),
active_user_ids(user_profile.realm))
def _default_stream_permision_check(user_profile, stream):
# type: (UserProfile, Optional[Stream]) -> None
# Any user can have a None default stream
if stream is not None:
if user_profile.is_bot:
user = user_profile.bot_owner
else:
user = user_profile
if stream.invite_only and not subscribed_to_stream(user, stream):
raise JsonableError(_('Insufficient permission'))
def do_change_default_sending_stream(user_profile, stream, log=True):
# type: (UserProfile, Stream, bool) -> None
_default_stream_permision_check(user_profile, stream)
user_profile.default_sending_stream = stream
user_profile.save(update_fields=['default_sending_stream'])
if log:
log_event({'type': 'user_change_default_sending_stream',
'user': user_profile.email,
'stream': str(stream)})
if user_profile.is_bot:
if stream:
stream_name = stream.name
else:
stream_name = None
send_event(dict(type='realm_bot',
op='update',
bot=dict(email=user_profile.email,
user_id=user_profile.id,
default_sending_stream=stream_name,
)),
bot_owner_userids(user_profile))
def do_change_default_events_register_stream(user_profile, stream, log=True):
# type: (UserProfile, Stream, bool) -> None
_default_stream_permision_check(user_profile, stream)
user_profile.default_events_register_stream = stream
user_profile.save(update_fields=['default_events_register_stream'])
if log:
log_event({'type': 'user_change_default_events_register_stream',
'user': user_profile.email,
'stream': str(stream)})
if user_profile.is_bot:
if stream:
stream_name = stream.name
else:
stream_name = None
send_event(dict(type='realm_bot',
op='update',
bot=dict(email=user_profile.email,
user_id=user_profile.id,
default_events_register_stream=stream_name,
)),
bot_owner_userids(user_profile))
def do_change_default_all_public_streams(user_profile, value, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.default_all_public_streams = value
user_profile.save(update_fields=['default_all_public_streams'])
if log:
log_event({'type': 'user_change_default_all_public_streams',
'user': user_profile.email,
'value': str(value)})
if user_profile.is_bot:
send_event(dict(type='realm_bot',
op='update',
bot=dict(email=user_profile.email,
user_id=user_profile.id,
default_all_public_streams=user_profile.default_all_public_streams,
)),
bot_owner_userids(user_profile))
def do_change_is_admin(user_profile, value, permission='administer'):
# type: (UserProfile, bool, str) -> None
if permission == "administer":
user_profile.is_realm_admin = value
user_profile.save(update_fields=["is_realm_admin"])
elif permission == "api_super_user":
user_profile.is_api_super_user = value
user_profile.save(update_fields=["is_api_super_user"])
else:
raise Exception("Unknown permission")
if permission == 'administer':
event = dict(type="realm_user", op="update",
person=dict(email=user_profile.email,
is_admin=value))
send_event(event, active_user_ids(user_profile.realm))
def do_change_bot_type(user_profile, value):
# type: (UserProfile, int) -> None
user_profile.bot_type = value
user_profile.save(update_fields=["bot_type"])
def do_make_stream_public(user_profile, realm, stream_name):
# type: (UserProfile, Realm, text_type) -> None
stream_name = stream_name.strip()
stream = get_stream(stream_name, realm)
if not stream:
raise JsonableError(_('Unknown stream "%s"') % (stream_name,))
if not subscribed_to_stream(user_profile, stream):
raise JsonableError(_('You are not invited to this stream.'))
stream.invite_only = False
stream.save(update_fields=['invite_only'])
def do_make_stream_private(realm, stream_name):
# type: (Realm, text_type) -> None
stream_name = stream_name.strip()
stream = get_stream(stream_name, realm)
if not stream:
raise JsonableError(_('Unknown stream "%s"') % (stream_name,))
stream.invite_only = True
stream.save(update_fields=['invite_only'])
def do_rename_stream(realm, old_name, new_name, log=True):
# type: (Realm, text_type, text_type, bool) -> Dict[str, text_type]
old_name = old_name.strip()
new_name = new_name.strip()
stream = get_stream(old_name, realm)
if not stream:
raise JsonableError(_('Unknown stream "%s"') % (old_name,))
# Will raise if there's an issue.
check_stream_name(new_name)
if get_stream(new_name, realm) and old_name.lower() != new_name.lower():
raise JsonableError(_('Stream name "%s" is already taken') % (new_name,))
old_name = stream.name
stream.name = new_name
stream.save(update_fields=["name"])
if log:
log_event({'type': 'stream_name_change',
'domain': realm.domain,
'new_name': new_name})
recipient = get_recipient(Recipient.STREAM, stream.id)
messages = Message.objects.filter(recipient=recipient).only("id")
# Update the display recipient and stream, which are easy single
# items to set.
old_cache_key = get_stream_cache_key(old_name, realm)
new_cache_key = get_stream_cache_key(stream.name, realm)
if old_cache_key != new_cache_key:
cache_delete(old_cache_key)
cache_set(new_cache_key, stream)
cache_set(display_recipient_cache_key(recipient.id), stream.name)
# Delete cache entries for everything else, which is cheaper and
# clearer than trying to set them. display_recipient is the out of
# date field in all cases.
cache_delete_many(
to_dict_cache_key_id(message.id, True) for message in messages)
cache_delete_many(
to_dict_cache_key_id(message.id, False) for message in messages)
new_email = encode_email_address(stream)
# We will tell our users to essentially
# update stream.name = new_name where name = old_name
# and update stream.email = new_email where name = old_name.
# We could optimize this by trying to send one message, but the
# client code really wants one property update at a time, and
# updating stream names is a pretty infrequent operation.
# More importantly, we want to key these updates by id, not name,
# since id is the immutable primary key, and obviously name is not.
data_updates = [
['email_address', new_email],
['name', new_name],
]
for property, value in data_updates:
event = dict(
op="update",
type="stream",
property=property,
value=value,
name=old_name
)
send_event(event, can_access_stream_user_ids(stream))
# Even though the token doesn't change, the web client needs to update the
# email forwarding address to display the correctly-escaped new name.
return {"email_address": new_email}
def do_change_stream_description(realm, stream_name, new_description):
# type: (Realm, text_type, text_type) -> None
stream = get_stream(stream_name, realm)
stream.description = new_description
stream.save(update_fields=['description'])
event = dict(type='stream', op='update',
property='description', name=stream_name,
value=new_description)
send_event(event, can_access_stream_user_ids(stream))
def do_create_realm(string_id, name, restricted_to_domain=None,
invite_required=None, org_type=None):
# type: (text_type, text_type, Optional[bool], Optional[bool], Optional[int]) -> Tuple[Realm, bool]
realm = get_realm_by_string_id(string_id)
created = not realm
if created:
kwargs = {} # type: Dict[str, Any]
if restricted_to_domain is not None:
kwargs['restricted_to_domain'] = restricted_to_domain
if invite_required is not None:
kwargs['invite_required'] = invite_required
if org_type is not None:
kwargs['org_type'] = org_type
realm = Realm(string_id=string_id, name=name,
domain=string_id + '@acme.com', **kwargs)
realm.save()
# Create stream once Realm object has been saved
notifications_stream, _ = create_stream_if_needed(realm, Realm.DEFAULT_NOTIFICATION_STREAM_NAME)
realm.notifications_stream = notifications_stream
realm.save(update_fields=['notifications_stream'])
# Include a welcome message in this notifications stream
product_name = "Zulip"
content = """Hello, and welcome to %s!
This is a message on stream `%s` with the topic `welcome`. We'll use this stream for
system-generated notifications.""" % (product_name, notifications_stream.name,)
msg = internal_prep_message(settings.WELCOME_BOT, 'stream',
notifications_stream.name, "welcome",
content, realm=realm)
do_send_messages([msg])
# Log the event
log_event({"type": "realm_created",
"string_id": string_id,
"restricted_to_domain": restricted_to_domain,
"invite_required": invite_required,
"org_type": org_type})
if settings.NEW_USER_BOT is not None:
signup_message = "Signups enabled"
internal_send_message(settings.NEW_USER_BOT, "stream",
"signups", string_id, signup_message)
return (realm, created)
def do_change_enable_stream_desktop_notifications(user_profile,
enable_stream_desktop_notifications,
log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.enable_stream_desktop_notifications = enable_stream_desktop_notifications
user_profile.save(update_fields=["enable_stream_desktop_notifications"])
event = {'type': 'update_global_notifications',
'user': user_profile.email,
'notification_name': 'enable_stream_desktop_notifications',
'setting': enable_stream_desktop_notifications}
if log:
log_event(event)
send_event(event, [user_profile.id])
def do_change_enable_stream_sounds(user_profile, enable_stream_sounds, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.enable_stream_sounds = enable_stream_sounds
user_profile.save(update_fields=["enable_stream_sounds"])
event = {'type': 'update_global_notifications',
'user': user_profile.email,
'notification_name': 'enable_stream_sounds',
'setting': enable_stream_sounds}
if log:
log_event(event)
send_event(event, [user_profile.id])
def do_change_enable_desktop_notifications(user_profile, enable_desktop_notifications, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.enable_desktop_notifications = enable_desktop_notifications
user_profile.save(update_fields=["enable_desktop_notifications"])
event = {'type': 'update_global_notifications',
'user': user_profile.email,
'notification_name': 'enable_desktop_notifications',
'setting': enable_desktop_notifications}
if log:
log_event(event)
send_event(event, [user_profile.id])
def do_change_enable_sounds(user_profile, enable_sounds, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.enable_sounds = enable_sounds
user_profile.save(update_fields=["enable_sounds"])
event = {'type': 'update_global_notifications',
'user': user_profile.email,
'notification_name': 'enable_sounds',
'setting': enable_sounds}
if log:
log_event(event)
send_event(event, [user_profile.id])
def do_change_enable_offline_email_notifications(user_profile, offline_email_notifications, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.enable_offline_email_notifications = offline_email_notifications
user_profile.save(update_fields=["enable_offline_email_notifications"])
event = {'type': 'update_global_notifications',
'user': user_profile.email,
'notification_name': 'enable_offline_email_notifications',
'setting': offline_email_notifications}
if log:
log_event(event)
send_event(event, [user_profile.id])
def do_change_enable_offline_push_notifications(user_profile, offline_push_notifications, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.enable_offline_push_notifications = offline_push_notifications
user_profile.save(update_fields=["enable_offline_push_notifications"])
event = {'type': 'update_global_notifications',
'user': user_profile.email,
'notification_name': 'enable_offline_push_notifications',
'setting': offline_push_notifications}
if log:
log_event(event)
send_event(event, [user_profile.id])
def do_change_enable_online_push_notifications(user_profile, online_push_notifications, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.enable_online_push_notifications = online_push_notifications
user_profile.save(update_fields=["enable_online_push_notifications"])
event = {'type': 'update_global_notifications',
'user': user_profile.email,
'notification_name': 'online_push_notifications',
'setting': online_push_notifications}
if log:
log_event(event)
send_event(event, [user_profile.id])
def do_change_enable_digest_emails(user_profile, enable_digest_emails, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.enable_digest_emails = enable_digest_emails
user_profile.save(update_fields=["enable_digest_emails"])
if not enable_digest_emails:
# Remove any digest emails that have been enqueued.
clear_followup_emails_queue(user_profile.email)
event = {'type': 'update_global_notifications',
'user': user_profile.email,
'notification_name': 'enable_digest_emails',
'setting': enable_digest_emails}
if log:
log_event(event)
send_event(event, [user_profile.id])
def do_change_autoscroll_forever(user_profile, autoscroll_forever, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.autoscroll_forever = autoscroll_forever
user_profile.save(update_fields=["autoscroll_forever"])
if log:
log_event({'type': 'autoscroll_forever',
'user': user_profile.email,
'autoscroll_forever': autoscroll_forever})
def do_change_enter_sends(user_profile, enter_sends):
# type: (UserProfile, bool) -> None
user_profile.enter_sends = enter_sends
user_profile.save(update_fields=["enter_sends"])
def do_change_default_desktop_notifications(user_profile, default_desktop_notifications):
# type: (UserProfile, bool) -> None
user_profile.default_desktop_notifications = default_desktop_notifications
user_profile.save(update_fields=["default_desktop_notifications"])
def do_change_twenty_four_hour_time(user_profile, setting_value, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.twenty_four_hour_time = setting_value
user_profile.save(update_fields=["twenty_four_hour_time"])
event = {'type': 'update_display_settings',
'user': user_profile.email,
'setting_name': 'twenty_four_hour_time',
'setting': setting_value}
if log:
log_event(event)
send_event(event, [user_profile.id])
def do_change_left_side_userlist(user_profile, setting_value, log=True):
# type: (UserProfile, bool, bool) -> None
user_profile.left_side_userlist = setting_value
user_profile.save(update_fields=["left_side_userlist"])
event = {'type': 'update_display_settings',
'user': user_profile.email,
             'setting_name': 'left_side_userlist',
'setting': setting_value}
if log:
log_event(event)
send_event(event, [user_profile.id])
def do_change_default_language(user_profile, setting_value, log=True):
# type: (UserProfile, text_type, bool) -> None
if setting_value == 'zh_CN':
# NB: remove this once we upgrade to Django 1.9
# zh-cn and zh-tw will be replaced by zh-hans and zh-hant in
# Django 1.9
setting_value = 'zh_HANS'
user_profile.default_language = setting_value
user_profile.save(update_fields=["default_language"])
event = {'type': 'update_display_settings',
'user': user_profile.email,
             'setting_name': 'default_language',
'setting': setting_value}
if log:
log_event(event)
send_event(event, [user_profile.id])
def set_default_streams(realm, stream_names):
# type: (Realm, Iterable[text_type]) -> None
DefaultStream.objects.filter(realm=realm).delete()
for stream_name in stream_names:
stream, _ = create_stream_if_needed(realm, stream_name)
DefaultStream.objects.create(stream=stream, realm=realm)
    # Always include the realm's default notifications stream, if it exists
if realm.notifications_stream is not None:
DefaultStream.objects.get_or_create(stream=realm.notifications_stream, realm=realm)
log_event({'type': 'default_streams',
'domain': realm.domain,
'streams': stream_names})
def notify_default_streams(realm):
# type: (Realm) -> None
event = dict(
type="default_streams",
default_streams=streams_to_dicts_sorted(get_default_streams_for_realm(realm))
)
send_event(event, active_user_ids(realm))
def do_add_default_stream(realm, stream_name):
# type: (Realm, text_type) -> None
stream, _ = create_stream_if_needed(realm, stream_name)
if not DefaultStream.objects.filter(realm=realm, stream=stream).exists():
DefaultStream.objects.create(realm=realm, stream=stream)
notify_default_streams(realm)
def do_remove_default_stream(realm, stream_name):
# type: (Realm, text_type) -> None
stream = get_stream(stream_name, realm)
if stream is None:
raise JsonableError(_("Stream does not exist"))
DefaultStream.objects.filter(realm=realm, stream=stream).delete()
notify_default_streams(realm)
def get_default_streams_for_realm(realm):
# type: (Realm) -> List[Stream]
return [default.stream for default in
DefaultStream.objects.select_related("stream", "stream__realm").filter(realm=realm)]
def get_default_subs(user_profile):
# type: (UserProfile) -> List[Stream]
# Right now default streams are realm-wide. This wrapper gives us flexibility
# to some day further customize how we set up default streams for new users.
return get_default_streams_for_realm(user_profile.realm)
# Returns default streams in a JSON-serializable format.
def streams_to_dicts_sorted(streams):
# type: (List[Stream]) -> List[Dict[str, Any]]
return sorted([stream.to_dict() for stream in streams], key=lambda elt: elt["name"])
def do_update_user_activity_interval(user_profile, log_time):
# type: (UserProfile, datetime.datetime) -> None
effective_end = log_time + datetime.timedelta(minutes=15)
# This code isn't perfect, because with various races we might end
# up creating two overlapping intervals, but that shouldn't happen
# often, and can be corrected for in post-processing
try:
last = UserActivityInterval.objects.filter(user_profile=user_profile).order_by("-end")[0]
# There are two ways our intervals could overlap:
# (1) The start of the new interval could be inside the old interval
# (2) The end of the new interval could be inside the old interval
# In either case, we just extend the old interval to include the new interval.
if ((log_time <= last.end and log_time >= last.start) or
(effective_end <= last.end and effective_end >= last.start)):
last.end = max(last.end, effective_end)
last.start = min(last.start, log_time)
last.save(update_fields=["start", "end"])
return
except IndexError:
pass
# Otherwise, the intervals don't overlap, so we should make a new one
UserActivityInterval.objects.create(user_profile=user_profile, start=log_time,
end=effective_end)
@statsd_increment('user_activity')
def do_update_user_activity(user_profile, client, query, log_time):
# type: (UserProfile, Client, text_type, datetime.datetime) -> None
(activity, created) = UserActivity.objects.get_or_create(
user_profile = user_profile,
client = client,
query = query,
defaults={'last_visit': log_time, 'count': 0})
activity.count += 1
activity.last_visit = log_time
activity.save(update_fields=["last_visit", "count"])
def send_presence_changed(user_profile, presence):
# type: (UserProfile, UserPresence) -> None
presence_dict = presence.to_dict()
event = dict(type="presence", email=user_profile.email,
server_timestamp=time.time(),
presence={presence_dict['client']: presence.to_dict()})
send_event(event, active_user_ids(user_profile.realm))
def consolidate_client(client):
# type: (Client) -> Client
# The web app reports a client as 'website'
# The desktop app reports a client as ZulipDesktop
# due to it setting a custom user agent. We want both
# to count as web users
# Alias ZulipDesktop to website
if client.name in ['ZulipDesktop']:
return get_client('website')
else:
return client
@statsd_increment('user_presence')
def do_update_user_presence(user_profile, client, log_time, status):
# type: (UserProfile, Client, datetime.datetime, int) -> None
client = consolidate_client(client)
(presence, created) = UserPresence.objects.get_or_create(
user_profile = user_profile,
client = client,
defaults = {'timestamp': log_time,
'status': status})
stale_status = (log_time - presence.timestamp) > datetime.timedelta(minutes=1, seconds=10)
was_idle = presence.status == UserPresence.IDLE
became_online = (status == UserPresence.ACTIVE) and (stale_status or was_idle)
# If an object was created, it has already been saved.
#
# We suppress changes from ACTIVE to IDLE before stale_status is reached;
# this protects us from the user having two clients open: one active, the
# other idle. Without this check, we would constantly toggle their status
# between the two states.
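    # Note: Python's operator precedence makes the condition below parse as
    # ((not created) and stale_status) or was_idle or (status == presence.status).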
if not created and stale_status or was_idle or status == presence.status:
# The following block attempts to only update the "status"
# field in the event that it actually changed. This is
# important to avoid flushing the UserPresence cache when the
# data it would return to a client hasn't actually changed
# (see the UserPresence post_save hook for details).
presence.timestamp = log_time
update_fields = ["timestamp"]
if presence.status != status:
presence.status = status
update_fields.append("status")
presence.save(update_fields=update_fields)
if not user_profile.realm.is_zephyr_mirror_realm and (created or became_online):
# Push event to all users in the realm so they see the new user
# appear in the presence list immediately, or the newly online
# user without delay. Note that we won't send an update here for a
# timestamp update, because we rely on the browser to ping us every 50
# seconds for realm-wide status updates, and those updates should have
# recent timestamps, which means the browser won't think active users
# have gone idle. If we were more aggressive in this function about
# sending timestamp updates, we could eliminate the ping responses, but
# that's not a high priority for now, considering that most of our non-MIT
# realms are pretty small.
send_presence_changed(user_profile, presence)
def update_user_activity_interval(user_profile, log_time):
# type: (UserProfile, datetime.datetime) -> None
event={'user_profile_id': user_profile.id,
'time': datetime_to_timestamp(log_time)}
queue_json_publish("user_activity_interval", event,
lambda e: do_update_user_activity_interval(user_profile, log_time))
def update_user_presence(user_profile, client, log_time, status,
new_user_input):
# type: (UserProfile, Client, datetime.datetime, int, bool) -> None
event={'user_profile_id': user_profile.id,
'status': status,
'time': datetime_to_timestamp(log_time),
'client': client.name}
queue_json_publish("user_presence", event,
lambda e: do_update_user_presence(user_profile, client,
log_time, status))
if new_user_input:
update_user_activity_interval(user_profile, log_time)
def do_update_pointer(user_profile, pointer, update_flags=False):
# type: (UserProfile, int, bool) -> None
prev_pointer = user_profile.pointer
user_profile.pointer = pointer
user_profile.save(update_fields=["pointer"])
if update_flags:
# Until we handle the new read counts in the Android app
# natively, this is a shim that will mark as read any messages
# up until the pointer move
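        # (The filter below matches only rows whose read bit is still unset,
        # and the update() sets that bit via a bitwise OR.)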
UserMessage.objects.filter(user_profile=user_profile,
message__id__gt=prev_pointer,
message__id__lte=pointer,
flags=~UserMessage.flags.read) \
.update(flags=F('flags').bitor(UserMessage.flags.read))
event = dict(type='pointer', pointer=pointer)
send_event(event, [user_profile.id])
def do_update_message_flags(user_profile, operation, flag, messages, all, stream_obj, topic_name):
# type: (UserProfile, text_type, text_type, Sequence[int], bool, Optional[Stream], Optional[text_type]) -> int
flagattr = getattr(UserMessage.flags, flag)
if all:
log_statsd_event('bankruptcy')
msgs = UserMessage.objects.filter(user_profile=user_profile)
elif stream_obj is not None:
recipient = get_recipient(Recipient.STREAM, stream_obj.id)
if topic_name:
msgs = UserMessage.objects.filter(message__recipient=recipient,
user_profile=user_profile,
message__subject__iexact=topic_name)
else:
msgs = UserMessage.objects.filter(message__recipient=recipient, user_profile=user_profile)
else:
msgs = UserMessage.objects.filter(user_profile=user_profile,
message__id__in=messages)
# Hack to let you star any message
if msgs.count() == 0:
        if len(messages) != 1:
raise JsonableError(_("Invalid message(s)"))
if flag != "starred":
raise JsonableError(_("Invalid message(s)"))
# Validate that the user could have read the relevant message
message = access_message(user_profile, messages[0])[0]
# OK, this is a message that you legitimately have access
# to via narrowing to the stream it is on, even though you
# didn't actually receive it. So we create a historical,
# read UserMessage message row for you to star.
UserMessage.objects.create(user_profile=user_profile,
message=message,
flags=UserMessage.flags.historical | UserMessage.flags.read)
# The filter() statements below prevent postgres from doing a lot of
# unnecessary work, which is a big deal for users updating lots of
# flags (e.g. bankruptcy). This patch arose from seeing slow calls
# to POST /json/messages/flags in the logs. The filter() statements
# are kind of magical; they are actually just testing the one bit.
if operation == 'add':
msgs = msgs.filter(flags=~flagattr)
if stream_obj:
messages = list(msgs.values_list('message__id', flat=True))
count = msgs.update(flags=F('flags').bitor(flagattr))
elif operation == 'remove':
msgs = msgs.filter(flags=flagattr)
if stream_obj:
messages = list(msgs.values_list('message__id', flat=True))
count = msgs.update(flags=F('flags').bitand(~flagattr))
event = {'type': 'update_message_flags',
'operation': operation,
'flag': flag,
'messages': messages,
'all': all}
log_event(event)
send_event(event, [user_profile.id])
statsd.incr("flags.%s.%s" % (flag, operation), count)
return count
def subscribed_to_stream(user_profile, stream):
# type: (UserProfile, Stream) -> bool
try:
if Subscription.objects.get(user_profile=user_profile,
active=True,
recipient__type=Recipient.STREAM,
recipient__type_id=stream.id):
return True
return False
except Subscription.DoesNotExist:
return False
def truncate_content(content, max_length, truncation_message):
# type: (text_type, int, text_type) -> text_type
if len(content) > max_length:
content = content[:max_length - len(truncation_message)] + truncation_message
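    # Illustration: with max_length=10 and truncation_message="...", a
    # 50-character body becomes its first 7 characters followed by "...".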
return content
def truncate_body(body):
# type: (text_type) -> text_type
return truncate_content(body, MAX_MESSAGE_LENGTH, "...")
def truncate_topic(topic):
# type: (text_type) -> text_type
return truncate_content(topic, MAX_SUBJECT_LENGTH, "...")
def update_user_message_flags(message, ums):
# type: (Message, Iterable[UserMessage]) -> None
wildcard = message.mentions_wildcard
mentioned_ids = message.mentions_user_ids
ids_with_alert_words = message.user_ids_with_alert_words
changed_ums = set() # type: Set[UserMessage]
def update_flag(um, should_set, flag):
# type: (UserMessage, bool, int) -> None
if should_set:
if not (um.flags & flag):
um.flags |= flag
changed_ums.add(um)
else:
if (um.flags & flag):
um.flags &= ~flag
changed_ums.add(um)
for um in ums:
has_alert_word = um.user_profile_id in ids_with_alert_words
update_flag(um, has_alert_word, UserMessage.flags.has_alert_word)
mentioned = um.user_profile_id in mentioned_ids
update_flag(um, mentioned, UserMessage.flags.mentioned)
update_flag(um, wildcard, UserMessage.flags.wildcard_mentioned)
is_me_message = getattr(message, 'is_me_message', False)
update_flag(um, is_me_message, UserMessage.flags.is_me_message)
for um in changed_ums:
um.save(update_fields=['flags'])
# We use transaction.atomic to support select_for_update in the attachment codepath.
@transaction.atomic
def do_update_message(user_profile, message, subject, propagate_mode, content, rendered_content):
# type: (UserProfile, Message, Optional[text_type], str, Optional[text_type], Optional[text_type]) -> None
event = {'type': 'update_message',
'sender': user_profile.email,
'message_id': message.id} # type: Dict[str, Any]
edit_history_event = {} # type: Dict[str, Any]
changed_messages = [message]
# Set first_rendered_content to be the oldest version of the
# rendered content recorded; which is the current version if the
# content hasn't been edited before. Note that because one could
# have edited just the subject, not every edit history event
# contains a prev_rendered_content element.
first_rendered_content = message.rendered_content
if message.edit_history is not None:
edit_history = ujson.loads(message.edit_history)
for old_edit_history_event in edit_history:
if 'prev_rendered_content' in old_edit_history_event:
first_rendered_content = old_edit_history_event['prev_rendered_content']
ums = UserMessage.objects.filter(message=message.id)
if content is not None:
update_user_message_flags(message, ums)
# We are turning off diff highlighting everywhere until ticket #1532 is addressed.
if False:
# Don't highlight message edit diffs on prod
rendered_content = highlight_html_differences(first_rendered_content, rendered_content)
event['orig_content'] = message.content
event['orig_rendered_content'] = message.rendered_content
edit_history_event["prev_content"] = message.content
edit_history_event["prev_rendered_content"] = message.rendered_content
edit_history_event["prev_rendered_content_version"] = message.rendered_content_version
message.content = content
message.rendered_content = rendered_content
message.rendered_content_version = bugdown_version
event["content"] = content
event["rendered_content"] = rendered_content
prev_content = edit_history_event['prev_content']
if Message.content_has_attachment(prev_content) or Message.content_has_attachment(message.content):
check_attachment_reference_change(prev_content, message)
if subject is not None:
orig_subject = message.topic_name()
subject = truncate_topic(subject)
event["orig_subject"] = orig_subject
event["propagate_mode"] = propagate_mode
message.subject = subject
event["stream_id"] = message.recipient.type_id
event["subject"] = subject
event['subject_links'] = bugdown.subject_links(message.sender.realm.domain.lower(), subject)
edit_history_event["prev_subject"] = orig_subject
if propagate_mode in ["change_later", "change_all"]:
propagate_query = Q(recipient = message.recipient, subject = orig_subject)
            # We only change messages up to 2 days in the past, to avoid hammering our
            # DB by changing an unbounded number of messages
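            # 'change_all' updates the other recent messages in this topic (the
            # edited message itself is saved separately below); 'change_later'
            # only touches messages sent after it.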
if propagate_mode == 'change_all':
before_bound = now() - datetime.timedelta(days=2)
propagate_query = propagate_query & ~Q(id = message.id) & \
Q(pub_date__range=(before_bound, now()))
if propagate_mode == 'change_later':
propagate_query = propagate_query & Q(id__gt = message.id)
messages = Message.objects.filter(propagate_query).select_related()
# Evaluate the query before running the update
messages_list = list(messages)
messages.update(subject=subject)
for m in messages_list:
# The cached ORM object is not changed by messages.update()
# and the remote cache update requires the new value
m.subject = subject
changed_messages += messages_list
message.last_edit_time = timezone.now()
event['edit_timestamp'] = datetime_to_timestamp(message.last_edit_time)
edit_history_event['timestamp'] = event['edit_timestamp']
if message.edit_history is not None:
edit_history.insert(0, edit_history_event)
else:
edit_history = [edit_history_event]
message.edit_history = ujson.dumps(edit_history)
log_event(event)
message.save(update_fields=["subject", "content", "rendered_content",
"rendered_content_version", "last_edit_time",
"edit_history"])
# Update the message as stored in the (deprecated) message
# cache (for shunting the message over to Tornado in the old
# get_messages API) and also the to_dict caches.
items_for_remote_cache = {}
event['message_ids'] = []
for changed_message in changed_messages:
event['message_ids'].append(changed_message.id)
items_for_remote_cache[to_dict_cache_key(changed_message, True)] = \
(MessageDict.to_dict_uncached(changed_message, apply_markdown=True),)
items_for_remote_cache[to_dict_cache_key(changed_message, False)] = \
(MessageDict.to_dict_uncached(changed_message, apply_markdown=False),)
cache_set_many(items_for_remote_cache)
def user_info(um):
# type: (UserMessage) -> Dict[str, Any]
return {
'id': um.user_profile_id,
'flags': um.flags_list()
}
send_event(event, list(map(user_info, ums)))
def encode_email_address(stream):
# type: (Stream) -> text_type
return encode_email_address_helper(stream.name, stream.email_token)
def encode_email_address_helper(name, email_token):
# type: (text_type, text_type) -> text_type
# Some deployments may not use the email gateway
if settings.EMAIL_GATEWAY_PATTERN == '':
return ''
# Given the fact that we have almost no restrictions on stream names and
# that what characters are allowed in e-mail addresses is complicated and
# dependent on context in the address, we opt for a very simple scheme:
#
# Only encode the stream name (leave the + and token alone). Encode
# everything that isn't alphanumeric plus _ as the percent-prefixed integer
# ordinal of that character, padded with zeroes to the maximum number of
# bytes of a UTF-8 encoded Unicode character.
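    # For example, a stream named "devel help" is encoded as "devel%0032help"
    # (the space, ordinal 32, zero-padded to four digits).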
encoded_name = re.sub("\W", lambda x: "%" + str(ord(x.group(0))).zfill(4), name)
encoded_token = "%s+%s" % (encoded_name, email_token)
return settings.EMAIL_GATEWAY_PATTERN % (encoded_token,)
def get_email_gateway_message_string_from_address(address):
# type: (text_type) -> Optional[text_type]
pattern_parts = [re.escape(part) for part in settings.EMAIL_GATEWAY_PATTERN.split('%s')]
if settings.EMAIL_GATEWAY_EXTRA_PATTERN_HACK:
# Accept mails delivered to any Zulip server
pattern_parts[-1] = settings.EMAIL_GATEWAY_EXTRA_PATTERN_HACK
match_email_re = re.compile("(.*?)".join(pattern_parts))
match = match_email_re.match(address)
if not match:
return None
msg_string = match.group(1)
return msg_string
def decode_email_address(email):
# type: (text_type) -> Tuple[text_type, text_type]
# Perform the reverse of encode_email_address. Returns a tuple of (streamname, email_token)
msg_string = get_email_gateway_message_string_from_address(email)
if '.' in msg_string:
# Workaround for Google Groups and other programs that don't accept emails
# that have + signs in them (see Trac #2102)
encoded_stream_name, token = msg_string.split('.')
else:
encoded_stream_name, token = msg_string.split('+')
stream_name = re.sub("%\d{4}", lambda x: unichr(int(x.group(0)[1:])), encoded_stream_name)
return stream_name, token
# In general, it's better to avoid using .values() because it makes
# the code pretty ugly, but in this case, it has a significant
# performance impact when loading the home page (/) for users with
# large numbers of subscriptions, so it's worth optimizing.
def gather_subscriptions_helper(user_profile):
# type: (UserProfile) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]], List[Dict[str, Any]]]
sub_dicts = Subscription.objects.select_related("recipient").filter(
user_profile = user_profile,
recipient__type = Recipient.STREAM).values(
"recipient__type_id", "in_home_view", "color", "desktop_notifications",
"audible_notifications", "active", "pin_to_top")
stream_ids = set([sub["recipient__type_id"] for sub in sub_dicts])
all_streams = get_active_streams(user_profile.realm).select_related(
"realm").values("id", "name", "invite_only", "realm_id", \
"realm__domain", "email_token", "description")
stream_dicts = [stream for stream in all_streams if stream['id'] in stream_ids]
stream_hash = {}
for stream in stream_dicts:
stream_hash[stream["id"]] = stream
all_streams_id = [stream["id"] for stream in all_streams]
subscribed = []
unsubscribed = []
never_subscribed = []
# Deactivated streams aren't in stream_hash.
streams = [stream_hash[sub["recipient__type_id"]] for sub in sub_dicts \
if sub["recipient__type_id"] in stream_hash]
streams_subscribed_map = dict((sub["recipient__type_id"], sub["active"]) for sub in sub_dicts)
# Add never subscribed streams to streams_subscribed_map
streams_subscribed_map.update({stream['id']: False for stream in all_streams if stream not in streams})
subscriber_map = bulk_get_subscriber_user_ids(all_streams, user_profile, streams_subscribed_map)
sub_unsub_stream_ids = set()
for sub in sub_dicts:
sub_unsub_stream_ids.add(sub["recipient__type_id"])
stream = stream_hash.get(sub["recipient__type_id"])
if not stream:
# This stream has been deactivated, don't include it.
continue
subscribers = subscriber_map[stream["id"]]
# Important: don't show the subscribers if the stream is invite only
# and this user isn't on it anymore.
if stream["invite_only"] and not sub["active"]:
subscribers = None
stream_dict = {'name': stream["name"],
'in_home_view': sub["in_home_view"],
'invite_only': stream["invite_only"],
'color': sub["color"],
'desktop_notifications': sub["desktop_notifications"],
'audible_notifications': sub["audible_notifications"],
'pin_to_top': sub["pin_to_top"],
'stream_id': stream["id"],
'description': stream["description"],
'email_address': encode_email_address_helper(stream["name"], stream["email_token"])}
if subscribers is not None:
stream_dict['subscribers'] = subscribers
if sub["active"]:
subscribed.append(stream_dict)
else:
unsubscribed.append(stream_dict)
all_streams_id_set = set(all_streams_id)
    # Listing public streams is disabled for Zephyr mirroring realms.
if user_profile.realm.is_zephyr_mirror_realm:
never_subscribed_stream_ids = set() # type: Set[int]
else:
never_subscribed_stream_ids = all_streams_id_set - sub_unsub_stream_ids
never_subscribed_streams = [ns_stream_dict for ns_stream_dict in all_streams
if ns_stream_dict['id'] in never_subscribed_stream_ids]
for stream in never_subscribed_streams:
if not stream['invite_only']:
stream_dict = {'name': stream['name'],
'invite_only': stream['invite_only'],
'stream_id': stream['id'],
'description': stream['description']}
subscribers = subscriber_map[stream["id"]]
if subscribers is not None:
stream_dict['subscribers'] = subscribers
never_subscribed.append(stream_dict)
return (sorted(subscribed, key=lambda x: x['name']),
sorted(unsubscribed, key=lambda x: x['name']),
sorted(never_subscribed, key=lambda x: x['name']))
def gather_subscriptions(user_profile):
# type: (UserProfile) -> Tuple[List[Dict[str, Any]], List[Dict[str, Any]]]
subscribed, unsubscribed, never_subscribed = gather_subscriptions_helper(user_profile)
user_ids = set()
for subs in [subscribed, unsubscribed, never_subscribed]:
for sub in subs:
if 'subscribers' in sub:
for subscriber in sub['subscribers']:
user_ids.add(subscriber)
email_dict = get_emails_from_user_ids(list(user_ids))
for subs in [subscribed, unsubscribed]:
for sub in subs:
if 'subscribers' in sub:
sub['subscribers'] = [email_dict[user_id] for user_id in sub['subscribers']]
return (subscribed, unsubscribed)
def get_status_dict(requesting_user_profile):
# type: (UserProfile) -> Dict[text_type, Dict[text_type, Dict[str, Any]]]
if requesting_user_profile.realm.presence_disabled:
# Return an empty dict if presence is disabled in this realm
return defaultdict(dict)
return UserPresence.get_status_dict_by_realm(requesting_user_profile.realm_id)
def get_realm_user_dicts(user_profile):
# type: (UserProfile) -> List[Dict[str, text_type]]
return [{'email' : userdict['email'],
'user_id' : userdict['id'],
'is_admin' : userdict['is_realm_admin'],
'is_bot' : userdict['is_bot'],
'full_name' : userdict['full_name']}
for userdict in get_active_user_dicts_in_realm(user_profile.realm)]
def get_cross_realm_dicts():
# type: () -> List[Dict[str, Any]]
users = [get_user_profile_by_email(email) for email in get_cross_realm_emails()]
return [{'email' : user.email,
'user_id' : user.id,
'is_admin' : user.is_realm_admin,
'is_bot' : user.is_bot,
'full_name' : user.full_name}
for user in users]
# Fetch initial data. When event_types is not specified, clients want
# all event types. Whenever you add new code to this function, you
# should also add corresponding events for changes in the data
# structures and new code to apply_events (and add a test in EventsRegisterTest).
def fetch_initial_state_data(user_profile, event_types, queue_id):
# type: (UserProfile, Optional[Iterable[str]], str) -> Dict[str, Any]
state = {'queue_id': queue_id} # type: Dict[str, Any]
if event_types is None:
want = lambda msg_type: True
else:
want = set(event_types).__contains__
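    # e.g. with event_types == ['realm', 'subscription'], want('pointer') is False,
    # so the pointer block below is skipped (illustrative).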
if want('alert_words'):
state['alert_words'] = user_alert_words(user_profile)
if want('message'):
# The client should use get_old_messages() to fetch messages
# starting with the max_message_id. They will get messages
# newer than that ID via get_events()
messages = Message.objects.filter(usermessage__user_profile=user_profile).order_by('-id')[:1]
if messages:
state['max_message_id'] = messages[0].id
else:
state['max_message_id'] = -1
if want('muted_topics'):
state['muted_topics'] = ujson.loads(user_profile.muted_topics)
if want('pointer'):
state['pointer'] = user_profile.pointer
if want('presence'):
state['presences'] = get_status_dict(user_profile)
if want('realm'):
state['realm_name'] = user_profile.realm.name
state['realm_restricted_to_domain'] = user_profile.realm.restricted_to_domain
state['realm_invite_required'] = user_profile.realm.invite_required
state['realm_invite_by_admins_only'] = user_profile.realm.invite_by_admins_only
state['realm_authentication_methods'] = user_profile.realm.authentication_methods_dict()
state['realm_create_stream_by_admins_only'] = user_profile.realm.create_stream_by_admins_only
state['realm_allow_message_editing'] = user_profile.realm.allow_message_editing
state['realm_message_content_edit_limit_seconds'] = user_profile.realm.message_content_edit_limit_seconds
state['realm_default_language'] = user_profile.realm.default_language
if want('realm_domain'):
state['realm_domain'] = user_profile.realm.domain
if want('realm_emoji'):
state['realm_emoji'] = user_profile.realm.get_emoji()
if want('realm_filters'):
state['realm_filters'] = realm_filters_for_domain(user_profile.realm.domain)
if want('realm_user'):
state['realm_users'] = get_realm_user_dicts(user_profile)
if want('realm_bot'):
state['realm_bots'] = get_owned_bot_dicts(user_profile)
if want('referral'):
state['referrals'] = {'granted': user_profile.invites_granted,
'used': user_profile.invites_used}
if want('subscription'):
subscriptions, unsubscribed, never_subscribed = gather_subscriptions_helper(user_profile)
state['subscriptions'] = subscriptions
state['unsubscribed'] = unsubscribed
state['never_subscribed'] = never_subscribed
if want('update_message_flags'):
# There's no initial data for message flag updates, client will
# get any updates during a session from get_events()
pass
if want('stream'):
state['streams'] = do_get_streams(user_profile)
if want('default_streams'):
state['realm_default_streams'] = streams_to_dicts_sorted(get_default_streams_for_realm(user_profile.realm))
if want('update_display_settings'):
state['twenty_four_hour_time'] = user_profile.twenty_four_hour_time
state['left_side_userlist'] = user_profile.left_side_userlist
default_language = user_profile.default_language
if user_profile.default_language == 'zh_HANS':
# NB: remove this once we upgrade to Django 1.9
# zh-cn and zh-tw will be replaced by zh-hans and zh-hant in
# Django 1.9
default_language = 'zh_CN'
state['default_language'] = default_language
return state
def apply_events(state, events, user_profile):
# type: (Dict[str, Any], Iterable[Dict[str, Any]], UserProfile) -> None
for event in events:
if event['type'] == "message":
state['max_message_id'] = max(state['max_message_id'], event['message']['id'])
elif event['type'] == "pointer":
state['pointer'] = max(state['pointer'], event['pointer'])
elif event['type'] == "realm_user":
person = event['person']
def our_person(p):
# type: (Dict[str, Any]) -> bool
return p['email'] == person['email']
if event['op'] == "add":
state['realm_users'].append(person)
elif event['op'] == "remove":
state['realm_users'] = [user for user in state['realm_users'] if not our_person(user)]
elif event['op'] == 'update':
for p in state['realm_users']:
if our_person(p):
# In the unlikely event that the current user
# just changed to/from being an admin, we need
# to add/remove the data on all bots in the
# realm. This is ugly and probably better
# solved by removing the all-realm-bots data
# given to admin users from this flow.
if ('is_admin' in person and 'realm_bots' in state and
user_profile.email == person['email']):
if p['is_admin'] and not person['is_admin']:
state['realm_bots'] = []
if not p['is_admin'] and person['is_admin']:
state['realm_bots'] = get_owned_bot_dicts(user_profile)
# Now update the person
p.update(person)
elif event['type'] == 'realm_bot':
if event['op'] == 'add':
state['realm_bots'].append(event['bot'])
if event['op'] == 'remove':
email = event['bot']['email']
state['realm_bots'] = [b for b in state['realm_bots'] if b['email'] != email]
if event['op'] == 'update':
for bot in state['realm_bots']:
if bot['email'] == event['bot']['email']:
bot.update(event['bot'])
elif event['type'] == 'stream':
if event['op'] == 'create':
for stream in event['streams']:
if not stream['invite_only']:
stream_data = copy.deepcopy(stream)
stream_data['subscribers'] = []
# Add stream to never_subscribed (if not invite_only)
state['never_subscribed'].append(stream_data)
if event['op'] == 'delete':
deleted_stream_ids = {stream['stream_id'] for stream in event['streams']}
state['streams'] = [s for s in state['streams'] if s['stream_id'] not in deleted_stream_ids]
state['never_subscribed'] = [stream for stream in state['never_subscribed'] if
stream['stream_id'] not in deleted_stream_ids]
if event['op'] == 'update':
# For legacy reasons, we call stream data 'subscriptions' in
# the state var here, for the benefit of the JS code.
for obj in state['subscriptions']:
if obj['name'].lower() == event['name'].lower():
obj[event['property']] = event['value']
# Also update the pure streams data
for stream in state['streams']:
if stream['name'].lower() == event['name'].lower():
prop = event['property']
if prop in stream:
stream[prop] = event['value']
elif event['op'] == "occupy":
state['streams'] += event['streams']
elif event['op'] == "vacate":
stream_ids = [s["stream_id"] for s in event['streams']]
state['streams'] = [s for s in state['streams'] if s["stream_id"] not in stream_ids]
elif event['type'] == 'default_streams':
state['realm_default_streams'] = event['default_streams']
elif event['type'] == 'realm':
if event['op'] == "update":
field = 'realm_' + event['property']
state[field] = event['value']
elif event['op'] == "update_dict":
for key, value in event['data'].items():
state['realm_' + key] = value
elif event['type'] == "subscription":
if event['op'] in ["add"]:
                # Convert the subscriber emails to user_profile IDs, to match what register() returns
# TODO: Clean up this situation
for item in event["subscriptions"]:
item["subscribers"] = [get_user_profile_by_email(email).id for email in item["subscribers"]]
def name(sub):
# type: (Dict[str, Any]) -> text_type
return sub['name'].lower()
if event['op'] == "add":
added_names = set(map(name, event["subscriptions"]))
was_added = lambda s: name(s) in added_names
# add the new subscriptions
state['subscriptions'] += event['subscriptions']
# remove them from unsubscribed if they had been there
state['unsubscribed'] = [s for s in state['unsubscribed'] if not was_added(s)]
# remove them from never_subscribed if they had been there
state['never_subscribed'] = [s for s in state['never_subscribed'] if not was_added(s)]
elif event['op'] == "remove":
removed_names = set(map(name, event["subscriptions"]))
was_removed = lambda s: name(s) in removed_names
# Find the subs we are affecting.
removed_subs = list(filter(was_removed, state['subscriptions']))
# Remove our user from the subscribers of the removed subscriptions.
for sub in removed_subs:
sub['subscribers'] = [id for id in sub['subscribers'] if id != user_profile.id]
# We must effectively copy the removed subscriptions from subscriptions to
# unsubscribe, since we only have the name in our data structure.
state['unsubscribed'] += removed_subs
# Now filter out the removed subscriptions from subscriptions.
state['subscriptions'] = [s for s in state['subscriptions'] if not was_removed(s)]
elif event['op'] == 'update':
for sub in state['subscriptions']:
if sub['name'].lower() == event['name'].lower():
sub[event['property']] = event['value']
elif event['op'] == 'peer_add':
user_id = event['user_id']
for sub in state['subscriptions']:
if (sub['name'] in event['subscriptions'] and
user_id not in sub['subscribers']):
sub['subscribers'].append(user_id)
for sub in state['never_subscribed']:
if (sub['name'] in event['subscriptions'] and
user_id not in sub['subscribers']):
sub['subscribers'].append(user_id)
elif event['op'] == 'peer_remove':
user_id = event['user_id']
for sub in state['subscriptions']:
if (sub['name'] in event['subscriptions'] and
user_id in sub['subscribers']):
sub['subscribers'].remove(user_id)
elif event['type'] == "presence":
state['presences'][event['email']] = event['presence']
elif event['type'] == "update_message":
# The client will get the updated message directly
pass
elif event['type'] == "referral":
state['referrals'] = event['referrals']
elif event['type'] == "update_message_flags":
# The client will get the message with the updated flags directly
pass
elif event['type'] == "realm_emoji":
state['realm_emoji'] = event['realm_emoji']
elif event['type'] == "alert_words":
state['alert_words'] = event['alert_words']
elif event['type'] == "muted_topics":
state['muted_topics'] = event["muted_topics"]
elif event['type'] == "realm_filters":
state['realm_filters'] = event["realm_filters"]
elif event['type'] == "update_display_settings":
if event['setting_name'] == "twenty_four_hour_time":
state['twenty_four_hour_time'] = event["setting"]
if event['setting_name'] == 'left_side_userlist':
state['left_side_userlist'] = event["setting"]
else:
raise ValueError("Unexpected event type %s" % (event['type'],))
def do_events_register(user_profile, user_client, apply_markdown=True,
event_types=None, queue_lifespan_secs=0, all_public_streams=False,
narrow=[]):
# type: (UserProfile, Client, bool, Optional[Iterable[str]], int, bool, Iterable[Sequence[text_type]]) -> Dict[str, Any]
# Technically we don't need to check this here because
# build_narrow_filter will check it, but it's nicer from an error
# handling perspective to do it before contacting Tornado
check_supported_events_narrow_filter(narrow)
queue_id = request_event_queue(user_profile, user_client, apply_markdown,
queue_lifespan_secs, event_types, all_public_streams,
narrow=narrow)
if queue_id is None:
raise JsonableError(_("Could not allocate event queue"))
if event_types is not None:
event_types_set = set(event_types) # type: Optional[Set[str]]
else:
event_types_set = None
ret = fetch_initial_state_data(user_profile, event_types_set, queue_id)
# Apply events that came in while we were fetching initial data
events = get_user_events(user_profile, queue_id, -1)
apply_events(ret, events, user_profile)
if events:
ret['last_event_id'] = events[-1]['id']
else:
ret['last_event_id'] = -1
return ret
def do_send_confirmation_email(invitee, referrer):
# type: (PreregistrationUser, UserProfile) -> None
"""
Send the confirmation/welcome e-mail to an invited user.
`invitee` is a PreregistrationUser.
`referrer` is a UserProfile.
"""
subject_template_path = 'confirmation/invite_email_subject.txt'
body_template_path = 'confirmation/invite_email_body.txt'
context = {'referrer': referrer,
'support_email': settings.ZULIP_ADMINISTRATOR,
'verbose_support_offers': settings.VERBOSE_SUPPORT_OFFERS}
if referrer.realm.is_zephyr_mirror_realm:
subject_template_path = 'confirmation/mituser_invite_email_subject.txt'
body_template_path = 'confirmation/mituser_invite_email_body.txt'
Confirmation.objects.send_confirmation(
invitee, invitee.email, additional_context=context,
subject_template_path=subject_template_path,
body_template_path=body_template_path, host=referrer.realm.host)
@statsd_increment("push_notifications")
def handle_push_notification(user_profile_id, missed_message):
# type: (int, Dict[str, Any]) -> None
try:
user_profile = get_user_profile_by_id(user_profile_id)
if not (receives_offline_notifications(user_profile) or receives_online_notifications(user_profile)):
return
umessage = UserMessage.objects.get(user_profile=user_profile,
message__id=missed_message['message_id'])
message = umessage.message
if umessage.flags.read:
return
sender_str = message.sender.full_name
apple = num_push_devices_for_user(user_profile, kind=PushDeviceToken.APNS)
android = num_push_devices_for_user(user_profile, kind=PushDeviceToken.GCM)
if apple or android:
# TODO: set badge count in a better way
# Determine what alert string to display based on the missed messages
if message.recipient.type == Recipient.HUDDLE:
alert = "New private group message from %s" % (sender_str,)
elif message.recipient.type == Recipient.PERSONAL:
alert = "New private message from %s" % (sender_str,)
elif message.recipient.type == Recipient.STREAM:
alert = "New mention from %s" % (sender_str,)
else:
alert = "New Zulip mentions and private messages from %s" % (sender_str,)
if apple:
apple_extra_data = {'message_ids': [message.id]}
send_apple_push_notification(user_profile, alert, badge=1, zulip=apple_extra_data)
if android:
content = message.content
content_truncated = (len(content) > 200)
if content_truncated:
content = content[:200] + "..."
android_data = {
'user': user_profile.email,
'event': 'message',
'alert': alert,
'zulip_message_id': message.id, # message_id is reserved for CCS
'time': datetime_to_timestamp(message.pub_date),
'content': content,
'content_truncated': content_truncated,
'sender_email': message.sender.email,
'sender_full_name': message.sender.full_name,
'sender_avatar_url': get_avatar_url(message.sender.avatar_source, message.sender.email),
}
if message.recipient.type == Recipient.STREAM:
android_data['recipient_type'] = "stream"
android_data['stream'] = get_display_recipient(message.recipient)
android_data['topic'] = message.subject
elif message.recipient.type in (Recipient.HUDDLE, Recipient.PERSONAL):
android_data['recipient_type'] = "private"
send_android_push_notification(user_profile, android_data)
except UserMessage.DoesNotExist:
logging.error("Could not find UserMessage with message_id %s" %(missed_message['message_id'],))
def is_inactive(email):
# type: (text_type) -> None
try:
if get_user_profile_by_email(email).is_active:
raise ValidationError(u'%s is already active' % (email,))
except UserProfile.DoesNotExist:
pass
def user_email_is_unique(email):
# type: (text_type) -> None
try:
get_user_profile_by_email(email)
raise ValidationError(u'%s is already registered' % (email,))
except UserProfile.DoesNotExist:
pass
def do_invite_users(user_profile, invitee_emails, streams):
# type: (UserProfile, SizedTextIterable, Iterable[Stream]) -> Tuple[Optional[str], Dict[str, List[Tuple[text_type, str]]]]
new_prereg_users = [] # type: List[PreregistrationUser]
errors = [] # type: List[Tuple[text_type, str]]
skipped = [] # type: List[Tuple[text_type, str]]
ret_error = None # type: Optional[str]
ret_error_data = {} # type: Dict[str, List[Tuple[text_type, str]]]
for email in invitee_emails:
if email == '':
continue
try:
validators.validate_email(email)
except ValidationError:
errors.append((email, _("Invalid address.")))
continue
if not email_allowed_for_realm(email, user_profile.realm):
errors.append((email, _("Outside your domain.")))
continue
try:
existing_user_profile = get_user_profile_by_email(email)
except UserProfile.DoesNotExist:
existing_user_profile = None
try:
if existing_user_profile is not None and existing_user_profile.is_mirror_dummy:
# Mirror dummy users to be activated must be inactive
is_inactive(email)
else:
# Other users should not already exist at all.
user_email_is_unique(email)
except ValidationError:
skipped.append((email, _("Already has an account.")))
continue
# The logged in user is the referrer.
prereg_user = PreregistrationUser(email=email, referred_by=user_profile)
# We save twice because you cannot associate a ManyToMany field
# on an unsaved object.
prereg_user.save()
prereg_user.streams = streams
prereg_user.save()
new_prereg_users.append(prereg_user)
if errors:
ret_error = _("Some emails did not validate, so we didn't send any invitations.")
ret_error_data = {'errors': errors}
if skipped and len(skipped) == len(invitee_emails):
# All e-mails were skipped, so we didn't actually invite anyone.
ret_error = _("We weren't able to invite anyone.")
ret_error_data = {'errors': skipped}
return ret_error, ret_error_data
# If we encounter an exception at any point before now, there are no unwanted side-effects,
# since it is totally fine to have duplicate PreregistrationUsers
for user in new_prereg_users:
event = {"email": user.email, "referrer_email": user_profile.email}
queue_json_publish("invites", event,
lambda event: do_send_confirmation_email(user, user_profile))
if skipped:
ret_error = _("Some of those addresses are already using Zulip, "
"so we didn't send them an invitation. We did send "
"invitations to everyone else!")
ret_error_data = {'errors': skipped}
return ret_error, ret_error_data
def send_referral_event(user_profile):
# type: (UserProfile) -> None
event = dict(type="referral",
referrals=dict(granted=user_profile.invites_granted,
used=user_profile.invites_used))
send_event(event, [user_profile.id])
def do_refer_friend(user_profile, email):
# type: (UserProfile, text_type) -> None
content = ('Referrer: "%s" <%s>\n'
'Realm: %s\n'
'Referred: %s') % (user_profile.full_name, user_profile.email,
user_profile.realm.domain, email)
subject = "Zulip referral: %s" % (email,)
from_email = '"%s" <%s>' % (user_profile.full_name, '[email protected]')
to_email = '"Zulip Referrals" <[email protected]>'
headers = {'Reply-To' : '"%s" <%s>' % (user_profile.full_name, user_profile.email,)}
msg = EmailMessage(subject, content, from_email, [to_email], headers=headers)
msg.send()
referral = Referral(user_profile=user_profile, email=email)
referral.save()
user_profile.invites_used += 1
user_profile.save(update_fields=['invites_used'])
send_referral_event(user_profile)
def notify_realm_emoji(realm):
# type: (Realm) -> None
event = dict(type="realm_emoji", op="update",
realm_emoji=realm.get_emoji())
user_ids = [userdict['id'] for userdict in get_active_user_dicts_in_realm(realm)]
send_event(event, user_ids)
def check_add_realm_emoji(realm, name, img_url):
# type: (Realm, text_type, text_type) -> None
emoji = RealmEmoji(realm=realm, name=name, img_url=img_url)
emoji.full_clean()
emoji.save()
notify_realm_emoji(realm)
def do_remove_realm_emoji(realm, name):
# type: (Realm, text_type) -> None
RealmEmoji.objects.get(realm=realm, name=name).delete()
notify_realm_emoji(realm)
def notify_alert_words(user_profile, words):
# type: (UserProfile, Iterable[text_type]) -> None
event = dict(type="alert_words", alert_words=words)
send_event(event, [user_profile.id])
def do_add_alert_words(user_profile, alert_words):
# type: (UserProfile, Iterable[text_type]) -> None
words = add_user_alert_words(user_profile, alert_words)
notify_alert_words(user_profile, words)
def do_remove_alert_words(user_profile, alert_words):
# type: (UserProfile, Iterable[text_type]) -> None
words = remove_user_alert_words(user_profile, alert_words)
notify_alert_words(user_profile, words)
def do_set_alert_words(user_profile, alert_words):
# type: (UserProfile, List[text_type]) -> None
set_user_alert_words(user_profile, alert_words)
notify_alert_words(user_profile, alert_words)
def do_set_muted_topics(user_profile, muted_topics):
# type: (UserProfile, Union[List[List[text_type]], List[Tuple[text_type, text_type]]]) -> None
user_profile.muted_topics = ujson.dumps(muted_topics)
user_profile.save(update_fields=['muted_topics'])
event = dict(type="muted_topics", muted_topics=muted_topics)
send_event(event, [user_profile.id])
def notify_realm_filters(realm):
# type: (Realm) -> None
realm_filters = realm_filters_for_domain(realm.domain)
user_ids = [userdict['id'] for userdict in get_active_user_dicts_in_realm(realm)]
event = dict(type="realm_filters", realm_filters=realm_filters)
send_event(event, user_ids)
# NOTE: Regexes must be simple enough that they can be easily translated to JavaScript
# RegExp syntax. In addition to JS-compatible syntax, the following features are available:
# * Named groups will be converted to numbered groups automatically
# * Inline-regex flags will be stripped, and where possible translated to RegExp-wide flags
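# Illustrative example (pattern and URL are made up): do_add_realm_filter(realm,
# u"#(?P<id>[0-9]+)", u"https://trac.example.com/ticket/%(id)s") linkifies
# "#123" in messages to https://trac.example.com/ticket/123.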
def do_add_realm_filter(realm, pattern, url_format_string):
# type: (Realm, text_type, text_type) -> int
pattern = pattern.strip()
url_format_string = url_format_string.strip()
realm_filter = RealmFilter(
realm=realm, pattern=pattern,
url_format_string=url_format_string)
realm_filter.full_clean()
realm_filter.save()
notify_realm_filters(realm)
return realm_filter.id
def do_remove_realm_filter(realm, pattern=None, id=None):
# type: (Realm, Optional[text_type], Optional[int]) -> None
if pattern is not None:
RealmFilter.objects.get(realm=realm, pattern=pattern).delete()
else:
RealmFilter.objects.get(realm=realm, pk=id).delete()
notify_realm_filters(realm)
def get_emails_from_user_ids(user_ids):
# type: (Sequence[int]) -> Dict[int, text_type]
# We may eventually use memcached to speed this up, but the DB is fast.
return UserProfile.emails_from_ids(user_ids)
def realm_aliases(realm):
# type: (Realm) -> List[text_type]
return [alias.domain for alias in realm.realmalias_set.all()]
def get_occupied_streams(realm):
# type: (Realm) -> QuerySet
# TODO: Make a generic stub for QuerySet
""" Get streams with subscribers """
subs_filter = Subscription.objects.filter(active=True, user_profile__realm=realm,
user_profile__is_active=True).values('recipient_id')
stream_ids = Recipient.objects.filter(
type=Recipient.STREAM, id__in=subs_filter).values('type_id')
return Stream.objects.filter(id__in=stream_ids, realm=realm, deactivated=False)
def do_get_streams(user_profile, include_public=True, include_subscribed=True,
include_all_active=False, include_default=False):
# type: (UserProfile, bool, bool, bool, bool) -> List[Dict[str, Any]]
if include_all_active and not user_profile.is_api_super_user:
raise JsonableError(_("User not authorized for this query"))
    # Listing public streams is disabled for Zephyr mirroring realms.
include_public = include_public and not user_profile.realm.is_zephyr_mirror_realm
# Start out with all streams in the realm with subscribers
query = get_occupied_streams(user_profile.realm)
if not include_all_active:
user_subs = Subscription.objects.select_related("recipient").filter(
active=True, user_profile=user_profile,
recipient__type=Recipient.STREAM)
if include_subscribed:
recipient_check = Q(id__in=[sub.recipient.type_id for sub in user_subs])
if include_public:
invite_only_check = Q(invite_only=False)
if include_subscribed and include_public:
query = query.filter(recipient_check | invite_only_check)
elif include_public:
query = query.filter(invite_only_check)
elif include_subscribed:
query = query.filter(recipient_check)
else:
# We're including nothing, so don't bother hitting the DB.
query = []
streams = [(row.to_dict()) for row in query]
streams.sort(key=lambda elt: elt["name"])
if include_default:
is_default = {}
default_streams = get_default_streams_for_realm(user_profile.realm)
for default_stream in default_streams:
is_default[default_stream.id] = True
for stream in streams:
stream['is_default'] = is_default.get(stream["stream_id"], False)
return streams
def do_claim_attachments(message):
# type: (Message) -> List[Tuple[text_type, bool]]
attachment_url_list = attachment_url_re.findall(message.content)
results = []
for url in attachment_url_list:
path_id = attachment_url_to_path_id(url)
user_profile = message.sender
is_message_realm_public = False
if message.recipient.type == Recipient.STREAM:
is_message_realm_public = Stream.objects.get(id=message.recipient.type_id).is_public()
if path_id is not None:
is_claimed = claim_attachment(user_profile, path_id, message,
is_message_realm_public)
results.append((path_id, is_claimed))
return results
def do_delete_old_unclaimed_attachments(weeks_ago):
# type: (int) -> None
old_unclaimed_attachments = get_old_unclaimed_attachments(weeks_ago)
for attachment in old_unclaimed_attachments:
delete_message_image(attachment.path_id)
attachment.delete()
def check_attachment_reference_change(prev_content, message):
# type: (text_type, Message) -> None
new_content = message.content
prev_attachments = set(attachment_url_re.findall(prev_content))
new_attachments = set(attachment_url_re.findall(new_content))
to_remove = list(prev_attachments - new_attachments)
path_ids = []
for url in to_remove:
path_id = attachment_url_to_path_id(url)
path_ids.append(path_id)
attachments_to_update = Attachment.objects.filter(path_id__in=path_ids).select_for_update()
message.attachment_set.remove(*attachments_to_update)
to_add = list(new_attachments - prev_attachments)
if len(to_add) > 0:
do_claim_attachments(message)
| apache-2.0 | -3,383,029,409,355,278,000 | 43.527949 | 253 | 0.623226 | false |
ESS-LLP/erpnext | erpnext/regional/report/gstr_2/gstr_2.py | 7 | 6769 | # Copyright (c) 2013, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from datetime import date
from erpnext.regional.report.gstr_1.gstr_1 import Gstr1Report
def execute(filters=None):
return Gstr2Report(filters).run()
class Gstr2Report(Gstr1Report):
def __init__(self, filters=None):
self.filters = frappe._dict(filters or {})
self.columns = []
self.data = []
self.doctype = "Purchase Invoice"
self.tax_doctype = "Purchase Taxes and Charges"
self.select_columns = """
name as invoice_number,
supplier_name,
posting_date,
base_grand_total,
base_rounded_total,
supplier_gstin,
place_of_supply,
ecommerce_gstin,
reverse_charge,
invoice_type,
return_against,
is_return,
invoice_type,
export_type,
reason_for_issuing_document,
eligibility_for_itc,
itc_integrated_tax,
itc_central_tax,
itc_state_tax,
itc_cess_amount
"""
def get_data(self):
self.get_igst_invoices()
for inv, items_based_on_rate in self.items_based_on_tax_rate.items():
invoice_details = self.invoices.get(inv)
for rate, items in items_based_on_rate.items():
row, taxable_value = self.get_row_data_for_invoice(inv, invoice_details, rate, items)
tax_amount = taxable_value * rate / 100
if inv in self.igst_invoices:
row += [tax_amount, 0, 0]
else:
row += [0, tax_amount / 2, tax_amount / 2]
row += [
self.invoice_cess.get(inv),
invoice_details.get('eligibility_for_itc'),
invoice_details.get('itc_integrated_tax'),
invoice_details.get('itc_central_tax'),
invoice_details.get('itc_state_tax'),
invoice_details.get('itc_cess_amount')
]
if self.filters.get("type_of_business") == "CDNR":
row.append("Y" if invoice_details.posting_date <= date(2017, 7, 1) else "N")
row.append("C" if invoice_details.return_against else "R")
self.data.append(row)
def get_igst_invoices(self):
self.igst_invoices = []
for d in self.tax_details:
is_igst = True if d[1] in self.gst_accounts.igst_account else False
if is_igst and d[0] not in self.igst_invoices:
self.igst_invoices.append(d[0])
def get_conditions(self):
conditions = ""
for opts in (("company", " and company=%(company)s"),
("from_date", " and posting_date>=%(from_date)s"),
("to_date", " and posting_date<=%(to_date)s")):
if self.filters.get(opts[0]):
conditions += opts[1]
if self.filters.get("type_of_business") == "B2B":
conditions += "and ifnull(invoice_type, '') != 'Export' and is_return != 1 "
elif self.filters.get("type_of_business") == "CDNR":
conditions += """ and is_return = 1 """
return conditions
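	# Illustrative example: with company, from_date and to_date filters set and
	# type_of_business "B2B", the returned string is a series of " and ..." clauses,
	# e.g. " and company=%(company)s ... and ifnull(invoice_type, '') != 'Export' and is_return != 1 ".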
def get_columns(self):
self.tax_columns = [
{
"fieldname": "rate",
"label": "Rate",
"fieldtype": "Int",
"width": 60
},
{
"fieldname": "taxable_value",
"label": "Taxable Value",
"fieldtype": "Currency",
"width": 100
},
{
"fieldname": "integrated_tax_paid",
"label": "Integrated Tax Paid",
"fieldtype": "Currency",
"width": 100
},
{
"fieldname": "central_tax_paid",
"label": "Central Tax Paid",
"fieldtype": "Currency",
"width": 100
},
{
"fieldname": "state_tax_paid",
"label": "State/UT Tax Paid",
"fieldtype": "Currency",
"width": 100
},
{
"fieldname": "cess_amount",
"label": "Cess Paid",
"fieldtype": "Currency",
"width": 100
},
{
"fieldname": "eligibility_for_itc",
"label": "Eligibility For ITC",
"fieldtype": "Data",
"width": 100
},
{
"fieldname": "itc_integrated_tax",
"label": "Availed ITC Integrated Tax",
"fieldtype": "Currency",
"width": 100
},
{
"fieldname": "itc_central_tax",
"label": "Availed ITC Central Tax",
"fieldtype": "Currency",
"width": 100
},
{
"fieldname": "itc_state_tax",
"label": "Availed ITC State/UT Tax",
"fieldtype": "Currency",
"width": 100
},
{
"fieldname": "itc_cess_amount",
"label": "Availed ITC Cess ",
"fieldtype": "Currency",
"width": 100
}
]
self.other_columns = []
if self.filters.get("type_of_business") == "B2B":
self.invoice_columns = [
{
"fieldname": "supplier_gstin",
"label": "GSTIN of Supplier",
"fieldtype": "Data",
"width": 120
},
{
"fieldname": "invoice_number",
"label": "Invoice Number",
"fieldtype": "Link",
"options": "Purchase Invoice",
"width": 120
},
{
"fieldname": "posting_date",
"label": "Invoice date",
"fieldtype": "Date",
"width": 120
},
{
"fieldname": "invoice_value",
"label": "Invoice Value",
"fieldtype": "Currency",
"width": 120
},
{
"fieldname": "place_of_supply",
"label": "Place of Supply",
"fieldtype": "Data",
"width": 120
},
{
"fieldname": "reverse_charge",
"label": "Reverse Charge",
"fieldtype": "Data",
"width": 80
},
{
"fieldname": "invoice_type",
"label": "Invoice Type",
"fieldtype": "Data",
"width": 80
}
]
elif self.filters.get("type_of_business") == "CDNR":
self.invoice_columns = [
{
"fieldname": "supplier_gstin",
"label": "GSTIN of Supplier",
"fieldtype": "Data",
"width": 120
},
{
"fieldname": "invoice_number",
"label": "Note/Refund Voucher Number",
"fieldtype": "Link",
"options": "Purchase Invoice"
},
{
"fieldname": "posting_date",
"label": "Note/Refund Voucher date",
"fieldtype": "Date",
"width": 120
},
{
"fieldname": "return_against",
"label": "Invoice/Advance Payment Voucher Number",
"fieldtype": "Link",
"options": "Purchase Invoice",
"width": 120
},
{
"fieldname": "posting_date",
"label": "Invoice/Advance Payment Voucher date",
"fieldtype": "Date",
"width": 120
},
{
"fieldname": "reason_for_issuing_document",
"label": "Reason For Issuing document",
"fieldtype": "Data",
"width": 120
},
{
"fieldname": "supply_type",
"label": "Supply Type",
"fieldtype": "Data",
"width": 120
},
{
"fieldname": "invoice_value",
"label": "Invoice Value",
"fieldtype": "Currency",
"width": 120
}
]
self.other_columns = [
{
"fieldname": "pre_gst",
"label": "PRE GST",
"fieldtype": "Data",
"width": 50
},
{
"fieldname": "document_type",
"label": "Document Type",
"fieldtype": "Data",
"width": 50
}
]
self.columns = self.invoice_columns + self.tax_columns + self.other_columns
| gpl-3.0 | 4,715,858,335,174,644,000 | 23.614545 | 89 | 0.578667 | false |
D3f0/coreemu | daemon/core/misc/xmlparser.py | 4 | 1267 | # CORE
# Copyright (c) 2014 The Boeing Company.
# See the LICENSE file included in this distribution.
from xml.dom.minidom import parse
from xmlutils import getoneelement
from xmlparser0 import CoreDocumentParser0
class CoreVersionParser(object):
'''\
Helper class to check the version of Network Plan document. This
simply looks for a "Scenario" element; when present, this
indicates a 0.0 version document. The dom member is set in order
to prevent parsing a file twice (it can be passed to the
appropriate CoreDocumentParser class.)
'''
def __init__(self, filename, options={}):
if 'dom' in options:
self.dom = options['dom']
else:
self.dom = parse(filename)
self.scenario = getoneelement(self.dom, 'Scenario')
if self.scenario is not None:
self.version = 0.0
else:
self.version = 'unknown'
def core_document_parser(session, filename, options):
vp = CoreVersionParser(filename, options)
if 'dom' not in options:
options['dom'] = vp.dom
if vp.version == 0.0:
doc = CoreDocumentParser0(session, filename, options)
else:
raise ValueError, 'unsupported document version: %s' % vp.version
return doc
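# Illustrative usage (the session object is an assumption, not defined here):
#   doc = core_document_parser(session, "/path/to/scenario.xml", {})
# A ValueError is raised for documents whose detected version is not 0.0.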
| bsd-2-clause | -3,061,204,226,982,964,000 | 34.194444 | 73 | 0.662194 | false |
PressLabs/silver | silver/migrations/0008_auto_20150430_1804.py | 2 | 3210 | # Copyright (c) 2015 Presslabs SRL
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('silver', '0007_auto_20150430_1549'),
]
operations = [
migrations.AlterField(
model_name='documententry',
name='quantity',
field=models.DecimalField(
max_digits=19,
decimal_places=4,
validators=[django.core.validators.MinValueValidator(0.0)]),
),
migrations.AlterField(
model_name='documententry',
name='unit_price',
field=models.DecimalField(max_digits=19, decimal_places=4),
),
migrations.AlterField(
model_name='meteredfeature',
name='included_units',
field=models.DecimalField(
help_text=b'The number of included units per plan interval.',
max_digits=19,
decimal_places=4,
validators=[django.core.validators.MinValueValidator(0.0)]),
),
migrations.AlterField(
model_name='meteredfeature',
name='included_units_during_trial',
field=models.DecimalField(
decimal_places=4,
validators=[django.core.validators.MinValueValidator(0.0)],
max_digits=19,
blank=True,
help_text=b'The number of included units during the trial period.',
null=True),
),
migrations.AlterField(
model_name='meteredfeature',
name='price_per_unit',
field=models.DecimalField(
help_text=b'The price per unit.',
max_digits=19,
decimal_places=4,
validators=[django.core.validators.MinValueValidator(0.0)]),
),
migrations.AlterField(
model_name='meteredfeatureunitslog',
name='consumed_units',
field=models.DecimalField(
max_digits=19,
decimal_places=4,
validators=[django.core.validators.MinValueValidator(0.0)]),
),
migrations.AlterField(
model_name='plan',
name='amount',
field=models.DecimalField(
help_text=b'The amount in the specified currency to be charged on the interval specified.',
max_digits=19,
decimal_places=4,
validators=[django.core.validators.MinValueValidator(0.0)]),
),
]
| apache-2.0 | 1,217,616,252,393,458,400 | 35.067416 | 107 | 0.585358 | false |
mdibaiee/servo | components/script/dom/bindings/codegen/GlobalGen.py | 20 | 2610 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# We do one global pass over all the WebIDL to generate our prototype enum
# and generate information for subsequent phases.
import sys
sys.path.append("./parser/")
sys.path.append("./ply/")
import os
import WebIDL
import cPickle
from Configuration import Configuration
from CodegenRust import GlobalGenRoots, replaceFileIfChanged
def generate_file(config, name, filename):
root = getattr(GlobalGenRoots, name)(config)
code = root.define()
if replaceFileIfChanged(filename, code):
print "Generating %s" % (filename)
else:
print "%s hasn't changed - not touching it" % (filename)
def main():
# Parse arguments.
from optparse import OptionParser
usageString = "usage: %prog [options] webidldir [files]"
o = OptionParser(usage=usageString)
o.add_option("--cachedir", dest='cachedir', default=None,
help="Directory in which to cache lex/parse tables.")
o.add_option("--verbose-errors", action='store_true', default=False,
help="When an error happens, display the Python traceback.")
(options, args) = o.parse_args()
if len(args) < 2:
o.error(usageString)
configFile = args[0]
baseDir = args[1]
fileList = args[2:]
# Parse the WebIDL.
parser = WebIDL.Parser(options.cachedir)
for filename in fileList:
fullPath = os.path.normpath(os.path.join(baseDir, filename))
f = open(fullPath, 'rb')
lines = f.readlines()
f.close()
parser.parse(''.join(lines), fullPath)
parserResults = parser.finish()
# Write the parser results out to a pickle.
resultsFile = open('ParserResults.pkl', 'wb')
cPickle.dump(parserResults, resultsFile, -1)
resultsFile.close()
# Load the configuration.
config = Configuration(configFile, parserResults)
# Generate the prototype list.
generate_file(config, 'PrototypeList', 'PrototypeList.rs')
# Generate the common code.
generate_file(config, 'RegisterBindings', 'RegisterBindings.rs')
# Generate the type list.
generate_file(config, 'InterfaceTypes', 'InterfaceTypes.rs')
    # Generate the inheritance type list.
generate_file(config, 'InheritTypes', 'InheritTypes.rs')
# Generate the module declarations.
generate_file(config, 'Bindings', 'Bindings/mod.rs')
generate_file(config, 'UnionTypes', 'UnionTypes.rs')
if __name__ == '__main__':
main()
| mpl-2.0 | -3,315,116,233,648,265,700 | 30.829268 | 77 | 0.675479 | false |
rmanoni/mi-instrument | mi/core/object.py | 2 | 21665 | #!/usr/bin/env python
__author__ = 'Adam R. Smith, Michael Meisinger, Tom Lennan'
import os
import re
import inspect
from collections import OrderedDict, Mapping, Iterable
from mi.core.log import log
from mi.core.exceptions import BadRequest
BUILT_IN_ATTRS = {'_id', '_rev', 'type_', 'blame_', 'persisted_version'}
class IonObjectBase(object):
def __str__(self):
ds = str(self.__dict__)
try:
# Remove the type_ from the dict str - cheaper this way than copying the dict
typeidx = ds.find("'type_': '")
if typeidx:
endidx = ds.find("'", typeidx+10)
if ds[typeidx-2] == ",":
typeidx -= 2
ds = ds[:typeidx] + ds[endidx+1:]
except Exception as ex:
log.warn("Could not create IonObject __str__ representation")
# This is a more eye pleasing variant but does not eval
return "%s(%s)" % (self.__class__.__name__, ds)
def __repr__(self):
return self.__str__()
def __eq__(self, other):
if type(other) == type(self):
if other.__dict__ == self.__dict__:
return True
return False
def __getitem__(self, key):
return getattr(self, key)
def __setitem__(self, key, value):
return setattr(self, key, value)
def __contains__(self, item):
return hasattr(self, item)
def has_key(self, key):
return hasattr(self, key)
def _validate(self):
"""
Compare fields to the schema and raise AttributeError if mismatched.
Named _validate instead of validate because the data may have a field named "validate".
"""
fields, schema = self.__dict__, self._schema
# Check for extra fields not defined in the schema
extra_fields = fields.viewkeys() - schema.viewkeys() - BUILT_IN_ATTRS
if len(extra_fields) > 0:
raise AttributeError('Fields found that are not in the schema: %r' % (list(extra_fields)))
required_decorator = 'Required'
content_type_decorator = 'ContentType'
content_count_decorator = 'ContentCount'
value_range_decorator = 'ValueRange'
value_pattern_decorator = 'ValuePattern'
# Check required field criteria met
for key in schema:
if 'decorators' in schema[key] and required_decorator in schema[key]['decorators']:
if not key in fields or fields[key] is None:
raise AttributeError('Required value "%s" not set' % key)
# Check each attribute
for key in fields.iterkeys():
if key in BUILT_IN_ATTRS:
continue
schema_val = schema[key]
# Correct any float or long types that got downgraded to int
if isinstance(fields[key], int):
if schema_val['type'] == 'float':
fields[key] = float(fields[key])
elif schema_val['type'] == 'long':
fields[key] = long(fields[key])
# argh, annoying work around for OrderedDict vs dict issue
if type(fields[key]) == dict and schema_val['type'] == 'OrderedDict':
fields[key] = OrderedDict(fields[key])
# Basic type checking
field_val = fields[key]
if type(field_val).__name__ != schema_val['type']:
# if the schema doesn't define a type, we can't very well validate it
if schema_val['type'] == 'NoneType':
continue
# Allow unicode instead of str. This may be too lenient.
if schema_val['type'] == 'str' and type(field_val).__name__ == 'unicode':
continue
# Already checked for required above. Assume optional and continue
if field_val is None:
continue
# IonObjects are ok for dict fields too!
if isinstance(field_val, IonObjectBase) and schema_val['type'] == 'OrderedDict':
continue
if not key in fields or fields[key] is None:
raise AttributeError('Required value "%s" not set' % key)
# Check for inheritance
if self.check_inheritance_chain(type(field_val), schema_val['type']):
continue
# Check enum types
from pyon.core.registry import enum_classes
if isinstance(field_val, int) and schema_val['type'] in enum_classes:
if field_val not in enum_classes(schema_val['type'])._str_map:
raise AttributeError('Invalid enum value "%d" for field "%s.%s", should be between 1 and %d' %
(fields[key], type(self).__name__, key, len(enum_classes(schema_val['type'])._str_map)))
else:
continue
if type(field_val) == tuple and schema_val['type'] == 'list':
continue
if isinstance(field_val, IonObjectBase) and schema_val['type'] == 'dict':
log.warn('TODO: Please convert generic dict attribute type to abstract type for field "%s.%s"' % (type(self).__name__, key))
continue
# Special case check for ION object being passed where default type is dict or str
if 'decorators' in schema_val:
if content_type_decorator in schema_val['decorators']:
if isinstance(field_val, IonObjectBase) and schema_val['type'] == 'dict' or schema_val['type'] == 'str':
self.check_content(key, field_val, schema_val['decorators'][content_type_decorator])
continue
raise AttributeError('Invalid type "%s" for field "%s.%s", should be "%s"' %
(type(fields[key]), type(self).__name__, key, schema_val['type']))
if type(field_val).__name__ == 'str':
if value_pattern_decorator in schema_val['decorators']:
self.check_string_pattern_match(key, field_val, schema_val['decorators'][value_pattern_decorator])
if type(field_val).__name__ in ['int', 'float', 'long']:
if value_range_decorator in schema_val['decorators']:
self.check_numeric_value_range(key, field_val, schema_val['decorators'][value_range_decorator])
if 'decorators' in schema_val:
if content_type_decorator in schema_val['decorators']:
if schema_val['type'] == 'list':
self.check_collection_content(key, field_val, schema_val['decorators'][content_type_decorator])
elif schema_val['type'] == 'dict' or schema_val['type'] == 'OrderedDict':
self.check_collection_content(key, field_val.values(), schema_val['decorators'][content_type_decorator])
else:
self.check_content(key, field_val, schema_val['decorators'][content_type_decorator])
if content_count_decorator in schema_val['decorators']:
if schema_val['type'] == 'list':
self.check_collection_length(key, field_val, schema_val['decorators'][content_count_decorator])
if schema_val['type'] == 'dict' or schema_val['type'] == 'OrderedDict':
self.check_collection_length(key, field_val.values(), schema_val['decorators'][content_count_decorator])
if isinstance(field_val, IonObjectBase):
field_val._validate()
# Next validate only IonObjects found in child collections.
# Note that this is non-recursive; only for first-level collections.
elif isinstance(field_val, Mapping):
for subkey in field_val:
subval = field_val[subkey]
if isinstance(subval, IonObjectBase):
subval._validate()
elif isinstance(field_val, Iterable):
for subval in field_val:
if isinstance(subval, IonObjectBase):
subval._validate()
def _get_type(self):
return self.__class__.__name__
def _get_extends(self):
parents = [parent.__name__ for parent in self.__class__.__mro__ if parent.__name__ not in ['IonObjectBase', 'object', self._get_type()]]
return parents
def update(self, other):
"""
        Update this object's attributes from another object.
        The other object must be of the same type or a supertype.
"""
if type(other) != type(self):
bases = inspect.getmro(self.__class__)
if other.__class__ not in bases:
raise BadRequest("Object %s and %s do not have compatible types for update" % (type(self).__name__, type(other).__name__))
for key in other.__dict__:
setattr(self, key, other.__dict__[key])
#Decorator methods
def get_class_decorator_value(self, decorator):
if getattr(self, '_class_info'):
if self._class_info['decorators'].has_key(decorator):
return self._class_info['decorators'][decorator]
return None
def is_decorator(self, field, decorator):
if self._schema[field]['decorators'].has_key(decorator):
return True
return False
def get_decorator_value(self, field, decorator):
if self._schema[field]['decorators'].has_key(decorator):
return self._schema[field]['decorators'][decorator]
return None
def find_field_for_decorator(self, decorator='', decorator_value=None):
'''
        This method iterates over the fields of the object and looks for the first field
that has the specified decorator and decorator value, if supplied.
@param decorator: The decorator on the field to be searched for
@param decorator_value: An optional value to search on
@return fld: The name of the field that has the decorator
'''
for fld in self._schema:
if self.is_decorator(fld, decorator ):
if decorator_value is not None and self.get_decorator_value(fld, decorator) == decorator_value:
return fld
else:
return fld
return None
# Decorator validation methods
def check_string_pattern_match(self, key, value, pattern):
m = re.match(pattern, value)
if not m:
raise AttributeError('Invalid value pattern %s for field "%s.%s", should match regular expression %s' %
(value, type(self).__name__, key, pattern))
def check_numeric_value_range(self, key, value, value_range):
if ',' in value_range:
min = eval(value_range.split(',')[0].strip())
max = eval(value_range.split(',')[1].strip())
else:
min = max = eval(value_range.split(',')[0].strip())
if value < min or value > max:
raise AttributeError('Invalid value %s for field "%s.%s", should be between %d and %d' %
(str(value), type(self).__name__, key, min, max))
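    # Illustrative example: a field declared with ValueRange="0, 100" accepts values
    # from 0 through 100 inclusive; a single value such as "5" requires exactly 5.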
def check_inheritance_chain(self, typ, expected_type):
for baseclz in typ.__bases__:
if baseclz.__name__ == expected_type.strip():
return True
if baseclz.__name__ == "object":
return False
else:
val = self.check_inheritance_chain(baseclz, expected_type)
return val
return False
def check_collection_content(self, key, list_values, content_types):
split_content_types = []
if ',' in content_types:
split_content_types = content_types.split(',')
else:
split_content_types.append(content_types)
for value in list_values:
match_found = False
for content_type in split_content_types:
#First check for valid ION types
from pyon.core.registry import issubtype
if isinstance(value, dict) and value.has_key('type_'):
if value['type_'] == content_type.strip() or issubtype(value['type_'], content_type.strip()):
match_found = True
break
if type(value).__name__ == content_type.strip():
match_found = True
break
# Check for inheritance
if self.check_inheritance_chain(type(value), content_type):
match_found = True
break
if not match_found:
raise AttributeError('Invalid value type %s in collection field "%s.%s", should be one of "%s"' %
(str(list_values), type(self).__name__, key, content_types))
def check_content(self, key, value, content_types):
split_content_types = []
if ',' in content_types:
split_content_types = content_types.split(',')
else:
split_content_types.append(content_types)
log.trace("split_content_types: %s", split_content_types)
for content_type in split_content_types:
if type(value).__name__ == content_type.strip():
return
# Check for inheritance
if self.check_inheritance_chain(type(value), content_type):
return
raise AttributeError('Invalid value type %s in field "%s.%s", should be one of "%s"' %
(str(value), type(self).__name__, key, content_types))
def check_collection_length(self, key, list_values, length):
if ',' in length:
min = int(length.split(',')[0].strip())
max = int(length.split(',')[1].strip())
else:
min = max = int(length.split(',')[0].strip())
if len(list_values) < min or len(list_values) > max:
raise AttributeError('Invalid value length for collection field "%s.%s", should be between %d and %d' %
(type(self).__name__, key, min, max))
class IonMessageObjectBase(IonObjectBase):
pass
def walk(o, cb, modify_key_value = 'value'):
"""
Utility method to do recursive walking of a possible iterable (inc dicts) and do inline transformations.
You supply a callback which receives an object. That object may be an iterable (which will then be walked
after you return it, as long as it remains an iterable), or it may be another object inside of that.
If a dict is discovered and
if modify_key_value = 'key', callback will modify only keys
if modify_key_value = 'key_value', callback will modify both keys and values
else callback will modify only values
"""
newo = cb(o)
if isinstance(newo, dict):
if modify_key_value == 'key':
return dict(((cb(k), v) for k, v in newo.iteritems()))
elif modify_key_value == 'key_value':
return dict(((cb(k), walk(v, cb, 'key_value')) for k, v in newo.iteritems()))
else:
return dict(((k, walk(v, cb)) for k, v in newo.iteritems()))
elif isinstance(newo, (list, tuple, set)):
return [walk(x, cb, modify_key_value) for x in newo]
elif isinstance(newo, IonObjectBase):
# IOs are not iterable and are a huge pain to make them look iterable, special casing is fine then
fields, set_fields = newo.__dict__, newo._schema
for fieldname in set_fields:
fieldval = getattr(newo, fieldname)
newfo = walk(fieldval, cb, modify_key_value)
if newfo != fieldval:
setattr(newo, fieldname, newfo)
return newo
else:
return newo
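# Illustrative example: walk({'a': 1, 'b': [2, 3]}, lambda x: x * 2 if isinstance(x, int) else x)
# returns {'a': 2, 'b': [4, 6]}; the callback sees every nested value, and keys are left
# untouched unless modify_key_value is 'key' or 'key_value'.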
class IonObjectSerializationBase(object):
"""
Base serialization class for serializing/deserializing IonObjects.
Provides the operate method, which walks and applies a transform method. The operate method is
renamed serialize/deserialize in derived classes.
At this base level, the _transform method is undefined - you must pass one in. Using
IonObjectSerializer or IonObjectDeserializer defines them for you.
"""
def __init__(self, transform_method=None, **kwargs):
self._transform_method = transform_method or self._transform
def operate(self, obj):
return walk(obj, self._transform_method)
def _transform(self, obj):
raise NotImplementedError("Implement _transform in a derived class")
class IonObjectSerializer(IonObjectSerializationBase):
"""
Serializer for IonObjects.
Defines a _transform method to turn IonObjects into dictionaries to be deserialized by
an IonObjectDeserializer.
Used when being written to Datastore.
"""
def _transform(self, update_version=False):
def _transform(obj):
if isinstance(obj, IonObjectBase):
res = {k:v for k, v in obj.__dict__.iteritems() if k in obj._schema or k in BUILT_IN_ATTRS}
if not 'type_' in res:
res['type_'] = obj._get_type()
# update persisted_version if serializing for persistence
if update_version and 'TypeVersion' in obj._class_info['decorators']:
# convert TypeVersion in decorator from string to int
# because the object_model_generator converts TypeVersion to string
res['persisted_version'] = obj._class_info['decorators']['TypeVersion']
return res
return obj
return _transform
def serialize(self, obj, update_version=False):
self._transform_method = self._transform(update_version)
return IonObjectSerializationBase.operate(self, obj)
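# Illustrative round trip (assumes `registry` is an object-registry instance):
#   flat = IonObjectSerializer().serialize(ion_obj)
#   obj = IonObjectDeserializer(obj_registry=registry).deserialize(flat)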
class IonObjectBlameSerializer(IonObjectSerializer):
def _transform(self, obj):
res = IonObjectSerializer._transform(self, obj)
blame = None
try:
blame = os.environ["BLAME"]
except:
pass
if blame and isinstance(obj, IonObjectBase):
res["blame_"] = blame
return res
class IonObjectDeserializer(IonObjectSerializationBase):
"""
Deserializer for IonObjects.
Defines a _transform method to transform dictionaries produced by IonObjectSerializer back
into IonObjects. You *MUST* pass an object registry
"""
deserialize = IonObjectSerializationBase.operate
def __init__(self, transform_method=None, obj_registry=None, **kwargs):
assert obj_registry
self._obj_registry = obj_registry
IonObjectSerializationBase.__init__(self, transform_method=transform_method)
def _transform(self, obj):
# Note: This check to detect an IonObject is a bit risky (only type_)
if isinstance(obj, dict) and "type_" in obj:
objc = obj
otype = objc['type_'].encode('ascii') # Correct?
# don't supply a dict - we want the object to initialize with all its defaults intact,
# which preserves things like IonEnumObject and invokes the setattr behavior we want there.
ion_obj = self._obj_registry.new(otype)
# get outdated attributes in data that are not defined in the current schema
extra_attributes = objc.viewkeys() - ion_obj._schema.viewkeys() - BUILT_IN_ATTRS
for extra in extra_attributes:
objc.pop(extra)
log.info('discard %s not in current schema' % extra)
for k, v in objc.iteritems():
# unicode translate to utf8
if isinstance(v, unicode):
v = str(v.encode('utf8'))
# CouchDB adds _attachments and puts metadata in it
# in pyon metadata is in the document
# so we discard _attachments while transforming between the two
if k not in ("type_", "_attachments", "_conflicts"):
setattr(ion_obj, k, v)
if k == "_conflicts":
                    log.warn("CouchDB conflict detected for ID=%s (ignored): %s", obj.get('_id', None), v)
return ion_obj
return obj
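# Illustrative sketch (not part of the original module): turning a stored dict back
# into an IonObject requires the object registry that knows the current schemas.
# The helper and its arguments are hypothetical.
def _example_deserialize_from_store(doc, registry):
    """Hypothetical helper rebuilding an IonObject from a persisted dict."""
    return IonObjectDeserializer(obj_registry=registry).deserialize(doc)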
class IonObjectBlameDeserializer(IonObjectDeserializer):
def _transform(self, obj):
def handle_ion_obj(in_obj):
objc = in_obj.copy()
type = objc['type_'].encode('ascii')
# don't supply a dict - we want the object to initialize with all its defaults intact,
# which preserves things like IonEnumObject and invokes the setattr behavior we want there.
ion_obj = self._obj_registry.new(type)
for k, v in objc.iteritems():
if k != "type_":
setattr(ion_obj, k, v)
return ion_obj
# Note: This check to detect an IonObject is a bit risky (only type_)
if isinstance(obj, dict):
if "blame_" in obj:
if "type_" in obj:
return handle_ion_obj(obj)
else:
obj.pop("blame_")
else:
if "type_" in obj:
return handle_ion_obj(obj)
return obj
ion_serializer = IonObjectSerializer()
| bsd-2-clause | -5,931,472,128,882,139,000 | 39.495327 | 144 | 0.571152 | false |
HydrelioxGitHub/home-assistant | homeassistant/components/locative/device_tracker.py | 3 | 1318 | """
Support for the Locative platform.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.locative/
"""
import logging
from homeassistant.components.device_tracker import \
DOMAIN as DEVICE_TRACKER_DOMAIN
from homeassistant.components.locative import DOMAIN as LOCATIVE_DOMAIN
from homeassistant.components.locative import TRACKER_UPDATE
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import slugify
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['locative']
DATA_KEY = '{}.{}'.format(LOCATIVE_DOMAIN, DEVICE_TRACKER_DOMAIN)
async def async_setup_entry(hass, entry, async_see):
"""Configure a dispatcher connection based on a config entry."""
async def _set_location(device, gps_location, location_name):
"""Fire HA event to set location."""
await async_see(
dev_id=slugify(device),
gps=gps_location,
location_name=location_name
)
hass.data[DATA_KEY] = async_dispatcher_connect(
hass, TRACKER_UPDATE, _set_location
)
return True
async def async_unload_entry(hass, entry):
"""Unload the config entry and remove the dispatcher connection."""
hass.data[DATA_KEY]()
return True
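# Illustrative sketch (not part of the original component): a webhook handler would
# notify this tracker by firing TRACKER_UPDATE from within the event loop; the
# connection made in async_setup_entry then forwards it to async_see. The device
# name and coordinates below are hypothetical.
from homeassistant.helpers.dispatcher import async_dispatcher_send
def _example_report_location(hass):
    """Hypothetical helper firing a location update for this tracker."""
    async_dispatcher_send(
        hass, TRACKER_UPDATE, 'phone_anna', (52.52, 13.40), 'home')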
| apache-2.0 | 4,214,745,285,256,520,700 | 30.380952 | 74 | 0.719272 | false |
Tehsmash/ironic | ironic/netconf.py | 2 | 1590 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
from oslo.config import cfg
CONF = cfg.CONF
def _get_my_ip():
"""Returns the actual ip of the local machine.
This code figures out what source address would be used if some traffic
were to be sent out to some well known address on the Internet. In this
case, a Google DNS server is used, but the specific address does not
matter much. No traffic is actually sent.
"""
try:
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return "127.0.0.1"
netconf_opts = [
cfg.StrOpt('my_ip',
default=_get_my_ip(),
help='IP address of this host.'),
]
CONF.register_opts(netconf_opts)
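# Illustrative sketch (not part of the original module): after register_opts the
# detected address is available as CONF.my_ip and can be overridden through the
# usual oslo.config mechanisms (config file or command line).
def _example_read_my_ip():
    """Hypothetical helper showing how the registered option would be consumed."""
    return CONF.my_ip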
| apache-2.0 | 3,006,686,439,345,713,700 | 31.44898 | 78 | 0.683019 | false |
Giswater/giswater_qgis_plugin | sys_manager.py | 1 | 1247 | # -*- coding: utf-8 -*-
"""
This file is part of Giswater 3
The program is free software: you can redistribute it and/or modify it under the terms of the GNU
General Public License as published by the Free Software Foundation, either version 3 of the License,
or (at your option) any later version.
"""
import os
import sys
import subprocess
import time
import webbrowser
def open_file(file_path):
try:
        # Check if the file exists
if os.path.exists(file_path):
# Open file
if sys.platform == "win32":
os.startfile(file_path)
else:
opener = "open" if sys.platform == "darwin" else "xdg-open"
subprocess.call([opener, file_path])
else:
webbrowser.open(file_path)
    except Exception:
        return False
    return True
def manage_tstamp(prefix_name='log', tstamp_format='%Y%m%d', extension='.log'):
tstamp = str(time.strftime(tstamp_format))
    name = prefix_name + "_" + tstamp + extension
return name
def get_file_with_parents(filepath, levels=1):
common = filepath
for i in range(levels + 1):
common = os.path.dirname(common)
return os.path.relpath(filepath, common)
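# Illustrative usage sketch (not part of the original module); the file path and
# level count below are hypothetical.
def _example_usage():
    """Hypothetical helper exercising the utilities above."""
    log_name = manage_tstamp(prefix_name='giswater', extension='.log')
    relative = get_file_with_parents('/tmp/plugins/giswater/sys_manager.py', levels=2)
    opened = open_file('/tmp/' + log_name)
    return log_name, relative, opened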
| gpl-3.0 | -6,133,701,337,317,172,000 | 25.531915 | 101 | 0.626303 | false |
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_07_01/operations/_network_interfaces_operations.py | 1 | 64223 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class NetworkInterfacesOperations(object):
"""NetworkInterfacesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2018_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
network_interface_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkInterface"
"""Gets information about the specified network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_07_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
parameters, # type: "_models.NetworkInterface"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkInterface"
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'NetworkInterface')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
network_interface_name, # type: str
parameters, # type: "_models.NetworkInterface"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.NetworkInterface"]
"""Creates or updates a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to the create or update network interface operation.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.NetworkInterface
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either NetworkInterface or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.NetworkInterface]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def _update_tags_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkInterface"
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
network_interface_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.NetworkInterface"]
"""Updates a network interface tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param parameters: Parameters supplied to update network interface tags.
:type parameters: ~azure.mgmt.network.v2018_07_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either NetworkInterface or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.NetworkInterface]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkInterfaceListResult"]
"""Gets all network interfaces in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/networkInterfaces'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkInterfaceListResult"]
"""Gets all network interfaces in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces'} # type: ignore
def _get_effective_route_table_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.EffectiveRouteListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EffectiveRouteListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
# Construct URL
url = self._get_effective_route_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_get_effective_route_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore
def begin_get_effective_route_table(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.EffectiveRouteListResult"]
"""Gets all route tables applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either EffectiveRouteListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.EffectiveRouteListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.EffectiveRouteListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._get_effective_route_table_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('EffectiveRouteListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_get_effective_route_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveRouteTable'} # type: ignore
def _list_effective_network_security_groups_initial(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Optional["_models.EffectiveNetworkSecurityGroupListResult"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.EffectiveNetworkSecurityGroupListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2018-07-01"
accept = "application/json"
# Construct URL
url = self._list_effective_network_security_groups_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_effective_network_security_groups_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore
def begin_list_effective_network_security_groups(
self,
resource_group_name, # type: str
network_interface_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.EffectiveNetworkSecurityGroupListResult"]
"""Gets all network security groups applied to a network interface.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: Pass in True if you'd like the ARMPolling polling method,
False for no polling, or your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either EffectiveNetworkSecurityGroupListResult or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2018_07_01.models.EffectiveNetworkSecurityGroupListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.EffectiveNetworkSecurityGroupListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._list_effective_network_security_groups_initial(
resource_group_name=resource_group_name,
network_interface_name=network_interface_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('EffectiveNetworkSecurityGroupListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_effective_network_security_groups.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/networkInterfaces/{networkInterfaceName}/effectiveNetworkSecurityGroups'} # type: ignore
def list_virtual_machine_scale_set_vm_network_interfaces(
self,
resource_group_name, # type: str
virtual_machine_scale_set_name, # type: str
virtualmachine_index, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkInterfaceListResult"]
"""Gets information about all network interfaces in a virtual machine in a virtual machine scale
set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_vm_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_vm_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces'} # type: ignore
def list_virtual_machine_scale_set_network_interfaces(
self,
resource_group_name, # type: str
virtual_machine_scale_set_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkInterfaceListResult"]
"""Gets all network interfaces in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_network_interfaces.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_network_interfaces.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/networkInterfaces'} # type: ignore
def get_virtual_machine_scale_set_network_interface(
self,
resource_group_name, # type: str
virtual_machine_scale_set_name, # type: str
virtualmachine_index, # type: str
network_interface_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkInterface"
"""Get the specified network interface in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterface, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_07_01.models.NetworkInterface
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterface"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_network_interface.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterface', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_network_interface.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}'} # type: ignore
def list_virtual_machine_scale_set_ip_configurations(
self,
resource_group_name, # type: str
virtual_machine_scale_set_name, # type: str
virtualmachine_index, # type: str
network_interface_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.NetworkInterfaceIPConfigurationListResult"]
"""Get the specified network interface ip configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either NetworkInterfaceIPConfigurationListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfigurationListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceIPConfigurationListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_virtual_machine_scale_set_ip_configurations.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('NetworkInterfaceIPConfigurationListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_virtual_machine_scale_set_ip_configurations.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations'} # type: ignore
def get_virtual_machine_scale_set_ip_configuration(
self,
resource_group_name, # type: str
virtual_machine_scale_set_name, # type: str
virtualmachine_index, # type: str
network_interface_name, # type: str
ip_configuration_name, # type: str
expand=None, # type: Optional[str]
**kwargs # type: Any
):
# type: (...) -> "_models.NetworkInterfaceIPConfiguration"
"""Get the specified network interface ip configuration in a virtual machine scale set.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_machine_scale_set_name: The name of the virtual machine scale set.
:type virtual_machine_scale_set_name: str
:param virtualmachine_index: The virtual machine index.
:type virtualmachine_index: str
:param network_interface_name: The name of the network interface.
:type network_interface_name: str
:param ip_configuration_name: The name of the ip configuration.
:type ip_configuration_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: NetworkInterfaceIPConfiguration, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2018_07_01.models.NetworkInterfaceIPConfiguration
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.NetworkInterfaceIPConfiguration"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-03-30"
accept = "application/json"
# Construct URL
url = self.get_virtual_machine_scale_set_ip_configuration.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualMachineScaleSetName': self._serialize.url("virtual_machine_scale_set_name", virtual_machine_scale_set_name, 'str'),
'virtualmachineIndex': self._serialize.url("virtualmachine_index", virtualmachine_index, 'str'),
'networkInterfaceName': self._serialize.url("network_interface_name", network_interface_name, 'str'),
'ipConfigurationName': self._serialize.url("ip_configuration_name", ip_configuration_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('NetworkInterfaceIPConfiguration', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_virtual_machine_scale_set_ip_configuration.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/microsoft.Compute/virtualMachineScaleSets/{virtualMachineScaleSetName}/virtualMachines/{virtualmachineIndex}/networkInterfaces/{networkInterfaceName}/ipConfigurations/{ipConfigurationName}'} # type: ignore
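    # Illustrative usage sketch (``network_client`` and the resource names are
    # assumptions, not part of this module):
    #
    #     ip_config = network_client.network_interfaces.get_virtual_machine_scale_set_ip_configuration(
    #         resource_group_name="my-rg",
    #         virtual_machine_scale_set_name="my-vmss",
    #         virtualmachine_index="0",
    #         network_interface_name="nic-0",
    #         ip_configuration_name="ipconfig1",
    #     )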
| mit | 6,053,468,105,619,516,000 | 50.751007 | 354 | 0.646015 | false |
anbangleo/NlsdeWeb | Python-3.6.0/Lib/test/test_asyncio/test_events.py | 1 | 102515 | """Tests for events.py."""
import collections.abc
import functools
import gc
import io
import os
import platform
import re
import signal
import socket
try:
import ssl
except ImportError:
ssl = None
import subprocess
import sys
import threading
import time
import errno
import unittest
from unittest import mock
import weakref
if sys.platform != 'win32':
import tty
import asyncio
from asyncio import coroutines
from asyncio import proactor_events
from asyncio import selector_events
from asyncio import sslproto
from asyncio import test_utils
try:
from test import support
except ImportError:
from asyncio import test_support as support
def data_file(filename):
if hasattr(support, 'TEST_HOME_DIR'):
fullname = os.path.join(support.TEST_HOME_DIR, filename)
if os.path.isfile(fullname):
return fullname
fullname = os.path.join(os.path.dirname(__file__), filename)
if os.path.isfile(fullname):
return fullname
raise FileNotFoundError(filename)
def osx_tiger():
"""Return True if the platform is Mac OS 10.4 or older."""
if sys.platform != 'darwin':
return False
version = platform.mac_ver()[0]
version = tuple(map(int, version.split('.')))
return version < (10, 5)
ONLYCERT = data_file('ssl_cert.pem')
ONLYKEY = data_file('ssl_key.pem')
SIGNED_CERTFILE = data_file('keycert3.pem')
SIGNING_CA = data_file('pycacert.pem')
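# Expected parsed peer certificate for SIGNED_CERTFILE; the verified-SSL tests
# below compare it against transport.get_extra_info('peercert').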
PEERCERT = {'serialNumber': 'B09264B1F2DA21D1',
'version': 1,
'subject': ((('countryName', 'XY'),),
(('localityName', 'Castle Anthrax'),),
(('organizationName', 'Python Software Foundation'),),
(('commonName', 'localhost'),)),
'issuer': ((('countryName', 'XY'),),
(('organizationName', 'Python Software Foundation CA'),),
(('commonName', 'our-ca-server'),)),
'notAfter': 'Nov 13 19:47:07 2022 GMT',
'notBefore': 'Jan 4 19:47:07 2013 GMT'}
class MyBaseProto(asyncio.Protocol):
connected = None
done = None
def __init__(self, loop=None):
self.transport = None
self.state = 'INITIAL'
self.nbytes = 0
if loop is not None:
self.connected = asyncio.Future(loop=loop)
self.done = asyncio.Future(loop=loop)
def connection_made(self, transport):
self.transport = transport
assert self.state == 'INITIAL', self.state
self.state = 'CONNECTED'
if self.connected:
self.connected.set_result(None)
def data_received(self, data):
assert self.state == 'CONNECTED', self.state
self.nbytes += len(data)
def eof_received(self):
assert self.state == 'CONNECTED', self.state
self.state = 'EOF'
def connection_lost(self, exc):
assert self.state in ('CONNECTED', 'EOF'), self.state
self.state = 'CLOSED'
if self.done:
self.done.set_result(None)
class MyProto(MyBaseProto):
def connection_made(self, transport):
super().connection_made(transport)
transport.write(b'GET / HTTP/1.0\r\nHost: example.com\r\n\r\n')
class MyDatagramProto(asyncio.DatagramProtocol):
done = None
def __init__(self, loop=None):
self.state = 'INITIAL'
self.nbytes = 0
if loop is not None:
self.done = asyncio.Future(loop=loop)
def connection_made(self, transport):
self.transport = transport
assert self.state == 'INITIAL', self.state
self.state = 'INITIALIZED'
def datagram_received(self, data, addr):
assert self.state == 'INITIALIZED', self.state
self.nbytes += len(data)
def error_received(self, exc):
assert self.state == 'INITIALIZED', self.state
def connection_lost(self, exc):
assert self.state == 'INITIALIZED', self.state
self.state = 'CLOSED'
if self.done:
self.done.set_result(None)
class MyReadPipeProto(asyncio.Protocol):
done = None
def __init__(self, loop=None):
self.state = ['INITIAL']
self.nbytes = 0
self.transport = None
if loop is not None:
self.done = asyncio.Future(loop=loop)
def connection_made(self, transport):
self.transport = transport
assert self.state == ['INITIAL'], self.state
self.state.append('CONNECTED')
def data_received(self, data):
assert self.state == ['INITIAL', 'CONNECTED'], self.state
self.nbytes += len(data)
def eof_received(self):
assert self.state == ['INITIAL', 'CONNECTED'], self.state
self.state.append('EOF')
def connection_lost(self, exc):
if 'EOF' not in self.state:
self.state.append('EOF') # It is okay if EOF is missed.
assert self.state == ['INITIAL', 'CONNECTED', 'EOF'], self.state
self.state.append('CLOSED')
if self.done:
self.done.set_result(None)
class MyWritePipeProto(asyncio.BaseProtocol):
done = None
def __init__(self, loop=None):
self.state = 'INITIAL'
self.transport = None
if loop is not None:
self.done = asyncio.Future(loop=loop)
def connection_made(self, transport):
self.transport = transport
assert self.state == 'INITIAL', self.state
self.state = 'CONNECTED'
def connection_lost(self, exc):
assert self.state == 'CONNECTED', self.state
self.state = 'CLOSED'
if self.done:
self.done.set_result(None)
class MySubprocessProtocol(asyncio.SubprocessProtocol):
def __init__(self, loop):
self.state = 'INITIAL'
self.transport = None
self.connected = asyncio.Future(loop=loop)
self.completed = asyncio.Future(loop=loop)
self.disconnects = {fd: asyncio.Future(loop=loop) for fd in range(3)}
self.data = {1: b'', 2: b''}
self.returncode = None
self.got_data = {1: asyncio.Event(loop=loop),
2: asyncio.Event(loop=loop)}
def connection_made(self, transport):
self.transport = transport
assert self.state == 'INITIAL', self.state
self.state = 'CONNECTED'
self.connected.set_result(None)
def connection_lost(self, exc):
assert self.state == 'CONNECTED', self.state
self.state = 'CLOSED'
self.completed.set_result(None)
def pipe_data_received(self, fd, data):
assert self.state == 'CONNECTED', self.state
self.data[fd] += data
self.got_data[fd].set()
def pipe_connection_lost(self, fd, exc):
assert self.state == 'CONNECTED', self.state
if exc:
self.disconnects[fd].set_exception(exc)
else:
self.disconnects[fd].set_result(exc)
def process_exited(self):
assert self.state == 'CONNECTED', self.state
self.returncode = self.transport.get_returncode()
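# The helper protocols above record their state transitions (e.g. 'INITIAL' ->
# 'CONNECTED' -> 'CLOSED'), byte counts and completion futures; the mixin below
# drives them through real transports and asserts on those recordings.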
class EventLoopTestsMixin:
def setUp(self):
super().setUp()
self.loop = self.create_event_loop()
self.set_event_loop(self.loop)
def tearDown(self):
# just in case if we have transport close callbacks
if not self.loop.is_closed():
test_utils.run_briefly(self.loop)
self.loop.close()
gc.collect()
super().tearDown()
def test_run_until_complete_nesting(self):
@asyncio.coroutine
def coro1():
yield
@asyncio.coroutine
def coro2():
self.assertTrue(self.loop.is_running())
self.loop.run_until_complete(coro1())
self.assertRaises(
RuntimeError, self.loop.run_until_complete, coro2())
# Note: because of the default Windows timing granularity of
# 15.6 msec, we use fairly long sleep times here (~100 msec).
def test_run_until_complete(self):
t0 = self.loop.time()
self.loop.run_until_complete(asyncio.sleep(0.1, loop=self.loop))
t1 = self.loop.time()
self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0)
def test_run_until_complete_stopped(self):
@asyncio.coroutine
def cb():
self.loop.stop()
yield from asyncio.sleep(0.1, loop=self.loop)
task = cb()
self.assertRaises(RuntimeError,
self.loop.run_until_complete, task)
def test_call_later(self):
results = []
def callback(arg):
results.append(arg)
self.loop.stop()
self.loop.call_later(0.1, callback, 'hello world')
t0 = time.monotonic()
self.loop.run_forever()
t1 = time.monotonic()
self.assertEqual(results, ['hello world'])
self.assertTrue(0.08 <= t1-t0 <= 0.8, t1-t0)
def test_call_soon(self):
results = []
def callback(arg1, arg2):
results.append((arg1, arg2))
self.loop.stop()
self.loop.call_soon(callback, 'hello', 'world')
self.loop.run_forever()
self.assertEqual(results, [('hello', 'world')])
def test_call_soon_threadsafe(self):
results = []
lock = threading.Lock()
def callback(arg):
results.append(arg)
if len(results) >= 2:
self.loop.stop()
def run_in_thread():
self.loop.call_soon_threadsafe(callback, 'hello')
lock.release()
lock.acquire()
t = threading.Thread(target=run_in_thread)
t.start()
with lock:
self.loop.call_soon(callback, 'world')
self.loop.run_forever()
t.join()
self.assertEqual(results, ['hello', 'world'])
def test_call_soon_threadsafe_same_thread(self):
results = []
def callback(arg):
results.append(arg)
if len(results) >= 2:
self.loop.stop()
self.loop.call_soon_threadsafe(callback, 'hello')
self.loop.call_soon(callback, 'world')
self.loop.run_forever()
self.assertEqual(results, ['hello', 'world'])
def test_run_in_executor(self):
def run(arg):
return (arg, threading.get_ident())
f2 = self.loop.run_in_executor(None, run, 'yo')
res, thread_id = self.loop.run_until_complete(f2)
self.assertEqual(res, 'yo')
self.assertNotEqual(thread_id, threading.get_ident())
def test_reader_callback(self):
r, w = test_utils.socketpair()
r.setblocking(False)
bytes_read = bytearray()
def reader():
try:
data = r.recv(1024)
except BlockingIOError:
# Spurious readiness notifications are possible
# at least on Linux -- see man select.
return
if data:
bytes_read.extend(data)
else:
self.assertTrue(self.loop.remove_reader(r.fileno()))
r.close()
self.loop.add_reader(r.fileno(), reader)
self.loop.call_soon(w.send, b'abc')
test_utils.run_until(self.loop, lambda: len(bytes_read) >= 3)
self.loop.call_soon(w.send, b'def')
test_utils.run_until(self.loop, lambda: len(bytes_read) >= 6)
self.loop.call_soon(w.close)
self.loop.call_soon(self.loop.stop)
self.loop.run_forever()
self.assertEqual(bytes_read, b'abcdef')
def test_writer_callback(self):
r, w = test_utils.socketpair()
w.setblocking(False)
def writer(data):
w.send(data)
self.loop.stop()
data = b'x' * 1024
self.loop.add_writer(w.fileno(), writer, data)
self.loop.run_forever()
self.assertTrue(self.loop.remove_writer(w.fileno()))
self.assertFalse(self.loop.remove_writer(w.fileno()))
w.close()
read = r.recv(len(data) * 2)
r.close()
self.assertEqual(read, data)
def _basetest_sock_client_ops(self, httpd, sock):
if not isinstance(self.loop, proactor_events.BaseProactorEventLoop):
            # in debug mode, socket operations must fail
            # if the socket is in blocking mode
self.loop.set_debug(True)
sock.setblocking(True)
with self.assertRaises(ValueError):
self.loop.run_until_complete(
self.loop.sock_connect(sock, httpd.address))
with self.assertRaises(ValueError):
self.loop.run_until_complete(
self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n'))
with self.assertRaises(ValueError):
self.loop.run_until_complete(
self.loop.sock_recv(sock, 1024))
with self.assertRaises(ValueError):
self.loop.run_until_complete(
self.loop.sock_accept(sock))
# test in non-blocking mode
sock.setblocking(False)
self.loop.run_until_complete(
self.loop.sock_connect(sock, httpd.address))
self.loop.run_until_complete(
self.loop.sock_sendall(sock, b'GET / HTTP/1.0\r\n\r\n'))
data = self.loop.run_until_complete(
self.loop.sock_recv(sock, 1024))
# consume data
self.loop.run_until_complete(
self.loop.sock_recv(sock, 1024))
sock.close()
self.assertTrue(data.startswith(b'HTTP/1.0 200 OK'))
def test_sock_client_ops(self):
with test_utils.run_test_server() as httpd:
sock = socket.socket()
self._basetest_sock_client_ops(httpd, sock)
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_unix_sock_client_ops(self):
with test_utils.run_test_unix_server() as httpd:
sock = socket.socket(socket.AF_UNIX)
self._basetest_sock_client_ops(httpd, sock)
def test_sock_client_fail(self):
# Make sure that we will get an unused port
address = None
try:
s = socket.socket()
s.bind(('127.0.0.1', 0))
address = s.getsockname()
finally:
s.close()
sock = socket.socket()
sock.setblocking(False)
with self.assertRaises(ConnectionRefusedError):
self.loop.run_until_complete(
self.loop.sock_connect(sock, address))
sock.close()
def test_sock_accept(self):
listener = socket.socket()
listener.setblocking(False)
listener.bind(('127.0.0.1', 0))
listener.listen(1)
client = socket.socket()
client.connect(listener.getsockname())
f = self.loop.sock_accept(listener)
conn, addr = self.loop.run_until_complete(f)
self.assertEqual(conn.gettimeout(), 0)
self.assertEqual(addr, client.getsockname())
self.assertEqual(client.getpeername(), listener.getsockname())
client.close()
conn.close()
listener.close()
@unittest.skipUnless(hasattr(signal, 'SIGKILL'), 'No SIGKILL')
def test_add_signal_handler(self):
caught = 0
def my_handler():
nonlocal caught
caught += 1
# Check error behavior first.
self.assertRaises(
TypeError, self.loop.add_signal_handler, 'boom', my_handler)
self.assertRaises(
TypeError, self.loop.remove_signal_handler, 'boom')
self.assertRaises(
ValueError, self.loop.add_signal_handler, signal.NSIG+1,
my_handler)
self.assertRaises(
ValueError, self.loop.remove_signal_handler, signal.NSIG+1)
self.assertRaises(
ValueError, self.loop.add_signal_handler, 0, my_handler)
self.assertRaises(
ValueError, self.loop.remove_signal_handler, 0)
self.assertRaises(
ValueError, self.loop.add_signal_handler, -1, my_handler)
self.assertRaises(
ValueError, self.loop.remove_signal_handler, -1)
self.assertRaises(
RuntimeError, self.loop.add_signal_handler, signal.SIGKILL,
my_handler)
# Removing SIGKILL doesn't raise, since we don't call signal().
self.assertFalse(self.loop.remove_signal_handler(signal.SIGKILL))
# Now set a handler and handle it.
self.loop.add_signal_handler(signal.SIGINT, my_handler)
os.kill(os.getpid(), signal.SIGINT)
test_utils.run_until(self.loop, lambda: caught)
# Removing it should restore the default handler.
self.assertTrue(self.loop.remove_signal_handler(signal.SIGINT))
self.assertEqual(signal.getsignal(signal.SIGINT),
signal.default_int_handler)
# Removing again returns False.
self.assertFalse(self.loop.remove_signal_handler(signal.SIGINT))
@unittest.skipUnless(hasattr(signal, 'SIGALRM'), 'No SIGALRM')
def test_signal_handling_while_selecting(self):
# Test with a signal actually arriving during a select() call.
caught = 0
def my_handler():
nonlocal caught
caught += 1
self.loop.stop()
self.loop.add_signal_handler(signal.SIGALRM, my_handler)
signal.setitimer(signal.ITIMER_REAL, 0.01, 0) # Send SIGALRM once.
self.loop.run_forever()
self.assertEqual(caught, 1)
@unittest.skipUnless(hasattr(signal, 'SIGALRM'), 'No SIGALRM')
def test_signal_handling_args(self):
some_args = (42,)
caught = 0
def my_handler(*args):
nonlocal caught
caught += 1
self.assertEqual(args, some_args)
self.loop.add_signal_handler(signal.SIGALRM, my_handler, *some_args)
signal.setitimer(signal.ITIMER_REAL, 0.1, 0) # Send SIGALRM once.
self.loop.call_later(0.5, self.loop.stop)
self.loop.run_forever()
self.assertEqual(caught, 1)
def _basetest_create_connection(self, connection_fut, check_sockname=True):
tr, pr = self.loop.run_until_complete(connection_fut)
self.assertIsInstance(tr, asyncio.Transport)
self.assertIsInstance(pr, asyncio.Protocol)
self.assertIs(pr.transport, tr)
if check_sockname:
self.assertIsNotNone(tr.get_extra_info('sockname'))
self.loop.run_until_complete(pr.done)
self.assertGreater(pr.nbytes, 0)
tr.close()
def test_create_connection(self):
with test_utils.run_test_server() as httpd:
conn_fut = self.loop.create_connection(
lambda: MyProto(loop=self.loop), *httpd.address)
self._basetest_create_connection(conn_fut)
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_connection(self):
# Issue #20682: On Mac OS X Tiger, getsockname() returns a
# zero-length address for UNIX socket.
check_sockname = not osx_tiger()
with test_utils.run_test_unix_server() as httpd:
conn_fut = self.loop.create_unix_connection(
lambda: MyProto(loop=self.loop), httpd.address)
self._basetest_create_connection(conn_fut, check_sockname)
def test_create_connection_sock(self):
with test_utils.run_test_server() as httpd:
sock = None
infos = self.loop.run_until_complete(
self.loop.getaddrinfo(
*httpd.address, type=socket.SOCK_STREAM))
for family, type, proto, cname, address in infos:
try:
sock = socket.socket(family=family, type=type, proto=proto)
sock.setblocking(False)
self.loop.run_until_complete(
self.loop.sock_connect(sock, address))
except:
pass
else:
break
else:
assert False, 'Can not create socket.'
f = self.loop.create_connection(
lambda: MyProto(loop=self.loop), sock=sock)
tr, pr = self.loop.run_until_complete(f)
self.assertIsInstance(tr, asyncio.Transport)
self.assertIsInstance(pr, asyncio.Protocol)
self.loop.run_until_complete(pr.done)
self.assertGreater(pr.nbytes, 0)
tr.close()
def check_ssl_extra_info(self, client, check_sockname=True,
peername=None, peercert={}):
if check_sockname:
self.assertIsNotNone(client.get_extra_info('sockname'))
if peername:
self.assertEqual(peername,
client.get_extra_info('peername'))
else:
self.assertIsNotNone(client.get_extra_info('peername'))
self.assertEqual(peercert,
client.get_extra_info('peercert'))
# test SSL cipher
cipher = client.get_extra_info('cipher')
self.assertIsInstance(cipher, tuple)
self.assertEqual(len(cipher), 3, cipher)
self.assertIsInstance(cipher[0], str)
self.assertIsInstance(cipher[1], str)
self.assertIsInstance(cipher[2], int)
# test SSL object
sslobj = client.get_extra_info('ssl_object')
self.assertIsNotNone(sslobj)
self.assertEqual(sslobj.compression(),
client.get_extra_info('compression'))
self.assertEqual(sslobj.cipher(),
client.get_extra_info('cipher'))
self.assertEqual(sslobj.getpeercert(),
client.get_extra_info('peercert'))
self.assertEqual(sslobj.compression(),
client.get_extra_info('compression'))
def _basetest_create_ssl_connection(self, connection_fut,
check_sockname=True,
peername=None):
tr, pr = self.loop.run_until_complete(connection_fut)
self.assertIsInstance(tr, asyncio.Transport)
self.assertIsInstance(pr, asyncio.Protocol)
self.assertTrue('ssl' in tr.__class__.__name__.lower())
self.check_ssl_extra_info(tr, check_sockname, peername)
self.loop.run_until_complete(pr.done)
self.assertGreater(pr.nbytes, 0)
tr.close()
def _test_create_ssl_connection(self, httpd, create_connection,
check_sockname=True, peername=None):
conn_fut = create_connection(ssl=test_utils.dummy_ssl_context())
self._basetest_create_ssl_connection(conn_fut, check_sockname,
peername)
# ssl.Purpose was introduced in Python 3.4
if hasattr(ssl, 'Purpose'):
def _dummy_ssl_create_context(purpose=ssl.Purpose.SERVER_AUTH, *,
cafile=None, capath=None,
cadata=None):
"""
A ssl.create_default_context() replacement that doesn't enable
cert validation.
"""
self.assertEqual(purpose, ssl.Purpose.SERVER_AUTH)
return test_utils.dummy_ssl_context()
# With ssl=True, ssl.create_default_context() should be called
with mock.patch('ssl.create_default_context',
side_effect=_dummy_ssl_create_context) as m:
conn_fut = create_connection(ssl=True)
self._basetest_create_ssl_connection(conn_fut, check_sockname,
peername)
self.assertEqual(m.call_count, 1)
# With the real ssl.create_default_context(), certificate
# validation will fail
with self.assertRaises(ssl.SSLError) as cm:
conn_fut = create_connection(ssl=True)
# Ignore the "SSL handshake failed" log in debug mode
with test_utils.disable_logger():
self._basetest_create_ssl_connection(conn_fut, check_sockname,
peername)
self.assertEqual(cm.exception.reason, 'CERTIFICATE_VERIFY_FAILED')
@unittest.skipIf(ssl is None, 'No ssl module')
def test_create_ssl_connection(self):
with test_utils.run_test_server(use_ssl=True) as httpd:
create_connection = functools.partial(
self.loop.create_connection,
lambda: MyProto(loop=self.loop),
*httpd.address)
self._test_create_ssl_connection(httpd, create_connection,
peername=httpd.address)
def test_legacy_create_ssl_connection(self):
with test_utils.force_legacy_ssl_support():
self.test_create_ssl_connection()
@unittest.skipIf(ssl is None, 'No ssl module')
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_ssl_unix_connection(self):
# Issue #20682: On Mac OS X Tiger, getsockname() returns a
# zero-length address for UNIX socket.
check_sockname = not osx_tiger()
with test_utils.run_test_unix_server(use_ssl=True) as httpd:
create_connection = functools.partial(
self.loop.create_unix_connection,
lambda: MyProto(loop=self.loop), httpd.address,
server_hostname='127.0.0.1')
self._test_create_ssl_connection(httpd, create_connection,
check_sockname,
peername=httpd.address)
def test_legacy_create_ssl_unix_connection(self):
with test_utils.force_legacy_ssl_support():
self.test_create_ssl_unix_connection()
def test_create_connection_local_addr(self):
with test_utils.run_test_server() as httpd:
port = support.find_unused_port()
f = self.loop.create_connection(
lambda: MyProto(loop=self.loop),
*httpd.address, local_addr=(httpd.address[0], port))
tr, pr = self.loop.run_until_complete(f)
expected = pr.transport.get_extra_info('sockname')[1]
self.assertEqual(port, expected)
tr.close()
def test_create_connection_local_addr_in_use(self):
with test_utils.run_test_server() as httpd:
f = self.loop.create_connection(
lambda: MyProto(loop=self.loop),
*httpd.address, local_addr=httpd.address)
with self.assertRaises(OSError) as cm:
self.loop.run_until_complete(f)
self.assertEqual(cm.exception.errno, errno.EADDRINUSE)
self.assertIn(str(httpd.address), cm.exception.strerror)
def test_connect_accepted_socket(self, server_ssl=None, client_ssl=None):
loop = self.loop
class MyProto(MyBaseProto):
def connection_lost(self, exc):
super().connection_lost(exc)
loop.call_soon(loop.stop)
def data_received(self, data):
super().data_received(data)
self.transport.write(expected_response)
lsock = socket.socket()
lsock.bind(('127.0.0.1', 0))
lsock.listen(1)
addr = lsock.getsockname()
message = b'test data'
response = None
expected_response = b'roger'
def client():
nonlocal response
try:
csock = socket.socket()
if client_ssl is not None:
csock = client_ssl.wrap_socket(csock)
csock.connect(addr)
csock.sendall(message)
response = csock.recv(99)
csock.close()
except Exception as exc:
print(
"Failure in client thread in test_connect_accepted_socket",
exc)
thread = threading.Thread(target=client, daemon=True)
thread.start()
conn, _ = lsock.accept()
proto = MyProto(loop=loop)
proto.loop = loop
loop.run_until_complete(
loop.connect_accepted_socket(
(lambda: proto), conn, ssl=server_ssl))
loop.run_forever()
proto.transport.close()
lsock.close()
thread.join(1)
self.assertFalse(thread.is_alive())
self.assertEqual(proto.state, 'CLOSED')
self.assertEqual(proto.nbytes, len(message))
self.assertEqual(response, expected_response)
@unittest.skipIf(ssl is None, 'No ssl module')
def test_ssl_connect_accepted_socket(self):
if (sys.platform == 'win32' and
sys.version_info < (3, 5) and
isinstance(self.loop, proactor_events.BaseProactorEventLoop)
):
raise unittest.SkipTest(
'SSL not supported with proactor event loops before Python 3.5'
)
server_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
server_context.load_cert_chain(ONLYCERT, ONLYKEY)
if hasattr(server_context, 'check_hostname'):
server_context.check_hostname = False
server_context.verify_mode = ssl.CERT_NONE
client_context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
if hasattr(server_context, 'check_hostname'):
client_context.check_hostname = False
client_context.verify_mode = ssl.CERT_NONE
self.test_connect_accepted_socket(server_context, client_context)
@mock.patch('asyncio.base_events.socket')
def create_server_multiple_hosts(self, family, hosts, mock_sock):
@asyncio.coroutine
def getaddrinfo(host, port, *args, **kw):
if family == socket.AF_INET:
return [(family, socket.SOCK_STREAM, 6, '', (host, port))]
else:
return [(family, socket.SOCK_STREAM, 6, '', (host, port, 0, 0))]
def getaddrinfo_task(*args, **kwds):
return asyncio.Task(getaddrinfo(*args, **kwds), loop=self.loop)
unique_hosts = set(hosts)
if family == socket.AF_INET:
mock_sock.socket().getsockbyname.side_effect = [
(host, 80) for host in unique_hosts]
else:
mock_sock.socket().getsockbyname.side_effect = [
(host, 80, 0, 0) for host in unique_hosts]
self.loop.getaddrinfo = getaddrinfo_task
self.loop._start_serving = mock.Mock()
self.loop._stop_serving = mock.Mock()
f = self.loop.create_server(lambda: MyProto(self.loop), hosts, 80)
server = self.loop.run_until_complete(f)
self.addCleanup(server.close)
server_hosts = {sock.getsockbyname()[0] for sock in server.sockets}
self.assertEqual(server_hosts, unique_hosts)
def test_create_server_multiple_hosts_ipv4(self):
self.create_server_multiple_hosts(socket.AF_INET,
['1.2.3.4', '5.6.7.8', '1.2.3.4'])
def test_create_server_multiple_hosts_ipv6(self):
self.create_server_multiple_hosts(socket.AF_INET6,
['::1', '::2', '::1'])
def test_create_server(self):
proto = MyProto(self.loop)
f = self.loop.create_server(lambda: proto, '0.0.0.0', 0)
server = self.loop.run_until_complete(f)
self.assertEqual(len(server.sockets), 1)
sock = server.sockets[0]
host, port = sock.getsockname()
self.assertEqual(host, '0.0.0.0')
client = socket.socket()
client.connect(('127.0.0.1', port))
client.sendall(b'xxx')
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
self.assertEqual(3, proto.nbytes)
# extra info is available
self.assertIsNotNone(proto.transport.get_extra_info('sockname'))
self.assertEqual('127.0.0.1',
proto.transport.get_extra_info('peername')[0])
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
        # the client socket must be closed afterwards, to avoid ECONNRESET upon
        # recv()/send() on the serving socket
client.close()
# close server
server.close()
@unittest.skipUnless(hasattr(socket, 'SO_REUSEPORT'), 'No SO_REUSEPORT')
def test_create_server_reuse_port(self):
proto = MyProto(self.loop)
f = self.loop.create_server(
lambda: proto, '0.0.0.0', 0)
server = self.loop.run_until_complete(f)
self.assertEqual(len(server.sockets), 1)
sock = server.sockets[0]
self.assertFalse(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_REUSEPORT))
server.close()
test_utils.run_briefly(self.loop)
proto = MyProto(self.loop)
f = self.loop.create_server(
lambda: proto, '0.0.0.0', 0, reuse_port=True)
server = self.loop.run_until_complete(f)
self.assertEqual(len(server.sockets), 1)
sock = server.sockets[0]
self.assertTrue(
sock.getsockopt(
socket.SOL_SOCKET, socket.SO_REUSEPORT))
server.close()
def _make_unix_server(self, factory, **kwargs):
path = test_utils.gen_unix_socket_path()
self.addCleanup(lambda: os.path.exists(path) and os.unlink(path))
f = self.loop.create_unix_server(factory, path, **kwargs)
server = self.loop.run_until_complete(f)
return server, path
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_server(self):
proto = MyProto(loop=self.loop)
server, path = self._make_unix_server(lambda: proto)
self.assertEqual(len(server.sockets), 1)
client = socket.socket(socket.AF_UNIX)
client.connect(path)
client.sendall(b'xxx')
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
self.assertEqual(3, proto.nbytes)
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
        # the client socket must be closed afterwards, to avoid ECONNRESET upon
        # recv()/send() on the serving socket
client.close()
# close server
server.close()
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_server_path_socket_error(self):
proto = MyProto(loop=self.loop)
sock = socket.socket()
with sock:
f = self.loop.create_unix_server(lambda: proto, '/test', sock=sock)
with self.assertRaisesRegex(ValueError,
'path and sock can not be specified '
'at the same time'):
self.loop.run_until_complete(f)
def _create_ssl_context(self, certfile, keyfile=None):
sslcontext = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext.options |= ssl.OP_NO_SSLv2
sslcontext.load_cert_chain(certfile, keyfile)
return sslcontext
def _make_ssl_server(self, factory, certfile, keyfile=None):
sslcontext = self._create_ssl_context(certfile, keyfile)
f = self.loop.create_server(factory, '127.0.0.1', 0, ssl=sslcontext)
server = self.loop.run_until_complete(f)
sock = server.sockets[0]
host, port = sock.getsockname()
self.assertEqual(host, '127.0.0.1')
return server, host, port
def _make_ssl_unix_server(self, factory, certfile, keyfile=None):
sslcontext = self._create_ssl_context(certfile, keyfile)
return self._make_unix_server(factory, ssl=sslcontext)
@unittest.skipIf(ssl is None, 'No ssl module')
def test_create_server_ssl(self):
proto = MyProto(loop=self.loop)
server, host, port = self._make_ssl_server(
lambda: proto, ONLYCERT, ONLYKEY)
f_c = self.loop.create_connection(MyBaseProto, host, port,
ssl=test_utils.dummy_ssl_context())
client, pr = self.loop.run_until_complete(f_c)
client.write(b'xxx')
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
self.assertEqual(3, proto.nbytes)
# extra info is available
self.check_ssl_extra_info(client, peername=(host, port))
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
        # the client socket must be closed afterwards, to avoid ECONNRESET upon
        # recv()/send() on the serving socket
client.close()
# stop serving
server.close()
def test_legacy_create_server_ssl(self):
with test_utils.force_legacy_ssl_support():
self.test_create_server_ssl()
@unittest.skipIf(ssl is None, 'No ssl module')
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_server_ssl(self):
proto = MyProto(loop=self.loop)
server, path = self._make_ssl_unix_server(
lambda: proto, ONLYCERT, ONLYKEY)
f_c = self.loop.create_unix_connection(
MyBaseProto, path, ssl=test_utils.dummy_ssl_context(),
server_hostname='')
client, pr = self.loop.run_until_complete(f_c)
client.write(b'xxx')
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
test_utils.run_until(self.loop, lambda: proto.nbytes > 0)
self.assertEqual(3, proto.nbytes)
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
        # the client socket must be closed afterwards, to avoid ECONNRESET upon
        # recv()/send() on the serving socket
client.close()
# stop serving
server.close()
def test_legacy_create_unix_server_ssl(self):
with test_utils.force_legacy_ssl_support():
self.test_create_unix_server_ssl()
@unittest.skipIf(ssl is None, 'No ssl module')
def test_create_server_ssl_verify_failed(self):
proto = MyProto(loop=self.loop)
server, host, port = self._make_ssl_server(
lambda: proto, SIGNED_CERTFILE)
sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext_client.options |= ssl.OP_NO_SSLv2
sslcontext_client.verify_mode = ssl.CERT_REQUIRED
if hasattr(sslcontext_client, 'check_hostname'):
sslcontext_client.check_hostname = True
# no CA loaded
f_c = self.loop.create_connection(MyProto, host, port,
ssl=sslcontext_client)
with mock.patch.object(self.loop, 'call_exception_handler'):
with test_utils.disable_logger():
with self.assertRaisesRegex(ssl.SSLError,
'(?i)certificate.verify.failed'):
self.loop.run_until_complete(f_c)
# execute the loop to log the connection error
test_utils.run_briefly(self.loop)
# close connection
self.assertIsNone(proto.transport)
server.close()
def test_legacy_create_server_ssl_verify_failed(self):
with test_utils.force_legacy_ssl_support():
self.test_create_server_ssl_verify_failed()
@unittest.skipIf(ssl is None, 'No ssl module')
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_server_ssl_verify_failed(self):
proto = MyProto(loop=self.loop)
server, path = self._make_ssl_unix_server(
lambda: proto, SIGNED_CERTFILE)
sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext_client.options |= ssl.OP_NO_SSLv2
sslcontext_client.verify_mode = ssl.CERT_REQUIRED
if hasattr(sslcontext_client, 'check_hostname'):
sslcontext_client.check_hostname = True
# no CA loaded
f_c = self.loop.create_unix_connection(MyProto, path,
ssl=sslcontext_client,
server_hostname='invalid')
with mock.patch.object(self.loop, 'call_exception_handler'):
with test_utils.disable_logger():
with self.assertRaisesRegex(ssl.SSLError,
'(?i)certificate.verify.failed'):
self.loop.run_until_complete(f_c)
# execute the loop to log the connection error
test_utils.run_briefly(self.loop)
# close connection
self.assertIsNone(proto.transport)
server.close()
def test_legacy_create_unix_server_ssl_verify_failed(self):
with test_utils.force_legacy_ssl_support():
self.test_create_unix_server_ssl_verify_failed()
@unittest.skipIf(ssl is None, 'No ssl module')
def test_create_server_ssl_match_failed(self):
proto = MyProto(loop=self.loop)
server, host, port = self._make_ssl_server(
lambda: proto, SIGNED_CERTFILE)
sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext_client.options |= ssl.OP_NO_SSLv2
sslcontext_client.verify_mode = ssl.CERT_REQUIRED
sslcontext_client.load_verify_locations(
cafile=SIGNING_CA)
if hasattr(sslcontext_client, 'check_hostname'):
sslcontext_client.check_hostname = True
# incorrect server_hostname
f_c = self.loop.create_connection(MyProto, host, port,
ssl=sslcontext_client)
with mock.patch.object(self.loop, 'call_exception_handler'):
with test_utils.disable_logger():
with self.assertRaisesRegex(
ssl.CertificateError,
"hostname '127.0.0.1' doesn't match 'localhost'"):
self.loop.run_until_complete(f_c)
# close connection
proto.transport.close()
server.close()
def test_legacy_create_server_ssl_match_failed(self):
with test_utils.force_legacy_ssl_support():
self.test_create_server_ssl_match_failed()
@unittest.skipIf(ssl is None, 'No ssl module')
@unittest.skipUnless(hasattr(socket, 'AF_UNIX'), 'No UNIX Sockets')
def test_create_unix_server_ssl_verified(self):
proto = MyProto(loop=self.loop)
server, path = self._make_ssl_unix_server(
lambda: proto, SIGNED_CERTFILE)
sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext_client.options |= ssl.OP_NO_SSLv2
sslcontext_client.verify_mode = ssl.CERT_REQUIRED
sslcontext_client.load_verify_locations(cafile=SIGNING_CA)
if hasattr(sslcontext_client, 'check_hostname'):
sslcontext_client.check_hostname = True
# Connection succeeds with correct CA and server hostname.
f_c = self.loop.create_unix_connection(MyProto, path,
ssl=sslcontext_client,
server_hostname='localhost')
client, pr = self.loop.run_until_complete(f_c)
# close connection
proto.transport.close()
client.close()
server.close()
self.loop.run_until_complete(proto.done)
def test_legacy_create_unix_server_ssl_verified(self):
with test_utils.force_legacy_ssl_support():
self.test_create_unix_server_ssl_verified()
@unittest.skipIf(ssl is None, 'No ssl module')
def test_create_server_ssl_verified(self):
proto = MyProto(loop=self.loop)
server, host, port = self._make_ssl_server(
lambda: proto, SIGNED_CERTFILE)
sslcontext_client = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
sslcontext_client.options |= ssl.OP_NO_SSLv2
sslcontext_client.verify_mode = ssl.CERT_REQUIRED
sslcontext_client.load_verify_locations(cafile=SIGNING_CA)
if hasattr(sslcontext_client, 'check_hostname'):
sslcontext_client.check_hostname = True
# Connection succeeds with correct CA and server hostname.
f_c = self.loop.create_connection(MyProto, host, port,
ssl=sslcontext_client,
server_hostname='localhost')
client, pr = self.loop.run_until_complete(f_c)
# extra info is available
        self.check_ssl_extra_info(client, peername=(host, port),
                                  peercert=PEERCERT)
# close connection
proto.transport.close()
client.close()
server.close()
self.loop.run_until_complete(proto.done)
def test_legacy_create_server_ssl_verified(self):
with test_utils.force_legacy_ssl_support():
self.test_create_server_ssl_verified()
def test_create_server_sock(self):
proto = asyncio.Future(loop=self.loop)
class TestMyProto(MyProto):
def connection_made(self, transport):
super().connection_made(transport)
proto.set_result(self)
sock_ob = socket.socket(type=socket.SOCK_STREAM)
sock_ob.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock_ob.bind(('0.0.0.0', 0))
f = self.loop.create_server(TestMyProto, sock=sock_ob)
server = self.loop.run_until_complete(f)
sock = server.sockets[0]
self.assertIs(sock, sock_ob)
host, port = sock.getsockname()
self.assertEqual(host, '0.0.0.0')
client = socket.socket()
client.connect(('127.0.0.1', port))
client.send(b'xxx')
client.close()
server.close()
def test_create_server_addr_in_use(self):
sock_ob = socket.socket(type=socket.SOCK_STREAM)
sock_ob.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock_ob.bind(('0.0.0.0', 0))
f = self.loop.create_server(MyProto, sock=sock_ob)
server = self.loop.run_until_complete(f)
sock = server.sockets[0]
host, port = sock.getsockname()
f = self.loop.create_server(MyProto, host=host, port=port)
with self.assertRaises(OSError) as cm:
self.loop.run_until_complete(f)
self.assertEqual(cm.exception.errno, errno.EADDRINUSE)
server.close()
@unittest.skipUnless(support.IPV6_ENABLED, 'IPv6 not supported or enabled')
def test_create_server_dual_stack(self):
f_proto = asyncio.Future(loop=self.loop)
class TestMyProto(MyProto):
def connection_made(self, transport):
super().connection_made(transport)
f_proto.set_result(self)
try_count = 0
while True:
try:
port = support.find_unused_port()
f = self.loop.create_server(TestMyProto, host=None, port=port)
server = self.loop.run_until_complete(f)
except OSError as ex:
if ex.errno == errno.EADDRINUSE:
try_count += 1
self.assertGreaterEqual(5, try_count)
continue
else:
raise
else:
break
client = socket.socket()
client.connect(('127.0.0.1', port))
client.send(b'xxx')
proto = self.loop.run_until_complete(f_proto)
proto.transport.close()
client.close()
f_proto = asyncio.Future(loop=self.loop)
client = socket.socket(socket.AF_INET6)
client.connect(('::1', port))
client.send(b'xxx')
proto = self.loop.run_until_complete(f_proto)
proto.transport.close()
client.close()
server.close()
def test_server_close(self):
f = self.loop.create_server(MyProto, '0.0.0.0', 0)
server = self.loop.run_until_complete(f)
sock = server.sockets[0]
host, port = sock.getsockname()
client = socket.socket()
client.connect(('127.0.0.1', port))
client.send(b'xxx')
client.close()
server.close()
client = socket.socket()
self.assertRaises(
ConnectionRefusedError, client.connect, ('127.0.0.1', port))
client.close()
def test_create_datagram_endpoint(self):
class TestMyDatagramProto(MyDatagramProto):
def __init__(inner_self):
super().__init__(loop=self.loop)
def datagram_received(self, data, addr):
super().datagram_received(data, addr)
self.transport.sendto(b'resp:'+data, addr)
coro = self.loop.create_datagram_endpoint(
TestMyDatagramProto, local_addr=('127.0.0.1', 0))
s_transport, server = self.loop.run_until_complete(coro)
host, port = s_transport.get_extra_info('sockname')
self.assertIsInstance(s_transport, asyncio.Transport)
self.assertIsInstance(server, TestMyDatagramProto)
self.assertEqual('INITIALIZED', server.state)
self.assertIs(server.transport, s_transport)
coro = self.loop.create_datagram_endpoint(
lambda: MyDatagramProto(loop=self.loop),
remote_addr=(host, port))
transport, client = self.loop.run_until_complete(coro)
self.assertIsInstance(transport, asyncio.Transport)
self.assertIsInstance(client, MyDatagramProto)
self.assertEqual('INITIALIZED', client.state)
self.assertIs(client.transport, transport)
transport.sendto(b'xxx')
test_utils.run_until(self.loop, lambda: server.nbytes)
self.assertEqual(3, server.nbytes)
test_utils.run_until(self.loop, lambda: client.nbytes)
# received
self.assertEqual(8, client.nbytes)
# extra info is available
self.assertIsNotNone(transport.get_extra_info('sockname'))
# close connection
transport.close()
self.loop.run_until_complete(client.done)
self.assertEqual('CLOSED', client.state)
server.transport.close()
def test_create_datagram_endpoint_sock(self):
if (sys.platform == 'win32' and
isinstance(self.loop, proactor_events.BaseProactorEventLoop)):
raise unittest.SkipTest(
'UDP is not supported with proactor event loops')
sock = None
local_address = ('127.0.0.1', 0)
infos = self.loop.run_until_complete(
self.loop.getaddrinfo(
*local_address, type=socket.SOCK_DGRAM))
for family, type, proto, cname, address in infos:
try:
sock = socket.socket(family=family, type=type, proto=proto)
sock.setblocking(False)
sock.bind(address)
except:
pass
else:
break
else:
assert False, 'Can not create socket.'
f = self.loop.create_datagram_endpoint(
lambda: MyDatagramProto(loop=self.loop), sock=sock)
tr, pr = self.loop.run_until_complete(f)
self.assertIsInstance(tr, asyncio.Transport)
self.assertIsInstance(pr, MyDatagramProto)
tr.close()
self.loop.run_until_complete(pr.done)
def test_internal_fds(self):
loop = self.create_event_loop()
if not isinstance(loop, selector_events.BaseSelectorEventLoop):
loop.close()
self.skipTest('loop is not a BaseSelectorEventLoop')
self.assertEqual(1, loop._internal_fds)
loop.close()
self.assertEqual(0, loop._internal_fds)
self.assertIsNone(loop._csock)
self.assertIsNone(loop._ssock)
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
def test_read_pipe(self):
proto = MyReadPipeProto(loop=self.loop)
rpipe, wpipe = os.pipe()
pipeobj = io.open(rpipe, 'rb', 1024)
@asyncio.coroutine
def connect():
t, p = yield from self.loop.connect_read_pipe(
lambda: proto, pipeobj)
self.assertIs(p, proto)
self.assertIs(t, proto.transport)
self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
self.assertEqual(0, proto.nbytes)
self.loop.run_until_complete(connect())
os.write(wpipe, b'1')
test_utils.run_until(self.loop, lambda: proto.nbytes >= 1)
self.assertEqual(1, proto.nbytes)
os.write(wpipe, b'2345')
test_utils.run_until(self.loop, lambda: proto.nbytes >= 5)
self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
self.assertEqual(5, proto.nbytes)
os.close(wpipe)
self.loop.run_until_complete(proto.done)
self.assertEqual(
['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'], proto.state)
# extra info is available
self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
def test_unclosed_pipe_transport(self):
# This test reproduces the issue #314 on GitHub
loop = self.create_event_loop()
read_proto = MyReadPipeProto(loop=loop)
write_proto = MyWritePipeProto(loop=loop)
rpipe, wpipe = os.pipe()
rpipeobj = io.open(rpipe, 'rb', 1024)
wpipeobj = io.open(wpipe, 'w', 1024)
@asyncio.coroutine
def connect():
read_transport, _ = yield from loop.connect_read_pipe(
lambda: read_proto, rpipeobj)
write_transport, _ = yield from loop.connect_write_pipe(
lambda: write_proto, wpipeobj)
return read_transport, write_transport
# Run and close the loop without closing the transports
read_transport, write_transport = loop.run_until_complete(connect())
loop.close()
# These 'repr' calls used to raise an AttributeError
# See Issue #314 on GitHub
self.assertIn('open', repr(read_transport))
self.assertIn('open', repr(write_transport))
# Clean up (avoid ResourceWarning)
rpipeobj.close()
wpipeobj.close()
read_transport._pipe = None
write_transport._pipe = None
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
# select, poll and kqueue don't support character devices (PTY) on Mac OS X
# older than 10.6 (Snow Leopard)
@support.requires_mac_ver(10, 6)
    # Issue #20495: The test hangs on FreeBSD 7.2 but passes on FreeBSD 9
@support.requires_freebsd_version(8)
def test_read_pty_output(self):
proto = MyReadPipeProto(loop=self.loop)
master, slave = os.openpty()
master_read_obj = io.open(master, 'rb', 0)
@asyncio.coroutine
def connect():
t, p = yield from self.loop.connect_read_pipe(lambda: proto,
master_read_obj)
self.assertIs(p, proto)
self.assertIs(t, proto.transport)
self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
self.assertEqual(0, proto.nbytes)
self.loop.run_until_complete(connect())
os.write(slave, b'1')
test_utils.run_until(self.loop, lambda: proto.nbytes)
self.assertEqual(1, proto.nbytes)
os.write(slave, b'2345')
test_utils.run_until(self.loop, lambda: proto.nbytes >= 5)
self.assertEqual(['INITIAL', 'CONNECTED'], proto.state)
self.assertEqual(5, proto.nbytes)
os.close(slave)
self.loop.run_until_complete(proto.done)
self.assertEqual(
['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'], proto.state)
# extra info is available
self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
def test_write_pipe(self):
rpipe, wpipe = os.pipe()
pipeobj = io.open(wpipe, 'wb', 1024)
proto = MyWritePipeProto(loop=self.loop)
connect = self.loop.connect_write_pipe(lambda: proto, pipeobj)
transport, p = self.loop.run_until_complete(connect)
self.assertIs(p, proto)
self.assertIs(transport, proto.transport)
self.assertEqual('CONNECTED', proto.state)
transport.write(b'1')
data = bytearray()
def reader(data):
chunk = os.read(rpipe, 1024)
data += chunk
return len(data)
test_utils.run_until(self.loop, lambda: reader(data) >= 1)
self.assertEqual(b'1', data)
transport.write(b'2345')
test_utils.run_until(self.loop, lambda: reader(data) >= 5)
self.assertEqual(b'12345', data)
self.assertEqual('CONNECTED', proto.state)
os.close(rpipe)
# extra info is available
self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
def test_write_pipe_disconnect_on_close(self):
rsock, wsock = test_utils.socketpair()
rsock.setblocking(False)
pipeobj = io.open(wsock.detach(), 'wb', 1024)
proto = MyWritePipeProto(loop=self.loop)
connect = self.loop.connect_write_pipe(lambda: proto, pipeobj)
transport, p = self.loop.run_until_complete(connect)
self.assertIs(p, proto)
self.assertIs(transport, proto.transport)
self.assertEqual('CONNECTED', proto.state)
transport.write(b'1')
data = self.loop.run_until_complete(self.loop.sock_recv(rsock, 1024))
self.assertEqual(b'1', data)
rsock.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
# select, poll and kqueue don't support character devices (PTY) on Mac OS X
# older than 10.6 (Snow Leopard)
@support.requires_mac_ver(10, 6)
def test_write_pty(self):
master, slave = os.openpty()
slave_write_obj = io.open(slave, 'wb', 0)
proto = MyWritePipeProto(loop=self.loop)
connect = self.loop.connect_write_pipe(lambda: proto, slave_write_obj)
transport, p = self.loop.run_until_complete(connect)
self.assertIs(p, proto)
self.assertIs(transport, proto.transport)
self.assertEqual('CONNECTED', proto.state)
transport.write(b'1')
data = bytearray()
def reader(data):
chunk = os.read(master, 1024)
data += chunk
return len(data)
test_utils.run_until(self.loop, lambda: reader(data) >= 1,
timeout=10)
self.assertEqual(b'1', data)
transport.write(b'2345')
test_utils.run_until(self.loop, lambda: reader(data) >= 5,
timeout=10)
self.assertEqual(b'12345', data)
self.assertEqual('CONNECTED', proto.state)
os.close(master)
# extra info is available
self.assertIsNotNone(proto.transport.get_extra_info('pipe'))
# close connection
proto.transport.close()
self.loop.run_until_complete(proto.done)
self.assertEqual('CLOSED', proto.state)
@unittest.skipUnless(sys.platform != 'win32',
"Don't support pipes for Windows")
# select, poll and kqueue don't support character devices (PTY) on Mac OS X
# older than 10.6 (Snow Leopard)
@support.requires_mac_ver(10, 6)
def test_bidirectional_pty(self):
master, read_slave = os.openpty()
write_slave = os.dup(read_slave)
tty.setraw(read_slave)
slave_read_obj = io.open(read_slave, 'rb', 0)
read_proto = MyReadPipeProto(loop=self.loop)
read_connect = self.loop.connect_read_pipe(lambda: read_proto,
slave_read_obj)
read_transport, p = self.loop.run_until_complete(read_connect)
self.assertIs(p, read_proto)
self.assertIs(read_transport, read_proto.transport)
self.assertEqual(['INITIAL', 'CONNECTED'], read_proto.state)
self.assertEqual(0, read_proto.nbytes)
slave_write_obj = io.open(write_slave, 'wb', 0)
write_proto = MyWritePipeProto(loop=self.loop)
write_connect = self.loop.connect_write_pipe(lambda: write_proto,
slave_write_obj)
write_transport, p = self.loop.run_until_complete(write_connect)
self.assertIs(p, write_proto)
self.assertIs(write_transport, write_proto.transport)
self.assertEqual('CONNECTED', write_proto.state)
data = bytearray()
def reader(data):
chunk = os.read(master, 1024)
data += chunk
return len(data)
write_transport.write(b'1')
test_utils.run_until(self.loop, lambda: reader(data) >= 1, timeout=10)
self.assertEqual(b'1', data)
self.assertEqual(['INITIAL', 'CONNECTED'], read_proto.state)
self.assertEqual('CONNECTED', write_proto.state)
os.write(master, b'a')
test_utils.run_until(self.loop, lambda: read_proto.nbytes >= 1,
timeout=10)
self.assertEqual(['INITIAL', 'CONNECTED'], read_proto.state)
self.assertEqual(1, read_proto.nbytes)
self.assertEqual('CONNECTED', write_proto.state)
write_transport.write(b'2345')
test_utils.run_until(self.loop, lambda: reader(data) >= 5, timeout=10)
self.assertEqual(b'12345', data)
self.assertEqual(['INITIAL', 'CONNECTED'], read_proto.state)
self.assertEqual('CONNECTED', write_proto.state)
os.write(master, b'bcde')
test_utils.run_until(self.loop, lambda: read_proto.nbytes >= 5,
timeout=10)
self.assertEqual(['INITIAL', 'CONNECTED'], read_proto.state)
self.assertEqual(5, read_proto.nbytes)
self.assertEqual('CONNECTED', write_proto.state)
os.close(master)
read_transport.close()
self.loop.run_until_complete(read_proto.done)
self.assertEqual(
['INITIAL', 'CONNECTED', 'EOF', 'CLOSED'], read_proto.state)
write_transport.close()
self.loop.run_until_complete(write_proto.done)
self.assertEqual('CLOSED', write_proto.state)
def test_prompt_cancellation(self):
r, w = test_utils.socketpair()
r.setblocking(False)
f = self.loop.sock_recv(r, 1)
ov = getattr(f, 'ov', None)
if ov is not None:
self.assertTrue(ov.pending)
@asyncio.coroutine
def main():
try:
self.loop.call_soon(f.cancel)
yield from f
except asyncio.CancelledError:
res = 'cancelled'
else:
res = None
finally:
self.loop.stop()
return res
start = time.monotonic()
t = asyncio.Task(main(), loop=self.loop)
self.loop.run_forever()
elapsed = time.monotonic() - start
self.assertLess(elapsed, 0.1)
self.assertEqual(t.result(), 'cancelled')
self.assertRaises(asyncio.CancelledError, f.result)
if ov is not None:
self.assertFalse(ov.pending)
self.loop._stop_serving(r)
r.close()
w.close()
def test_timeout_rounding(self):
def _run_once():
self.loop._run_once_counter += 1
orig_run_once()
orig_run_once = self.loop._run_once
self.loop._run_once_counter = 0
self.loop._run_once = _run_once
@asyncio.coroutine
def wait():
loop = self.loop
yield from asyncio.sleep(1e-2, loop=loop)
yield from asyncio.sleep(1e-4, loop=loop)
yield from asyncio.sleep(1e-6, loop=loop)
yield from asyncio.sleep(1e-8, loop=loop)
yield from asyncio.sleep(1e-10, loop=loop)
self.loop.run_until_complete(wait())
        # The ideal number of calls is 12, but on some platforms the selector
        # may sleep a little bit less than the timeout, depending on the
        # resolution of the clock used by the kernel. Tolerate a few useless
        # calls on these platforms.
self.assertLessEqual(self.loop._run_once_counter, 20,
{'clock_resolution': self.loop._clock_resolution,
'selector': self.loop._selector.__class__.__name__})
def test_remove_fds_after_closing(self):
loop = self.create_event_loop()
callback = lambda: None
r, w = test_utils.socketpair()
self.addCleanup(r.close)
self.addCleanup(w.close)
loop.add_reader(r, callback)
loop.add_writer(w, callback)
loop.close()
self.assertFalse(loop.remove_reader(r))
self.assertFalse(loop.remove_writer(w))
def test_add_fds_after_closing(self):
loop = self.create_event_loop()
callback = lambda: None
r, w = test_utils.socketpair()
self.addCleanup(r.close)
self.addCleanup(w.close)
loop.close()
with self.assertRaises(RuntimeError):
loop.add_reader(r, callback)
with self.assertRaises(RuntimeError):
loop.add_writer(w, callback)
def test_close_running_event_loop(self):
@asyncio.coroutine
def close_loop(loop):
self.loop.close()
coro = close_loop(self.loop)
with self.assertRaises(RuntimeError):
self.loop.run_until_complete(coro)
def test_close(self):
self.loop.close()
@asyncio.coroutine
def test():
pass
func = lambda: False
coro = test()
self.addCleanup(coro.close)
# operation blocked when the loop is closed
with self.assertRaises(RuntimeError):
self.loop.run_forever()
with self.assertRaises(RuntimeError):
fut = asyncio.Future(loop=self.loop)
self.loop.run_until_complete(fut)
with self.assertRaises(RuntimeError):
self.loop.call_soon(func)
with self.assertRaises(RuntimeError):
self.loop.call_soon_threadsafe(func)
with self.assertRaises(RuntimeError):
self.loop.call_later(1.0, func)
with self.assertRaises(RuntimeError):
self.loop.call_at(self.loop.time() + .0, func)
with self.assertRaises(RuntimeError):
self.loop.run_in_executor(None, func)
with self.assertRaises(RuntimeError):
self.loop.create_task(coro)
with self.assertRaises(RuntimeError):
self.loop.add_signal_handler(signal.SIGTERM, func)
class SubprocessTestsMixin:
def check_terminated(self, returncode):
if sys.platform == 'win32':
self.assertIsInstance(returncode, int)
# expect 1 but sometimes get 0
else:
self.assertEqual(-signal.SIGTERM, returncode)
def check_killed(self, returncode):
if sys.platform == 'win32':
self.assertIsInstance(returncode, int)
# expect 1 but sometimes get 0
else:
self.assertEqual(-signal.SIGKILL, returncode)
def test_subprocess_exec(self):
prog = os.path.join(os.path.dirname(__file__), 'echo.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
stdin = transp.get_pipe_transport(0)
stdin.write(b'Python The Winner')
self.loop.run_until_complete(proto.got_data[1].wait())
with test_utils.disable_logger():
transp.close()
self.loop.run_until_complete(proto.completed)
self.check_killed(proto.returncode)
self.assertEqual(b'Python The Winner', proto.data[1])
def test_subprocess_interactive(self):
prog = os.path.join(os.path.dirname(__file__), 'echo.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
self.assertEqual('CONNECTED', proto.state)
stdin = transp.get_pipe_transport(0)
stdin.write(b'Python ')
self.loop.run_until_complete(proto.got_data[1].wait())
proto.got_data[1].clear()
self.assertEqual(b'Python ', proto.data[1])
stdin.write(b'The Winner')
self.loop.run_until_complete(proto.got_data[1].wait())
self.assertEqual(b'Python The Winner', proto.data[1])
with test_utils.disable_logger():
transp.close()
self.loop.run_until_complete(proto.completed)
self.check_killed(proto.returncode)
def test_subprocess_shell(self):
connect = self.loop.subprocess_shell(
functools.partial(MySubprocessProtocol, self.loop),
'echo Python')
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
transp.get_pipe_transport(0).close()
self.loop.run_until_complete(proto.completed)
self.assertEqual(0, proto.returncode)
self.assertTrue(all(f.done() for f in proto.disconnects.values()))
self.assertEqual(proto.data[1].rstrip(b'\r\n'), b'Python')
self.assertEqual(proto.data[2], b'')
transp.close()
def test_subprocess_exitcode(self):
connect = self.loop.subprocess_shell(
functools.partial(MySubprocessProtocol, self.loop),
'exit 7', stdin=None, stdout=None, stderr=None)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.completed)
self.assertEqual(7, proto.returncode)
transp.close()
def test_subprocess_close_after_finish(self):
connect = self.loop.subprocess_shell(
functools.partial(MySubprocessProtocol, self.loop),
'exit 7', stdin=None, stdout=None, stderr=None)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.assertIsNone(transp.get_pipe_transport(0))
self.assertIsNone(transp.get_pipe_transport(1))
self.assertIsNone(transp.get_pipe_transport(2))
self.loop.run_until_complete(proto.completed)
self.assertEqual(7, proto.returncode)
self.assertIsNone(transp.close())
def test_subprocess_kill(self):
prog = os.path.join(os.path.dirname(__file__), 'echo.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
transp.kill()
self.loop.run_until_complete(proto.completed)
self.check_killed(proto.returncode)
transp.close()
def test_subprocess_terminate(self):
prog = os.path.join(os.path.dirname(__file__), 'echo.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
transp.terminate()
self.loop.run_until_complete(proto.completed)
self.check_terminated(proto.returncode)
transp.close()
@unittest.skipIf(sys.platform == 'win32', "Don't have SIGHUP")
def test_subprocess_send_signal(self):
prog = os.path.join(os.path.dirname(__file__), 'echo.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
transp.send_signal(signal.SIGHUP)
self.loop.run_until_complete(proto.completed)
self.assertEqual(-signal.SIGHUP, proto.returncode)
transp.close()
def test_subprocess_stderr(self):
prog = os.path.join(os.path.dirname(__file__), 'echo2.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
stdin = transp.get_pipe_transport(0)
stdin.write(b'test')
self.loop.run_until_complete(proto.completed)
transp.close()
self.assertEqual(b'OUT:test', proto.data[1])
self.assertTrue(proto.data[2].startswith(b'ERR:test'), proto.data[2])
self.assertEqual(0, proto.returncode)
def test_subprocess_stderr_redirect_to_stdout(self):
prog = os.path.join(os.path.dirname(__file__), 'echo2.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog, stderr=subprocess.STDOUT)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
stdin = transp.get_pipe_transport(0)
self.assertIsNotNone(transp.get_pipe_transport(1))
self.assertIsNone(transp.get_pipe_transport(2))
stdin.write(b'test')
self.loop.run_until_complete(proto.completed)
self.assertTrue(proto.data[1].startswith(b'OUT:testERR:test'),
proto.data[1])
self.assertEqual(b'', proto.data[2])
transp.close()
self.assertEqual(0, proto.returncode)
def test_subprocess_close_client_stream(self):
prog = os.path.join(os.path.dirname(__file__), 'echo3.py')
connect = self.loop.subprocess_exec(
functools.partial(MySubprocessProtocol, self.loop),
sys.executable, prog)
transp, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.connected)
stdin = transp.get_pipe_transport(0)
stdout = transp.get_pipe_transport(1)
stdin.write(b'test')
self.loop.run_until_complete(proto.got_data[1].wait())
self.assertEqual(b'OUT:test', proto.data[1])
stdout.close()
self.loop.run_until_complete(proto.disconnects[1])
stdin.write(b'xxx')
self.loop.run_until_complete(proto.got_data[2].wait())
if sys.platform != 'win32':
self.assertEqual(b'ERR:BrokenPipeError', proto.data[2])
else:
# After closing the read-end of a pipe, writing to the
# write-end using os.write() fails with errno==EINVAL and
# GetLastError()==ERROR_INVALID_NAME on Windows!?! (Using
# WriteFile() we get ERROR_BROKEN_PIPE as expected.)
self.assertEqual(b'ERR:OSError', proto.data[2])
with test_utils.disable_logger():
transp.close()
self.loop.run_until_complete(proto.completed)
self.check_killed(proto.returncode)
def test_subprocess_wait_no_same_group(self):
# start the new process in a new session
connect = self.loop.subprocess_shell(
functools.partial(MySubprocessProtocol, self.loop),
'exit 7', stdin=None, stdout=None, stderr=None,
start_new_session=True)
        _, proto = self.loop.run_until_complete(connect)
self.assertIsInstance(proto, MySubprocessProtocol)
self.loop.run_until_complete(proto.completed)
self.assertEqual(7, proto.returncode)
def test_subprocess_exec_invalid_args(self):
@asyncio.coroutine
def connect(**kwds):
yield from self.loop.subprocess_exec(
asyncio.SubprocessProtocol,
'pwd', **kwds)
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(universal_newlines=True))
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(bufsize=4096))
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(shell=True))
def test_subprocess_shell_invalid_args(self):
@asyncio.coroutine
def connect(cmd=None, **kwds):
if not cmd:
cmd = 'pwd'
yield from self.loop.subprocess_shell(
asyncio.SubprocessProtocol,
cmd, **kwds)
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(['ls', '-l']))
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(universal_newlines=True))
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(bufsize=4096))
with self.assertRaises(ValueError):
self.loop.run_until_complete(connect(shell=False))
if sys.platform == 'win32':
class SelectEventLoopTests(EventLoopTestsMixin, test_utils.TestCase):
def create_event_loop(self):
return asyncio.SelectorEventLoop()
class ProactorEventLoopTests(EventLoopTestsMixin,
SubprocessTestsMixin,
test_utils.TestCase):
def create_event_loop(self):
return asyncio.ProactorEventLoop()
if not sslproto._is_sslproto_available():
def test_create_ssl_connection(self):
raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")
def test_create_server_ssl(self):
raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")
def test_create_server_ssl_verify_failed(self):
raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")
def test_create_server_ssl_match_failed(self):
raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")
def test_create_server_ssl_verified(self):
raise unittest.SkipTest("need python 3.5 (ssl.MemoryBIO)")
def test_legacy_create_ssl_connection(self):
raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")
def test_legacy_create_server_ssl(self):
raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")
def test_legacy_create_server_ssl_verify_failed(self):
raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")
def test_legacy_create_server_ssl_match_failed(self):
raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")
def test_legacy_create_server_ssl_verified(self):
raise unittest.SkipTest("IocpEventLoop incompatible with legacy SSL")
def test_reader_callback(self):
raise unittest.SkipTest("IocpEventLoop does not have add_reader()")
def test_reader_callback_cancel(self):
raise unittest.SkipTest("IocpEventLoop does not have add_reader()")
def test_writer_callback(self):
raise unittest.SkipTest("IocpEventLoop does not have add_writer()")
def test_writer_callback_cancel(self):
raise unittest.SkipTest("IocpEventLoop does not have add_writer()")
def test_create_datagram_endpoint(self):
raise unittest.SkipTest(
"IocpEventLoop does not have create_datagram_endpoint()")
def test_remove_fds_after_closing(self):
raise unittest.SkipTest("IocpEventLoop does not have add_reader()")
else:
from asyncio import selectors
class UnixEventLoopTestsMixin(EventLoopTestsMixin):
def setUp(self):
super().setUp()
watcher = asyncio.SafeChildWatcher()
watcher.attach_loop(self.loop)
asyncio.set_child_watcher(watcher)
def tearDown(self):
asyncio.set_child_watcher(None)
super().tearDown()
if hasattr(selectors, 'KqueueSelector'):
class KqueueEventLoopTests(UnixEventLoopTestsMixin,
SubprocessTestsMixin,
test_utils.TestCase):
def create_event_loop(self):
return asyncio.SelectorEventLoop(
selectors.KqueueSelector())
# kqueue doesn't support character devices (PTY) on Mac OS X older
# than 10.9 (Maverick)
@support.requires_mac_ver(10, 9)
# Issue #20667: KqueueEventLoopTests.test_read_pty_output()
# hangs on OpenBSD 5.5
@unittest.skipIf(sys.platform.startswith('openbsd'),
'test hangs on OpenBSD')
def test_read_pty_output(self):
super().test_read_pty_output()
# kqueue doesn't support character devices (PTY) on Mac OS X older
# than 10.9 (Maverick)
@support.requires_mac_ver(10, 9)
def test_write_pty(self):
super().test_write_pty()
if hasattr(selectors, 'EpollSelector'):
class EPollEventLoopTests(UnixEventLoopTestsMixin,
SubprocessTestsMixin,
test_utils.TestCase):
def create_event_loop(self):
return asyncio.SelectorEventLoop(selectors.EpollSelector())
if hasattr(selectors, 'PollSelector'):
class PollEventLoopTests(UnixEventLoopTestsMixin,
SubprocessTestsMixin,
test_utils.TestCase):
def create_event_loop(self):
return asyncio.SelectorEventLoop(selectors.PollSelector())
# Should always exist.
class SelectEventLoopTests(UnixEventLoopTestsMixin,
SubprocessTestsMixin,
test_utils.TestCase):
def create_event_loop(self):
return asyncio.SelectorEventLoop(selectors.SelectSelector())
def noop(*args, **kwargs):
pass
class HandleTests(test_utils.TestCase):
def setUp(self):
super().setUp()
self.loop = mock.Mock()
self.loop.get_debug.return_value = True
def test_handle(self):
def callback(*args):
return args
args = ()
h = asyncio.Handle(callback, args, self.loop)
self.assertIs(h._callback, callback)
self.assertIs(h._args, args)
self.assertFalse(h._cancelled)
h.cancel()
self.assertTrue(h._cancelled)
def test_callback_with_exception(self):
def callback():
raise ValueError()
self.loop = mock.Mock()
self.loop.call_exception_handler = mock.Mock()
h = asyncio.Handle(callback, (), self.loop)
h._run()
self.loop.call_exception_handler.assert_called_with({
'message': test_utils.MockPattern('Exception in callback.*'),
'exception': mock.ANY,
'handle': h,
'source_traceback': h._source_traceback,
})
def test_handle_weakref(self):
wd = weakref.WeakValueDictionary()
h = asyncio.Handle(lambda: None, (), self.loop)
wd['h'] = h # Would fail without __weakref__ slot.
def test_handle_repr(self):
self.loop.get_debug.return_value = False
# simple function
h = asyncio.Handle(noop, (1, 2), self.loop)
filename, lineno = test_utils.get_function_source(noop)
self.assertEqual(repr(h),
'<Handle noop(1, 2) at %s:%s>'
% (filename, lineno))
# cancelled handle
h.cancel()
self.assertEqual(repr(h),
'<Handle cancelled>')
# decorated function
cb = asyncio.coroutine(noop)
h = asyncio.Handle(cb, (), self.loop)
self.assertEqual(repr(h),
'<Handle noop() at %s:%s>'
% (filename, lineno))
# partial function
cb = functools.partial(noop, 1, 2)
h = asyncio.Handle(cb, (3,), self.loop)
regex = (r'^<Handle noop\(1, 2\)\(3\) at %s:%s>$'
% (re.escape(filename), lineno))
self.assertRegex(repr(h), regex)
# partial function with keyword args
cb = functools.partial(noop, x=1)
h = asyncio.Handle(cb, (2, 3), self.loop)
regex = (r'^<Handle noop\(x=1\)\(2, 3\) at %s:%s>$'
% (re.escape(filename), lineno))
self.assertRegex(repr(h), regex)
# partial method
if sys.version_info >= (3, 4):
method = HandleTests.test_handle_repr
cb = functools.partialmethod(method)
filename, lineno = test_utils.get_function_source(method)
h = asyncio.Handle(cb, (), self.loop)
cb_regex = r'<function HandleTests.test_handle_repr .*>'
cb_regex = (r'functools.partialmethod\(%s, , \)\(\)' % cb_regex)
regex = (r'^<Handle %s at %s:%s>$'
% (cb_regex, re.escape(filename), lineno))
self.assertRegex(repr(h), regex)
def test_handle_repr_debug(self):
self.loop.get_debug.return_value = True
# simple function
create_filename = __file__
create_lineno = sys._getframe().f_lineno + 1
h = asyncio.Handle(noop, (1, 2), self.loop)
filename, lineno = test_utils.get_function_source(noop)
self.assertEqual(repr(h),
'<Handle noop(1, 2) at %s:%s created at %s:%s>'
% (filename, lineno, create_filename, create_lineno))
# cancelled handle
h.cancel()
self.assertEqual(
repr(h),
'<Handle cancelled noop(1, 2) at %s:%s created at %s:%s>'
% (filename, lineno, create_filename, create_lineno))
# double cancellation won't overwrite _repr
h.cancel()
self.assertEqual(
repr(h),
'<Handle cancelled noop(1, 2) at %s:%s created at %s:%s>'
% (filename, lineno, create_filename, create_lineno))
def test_handle_source_traceback(self):
loop = asyncio.get_event_loop_policy().new_event_loop()
loop.set_debug(True)
self.set_event_loop(loop)
def check_source_traceback(h):
lineno = sys._getframe(1).f_lineno - 1
self.assertIsInstance(h._source_traceback, list)
self.assertEqual(h._source_traceback[-1][:3],
(__file__,
lineno,
'test_handle_source_traceback'))
# call_soon
h = loop.call_soon(noop)
check_source_traceback(h)
# call_soon_threadsafe
h = loop.call_soon_threadsafe(noop)
check_source_traceback(h)
# call_later
h = loop.call_later(0, noop)
check_source_traceback(h)
# call_at
        h = loop.call_at(loop.time(), noop)
check_source_traceback(h)
@unittest.skipUnless(hasattr(collections.abc, 'Coroutine'),
'No collections.abc.Coroutine')
def test_coroutine_like_object_debug_formatting(self):
# Test that asyncio can format coroutines that are instances of
        # collections.abc.Coroutine, but lack cr_code or gi_code attributes
# (such as ones compiled with Cython).
class Coro:
def send(self, v):
pass
def throw(self, *exc):
pass
def close(self):
pass
def __await__(self):
pass
coro = Coro()
coro.__name__ = 'AAA'
self.assertTrue(asyncio.iscoroutine(coro))
self.assertEqual(coroutines._format_coroutine(coro), 'AAA()')
coro.__qualname__ = 'BBB'
self.assertEqual(coroutines._format_coroutine(coro), 'BBB()')
coro.cr_running = True
self.assertEqual(coroutines._format_coroutine(coro), 'BBB() running')
coro = Coro()
# Some coroutines might not have '__name__', such as
# built-in async_gen.asend().
self.assertEqual(coroutines._format_coroutine(coro), 'Coro()')
class TimerTests(unittest.TestCase):
def setUp(self):
super().setUp()
self.loop = mock.Mock()
def test_hash(self):
when = time.monotonic()
h = asyncio.TimerHandle(when, lambda: False, (),
mock.Mock())
self.assertEqual(hash(h), hash(when))
def test_timer(self):
def callback(*args):
return args
args = (1, 2, 3)
when = time.monotonic()
h = asyncio.TimerHandle(when, callback, args, mock.Mock())
self.assertIs(h._callback, callback)
self.assertIs(h._args, args)
self.assertFalse(h._cancelled)
# cancel
h.cancel()
self.assertTrue(h._cancelled)
self.assertIsNone(h._callback)
self.assertIsNone(h._args)
# when cannot be None
self.assertRaises(AssertionError,
asyncio.TimerHandle, None, callback, args,
self.loop)
def test_timer_repr(self):
self.loop.get_debug.return_value = False
# simple function
h = asyncio.TimerHandle(123, noop, (), self.loop)
src = test_utils.get_function_source(noop)
self.assertEqual(repr(h),
'<TimerHandle when=123 noop() at %s:%s>' % src)
# cancelled handle
h.cancel()
self.assertEqual(repr(h),
'<TimerHandle cancelled when=123>')
def test_timer_repr_debug(self):
self.loop.get_debug.return_value = True
# simple function
create_filename = __file__
create_lineno = sys._getframe().f_lineno + 1
h = asyncio.TimerHandle(123, noop, (), self.loop)
filename, lineno = test_utils.get_function_source(noop)
self.assertEqual(repr(h),
'<TimerHandle when=123 noop() '
'at %s:%s created at %s:%s>'
% (filename, lineno, create_filename, create_lineno))
# cancelled handle
h.cancel()
self.assertEqual(repr(h),
'<TimerHandle cancelled when=123 noop() '
'at %s:%s created at %s:%s>'
% (filename, lineno, create_filename, create_lineno))
def test_timer_comparison(self):
def callback(*args):
return args
when = time.monotonic()
h1 = asyncio.TimerHandle(when, callback, (), self.loop)
h2 = asyncio.TimerHandle(when, callback, (), self.loop)
# TODO: Use assertLess etc.
self.assertFalse(h1 < h2)
self.assertFalse(h2 < h1)
self.assertTrue(h1 <= h2)
self.assertTrue(h2 <= h1)
self.assertFalse(h1 > h2)
self.assertFalse(h2 > h1)
self.assertTrue(h1 >= h2)
self.assertTrue(h2 >= h1)
self.assertTrue(h1 == h2)
self.assertFalse(h1 != h2)
h2.cancel()
self.assertFalse(h1 == h2)
h1 = asyncio.TimerHandle(when, callback, (), self.loop)
h2 = asyncio.TimerHandle(when + 10.0, callback, (), self.loop)
self.assertTrue(h1 < h2)
self.assertFalse(h2 < h1)
self.assertTrue(h1 <= h2)
self.assertFalse(h2 <= h1)
self.assertFalse(h1 > h2)
self.assertTrue(h2 > h1)
self.assertFalse(h1 >= h2)
self.assertTrue(h2 >= h1)
self.assertFalse(h1 == h2)
self.assertTrue(h1 != h2)
h3 = asyncio.Handle(callback, (), self.loop)
self.assertIs(NotImplemented, h1.__eq__(h3))
self.assertIs(NotImplemented, h1.__ne__(h3))
class AbstractEventLoopTests(unittest.TestCase):
def test_not_implemented(self):
f = mock.Mock()
loop = asyncio.AbstractEventLoop()
self.assertRaises(
NotImplementedError, loop.run_forever)
self.assertRaises(
NotImplementedError, loop.run_until_complete, None)
self.assertRaises(
NotImplementedError, loop.stop)
self.assertRaises(
NotImplementedError, loop.is_running)
self.assertRaises(
NotImplementedError, loop.is_closed)
self.assertRaises(
NotImplementedError, loop.close)
self.assertRaises(
NotImplementedError, loop.create_task, None)
self.assertRaises(
NotImplementedError, loop.call_later, None, None)
self.assertRaises(
NotImplementedError, loop.call_at, f, f)
self.assertRaises(
NotImplementedError, loop.call_soon, None)
self.assertRaises(
NotImplementedError, loop.time)
self.assertRaises(
NotImplementedError, loop.call_soon_threadsafe, None)
self.assertRaises(
NotImplementedError, loop.run_in_executor, f, f)
self.assertRaises(
NotImplementedError, loop.set_default_executor, f)
self.assertRaises(
NotImplementedError, loop.getaddrinfo, 'localhost', 8080)
self.assertRaises(
NotImplementedError, loop.getnameinfo, ('localhost', 8080))
self.assertRaises(
NotImplementedError, loop.create_connection, f)
self.assertRaises(
NotImplementedError, loop.create_server, f)
self.assertRaises(
NotImplementedError, loop.create_datagram_endpoint, f)
self.assertRaises(
NotImplementedError, loop.add_reader, 1, f)
self.assertRaises(
NotImplementedError, loop.remove_reader, 1)
self.assertRaises(
NotImplementedError, loop.add_writer, 1, f)
self.assertRaises(
NotImplementedError, loop.remove_writer, 1)
self.assertRaises(
NotImplementedError, loop.sock_recv, f, 10)
self.assertRaises(
NotImplementedError, loop.sock_sendall, f, 10)
self.assertRaises(
NotImplementedError, loop.sock_connect, f, f)
self.assertRaises(
NotImplementedError, loop.sock_accept, f)
self.assertRaises(
NotImplementedError, loop.add_signal_handler, 1, f)
self.assertRaises(
NotImplementedError, loop.remove_signal_handler, 1)
self.assertRaises(
NotImplementedError, loop.remove_signal_handler, 1)
self.assertRaises(
NotImplementedError, loop.connect_read_pipe, f,
mock.sentinel.pipe)
self.assertRaises(
NotImplementedError, loop.connect_write_pipe, f,
mock.sentinel.pipe)
self.assertRaises(
NotImplementedError, loop.subprocess_shell, f,
mock.sentinel)
self.assertRaises(
NotImplementedError, loop.subprocess_exec, f)
self.assertRaises(
NotImplementedError, loop.set_exception_handler, f)
self.assertRaises(
NotImplementedError, loop.default_exception_handler, f)
self.assertRaises(
NotImplementedError, loop.call_exception_handler, f)
self.assertRaises(
NotImplementedError, loop.get_debug)
self.assertRaises(
NotImplementedError, loop.set_debug, f)
class ProtocolsAbsTests(unittest.TestCase):
def test_empty(self):
f = mock.Mock()
p = asyncio.Protocol()
self.assertIsNone(p.connection_made(f))
self.assertIsNone(p.connection_lost(f))
self.assertIsNone(p.data_received(f))
self.assertIsNone(p.eof_received())
dp = asyncio.DatagramProtocol()
self.assertIsNone(dp.connection_made(f))
self.assertIsNone(dp.connection_lost(f))
self.assertIsNone(dp.error_received(f))
self.assertIsNone(dp.datagram_received(f, f))
sp = asyncio.SubprocessProtocol()
self.assertIsNone(sp.connection_made(f))
self.assertIsNone(sp.connection_lost(f))
self.assertIsNone(sp.pipe_data_received(1, f))
self.assertIsNone(sp.pipe_connection_lost(1, f))
self.assertIsNone(sp.process_exited())
class PolicyTests(unittest.TestCase):
def test_event_loop_policy(self):
policy = asyncio.AbstractEventLoopPolicy()
self.assertRaises(NotImplementedError, policy.get_event_loop)
self.assertRaises(NotImplementedError, policy.set_event_loop, object())
self.assertRaises(NotImplementedError, policy.new_event_loop)
self.assertRaises(NotImplementedError, policy.get_child_watcher)
self.assertRaises(NotImplementedError, policy.set_child_watcher,
object())
def test_get_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
self.assertIsNone(policy._local._loop)
loop = policy.get_event_loop()
self.assertIsInstance(loop, asyncio.AbstractEventLoop)
self.assertIs(policy._local._loop, loop)
self.assertIs(loop, policy.get_event_loop())
loop.close()
def test_get_event_loop_calls_set_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
with mock.patch.object(
policy, "set_event_loop",
wraps=policy.set_event_loop) as m_set_event_loop:
loop = policy.get_event_loop()
# policy._local._loop must be set through .set_event_loop()
# (the unix DefaultEventLoopPolicy needs this call to attach
# the child watcher correctly)
m_set_event_loop.assert_called_with(loop)
loop.close()
def test_get_event_loop_after_set_none(self):
policy = asyncio.DefaultEventLoopPolicy()
policy.set_event_loop(None)
self.assertRaises(RuntimeError, policy.get_event_loop)
@mock.patch('asyncio.events.threading.current_thread')
def test_get_event_loop_thread(self, m_current_thread):
def f():
policy = asyncio.DefaultEventLoopPolicy()
self.assertRaises(RuntimeError, policy.get_event_loop)
th = threading.Thread(target=f)
th.start()
th.join()
def test_new_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
loop = policy.new_event_loop()
self.assertIsInstance(loop, asyncio.AbstractEventLoop)
loop.close()
def test_set_event_loop(self):
policy = asyncio.DefaultEventLoopPolicy()
old_loop = policy.get_event_loop()
self.assertRaises(AssertionError, policy.set_event_loop, object())
loop = policy.new_event_loop()
policy.set_event_loop(loop)
self.assertIs(loop, policy.get_event_loop())
self.assertIsNot(old_loop, policy.get_event_loop())
loop.close()
old_loop.close()
def test_get_event_loop_policy(self):
policy = asyncio.get_event_loop_policy()
self.assertIsInstance(policy, asyncio.AbstractEventLoopPolicy)
self.assertIs(policy, asyncio.get_event_loop_policy())
def test_set_event_loop_policy(self):
self.assertRaises(
AssertionError, asyncio.set_event_loop_policy, object())
old_policy = asyncio.get_event_loop_policy()
policy = asyncio.DefaultEventLoopPolicy()
asyncio.set_event_loop_policy(policy)
self.assertIs(policy, asyncio.get_event_loop_policy())
self.assertIsNot(policy, old_policy)
def test_get_event_loop_returns_running_loop(self):
class Policy(asyncio.DefaultEventLoopPolicy):
def get_event_loop(self):
raise NotImplementedError
loop = None
old_policy = asyncio.get_event_loop_policy()
try:
asyncio.set_event_loop_policy(Policy())
loop = asyncio.new_event_loop()
self.assertIs(asyncio._get_running_loop(), None)
async def func():
self.assertIs(asyncio.get_event_loop(), loop)
self.assertIs(asyncio._get_running_loop(), loop)
loop.run_until_complete(func())
finally:
asyncio.set_event_loop_policy(old_policy)
if loop is not None:
loop.close()
self.assertIs(asyncio._get_running_loop(), None)
if __name__ == '__main__':
unittest.main()
| mit | 5,086,868,112,075,673,000 | 36.143116 | 81 | 0.593347 | false |
agx/git-buildpackage | gbp/rpm/linkedlist.py | 1 | 5840 | # vim: set fileencoding=utf-8 :
#
# (C) 2012 Intel Corporation <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, please see
# <http://www.gnu.org/licenses/>
"""Simple implementation of a doubly linked list"""
import collections
import gbp.log
class LinkedListNode(object):
"""Node of the linked list"""
def __init__(self, data="", prev_node=None, next_node=None):
self.prev = prev_node
self.next = next_node
self._data = data
def __str__(self):
return str(self.data)
@property
def data(self):
"""Get data stored into node"""
if self._data is None:
gbp.log.debug("BUG: referencing a deleted node!")
return("")
return self._data
def set_data(self, data):
"""
Set data stored into node
>>> node = LinkedListNode('foo')
>>> node.data
'foo'
>>> node.set_data('bar')
>>> node.data
'bar'
>>> node.set_data(None)
>>> node.data
''
"""
if data is None:
gbp.log.debug("BUG: trying to store 'None', not allowed")
data = ""
self._data = data
def delete(self):
"""Delete node"""
if self.prev:
self.prev.next = self.next
if self.next:
self.next.prev = self.prev
self._data = None
class LinkedListIterator(collections.Iterator):
"""Iterator for the linked list"""
def __init__(self, obj):
self._next = obj.first
def __next__(self):
ret = self._next
if ret:
self._next = ret.next
else:
raise StopIteration
return ret
def next(self):
return self.__next__()
class LinkedList(collections.Iterable):
"""Doubly linked list"""
def __init__(self):
self._first = None
self._last = None
def __iter__(self):
return LinkedListIterator(self)
def __len__(self):
        # start below zero so that an empty list reports a length of 0
        num = -1
        for num, data in enumerate(self):
            pass
        return num + 1
@property
def first(self):
"""Get the first node of the list"""
return self._first
def prepend(self, data):
"""
Insert to the beginning of list
>>> list = LinkedList()
>>> [str(data) for data in list]
[]
>>> node = list.prepend("foo")
>>> len(list)
1
>>> node = list.prepend("bar")
>>> [str(data) for data in list]
['bar', 'foo']
"""
if self._first is None:
new = self._first = self._last = LinkedListNode(data)
else:
new = self.insert_before(self._first, data)
return new
def append(self, data):
"""
Insert to the end of list
>>> list = LinkedList()
>>> node = list.append('foo')
>>> len(list)
1
>>> node = list.append('bar')
>>> [str(data) for data in list]
['foo', 'bar']
"""
if self._last is None:
return self.prepend(data)
else:
return self.insert_after(self._last, data)
def insert_before(self, node, data=""):
"""
Insert before a node
>>> list = LinkedList()
>>> node1 = list.append('foo')
>>> node2 = list.insert_before(node1, 'bar')
>>> node3 = list.insert_before(node1, 'baz')
>>> [str(data) for data in list]
['bar', 'baz', 'foo']
"""
new = LinkedListNode(data, prev_node=node.prev, next_node=node)
if node.prev:
node.prev.next = new
else:
self._first = new
node.prev = new
return new
def insert_after(self, node, data=""):
"""
Insert after a node
>>> list = LinkedList()
>>> node1 = list.prepend('foo')
>>> node2 = list.insert_after(node1, 'bar')
>>> node3 = list.insert_after(node1, 'baz')
>>> [str(data) for data in list]
['foo', 'baz', 'bar']
"""
new = LinkedListNode(data, prev_node=node, next_node=node.next)
if node.next:
node.next.prev = new
else:
self._last = new
node.next = new
return new
def delete(self, node):
"""
Delete node
>>> list = LinkedList()
>>> node1 = list.prepend('foo')
>>> node2 = list.insert_after(node1, 'bar')
>>> node3 = list.insert_before(node2, 'baz')
>>> [str(data) for data in list]
['foo', 'baz', 'bar']
>>> str(list.delete(node3))
'foo'
>>> [str(data) for data in list]
['foo', 'bar']
>>> print("%s" % node3)
<BLANKLINE>
>>> str(list.delete(node1))
'bar'
>>> [str(data) for data in list]
['bar']
>>> list.delete(node2)
>>> [str(data) for data in list]
[]
"""
ret = node.prev
if node is self._first:
ret = self._first = self._first.next
if node is self._last:
self._last = self._last.prev
node.delete()
return ret
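# Editor's note: an illustrative usage sketch, not part of the upstream module.
# It only combines the public methods documented above; the element values are
# made up.
def _example_usage():
    """Build a three element list, then drop the head node."""
    lst = LinkedList()
    first = lst.append('foo')
    lst.append('baz')
    lst.insert_after(first, 'bar')                       # foo, bar, baz
    assert [str(node) for node in lst] == ['foo', 'bar', 'baz']
    lst.delete(first)                                    # bar, baz
    assert [str(node) for node in lst] == ['bar', 'baz']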
# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·:
| gpl-2.0 | 7,364,602,028,743,081,000 | 26.023148 | 73 | 0.519616 | false |
MaastrichtUniversity/irods-ruleset | python/get_user_group_memberships.py | 1 | 2190 | @make(inputs=[0, 1], outputs=[2], handler=Output.STORE)
def get_user_group_memberships(ctx, show_special_groups, username):
"""
Get the group membership of a given user
Parameters
----------
ctx : Context
Combined type of a callback and rei struct.
show_special_groups : str
        'true'/'false' expected values; if "false", the special groups (public, rodsadmin, DH-ingest, DH-project-admins) are excluded from the result
username : str
The username to use for the query
Returns
-------
list
        a JSON-serializable list of group objects
"""
user_id = ""
result = ctx.callback.userNameToUserId(username, user_id)
user_id = result["arguments"][1]
output = []
for result in row_iterator("order(USER_GROUP_NAME), USER_GROUP_ID",
"USER_ID = '{}'".format(user_id),
AS_LIST,
ctx.callback):
group_name = result[0]
group_id = result[1]
group_display_name = result[0]
group_description = ""
if group_name != username:
for metadata_result in row_iterator(
"META_USER_ATTR_NAME, META_USER_ATTR_VALUE, USER_GROUP_ID, USER_GROUP_NAME",
"USER_TYPE = 'rodsgroup' AND USER_GROUP_ID = '{}'".format(group_id),
AS_LIST,
ctx.callback):
if "displayName" == metadata_result[0]:
group_display_name = metadata_result[1]
elif "description" == metadata_result[0]:
group_description = metadata_result[1]
group_object = {"groupId": group_id,
"name": group_name,
"displayName": group_display_name,
"description": group_description
}
if show_special_groups == "false":
if group_name != "public" and group_name != "rodsadmin" \
and group_name != "DH-ingest" and group_name != "DH-project-admins":
output.append(group_object)
else:
output.append(group_object)
return output
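# Editor's note: an illustrative sketch of the structure returned above; the id
# and names are invented, the real values come from the iCAT queries.
#
#     [{"groupId": "10101",
#       "name": "some-project-group",
#       "displayName": "Some project",
#       "description": "Example group description"}]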
| gpl-3.0 | -5,291,455,098,513,226,000 | 34.322581 | 96 | 0.510959 | false |
fw1121/bcbio-nextgen | bcbio/rnaseq/featureCounts.py | 3 | 2745 | import os
from bcbio.utils import (file_exists, get_in, safe_makedir)
from bcbio.pipeline import config_utils
from bcbio.log import logger
from bcbio.bam import is_paired
from bcbio.provenance import do
from bcbio.distributed.transaction import file_transaction
import bcbio.pipeline.datadict as dd
try:
import pandas as pd
except ImportError:
pd = None
def count(data):
"""
count reads mapping to genes using featureCounts
http://subread.sourceforge.net
"""
in_bam = dd.get_work_bam(data)
gtf_file = dd.get_gtf_file(data)
work_dir = dd.get_work_dir(data)
out_dir = os.path.join(work_dir, "htseq-count")
safe_makedir(out_dir)
count_file = os.path.join(out_dir, dd.get_sample_name(data)) + ".counts"
if file_exists(count_file):
return count_file
featureCounts = config_utils.get_program("featureCounts", dd.get_config(data))
paired_flag = _paired_flag(in_bam)
strand_flag = _strand_flag(data)
cmd = ("{featureCounts} -a {gtf_file} -o {tx_count_file} -s {strand_flag} "
"{paired_flag} {in_bam}")
message = ("Count reads in {tx_count_file} mapping to {gtf_file} using "
"featureCounts")
with file_transaction(data, count_file) as tx_count_file:
do.run(cmd.format(**locals()), message.format(**locals()))
fixed_count_file = _format_count_file(count_file, data)
os.rename(fixed_count_file, count_file)
return count_file
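# Editor's note: a rough illustration (not taken from a real run) of the command
# assembled in count() above for a hypothetical unstranded, paired-end sample
# named "S1"; the output path is the transactional temp file:
#
#     featureCounts -a ref-transcripts.gtf -o <tx_count_file> -s 0 -p -B -C S1.bam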
def _format_count_file(count_file, data):
"""
this cuts the count file produced from featureCounts down to
a two column file of gene ids and number of reads mapping to
each gene
"""
COUNT_COLUMN = 5
out_file = os.path.splitext(count_file)[0] + ".fixed.counts"
if file_exists(out_file):
return out_file
df = pd.io.parsers.read_table(count_file, sep="\t", index_col=0, header=1)
df_sub = df.ix[:, COUNT_COLUMN]
with file_transaction(data, out_file) as tx_out_file:
df_sub.to_csv(tx_out_file, sep="\t", index_label="id", header=False)
return out_file
def _strand_flag(data):
"""
0: unstranded 1: stranded 2: reverse stranded
"""
strand_flag = {"unstranded": "0",
"firststrand": "2",
"secondstrand": "1"}
stranded = dd.get_strandedness(data)
    assert stranded in strand_flag, ("%s is not a valid strandedness value. "
                                     "Valid values are 'firststrand', 'secondstrand', "
                                     "and 'unstranded'." % stranded)
return strand_flag[stranded]
def _paired_flag(bam_file):
"""
sets flags to handle paired-end BAM files
"""
if is_paired(bam_file):
return "-p -B -C"
else:
return ""
| mit | -7,919,623,223,516,557,000 | 31.294118 | 87 | 0.627687 | false |
Takonan/csc411_a3 | keras_nn.py | 1 | 10943 | from utils import *
# import csutils
import theano
import yaml
import matplotlib.pyplot as plt
import numpy as np
from keras.models import *
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.optimizers import SGD, RMSprop
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from keras.regularizers import l2
#from sklearn.cross_validation import cross_val_score
from sklearn.cross_validation import LabelKFold
def build_cnn():
# Use CNN, from https://github.com/fchollet/keras/blob/master/examples/mnist_cnn.py
# input image dimensions
img_rows, img_cols = 32, 32
# number of convolutional filters to use
nb_filters = 64
# size of pooling area for max pooling
nb_pool = 2
# convolution kernel size
nb_conv = 5
nb_conv2 = 3
# number of classes
nb_classes = 7
model = Sequential()
model.add(Convolution2D(nb_filters, nb_conv, nb_conv,
border_mode='valid',
input_shape=(1, img_rows, img_cols)))
model.add(Activation('relu'))
model.add(Convolution2D(nb_filters, nb_conv, nb_conv))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
model.add(Activation('relu'))
#model.add(Convolution2D(nb_filters, nb_conv2, nb_conv2))
#model.add(Activation('relu'))
#model.add(MaxPooling2D(pool_size=(nb_pool, nb_pool)))
model.add(Dropout(0.50))
model.add(Flatten())
model.add(Dense(256,W_regularizer=l2(0.001)))
model.add(Activation('relu'))
model.add(Dropout(0.50))
model.add(Dense(64, W_regularizer=l2(0.001)))
model.add(Activation('relu'))
model.add(Dropout(0.50))
model.add(Dense(nb_classes))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam')
return model
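# Editor's note: an illustrative sketch (mirroring what main() below does) of
# driving build_cnn(); X is assumed to be float32 image data and y labels 1..7.
#
#     model = build_cnn()
#     X = X.reshape(-1, 1, 32, 32)
#     y_onehot = np_utils.to_categorical(y - 1, 7)
#     model.fit(X, y_onehot, batch_size=1024, nb_epoch=1,
#               show_accuracy=True, verbose=2)
#     loss, acc = model.evaluate(X, y_onehot, show_accuracy=True, verbose=0)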
def build_mlp():
# Dense(64) is a fully-connected layer with 64 hidden units.
# in the first layer, you must specify the expected input data shape:
# here, 20-dimensional vectors.
# model = Sequential()
# model.add(Dense(64, input_dim=1024, init='uniform', activation='tanh'))
# model.add(Dropout(0.5))
# model.add(Dense(64, init='uniform', activation='tanh'))
# model.add(Dropout(0.5))
# model.add(Dense(7, init='uniform', activation='softmax'))
# sgd = SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
# model.compile(loss='categorical_crossentropy', optimizer=sgd)
# model = Sequential()
# model.add(Dense(128,input_dim=1024))
# model.add(Activation('relu'))
# model.add(Dropout(0.2))
# model.add(Dense(128))
# model.add(Activation('relu'))
# model.add(Dropout(0.2))
# model.add(Dense(7))
# model.add(Activation('softmax'))
# rms = RMSprop()
# model.compile(loss='categorical_crossentropy', optimizer=rms)
    # # Here's a Deep Dumb MLP (DDMLP) # Accuracy with 3 fold labelKFold = 0.54905982906
model = Sequential()
model.add(Dense(512, input_shape=(1024,), init='lecun_uniform'))
model.add(Activation('tanh'))
model.add(Dropout(0.25))
model.add(Dense(256, init='lecun_uniform'))
model.add(Activation('tanh'))
model.add(Dropout(0.25))
model.add(Dense(64, init='lecun_uniform'))
model.add(Activation('tanh'))
model.add(Dropout(0.25))
model.add(Dense(7, init='lecun_uniform'))
model.add(Activation('softmax'))
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
return model
def main(model_type='CNN', model_checkpoint='model.yaml', weights_checkpoint='NNweights_', useBagging=True):
inputs, targets, identities = load_data_with_identity(True)
lkf = LabelKFold(identities, n_folds=10)
if model_type == 'CNN':
inputs = inputs.reshape(inputs.shape[0], 1, 32,32) # For CNN model
inputs = preprocess_images(inputs)
print "Loaded the data..."
# Load the unlabeled data
unlabeled_inputs = load_unlabeled_data(include_mirror=False)
unlabeled_inputs = unlabeled_inputs.reshape(unlabeled_inputs.shape[0], 1, 32,32)
unlabeled_inputs = preprocess_images(unlabeled_inputs)
mega_inputs = np.append(unlabeled_inputs, inputs, axis=0)
ZCAMatrix = zca_whitening(mega_inputs, epsilon=10e-2)
print "Done computing ZCAMatrix on unlabeled + labeled input...."
print "ZCAMatrix shape: ", ZCAMatrix.shape
outfile = open('ZCAMatrix.npy','w')
np.save(outfile,ZCAMatrix)
outfile.close()
print "Saved ZCAMatrix as ZCAMatrix.npy..."
n_folds = 10
lkf = LabelKFold(identities, n_folds)
nn_list = []
score_list = np.zeros(len(lkf))
index = 0
val_loss = 1e7
val_acc = 0
batch_size = 1024
nb_classes = 7
nb_epoch = 100
training_stats = np.zeros((nb_epoch, 4))
for train_index, test_index in lkf:
#print("TRAIN:", train_index, "TEST:", test_index)
X_train, X_test = inputs[train_index], inputs[test_index]
y_train, y_test = targets[train_index], targets[test_index]
print X_train.shape
#print y_train.shape
#print "Transforming X_train, X_test with ZCA"
#X_train = np.dot(X_train.reshape(X_train.shape[0],X_train.shape[1]*X_train.shape[2]*X_train.shape[3]),ZCAMatrix)
X_train = X_train.reshape(X_train.shape[0], 1, 32,32)
#X_test = np.dot(X_test.reshape(X_test.shape[0],X_test.shape[1]*X_test.shape[2]*X_test.shape[3]),ZCAMatrix)
X_test = X_test.reshape(X_test.shape[0], 1, 32,32)
# ShowMeans(X_train[2000:2004]) # Debug: Show faces after being processed
# convert class vectors to binary class matrices
y_train_oneOfK = np_utils.to_categorical(y_train-1, nb_classes)
y_test_oneOfK = np_utils.to_categorical(y_test-1, nb_classes)
# y_train = np_utils.to_categorical(y_train-1)
# print y_train
# y_test_oneOfK = np_utils.to_categorical(y_test-1)
# print y_test
# print y_test.shape
if model_type == 'CNN':
model = build_cnn()
else:
if model_type == 'MLP':
model = build_mlp()
# model.fit(X_train, y_train, nb_epoch=20, batch_size=100, show_accuracy=True)
# score = model.evaluate(X_test, y_test_oneOfK, batch_size=100, show_accuracy=True)
val_loss = 1e7
val_acc = 0
patience = 0
for epoch_i in np.arange(nb_epoch):
model.fit(X_train, y_train_oneOfK,
batch_size=batch_size, nb_epoch=1,
show_accuracy=True, verbose=2,
validation_data=(X_test, y_test_oneOfK))
score = model.evaluate(X_test, y_test_oneOfK,
show_accuracy=True, verbose=0)
print "Score:", score
#print X_test.shape
if (score[0] < val_loss):
patience = 0
model.save_weights(weights_checkpoint+"{:d}.h5".format(index), overwrite=True)
print "Saved weights to "+weights_checkpoint+"{:d}.h5".format(index)
val_loss = score[0]
val_acc = score[1]
else:
patience += 1
if patience > 20:
print "Running out of patience...at {:d}".format(epoch_i)
break
pred = model.predict_classes(X_test)
# print "Prediction: ", pred
# print "y_test - 1: ", y_test-1
print "Manual score, fold {:d}".format(index), (pred == (y_test-1)).mean()
score_list[index] = (pred == (y_test-1)).mean()
# Randomly choose a fold to record
if index==7:
train_score = model.evaluate(X_train, y_train_oneOfK,
show_accuracy=True, verbose=0)
training_stats[epoch_i, :2] = train_score
training_stats[epoch_i, 2:] = score
outfile = open('training_stats.npy','w')
np.save(outfile, training_stats)
outfile.close()
print "Saved training stats for fold"
# Save model and weights
yaml_string = model.to_yaml()
with open(model_checkpoint, 'w+') as outfile:
outfile.write(yaml.dump(yaml_string, default_flow_style=True))
# Only save the weights when the current index score is equal to the best so far
#if (index > 0 and score_list[index] == score_list.max()):
# model.save_weights(weights_checkpoint, overwrite=True)
# print "Saved weights"
nn_list.append(model)
index += 1
# use the NN model to classify test data
print score_list
print score_list.mean()
print "Last weights validation loss {:0.4f} accuracy {:0.4f}".format(val_loss, val_acc)
# Saving validation accuracies for fold
outfile = open('{:d}fold_val_acc.npy'.format(n_folds), 'w')
np.save(outfile, score_list)
outfile.close()
return nn_list
def test_model(model_checkpoint='model.yaml', weights_checkpoint='NNweights_8.h5', useZCA=False):
model_stream = file(model_checkpoint, 'r')
test_model = model_from_yaml(yaml.safe_load(model_stream))
test_model.load_weights(weights_checkpoint)
# Load and preprocess test set
x_test = load_public_test()
x_test = preprocess_images(x_test)
if useZCA:
ZCAMatrix = np.load('ZCAMatrix.npy')
x_test = np.dot(x_test.reshape(x_test.shape[0],x_test.shape[1]*x_test.shape[2]*x_test.shape[3]),ZCAMatrix)
x_test = x_test.reshape(x_test.shape[0], 1, 32,32)
print "Processed test input with ZCAMatrix"
print "Finished loading test model"
predictions = test_model.predict_classes(x_test)
print predictions+1
save_output_csv("test_predictions.csv", predictions+1)
return
# This function is not done yet
def validate_model(model_checkpoint='model.yaml', weights_checkpoint='NNweights_5.h5', useZCA=True, Folds=10):
model_stream = file(model_checkpoint, 'r')
test_model = model_from_yaml(yaml.safe_load(model_stream))
test_model.load_weights(weights_checkpoint)
# Load and preprocess test set
x_test, y_test, identities = load_data_with_identity(True)
x_test = x_test.reshape(x_test.shape[0], 1, 32, 32)
x_test = preprocess_images(x_test)
lkf = LabelKFold(identities, n_folds=10)
if useZCA:
ZCAMatrix = np.load('ZCAMatrix.npy')
x_test = np.dot(x_test.reshape(x_test.shape[0],x_test.shape[1]*x_test.shape[2]*x_test.shape[3]),ZCAMatrix)
x_test = x_test.reshape(x_test.shape[0], 1, 32,32)
print "Processed test input with ZCAMatrix"
print "Finished loading test model"
predictions = test_model.predict_classes(x_test)
return
if __name__ == '__main__':
# np.set_printoptions(threshold=np.nan)
#print "Using board {:d}".format(csutils.get_board())
#main('CNN')
#test_model(useZCA=True)
NN_bag_predict_unlabeled()
| bsd-3-clause | 886,774,752,257,761,700 | 36.094915 | 121 | 0.629809 | false |
marscher/mdtraj | MDTraj/tests/performance/test_readwrite.py | 1 | 4297 | import os
import tempfile
import numpy as np
from noseperf.testcases import PerformanceTest
#from mdtraj import dcd, binpos, trr, netcdf, hdf5
from mdtraj import (XTCTrajectoryFile, TRRTrajectoryFile, DCDTrajectoryFile,
BINPOSTrajectoryFile, NetCDFTrajectoryFile, HDF5TrajectoryFile)
######################################################
# Base class: handles setting up a temp file and array
######################################################
class WithTemp(PerformanceTest):
n_frames = 10000
n_atoms = 100
def setUp(self):
self.xyz = np.random.randn(self.n_frames, self.n_atoms, 3).astype(np.float32)
self.fn = tempfile.mkstemp()[1]
def tearDown(self):
os.unlink(self.fn)
########################################
# Tests
########################################
class TestXTCWriter(WithTemp):
def test(self):
"Test the write speed of the XTC code (10000 frames, 100 atoms)"
with XTCTrajectoryFile(self.fn, 'w') as f:
f.write(xyz=self.xyz)
class TestXTCRead(WithTemp):
def setUp(self):
super(TestXTCRead, self).setUp()
with XTCTrajectoryFile(self.fn, 'w') as f:
f.write(xyz=self.xyz)
def test(self):
"Test the read speed of the XTC code (10000 frames, 100 atoms)"
with XTCTrajectoryFile(self.fn) as f:
f.read()
class TestDCDWrite(WithTemp):
def test(self):
"Test the write speed of the DCD code (10000 frames, 100 atoms)"
with DCDTrajectoryFile(self.fn, 'w') as f:
f.write(xyz=self.xyz)
class TestDCDRead(WithTemp):
def setUp(self):
super(TestDCDRead, self).setUp()
with DCDTrajectoryFile(self.fn, 'w') as f:
f.write(xyz=self.xyz)
def test(self):
"Test the read speed of the DCD code (10000 frames, 100 atoms)"
with DCDTrajectoryFile(self.fn) as f:
f.read()
class TestBINPOSWrite(WithTemp):
def test(self):
"Test the write speed of the BINPOS code (10000 frames, 100 atoms)"
with BINPOSTrajectoryFile(self.fn, 'w', force_overwrite=True) as f:
f.write(self.xyz)
class TestBINPOSRead(WithTemp):
def setUp(self):
super(TestBINPOSRead, self).setUp()
with BINPOSTrajectoryFile(self.fn, 'w', force_overwrite=True) as f:
f.write(self.xyz)
def test(self):
"Test the read speed of the BINPOS code (10000 frames, 100 atoms)"
with BINPOSTrajectoryFile(self.fn) as f:
xyz = f.read()
class TestTRRWriter(WithTemp):
def test(self):
"Test the write speed of the TRR code (10000 frames, 100 atoms)"
with TRRTrajectoryFile(self.fn, 'w') as f:
f.write(xyz=self.xyz)
class TestTRRRead(WithTemp):
def setUp(self):
super(TestTRRRead, self).setUp()
with TRRTrajectoryFile(self.fn, 'w') as f:
f.write(xyz=self.xyz)
def test(self):
"Test the read speed of the TRR code (10000 frames, 100 atoms)"
with TRRTrajectoryFile(self.fn) as f:
f.read()
class TestNetCDFWrite(WithTemp):
def test(self):
"Test the write speed of the NetCDF code (10000 frames, 100 atoms)"
with NetCDFTrajectoryFile(self.fn, 'w', force_overwrite=True) as f:
f.write(self.xyz)
class TestNetCDFRead(WithTemp):
def setUp(self):
super(TestNetCDFRead, self).setUp()
with NetCDFTrajectoryFile(self.fn, 'w', force_overwrite=True) as f:
f.write(self.xyz)
def test(self):
"Test the read speed of the NetCDF code (10000 frames, 100 atoms)"
with NetCDFTrajectoryFile(self.fn) as f:
f.read()
class TestHDF5Write(WithTemp):
def test(self):
"Test the write speed of the hdf5 code (10000 frames, 100 atoms)"
with HDF5TrajectoryFile(self.fn, 'w', force_overwrite=True) as f:
f.write(self.xyz)
class TestHDF5Read(WithTemp):
def setUp(self):
super(TestHDF5Read, self).setUp()
with HDF5TrajectoryFile(self.fn, 'w', force_overwrite=True) as f:
f.write(self.xyz)
def test(self):
"Test the read speed of the hdf5 code (10000 frames, 100 atoms)"
with HDF5TrajectoryFile(self.fn) as f:
f.read()
| lgpl-2.1 | -3,462,826,742,070,323,000 | 29.048951 | 85 | 0.605306 | false |
Manojkumar91/odoo_inresto | addons/account_payment/account_move_line.py | 4 | 4410 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.exceptions import UserError
from operator import itemgetter
class account_move_line(osv.osv):
_inherit = "account.move.line"
# delegate to parent, used for local fields.function redefinition
def _amount_to_pay(self, cr, uid, ids, field_names, args, context=None):
return {
id: value['amount_residual']
for id, value in self._amount_residual(cr, uid, ids, field_names, args,
context=context).items()
}
def _to_pay_search(self, cr, uid, obj, name, args, context=None):
if not args:
return []
line_obj = self.pool.get('account.move.line')
query = line_obj._query_get(cr, uid, context={})
where = ' and '.join(map(lambda x: '''(SELECT
CASE WHEN l.amount_currency < 0
THEN - l.amount_currency
ELSE l.credit
END - coalesce(sum(pl.amount_currency), 0)
FROM payment_line pl
INNER JOIN payment_order po ON (pl.order_id = po.id)
WHERE move_line_id = l.id
AND po.state != 'cancel'
) %(operator)s %%s ''' % {'operator': x[1]}, args))
sql_args = tuple(map(itemgetter(2), args))
cr.execute(('''SELECT id
FROM account_move_line l
WHERE account_id IN (select id
FROM account_account
WHERE type=%s AND active)
AND reconcile_id IS null
AND credit > 0
AND ''' + where + ' and ' + query), ('payable',)+sql_args )
res = cr.fetchall()
if not res:
return [('id', '=', '0')]
return [('id', 'in', map(lambda x:x[0], res))]
def line2bank(self, cr, uid, ids, payment_type=None, context=None):
"""
Try to return for each Ledger Posting line a corresponding bank
account according to the payment type. This work using one of
the bank of the partner defined on the invoice eventually
associated to the line.
Return the first suitable bank for the corresponding partner.
"""
payment_mode_obj = self.pool.get('payment.mode')
line2bank = {}
if not ids:
return {}
bank_type = payment_mode_obj.suitable_bank_types(cr, uid, payment_type,
context=context)
for line in self.browse(cr, uid, ids, context=context):
line2bank[line.id] = False
if line.invoice and line.invoice.partner_bank_id:
line2bank[line.id] = line.invoice.partner_bank_id.id
elif line.partner_id:
if not line.partner_id.bank_ids:
line2bank[line.id] = False
else:
for bank in line.partner_id.bank_ids:
if bank.state in bank_type:
line2bank[line.id] = bank.id
break
if not line2bank.get(line.id) and line.partner_id.bank_ids:
line2bank[line.id] = line.partner_id.bank_ids[0].id
else:
raise UserError(_('There is no partner defined on the entry line.'))
return line2bank
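    # Editor's note (illustrative only): a caller such as the payment order
    # wizard maps selected move lines to partner bank accounts roughly like
    #
    #     line2bank = move_line_obj.line2bank(cr, uid, line_ids,
    #                                         payment_type=mode_id, context=ctx)
    #
    # where mode_id is whatever payment.mode.suitable_bank_types() expects, and
    # the result is {move_line_id: partner_bank_id or False}.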
_columns = {
'amount_to_pay': fields.function(_amount_to_pay,
type='float', string='Amount to pay', fnct_search=_to_pay_search),
}
| agpl-3.0 | -6,033,715,039,825,717,000 | 41.403846 | 84 | 0.562812 | false |
japaric/npm2exheres | npm2exheres/validate.py | 1 | 4145 | from npm2exheres.print import print_warn
import os
import re
def exist_exheres(pn, pv):
exheres_path = '{}/{}-{}.exheres-0'.format(pn, pn, pv)
return os.path.exists(exheres_path)
def filter_versions(versions, verspec):
"""
>>> filter_versions(['1.2.2', '1.2.3', '1.2.4', '1.3'], '~>1.2.3')
['1.2.3', '1.2.4']
"""
(min_version, max_version) = verspec_to_minmax(verspec)
return list(filter(lambda v: lte(v, max_version) and gte(v, min_version),
versions))
def gte(this, that):
"""
>>> gte('1.2.3', None)
True
>>> gte('1.3.1', [1,3,0])
True
>>> gte('1.3', [1,3,0])
True
>>> gte('1.3', (1,3,0))
False
"""
if not that:
return True
this = list(map(int, this.split('.')))
while len(this) < 3:
this.append(0)
if isinstance(that, list):
return this >= that
else:
return this > list(that)
def lte(this, that):
"""
>>> lte('1.2.3', None)
True
>>> lte('1.2.9', [1,3,0])
True
>>> lte('1.3', [1,3,0])
True
>>> lte('1.3', (1,3,0))
False
"""
if not that:
return True
this = list(map(int, this.split('.')))
while len(this) < 3:
this.append(0)
if isinstance(that, list):
return this <= that
else:
return this < list(that)
def valid_licenses():
return os.listdir('/var/db/paludis/repositories/arbor/licences')
def validate_license(license):
"""
>>> validate_license('BSD-3')
True
>>> validate_license('GPL-3')
True
>>> validate_license('MIT')
True
>>> validate_license('MIT/X11')
False
"""
return license in valid_licenses()
def validate_params(pn, pv, params, messages):
if not params['licenses']:
print_warn('{}-{}: missing license'.format(pn, pv), messages)
else:
licenses = params['licenses'].split(' ')
for license in licenses:
if not validate_license(license):
print_warn('{}-{}: unknown license {}'.format(pn, pv, license),
messages)
if not params['summary']:
print_warn('{}-{}: missing summary'.format(pn, pv), messages)
elif len(params['summary']) > 70:
print_warn('{}-{}: summary is too long'.format(pn, pv), messages)
def verspec_to_minmax(verspec):
"""
>>> verspec_to_minmax('~>1.2.3')
([1, 2, 3], (1, 3, 0))
>>> verspec_to_minmax('~>1.2')
([1, 2, 0], (2, 0, 0))
>>> verspec_to_minmax('<1.2.3')
(None, (1, 2, 3))
>>> verspec_to_minmax('~>1.2.3|~>1.3')
([1, 2, 3], (2, 0, 0))
"""
if '|' in verspec:
[a, b] = verspec.split('|')
a = list(map(int, a.lstrip('~>').split('.')))
b = list(map(int, b.lstrip('~>').split('.')))
assert(a[1] + 1 == b[1])
min_v = a
max_v = tuple([b[0] + 1, 0, 0])
return (min_v, max_v)
if '&' in verspec:
[min_v, max_v] = verspec.split('&')
if '~' in min_v:
verspec = min_v
elif '~' in max_v:
verspec = max_v
else:
verspec = ''
if verspec.startswith('~>'):
min_v = list(map(int, verspec.lstrip('~>').split('.')))
max_v = min_v[0:-1]
max_v[-1] += 1
min_open = False
max_open = True
elif verspec.startswith('='):
min_v = verspec
max_v = verspec
elif verspec.startswith('>'):
min_v = verspec
max_v = None
elif verspec.startswith('<'):
min_v = None
max_v = verspec
if min_v:
if isinstance(min_v, str):
min_open = not '=' in min_v
min_v = list(map(int, min_v.lstrip('>=').split('.')))
while len(min_v) < 3:
min_v.append(0)
if min_open:
min_v = tuple(min_v)
if max_v:
if isinstance(max_v, str):
max_open = not '=' in max_v
max_v = list(map(int, max_v.lstrip('<=').split('.')))
while len(max_v) < 3:
max_v.append(0)
if max_open:
max_v = tuple(max_v)
return (min_v, max_v)
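# Editor's note: the doctests above do not cover the plain range form; as a
# rough illustration (not a doctest), ">=1.2&<2.0" falls through to the final
# branch and yields ([1, 2, 0], (2, 0, 0)) -- an inclusive lower bound (list)
# and an exclusive upper bound (tuple).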
| mit | -8,177,797,979,655,093,000 | 22.551136 | 79 | 0.486128 | false |
keedio/hue | desktop/core/ext-py/Mako-0.8.1/mako/ext/turbogears.py | 38 | 2108 | # ext/turbogears.py
# Copyright (C) 2006-2012 the Mako authors and contributors <see AUTHORS file>
#
# This module is part of Mako and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import re, inspect
from mako.lookup import TemplateLookup
from mako.template import Template
class TGPlugin(object):
"""TurboGears compatible Template Plugin."""
def __init__(self, extra_vars_func=None, options=None, extension='mak'):
self.extra_vars_func = extra_vars_func
self.extension = extension
if not options:
options = {}
# Pull the options out and initialize the lookup
lookup_options = {}
for k, v in options.items():
if k.startswith('mako.'):
lookup_options[k[5:]] = v
elif k in ['directories', 'filesystem_checks', 'module_directory']:
lookup_options[k] = v
self.lookup = TemplateLookup(**lookup_options)
self.tmpl_options = {}
# transfer lookup args to template args, based on those available
# in getargspec
for kw in inspect.getargspec(Template.__init__)[0]:
if kw in lookup_options:
self.tmpl_options[kw] = lookup_options[kw]
def load_template(self, templatename, template_string=None):
"""Loads a template from a file or a string"""
if template_string is not None:
return Template(template_string, **self.tmpl_options)
# Translate TG dot notation to normal / template path
if '/' not in templatename:
templatename = '/' + templatename.replace('.', '/') + '.' +\
self.extension
# Lookup template
return self.lookup.get_template(templatename)
def render(self, info, format="html", fragment=False, template=None):
if isinstance(template, str):
template = self.load_template(template)
# Load extra vars func if provided
if self.extra_vars_func:
info.update(self.extra_vars_func())
return template.render(**info)
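# Editor's note: an illustrative standalone sketch (paths and template names are
# made up); option handling follows __init__ above and dot notation is resolved
# by load_template().
def _example_render():
    plugin = TGPlugin(options={'mako.directories': ['/tmp/templates']},
                      extension='mak')
    # 'pages.index' is translated to the template path '/pages/index.mak'
    return plugin.render({'name': 'world'}, template='pages.index')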
| apache-2.0 | 1,500,685,392,377,851,000 | 35.982456 | 79 | 0.620493 | false |
whygee/oppia | core/domain/stats_domain_test.py | 15 | 6143 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'Sean Lip'
from core.domain import exp_domain
from core.domain import exp_services
from core.domain import event_services
from core.domain import stats_domain
from core.tests import test_utils
import feconf
class StateRuleAnswerLogUnitTests(test_utils.GenericTestBase):
"""Test the state rule answer log domain object."""
DEFAULT_RULESPEC_STR = exp_domain.DEFAULT_RULESPEC_STR
def test_state_rule_answer_logs(self):
exp = exp_domain.Exploration.create_default_exploration(
'eid', 'title', 'category')
exp_services.save_new_exploration('user_id', exp)
state_name = exp.init_state_name
answer_log = stats_domain.StateRuleAnswerLog.get(
'eid', state_name, self.DEFAULT_RULESPEC_STR)
self.assertEquals(answer_log.answers, {})
self.assertEquals(answer_log.total_answer_count, 0)
self.assertEquals(answer_log.get_top_answers(2), [])
event_services.AnswerSubmissionEventHandler.record(
'eid', 1, state_name, self.DEFAULT_RULESPEC_STR, 'answer1')
answer_log = stats_domain.StateRuleAnswerLog.get(
'eid', state_name, self.DEFAULT_RULESPEC_STR)
self.assertEquals(answer_log.answers, {'answer1': 1})
self.assertEquals(answer_log.total_answer_count, 1)
self.assertEquals(answer_log.get_top_answers(1), [('answer1', 1)])
self.assertEquals(answer_log.get_top_answers(2), [('answer1', 1)])
event_services.AnswerSubmissionEventHandler.record(
'eid', 1, state_name, self.DEFAULT_RULESPEC_STR, 'answer1')
event_services.AnswerSubmissionEventHandler.record(
'eid', 1, state_name, self.DEFAULT_RULESPEC_STR, 'answer2')
answer_log = stats_domain.StateRuleAnswerLog.get(
'eid', state_name, self.DEFAULT_RULESPEC_STR)
self.assertEquals(answer_log.answers, {'answer1': 2, 'answer2': 1})
self.assertEquals(answer_log.total_answer_count, 3)
self.assertEquals(
answer_log.get_top_answers(1), [('answer1', 2)])
self.assertEquals(
answer_log.get_top_answers(2), [('answer1', 2), ('answer2', 1)])
event_services.AnswerSubmissionEventHandler.record(
'eid', 1, state_name, self.DEFAULT_RULESPEC_STR, 'answer2')
event_services.AnswerSubmissionEventHandler.record(
'eid', 1, state_name, self.DEFAULT_RULESPEC_STR, 'answer2')
answer_log = stats_domain.StateRuleAnswerLog.get(
'eid', state_name, self.DEFAULT_RULESPEC_STR)
self.assertEquals(answer_log.answers, {'answer1': 2, 'answer2': 3})
self.assertEquals(answer_log.total_answer_count, 5)
self.assertEquals(
answer_log.get_top_answers(1), [('answer2', 3)])
self.assertEquals(
answer_log.get_top_answers(2), [('answer2', 3), ('answer1', 2)])
def test_recording_answer_for_different_rules(self):
exp = exp_domain.Exploration.create_default_exploration(
'eid', 'title', 'category')
exp_services.save_new_exploration('user_id', exp)
rule = exp_domain.RuleSpec.from_dict({
'rule_type': 'LessThan',
'inputs': {'x': 5}
})
rule_str = rule.stringify_classified_rule()
state_name = exp.init_state_name
event_services.AnswerSubmissionEventHandler.record(
'eid', 1, state_name, self.DEFAULT_RULESPEC_STR, 'answer1')
event_services.AnswerSubmissionEventHandler.record(
'eid', 1, state_name, rule_str, 'answer2')
default_rule_answer_log = stats_domain.StateRuleAnswerLog.get(
'eid', state_name, self.DEFAULT_RULESPEC_STR)
self.assertEquals(default_rule_answer_log.answers, {'answer1': 1})
self.assertEquals(default_rule_answer_log.total_answer_count, 1)
other_rule_answer_log = stats_domain.StateRuleAnswerLog.get(
'eid', state_name, rule_str)
self.assertEquals(other_rule_answer_log.answers, {'answer2': 1})
self.assertEquals(other_rule_answer_log.total_answer_count, 1)
def test_resolving_answers(self):
exp = exp_domain.Exploration.create_default_exploration(
'eid', 'title', 'category')
exp_services.save_new_exploration('user_id', exp)
state_name = exp.init_state_name
answer_log = stats_domain.StateRuleAnswerLog.get(
'eid', state_name, self.DEFAULT_RULESPEC_STR)
self.assertEquals(answer_log.answers, {})
event_services.AnswerSubmissionEventHandler.record(
'eid', 1, state_name, self.DEFAULT_RULESPEC_STR, 'answer1')
event_services.AnswerSubmissionEventHandler.record(
'eid', 1, state_name, self.DEFAULT_RULESPEC_STR, 'answer1')
event_services.AnswerSubmissionEventHandler.record(
'eid', 1, state_name, self.DEFAULT_RULESPEC_STR, 'answer2')
answer_log = stats_domain.StateRuleAnswerLog.get(
'eid', state_name, self.DEFAULT_RULESPEC_STR)
self.assertEquals(answer_log.answers, {'answer1': 2, 'answer2': 1})
self.assertEquals(answer_log.total_answer_count, 3)
event_services.DefaultRuleAnswerResolutionEventHandler.record(
'eid', state_name, ['answer1'])
answer_log = stats_domain.StateRuleAnswerLog.get(
'eid', state_name, self.DEFAULT_RULESPEC_STR)
self.assertEquals(answer_log.answers, {'answer2': 1})
self.assertEquals(answer_log.total_answer_count, 1)
| apache-2.0 | 4,395,155,295,401,251,000 | 42.878571 | 76 | 0.661566 | false |
gbaty/shiboken2 | tests/samplebinding/map_test.py | 6 | 2795 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of the Shiboken Python Bindings Generator project.
#
# Copyright (C) 2013 Digia Plc and/or its subsidiary(-ies).
#
# Contact: PySide team <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public License
# version 2.1 as published by the Free Software Foundation. Please
# review the following information to ensure the GNU Lesser General
# Public License version 2.1 requirements will be met:
# http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
'''Test cases for std::map container conversions'''
import unittest
from sample import MapUser
class ExtendedMapUser(MapUser):
def __init__(self):
MapUser.__init__(self)
self.create_map_called = False
def createMap(self):
self.create_map_called = True
return {'two' : (complex(2.2, 2.2), 2),
'three' : (complex(3.3, 3.3), 3),
'five' : (complex(5.5, 5.5), 5),
'seven' : (complex(7.7, 7.7), 7)}
class MapConversionTest(unittest.TestCase):
'''Test case for std::map container conversions'''
def testReimplementedVirtualMethodCall(self):
'''Test if a Python override of a virtual method is correctly called from C++.'''
mu = ExtendedMapUser()
map_ = mu.callCreateMap()
self.assert_(mu.create_map_called)
self.assertEqual(type(map_), dict)
for key, value in map_.items():
self.assertEqual(type(key), str)
self.assertEqual(type(value[0]), complex)
self.assertEqual(type(value[1]), int)
def testConversionInBothDirections(self):
'''Test converting a map from Python to C++ and back again.'''
mu = MapUser()
map_ = {'odds' : [2, 4, 6], 'evens' : [3, 5, 7], 'primes' : [3, 4, 6]}
mu.setMap(map_)
result = mu.getMap()
self.assertEqual(result, map_)
def testConversionMapIntKeyValueTypeValue(self):
'''C++ signature: MapUser::passMapIntValueType(const std::map<int, const ByteArray>&)'''
mu = MapUser()
map_ = {0 : 'string'}
result = mu.passMapIntValueType(map_)
self.assertEqual(map_, result)
if __name__ == '__main__':
unittest.main()
| gpl-2.0 | 2,183,458,481,055,712,000 | 36.266667 | 96 | 0.649374 | false |
taigetco/cassandra_read | pylib/cqlshlib/displaying.py | 62 | 3634 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from collections import defaultdict
RED = '\033[0;1;31m'
GREEN = '\033[0;1;32m'
YELLOW = '\033[0;1;33m'
BLUE = '\033[0;1;34m'
MAGENTA = '\033[0;1;35m'
CYAN = '\033[0;1;36m'
WHITE = '\033[0;1;37m'
DARK_MAGENTA = '\033[0;35m'
ANSI_RESET = '\033[0m'
def colorme(bval, colormap, colorkey):
if colormap is None:
colormap = DEFAULT_VALUE_COLORS
return FormattedValue(bval, colormap[colorkey] + bval + colormap['reset'])
class FormattedValue:
def __init__(self, strval, coloredval=None, displaywidth=None):
self.strval = strval
if coloredval is None:
coloredval = strval
self.coloredval = coloredval
if displaywidth is None:
displaywidth = len(strval)
# displaywidth is useful for special unicode characters whose terminal
# display width differs from the string length (e.g. wide glyphs)
self.displaywidth = displaywidth
def __len__(self):
return len(self.strval)
def _pad(self, width, fill=' '):
if width > self.displaywidth:
return fill * (width - self.displaywidth)
else:
return ''
def ljust(self, width, fill=' ', color=False):
"""
Similar to self.strval.ljust(width), but takes expected terminal
display width into account for special characters, and does not
take color escape codes into account.
"""
if color:
return self.color_ljust(width, fill=fill)
return self.strval + self._pad(width, fill)
def rjust(self, width, fill=' ', color=False):
"""
Similar to self.strval.rjust(width), but takes expected terminal
display width into account for special characters, and does not
take color escape codes into account.
"""
if color:
return self.color_rjust(width, fill=fill)
return self._pad(width, fill) + self.strval
def color_rjust(self, width, fill=' '):
"""
Similar to self.rjust(width), but uses this value's colored
representation, and does not take color escape codes into account
in determining width.
"""
return self._pad(width, fill) + self.coloredval
def color_ljust(self, width, fill=' '):
"""
Similar to self.ljust(width), but uses this value's colored
representation, and does not take color escape codes into account
in determining width.
"""
return self.coloredval + self._pad(width, fill)
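# Illustrative example (hypothetical, not part of the original module):
#   fv = colorme('42', DEFAULT_VALUE_COLORS, 'int')
#   fv.rjust(10)       -> the plain value padded to width 10, no colour codes
#   fv.color_rjust(10) -> the same padding followed by the ANSI-coloured value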
DEFAULT_VALUE_COLORS = dict(
default=YELLOW,
text=YELLOW,
error=RED,
blob=DARK_MAGENTA,
timestamp=GREEN,
int=GREEN,
float=GREEN,
decimal=GREEN,
inet=GREEN,
boolean=GREEN,
uuid=GREEN,
collection=BLUE,
reset=ANSI_RESET,
)
COLUMN_NAME_COLORS = defaultdict(lambda: MAGENTA,
error=RED,
blob=DARK_MAGENTA,
reset=ANSI_RESET,
)
| apache-2.0 | 4,569,019,769,568,256,000 | 31.446429 | 78 | 0.652174 | false |
makinacorpus/ionyweb | ionyweb/page_app/page_gallery_images/views.py | 2 | 1936 | # -*- coding: utf-8 -*-
from django.conf import settings
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from ionyweb.website.rendering.utils import render_view
from ionyweb.website.rendering.medias import *
from models import Album
RENDER_MEDIAS = (
# App CSS
CSSMedia('page_gallery_images.css'),
# Lightbox CSS
CSSMedia('fancybox/jquery.fancybox.css', prefix_file='js'),
# Lightbox JS
JSMedia('fancybox/jquery.fancybox.js'),
JSMedia('ionyweb.lightbox.js'),
# Actions JS
JSAdminMedia('page_gallery_images_actions.js'),
)
def index_view(request, page_app, album_slug=None):
"""
Display all albums of the app gallery.
"""
context_instance=RequestContext(request)
# Index View -- Display all albums
if not album_slug:
return render_view('page_gallery_images/index.html',
{'object': page_app},
RENDER_MEDIAS,
context_instance=context_instance)
# Album View -- Display all images
else:
album = get_object_or_404(Album, slug=album_slug)
# Displaying infos
infos = {}
if page_app.show_album_title:
infos['title'] = album.title
infos['title_rule'] = page_app.album_title_rule
# --
# Save object title in request
# for plugin 'Breadcrumb'
# --
if settings.BREADCRUMB_PLUGIN in settings.INSTALLED_APPS:
setattr(request, settings.BREADCRUMB_OBJECT_TITLE, album.title)
# --
return render_view('page_gallery_images/album.html',
{'object': page_app,
'infos': infos,
'album': album},
RENDER_MEDIAS,
title=album.title,
context_instance=context_instance)
| bsd-3-clause | -655,532,076,070,607,900 | 32.964912 | 75 | 0.578512 | false |
ashiklom/studyGroup | lessons/RISE/slideUtilities.py | 2 | 2048 | from IPython.display import display_html
from IPython.display import display
import matplotlib.pyplot as plt
from IPython.display import HTML
import binascii
import base64
def hide_code_in_slideshow():
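# Emits a uniquely-id'd placeholder div plus a JS snippet that walks up the
# DOM to the enclosing notebook cell and tags its input area with the
# 'hide-in-slideshow' class, so the code (but not its output) is hidden when
# the notebook is presented as slides.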
import os
uid = binascii.hexlify(os.urandom(8)).decode('UTF-8')
html = """<div id="%s"></div>
<script type="text/javascript">
$(function(){
var p = $("#%s");
if (p.length==0) return;
while (!p.hasClass("cell")) {
p=p.parent();
if (p.prop("tagName") =="body") return;
}
var cell = p;
cell.find(".input").addClass("hide-in-slideshow")
});
</script>""" % (uid, uid)
display_html(html, raw=True)
return html
##########################
# python notebook does not support matplotlib animations
# so these functions create an .mp4 video and then display
# it using inline HTML
##########################
# Source: http://nbviewer.ipython.org/url/jakevdp.github.io/downloads/notebooks/AnimationEmbedding.ipynb
# updated to python3, now uses package base64 to do the encoding, reading comments from this post:
# http://jakevdp.github.io/blog/2013/05/12/embedding-matplotlib-animations/
from tempfile import NamedTemporaryFile
VIDEO_TAG = """<video controls>
<source src="data:video/x-m4v;base64,{0}" type="video/mp4">
Your browser does not support the video tag.
</video>"""
def anim_to_html(anim):
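# Renders the animation once to a temporary .mp4 and caches the base64-encoded
# bytes on the animation object. Note (assumption): saving with
# extra_args=['-vcodec', 'libx264'] requires an ffmpeg build with libx264
# available to matplotlib's animation writer.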
if not hasattr(anim, '_encoded_video'):
with NamedTemporaryFile(suffix='.mp4') as f:
anim.save(f.name, fps=20, extra_args=['-vcodec', 'libx264'])
video = open(f.name, "rb").read()
anim._encoded_video = base64.b64encode(video).decode('utf-8')
return VIDEO_TAG.format(anim._encoded_video)
def display_animation(anim):
# plt.close(anim._fig)
return HTML(anim_to_html(anim))
def display_saved_anim(fname):
with open(fname,'rb') as f:
video = f.read()
return HTML(VIDEO_TAG.format(base64.b64encode(video).decode('utf-8')))
| apache-2.0 | 327,447,022,830,235,700 | 31.507937 | 104 | 0.626465 | false |
js0701/chromium-crosswalk | tools/telemetry/telemetry/__init__.py | 7 | 1861 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A library for cross-platform browser tests."""
import os
import sys
# Ensure Python >= 2.7.
if sys.version_info < (2, 7):
print >> sys.stderr, 'Need Python 2.7 or greater.'
sys.exit(-1)
from telemetry.internal.util import global_hooks
global_hooks.InstallHooks()
# Add dependencies into our path.
from telemetry.core import util
def _AddDirToPythonPath(*path_parts):
path = os.path.abspath(os.path.join(*path_parts))
if os.path.isdir(path) and path not in sys.path:
# Some call sites that use Telemetry assume that sys.path[0] is the
# directory containing the script, so we add these extra paths to right
# after sys.path[0].
sys.path.insert(1, path)
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'altgraph')
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'mock')
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'modulegraph')
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'mox3')
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'pexpect')
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'png')
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'pyfakefs')
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'pyserial')
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'typ')
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'webpagereplay')
_AddDirToPythonPath(util.GetTelemetryThirdPartyDir(), 'websocket-client')
_AddDirToPythonPath(os.path.dirname(__file__), os.path.pardir, os.path.pardir,
os.path.pardir, 'build', 'android')
_AddDirToPythonPath(os.path.dirname(__file__), os.path.pardir, os.path.pardir,
os.path.pardir, 'third_party', 'catapult', 'tracing')
| bsd-3-clause | 4,260,652,117,494,071,000 | 40.355556 | 78 | 0.748522 | false |
hajgato/easybuild-easyblocks | easybuild/easyblocks/o/openssl.py | 3 | 2597 | ##
# Copyright 2009-2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for OpenSSL, implemented as an easyblock
@author: Kenneth Hoste (Ghent University)
@author: Jens Timmerman (Ghent University)
"""
import os
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.filetools import run_cmd
class EB_OpenSSL(ConfigureMake):
"""Support for building OpenSSL"""
def configure_step(self, cmd_prefix=''):
"""
Configure step
"""
cmd = "%s %s./config --prefix=%s threads shared %s" % (self.cfg['preconfigopts'], cmd_prefix,
self.installdir, self.cfg['configopts'])
(out, _) = run_cmd(cmd, log_all=True, simple=False)
return out
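# For illustration, the assembled command has the shape (values hypothetical):
#   <preconfigopts> ./config --prefix=<installdir> threads shared <configopts>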
def sanity_check_step(self):
"""Custom sanity check"""
libdir = None
for libdir_cand in ['lib', 'lib64']:
if os.path.exists(os.path.join(self.installdir, libdir_cand)):
libdir = libdir_cand
if libdir is None:
self.log.error("Failed to determine library directory.")
custom_paths = {
'files': [os.path.join(libdir, x) for x in ['engines', 'libcrypto.a', 'libcrypto.so',
'libcrypto.so.1.0.0', 'libssl.a',
'libssl.so', 'libssl.so.1.0.0']] +
['bin/openssl'],
'dirs': [],
}
super(EB_OpenSSL, self).sanity_check_step(custom_paths=custom_paths)
| gpl-2.0 | 5,376,023,745,489,699,000 | 35.577465 | 103 | 0.624952 | false |
theJollySin/mazelib | test/test_generators.py | 1 | 10866 | import numpy as np
import unittest
from mazelib.mazelib import Maze
from mazelib.generate.AldousBroder import AldousBroder
from mazelib.generate.BacktrackingGenerator import BacktrackingGenerator
from mazelib.generate.BinaryTree import BinaryTree
from mazelib.generate.CellularAutomaton import CellularAutomaton
from mazelib.generate.Division import Division
from mazelib.generate.DungeonRooms import DungeonRooms
from mazelib.generate.Ellers import Ellers
from mazelib.generate.GrowingTree import GrowingTree
from mazelib.generate.HuntAndKill import HuntAndKill
from mazelib.generate.Kruskal import Kruskal
from mazelib.generate.Prims import Prims
from mazelib.generate.Sidewinder import Sidewinder
from mazelib.generate.TrivialMaze import TrivialMaze
from mazelib.generate.Wilsons import Wilsons
class GeneratorsTest(unittest.TestCase):
def test_abstract_constructor(self):
""" test the MazeGenAlgo constructor """
# example 1 of maze dimension definitions
m = Maze()
m.generator = Prims(3, 3)
assert m.generator.h == 3
assert m.generator.w == 3
assert m.generator.H == 7
assert m.generator.W == 7
# example 2 of maze dimension definitions
m.generator = Prims(24, 12)
assert m.generator.h == 24
assert m.generator.w == 12
assert m.generator.H == 49
assert m.generator.W == 25
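# In both examples the rendered grid dimensions follow (H, W) = (2*h + 1, 2*w + 1).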
# ensure assertions are failed when invalid maze dimensions are provided
self.assertRaises(AssertionError, Prims, 2, 2)
self.assertRaises(AssertionError, Prims, 0, 2)
self.assertRaises(AssertionError, Prims, -2, 3)
self.assertRaises(AssertionError, Prims, 224, -2)
def test_aldous_broder(self):
""" test the AlgousBroder method generates a reasonably sane maze """
m = Maze()
m.generator = AldousBroder(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_backtracking_generator(self):
""" test the Backtracking method generates a reasonably sane maze """
m = Maze()
m.generator = BacktrackingGenerator(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_binary_tree(self):
""" test the Binary Tree method generates a reasonably sane maze """
# try without a skew parameter
m = Maze()
m.generator = BinaryTree(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
# try with a skew parameter
m = Maze()
m.generator = BinaryTree(4, 5, 'NW')
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_cellular_automaton(self):
""" test the Cellulator Automaton method generates a reasonably sane maze """
m = Maze()
m.generator = CellularAutomaton(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
def test_division(self):
""" test the Division method generates a reasonably sane maze """
m = Maze()
m.generator = Division(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_dungeon_rooms_grid(self):
""" test Dungeon Rooms maze-creation mazes a reasonably sane maze """
g = np.ones((7, 7), dtype=np.int8)
g[1] = [1, 1, 1, 1, 1, 1, 1]
g[2] = [1, 1, 1, 1, 1, 1, 1]
g[3] = [1, 1, 0, 0, 0, 1, 1]
g[4] = [1, 1, 0, 0, 0, 1, 1]
g[5] = [1, 1, 0, 0, 0, 1, 1]
m = Maze()
m.generator = DungeonRooms(4, 4, grid=g)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
def test_dungeon_reconnect_maze(self):
""" test Dungeon Rooms maze-creation mazes a reasonably sane maze when reconnecting a maze """
g = np.ones((7, 7), dtype=np.int8)
g[1] = [1, 0, 0, 0, 1, 0, 1]
g[2] = [1, 0, 1, 1, 1, 0, 1]
g[3] = [1, 0, 0, 0, 1, 0, 1]
g[4] = [1, 0, 0, 0, 1, 0, 1]
g[5] = [1, 0, 0, 0, 1, 0, 1]
m = Maze()
m.generator = DungeonRooms(4, 4, grid=g)
m.generator.reconnect_maze()
assert boundary_is_solid(m.generator.grid)
assert all_passages_open(m.generator.grid)
def test_dungeon_rooms_random_rooms(self):
""" test Dungeon Rooms maze-creation mazes a reasonably sane maze when generating some random rooms """
m = Maze()
m.generator = DungeonRooms(4, 4, rooms=[[(1, 1), (3, 3)]], hunt_order='random')
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
def test_dungeon_rooms_serpentine_rooms(self):
""" test DungeonRooms mazes are reasonably when generating some random rooms in a serpentine fashion """
m = Maze()
m.generator = DungeonRooms(4, 4, rooms=[[(1, 1), (3, 3)]], hunt_order='serpentine')
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
def test_ellers(self):
""" test the Ellers method generates a reasonably sane maze """
m = Maze()
m.generator = Ellers(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_growing_tree(self):
""" test the Growing Tree method generates a reasonably sane maze """
m = Maze()
m.generator = GrowingTree(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_hunt_and_kill_random_order(self):
""" test the Hunt and Kill method generates a reasonably sane maze, using the random order pathway """
m = Maze()
m.generator = HuntAndKill(4, 5, 'random')
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_hunt_and_kill_serpentine_order(self):
""" test the Hunt and Kill method generates a reasonably sane maze, using the serpentine pathway """
m = Maze()
m.generator = HuntAndKill(4, 5, 'serpentine')
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_kruskal(self):
""" test the Kruskal method generates a reasonably sane maze """
m = Maze()
m.generator = Kruskal(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_prims(self):
""" test the Prims method generates a reasonably sane maze """
m = Maze()
m.generator = Prims(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_sidewinder(self):
""" test the Sidewinder method generates a reasonably sane maze """
m = Maze()
m.generator = Sidewinder(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_trivial_maze_spiral(self):
""" test that the trivial/spiral maze is reasonably sane """
m = Maze()
m.generator = TrivialMaze(4, 5)
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_trivial_maze_serpentine(self):
""" test that the trivial/spiral maze is reasonably sane when using the serpentine alternative
run this test enough times to trip the different skewness parameters
"""
for _ in range(10):
m = Maze()
m.generator = TrivialMaze(4, 5, 'serpentine')
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_wilsons_random_order(self):
""" test the Wilson method generates a reasonably sane maze, using the random order pathway """
m = Maze()
m.generator = Wilsons(4, 5, hunt_order='random')
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def test_wilsons_serpentine_order(self):
""" test the Wilson method generates a reasonably sane maze, using the serpentine pathway """
m = Maze()
m.generator = Wilsons(4, 5, hunt_order='serpentine')
m.generate()
assert boundary_is_solid(m.grid)
assert all_passages_open(m.grid)
assert all_corners_complete(m.grid)
def boundary_is_solid(grid):
""" Helper method to test of the maze is sane
Algorithms should generate a maze with a solid boundary of walls.
Args:
grid (np.array): maze array
Returns:
boolean: is the maze boundary solid?
"""
# first row
for c in grid[0]:
if c == 0:
return False
# other rows
for row in grid[1: -1]:
if row[0] == 0 or row[-1] == 0:
return False
# last row
for c in grid[grid.shape[0] - 1]:
if c == 0:
return False
return True
def all_passages_open(grid):
""" Helper method to test of the maze is sane
All of the (odd, odd) grid cells in a maze should be passages.
Args:
grid (np.array): maze array
Returns:
boolean: Are all the odd/odd grid cells open?
"""
H, W = grid.shape
for r in range(1, H, 2):
for c in range(1, W, 2):
if grid[r, c] == 1:
return False
return True
def all_corners_complete(grid):
""" Helper method to test of the maze is sane
All of the (even, even) grid cells in a maze should be walls.
Args:
grid (np.array): maze array
Returns:
boolean: Are all of the grid corners solid?
"""
H, W = grid.shape
for r in range(2, H, 2):
for c in range(2, W, 2):
if grid[r, c] == 0:
return False
return True
if __name__ == '__main__':
unittest.main(argv=['first-arg-is-ignored'], exit=False)
| gpl-3.0 | 5,452,656,329,043,997,000 | 31.435821 | 112 | 0.608043 | false |
hrishioa/Aviato | flask/Lib/site-packages/nltk/book.py | 5 | 3354 | # Natural Language Toolkit: Some texts for exploration in chapter 1 of the book
#
# Copyright (C) 2001-2015 NLTK Project
# Author: Steven Bird <[email protected]>
#
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
from __future__ import print_function
from nltk.corpus import (gutenberg, genesis, inaugural,
nps_chat, webtext, treebank, wordnet)
from nltk.text import Text
from nltk.probability import FreqDist
from nltk.util import bigrams
from nltk.misc import babelize_shell
print("*** Introductory Examples for the NLTK Book ***")
print("Loading text1, ..., text9 and sent1, ..., sent9")
print("Type the name of the text or sentence to view it.")
print("Type: 'texts()' or 'sents()' to list the materials.")
text1 = Text(gutenberg.words('melville-moby_dick.txt'))
print("text1:", text1.name)
text2 = Text(gutenberg.words('austen-sense.txt'))
print("text2:", text2.name)
text3 = Text(genesis.words('english-kjv.txt'), name="The Book of Genesis")
print("text3:", text3.name)
text4 = Text(inaugural.words(), name="Inaugural Address Corpus")
print("text4:", text4.name)
text5 = Text(nps_chat.words(), name="Chat Corpus")
print("text5:", text5.name)
text6 = Text(webtext.words('grail.txt'), name="Monty Python and the Holy Grail")
print("text6:", text6.name)
text7 = Text(treebank.words(), name="Wall Street Journal")
print("text7:", text7.name)
text8 = Text(webtext.words('singles.txt'), name="Personals Corpus")
print("text8:", text8.name)
text9 = Text(gutenberg.words('chesterton-thursday.txt'))
print("text9:", text9.name)
def texts():
print("text1:", text1.name)
print("text2:", text2.name)
print("text3:", text3.name)
print("text4:", text4.name)
print("text5:", text5.name)
print("text6:", text6.name)
print("text7:", text7.name)
print("text8:", text8.name)
print("text9:", text9.name)
sent1 = ["Call", "me", "Ishmael", "."]
sent2 = ["The", "family", "of", "Dashwood", "had", "long",
"been", "settled", "in", "Sussex", "."]
sent3 = ["In", "the", "beginning", "God", "created", "the",
"heaven", "and", "the", "earth", "."]
sent4 = ["Fellow", "-", "Citizens", "of", "the", "Senate",
"and", "of", "the", "House", "of", "Representatives", ":"]
sent5 = ["I", "have", "a", "problem", "with", "people",
"PMing", "me", "to", "lol", "JOIN"]
sent6 = ['SCENE', '1', ':', '[', 'wind', ']', '[', 'clop', 'clop',
'clop', ']', 'KING', 'ARTHUR', ':', 'Whoa', 'there', '!']
sent7 = ["Pierre", "Vinken", ",", "61", "years", "old", ",",
"will", "join", "the", "board", "as", "a", "nonexecutive",
"director", "Nov.", "29", "."]
sent8 = ['25', 'SEXY', 'MALE', ',', 'seeks', 'attrac', 'older',
'single', 'lady', ',', 'for', 'discreet', 'encounters', '.']
sent9 = ["THE", "suburb", "of", "Saffron", "Park", "lay", "on", "the",
"sunset", "side", "of", "London", ",", "as", "red", "and",
"ragged", "as", "a", "cloud", "of", "sunset", "."]
def sents():
print("sent1:", " ".join(sent1))
print("sent2:", " ".join(sent2))
print("sent3:", " ".join(sent3))
print("sent4:", " ".join(sent4))
print("sent5:", " ".join(sent5))
print("sent6:", " ".join(sent6))
print("sent7:", " ".join(sent7))
print("sent8:", " ".join(sent8))
print("sent9:", " ".join(sent9))
| gpl-2.0 | -6,703,769,327,110,885,000 | 36.685393 | 80 | 0.587657 | false |
davidwaroquiers/pymatgen | pymatgen/io/abinit/abitimer.py | 1 | 31329 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
This module provides objects for extracting timing data from the ABINIT output files
It also provides tools to analyze and to visualize the parallel efficiency.
"""
import collections
import logging
import os
import sys
import numpy as np
from monty.string import is_string, list_strings
from pymatgen.util.num import minloc
from pymatgen.util.plotting import add_fig_kwargs, get_ax_fig_plt
logger = logging.getLogger(__name__)
def alternate(*iterables):
"""
[a[0], b[0], ... , a[1], b[1], ..., a[n], b[n] ...]
>>> alternate([1,4], [2,5], [3,6])
[1, 2, 3, 4, 5, 6]
"""
items = []
for tup in zip(*iterables):
items.extend(tup)
return items
class AbinitTimerParserError(Exception):
"""Errors raised by AbinitTimerParser"""
class AbinitTimerParser(collections.abc.Iterable):
"""
Responsible for parsing a list of output files, extracting the timing results
and analyzing the results.
Assume the Abinit output files have been produced with `timopt -1`.
Example:
parser = AbinitTimerParser()
parser.parse(list_of_files)
To analyze all *.abo files within top, use:
parser, paths, okfiles = AbinitTimerParser.walk(top=".", ext=".abo")
"""
# The markers enclosing the data.
BEGIN_TAG = "-<BEGIN_TIMER"
END_TAG = "-<END_TIMER>"
Error = AbinitTimerParserError
# DEFAULT_MPI_RANK = "0"
@classmethod
def walk(cls, top=".", ext=".abo"):
"""
Scan directory tree starting from top, look for files with extension `ext` and
parse timing data.
Return: (parser, paths, okfiles)
where `parser` is the new object, `paths` is the list of files found and `okfiles`
is the list of files that have been parsed successfully.
(okfiles == paths) if all files have been parsed.
"""
paths = []
for root, dirs, files in os.walk(top):
for f in files:
if f.endswith(ext):
paths.append(os.path.join(root, f))
parser = cls()
okfiles = parser.parse(paths)
return parser, paths, okfiles
def __init__(self):
"""Initialize object."""
# List of files that have been parsed.
self._filenames = []
# timers[filename][mpi_rank]
# contains the timer extracted from the file filename associated to the MPI rank mpi_rank.
self._timers = collections.OrderedDict()
def __iter__(self):
return self._timers.__iter__()
def __len__(self):
return len(self._timers)
@property
def filenames(self):
"""List of files that have been parsed successfully."""
return self._filenames
def parse(self, filenames):
"""
Read and parse a filename or a list of filenames.
Files that cannot be opened are ignored. A single filename may also be given.
Return: list of successfully read files.
"""
filenames = list_strings(filenames)
read_ok = []
for fname in filenames:
try:
fh = open(fname)
except IOError:
logger.warning("Cannot open file %s" % fname)
continue
try:
self._read(fh, fname)
read_ok.append(fname)
except self.Error as e:
logger.warning("exception while parsing file %s:\n%s" % (fname, str(e)))
continue
finally:
fh.close()
# Add read_ok to the list of files that have been parsed.
self._filenames.extend(read_ok)
return read_ok
def _read(self, fh, fname):
"""Parse the TIMER section"""
if fname in self._timers:
raise self.Error("Cannot overwrite timer associated to: %s " % fname)
def parse_line(line):
"""Parse single line."""
name, vals = line[:25], line[25:].split()
try:
ctime, cfract, wtime, wfract, ncalls, gflops = vals
except ValueError:
# v8.3 Added two columns at the end [Speedup, Efficacity]
ctime, cfract, wtime, wfract, ncalls, gflops, speedup, eff = vals
return AbinitTimerSection(name, ctime, cfract, wtime, wfract, ncalls, gflops)
sections, info, cpu_time, wall_time = None, None, None, None
data = {}
inside, has_timer = 0, False
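# 'inside' tracks the position within a timer block: it becomes 1 on the
# BEGIN_TAG line and is incremented for every following line, so line 2 is the
# cpu/wall summary, lines 3-5 are header lines that must not parse as data,
# and lines > 5 are the per-section rows handled by parse_line().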
for line in fh:
# print(line.strip())
if line.startswith(self.BEGIN_TAG):
has_timer = True
sections = []
info = {}
inside = 1
line = line[len(self.BEGIN_TAG) :].strip()[:-1]
info["fname"] = fname
for tok in line.split(","):
key, val = [s.strip() for s in tok.split("=")]
info[key] = val
elif line.startswith(self.END_TAG):
inside = 0
timer = AbinitTimer(sections, info, cpu_time, wall_time)
mpi_rank = info["mpi_rank"]
data[mpi_rank] = timer
elif inside:
inside += 1
line = line[1:].strip()
if inside == 2:
d = dict()
for tok in line.split(","):
key, val = [s.strip() for s in tok.split("=")]
d[key] = float(val)
cpu_time, wall_time = d["cpu_time"], d["wall_time"]
elif inside > 5:
sections.append(parse_line(line))
else:
# Header lines between the summary and the section table must not parse as data.
parser_failed = False
try:
parse_line(line)
except Exception:
parser_failed = True
if not parser_failed:
raise self.Error("line should be empty: " + str(inside) + line)
if not has_timer:
raise self.Error("%s: No timer section found" % fname)
# Add it to the dict
self._timers[fname] = data
def timers(self, filename=None, mpi_rank="0"):
"""
Return the list of timers associated to the given `filename` and MPI rank mpi_rank.
"""
if filename is not None:
return [self._timers[filename][mpi_rank]]
return [self._timers[filename][mpi_rank] for filename in self._filenames]
def section_names(self, ordkey="wall_time"):
"""
Return the names of sections ordered by ordkey.
For the time being, the values are taken from the first timer.
"""
section_names = []
# FIXME this is not trivial
for idx, timer in enumerate(self.timers()):
if idx == 0:
section_names = [s.name for s in timer.order_sections(ordkey)]
# check = section_names
# else:
# new_set = set( [s.name for s in timer.order_sections(ordkey)])
# section_names.intersection_update(new_set)
# check = check.union(new_set)
# if check != section_names:
# print("sections", section_names)
# print("check",check)
return section_names
def get_sections(self, section_name):
"""
Return the list of sections stored in self.timers() given `section_name`
A fake section is returned if the timer does not have section_name.
"""
sections = []
for timer in self.timers():
for sect in timer.sections:
if sect.name == section_name:
sections.append(sect)
break
else:
sections.append(AbinitTimerSection.fake())
return sections
def pefficiency(self):
"""
Analyze the parallel efficiency.
Return: :class:`ParallelEfficiency` object.
"""
timers = self.timers()
# Number of CPUs employed in each calculation.
ncpus = [timer.ncpus for timer in timers]
# Find the minimum number of cpus used and its index in timers.
min_idx = minloc(ncpus)
min_ncpus = ncpus[min_idx]
# Reference timer
ref_t = timers[min_idx]
# Compute the parallel efficiency (total and section efficiency)
peff = {}
ctime_peff = [(min_ncpus * ref_t.wall_time) / (t.wall_time * ncp) for (t, ncp) in zip(timers, ncpus)]
wtime_peff = [(min_ncpus * ref_t.cpu_time) / (t.cpu_time * ncp) for (t, ncp) in zip(timers, ncpus)]
n = len(timers)
peff["total"] = {}
peff["total"]["cpu_time"] = ctime_peff
peff["total"]["wall_time"] = wtime_peff
peff["total"]["cpu_fract"] = n * [100]
peff["total"]["wall_fract"] = n * [100]
for sect_name in self.section_names():
# print(sect_name)
ref_sect = ref_t.get_section(sect_name)
sects = [t.get_section(sect_name) for t in timers]
try:
ctime_peff = [(min_ncpus * ref_sect.cpu_time) / (s.cpu_time * ncp) for (s, ncp) in zip(sects, ncpus)]
wtime_peff = [(min_ncpus * ref_sect.wall_time) / (s.wall_time * ncp) for (s, ncp) in zip(sects, ncpus)]
except ZeroDivisionError:
ctime_peff = n * [-1]
wtime_peff = n * [-1]
assert sect_name not in peff
peff[sect_name] = {}
peff[sect_name]["cpu_time"] = ctime_peff
peff[sect_name]["wall_time"] = wtime_peff
peff[sect_name]["cpu_fract"] = [s.cpu_fract for s in sects]
peff[sect_name]["wall_fract"] = [s.wall_fract for s in sects]
return ParallelEfficiency(self._filenames, min_idx, peff)
def summarize(self, **kwargs):
"""
Return pandas DataFrame with the most important results stored in the timers.
"""
import pandas as pd
colnames = [
"fname",
"wall_time",
"cpu_time",
"mpi_nprocs",
"omp_nthreads",
"mpi_rank",
]
frame = pd.DataFrame(columns=colnames)
for i, timer in enumerate(self.timers()):
frame = frame.append({k: getattr(timer, k) for k in colnames}, ignore_index=True)
frame["tot_ncpus"] = frame["mpi_nprocs"] * frame["omp_nthreads"]
# Compute parallel efficiency (use the run with min number of cpus to normalize).
i = frame["tot_ncpus"].values.argmin()
ref_wtime = frame.iloc[i]["wall_time"]
ref_ncpus = frame.iloc[i]["tot_ncpus"]
frame["peff"] = (ref_ncpus * ref_wtime) / (frame["wall_time"] * frame["tot_ncpus"])
return frame
@add_fig_kwargs
def plot_efficiency(self, key="wall_time", what="good+bad", nmax=5, ax=None, **kwargs):
"""
Plot the parallel efficiency
Args:
key: Parallel efficiency is computed using the wall_time.
what: Specifies what to plot: `good` for sections with good parallel efficiency.
`bad` for sections with bad efficiency. Options can be concatenated with `+`.
nmax: Maximum number of entries in plot
ax: matplotlib :class:`Axes` or None if a new figure should be created.
================ ====================================================
kwargs Meaning
================ ====================================================
linewidth matplotlib linewidth. Default: 2.0
markersize matplotlib markersize. Default: 10
================ ====================================================
Returns:
`matplotlib` figure
"""
ax, fig, plt = get_ax_fig_plt(ax=ax)
lw = kwargs.pop("linewidth", 2.0)
msize = kwargs.pop("markersize", 10)
what = what.split("+")
timers = self.timers()
peff = self.pefficiency()
n = len(timers)
xx = np.arange(n)
# ax.set_color_cycle(['g', 'b', 'c', 'm', 'y', 'k'])
ax.set_prop_cycle(color=["g", "b", "c", "m", "y", "k"])
lines, legend_entries = [], []
# Plot sections with good efficiency.
if "good" in what:
good = peff.good_sections(key=key, nmax=nmax)
for g in good:
# print(g, peff[g])
yy = peff[g][key]
(line,) = ax.plot(xx, yy, "-->", linewidth=lw, markersize=msize)
lines.append(line)
legend_entries.append(g)
# Plot sections with bad efficiency.
if "bad" in what:
bad = peff.bad_sections(key=key, nmax=nmax)
for b in bad:
# print(b, peff[b])
yy = peff[b][key]
(line,) = ax.plot(xx, yy, "-.<", linewidth=lw, markersize=msize)
lines.append(line)
legend_entries.append(b)
# Add total if not already done
if "total" not in legend_entries:
yy = peff["total"][key]
(total_line,) = ax.plot(xx, yy, "r", linewidth=lw, markersize=msize)
lines.append(total_line)
legend_entries.append("total")
ax.legend(lines, legend_entries, loc="best", shadow=True)
# ax.set_title(title)
ax.set_xlabel("Total_NCPUs")
ax.set_ylabel("Efficiency")
ax.grid(True)
# Set xticks and labels.
labels = ["MPI=%d, OMP=%d" % (t.mpi_nprocs, t.omp_nthreads) for t in timers]
ax.set_xticks(xx)
ax.set_xticklabels(labels, fontdict=None, minor=False, rotation=15)
return fig
@add_fig_kwargs
def plot_pie(self, key="wall_time", minfract=0.05, **kwargs):
"""
Plot pie charts of the different timers.
Args:
key: Keyword used to extract data from timers.
minfract: Don't show sections whose relative weight is less that minfract.
Returns:
`matplotlib` figure
"""
timers = self.timers()
n = len(timers)
# Make square figures and axes
import matplotlib.pyplot as plt
from matplotlib.gridspec import GridSpec
fig = plt.gcf()
gspec = GridSpec(n, 1)
for idx, timer in enumerate(timers):
ax = plt.subplot(gspec[idx, 0])
ax.set_title(str(timer))
timer.pie(ax=ax, key=key, minfract=minfract, show=False)
return fig
@add_fig_kwargs
def plot_stacked_hist(self, key="wall_time", nmax=5, ax=None, **kwargs):
"""
Plot stacked histogram of the different timers.
Args:
key: Keyword used to extract data from the timers. Only the first `nmax`
sections with largest value are show.
mmax: Maximum nuber of sections to show. Other entries are grouped together
in the `others` section.
ax: matplotlib :class:`Axes` or None if a new figure should be created.
Returns:
`matplotlib` figure
"""
ax, fig, plt = get_ax_fig_plt(ax=ax)
mpi_rank = "0"
timers = self.timers(mpi_rank=mpi_rank)
n = len(timers)
names, values = [], []
rest = np.zeros(n)
for idx, sname in enumerate(self.section_names(ordkey=key)):
sections = self.get_sections(sname)
svals = np.asarray([s.__dict__[key] for s in sections])
if idx < nmax:
names.append(sname)
values.append(svals)
else:
rest += svals
names.append("others (nmax=%d)" % nmax)
values.append(rest)
# The dataset is stored in values. Now create the stacked histogram.
ind = np.arange(n) # the locations for the groups
width = 0.35 # the width of the bars
colors = nmax * ["r", "g", "b", "c", "k", "y", "m"]
bars = []
bottom = np.zeros(n)
for idx, vals in enumerate(values):
color = colors[idx]
bar_ = ax.bar(ind, vals, width, color=color, bottom=bottom)
bars.append(bar_)
bottom += vals
ax.set_ylabel(key)
ax.set_title("Stacked histogram with the %d most important sections" % nmax)
ticks = ind + width / 2.0
labels = ["MPI=%d, OMP=%d" % (t.mpi_nprocs, t.omp_nthreads) for t in timers]
ax.set_xticks(ticks)
ax.set_xticklabels(labels, rotation=15)
# Add legend.
ax.legend([bar_[0] for bar_ in bars], names, loc="best")
return fig
def plot_all(self, show=True, **kwargs):
"""
Call all plot methods provided by the parser.
"""
figs = []
app = figs.append
app(self.plot_stacked_hist(show=show))
app(self.plot_efficiency(show=show))
app(self.plot_pie(show=show))
return figs
class ParallelEfficiency(dict):
"""
Store results concerning the parallel efficiency of the job.
"""
def __init__(self, filenames, ref_idx, *args, **kwargs):
"""
Args:
filenames: List of filenames
ref_idx: Index of the Reference time (calculation done with the smallest number of cpus)
"""
self.update(*args, **kwargs)
self.filenames = filenames
self._ref_idx = ref_idx
def _order_by_peff(self, key, criterion, reverse=True):
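# Rank section names by parallel efficiency: ignore sections containing a
# division-by-zero marker (-1), drop the reference run (whose efficiency is
# 1.0 by construction) and sort the rest by the chosen estimator
# (min/max/mean) of the remaining values.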
self.estimator = {
"min": min,
"max": max,
"mean": lambda items: sum(items) / len(items),
}[criterion]
data = []
for (sect_name, peff) in self.items():
# Ignore values where we had a division by zero.
if all([v != -1 for v in peff[key]]):
values = peff[key][:]
# print(sect_name, values)
if len(values) > 1:
ref_value = values.pop(self._ref_idx)
assert ref_value == 1.0
data.append((sect_name, self.estimator(values)))
data.sort(key=lambda t: t[1], reverse=reverse)
return tuple([sect_name for (sect_name, e) in data])
def totable(self, stop=None, reverse=True):
"""
Return table (list of lists) with timing results.
Args:
stop: Include results up to stop. None for all
reverse: Put items with highest wall_time in first positions if True.
"""
osects = self._order_by_peff("wall_time", criterion="mean", reverse=reverse)
if stop is not None:
osects = osects[:stop]
n = len(self.filenames)
table = [["AbinitTimerSection"] + alternate(self.filenames, n * ["%"])]
for sect_name in osects:
peff = self[sect_name]["wall_time"]
fract = self[sect_name]["wall_fract"]
vals = alternate(peff, fract)
table.append([sect_name] + ["%.2f" % val for val in vals])
return table
def good_sections(self, key="wall_time", criterion="mean", nmax=5):
"""
Return first `nmax` sections with best value of key `key` using criterion `criterion`.
"""
good_sections = self._order_by_peff(key, criterion=criterion)
return good_sections[:nmax]
def bad_sections(self, key="wall_time", criterion="mean", nmax=5):
"""
Return first `nmax` sections with worst value of key `key` using criterion `criterion`.
"""
bad_sections = self._order_by_peff(key, criterion=criterion, reverse=False)
return bad_sections[:nmax]
class AbinitTimerSection:
"""Record with the timing results associated to a section of code."""
STR_FIELDS = ["name"]
NUMERIC_FIELDS = [
"wall_time",
"wall_fract",
"cpu_time",
"cpu_fract",
"ncalls",
"gflops",
]
FIELDS = tuple(STR_FIELDS + NUMERIC_FIELDS)
@classmethod
def fake(cls):
"""Return a fake section. Mainly used to fill missing entries if needed."""
return AbinitTimerSection("fake", 0.0, 0.0, 0.0, 0.0, -1, 0.0)
def __init__(self, name, cpu_time, cpu_fract, wall_time, wall_fract, ncalls, gflops):
"""
Args:
name: Name of the sections.
cpu_time: CPU time in seconds.
cpu_fract: Percentage of CPU time.
wall_time: Wall-time in seconds.
wall_fract: Percentage of wall-time.
ncalls: Number of calls
gflops: Gigaflops.
"""
self.name = name.strip()
self.cpu_time = float(cpu_time)
self.cpu_fract = float(cpu_fract)
self.wall_time = float(wall_time)
self.wall_fract = float(wall_fract)
self.ncalls = int(ncalls)
self.gflops = float(gflops)
def to_tuple(self):
"""Convert object to tuple."""
return tuple([self.__dict__[at] for at in AbinitTimerSection.FIELDS])
def to_dict(self):
"""Convert object to dictionary."""
return {at: self.__dict__[at] for at in AbinitTimerSection.FIELDS}
def to_csvline(self, with_header=False):
"""Return a string with data in CSV format. Add header if `with_header`"""
string = ""
if with_header:
string += "# " + " ".join(at for at in AbinitTimerSection.FIELDS) + "\n"
string += ", ".join(str(v) for v in self.to_tuple()) + "\n"
return string
def __str__(self):
"""String representation."""
string = ""
for a in AbinitTimerSection.FIELDS:
string += a + " = " + self.__dict__[a] + ","
return string[:-1]
class AbinitTimer:
"""Container class storing the timing results."""
def __init__(self, sections, info, cpu_time, wall_time):
"""
Args:
sections: List of sections
info: Dictionary with extra info.
cpu_time: Cpu-time in seconds.
wall_time: Wall-time in seconds.
"""
# Store sections and names
self.sections = tuple(sections)
self.section_names = tuple([s.name for s in self.sections])
self.info = info
self.cpu_time = float(cpu_time)
self.wall_time = float(wall_time)
self.mpi_nprocs = int(info["mpi_nprocs"])
self.omp_nthreads = int(info["omp_nthreads"])
self.mpi_rank = info["mpi_rank"].strip()
self.fname = info["fname"].strip()
def __str__(self):
string = "file=%s, wall_time=%.1f, mpi_nprocs=%d, omp_nthreads=%d" % (
self.fname,
self.wall_time,
self.mpi_nprocs,
self.omp_nthreads,
)
# string += ", rank = " + self.mpi_rank
return string
@property
def ncpus(self):
"""Total number of CPUs employed."""
return self.mpi_nprocs * self.omp_nthreads
def get_section(self, section_name):
"""Return section associated to `section_name`."""
idx = self.section_names.index(section_name)
sect = self.sections[idx]
assert sect.name == section_name
return sect
def to_csv(self, fileobj=sys.stdout):
"""Write data on file fileobj using CSV format."""
openclose = is_string(fileobj)
if openclose:
fileobj = open(fileobj, "w")
for idx, section in enumerate(self.sections):
fileobj.write(section.to_csvline(with_header=(idx == 0)))
fileobj.flush()
if openclose:
fileobj.close()
def to_table(self, sort_key="wall_time", stop=None):
"""Return a table (list of lists) with timer data"""
table = [
list(AbinitTimerSection.FIELDS),
]
ord_sections = self.order_sections(sort_key)
if stop is not None:
ord_sections = ord_sections[:stop]
for osect in ord_sections:
row = [str(item) for item in osect.to_tuple()]
table.append(row)
return table
# Maintain old API
totable = to_table
def get_dataframe(self, sort_key="wall_time", **kwargs):
"""
Return a pandas DataFrame with entries sorted according to `sort_key`.
"""
import pandas as pd
frame = pd.DataFrame(columns=AbinitTimerSection.FIELDS)
for osect in self.order_sections(sort_key):
frame = frame.append(osect.to_dict(), ignore_index=True)
# Monkey patch
frame.info = self.info
frame.cpu_time = self.cpu_time
frame.wall_time = self.wall_time
frame.mpi_nprocs = self.mpi_nprocs
frame.omp_nthreads = self.omp_nthreads
frame.mpi_rank = self.mpi_rank
frame.fname = self.fname
return frame
def get_values(self, keys):
"""
Return a list of values associated to a particular list of keys.
"""
if is_string(keys):
return [s.__dict__[keys] for s in self.sections]
values = []
for k in keys:
values.append([s.__dict__[k] for s in self.sections])
return values
def names_and_values(self, key, minval=None, minfract=None, sorted=True):
"""
Select the entries whose value[key] is >= minval or whose fraction[key] is >= minfract
Return the names of the sections and the corresponding values.
"""
values = self.get_values(key)
names = self.get_values("name")
new_names, new_values = [], []
other_val = 0.0
if minval is not None:
assert minfract is None
for n, v in zip(names, values):
if v >= minval:
new_names.append(n)
new_values.append(v)
else:
other_val += v
new_names.append("below minval " + str(minval))
new_values.append(other_val)
elif minfract is not None:
assert minval is None
total = self.sum_sections(key)
for n, v in zip(names, values):
if v / total >= minfract:
new_names.append(n)
new_values.append(v)
else:
other_val += v
new_names.append("below minfract " + str(minfract))
new_values.append(other_val)
else:
# all values
new_names, new_values = names, values
if sorted:
# Sort new_values and rearrange new_names.
nandv = list(zip(new_names, new_values))
nandv.sort(key=lambda t: t[1])
new_names, new_values = [n[0] for n in nandv], [n[1] for n in nandv]
return new_names, new_values
def _reduce_sections(self, keys, operator):
return operator(self.get_values(keys))
def sum_sections(self, keys):
"""Sum value of keys."""
return self._reduce_sections(keys, sum)
def order_sections(self, key, reverse=True):
"""Sort sections according to the value of key."""
return sorted(self.sections, key=lambda s: s.__dict__[key], reverse=reverse)
@add_fig_kwargs
def cpuwall_histogram(self, ax=None, **kwargs):
"""
Plot histogram with cpu- and wall-time on axis `ax`.
Args:
ax: matplotlib :class:`Axes` or None if a new figure should be created.
Returns: `matplotlib` figure
"""
ax, fig, plt = get_ax_fig_plt(ax=ax)
nk = len(self.sections)
ind = np.arange(nk) # the x locations for the groups
width = 0.35 # the width of the bars
cpu_times = self.get_values("cpu_time")
rects1 = plt.bar(ind, cpu_times, width, color="r")
wall_times = self.get_values("wall_time")
rects2 = plt.bar(ind + width, wall_times, width, color="y")
# Add ylabel and title
ax.set_ylabel("Time (s)")
# plt.title('CPU-time and Wall-time for the different sections of the code')
ticks = self.get_values("name")
ax.set_xticks(ind + width, ticks)
ax.legend((rects1[0], rects2[0]), ("CPU", "Wall"), loc="best")
return fig
@add_fig_kwargs
def pie(self, key="wall_time", minfract=0.05, ax=None, **kwargs):
"""
Plot pie chart for this timer.
Args:
key: Keyword used to extract data from the timer.
minfract: Don't show sections whose relative weight is less that minfract.
ax: matplotlib :class:`Axes` or None if a new figure should be created.
Returns: `matplotlib` figure
"""
ax, fig, plt = get_ax_fig_plt(ax=ax)
# Set aspect ratio to be equal so that pie is drawn as a circle.
ax.axis("equal")
# Don't show section whose value is less that minfract
labels, vals = self.names_and_values(key, minfract=minfract)
ax.pie(vals, explode=None, labels=labels, autopct="%1.1f%%", shadow=True)
return fig
@add_fig_kwargs
def scatter_hist(self, ax=None, **kwargs):
"""
Scatter plot + histogram.
Args:
ax: matplotlib :class:`Axes` or None if a new figure should be created.
Returns: `matplotlib` figure
"""
from mpl_toolkits.axes_grid1 import make_axes_locatable
ax, fig, plt = get_ax_fig_plt(ax=ax)
x = np.asarray(self.get_values("cpu_time"))
y = np.asarray(self.get_values("wall_time"))
# the scatter plot:
axScatter = plt.subplot(1, 1, 1)
axScatter.scatter(x, y)
axScatter.set_aspect("auto")
# create new axes on the right and on the top of the current axes
# The first argument of the new_vertical(new_horizontal) method is
# the height (width) of the axes to be created in inches.
divider = make_axes_locatable(axScatter)
axHistx = divider.append_axes("top", 1.2, pad=0.1, sharex=axScatter)
axHisty = divider.append_axes("right", 1.2, pad=0.1, sharey=axScatter)
# make some labels invisible
plt.setp(axHistx.get_xticklabels() + axHisty.get_yticklabels(), visible=False)
# now determine nice limits by hand:
binwidth = 0.25
xymax = np.max([np.max(np.fabs(x)), np.max(np.fabs(y))])
lim = (int(xymax / binwidth) + 1) * binwidth
bins = np.arange(-lim, lim + binwidth, binwidth)
axHistx.hist(x, bins=bins)
axHisty.hist(y, bins=bins, orientation="horizontal")
# the xaxis of axHistx and yaxis of axHisty are shared with axScatter,
# thus there is no need to manually adjust the xlim and ylim of these axis.
# axHistx.axis["bottom"].major_ticklabels.set_visible(False)
for tl in axHistx.get_xticklabels():
tl.set_visible(False)
axHistx.set_yticks([0, 50, 100])
# axHisty.axis["left"].major_ticklabels.set_visible(False)
for tl in axHisty.get_yticklabels():
tl.set_visible(False)
axHisty.set_xticks([0, 50, 100])
# plt.draw()
return fig
| mit | 5,615,313,038,195,150,000 | 32.471154 | 119 | 0.546554 | false |
frishberg/django | tests/auth_tests/test_admin_multidb.py | 12 | 1496 | from django.conf.urls import url
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User
from django.db import connections
from django.test import TestCase, mock, override_settings
from django.urls import reverse
class Router(object):
target_db = None
def db_for_read(self, model, **hints):
return self.target_db
db_for_write = db_for_read
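# Both reads and writes for every model are routed to whichever database
# alias the test currently selects via Router.target_db.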
site = admin.AdminSite(name='test_adminsite')
site.register(User, admin_class=UserAdmin)
urlpatterns = [
url(r'^admin/', site.urls),
]
@override_settings(ROOT_URLCONF=__name__, DATABASE_ROUTERS=['%s.Router' % __name__])
class MultiDatabaseTests(TestCase):
multi_db = True
@classmethod
def setUpTestData(cls):
cls.superusers = {}
for db in connections:
Router.target_db = db
cls.superusers[db] = User.objects.create_superuser(
username='admin', password='something', email='[email protected]',
)
@mock.patch('django.contrib.auth.admin.transaction')
def test_add_view(self, mock):
for db in connections:
Router.target_db = db
self.client.force_login(self.superusers[db])
self.client.post(reverse('test_adminsite:auth_user_add'), {
'username': 'some_user',
'password1': 'helloworld',
'password2': 'helloworld',
})
mock.atomic.assert_called_with(using=db)
| bsd-3-clause | 2,055,074,307,013,647,600 | 28.92 | 84 | 0.639037 | false |
markovmodel/adaptivemd | adaptivemd/engine/__init__.py | 2 | 1176 | ##############################################################################
# adaptiveMD: A Python Framework to Run Adaptive Molecular Dynamics (MD)
# Simulations on HPC Resources
# Copyright 2017 FU Berlin and the Authors
#
# Authors: Jan-Hendrik Prinz
# Contributors:
#
# `adaptiveMD` is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as
# published by the Free Software Foundation, either version 2.1
# of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with MDTraj. If not, see <http://www.gnu.org/licenses/>.
##############################################################################
from __future__ import absolute_import
from .engine import (Engine, Trajectory, Frame, TrajectoryGenerationTask,
TrajectoryExtensionTask)
| lgpl-2.1 | 8,122,477,913,784,121,000 | 46.04 | 78 | 0.64966 | false |
Revanth47/addons-server | tests/ui/pages/desktop/base.py | 4 | 1697 | from pypom import Page, Region
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
class Base(Page):
_url = '{base_url}/{locale}'
def __init__(self, selenium, base_url, locale='en-US', **kwargs):
super(Base, self).__init__(selenium, base_url, locale=locale, **kwargs)
@property
def header(self):
return self.Header(self)
@property
def logged_in(self):
"""Returns True if a user is logged in"""
return self.is_element_displayed(*self.header._user_locator)
def login(self, email, password):
login_page = self.header.click_login()
login_page.login(email, password)
def logout(self):
self.header.click_logout()
class Header(Region):
_root_locator = (By.CLASS_NAME, 'amo-header')
_login_locator = (By.CSS_SELECTOR, '#aux-nav .account a:nth-child(2)')
_logout_locator = (By.CSS_SELECTOR, '.logout > a')
_user_locator = (By.CSS_SELECTOR, '#aux-nav .account .user')
def click_login(self):
self.find_element(*self._login_locator).click()
from pages.desktop.login import Login
return Login(self.selenium, self.page.base_url)
def click_logout(self):
user = self.find_element(*self._user_locator)
logout = self.find_element(*self._logout_locator)
action = ActionChains(self.selenium)
action.move_to_element(user)
action.move_to_element(logout)
action.click()
action.perform()
self.wait.until(lambda s: self.is_element_displayed(
*self._login_locator))
| bsd-3-clause | 9,104,308,252,170,018,000 | 32.94 | 79 | 0.612257 | false |
Jmita/ptavi-p3 | smallsmilhandler.py | 1 | 2081 | from xml.sax import make_parser
from xml.sax.handler import ContentHandler
class SmallSMILHandler(ContentHandler):
def __init__(self):
self.lista = []
def startElement(self, name, attrs):
if name == "root-layout":
self.root_layout = {}
self.root_layout['width'] = attrs.get('width', "")
self.root_layout['height'] = attrs.get('height', "")
            self.root_layout['background_color'] = attrs.get('background-color', "")
self.lista.append([name, self.root_layout])
elif name == "region":
self.region = {}
self.region['id'] = attrs.get('id', "")
self.region['top'] = attrs.get('top', "")
self.region['bottom'] = attrs.get('bottom', "")
self.region['left'] = attrs.get('left', "")
self.region['right'] = attrs.get('right', "")
self.lista.append([name, self.region])
elif name == "img":
self.img = {}
self.img['src'] = attrs.get('src', "")
self.img['region'] = attrs.get('region', "")
self.img['begin'] = attrs.get('begin', "")
self.img['dur'] = attrs.get('dur', "")
self.lista.append([name, self.img])
elif name == "audio":
self.audio = {}
self.audio['src'] = attrs.get('src', "")
self.audio['begin'] = attrs.get('begin', "")
self.audio['dur'] = attrs.get('dur', "")
self.lista.append([name, self.audio])
elif name == "textstream":
self.textstream = {}
self.textstream['src'] = attrs.get('src', "")
self.textstream['region'] = attrs.get('region', "")
self.lista.append([name, self.textstream])
def get_tags(self):
return self.lista
if __name__ == "__main__":
"""
    Main program
"""
parser = make_parser()
SSHandler = SmallSMILHandler()
parser.setContentHandler(SSHandler)
parser.parse(open('karaoke.smil'))
data = SSHandler.get_tags()
print data
| apache-2.0 | 3,023,930,376,117,540,400 | 32.564516 | 85 | 0.519942 | false |
kholia/pyrpm | tests/rpmgraph.py | 1 | 14571 | #!/usr/bin/python
#
# rpmgraph
#
# (c) 2005 Thomas Woerner <[email protected]>
#
# version 2005-06-07-01
import sys, os
PYRPMDIR = ".."
if not PYRPMDIR in sys.path:
sys.path.append(PYRPMDIR)
import pyrpm
import pyrpm.database.memorydb
from pyrpm.logger import log
def usage():
print """Usage: %s [-h] <rpm name/package>...
-h | --help print help
    -v | --verbose be verbose; repeat for more verbosity
-f use full package names (NEVR)
-o write simple graph output to file
(default: rpmgraph.dot)
-i iterate and write iteration and loop graphs
(iteration_XXX.dot and loop_XXX.dot)
-nC no conflict checks
-nF no file conflict checks
    This program prints a tree of package dependencies if '-i' is not given;
    otherwise it iterates through the normal ordering process and writes the
    iteration and loop graphs.
""" % sys.argv[0]
# ----------------------------------------------------------------------------
sout = os.readlink("/proc/self/fd/1")
devpts = 0
if sout[0:9] == "/dev/pts/":
devpts = 1
def progress_write(msg):
if devpts == 1:
sys.stdout.write("\r")
sys.stdout.write(msg)
if devpts == 0:
sys.stdout.write("\n")
sys.stdout.flush()
# ----------------------------------------------------------------------------
rpms = [ ]
verbose = 0
full_names = 0
iteration = 0
output = "rpmgraph.dot"
tags = [ "name", "epoch", "version", "release", "arch",
"providename", "provideflags", "provideversion", "requirename",
"requireflags", "requireversion", "obsoletename", "obsoleteflags",
"obsoleteversion", "conflictname", "conflictflags",
"conflictversion", "filesizes", "filemodes", "filemd5s", "fileflags",
"dirindexes", "basenames", "dirnames" ]
if __name__ == '__main__':
if len(sys.argv) == 1:
usage()
sys.exit(0)
pargs = [ ]
i = 1
while i < len(sys.argv):
if sys.argv[i] == "-h" or sys.argv[i] == "--help":
usage()
sys.exit(0)
elif sys.argv[i][:2] == "-v":
j = 1
while j < len(sys.argv[i]) and sys.argv[i][j] == "v":
verbose += 1
j += 1
elif sys.argv[i][:2] == "-v":
verbose += 1
elif sys.argv[i][:2] == "-f":
full_names = 1
elif sys.argv[i][:2] == "-i":
iteration = 1
elif sys.argv[i] == "-o":
i += 1
output = sys.argv[i]
elif sys.argv[i] == "-nC":
pyrpm.rpmconfig.noconflicts = 1
elif sys.argv[i] == "-nF":
pyrpm.rpmconfig.nofileconflicts = 1
else:
pargs.append(sys.argv[i])
i += 1
pyrpm.rpmconfig.verbose = verbose
if pyrpm.rpmconfig.verbose > 3:
pyrpm.rpmconfig.debug = pyrpm.rpmconfig.verbose - 3
if pyrpm.rpmconfig.verbose > 2:
pyrpm.rpmconfig.warning = pyrpm.rpmconfig.verbose - 2
elif pyrpm.rpmconfig.verbose > 1:
pyrpm.rpmconfig.warning = pyrpm.rpmconfig.verbose - 1
if len(pargs) == 0:
usage()
sys.exit(0)
# -- load packages
i = 1
for f in pargs:
if verbose > 0:
progress_write("Reading %d/%d " % (i, len(pargs)))
r = pyrpm.RpmPackage(pyrpm.rpmconfig, f)
try:
r.read(tags=tags)
except IOError:
print "Loading of %s failed, exiting." % f
sys.exit(-1)
r.close()
rpms.append(r)
i += 1
if verbose > 0 and len(pargs) > 0:
print
del pargs
# -----------------------------------------------------------------------
def printRelations(relations, output):
if output == "-":
fp = sys.stdout
else:
fp = open(output, "w+")
fp.write('digraph rpmgraph {\n')
fp.write('graph [\n');
fp.write(' overlap="false",\n');
fp.write(' nodesep="1.0",\n');
fp.write(' K=2,\n');
fp.write(' splines="true",\n');
fp.write(' mindist=2,\n');
fp.write(' pack="true",\n');
fp.write(' ratio="compress",\n');
fp.write(' size="50,50"\n');
fp.write('];\n')
fp.write('node [\n');
# fp.write(' label="\N",\n');
fp.write(' fontsize=150\n');
fp.write('];\n')
fp.write('edge [\n');
fp.write(' minlen=1,\n');
fp.write(' tailclip=true,\n');
fp.write(' headclip=true\n');
fp.write('];\n')
for pkg in relations:
rel = relations[pkg]
if full_names:
pkg_name = pkg.getNEVRA()
else:
pkg_name = pkg["name"]
fp.write('"%s" [peripheries=%d];\n' % \
(pkg_name, len(rel.pre)+len(rel.post)))
for pkg in relations:
rel = relations[pkg]
if full_names:
pkg_name = pkg.getNEVRA()
else:
pkg_name = pkg["name"]
if len(rel.pre) > 0:
for p in rel.pre:
f = rel.pre[p]
if f == 1:
style='solid'
else:
style='bold'
if full_names:
name = p.getNEVRA()
else:
name = p["name"]
fp.write('"%s" -> "%s" [style="%s", arrowsize=10.0];\n' % \
(pkg_name, name, style))
fp.write('}\n')
if output != "-":
fp.close()
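    # The generated DOT file can be rendered with Graphviz, e.g. (illustrative):
    #     dot -Tpng rpmgraph.dot -o rpmgraph.png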
# -----------------------------------------------------------------------
class Node:
def __init__(self, name, index):
self.name = name
self.index = index
self.x = 0
self.y = 0
self.width = 0.7
self.height = 0.5
if len(self.name) * 0.07 + 0.2 > self.width:
self.width = len(self.name) * 0.07 + 0.2
class Loop:
def __init__(self, relations, loop, start_index):
self.relations = relations
self.loop = loop
self.x = 0
self.y = 0
self.nodes = { }
l = len(self.loop)-1
for i in xrange(l):
self.nodes[self.loop[i]] = Node(self.loop[i]["name"],
start_index + i)
if i < 2:
if l == 2:
self.nodes[self.loop[i]].x = 50
else:
self.nodes[self.loop[i]].x = 100
self.nodes[self.loop[i]].y = i * (l - 1) * 100 + 50
else:
self.nodes[self.loop[i]].x = 0
self.nodes[self.loop[i]].y = (l - i) * 100 + 50
self.width = 150
if l > 2:
self.width = 200
self.height = l * 100
def __len__(self):
return len(self.loop)
def __getitem__(self, i):
return self.loop[i]
def __str__(self):
s = ""
for node in self.nodes:
s += '"node%08d" ' % self.nodes[node].index
s += '[label="%s", pos="%d,%d", width="%.2f", height="%.2f"];\n' % \
(self.nodes[node].name,
self.nodes[node].x + self.x, self.nodes[node].y + self.y,
self.nodes[node].width, self.nodes[node].height)
l = len(self.loop)-1
for i in xrange(l):
node = self.loop[i]
next = self.loop[i+1]
style='style="solid"'
if self.relations[node].pre[next] == 2:
style='style="bold"'
x2 = self.nodes[node].x + self.x
y2 = self.nodes[node].y + self.y
x1 = self.nodes[next].x + self.x
y1 = self.nodes[next].y + self.y
if y1 > y2:
y1 -= self.nodes[next].height * 30
y2 += self.nodes[node].height * 30
if y1 < y2:
y1 += self.nodes[next].height * 30
y2 -= self.nodes[node].height * 30
if x1 > x2:
x1 -= self.nodes[next].width * 20
if x1 < x2:
x2 -= self.nodes[node].width * 20
if l == 2: # two nodes
x1 += - 10 + i*20
x2 += - 10 + i*20
pos = 'pos="e,%d,%d %d,%d %d,%d %d,%d %d,%d"' % \
(x1, y1, x2, y2, x2, y2, x2, y2,
(x1 + x2) / 2, (y1 + y2) / 2)
s += '"node%08d" -> "node%08d" [%s, %s];\n' % \
(self.nodes[node].index, self.nodes[next].index,
style, pos)
return s
def arrangeLoops(loop_list, _y_max):
x = 50
y = y_max = 0
line_width = 0
lines = [ ] # unfilled lines
for loop in loop_list:
# first check if it fits in an unfilled line
if len(lines) > 0:
found = 0
for line in lines:
if line[1] + loop.height <= _y_max and \
loop.width <= line[2]:
loop.x = line[0]
loop.y = line[1]
line[1] += loop.height
if y_max < line[1]:
y_max = line[1]
found = 1
break
if found == 1:
continue
if y != 0 and y + loop.height > _y_max:
if y < _y_max:
lines.append([x, y, line_width])
y = 0
x += line_width
line_width = 0
loop.x = x
loop.y = y
if line_width < loop.width:
line_width = loop.width
y += loop.height
if y_max < y:
y_max = y
return (x + line_width, y_max)
def printLoops(relations, loops, output):
if output == "-":
fp = sys.stdout
else:
fp = open(output, "w+")
loop_list = [ ]
nodes_index = 0
for loop in loops:
loop_list.append(Loop(relations, loop, nodes_index))
nodes_index += len(loop)-1
(x_max, y_max) = arrangeLoops(loop_list, 100)
old_y_max = [ ]
# make it about (3*height x 4*width)
while y_max < 1.25 * x_max or y_max > 1.5 * x_max:
# y_max = (x_max + y_max) / 2
y_max = (1.33 * x_max + 0.75 * y_max) / 2
(x_max, y_max) = arrangeLoops(loop_list, y_max)
if y_max in old_y_max:
break
old_y_max.append(y_max)
fp.write('digraph rpmgraph {\n')
fp.write('graph [\n');
fp.write(' overlap="false",\n');
fp.write(' nodesep="1.0",\n');
fp.write(' K=2,\n');
fp.write(' splines="true",\n');
fp.write(' mindist=2,\n');
fp.write(' pack="true",\n');
fp.write(' ratio="compress",\n');
fp.write(' bb="0,0,%d,%d"\n' % (x_max, y_max));
fp.write('];\n')
fp.write('node [\n');
fp.write(' fontsize=10\n');
fp.write('];\n')
fp.write('edge [\n');
fp.write(' minlen=1,\n');
fp.write(' tailclip=true,\n');
fp.write(' headclip=true\n');
fp.write(' arrowsize=1.0\n');
fp.write('];\n')
for loop in loop_list:
fp.write(str(loop))
fp.write('}\n')
if output != "-":
fp.close()
# -----------------------------------------------------------------------
def orderRpms(orderer, relations):
""" Order rpmlist.
Returns ordered list of packages. """
global iteration_count
global loop_count
order = [ ]
last = [ ]
idx = 1
while len(relations) > 0:
printRelations(relations, "iteration_%03d.dot" % iteration_count)
iteration_count += 1
# remove and save all packages without a post relation in reverse
# order
# these packages will be appended later to the list
orderer._separatePostLeafNodes(relations, last)
if len(relations) == 0:
break
next = orderer._getNextLeafNode(relations)
if next != None:
order.append(next)
relations.remove(next)
log.debug2("%d: %s", idx, next.getNEVRA())
idx += 1
else:
loops = orderer.getLoops(relations)
printLoops(relations, loops, "loop_%03d.dot" % loop_count)
loop_count += 1
if orderer.breakupLoops(relations, loops) != 1:
log.errorLn("Unable to breakup loop.")
return None
if pyrpm.rpmconfig.debug > 1:
for r in last:
log.debug2("%d: %s", idx, r.getNEVRA())
idx += 1
return (order + last)
# -----------------------------------------------------------------------
operation = pyrpm.OP_UPDATE
db = pyrpm.database.memorydb.RpmMemoryDB(pyrpm.rpmconfig, None)
db.addPkgs([])
resolver = pyrpm.RpmResolver(pyrpm.rpmconfig, db)
i = 0
l = len(rpms)
while len(rpms) > 0:
if verbose > 0:
progress_write("Appending %d/%d " % (i, l))
r = rpms.pop(0)
# append
resolver.install(r)
i += 1
del rpms
if len(resolver.installs) == 0:
print "Nothing to do."
sys.exit(0)
if resolver.resolve() != 1:
sys.exit(-1)
# -----------------------------------------------------------------------
orderer = pyrpm.RpmOrderer(pyrpm.rpmconfig,
resolver.installs, resolver.updates,
resolver.obsoletes, resolver.erases)
del resolver
relations = orderer.genRelations(orderer.installs, pyrpm.OP_INSTALL)
if relations == None or len(relations) < 1:
sys.exit(-1)
if iteration:
iteration_count = 1
loop_count = 1
orderRpms(orderer, relations)
else:
printRelations(relations, output)
del orderer
sys.exit(0)
| gpl-2.0 | -6,106,591,619,334,053,000 | 31.024176 | 84 | 0.431611 | false |
Hybrid-Cloud/cinder | cinder/api/contrib/snapshot_unmanage.py | 5 | 2737 | # Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import webob
from webob import exc
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder import exception
from cinder.i18n import _LI
from cinder import volume
LOG = logging.getLogger(__name__)
authorize = extensions.extension_authorizer('snapshot', 'snapshot_unmanage')
class SnapshotUnmanageController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(SnapshotUnmanageController, self).__init__(*args, **kwargs)
self.volume_api = volume.API()
@wsgi.response(202)
@wsgi.action('os-unmanage')
def unmanage(self, req, id, body):
"""Stop managing a snapshot.
This action is very much like a delete, except that a different
method (unmanage) is called on the Cinder driver. This has the effect
of removing the snapshot from Cinder management without actually
removing the backend storage object associated with it.
There are no required parameters.
A Not Found error is returned if the specified snapshot does not exist.
"""
context = req.environ['cinder.context']
authorize(context)
LOG.info(_LI("Unmanage snapshot with id: %s"), id)
try:
snapshot = self.volume_api.get_snapshot(context, id)
self.volume_api.delete_snapshot(context, snapshot,
unmanage_only=True)
# Not found exception will be handled at the wsgi level
except exception.InvalidSnapshot as ex:
raise exc.HTTPBadRequest(explanation=ex.msg)
return webob.Response(status_int=202)
class Snapshot_unmanage(extensions.ExtensionDescriptor):
"""Enable volume unmanage operation."""
name = "SnapshotUnmanage"
alias = "os-snapshot-unmanage"
updated = "2014-12-31T00:00:00+00:00"
def get_controller_extensions(self):
controller = SnapshotUnmanageController()
extension = extensions.ControllerExtension(self, 'snapshots',
controller)
return [extension]
| apache-2.0 | -3,679,071,870,225,744,400 | 35.986486 | 79 | 0.678115 | false |
francisliyy/caravel-aidp | caravel/migrations/versions/27ae655e4247_make_creator_owners.py | 3 | 1155 | """Make creator owners
Revision ID: 27ae655e4247
Revises: d8bc074f7aad
Create Date: 2016-06-27 08:43:52.592242
"""
# revision identifiers, used by Alembic.
revision = '27ae655e4247'
down_revision = 'd8bc074f7aad'
from alembic import op
from caravel import db
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from flask_appbuilder import Model
from sqlalchemy import (
Column, Integer, ForeignKey, Table)
Base = declarative_base()
class Slice(Base):
"""Declarative class to do query in upgrade"""
__tablename__ = 'slices'
id = Column(Integer, primary_key=True)
class Dashboard(Base):
"""Declarative class to do query in upgrade"""
__tablename__ = 'dashboards'
id = Column(Integer, primary_key=True)
def upgrade():
bind = op.get_bind()
session = db.Session(bind=bind)
objects = session.query(Slice).all()
objects += session.query(Dashboard).all()
for obj in objects:
if obj.created_by and obj.created_by not in obj.owners:
obj.owners.append(obj.created_by)
session.commit()
session.close()
def downgrade():
pass
| apache-2.0 | 6,249,852,287,527,510,000 | 22.1 | 63 | 0.697835 | false |
nham/rust | src/etc/maketest.py | 15 | 2681 | # Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
import subprocess
import os
import sys
# msys1/msys2 automatically converts `/abs/path1:/abs/path2` into
# `c:\real\abs\path1;c:\real\abs\path2` (semicolons) if the shell thinks
# the value is a list of paths.
# (if there is only one path, it becomes `c:/real/abs/path`.)
# This causes great confusion and errors: the shell and Makefile don't like
# Windows paths, so it is really error-prone. Revert it for peace of mind.
def normalize_path(v):
v = v.replace('\\', '/')
# c:/path -> /c/path
if ':/' in v:
v = '/' + v.replace(':/', '/')
return v
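# Worked example (illustrative): normalize_path('C:\\rust\\test') first becomes
# 'C:/rust/test' and then '/C/rust/test', the msys-style form the makefiles expect.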
def putenv(name, value):
if os.name == 'nt':
value = normalize_path(value)
os.putenv(name, value)
def convert_path_spec(name, value):
if os.name == 'nt' and name != 'PATH':
value = ":".join(normalize_path(v) for v in value.split(";"))
return value
make = sys.argv[2]
putenv('RUSTC', os.path.abspath(sys.argv[3]))
putenv('TMPDIR', os.path.abspath(sys.argv[4]))
putenv('CC', sys.argv[5])
putenv('RUSTDOC', os.path.abspath(sys.argv[6]))
filt = sys.argv[7]
putenv('LD_LIB_PATH_ENVVAR', sys.argv[8]);
putenv('HOST_RPATH_DIR', os.path.abspath(sys.argv[9]));
putenv('TARGET_RPATH_DIR', os.path.abspath(sys.argv[10]));
putenv('RUST_BUILD_STAGE', sys.argv[11])
if not filt in sys.argv[1]:
sys.exit(0)
print('maketest: ' + os.path.basename(os.path.dirname(sys.argv[1])))
path = sys.argv[1]
if path[-1] == '/':
    # msys1 has a bug where `make` fails to include `../tools.mk` (parent dir)
    # if the `-C path` option is given and `path` is an absolute directory with
    # a trailing slash (`c:/path/to/test/`).
    # The easiest workaround is to remove the slash (`c:/path/to/test`).
# msys2 seems to fix this problem.
path = path[:-1]
proc = subprocess.Popen([make, '-C', path],
stdout = subprocess.PIPE,
stderr = subprocess.PIPE)
out, err = proc.communicate()
i = proc.wait()
if i != 0:
print '----- ' + sys.argv[1] + """ --------------------
------ stdout ---------------------------------------------
""" + out + """
------ stderr ---------------------------------------------
""" + err + """
------ ---------------------------------------------
"""
sys.exit(i)
| apache-2.0 | 5,905,180,420,240,376,000 | 32.936709 | 77 | 0.596046 | false |
li-ch/pykka | tests/actor_test.py | 1 | 13696 | import threading
import unittest
import uuid
from pykka.actor import ThreadingActor
from pykka.exceptions import ActorDeadError
from pykka.registry import ActorRegistry
class AnActor(object):
def __init__(self, **events):
super(AnActor, self).__init__()
self.on_start_was_called = events['on_start_was_called']
self.on_stop_was_called = events['on_stop_was_called']
self.on_failure_was_called = events['on_failure_was_called']
self.actor_was_registered_before_on_start_was_called = events[
'actor_was_registered_before_on_start_was_called']
self.greetings_was_received = events['greetings_was_received']
def on_start(self):
self.on_start_was_called.set()
if ActorRegistry.get_by_urn(self.actor_urn) is not None:
self.actor_was_registered_before_on_start_was_called.set()
def on_stop(self):
self.on_stop_was_called.set()
def on_failure(self, *args):
self.on_failure_was_called.set()
def on_receive(self, message):
if message.get('command') == 'raise exception':
raise Exception('foo')
elif message.get('command') == 'raise base exception':
raise BaseException()
elif message.get('command') == 'stop twice':
self.stop()
self.stop()
elif message.get('command') == 'message self then stop':
self.actor_ref.tell({'command': 'greetings'})
self.stop()
elif message.get('command') == 'greetings':
self.greetings_was_received.set()
elif message.get('command') == 'callback':
message['callback']()
else:
super(AnActor, self).on_receive(message)
class EarlyStoppingActor(object):
def __init__(self, on_stop_was_called):
super(EarlyStoppingActor, self).__init__()
self.on_stop_was_called = on_stop_was_called
def on_start(self):
self.stop()
def on_stop(self):
self.on_stop_was_called.set()
class EarlyFailingActor(object):
def __init__(self, on_start_was_called):
super(EarlyFailingActor, self).__init__()
self.on_start_was_called = on_start_was_called
def on_start(self):
try:
raise RuntimeError('on_start failure')
finally:
self.on_start_was_called.set()
class LateFailingActor(object):
def __init__(self, on_stop_was_called):
super(LateFailingActor, self).__init__()
self.on_stop_was_called = on_stop_was_called
def on_start(self):
self.stop()
def on_stop(self):
try:
raise RuntimeError('on_stop failure')
finally:
self.on_stop_was_called.set()
class FailingOnFailureActor(object):
def __init__(self, on_failure_was_called):
super(FailingOnFailureActor, self).__init__()
self.on_failure_was_called = on_failure_was_called
def on_receive(self, message):
if message.get('command') == 'raise exception':
raise Exception('on_receive failure')
else:
super(FailingOnFailureActor, self).on_receive(message)
def on_failure(self, *args):
try:
raise RuntimeError('on_failure failure')
finally:
self.on_failure_was_called.set()
class ActorTest(object):
def setUp(self):
self.on_start_was_called = self.event_class()
self.on_stop_was_called = self.event_class()
self.on_failure_was_called = self.event_class()
self.actor_was_registered_before_on_start_was_called = (
self.event_class())
self.greetings_was_received = self.event_class()
self.actor_ref = self.AnActor.start(
on_start_was_called=self.on_start_was_called,
on_stop_was_called=self.on_stop_was_called,
on_failure_was_called=self.on_failure_was_called,
actor_was_registered_before_on_start_was_called=(
self.actor_was_registered_before_on_start_was_called),
greetings_was_received=self.greetings_was_received)
self.actor_proxy = self.actor_ref.proxy()
def tearDown(self):
ActorRegistry.stop_all()
def test_messages_left_in_queue_after_actor_stops_receive_an_error(self):
event = self.event_class()
self.actor_ref.tell({'command': 'callback', 'callback': event.wait})
self.actor_ref.stop(block=False)
response = self.actor_ref.ask({'command': 'irrelevant'}, block=False)
event.set()
self.assertRaises(ActorDeadError, response.get, timeout=0.5)
def test_stop_requests_left_in_queue_after_actor_stops_are_handled(self):
event = self.event_class()
self.actor_ref.tell({'command': 'callback', 'callback': event.wait})
self.actor_ref.stop(block=False)
response = self.actor_ref.ask({'command': 'pykka_stop'}, block=False)
event.set()
response.get(timeout=0.5)
def test_actor_has_an_uuid4_based_urn(self):
self.assertEqual(4, uuid.UUID(self.actor_ref.actor_urn).version)
def test_actor_has_unique_uuid(self):
event = self.event_class()
actors = [
self.AnActor.start(
on_start_was_called=event,
on_stop_was_called=event,
on_failure_was_called=event,
actor_was_registered_before_on_start_was_called=event,
greetings_was_received=event)
for _ in range(3)]
self.assertNotEqual(actors[0].actor_urn, actors[1].actor_urn)
self.assertNotEqual(actors[1].actor_urn, actors[2].actor_urn)
self.assertNotEqual(actors[2].actor_urn, actors[0].actor_urn)
def test_str_on_raw_actor_contains_actor_class_name(self):
event = self.event_class()
unstarted_actor = self.AnActor(
on_start_was_called=event,
on_stop_was_called=event,
on_failure_was_called=event,
actor_was_registered_before_on_start_was_called=event,
greetings_was_received=event)
self.assert_('AnActor' in str(unstarted_actor))
def test_str_on_raw_actor_contains_actor_urn(self):
event = self.event_class()
unstarted_actor = self.AnActor(
on_start_was_called=event,
on_stop_was_called=event,
on_failure_was_called=event,
actor_was_registered_before_on_start_was_called=event,
greetings_was_received=event)
self.assert_(unstarted_actor.actor_urn in str(unstarted_actor))
def test_init_can_be_called_with_arbitrary_arguments(self):
self.SuperInitActor(1, 2, 3, foo='bar')
def test_on_start_is_called_before_first_message_is_processed(self):
self.on_start_was_called.wait(5)
self.assertTrue(self.on_start_was_called.is_set())
def test_on_start_is_called_after_the_actor_is_registered(self):
# NOTE: If the actor is registered after the actor is started, this
        # test may still occasionally pass, as it is dependent on the exact
# timing of events. When the actor is first registered and then
# started, this test should always pass.
self.on_start_was_called.wait(5)
self.assertTrue(self.on_start_was_called.is_set())
self.actor_was_registered_before_on_start_was_called.wait(0.1)
self.assertTrue(
self.actor_was_registered_before_on_start_was_called.is_set())
def test_on_start_can_stop_actor_before_receive_loop_is_started(self):
# NOTE: This test will pass even if the actor is allowed to start the
# receive loop, but it will cause the test suite to hang, as the actor
# thread is blocking on receiving messages to the actor inbox forever.
# If one made this test specifically for ThreadingActor, one could add
# an assertFalse(actor_thread.is_alive()), which would cause the test
# to fail properly.
stop_event = self.event_class()
another_actor = self.EarlyStoppingActor.start(stop_event)
stop_event.wait(5)
self.assertTrue(stop_event.is_set())
self.assertFalse(another_actor.is_alive())
def test_on_start_failure_causes_actor_to_stop(self):
# Actor should not be alive if on_start fails.
start_event = self.event_class()
actor_ref = self.EarlyFailingActor.start(start_event)
start_event.wait(5)
actor_ref.actor_stopped.wait(5)
self.assertFalse(actor_ref.is_alive())
def test_on_stop_is_called_when_actor_is_stopped(self):
self.assertFalse(self.on_stop_was_called.is_set())
self.actor_ref.stop()
self.on_stop_was_called.wait(5)
self.assertTrue(self.on_stop_was_called.is_set())
def test_on_stop_failure_causes_actor_to_stop(self):
stop_event = self.event_class()
actor = self.LateFailingActor.start(stop_event)
stop_event.wait(5)
self.assertFalse(actor.is_alive())
def test_on_failure_is_called_when_exception_cannot_be_returned(self):
self.assertFalse(self.on_failure_was_called.is_set())
self.actor_ref.tell({'command': 'raise exception'})
self.on_failure_was_called.wait(5)
self.assertTrue(self.on_failure_was_called.is_set())
self.assertFalse(self.on_stop_was_called.is_set())
def test_on_failure_failure_causes_actor_to_stop(self):
failure_event = self.event_class()
actor = self.FailingOnFailureActor.start(failure_event)
actor.tell({'command': 'raise exception'})
failure_event.wait(5)
self.assertFalse(actor.is_alive())
def test_actor_is_stopped_when_unhandled_exceptions_are_raised(self):
self.assertFalse(self.on_failure_was_called.is_set())
self.actor_ref.tell({'command': 'raise exception'})
self.on_failure_was_called.wait(5)
self.assertTrue(self.on_failure_was_called.is_set())
self.assertEqual(0, len(ActorRegistry.get_all()))
def test_all_actors_are_stopped_on_base_exception(self):
start_event = self.event_class()
stop_event = self.event_class()
fail_event = self.event_class()
registered_event = self.event_class()
greetings_event = self.event_class()
self.AnActor.start(
on_start_was_called=start_event,
on_stop_was_called=stop_event,
on_failure_was_called=fail_event,
actor_was_registered_before_on_start_was_called=registered_event,
greetings_was_received=greetings_event)
self.assertEqual(2, len(ActorRegistry.get_all()))
self.assertFalse(self.on_stop_was_called.is_set())
self.actor_ref.tell({'command': 'raise base exception'})
self.on_stop_was_called.wait(5)
self.assertTrue(self.on_stop_was_called.is_set())
self.assert_(1 >= len(ActorRegistry.get_all()))
stop_event.wait(5)
self.assertTrue(stop_event.is_set())
self.assertEqual(0, len(ActorRegistry.get_all()))
def test_actor_can_call_stop_on_self_multiple_times(self):
self.actor_ref.ask({'command': 'stop twice'})
def test_actor_processes_all_messages_before_stop_on_self_stops_it(self):
self.actor_ref.ask({'command': 'message self then stop'})
self.greetings_was_received.wait(5)
self.assertTrue(self.greetings_was_received.is_set())
self.on_stop_was_called.wait(5)
self.assertEqual(0, len(ActorRegistry.get_all()))
def ConcreteActorTest(actor_class, event_class):
class C(ActorTest, unittest.TestCase):
class AnActor(AnActor, actor_class):
pass
class EarlyStoppingActor(EarlyStoppingActor, actor_class):
pass
class EarlyFailingActor(EarlyFailingActor, actor_class):
pass
class LateFailingActor(LateFailingActor, actor_class):
pass
class FailingOnFailureActor(FailingOnFailureActor, actor_class):
pass
class SuperInitActor(actor_class):
pass
C.__name__ = '%sTest' % actor_class.__name__
C.event_class = event_class
return C
class ThreadingActorTest(ConcreteActorTest(ThreadingActor, threading.Event)):
class DaemonActor(ThreadingActor):
use_daemon_thread = True
def test_actor_thread_is_named_after_pykka_actor_class(self):
alive_threads = threading.enumerate()
alive_thread_names = [t.name for t in alive_threads]
named_correctly = [
name.startswith(AnActor.__name__) for name in alive_thread_names]
self.assert_(any(named_correctly))
def test_actor_thread_is_not_daemonic_by_default(self):
alive_threads = threading.enumerate()
actor_threads = [
t for t in alive_threads if t.name.startswith('AnActor')]
self.assertEqual(1, len(actor_threads))
self.assertFalse(actor_threads[0].daemon)
def test_actor_thread_is_daemonic_if_use_daemon_thread_flag_is_set(self):
actor_ref = self.DaemonActor.start()
alive_threads = threading.enumerate()
actor_threads = [
t for t in alive_threads if t.name.startswith('DaemonActor')]
self.assertEqual(1, len(actor_threads))
self.assertTrue(actor_threads[0].daemon)
actor_ref.stop()
try:
import gevent.event
from pykka.gevent import GeventActor
GeventActorTest = ConcreteActorTest(GeventActor, gevent.event.Event)
except ImportError:
pass
try:
import eventlet # noqa
from pykka.eventlet import EventletActor, EventletEvent
EventletActorTest = ConcreteActorTest(EventletActor, EventletEvent)
except ImportError:
pass
| apache-2.0 | -1,672,452,303,989,471,700 | 37.256983 | 78 | 0.63953 | false |
piyush1911/git-cola | cola/widgets/archive.py | 7 | 8558 | from __future__ import division, absolute_import, unicode_literals
import os
from PyQt4 import QtCore
from PyQt4 import QtGui
from PyQt4.QtCore import Qt
from PyQt4.QtCore import SIGNAL
from cola import cmds
from cola import core
from cola import icons
from cola import qtutils
from cola.git import git
from cola.git import STDOUT
from cola.i18n import N_
from cola.widgets import defs
from cola.compat import ustr
class ExpandableGroupBox(QtGui.QGroupBox):
def __init__(self, parent=None):
QtGui.QGroupBox.__init__(self, parent)
self.setFlat(True)
self.expanded = True
self.click_pos = None
self.arrow_icon_size = defs.small_icon
def set_expanded(self, expanded):
if expanded == self.expanded:
self.emit(SIGNAL('expanded(bool)'), expanded)
return
self.expanded = expanded
for widget in self.findChildren(QtGui.QWidget):
widget.setHidden(not expanded)
self.emit(SIGNAL('expanded(bool)'), expanded)
def mousePressEvent(self, event):
if event.button() == Qt.LeftButton:
option = QtGui.QStyleOptionGroupBox()
self.initStyleOption(option)
icon_size = defs.small_icon
button_area = QtCore.QRect(0, 0, icon_size, icon_size)
offset = icon_size + defs.spacing
adjusted = option.rect.adjusted(0, 0, -offset, 0)
top_left = adjusted.topLeft()
button_area.moveTopLeft(QtCore.QPoint(top_left))
self.click_pos = event.pos()
QtGui.QGroupBox.mousePressEvent(self, event)
def mouseReleaseEvent(self, event):
if (event.button() == Qt.LeftButton and
self.click_pos == event.pos()):
self.set_expanded(not self.expanded)
QtGui.QGroupBox.mouseReleaseEvent(self, event)
def paintEvent(self, event):
painter = QtGui.QStylePainter(self)
option = QtGui.QStyleOptionGroupBox()
self.initStyleOption(option)
painter.save()
painter.translate(self.arrow_icon_size + defs.spacing, 0)
painter.drawText(option.rect, Qt.AlignLeft, self.title())
painter.restore()
style = QtGui.QStyle
point = option.rect.adjusted(0, -4, 0, 0).topLeft()
icon_size = self.arrow_icon_size
option.rect = QtCore.QRect(point.x(), point.y(), icon_size, icon_size)
if self.expanded:
painter.drawPrimitive(style.PE_IndicatorArrowDown, option)
else:
painter.drawPrimitive(style.PE_IndicatorArrowRight, option)
class GitArchiveDialog(QtGui.QDialog):
@staticmethod
def save_hashed_objects(ref, shortref, parent=None):
dlg = GitArchiveDialog(ref, shortref, parent)
if dlg.exec_() != dlg.Accepted:
return None
return dlg
def __init__(self, ref, shortref=None, parent=None):
QtGui.QDialog.__init__(self, parent)
if parent is not None:
self.setWindowModality(Qt.WindowModal)
# input
self.ref = ref
if shortref is None:
shortref = ref
# outputs
self.fmt = None
filename = '%s-%s' % (os.path.basename(core.getcwd()), shortref)
self.prefix = filename + '/'
self.filename = filename
# widgets
self.setWindowTitle(N_('Save Archive'))
self.filetext = QtGui.QLineEdit()
self.filetext.setText(self.filename)
self.browse = qtutils.create_toolbutton(icon=icons.file_zip())
self.format_strings = (
git.archive('--list')[STDOUT].rstrip().splitlines())
self.format_combo = QtGui.QComboBox()
self.format_combo.setEditable(False)
self.format_combo.addItems(self.format_strings)
self.close_button = qtutils.close_button()
self.save_button = qtutils.create_button(text=N_('Save'),
icon=icons.save(),
default=True)
self.prefix_label = QtGui.QLabel()
self.prefix_label.setText(N_('Prefix'))
self.prefix_text = QtGui.QLineEdit()
self.prefix_text.setText(self.prefix)
self.prefix_group = ExpandableGroupBox()
self.prefix_group.setTitle(N_('Advanced'))
# layouts
self.filelayt = qtutils.hbox(defs.no_margin, defs.spacing,
self.browse, self.filetext,
self.format_combo)
self.prefixlayt = qtutils.hbox(defs.margin, defs.spacing,
self.prefix_label, self.prefix_text)
self.prefix_group.setLayout(self.prefixlayt)
self.prefix_group.set_expanded(False)
self.btnlayt = qtutils.hbox(defs.no_margin, defs.spacing,
qtutils.STRETCH, self.close_button,
self.save_button)
self.mainlayt = qtutils.vbox(defs.margin, defs.no_spacing,
self.filelayt, self.prefix_group,
qtutils.STRETCH, self.btnlayt)
self.setLayout(self.mainlayt)
self.resize(defs.scale(520), defs.scale(10))
# initial setup; done before connecting to avoid
# signal/slot side-effects
if 'tar.gz' in self.format_strings:
idx = self.format_strings.index('tar.gz')
elif 'zip' in self.format_strings:
idx = self.format_strings.index('zip')
else:
idx = 0
self.format_combo.setCurrentIndex(idx)
self.update_filetext_for_format(idx)
# connections
self.connect(self.filetext, SIGNAL('textChanged(QString)'),
self.filetext_changed)
self.connect(self.prefix_text, SIGNAL('textChanged(QString)'),
self.prefix_text_changed)
self.connect(self.format_combo, SIGNAL('currentIndexChanged(int)'),
self.update_filetext_for_format)
self.connect(self.prefix_group, SIGNAL('expanded(bool)'),
self.prefix_group_expanded)
self.connect(self, SIGNAL('accepted()'), self.archive_saved)
qtutils.connect_button(self.browse, self.choose_filename)
qtutils.connect_button(self.close_button, self.reject)
qtutils.connect_button(self.save_button, self.save_archive)
def archive_saved(self):
cmds.do(cmds.Archive, self.ref, self.fmt, self.prefix, self.filename)
qtutils.information(N_('File Saved'),
N_('File saved to "%s"') % self.filename)
def save_archive(self):
filename = self.filename
if not filename:
return
if core.exists(filename):
title = N_('Overwrite File?')
msg = N_('The file "%s" exists and will be overwritten.') % filename
info_txt = N_('Overwrite "%s"?') % filename
ok_txt = N_('Overwrite')
if not qtutils.confirm(title, msg, info_txt, ok_txt,
default=False, icon=icons.save()):
return
self.accept()
def choose_filename(self):
filename = qtutils.save_as(self.filename)
if not filename:
return
self.filetext.setText(filename)
self.update_filetext_for_format(self.format_combo.currentIndex())
def filetext_changed(self, qstr):
self.filename = ustr(qstr)
        self.save_button.setEnabled(bool(self.filename))
prefix = self.strip_exts(os.path.basename(self.filename)) + '/'
self.prefix_text.setText(prefix)
def prefix_text_changed(self, qstr):
self.prefix = ustr(qstr)
def strip_exts(self, text):
for format_string in self.format_strings:
ext = '.'+format_string
if text.endswith(ext):
return text[:-len(ext)]
return text
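        # Illustrative example: with 'tar.gz' among the known format strings,
        # strip_exts('cola-1.0.tar.gz') would return 'cola-1.0'.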
def update_filetext_for_format(self, idx):
self.fmt = self.format_strings[idx]
text = self.strip_exts(ustr(self.filetext.text()))
self.filename = '%s.%s' % (text, self.fmt)
self.filetext.setText(self.filename)
self.filetext.setFocus()
if '/' in text:
start = text.rindex('/') + 1
else:
start = 0
self.filetext.setSelection(start, len(text) - start)
def prefix_group_expanded(self, expanded):
if expanded:
self.prefix_text.setFocus()
else:
self.filetext.setFocus()
| gpl-2.0 | 726,761,229,199,508,500 | 34.957983 | 80 | 0.590091 | false |
elaOnMars/logya | docs/conf.py | 1 | 7939 | # -*- coding: utf-8 -*-
#
# logya documentation build configuration file, created by
# sphinx-quickstart on Sun Sep 8 18:10:11 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'logya'
copyright = u'2015, Ramiro Gómez'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '4.0.0'
# The full version, including alpha/beta/rc tags.
release = '4.0.0dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = '_static/logya.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'logyadoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'logya.tex', u'logya Documentation',
u'Ramiro Gómez', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'logya', u'Logya Documentation',
[u'Ramiro Gómez'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'logya', u'logya Documentation',
u'Ramiro Gómez', 'logya', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mit | 8,130,983,754,686,606,000 | 30.995968 | 80 | 0.703718 | false |
alexkolar/home-assistant | tests/common.py | 6 | 5290 | """
tests.helper
~~~~~~~~~~~~~
Helper methods for writing tests.
"""
import os
from datetime import timedelta
from unittest import mock
from homeassistant import core as ha, loader
import homeassistant.util.location as location_util
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.const import (
STATE_ON, STATE_OFF, DEVICE_DEFAULT_NAME, EVENT_TIME_CHANGED,
EVENT_STATE_CHANGED, EVENT_PLATFORM_DISCOVERED, ATTR_SERVICE,
ATTR_DISCOVERED)
from homeassistant.components import sun, mqtt
def get_test_config_dir():
""" Returns a path to a test config dir. """
return os.path.join(os.path.dirname(__file__), "config")
def get_test_home_assistant(num_threads=None):
""" Returns a Home Assistant object pointing at test config dir. """
if num_threads:
orig_num_threads = ha.MIN_WORKER_THREAD
ha.MIN_WORKER_THREAD = num_threads
hass = ha.HomeAssistant()
if num_threads:
ha.MIN_WORKER_THREAD = orig_num_threads
hass.config.config_dir = get_test_config_dir()
hass.config.latitude = 32.87336
hass.config.longitude = -117.22743
if 'custom_components.test' not in loader.AVAILABLE_COMPONENTS:
loader.prepare(hass)
return hass
def mock_detect_location_info():
""" Mock implementation of util.detect_location_info. """
return location_util.LocationInfo(
ip='1.1.1.1',
country_code='US',
country_name='United States',
region_code='CA',
region_name='California',
city='San Diego',
zip_code='92122',
time_zone='America/Los_Angeles',
latitude='2.0',
longitude='1.0',
use_fahrenheit=True,
)
def mock_service(hass, domain, service):
"""
Sets up a fake service.
Returns a list that logs all calls to fake service.
"""
calls = []
hass.services.register(
domain, service, lambda call: calls.append(call))
return calls
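# Illustrative usage sketch (domain and service names are made up):
#     calls = mock_service(hass, 'test_domain', 'test_service')
#     hass.services.call('test_domain', 'test_service')
#     # once the call has been processed, `calls` holds the ServiceCall objects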
def fire_mqtt_message(hass, topic, payload, qos=0):
hass.bus.fire(mqtt.EVENT_MQTT_MESSAGE_RECEIVED, {
mqtt.ATTR_TOPIC: topic,
mqtt.ATTR_PAYLOAD: payload,
mqtt.ATTR_QOS: qos,
})
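# Illustrative usage sketch (topic and payload are made up):
#     fire_mqtt_message(hass, 'home/sensor1', '21.5', qos=0)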
def fire_time_changed(hass, time):
hass.bus.fire(EVENT_TIME_CHANGED, {'now': time})
def fire_service_discovered(hass, service, info):
hass.bus.fire(EVENT_PLATFORM_DISCOVERED, {
ATTR_SERVICE: service,
ATTR_DISCOVERED: info
})
def ensure_sun_risen(hass):
""" Trigger sun to rise if below horizon. """
if sun.is_on(hass):
return
fire_time_changed(hass, sun.next_rising_utc(hass) + timedelta(seconds=10))
def ensure_sun_set(hass):
""" Trigger sun to set if above horizon. """
if not sun.is_on(hass):
return
fire_time_changed(hass, sun.next_setting_utc(hass) + timedelta(seconds=10))
def mock_state_change_event(hass, new_state, old_state=None):
event_data = {
'entity_id': new_state.entity_id,
'new_state': new_state,
}
if old_state:
event_data['old_state'] = old_state
hass.bus.fire(EVENT_STATE_CHANGED, event_data)
def mock_http_component(hass):
hass.http = MockHTTP()
hass.config.components.append('http')
def mock_mqtt_component(hass):
with mock.patch('homeassistant.components.mqtt.MQTT'):
mqtt.setup(hass, {
mqtt.DOMAIN: {
mqtt.CONF_BROKER: 'mock-broker',
}
})
hass.config.components.append(mqtt.DOMAIN)
class MockHTTP(object):
""" Mocks the HTTP module. """
def register_path(self, method, url, callback, require_auth=True):
pass
class MockModule(object):
""" Provides a fake module. """
def __init__(self, domain, dependencies=[], setup=None):
self.DOMAIN = domain
self.DEPENDENCIES = dependencies
# Setup a mock setup if none given.
self.setup = lambda hass, config: False if setup is None else setup
class MockToggleDevice(ToggleEntity):
""" Provides a mock toggle device. """
def __init__(self, name, state):
self._name = name or DEVICE_DEFAULT_NAME
self._state = state
self.calls = []
@property
def name(self):
""" Returns the name of the device if any. """
self.calls.append(('name', {}))
return self._name
@property
def state(self):
""" Returns the name of the device if any. """
self.calls.append(('state', {}))
return self._state
@property
def is_on(self):
""" True if device is on. """
self.calls.append(('is_on', {}))
return self._state == STATE_ON
def turn_on(self, **kwargs):
""" Turn the device on. """
self.calls.append(('turn_on', kwargs))
self._state = STATE_ON
def turn_off(self, **kwargs):
""" Turn the device off. """
self.calls.append(('turn_off', kwargs))
self._state = STATE_OFF
def last_call(self, method=None):
if not self.calls:
return None
elif method is None:
return self.calls[-1]
else:
try:
return next(call for call in reversed(self.calls)
if call[0] == method)
except StopIteration:
return None
| mit | 8,768,889,767,843,919,000 | 25.582915 | 79 | 0.612098 | false |
InUrSys/PescArt2.0 | src/srcPlus/SQL_ViewModel.py | 1 | 1264 | '''
Created on 04/09/2017
@author: chernomirdinmacuvele
'''
from PyQt5.Qt import Qt, QSqlRelationalTableModel
from PyQt5 import QtSql
import QT_msg
class viewModel():
def __init__(self, dbcon= None, TblName= None, dictModel=None, filtro=None):
self.TblName = TblName
self.dictModel = dictModel
self.filtro = filtro
self.msg = QT_msg
self.txt_306 = "Erro nº:309 \nPorfavor tente de novo."
self.txt_100 = "Erro nº: 100 \nPorfavor tente de novo."
self.setViewModel()
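        # Note: `dictModel` is assumed to provide the keys consumed by
        # setViewModel() below: 'val2Rel' (per-column (field, display) pairs or
        # None), 'relTblName' (related table names) and 'newNames' (header labels).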
def setViewModel(self):
self.Model = QSqlRelationalTableModel()
self.Model.setTable(self.TblName)
self.Model.setFilter(self.filtro)
for idx, val in enumerate (self.dictModel['val2Rel']):
if val is not None:
self.Model.setRelation(idx, QtSql.QSqlRelation(self.dictModel["relTblName"][idx], val[0], val[1]))
self.Model.setSort(0, Qt.AscendingOrder)
for idx, val in enumerate (self.dictModel['newNames']):
self.Model.setHeaderData(idx, Qt.Horizontal, val)
bOK = self.Model.select()
if not bOK:
self.msg.error(txt="Erro na configuracao do Modelo", verbTxt=str(self.Model.lastError().text()))
| gpl-3.0 | -1,212,956,792,029,076,500 | 34.083333 | 116 | 0.630745 | false |
bswartz/cinder | cinder/volume/drivers/kaminario/kaminario_common.py | 1 | 21966 | # Copyright (c) 2016 by Kaminario Technologies, Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Volume driver for Kaminario K2 all-flash arrays."""
import re
import six
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import importutils
from oslo_utils import units
from oslo_utils import versionutils
import cinder
from cinder import exception
from cinder.i18n import _, _LE, _LW
from cinder import utils
from cinder.volume.drivers.san import san
from cinder.volume import utils as vol_utils
K2_MIN_VERSION = '2.2.0'
LOG = logging.getLogger(__name__)
kaminario1_opts = [
cfg.StrOpt('kaminario_nodedup_substring',
default='K2-nodedup',
help="If volume-type name contains this substring "
"nodedup volume will be created, otherwise "
"dedup volume wil be created.")]
kaminario2_opts = [
cfg.BoolOpt('auto_calc_max_oversubscription_ratio',
default=False,
help="K2 driver will calculate max_oversubscription_ratio "
"on setting this option as True.")]
CONF = cfg.CONF
CONF.register_opts(kaminario1_opts)
def kaminario_logger(func):
"""Return a function wrapper.
    The wrapper logs entry to and exit from the decorated function.
"""
def func_wrapper(*args, **kwargs):
LOG.debug('Entering %(function)s of %(class)s with arguments: '
' %(args)s, %(kwargs)s',
{'class': args[0].__class__.__name__,
'function': func.__name__,
'args': args[1:],
'kwargs': kwargs})
ret = func(*args, **kwargs)
LOG.debug('Exiting %(function)s of %(class)s '
'having return value: %(ret)s',
{'class': args[0].__class__.__name__,
'function': func.__name__,
'ret': ret})
return ret
return func_wrapper
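# Illustrative use of the decorator, as done for the driver methods below:
#     @kaminario_logger
#     def create_volume(self, volume):
#         ...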
class KaminarioCinderDriver(cinder.volume.driver.ISCSIDriver):
VENDOR = "Kaminario"
VERSION = "1.0"
stats = {}
def __init__(self, *args, **kwargs):
super(KaminarioCinderDriver, self).__init__(*args, **kwargs)
self.configuration.append_config_values(san.san_opts)
self.configuration.append_config_values(kaminario2_opts)
self._protocol = None
def check_for_setup_error(self):
if self.krest is None:
msg = _("Unable to import 'krest' python module.")
LOG.error(msg)
raise exception.KaminarioCinderDriverException(reason=msg)
else:
conf = self.configuration
self.client = self.krest.EndPoint(conf.san_ip,
conf.san_login,
conf.san_password,
ssl_validate=False)
v_rs = self.client.search("system/state")
if hasattr(v_rs, 'hits') and v_rs.total != 0:
ver = v_rs.hits[0].rest_api_version
ver_exist = versionutils.convert_version_to_int(ver)
ver_min = versionutils.convert_version_to_int(K2_MIN_VERSION)
if ver_exist < ver_min:
msg = _("K2 rest api version should be "
">= %s.") % K2_MIN_VERSION
LOG.error(msg)
raise exception.KaminarioCinderDriverException(reason=msg)
else:
msg = _("K2 rest api version search failed.")
LOG.error(msg)
raise exception.KaminarioCinderDriverException(reason=msg)
@kaminario_logger
def _check_ops(self):
"""Ensure that the options we care about are set."""
required_ops = ['san_ip', 'san_login', 'san_password']
for attr in required_ops:
if not getattr(self.configuration, attr, None):
raise exception.InvalidInput(reason=_('%s is not set.') % attr)
@kaminario_logger
def do_setup(self, context):
super(KaminarioCinderDriver, self).do_setup(context)
self._check_ops()
self.krest = importutils.try_import("krest")
@kaminario_logger
def create_volume(self, volume):
"""Volume creation in K2 needs a volume group.
- create a volume group
- create a volume in the volume group
"""
vg_name = self.get_volume_group_name(volume.id)
vol_name = self.get_volume_name(volume.id)
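        # prov_type maps to K2's is_dedup flag: a volume-type name containing
        # the configured nodedup substring requests a non-dedup volume group.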
if CONF.kaminario_nodedup_substring in volume.volume_type.name:
prov_type = False
else:
prov_type = True
try:
LOG.debug("Creating volume group with name: %(name)s, "
"quota: unlimited and dedup_support: %(dedup)s",
{'name': vg_name, 'dedup': prov_type})
vg = self.client.new("volume_groups", name=vg_name, quota=0,
is_dedup=prov_type).save()
LOG.debug("Creating volume with name: %(name)s, size: %(size)s "
"GB, volume_group: %(vg)s",
{'name': vol_name, 'size': volume.size, 'vg': vg_name})
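            # volume.size is in GiB; the units.Mi multiplier below suggests the
            # K2 REST API takes sizes in KiB (an assumption based on the
            # conversion used throughout this driver).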
self.client.new("volumes", name=vol_name,
size=volume.size * units.Mi,
volume_group=vg).save()
except Exception as ex:
vg_rs = self.client.search("volume_groups", name=vg_name)
if vg_rs.total != 0:
LOG.debug("Deleting vg: %s for failed volume in K2.", vg_name)
vg_rs.hits[0].delete()
LOG.exception(_LE("Creation of volume %s failed."), vol_name)
raise exception.KaminarioCinderDriverException(
reason=six.text_type(ex.message))
@kaminario_logger
def create_volume_from_snapshot(self, volume, snapshot):
"""Create volume from snapshot.
- search for snapshot and retention_policy
- create a view from snapshot and attach view
- create a volume and attach volume
- copy data from attached view to attached volume
- detach volume and view and finally delete view
"""
snap_name = self.get_snap_name(snapshot.id)
view_name = self.get_view_name(volume.id)
vol_name = self.get_volume_name(volume.id)
cview = src_attach_info = dest_attach_info = None
rpolicy = self.get_policy()
properties = utils.brick_get_connector_properties()
LOG.debug("Searching for snapshot: %s in K2.", snap_name)
snap_rs = self.client.search("snapshots", short_name=snap_name)
if hasattr(snap_rs, 'hits') and snap_rs.total != 0:
snap = snap_rs.hits[0]
LOG.debug("Creating a view: %(view)s from snapshot: %(snap)s",
{'view': view_name, 'snap': snap_name})
try:
cview = self.client.new("snapshots",
short_name=view_name,
source=snap, retention_policy=rpolicy,
is_exposable=True).save()
except Exception as ex:
LOG.exception(_LE("Creating a view: %(view)s from snapshot: "
"%(snap)s failed"), {"view": view_name,
"snap": snap_name})
raise exception.KaminarioCinderDriverException(
reason=six.text_type(ex.message))
else:
msg = _("Snapshot: %s search failed in K2.") % snap_name
LOG.error(msg)
raise exception.KaminarioCinderDriverException(reason=msg)
try:
conn = self.initialize_connection(cview, properties)
src_attach_info = self._connect_device(conn)
self.create_volume(volume)
conn = self.initialize_connection(volume, properties)
dest_attach_info = self._connect_device(conn)
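            # vol_utils.copy_volume takes the size in MiB; the snapshot's source
            # volume size is in GiB, hence the units.Ki multiplier.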
vol_utils.copy_volume(src_attach_info['device']['path'],
dest_attach_info['device']['path'],
snapshot.volume.size * units.Ki,
self.configuration.volume_dd_blocksize,
sparse=True)
self.terminate_connection(volume, properties)
self.terminate_connection(cview, properties)
except Exception as ex:
self.terminate_connection(cview, properties)
self.terminate_connection(volume, properties)
cview.delete()
self.delete_volume(volume)
LOG.exception(_LE("Copy to volume: %(vol)s from view: %(view)s "
"failed"), {"vol": vol_name, "view": view_name})
raise exception.KaminarioCinderDriverException(
reason=six.text_type(ex.message))
@kaminario_logger
def create_cloned_volume(self, volume, src_vref):
"""Create a clone from source volume.
- attach source volume
- create and attach new volume
- copy data from attached source volume to attached new volume
- detach both volumes
"""
clone_name = self.get_volume_name(volume.id)
src_name = self.get_volume_name(src_vref.id)
src_vol = self.client.search("volumes", name=src_name)
src_map = self.client.search("mappings", volume=src_vol)
if src_map.total != 0:
msg = _("K2 driver does not support clone of a attached volume. "
"To get this done, create a snapshot from the attached "
"volume and then create a volume from the snapshot.")
LOG.error(msg)
raise exception.KaminarioCinderDriverException(reason=msg)
try:
properties = utils.brick_get_connector_properties()
conn = self.initialize_connection(src_vref, properties)
src_attach_info = self._connect_device(conn)
self.create_volume(volume)
conn = self.initialize_connection(volume, properties)
dest_attach_info = self._connect_device(conn)
vol_utils.copy_volume(src_attach_info['device']['path'],
dest_attach_info['device']['path'],
src_vref.size * units.Ki,
self.configuration.volume_dd_blocksize,
sparse=True)
self.terminate_connection(volume, properties)
self.terminate_connection(src_vref, properties)
except Exception as ex:
self.terminate_connection(src_vref, properties)
self.terminate_connection(volume, properties)
self.delete_volume(volume)
LOG.exception(_LE("Create a clone: %s failed."), clone_name)
raise exception.KaminarioCinderDriverException(
reason=six.text_type(ex.message))
@kaminario_logger
def delete_volume(self, volume):
"""Volume in K2 exists in a volume group.
- delete the volume
- delete the corresponding volume group
"""
vg_name = self.get_volume_group_name(volume.id)
vol_name = self.get_volume_name(volume.id)
try:
LOG.debug("Searching and deleting volume: %s in K2.", vol_name)
vol_rs = self.client.search("volumes", name=vol_name)
if vol_rs.total != 0:
vol_rs.hits[0].delete()
LOG.debug("Searching and deleting vg: %s in K2.", vg_name)
vg_rs = self.client.search("volume_groups", name=vg_name)
if vg_rs.total != 0:
vg_rs.hits[0].delete()
except Exception as ex:
LOG.exception(_LE("Deletion of volume %s failed."), vol_name)
raise exception.KaminarioCinderDriverException(
reason=six.text_type(ex.message))
@kaminario_logger
def get_volume_stats(self, refresh=False):
if refresh:
self.update_volume_stats()
stats = self.stats
stats['storage_protocol'] = self._protocol
stats['driver_version'] = self.VERSION
stats['vendor_name'] = self.VENDOR
backend_name = self.configuration.safe_get('volume_backend_name')
stats['volume_backend_name'] = (backend_name or
self.__class__.__name__)
return stats
def create_export(self, context, volume, connector):
pass
def ensure_export(self, context, volume):
pass
def remove_export(self, context, volume):
pass
@kaminario_logger
def create_snapshot(self, snapshot):
"""Create a snapshot from a volume_group."""
vg_name = self.get_volume_group_name(snapshot.volume_id)
snap_name = self.get_snap_name(snapshot.id)
rpolicy = self.get_policy()
try:
LOG.debug("Searching volume_group: %s in K2.", vg_name)
vg = self.client.search("volume_groups", name=vg_name).hits[0]
LOG.debug("Creating a snapshot: %(snap)s from vg: %(vg)s",
{'snap': snap_name, 'vg': vg_name})
self.client.new("snapshots", short_name=snap_name,
source=vg, retention_policy=rpolicy).save()
except Exception as ex:
LOG.exception(_LE("Creation of snapshot: %s failed."), snap_name)
raise exception.KaminarioCinderDriverException(
reason=six.text_type(ex.message))
@kaminario_logger
def delete_snapshot(self, snapshot):
"""Delete a snapshot."""
snap_name = self.get_snap_name(snapshot.id)
try:
LOG.debug("Searching and deleting snapshot: %s in K2.", snap_name)
snap_rs = self.client.search("snapshots", short_name=snap_name)
if snap_rs.total != 0:
snap_rs.hits[0].delete()
except Exception as ex:
LOG.exception(_LE("Deletion of snapshot: %s failed."), snap_name)
raise exception.KaminarioCinderDriverException(
reason=six.text_type(ex.message))
@kaminario_logger
def extend_volume(self, volume, new_size):
"""Extend volume."""
vol_name = self.get_volume_name(volume.id)
try:
LOG.debug("Searching volume: %s in K2.", vol_name)
vol = self.client.search("volumes", name=vol_name).hits[0]
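            # new_size is in GiB; units.Mi converts it to the KiB value the K2
            # API appears to store (same convention as create_volume).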
vol.size = new_size * units.Mi
LOG.debug("Extending volume: %s in K2.", vol_name)
vol.save()
except Exception as ex:
LOG.exception(_LE("Extending volume: %s failed."), vol_name)
raise exception.KaminarioCinderDriverException(
reason=six.text_type(ex.message))
@kaminario_logger
def update_volume_stats(self):
conf = self.configuration
LOG.debug("Searching system capacity in K2.")
cap = self.client.search("system/capacity").hits[0]
LOG.debug("Searching total volumes in K2 for updating stats.")
total_volumes = self.client.search("volumes").total - 1
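        # The "- 1" above appears to exclude an internal K2 system volume from
        # the reported count. When auto_calc_max_oversubscription_ratio is
        # enabled, the ratio is derived as provisioned capacity over used
        # capacity (total - free); otherwise the configured
        # max_over_subscription_ratio is reported. Capacity figures from K2
        # seem to be in KiB, so dividing by units.Mi yields GiB below.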
provisioned_vol = cap.provisioned_volumes
if (conf.auto_calc_max_oversubscription_ratio and cap.provisioned
and (cap.total - cap.free) != 0):
ratio = provisioned_vol / float(cap.total - cap.free)
else:
ratio = conf.max_over_subscription_ratio
self.stats = {'QoS_support': False,
'free_capacity_gb': cap.free / units.Mi,
'total_capacity_gb': cap.total / units.Mi,
'thin_provisioning_support': True,
'sparse_copy_volume': True,
'total_volumes': total_volumes,
'thick_provisioning_support': False,
'provisioned_capacity_gb': provisioned_vol / units.Mi,
'max_oversubscription_ratio': ratio}
@kaminario_logger
def get_initiator_host_name(self, connector):
"""Return the initiator host name.
Valid characters: 0-9, a-z, A-Z, '-', '_'
All other characters are replaced with '_'.
        The initiator host name is truncated to 32 characters.
"""
return re.sub('[^0-9a-zA-Z-_]', '_', connector['host'])[:32]
@kaminario_logger
def get_volume_group_name(self, vid):
"""Return the volume group name."""
return "cvg-{0}".format(vid)
@kaminario_logger
def get_volume_name(self, vid):
"""Return the volume name."""
return "cv-{0}".format(vid)
@kaminario_logger
def get_snap_name(self, sid):
"""Return the snapshot name."""
return "cs-{0}".format(sid)
@kaminario_logger
def get_view_name(self, vid):
"""Return the view name."""
return "cview-{0}".format(vid)
@kaminario_logger
def _delete_host_by_name(self, name):
"""Deleting host by name."""
host_rs = self.client.search("hosts", name=name)
if hasattr(host_rs, "hits") and host_rs.total != 0:
host = host_rs.hits[0]
host.delete()
@kaminario_logger
def get_policy(self):
"""Return the retention policy."""
try:
LOG.debug("Searching for retention_policy in K2.")
return self.client.search("retention_policies",
name="Best_Effort_Retention").hits[0]
except Exception as ex:
LOG.exception(_LE("Retention policy search failed in K2."))
raise exception.KaminarioCinderDriverException(
reason=six.text_type(ex.message))
@kaminario_logger
def _get_volume_object(self, volume):
vol_name = self.get_volume_name(volume.id)
LOG.debug("Searching volume : %s in K2.", vol_name)
vol_rs = self.client.search("volumes", name=vol_name)
if not hasattr(vol_rs, 'hits') or vol_rs.total == 0:
msg = _("Unable to find volume: %s from K2.") % vol_name
LOG.error(msg)
raise exception.KaminarioCinderDriverException(reason=msg)
return vol_rs.hits[0]
@kaminario_logger
def _get_lun_number(self, vol, host):
volsnap = None
LOG.debug("Searching volsnaps in K2.")
volsnap_rs = self.client.search("volsnaps", snapshot=vol)
if hasattr(volsnap_rs, 'hits') and volsnap_rs.total != 0:
volsnap = volsnap_rs.hits[0]
LOG.debug("Searching mapping of volsnap in K2.")
map_rs = self.client.search("mappings", volume=volsnap, host=host)
return map_rs.hits[0].lun
def initialize_connection(self, volume, connector):
pass
@kaminario_logger
def terminate_connection(self, volume, connector, **kwargs):
"""Terminate connection of volume from host."""
# Get volume object
if type(volume).__name__ != 'RestObject':
vol_name = self.get_volume_name(volume.id)
LOG.debug("Searching volume: %s in K2.", vol_name)
volume_rs = self.client.search("volumes", name=vol_name)
if hasattr(volume_rs, "hits") and volume_rs.total != 0:
volume = volume_rs.hits[0]
else:
vol_name = volume.name
# Get host object.
host_name = self.get_initiator_host_name(connector)
host_rs = self.client.search("hosts", name=host_name)
if hasattr(host_rs, "hits") and host_rs.total != 0 and volume:
host = host_rs.hits[0]
LOG.debug("Searching and deleting mapping of volume: %(name)s to "
"host: %(host)s", {'host': host_name, 'name': vol_name})
map_rs = self.client.search("mappings", volume=volume, host=host)
if hasattr(map_rs, "hits") and map_rs.total != 0:
map_rs.hits[0].delete()
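            # If the host has no remaining mappings, the auto-created initiator
            # host record is removed from K2 as well.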
if self.client.search("mappings", host=host).total == 0:
LOG.debug("Deleting initiator hostname: %s in K2.", host_name)
host.delete()
else:
LOG.warning(_LW("Host: %s not found on K2."), host_name)
def k2_initialize_connection(self, volume, connector):
# Get volume object.
if type(volume).__name__ != 'RestObject':
vol = self._get_volume_object(volume)
else:
vol = volume
# Get host object.
host, host_rs, host_name = self._get_host_object(connector)
try:
# Map volume object to host object.
LOG.debug("Mapping volume: %(vol)s to host: %(host)s",
{'host': host_name, 'vol': vol.name})
mapping = self.client.new("mappings", volume=vol, host=host).save()
except Exception as ex:
if host_rs.total == 0:
self._delete_host_by_name(host_name)
LOG.exception(_LE("Unable to map volume: %(vol)s to host: "
"%(host)s"), {'host': host_name,
'vol': vol.name})
raise exception.KaminarioCinderDriverException(
reason=six.text_type(ex.message))
# Get lun number.
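        # For a snapshot view (a raw RestObject) the lun has to be looked up
        # through its volsnap mapping; for a regular volume the mapping created
        # above already carries the lun.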
if type(volume).__name__ == 'RestObject':
return self._get_lun_number(vol, host)
else:
return mapping.lun
def _get_host_object(self, connector):
pass
| apache-2.0 | 6,486,585,981,483,560,000 | 41.818713 | 79 | 0.566148 | false |
CVL-GitHub/karaage | karaage/legacy/machines/south_migrations/0017_auto__del_field_account_default_project_tmp__add_field_account_default.py | 3 | 11377 | # -*- coding: utf-8 -*-
from south.db import db
from south.v2 import SchemaMigration
class Migration(SchemaMigration):
depends_on = (
('projects', '0016_rename_project'),
)
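    # This migration only renames account.default_project_tmp_id back to
    # default_project_id (and restores the temporary name on rollback); the
    # large frozen-ORM "models" snapshot below is standard South metadata and
    # is not used by the rename itself.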
def forwards(self, orm):
db.rename_column('account', 'default_project_tmp_id', 'default_project_id')
def backwards(self, orm):
db.rename_column('account', 'default_project_id', 'default_project_tmp_id')
models = {
'institutes.institute': {
'Meta': {'ordering': "['name']", 'object_name': 'Institute', 'db_table': "'institute'"},
'delegates': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'delegate'", 'to': "orm['people.Person']", 'through': "orm['institutes.InstituteDelegate']", 'blank': 'True', 'symmetrical': 'False', 'null': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'saml_entityid': ('django.db.models.fields.CharField', [], {'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'institutes.institutedelegate': {
'Meta': {'object_name': 'InstituteDelegate', 'db_table': "'institutedelegate'"},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['institutes.Institute']"}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']"}),
'send_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'machines.account': {
'Meta': {'ordering': "['person']", 'object_name': 'Account', 'db_table': "'account'"},
'date_created': ('django.db.models.fields.DateField', [], {}),
'date_deleted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'default_project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['projects.Project']", 'null': 'True', 'blank': 'True'}),
'disk_quota': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'login_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'machine_category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['machines.MachineCategory']"}),
'person': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Person']"}),
'shell': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'machines.machine': {
'Meta': {'object_name': 'Machine', 'db_table': "'machine'"},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['machines.MachineCategory']"}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'mem_per_core': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'no_cpus': ('django.db.models.fields.IntegerField', [], {}),
'no_nodes': ('django.db.models.fields.IntegerField', [], {}),
'pbs_server_host': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'scaling_factor': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'machines.machinecategory': {
'Meta': {'object_name': 'MachineCategory', 'db_table': "'machine_category'"},
'datastore': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'people.group': {
'Meta': {'ordering': "['name']", 'object_name': 'Group'},
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'groups'", 'symmetrical': 'False', 'to': "orm['people.Person']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'})
},
'people.person': {
'Meta': {'ordering': "['full_name', 'short_name']", 'object_name': 'Person', 'db_table': "'person'"},
'address': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user_approver'", 'null': 'True', 'to': "orm['people.Person']"}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'comment': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'country': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'date_approved': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_deleted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'user_deletor'", 'null': 'True', 'to': "orm['people.Person']"}),
'department': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'db_index': 'True'}),
'expires': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'fax': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '60'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['institutes.Institute']"}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_admin': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_systemuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_usage': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'legacy_ldap_password': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'login_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'mobile': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'position': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'postcode': ('django.db.models.fields.CharField', [], {'max_length': '8', 'null': 'True', 'blank': 'True'}),
'saml_id': ('django.db.models.fields.CharField', [], {'max_length': '200', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'short_name': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '4', 'null': 'True', 'blank': 'True'}),
'supervisor': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'telephone': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '10', 'null': 'True', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
},
'projects.project': {
'Meta': {'ordering': "['pid']", 'object_name': 'Project', 'db_table': "'project'"},
'additional_req': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'project_approver'", 'null': 'True', 'to': "orm['people.Person']"}),
'date_approved': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'date_deleted': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'project_deletor'", 'null': 'True', 'to': "orm['people.Person']"}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['people.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'institute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['institutes.Institute']"}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_approved': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_usage': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'leaders': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'leaders'", 'symmetrical': 'False', 'to': "orm['people.Person']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'pid': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'start_date': ('django.db.models.fields.DateField', [], {'default': 'datetime.datetime(2013, 9, 17, 0, 0)'})
}
}
complete_apps = ['machines']
| gpl-3.0 | 5,145,831,037,399,073,000 | 83.902985 | 250 | 0.548036 | false |