repo_name
stringlengths 5
100
| path
stringlengths 4
375
| copies
stringclasses 991
values | size
stringlengths 4
7
| content
stringlengths 666
1M
| license
stringclasses 15
values |
---|---|---|---|---|---|
PongPi/isl-odoo | addons/product_margin/wizard/__init__.py | 444 | 1078 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import product_margin
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
EPDCenter/android_kernel_rockchip_ylp | scripts/tracing/draw_functrace.py | 14676 | 3560 | #!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <[email protected]>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing textual but hierarchical tree of
calls. Only the functions' names and the call time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait for some time, but not too long: the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
    """Tree representation of the traced call stack.

    Every node records one function invocation. A call whose parent is
    unknown to the kernel (interrupt, syscall, kernel thread...) hangs
    off the shared virtual node CallTree.ROOT.
    """
    ROOT = None

    def __init__(self, func, time = None, parent = None):
        self._func = func
        self._time = time
        # A missing parent means the call is attached to the virtual root.
        self._parent = CallTree.ROOT if parent is None else parent
        self._children = []

    def calls(self, func, calltime):
        """Record that this node called *func* at *calltime*.

        @return: A reference to the newly created child node.
        """
        node = CallTree(func, calltime, self)
        self._children.append(node)
        return node

    def getParent(self, func):
        """Walk up the ancestry looking for a node named *func*.

        When no ancestor matches, a fresh child of ROOT is created (with
        no timestamp) and returned instead.
        @return: A reference to the matching (or newly created) node.
        """
        node = self
        while node != CallTree.ROOT:
            if node._func == func:
                return node
            node = node._parent
        return CallTree.ROOT.calls(func, None)

    def __repr__(self):
        return self._render("", True)

    def _render(self, branch, lastChild):
        # Draw this node, then recurse over the children, extending the
        # textual branch prefix one level per depth.
        if self._time is None:
            text = "%s----%s\n" % (branch, self._func)
        else:
            text = "%s----%s (%s)\n" % (branch, self._func, self._time)
        if lastChild:
            # The parent's vertical bar stops here, so blank it out.
            branch = branch[:-1] + " "
        last = len(self._children) - 1
        for pos, child in enumerate(self._children):
            text += "%s" % child._render(branch + " |", pos == last)
        return text
class BrokenLineException(Exception):
    """Signals a line left incomplete because the pipe broke mid-write;
    the caller should stop processing and discard that final line.
    """
    pass
class CommentLineException(Exception):
    """Signals a comment line (such as the header at the beginning of
    the trace file); the caller should simply skip it.
    """
    pass
def parseLine(line):
    """Parse one line of function-tracer output.

    Comment lines (starting with '#') raise CommentLineException; lines
    that do not match the expected format (typically the final,
    truncated line of a broken pipe) raise BrokenLineException.

    @return: a (calltime, callee, caller) tuple of strings.
    """
    line = line.strip()
    if line.startswith("#"):
        raise CommentLineException
    # Expected shape: "<task>-<pid> [<cpu>] <timestamp>: <callee> <-<caller>"
    # Raw string keeps the regex escapes readable (was "\\]"/"\\w").
    m = re.match(r"[^]]+?\] +([0-9.]+): (\w+) <-(\w+)", line)
    if m is None:
        raise BrokenLineException
    return (m.group(1), m.group(2), m.group(3))
def main():
    """Read a raw function trace from stdin, build the call tree, print it.

    Lines are consumed until EOF or until a truncated line (broken pipe)
    is encountered; comment lines are skipped.
    """
    CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
    tree = CallTree.ROOT
    for line in sys.stdin:
        try:
            calltime, callee, caller = parseLine(line)
        except BrokenLineException:
            # The pipe broke mid-line: stop processing here.
            break
        except CommentLineException:
            continue
        # Find (or create) the caller's node, then hang the callee off it;
        # the new callee node becomes the current position in the tree.
        tree = tree.getParent(caller)
        tree = tree.calls(callee, calltime)
    print CallTree.ROOT
if __name__ == "__main__":
    main()
| gpl-2.0 |
LChristakis/chalice-hunter | lib/python3.4/site-packages/pip/basecommand.py | 79 | 9310 | """Base Command class, and related routines"""
from __future__ import absolute_import
import logging
import os
import sys
import traceback
import optparse
import warnings
from pip._vendor.six import StringIO
from pip import cmdoptions
from pip.locations import running_under_virtualenv
from pip.download import PipSession
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
CommandError, PreviousBuildDirError)
from pip.compat import logging_dictConfig
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.status_codes import (
SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
PREVIOUS_BUILD_DIR_ERROR,
)
from pip.utils import appdirs, get_prog, normalize_path
from pip.utils.deprecation import RemovedInPip8Warning
from pip.utils.filesystem import check_path_owner
from pip.utils.logging import IndentingFormatter
from pip.utils.outdated import pip_version_check
__all__ = ['Command']
logger = logging.getLogger(__name__)
class Command(object):
    """Base class for all pip commands.

    Subclasses set ``name``/``usage`` and implement ``run(options, args)``;
    ``main`` wires together option parsing, logging configuration and
    top-level error handling around that ``run`` method.
    """
    # Subclasses override these class attributes.
    name = None
    usage = None
    hidden = False
    log_stream = "ext://sys.stdout"
    def __init__(self, isolated=False):
        """Build the option parser for this command and attach the
        general (shared) option group."""
        parser_kw = {
            'usage': self.usage,
            'prog': '%s %s' % (get_prog(), self.name),
            'formatter': UpdatingDefaultsHelpFormatter(),
            'add_help_option': False,
            'name': self.name,
            'description': self.__doc__,
            'isolated': isolated,
        }
        self.parser = ConfigOptionParser(**parser_kw)
        # Commands should add options to this option group
        optgroup_name = '%s Options' % self.name.capitalize()
        self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
        # Add the general options
        gen_opts = cmdoptions.make_option_group(
            cmdoptions.general_group,
            self.parser,
        )
        self.parser.add_option_group(gen_opts)
    def _build_session(self, options, retries=None, timeout=None):
        """Create a PipSession configured from the parsed *options*.

        *retries*/*timeout* override the corresponding option values
        when given (used e.g. for the quick version self-check).
        """
        session = PipSession(
            cache=(
                normalize_path(os.path.join(options.cache_dir, "http"))
                if options.cache_dir else None
            ),
            retries=retries if retries is not None else options.retries,
            insecure_hosts=options.trusted_hosts,
        )
        # Handle custom ca-bundles from the user
        if options.cert:
            session.verify = options.cert
        # Handle SSL client certificate
        if options.client_cert:
            session.cert = options.client_cert
        # Handle timeouts
        if options.timeout or timeout:
            session.timeout = (
                timeout if timeout is not None else options.timeout
            )
        # Handle configured proxies
        if options.proxy:
            session.proxies = {
                "http": options.proxy,
                "https": options.proxy,
            }
        # Determine if we can prompt the user for authentication or not
        session.auth.prompting = not options.no_input
        return session
    def parse_args(self, args):
        # factored out for testability
        return self.parser.parse_args(args)
    def main(self, args):
        """Parse *args*, configure logging, then run the command.

        Returns one of the pip status codes (SUCCESS, ERROR, ...);
        known error types are logged rather than propagated.
        """
        options, args = self.parse_args(args)
        # Console verbosity: --quiet wins over --verbose.
        if options.quiet:
            level = "WARNING"
        elif options.verbose:
            level = "DEBUG"
        else:
            level = "INFO"
        # Compute the path for our debug log.
        debug_log_path = os.path.join(appdirs.user_log_dir("pip"), "debug.log")
        # Ensure that the path for our debug log is owned by the current user
        # and if it is not, disable the debug log.
        write_debug_log = check_path_owner(debug_log_path)
        logging_dictConfig({
            "version": 1,
            "disable_existing_loggers": False,
            "formatters": {
                "indent": {
                    "()": IndentingFormatter,
                    "format": (
                        "%(message)s"
                        if not options.log_explicit_levels
                        else "[%(levelname)s] %(message)s"
                    ),
                },
            },
            "handlers": {
                "console": {
                    "level": level,
                    "class": "pip.utils.logging.ColorizedStreamHandler",
                    "stream": self.log_stream,
                    "formatter": "indent",
                },
                "debug_log": {
                    "level": "DEBUG",
                    "class": "pip.utils.logging.BetterRotatingFileHandler",
                    "filename": debug_log_path,
                    "maxBytes": 10 * 1000 * 1000, # 10 MB
                    "backupCount": 1,
                    "delay": True,
                    "formatter": "indent",
                },
                "user_log": {
                    "level": "DEBUG",
                    "class": "pip.utils.logging.BetterRotatingFileHandler",
                    "filename": options.log or "/dev/null",
                    "delay": True,
                    "formatter": "indent",
                },
            },
            "root": {
                "level": level,
                "handlers": list(filter(None, [
                    "console",
                    "debug_log" if write_debug_log else None,
                    "user_log" if options.log else None,
                ])),
            },
            # Disable any logging besides WARNING unless we have DEBUG level
            # logging enabled. These use both pip._vendor and the bare names
            # for the case where someone unbundles our libraries.
            "loggers": dict(
                (
                    name,
                    {
                        "level": (
                            "WARNING"
                            if level in ["INFO", "ERROR"]
                            else "DEBUG"
                        ),
                    },
                )
                for name in ["pip._vendor", "distlib", "requests", "urllib3"]
            ),
        })
        # We add this warning here instead of up above, because the logger
        # hasn't been configured until just now.
        if not write_debug_log:
            logger.warning(
                "The directory '%s' or its parent directory is not owned by "
                "the current user and the debug log has been disabled. Please "
                "check the permissions and owner of that directory. If "
                "executing pip with sudo, you may want the -H flag.",
                os.path.dirname(debug_log_path),
            )
        if options.log_explicit_levels:
            warnings.warn(
                "--log-explicit-levels has been deprecated and will be removed"
                " in a future version.",
                RemovedInPip8Warning,
            )
        # TODO: try to get these passing down from the command?
        # without resorting to os.environ to hold these.
        if options.no_input:
            os.environ['PIP_NO_INPUT'] = '1'
        if options.exists_action:
            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
        if options.require_venv:
            # If a venv is required check if it can really be found
            if not running_under_virtualenv():
                logger.critical(
                    'Could not find an activated virtualenv (required).'
                )
                sys.exit(VIRTUALENV_NOT_FOUND)
        # Check if we're using the latest version of pip available
        if (not options.disable_pip_version_check
                and not getattr(options, "no_index", False)):
            with self._build_session(
                    options,
                    retries=0,
                    timeout=min(5, options.timeout)) as session:
                pip_version_check(session)
        try:
            status = self.run(options, args)
            # FIXME: all commands should return an exit status
            # and when it is done, isinstance is not needed anymore
            if isinstance(status, int):
                return status
        except PreviousBuildDirError as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:\n%s', format_exc())
            return PREVIOUS_BUILD_DIR_ERROR
        except (InstallationError, UninstallationError, BadCommand) as exc:
            logger.critical(str(exc))
            logger.debug('Exception information:\n%s', format_exc())
            return ERROR
        except CommandError as exc:
            logger.critical('ERROR: %s', exc)
            logger.debug('Exception information:\n%s', format_exc())
            return ERROR
        except KeyboardInterrupt:
            logger.critical('Operation cancelled by user')
            logger.debug('Exception information:\n%s', format_exc())
            return ERROR
        # NOTE(review): bare except also swallows SystemExit raised by a
        # command -- presumably intentional as the last-resort handler,
        # but worth confirming before tightening to `except Exception`.
        except:
            logger.critical('Exception:\n%s', format_exc())
            return UNKNOWN_ERROR
        return SUCCESS
def format_exc(exc_info=None):
    """Render *exc_info* (default: the currently handled exception) as a
    formatted traceback string."""
    if exc_info is None:
        exc_info = sys.exc_info()
    sink = StringIO()
    traceback.print_exception(exc_info[0], exc_info[1], exc_info[2], file=sink)
    return sink.getvalue()
| mit |
jackyyf/ucore_os_lab | related_info/ostep/ostep9-mlfq.py | 54 | 12243 | #! /usr/bin/env python
import sys
from optparse import OptionParser
import random
# finds the highest nonempty queue
# -1 if they are all empty
def FindQueue():
    """Return the index of the highest-priority non-empty queue, or -1
    when every queue is empty."""
    for level in range(hiQueue, -1, -1):
        if len(queue[level]) > 0:
            return level
    return -1
def LowerQueue(currJob, currQueue, issuedIO):
    """Demote currJob one priority level; jobs already in the bottom
    queue stay put. A job that just issued an I/O is not re-queued, but
    its time slice is refreshed for the destination level either way."""
    if currQueue > 0:
        dest = currQueue - 1
        # in this case, have to change the priority of the job
        job[currJob]['currPri'] = dest
    else:
        dest = currQueue
    if not issuedIO:
        queue[dest].append(currJob)
    job[currJob]['ticksLeft'] = quantum[dest]
def Abort(str):
    """Write an error message to stderr and terminate with exit status 1."""
    sys.stderr.write('%s\n' % str)
    exit(1)
#
# PARSE ARGUMENTS
#
# Every simulator knob is an optparse option; the parsed values are read
# through the module-global `options` for the rest of the script.
parser = OptionParser()
parser.add_option('-s', '--seed', default=0, help='the random seed',
                  action='store', type='int', dest='seed')
parser.add_option('-n', '--numQueues', help='number of queues in MLFQ (if not using -Q)', default=3,
                  action='store', type='int', dest='numQueues')
parser.add_option('-q', '--quantum', help='length of time slice (if not using -Q)', default=10,
                  action='store', type='int', dest='quantum')
parser.add_option('-Q', '--quantumList', help='length of time slice per queue level, specified as x,y,z,... where x is the quantum length for the highest priority queue, y the next highest, and so forth',
                  default='', action='store', type='string', dest='quantumList')
parser.add_option('-j', '--numJobs', default=3, help='number of jobs in the system',
                  action='store', type='int', dest='numJobs')
parser.add_option('-m', '--maxlen', default=100, help='max run-time of a job (if randomly generating)',
                  action='store', type='int', dest='maxlen')
parser.add_option('-M', '--maxio', default=10, help='max I/O frequency of a job (if randomly generating)',
                  action='store', type='int', dest='maxio')
parser.add_option('-B', '--boost', default=0, help='how often to boost the priority of all jobs back to high priority',
                  action='store', type='int', dest='boost')
parser.add_option('-i', '--iotime', default=5, help='how long an I/O should last (fixed constant)',
                  action='store', type='int', dest='ioTime')
parser.add_option('-S', '--stay', default=False, help='reset and stay at same priority level when issuing I/O',
                  action='store_true', dest='stay')
parser.add_option('-I', '--iobump', default=False, help='if specified, jobs that finished I/O move immediately to front of current queue',
                  action='store_true', dest='iobump')
parser.add_option('-l', '--jlist', default='', help='a comma-separated list of jobs to run, in the form x1,y1,z1:x2,y2,z2:... where x is start time, y is run time, and z is how often the job issues an I/O request',
                  action='store', type='string', dest='jlist')
parser.add_option('-c', help='compute answers for me', action='store_true', default=False, dest='solve')
(options, args) = parser.parse_args()
random.seed(options.seed)

# MLFQ: How Many Queues
numQueues = options.numQueues
# quantum maps queue level -> time-slice length; level 0 is the LOWEST
# priority, hiQueue the highest.
quantum = {}
if options.quantumList != '':
    # instead, extract number of queues and their time slice
    quantumLengths = options.quantumList.split(',')
    numQueues = len(quantumLengths)
    qc = numQueues - 1
    # -Q lists quanta highest-priority first, so fill the map from the
    # top index downwards.
    for i in range(numQueues):
        quantum[qc] = int(quantumLengths[i])
        qc -= 1
else:
    # uniform time slice for every level
    for i in range(numQueues):
        quantum[i] = int(options.quantum)
hiQueue = numQueues - 1

# MLFQ: I/O Model
# the time for each IO: not great to have a single fixed time but...
ioTime = int(options.ioTime)

# This tracks when IOs and other interrupts are complete
ioDone = {}

# This stores all info about the jobs
job = {}

# seed the random generator
# NOTE(review): the generator was already seeded identically above --
# this second call is redundant but harmless.
random.seed(options.seed)
# jlist 'startTime,runTime,ioFreq:startTime,runTime,ioFreq:...'
# Build the job table: either from the explicit -l job list, or randomly.
# Each job's arrival is modeled as a 'JOB BEGINS' event in ioDone.
jobCnt = 0
if options.jlist != '':
    allJobs = options.jlist.split(':')
    for j in allJobs:
        jobInfo = j.split(',')
        if len(jobInfo) != 3:
            sys.stderr.write('Badly formatted job string. Should be x1,y1,z1:x2,y2,z2:...\n')
            sys.stderr.write('where x is the startTime, y is the runTime, and z is the I/O frequency.\n')
            exit(1)
        assert(len(jobInfo) == 3)
        startTime = int(jobInfo[0])
        runTime = int(jobInfo[1])
        ioFreq = int(jobInfo[2])
        # New jobs enter at the highest priority with a full time slice.
        job[jobCnt] = {'currPri':hiQueue, 'ticksLeft':quantum[hiQueue], 'startTime':startTime,
                       'runTime':runTime, 'timeLeft':runTime, 'ioFreq':ioFreq, 'doingIO':False,
                       'firstRun':-1}
        if startTime not in ioDone:
            ioDone[startTime] = []
        ioDone[startTime].append((jobCnt, 'JOB BEGINS'))
        jobCnt += 1
else:
    # do something random
    for j in range(options.numJobs):
        startTime = 0
        runTime = int(random.random() * options.maxlen)
        ioFreq = int(random.random() * options.maxio)
        job[jobCnt] = {'currPri':hiQueue, 'ticksLeft':quantum[hiQueue], 'startTime':startTime,
                       'runTime':runTime, 'timeLeft':runTime, 'ioFreq':ioFreq, 'doingIO':False,
                       'firstRun':-1}
        if startTime not in ioDone:
            ioDone[startTime] = []
        ioDone[startTime].append((jobCnt, 'JOB BEGINS'))
        jobCnt += 1
numJobs = len(job)
# Echo the configuration and workload back to the user, then (unless -c
# was given) stop before revealing the simulated schedule.
print 'Here is the list of inputs:'
print 'OPTIONS jobs', numJobs
print 'OPTIONS queues', numQueues
for i in range(len(quantum)-1,-1,-1):
    print 'OPTIONS quantum length for queue %2d is %3d' % (i, quantum[i])
print 'OPTIONS boost', options.boost
print 'OPTIONS ioTime', options.ioTime
print 'OPTIONS stayAfterIO', options.stay
print 'OPTIONS iobump', options.iobump
print '\n'
print 'For each job, three defining characteristics are given:'
print ' startTime : at what time does the job enter the system'
print ' runTime : the total CPU time needed by the job to finish'
print ' ioFreq : every ioFreq time units, the job issues an I/O'
print ' (the I/O takes ioTime units to complete)\n'
print 'Job List:'
for i in range(numJobs):
    print ' Job %2d: startTime %3d - runTime %3d - ioFreq %3d' % (i, job[i]['startTime'],
        job[i]['runTime'], job[i]['ioFreq'])
print ''
if options.solve == False:
    print 'Compute the execution trace for the given workloads.'
    print 'If you would like, also compute the response and turnaround'
    print 'times for each of the jobs.'
    print ''
    print 'Use the -c flag to get the exact results when you are finished.\n'
    exit(0)

# initialize the MLFQ queues
queue = {}
for q in range(numQueues):
    queue[q] = []

# TIME IS CENTRAL
currTime = 0

# use these to know when we're finished
totalJobs = len(job)
finishedJobs = 0

print '\nExecution Trace:\n'
# Main simulation loop: one iteration per clock tick (or per scheduling
# decision when the CPU is idle).
while finishedJobs < totalJobs:
    # find highest priority job
    # run it until either
    # (a) the job uses up its time quantum
    # (b) the job performs an I/O

    # check for priority boost
    if options.boost > 0 and currTime != 0:
        if currTime % options.boost == 0:
            print '[ time %d ] BOOST ( every %d )' % (currTime, options.boost)
            # remove all jobs from queues (except high queue)
            for q in range(numQueues-1):
                for j in queue[q]:
                    if job[j]['doingIO'] == False:
                        queue[hiQueue].append(j)
                queue[q] = []
            # print 'BOOST: QUEUES look like:', queue
            # change priority to high priority
            # reset number of ticks left for all jobs (XXX just for lower jobs?)
            # add to highest run queue (if not doing I/O)
            for j in range(numJobs):
                # print '-> Boost %d (timeLeft %d)' % (j, job[j]['timeLeft'])
                if job[j]['timeLeft'] > 0:
                    # print '-> FinalBoost %d (timeLeft %d)' % (j, job[j]['timeLeft'])
                    job[j]['currPri'] = hiQueue
                    job[j]['ticksLeft'] = quantum[hiQueue]
            # print 'BOOST END: QUEUES look like:', queue

    # check for any I/Os done (also delivers 'JOB BEGINS' arrival events)
    if currTime in ioDone:
        for (j, type) in ioDone[currTime]:
            q = job[j]['currPri']
            job[j]['doingIO'] = False
            print '[ time %d ] %s by JOB %d' % (currTime, type, j)
            if options.iobump == False:
                queue[q].append(j)
            else:
                queue[q].insert(0, j)

    # now find the highest priority job
    currQueue = FindQueue()
    if currQueue == -1:
        print '[ time %d ] IDLE' % (currTime)
        currTime += 1
        continue
    #print 'FOUND QUEUE: %d' % currQueue
    #print 'ALL QUEUES:', queue

    # there was at least one runnable job, and hence ...
    currJob = queue[currQueue][0]
    if job[currJob]['currPri'] != currQueue:
        Abort('currPri[%d] does not match currQueue[%d]' % (job[currJob]['currPri'], currQueue))
    # charge this tick against both the job's remaining work and its slice
    job[currJob]['timeLeft'] -= 1
    job[currJob]['ticksLeft'] -= 1
    if job[currJob]['firstRun'] == -1:
        job[currJob]['firstRun'] = currTime
    runTime = job[currJob]['runTime']
    ioFreq = job[currJob]['ioFreq']
    ticksLeft = job[currJob]['ticksLeft']
    timeLeft = job[currJob]['timeLeft']
    print '[ time %d ] Run JOB %d at PRIORITY %d [ TICKSLEFT %d RUNTIME %d TIMELEFT %d ]' % (currTime, currJob, currQueue, ticksLeft, runTime, timeLeft)
    if timeLeft < 0:
        Abort('Error: should never have less than 0 time left to run')

    # UPDATE TIME
    currTime += 1

    # CHECK FOR JOB ENDING
    if timeLeft == 0:
        print '[ time %d ] FINISHED JOB %d' % (currTime, currJob)
        finishedJobs += 1
        job[currJob]['endTime'] = currTime
        # print 'BEFORE POP', queue
        done = queue[currQueue].pop(0)
        # print 'AFTER POP', queue
        assert(done == currJob)
        continue

    # CHECK FOR IO
    issuedIO = False
    if ioFreq > 0 and (((runTime - timeLeft) % ioFreq) == 0):
        # time for an IO!
        print '[ time %d ] IO_START by JOB %d' % (currTime, currJob)
        issuedIO = True
        desched = queue[currQueue].pop(0)
        assert(desched == currJob)
        job[currJob]['doingIO'] = True
        # this does the bad rule -- reset your tick counter if you stay at the same level
        if options.stay == True:
            job[currJob]['ticksLeft'] = quantum[currQueue]
        # add to IO Queue: but which queue?
        futureTime = currTime + ioTime
        if futureTime not in ioDone:
            ioDone[futureTime] = []
        ioDone[futureTime].append((currJob, 'IO_DONE'))
        # print 'NEW IO EVENT at ', futureTime, ' is ', ioDone[futureTime]

    # CHECK FOR QUANTUM ENDING AT THIS LEVEL
    if ticksLeft == 0:
        # print '--> DESCHEDULE %d' % currJob
        if issuedIO == False:
            # print '--> BUT IO HAS NOT BEEN ISSUED (therefor pop from queue)'
            desched = queue[currQueue].pop(0)
            assert(desched == currJob)
        # move down one queue! (unless lowest queue)
        LowerQueue(currJob, currQueue, issuedIO)
# print out statistics
# response = first run - arrival; turnaround = completion - arrival
print ''
print 'Final statistics:'
responseSum = 0
turnaroundSum = 0
for i in range(numJobs):
    response = job[i]['firstRun'] - job[i]['startTime']
    turnaround = job[i]['endTime'] - job[i]['startTime']
    print ' Job %2d: startTime %3d - response %3d - turnaround %3d' % (i, job[i]['startTime'],
        response, turnaround)
    responseSum += response
    turnaroundSum += turnaround
print '\n Avg %2d: startTime n/a - response %.2f - turnaround %.2f' % (i,
    float(responseSum)/numJobs,
    float(turnaroundSum)/numJobs)
print '\n'
| gpl-2.0 |
TrevorLowing/PyGames | pysollib/pysolgtk/tkwidget.py | 2 | 10366 | #!/usr/bin/env python
# -*- mode: python; coding: utf-8; -*-
##---------------------------------------------------------------------------##
##
## Copyright (C) 1998-2003 Markus Franz Xaver Johannes Oberhumer
## Copyright (C) 2003 Mt. Hood Playing Card Co.
## Copyright (C) 2005-2009 Skomoroh
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
##---------------------------------------------------------------------------##
# imports
import os, sys
import gtk
gdk = gtk.gdk
# PySol imports
# Toolkit imports
from tkutil import makeToplevel, setTransient, wm_withdraw
from pysollib.mfxutil import kwdefault, KwStruct, openURL
# ************************************************************************
# *
# ************************************************************************
class _MyDialog(gtk.Dialog):
    """Thin gtk.Dialog wrapper shared by the PySol dialogs: wires the
    window-close signals to quit() and shuts down the nested main loop
    when dismissed."""
    def __init__(self):
        gtk.Dialog.__init__(self)
        self.connect("destroy", self.quit)
        self.connect("delete_event", self.quit)
    def __setattr__(self, name, value):
        # Bypass gtk.Dialog's attribute handling; store plain Python attrs.
        self.__dict__[name] = value
    def quit(self, *args):
        """Dismiss the dialog and leave the nested gtk main loop."""
        self.status = 0
        self.hide()
        self.destroy()
        gtk.main_quit()
class MfxDialog(_MyDialog):
    """Base modal dialog: packs optional bitmap/image, text and a row of
    buttons. After construction, ``status`` is 0 once dismissed and
    ``button`` holds the index of the button that was clicked (-1 if none).
    """
    img = {}
    button_img = {}
    def __init__(self, parent, title='',
                 timeout=0,
                 resizable=0,
                 width=-1, height=-1,
                 text='', justify='center',
                 strings=("OK",), default=0,
                 separator=False,
                 padx=20, pady=20,
                 bitmap=None, bitmap_side='left',
                 bitmap_padx=20, bitmap_pady=20,
                 image=None, image_side='left',
                 image_padx=10, image_pady=20,
                 **kw):
        _MyDialog.__init__(self)
        self.status = 1
        self.button = -1
        self.buttons = []
        modal=True
        if modal:
            setTransient(self, parent)
        # settings
        if width > 0 or height > 0:
            self.set_size_request(width, height)
            #self.window.resize(width, height)
        self.set_title(title)
        #
        self.connect('key-press-event', self._keyPressEvent)
    def createBox(self, widget_class=gtk.HBox):
        """Create the content container; return (content_box, button_area)."""
        box = widget_class(spacing=5)
        box.set_border_width(5)
        self.vbox.pack_start(box)
        box.show()
        return box, self.action_area
    createHBox = createBox
    def createVBox(self):
        return self.createBox(widget_class=gtk.VBox)
    def createTable(self):
        # FIXME
        return self.createBox(widget_class=gtk.Table)
    def createBitmaps(self, box, kw):
        """Pack either a stock icon (kw['bitmap']) or a custom image
        (kw['image']) into *box*; bitmap takes precedence."""
        if kw['bitmap']:
            stock = {"info": gtk.STOCK_DIALOG_INFO,
                     "error": gtk.STOCK_DIALOG_ERROR,
                     "warning": gtk.STOCK_DIALOG_WARNING,
                     "question": gtk.STOCK_DIALOG_QUESTION} [kw['bitmap']]
            im = gtk.image_new_from_stock(stock, gtk.ICON_SIZE_DIALOG)
            box.pack_start(im)
            im.set_property('xpad', kw['bitmap_padx'])
            im.set_property('ypad', kw['bitmap_pady'])
            im.show()
        elif kw['image']:
            im = gtk.Image()
            im.set_from_pixbuf(kw['image'].pixbuf)
            if kw['image_side'] == 'left':
                box.pack_start(im)
            else:
                box.pack_end(im)
            im.set_property('xpad', kw['image_padx'])
            im.set_property('ypad', kw['image_pady'])
            im.show()
    def createButtons(self, box, kw):
        """Create one button per entry of kw['strings'].

        An entry may be a plain label (its position is its index) or a
        (label, index) pair; '&' marks the mnemonic character.
        """
        strings, default = kw['strings'], kw['default']
        for i in range(len(strings)):
            text = strings[i]
            if not text:
                continue
            if isinstance(text, (list, tuple)):
                text, index = text
            else: # str
                index = i
            # '&' is the Tk-style mnemonic marker; gtk uses '_'.
            text = text.replace('&', '_')
            b = gtk.Button(text)
            b.set_property('can-default', True)
            if index == default:
                b.grab_focus()
                #b.grab_default()
            b.set_data("user_data", index)
            b.connect("clicked", self.done)
            box.pack_start(b)
            b.show()
            self.buttons.append(b)
    def initKw(self, kw):
        """Fill *kw* with the default values for all dialog options."""
        kwdefault(kw,
                  timeout=0, resizable=0,
                  text="", justify="center",
                  strings=(_("&OK"),),
                  default=0,
                  width=0,
                  padx=20, pady=20,
                  bitmap=None, bitmap_side="left",
                  bitmap_padx=10, bitmap_pady=20,
                  image=None, image_side="left",
                  image_padx=10, image_pady=20,
                  )
        ## # default to separator if more than one button
        ## sw = len(kw.strings) > 1
        ## kwdefault(kw.__dict__, separator=sw)
        return kw
    def done(self, button):
        """Button-click handler: record which button closed the dialog."""
        self.status = 0
        self.button = button.get_data("user_data")
        self.quit()
    def _keyPressEvent(self, w, e):
        # Escape dismisses the dialog without selecting a button.
        if gdk.keyval_name(e.keyval) == 'Escape':
            self.quit()
class MfxMessageDialog(MfxDialog):
    """Simple modal message box: icon + centered text + buttons.

    Construction blocks in a nested gtk main loop until dismissed.
    """
    def __init__(self, parent, title, **kw):
        ##print 'MfxMessageDialog', kw
        kw = self.initKw(kw)
        MfxDialog.__init__(self, parent, title, **kw)
        top_box, bottom_box = self.createBox()
        self.createBitmaps(top_box, kw)
        label = gtk.Label(kw['text'])
        label.set_justify(gtk.JUSTIFY_CENTER)
        label.set_property('xpad', kw['padx'])
        label.set_property('ypad', kw['pady'])
        top_box.pack_start(label)
        self.createButtons(bottom_box, kw)
        label.show()
        self.set_position(gtk.WIN_POS_CENTER_ON_PARENT)
        ##self.set_position(gtk.WIN_POS_CENTER)
        self.show_all()
        # Blocks here until done()/quit() calls gtk.main_quit().
        gtk.main()
    def initKw(self, kw):
        #if kw.has_key('bitmap'):
        #    kwdefault(kw, width=250, height=150)
        return MfxDialog.initKw(self, kw)
# ************************************************************************
# *
# ************************************************************************
class PysolAboutDialog(MfxDialog):
    """About box with a clickable project URL below the main text.

    Construction blocks in a nested gtk main loop until dismissed.
    """
    def __init__(self, app, parent, title, **kw):
        self._url = kw['url']
        kw = self.initKw(kw)
        MfxDialog.__init__(self, parent, title, **kw)
        top_box, bottom_box = self.createBox()
        self.createBitmaps(top_box, kw)
        box = gtk.VBox()
        box.set_property('border-width', 20)
        top_box.pack_start(box)
        label = gtk.Label(kw['text'])
        label.set_justify(gtk.JUSTIFY_CENTER)
        box.pack_start(label)
        # Render the URL in link style (blue, underlined).
        url_label = gtk.Label()
        url_label.set_justify(gtk.JUSTIFY_CENTER)
        url_label.set_markup(
            '<span foreground="blue" underline="single">%s</span>' % kw['url'])
        # Labels cannot receive clicks; wrap the URL in an EventBox.
        event_box = gtk.EventBox()
        box.pack_start(event_box)
        event_box.connect('button-press-event', self._urlClicked)
        event_box.add(url_label)
        self.createButtons(bottom_box, kw)
        self.set_position(gtk.WIN_POS_CENTER_ON_PARENT)
        ##self.set_position(gtk.WIN_POS_CENTER)
        self.show_all()
        # Hand cursor over the link; possible only after show_all()
        # realizes the widget's gdk window.
        event_box.window.set_cursor(gdk.Cursor(gdk.HAND2))
        gtk.main()
    def initKw(self, kw):
        #if kw.has_key('bitmap'):
        #    kwdefault(kw, width=250, height=150)
        return MfxDialog.initKw(self, kw)
    def _urlClicked(self, *args):
        openURL(self._url)
# ************************************************************************
# *
# ************************************************************************
class MfxExceptionDialog(MfxDialog):
    """Error dialog that appends a formatted exception to the message.

    EnvironmentError instances with a filename are expanded to the
    classic '[Errno N] strerror: filename' form.
    """
    def __init__(self, parent, ex, title="Error", **kw):
        kw = KwStruct(kw, bitmap="error")
        text = str(kw.get("text", ""))
        # Separate the caller-supplied text from the exception by a blank line.
        if text and text[-1] != "\n":
            text = text + "\n"
        text = text + "\n"
        if isinstance(ex, EnvironmentError) and ex.filename is not None:
            t = '[Errno %s] %s:\n%s' % (ex.errno, ex.strerror, repr(ex.filename))
        else:
            t = str(ex)
        kw.text = text + t
        MfxDialog.__init__(self, parent, title, **kw.__dict__)
# ************************************************************************
# *
# ************************************************************************
class MfxSimpleEntry(_MyDialog):
    """Modal one-line text-entry dialog.

    After the nested main loop returns, ``value`` holds the entered text
    (or the initial value if the dialog was cancelled) and ``status`` is
    0 when OK was pressed.
    """
    def __init__(self, parent, title, label, value, resizable=0, **kw):
        _MyDialog.__init__(self)
        self.button = 0
        self.status = 1
        self.value = value
        self.init(parent, label, True)
        self.entry.set_text(str(value))
        self.set_title(title)
        self.show()
        # Blocks here until done()/quit() calls gtk.main_quit().
        gtk.main()
    def init(self, parent, message="", modal=True):
        """Build the widget tree: optional label, the entry, OK/Cancel."""
        if modal:
            setTransient(self, parent)
        box = gtk.VBox(spacing=10)
        box.set_border_width(10)
        self.vbox.pack_start(box)
        box.show()
        if message:
            label = gtk.Label(message)
            box.pack_start(label)
            label.show()
        self.entry = gtk.Entry()
        box.pack_start(self.entry)
        self.entry.show()
        self.entry.grab_focus()
        button = gtk.Button("OK")
        button.connect("clicked", self.done)
        button.set_flags(gtk.CAN_DEFAULT)
        self.action_area.pack_start(button)
        button.show()
        button.grab_default()
        button = gtk.Button("Cancel")
        button.connect("clicked", self.quit)
        button.set_flags(gtk.CAN_DEFAULT)
        self.action_area.pack_start(button)
        button.show()
    def done(self, button):
        """OK handler: capture the entry text before closing."""
        self.status = 0
        self.value = self.entry.get_text()
        self.quit()
class SelectDialogTreeData:
    """Plain attribute container shared by tree-based selection dialogs."""
| gpl-2.0 |
duramato/SickRage | autoProcessTV/lib/requests/packages/chardet/langbulgarianmodel.py | 2965 | 12784 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Character Mapping Table:
# this table is modified based on win1251BulgarianCharToOrderMap, so
# only number <64 is sure valid
Latin5_BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80
210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90
81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0
62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0
)
win1251BulgarianCharToOrderMap = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40
110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50
253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60
116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70
206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80
221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90
88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0
73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0
31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0
39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0
1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0
7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 96.9392%
# first 1024 sequences: 3.0618%
# rest sequences: 0.2992%
# negative sequences: 0.0020%
BulgarianLangModel = (
0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2,
3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1,
0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0,
0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0,
0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0,
0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0,
0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,
3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3,
2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1,
3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1,
3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2,
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0,
3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1,
1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0,
2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2,
2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0,
3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2,
1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0,
2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2,
2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2,
1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0,
2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2,
2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0,
2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2,
1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0,
2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2,
1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0,
3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2,
1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0,
3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1,
1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0,
2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1,
1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0,
2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2,
1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,
2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1,
1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1,
2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2,
1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,
2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2,
1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1,
0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2,
1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1,
1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,
1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1,
0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1,
0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,
2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0,
1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1,
0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,
0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1,
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,
1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,
)
# Model descriptor consumed by chardet's single-byte charset prober for
# Bulgarian text encoded as ISO-8859-5 (Latin/Cyrillic).
Latin5BulgarianModel = {
  'charToOrderMap': Latin5_BulgarianCharToOrderMap,
  'precedenceMatrix': BulgarianLangModel,
  'mTypicalPositiveRatio': 0.969392,
  'keepEnglishLetter': False,
  'charsetName': "ISO-8859-5"
}
# Same bigram language model keyed to the windows-1251 byte-to-order map;
# used for detecting Bulgarian text in the windows-1251 code page.
Win1251BulgarianModel = {
  'charToOrderMap': win1251BulgarianCharToOrderMap,
  'precedenceMatrix': BulgarianLangModel,
  'mTypicalPositiveRatio': 0.969392,
  'keepEnglishLetter': False,
  'charsetName': "windows-1251"
}
# flake8: noqa
| gpl-3.0 |
geophysics/mtpy | mtpy/utils/convert_coordinates_in_edis.py | 1 | 2530 | #!/usr/bin/env python
"""
mtpy/mtpy/uofa/convert_coordinates_in_edis.py
This is a convenience script for converting coordinates in EDI files.
Files are parsed and if a 'lat' or 'lon' is detected, the argument on
the other side of an '=' is converted into decimal degrees. The rest of the file
remains unchanged.
argument:
- directory containing edi files
optional:
- output directory [default: 'decimal_degrees']
"""
import sys
import os
import os.path as op
import fnmatch
import re
import mtpy.utils.format as MTft
def main():
if len(sys.argv) < 2:
sys.exit('\nNeed at least 1 arguments:\n '
'\n <path to EDI files> \n '
'[optional: <output path>]\n')
edidir = sys.argv[1]
if not op.isdir(edidir):
print 'Given directory does not exist {0}'.format(edidir)
sys.exit()
edilist =[]
try:
edilist = fnmatch.filter(os.listdir(edidir),'*.[Ee][Dd][Ii]')
if len(edilist) == 0:
raise
edilist = [op.abspath(op.join(edidir,i)) for i in edilist]
except:
print 'Given directory does not contain edi files: {0}'.format(edidir)
outputdir = op.join(edidir,'decimal_degrees')
if not op.isdir(outputdir):
os.makedirs(outputdir)
if len(sys.argv) > 2:
outputdir = sys.argv[2]
try:
if not op.isdir(outputdir):
os.makedirs(outputdir)
except:
print 'could not generate output directory - using default'
outputdir = op.join(edidir,'decimal_degrees')
if not op.isdir(outputdir):
os.makedirs(outputdir)
path = convert_edis(edilist,outputdir)
return path
def convert_edis(edilist, output_path):
    """Rewrite each EDI file with lat/lon values converted to decimal degrees.

    edilist     - list of absolute paths to EDI files
    output_path - directory the converted files are written to (same basename)

    Lines containing 'lat' or 'lon' (case-insensitive) and an '=' have the
    right-hand side converted via MTft.assert_decimal_coordinates; all other
    lines are copied through unchanged.
    """
    for edi in edilist:
        infile = edi
        outfile = op.join(output_path, op.split(edi)[1])
        outlines = []
        with open(infile, 'r') as F:
            edilines = F.readlines()
        for line in edilines:
            lowered = line.lower()
            # Robustness fix: a lat/lon line without '=' previously raised
            # IndexError; such lines are now copied through unchanged.
            if ('lat' not in lowered and 'lon' not in lowered) or '=' not in line:
                outlines.append(line)
                continue
            key, coord = line.strip().split('=', 1)
            dec_coord = str(MTft.assert_decimal_coordinates(coord))
            outlines.append('\t{0}={1}\t\n'.format(key, dec_coord))
        # Join once instead of quadratic string concatenation.
        with open(outfile, 'w') as Fout:
            Fout.write(''.join(outlines).expandtabs(4))
# Command-line entry point.
if __name__=='__main__':
    main()
| gpl-3.0 |
Jgarcia-IAS/SAT | openerp/addons/base_setup/base_setup.py | 382 | 5430 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import simplejson
import cgi
from openerp import tools
from openerp.osv import fields, osv
from openerp.tools.translate import _
from lxml import etree
# Specify Your Terminology will move to 'partner' module
class specify_partner_terminology(osv.osv_memory):
    """Configuration wizard that lets the user choose what a "Customer"
    should be called throughout the interface (Client, Member, Patient, ...)
    and rewrites the affected translations accordingly.
    """
    _name = 'base.setup.terminology'
    _inherit = 'res.config'
    # Wizard form: a single selection listing the supported terminology choices.
    _columns = {
        'partner': fields.selection([
            ('Customer','Customer'),
            ('Client','Client'),
            ('Member','Member'),
            ('Patient','Patient'),
            ('Partner','Partner'),
            ('Donor','Donor'),
            ('Guest','Guest'),
            ('Tenant','Tenant')
        ], 'How do you call a Customer', required=True ),
    }
    _defaults={
        'partner' :'Customer',
    }
    def make_translations(self, cr, uid, ids, name, type, src, value, res_id=0, context=None):
        """Create or update an ir.translation record for the current user's
        language; name/type/src/res_id identify the translated term and
        value is the new translation text.
        """
        trans_obj = self.pool.get('ir.translation')
        user_obj = self.pool.get('res.users')
        context_lang = user_obj.browse(cr, uid, uid, context=context).lang
        # Reuse an existing translation record when one matches exactly.
        existing_trans_ids = trans_obj.search(cr, uid, [('name','=',name), ('lang','=',context_lang), ('type','=',type), ('src','=',src), ('res_id','=',res_id)])
        if existing_trans_ids:
            trans_obj.write(cr, uid, existing_trans_ids, {'value': value}, context=context)
        else:
            create_id = trans_obj.create(cr, uid, {'name': name,'lang': context_lang, 'type': type, 'src': src, 'value': value , 'res_id': res_id}, context=context)
        return {}
    def execute(self, cr, uid, ids, context=None):
        """Apply the chosen terminology: replace the word "Customer"
        (case-insensitively) in field labels, field tooltips, menu items and
        action-window names/tooltips via translation records.
        """
        def _case_insensitive_replace(ref_string, src, value):
            # Replace every case-insensitive occurrence of src in the
            # translated ref_string with the translated value.
            import re
            pattern = re.compile(src, re.IGNORECASE)
            return pattern.sub(_(value), _(ref_string))
        trans_obj = self.pool.get('ir.translation')
        fields_obj = self.pool.get('ir.model.fields')
        menu_obj = self.pool.get('ir.ui.menu')
        act_window_obj = self.pool.get('ir.actions.act_window')
        for o in self.browse(cr, uid, ids, context=context):
            #translate label of field
            field_ids = fields_obj.search(cr, uid, [('field_description','ilike','Customer')])
            for f_id in fields_obj.browse(cr ,uid, field_ids, context=context):
                field_ref = f_id.model_id.model + ',' + f_id.name
                self.make_translations(cr, uid, ids, field_ref, 'field', f_id.field_description, _case_insensitive_replace(f_id.field_description,'Customer',o.partner), context=context)
            #translate help tooltip of field
            # NOTE(review): assumes every column exposes a string `help`; a
            # field whose help attribute is not a string would raise here -
            # confirm against the field definitions in use.
            for obj in self.pool.models.values():
                for field_name, field_rec in obj._columns.items():
                    if field_rec.help.lower().count('customer'):
                        field_ref = obj._name + ',' + field_name
                        self.make_translations(cr, uid, ids, field_ref, 'help', field_rec.help, _case_insensitive_replace(field_rec.help,'Customer',o.partner), context=context)
            #translate menuitems
            menu_ids = menu_obj.search(cr,uid, [('name','ilike','Customer')])
            for m_id in menu_obj.browse(cr, uid, menu_ids, context=context):
                menu_name = m_id.name
                menu_ref = 'ir.ui.menu' + ',' + 'name'
                self.make_translations(cr, uid, ids, menu_ref, 'model', menu_name, _case_insensitive_replace(menu_name,'Customer',o.partner), res_id=m_id.id, context=context)
            #translate act window name
            act_window_ids = act_window_obj.search(cr, uid, [('name','ilike','Customer')])
            for act_id in act_window_obj.browse(cr ,uid, act_window_ids, context=context):
                act_ref = 'ir.actions.act_window' + ',' + 'name'
                self.make_translations(cr, uid, ids, act_ref, 'model', act_id.name, _case_insensitive_replace(act_id.name,'Customer',o.partner), res_id=act_id.id, context=context)
            #translate act window tooltips
            act_window_ids = act_window_obj.search(cr, uid, [('help','ilike','Customer')])
            for act_id in act_window_obj.browse(cr ,uid, act_window_ids, context=context):
                act_ref = 'ir.actions.act_window' + ',' + 'help'
                self.make_translations(cr, uid, ids, act_ref, 'model', act_id.help, _case_insensitive_replace(act_id.help,'Customer',o.partner), res_id=act_id.id, context=context)
        return {}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
beckynaylor/PiSwarmSim | PiSwarmSimulator/arena.py | 1 | 5901 | # Pi-Swarm Simulator is a simple graphical simulation environment for the Pi-Swarm robots
# Copyright (C) 2014 Becky Naylor, Jon Timmis, University of York
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#All arena element classes are in this file
#import external libraries
import os, random, sys, math, itertools, operator, datetime, re, cProfile
from framework import *
#import simulator classes
from robot import *
from proxSensor import *
#Room perimeter polygon, currently there should be just one of these
class Room():
    """Rectangular arena perimeter, built as a static Box2D edge chain."""

    def __init__(self, world, xsize, ysize):
        self.xsize = xsize
        self.ysize = ysize
        #TODO: make centre relative to the screen, not hardcoded
        # Centre the room in the screen
        self.centrePoint = (0, ysize/2)
        self.walls = world.CreateBody(position=self.centrePoint, userData=self)
        half_x = xsize/2
        half_y = ysize/2
        # Corner positions; the first corner is repeated to close the loop.
        self.corners = [(-half_x, -half_y),
                        (-half_x, half_y),
                        (half_x, half_y),
                        (half_x, -half_y),
                        (-half_x, -half_y)]
        # Make vertices
        self.walls.CreateEdgeChain(self.corners)
#Arena obstacles, provide the world to add them to. Can also provide a list of protected areas (of type b2body)
class Obstacle:
    """Static arena obstacle with a random shape (square, rectangle or
    circle), placed at a random position that avoids protected areas.

    world             - Box2D world the body is created in
    obstacleid        - identifier stored on the instance
    room              - Room instance supplying the arena dimensions
    protectedAreaList - optional list of areas (with .fixtures/.body) the
                        obstacle must not overlap, or 0 for no restriction
    """
    def __init__(self, world, obstacleid, room, protectedAreaList=0):
        self.obstacleid = obstacleid
        self.shape = ""
        # Pick random size
        obs_size = random.uniform(0.5, 1.5)
        # Dice roll to decide object shape
        diceroll = random.randint(0, 2)
        roomx = room.xsize
        roomy = room.ysize
        if diceroll == 0:
            # square
            self.shape = "square"
            obs_y_size = obs_size
            obstacle = b2PolygonShape(box=(obs_size, obs_size))
            self.size = obs_size
        elif diceroll == 1:
            # rectangle: generate an independent y dimension
            self.shape = "rectangle"
            obs_y_size = random.uniform(0.5, 1.5)
            obstacle = b2PolygonShape(box=(obs_size, obs_y_size))
            self.size = (obs_size, obs_y_size)
        else:
            # circle
            self.shape = "circle"
            obs_size = obs_size*2
            obs_y_size = obs_size
            obstacle = b2CircleShape(radius=(obs_size))
            self.size = obs_size
        positionAccepted = False
        while positionAccepted == False:
            # Pick random co-ordinates, keeping the shape clear of the walls
            (xpos, ypos) = (random.uniform(-(float(roomx)/2)+obs_size, (float(roomx)/2)-obs_size),
                            random.uniform(0+obs_y_size, roomy-obs_y_size))
            self.fixtures = b2FixtureDef(shape=obstacle, density=1, friction=0.3)
            self.body = world.CreateStaticBody(position=(xpos, ypos), fixtures=self.fixtures, userData=self)
            # Bug fix: accept the position by default. Previously the flag was
            # only ever set when protectedAreaList was supplied, so the default
            # argument (0) caused an infinite loop.
            positionAccepted = True
            # Check there are no protected areas e.g. powersockets at this point
            if protectedAreaList != 0:
                for protArea in protectedAreaList:
                    overlapping = b2TestOverlap(self.fixtures.shape, 0, protArea.fixtures.shape, 0, self.body.transform, protArea.body.transform)
                    # If the shape overlaps a protected area then we need to
                    # generate new coordinates
                    if overlapping:
                        positionAccepted = False
                        break
            # Destroy old shape before creating a new one
            if positionAccepted == False:
                world.DestroyBody(self.body)
#Floor area where the robots recharge, specified size (x,y) and position (x,y)
class PowerStrip:
    """Floor sensor region where robots recharge.

    position and size use the sentinel string "none"; when left unset the
    strip is sized (1.5, 1.5) and placed in the top-left corner of the room.
    """
    def __init__(self, world, powerid, room, position="none", size="none"):
        self.powerid = powerid
        room_width = room.xsize
        room_height = room.ysize
        if size == "none":
            size = (1.5, 1.5)
        if position == "none":
            # Top-left corner of the room, inset by the strip's own size.
            position = (-(room_width/2) + size[0], room_height - size[1])
        self.size = size
        strip_shape = b2PolygonShape(box=self.size)
        self.fixtures = b2FixtureDef(shape=strip_shape, density=0, friction=0, isSensor=True, userData=self)
        self.body = world.CreateStaticBody(position=position, fixtures=self.fixtures)
#Floor tile of specified size (x,y) and position (x,y)
class FloorTile:
    """Non-colliding floor sensor tile at a given position and size."""

    def __init__(self, world, position, size):
        self.contacted = False
        # pygame seems to double the expected size: (4.0, 4.0) extends 4.0
        # above and 4.0 below the centre point - so halve both dimensions.
        half_size = (size[0]/2, size[1]/2)
        tile_shape = b2PolygonShape(box=half_size)
        self.fixtures = b2FixtureDef(shape=tile_shape, density=0, friction=0, isSensor=True, userData=self)
        self.body = world.CreateStaticBody(position=position, fixtures=self.fixtures)
| gpl-3.0 |
hickerson/bbn | fable/fable_sources/libtbx/command_line/run_tests_parallel.py | 1 | 3187 | from __future__ import division
import libtbx.test_utils.parallel
from libtbx.utils import Sorry, Usage
import libtbx.phil
import random
import os
import sys
master_phil = libtbx.phil.parse("""
directory = None
.type = path
.multiple = True
module = None
.type = str
.multiple = True
nproc = 1
.type= int
shuffle = False
.type = bool
quiet = False
.type = bool
run_in_tmp_dir = False
.type = bool
output_junit_xml = False
.type = bool
.help = "Create junit-style xml output"
"Requires junit_xml module:"
" https://pypi.python.org/pypi/junit-xml"
""")
def run (args) :
if (len(args) == 0) :
raise Usage("""libtbx.run_tests_parallel [module=NAME] [directory=path]""")
user_phil = []
for arg in args :
if os.path.isdir(arg) :
user_phil.append(libtbx.phil.parse("directory=%s" % arg))
else :
try :
arg_phil = libtbx.phil.parse(arg)
except RuntimeError :
raise Sorry("Unrecognized argument '%s'" % arg)
else :
user_phil.append(arg_phil)
params = master_phil.fetch(sources=user_phil).extract()
if params.run_in_tmp_dir:
import tempfile
run_dir = tempfile.mkdtemp(suffix='', prefix='cctbxtst')
print 'Running tests in %s' % run_dir
os.chdir(run_dir)
else:
cwd = os.getcwd()
cwd_files = os.listdir(cwd)
if (len(cwd_files) > 0) :
raise Sorry("Please run this program in an empty directory.")
if params.output_junit_xml:
try:
import junit_xml
except ImportError, e:
raise Sorry(
"Cannot import junit_xml. Try running with output_junit_xml=False")
if (len(params.directory) == 0) and (len(params.module) == 0) :
raise Sorry("Please specify modules and/or directories to test.")
all_tests = []
for dir_name in params.directory :
if os.path.split(dir_name)[-1].find("cctbx_project")>-1:
print 'DANGER '*10
print 'Using the directory option in cctbx_project can be very time consuming'
print 'DANGER '*10
dir_tests = libtbx.test_utils.parallel.find_tests(dir_name)
all_tests.extend(libtbx.test_utils.parallel.make_commands(dir_tests))
for module_name in params.module :
module_tests = libtbx.test_utils.parallel.get_module_tests(module_name)
all_tests.extend(module_tests)
if (len(all_tests) == 0) :
raise Sorry("No test scripts found in %s." % params.directory)
if (params.shuffle) :
random.shuffle(all_tests)
if (not params.quiet) :
print "Running the following %d tests on %d processors:" % (len(all_tests),
params.nproc)
for test in all_tests :
print " " + test
log = open("zlog", "wb")
libtbx.test_utils.parallel.run_command_list(
cmd_list=all_tests,
nprocs=params.nproc,
log=log,
quiet=params.quiet,
output_junit_xml=params.output_junit_xml)
log.close()
print """
============================================================================
Reminder: Please do not forget: libtbx.find_clutter
See also: cctbx_project/libtbx/development/dev_guidelines.txt
============================================================================
"""
# Command-line entry point.
if (__name__ == "__main__") :
  run(sys.argv[1:])
| mit |
Petraea/jsonbot | jsb/utils/pdod.py | 1 | 1417 | # gozerbot/pdod.py
#
#
""" pickled dicts of dicts """
## jsb imports
from jsb.utils.lazydict import LazyDict
from jsb.lib.persist import Persist
## Pdod class
class Pdod(Persist):

    """ Pickled dict-of-dicts: a Persist-backed mapping whose values are
        themselves dicts, addressed through (name1, name2) key pairs.
        Lookups of missing keys return None instead of raising.
    """

    def __getitem__(self, name):
        """ return the item stored under name, or None when missing """
        if name in self.data: return self.data[name]

    def __delitem__(self, name):
        """ delete the item stored under name (no-op when missing) """
        if name in self.data: return self.data.__delitem__(name)

    def __setitem__(self, name, item):
        """ store item under name """
        self.data[name] = item

    def __contains__(self, name):
        return self.data.__contains__(name)

    def setdefault(self, name, default):
        """ return data[name], setting it to default first when missing """
        return self.data.setdefault(name, default)

    def has_key(self, name):
        """ check whether a top-level key exists """
        # idiom fix: `in` instead of py2-only dict.has_key
        return name in self.data

    def has_key2(self, name1, najsb):
        """ check whether data[name1][najsb] exists """
        # NOTE(review): the parameter name 'najsb' looks like an artifact of an
        # automated "me" -> "jsb" rename of 'name2'; kept for API compatibility.
        if name1 in self.data: return najsb in self.data[name1]

    def get(self, name1, najsb):
        """ return data[name1][najsb], or None when either key is missing """
        try:
            return self.data[name1][najsb]
        except KeyError: pass

    def set(self, name1, najsb, item):
        """ set data[name1][najsb] to item, creating the inner dict if needed """
        if name1 not in self.data: self.data[name1] = {}
        self.data[name1][najsb] = item
| mit |
Nikoli/youtube-dl | youtube_dl/extractor/ultimedia.py | 2 | 3512 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
qualities,
unified_strdate,
clean_html,
)
class UltimediaIE(InfoExtractor):
    """Extractor for ultimedia.com video pages (news and music clips)."""
    # Matches public video pages, e.g.
    # https://www.ultimedia.com/default/index/videogeneric/id/<id>
    _VALID_URL = r'https?://(?:www\.)?ultimedia\.com/default/index/video[^/]+/id/(?P<id>[\d+a-z]+)'
    _TESTS = [{
        # news
        'url': 'https://www.ultimedia.com/default/index/videogeneric/id/s8uk0r',
        'md5': '276a0e49de58c7e85d32b057837952a2',
        'info_dict': {
            'id': 's8uk0r',
            'ext': 'mp4',
            'title': 'Loi sur la fin de vie: le texte prévoit un renforcement des directives anticipées',
            'description': 'md5:3e5c8fd65791487333dda5db8aed32af',
            'thumbnail': 're:^https?://.*\.jpg',
            'upload_date': '20150317',
        },
    }, {
        # music
        'url': 'https://www.ultimedia.com/default/index/videomusic/id/xvpfp8',
        'md5': '2ea3513813cf230605c7e2ffe7eca61c',
        'info_dict': {
            'id': 'xvpfp8',
            'ext': 'mp4',
            'title': "Two - C'est la vie (Clip)",
            'description': 'Two',
            'thumbnail': 're:^https?://.*\.jpg',
            'upload_date': '20150224',
        },
    }]
    def _real_extract(self, url):
        """Fetch the page, follow the /deliver/ player iframe, and build the
        info dict from the embedded jwplayer configuration.
        """
        video_id = self._match_id(url)
        webpage = self._download_webpage(url, video_id)
        # The page embeds the actual player in a /deliver/ iframe.
        deliver_url = self._search_regex(
            r'<iframe[^>]+src="(https?://(?:www\.)?ultimedia\.com/deliver/[^"]+)"',
            webpage, 'deliver URL')
        deliver_page = self._download_webpage(
            deliver_url, video_id, 'Downloading iframe page')
        if '>This video is currently not available' in deliver_page:
            raise ExtractorError(
                'Video %s is currently not available' % video_id, expected=True)
        # The jwplayer setup argument is a JSON object holding the media modes.
        player = self._parse_json(
            self._search_regex(
                r"jwplayer\('player(?:_temp)?'\)\.setup\(({.+?})\)\.on", deliver_page, 'player'),
            video_id)
        quality = qualities(['flash', 'html5'])
        formats = []
        for mode in player['modes']:
            video_url = mode.get('config', {}).get('file')
            if not video_url:
                continue
            # Some pages merely wrap a YouTube video; delegate in that case.
            if re.match(r'https?://www\.youtube\.com/.+?', video_url):
                return self.url_result(video_url, 'Youtube')
            formats.append({
                'url': video_url,
                'format_id': mode.get('type'),
                'quality': quality(mode.get('type')),
            })
        self._sort_formats(formats)
        thumbnail = player.get('image')
        # Title: prefer the article heading; fall back to the player variable.
        title = clean_html((
            self._html_search_regex(
                r'(?s)<div\s+id="catArticle">.+?</div>(.+?)</h1>',
                webpage, 'title', default=None) or
            self._search_regex(
                r"var\s+nameVideo\s*=\s*'([^']+)'",
                deliver_page, 'title')))
        description = clean_html(self._html_search_regex(
            r'(?s)<span>Description</span>(.+?)</p>', webpage,
            'description', fatal=False))
        # "Ajouté le <date>" (French for "Added on") carries the upload date.
        upload_date = unified_strdate(self._search_regex(
            r'Ajouté le\s*<span>([^<]+)', webpage,
            'upload date', fatal=False))
        return {
            'id': video_id,
            'title': title,
            'description': description,
            'thumbnail': thumbnail,
            'upload_date': upload_date,
            'formats': formats,
        }
| unlicense |
jor-/scipy | scipy/optimize/tests/test_linprog.py | 2 | 69667 | """
Unit test for Linear Programming
"""
from __future__ import division, print_function, absolute_import
import sys
import numpy as np
from numpy.testing import (assert_, assert_allclose, assert_equal,
assert_array_less)
from pytest import raises as assert_raises
from scipy.optimize import linprog, OptimizeWarning
from scipy._lib._numpy_compat import _assert_warns, suppress_warnings
from scipy.sparse.linalg import MatrixRankWarning
from scipy.linalg import LinAlgWarning
import pytest
# Optional-dependency probes: several tests adjust their warning filters
# depending on whether scikit-umfpack (sparse LU) and scikit-sparse
# (CHOLMOD) are importable in the current environment.
has_umfpack = True
try:
    from scikits.umfpack import UmfpackWarning
except ImportError:
    has_umfpack = False
has_cholmod = True
try:
    import sksparse
except ImportError:
    has_cholmod = False
def _assert_iteration_limit_reached(res, maxiter):
assert_(not res.success, "Incorrectly reported success")
assert_(res.success < maxiter, "Incorrectly reported number of iterations")
assert_equal(res.status, 1, "Failed to report iteration limit reached")
def _assert_infeasible(res):
# res: linprog result object
assert_(not res.success, "incorrectly reported success")
assert_equal(res.status, 2, "failed to report infeasible status")
def _assert_unbounded(res):
# res: linprog result object
assert_(not res.success, "incorrectly reported success")
assert_equal(res.status, 3, "failed to report unbounded status")
def _assert_unable_to_find_basic_feasible_sol(res):
# res: linprog result object
# The status may be either 2 or 4 depending on why the feasible solution
# could not be found. If the undelying problem is expected to not have a
# feasible solution _assert_infeasible should be used.
assert_(not res.success, "incorrectly reported success")
assert_(res.status in (2, 4), "failed to report optimization failure")
def _assert_success(res, desired_fun=None, desired_x=None,
rtol=1e-8, atol=1e-8):
# res: linprog result object
# desired_fun: desired objective function value or None
# desired_x: desired solution or None
if not res.success:
msg = "linprog status {0}, message: {1}".format(res.status,
res.message)
raise AssertionError(msg)
assert_equal(res.status, 0)
if desired_fun is not None:
assert_allclose(res.fun, desired_fun,
err_msg="converged to an unexpected objective value",
rtol=rtol, atol=atol)
if desired_x is not None:
assert_allclose(res.x, desired_x,
err_msg="converged to an unexpected solution",
rtol=rtol, atol=atol)
def magic_square(n):
    """
    Generates a linear program for which integer solutions represent an
    n x n magic square; binary decision variables represent the presence
    (or absence) of an integer 1 to n^2 in each position of the square.
    """
    np.random.seed(0)
    M = n * (n**2 + 1) / 2  # the magic constant: every line must sum to M

    # numbers[v, i, j] == v + 1: the value represented by binary variable
    # (v, i, j), i.e. "value v+1 is placed at position (i, j)".
    numbers = (np.arange(n**4) // n**2 + 1).reshape(n**2, n, n)

    shape = (n**2, n, n)
    A_rows = []
    rhs = []

    def add_constraint(row, b):
        # Flatten the indicator tensor into one row of the constraint matrix.
        A_rows.append(row.flatten())
        rhs.append(b)

    # Rule 1: use every number exactly once
    for v in range(n**2):
        row = np.zeros(shape)
        row[v, :, :] = 1
        add_constraint(row, 1)

    # Rule 2: only one number per square
    for i in range(n):
        for j in range(n):
            row = np.zeros(shape)
            row[:, i, j] = 1
            add_constraint(row, 1)

    # Rule 3: every row sums to M
    for i in range(n):
        row = np.zeros(shape)
        row[:, i, :] = numbers[:, i, :]
        add_constraint(row, M)

    # Rule 4: every column sums to M
    for j in range(n):
        row = np.zeros(shape)
        row[:, :, j] = numbers[:, :, j]
        add_constraint(row, M)

    # Rule 5: both diagonals sum to M
    diag = range(n)
    row = np.zeros(shape)
    row[:, diag, diag] = numbers[:, diag, diag]
    add_constraint(row, M)

    anti = range(-1, -n - 1, -1)
    row = np.zeros(shape)
    row[:, diag, anti] = numbers[:, diag, anti]
    add_constraint(row, M)

    A = np.array(np.vstack(A_rows), dtype=float)
    b = np.array(rhs, dtype=float)
    c = np.random.rand(A.shape[1])
    return A, b, c, numbers
def lpgen_2d(m, n):
    """ -> A b c LP test: m*n vars, m+n constraints
        row sums == n/m, col sums == 1
        https://gist.github.com/denis-bz/8647461
    """
    np.random.seed(0)
    c = -np.random.exponential(size=(m, n))

    # One constraint per row of the m x n variable grid: row sum == n / m.
    A_row_part = np.zeros((m, m * n))
    b_row_part = np.full(m, n / m)
    for i in range(m):
        A_row_part[i, i * n:(i + 1) * n] = 1

    # One constraint per column of the grid: column sum == 1.
    A_col_part = np.zeros((n, m * n))
    b_col_part = np.ones(n)
    for j in range(n):
        A_col_part[j, j::n] = 1

    A = np.vstack((A_row_part, A_col_part))
    b = np.hstack((b_row_part, b_col_part))
    return A, b, c.ravel()
def nontrivial_problem():
    # Small LP that exercises both inequality and equality constraints and
    # has a known exact (rational) optimum.
    # Returns (c, A_ub, b_ub, A_eq, b_eq, x_star, f_star) where x_star is
    # the optimal point and f_star the optimal objective value.
    c = [-1, 8, 4, -6]
    A_ub = [[-7, -7, 6, 9],
            [1, -1, -3, 0],
            [10, -10, -7, 7],
            [6, -1, 3, 4]]
    b_ub = [-3, 6, -6, 6]
    A_eq = [[-10, 1, 1, -8]]
    b_eq = [-4]
    x_star = [101 / 1391, 1462 / 1391, 0, 752 / 1391]
    f_star = 7083 / 1391
    return c, A_ub, b_ub, A_eq, b_eq, x_star, f_star
def generic_callback_test(self):
    # Check that the callback interface is as advertised: every invocation
    # receives a result object carrying the documented fields, and the final
    # invocation agrees with the returned result.
    last_cb = {}

    def cb(res):
        message = res.pop('message')
        complete = res.pop('complete')
        assert_(res.pop('phase') in (1, 2))
        assert_(res.pop('status') in range(4))
        assert_(isinstance(res.pop('nit'), int))
        assert_(isinstance(complete, bool))
        assert_(isinstance(message, str))
        for key in ('x', 'fun', 'slack', 'con'):
            last_cb[key] = res[key]

    c = np.array([-3, -2])
    A_ub = [[2, 1], [1, 1], [1, 0]]
    b_ub = [10, 8, 4]
    res = linprog(c, A_ub=A_ub, b_ub=b_ub, callback=cb, method=self.method)

    _assert_success(res, desired_fun=-18.0, desired_x=[2, 6])
    for key in ('fun', 'x', 'con', 'slack'):
        assert_allclose(last_cb[key], res[key])
def test_unknown_solver():
    """An unrecognized `method` name must raise a ValueError."""
    with assert_raises(ValueError):
        linprog(np.array([-3, -2]), A_ub=[[2, 1], [1, 1], [1, 0]],
                b_ub=[10, 8, 4], method='ekki-ekki-ekki')
# Module-level defaults: many tests below pass these names straight through
# to `linprog`; tests that need constraints shadow them with local variables.
A_ub = None
b_ub = None
A_eq = None
b_eq = None
bounds = None
################
# Common Tests #
################
class LinprogCommonTests(object):
"""
Base class for `linprog` tests. Generally, each test will be performed
once for every derived class of LinprogCommonTests, each of which will
typically change self.options and/or self.method. Effectively, these tests
are run for many combination of method (simplex, revised simplex, and
interior point) and options (such as pivoting rule or sparse treatment).
"""
##################
# Targeted Tests #
##################
    def test_callback(self):
        # Delegate to the module-level check of the callback contract.
        generic_callback_test(self)
def test_disp(self):
# test that display option does not break anything.
A, b, c = lpgen_2d(20, 20)
res = linprog(c, A_ub=A, b_ub=b, method=self.method,
options={"disp": True})
_assert_success(res, desired_fun=-64.049494229)
def test_docstring_example(self):
# Example from linprog docstring.
c = [-1, 4]
A = [[-3, 1], [1, 2]]
b = [6, 4]
x0_bounds = (None, None)
x1_bounds = (-3, None)
res = linprog(c, A_ub=A, b_ub=b, bounds=(x0_bounds, x1_bounds),
options=self.options, method=self.method)
_assert_success(res, desired_fun=-22)
    def test_type_error(self):
        # (presumably) checks that linprog recognizes type errors
        # This is tested more carefully in test__linprog_clean_inputs.py
        c = [1]
        A_eq = [[1]]
        b_eq = "hello"  # deliberately not a numeric array-like
        assert_raises(TypeError, linprog,
                      c, A_eq=A_eq, b_eq=b_eq,
                      method=self.method, options=self.options)
def test_aliasing_b_ub(self):
# (presumably) checks that linprog does not modify b_ub
# This is tested more carefully in test__linprog_clean_inputs.py
c = np.array([1.0])
A_ub = np.array([[1.0]])
b_ub_orig = np.array([3.0])
b_ub = b_ub_orig.copy()
bounds = (-4.0, np.inf)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-4, desired_x=[-4])
assert_allclose(b_ub_orig, b_ub)
def test_aliasing_b_eq(self):
# (presumably) checks that linprog does not modify b_eq
# This is tested more carefully in test__linprog_clean_inputs.py
c = np.array([1.0])
A_eq = np.array([[1.0]])
b_eq_orig = np.array([3.0])
b_eq = b_eq_orig.copy()
bounds = (-4.0, np.inf)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=3, desired_x=[3])
assert_allclose(b_eq_orig, b_eq)
def test_non_ndarray_args(self):
# (presumably) checks that linprog accepts list in place of arrays
# This is tested more carefully in test__linprog_clean_inputs.py
c = [1.0]
A_ub = [[1.0]]
b_ub = [3.0]
A_eq = [[1.0]]
b_eq = [2.0]
bounds = (-1.0, 10.0)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=2, desired_x=[2])
def test_unknown_options(self):
c = np.array([-3, -2])
A_ub = [[2, 1], [1, 1], [1, 0]]
b_ub = [10, 8, 4]
def f(c, A_ub=None, b_ub=None, A_eq=None,
b_eq=None, bounds=None, options={}):
linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=options)
o = {key: self.options[key] for key in self.options}
o['spam'] = 42
_assert_warns(OptimizeWarning, f,
c, A_ub=A_ub, b_ub=b_ub, options=o)
def test_invalid_inputs(self):
def f(c, A_ub=None, b_ub=None, A_eq=None, b_eq=None, bounds=None):
linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
for bad_bound in [[(5, 0), (1, 2), (3, 4)],
[(1, 2), (3, 4)],
[(1, 2), (3, 4), (3, 4, 5)],
[(1, 2), (np.inf, np.inf), (3, 4)],
[(1, 2), (-np.inf, -np.inf), (3, 4)],
]:
assert_raises(ValueError, f, [1, 2, 3], bounds=bad_bound)
assert_raises(ValueError, f, [1, 2], A_ub=[[1, 2]], b_ub=[1, 2])
assert_raises(ValueError, f, [1, 2], A_ub=[[1]], b_ub=[1])
assert_raises(ValueError, f, [1, 2], A_eq=[[1, 2]], b_eq=[1, 2])
assert_raises(ValueError, f, [1, 2], A_eq=[[1]], b_eq=[1])
assert_raises(ValueError, f, [1, 2], A_eq=[1], b_eq=1)
# this last check doesn't make sense for sparse presolve
if ("_sparse_presolve" in self.options and
self.options["_sparse_presolve"]):
return
# there aren't 3D sparse matrices
assert_raises(ValueError, f, [1, 2], A_ub=np.zeros((1, 1, 3)), b_eq=1)
def test_empty_constraint_1(self):
c = [-1, -2]
res = linprog(c, method=self.method, options=self.options)
_assert_unbounded(res)
def test_empty_constraint_2(self):
c = [-1, 1, -1, 1]
bounds = [(0, np.inf), (-np.inf, 0), (-1, 1), (-1, 1)]
res = linprog(c, bounds=bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
# Unboundedness detected in presolve requires no iterations
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_empty_constraint_3(self):
c = [1, -1, 1, -1]
bounds = [(0, np.inf), (-np.inf, 0), (-1, 1), (-1, 1)]
res = linprog(c, bounds=bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[0, 0, -1, 1], desired_fun=-2)
def test_inequality_constraints(self):
# Minimize linear function subject to linear inequality constraints.
# http://www.dam.brown.edu/people/huiwang/classes/am121/Archive/simplex_121_c.pdf
c = np.array([3, 2]) * -1 # maximize
A_ub = [[2, 1],
[1, 1],
[1, 0]]
b_ub = [10, 8, 4]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-18, desired_x=[2, 6])
def test_inequality_constraints2(self):
# Minimize linear function subject to linear inequality constraints.
# http://www.statslab.cam.ac.uk/~ff271/teaching/opt/notes/notes8.pdf
# (dead link)
c = [6, 3]
A_ub = [[0, 3],
[-1, -1],
[-2, 1]]
b_ub = [2, -1, -1]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=5, desired_x=[2 / 3, 1 / 3])
def test_bounds_simple(self):
c = [1, 2]
bounds = (1, 2)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[1, 1])
bounds = [(1, 2), (1, 2)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[1, 1])
def test_bounded_below_only_1(self):
c = np.array([1.0])
A_eq = np.array([[1.0]])
b_eq = np.array([3.0])
bounds = (1.0, None)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=3, desired_x=[3])
def test_bounded_below_only_2(self):
c = np.ones(3)
A_eq = np.eye(3)
b_eq = np.array([1, 2, 3])
bounds = (0.5, np.inf)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=b_eq, desired_fun=np.sum(b_eq))
def test_bounded_above_only_1(self):
c = np.array([1.0])
A_eq = np.array([[1.0]])
b_eq = np.array([3.0])
bounds = (None, 10.0)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=3, desired_x=[3])
def test_bounded_above_only_2(self):
c = np.ones(3)
A_eq = np.eye(3)
b_eq = np.array([1, 2, 3])
bounds = (-np.inf, 4)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=b_eq, desired_fun=np.sum(b_eq))
def test_bounds_infinity(self):
c = np.ones(3)
A_eq = np.eye(3)
b_eq = np.array([1, 2, 3])
bounds = (-np.inf, np.inf)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=b_eq, desired_fun=np.sum(b_eq))
def test_bounds_mixed(self):
# Problem has one unbounded variable and
# another with a negative lower bound.
c = np.array([-1, 4]) * -1 # maximize
A_ub = np.array([[-3, 1],
[1, 2]], dtype=np.float64)
b_ub = [6, 4]
x0_bounds = (-np.inf, np.inf)
x1_bounds = (-3, np.inf)
bounds = (x0_bounds, x1_bounds)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-80 / 7, desired_x=[-8 / 7, 18 / 7])
def test_bounds_equal_but_infeasible(self):
c = [-4, 1]
A_ub = [[7, -2], [0, 1], [2, -2]]
b_ub = [14, 0, 3]
bounds = [(2, 2), (0, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
def test_bounds_equal_but_infeasible2(self):
c = [-4, 1]
A_eq = [[7, -2], [0, 1], [2, -2]]
b_eq = [14, 0, 3]
bounds = [(2, 2), (0, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
def test_bounds_equal_no_presolve(self):
# There was a bug when a lower and upper bound were equal but
# presolve was not on to eliminate the variable. The bound
# was being converted to an equality constraint, but the bound
# was not eliminated, leading to issues in postprocessing.
c = [1, 2]
A_ub = [[1, 2], [1.1, 2.2]]
b_ub = [4, 8]
bounds = [(1, 2), (2, 2)]
o = {key: self.options[key] for key in self.options}
o["presolve"] = False
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=o)
_assert_infeasible(res)
def test_zero_column_1(self):
m, n = 3, 4
np.random.seed(0)
c = np.random.rand(n)
c[1] = 1
A_eq = np.random.rand(m, n)
A_eq[:, 1] = 0
b_eq = np.random.rand(m)
A_ub = [[1, 0, 1, 1]]
b_ub = 3
bounds = [(-10, 10), (-10, 10), (-10, None), (None, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-9.7087836730413404)
def test_zero_column_2(self):
np.random.seed(0)
m, n = 2, 4
c = np.random.rand(n)
c[1] = -1
A_eq = np.random.rand(m, n)
A_eq[:, 1] = 0
b_eq = np.random.rand(m)
A_ub = np.random.rand(m, n)
A_ub[:, 1] = 0
b_ub = np.random.rand(m)
bounds = (None, None)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
# Unboundedness detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_zero_row_1(self):
c = [1, 2, 3]
A_eq = [[0, 0, 0], [1, 1, 1], [0, 0, 0]]
b_eq = [0, 3, 0]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=3)
def test_zero_row_2(self):
A_ub = [[0, 0, 0], [1, 1, 1], [0, 0, 0]]
b_ub = [0, 3, 0]
c = [1, 2, 3]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=0)
def test_zero_row_3(self):
m, n = 2, 4
c = np.random.rand(n)
A_eq = np.random.rand(m, n)
A_eq[0, :] = 0
b_eq = np.random.rand(m)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
# Infeasibility detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_zero_row_4(self):
m, n = 2, 4
c = np.random.rand(n)
A_ub = np.random.rand(m, n)
A_ub[0, :] = 0
b_ub = -np.random.rand(m)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
# Infeasibility detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_singleton_row_eq_1(self):
c = [1, 1, 1, 2]
A_eq = [[1, 0, 0, 0], [0, 2, 0, 0], [1, 0, 0, 0], [1, 1, 1, 1]]
b_eq = [1, 2, 2, 4]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
# Infeasibility detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_singleton_row_eq_2(self):
c = [1, 1, 1, 2]
A_eq = [[1, 0, 0, 0], [0, 2, 0, 0], [1, 0, 0, 0], [1, 1, 1, 1]]
b_eq = [1, 2, 1, 4]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=4)
def test_singleton_row_ub_1(self):
c = [1, 1, 1, 2]
A_ub = [[1, 0, 0, 0], [0, 2, 0, 0], [-1, 0, 0, 0], [1, 1, 1, 1]]
b_ub = [1, 2, -2, 4]
bounds = [(None, None), (0, None), (0, None), (0, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
# Infeasibility detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_singleton_row_ub_2(self):
c = [1, 1, 1, 2]
A_ub = [[1, 0, 0, 0], [0, 2, 0, 0], [-1, 0, 0, 0], [1, 1, 1, 1]]
b_ub = [1, 2, -0.5, 4]
bounds = [(None, None), (0, None), (0, None), (0, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=0.5)
def test_infeasible(self):
# Test linprog response to an infeasible problem
c = [-1, -1]
A_ub = [[1, 0],
[0, 1],
[-1, -1]]
b_ub = [2, 2, -5]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
def test_infeasible_inequality_bounds(self):
c = [1]
A_ub = [[2]]
b_ub = 4
bounds = (5, 6)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
# Infeasibility detected in presolve
if self.options.get('presolve', True):
assert_equal(res.nit, 0)
def test_unbounded(self):
# Test linprog response to an unbounded problem
c = np.array([1, 1]) * -1 # maximize
A_ub = [[-1, 1],
[-1, -1]]
b_ub = [-1, -2]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
def test_unbounded_below_no_presolve_corrected(self):
c = [1]
bounds = [(None, 1)]
o = {key: self.options[key] for key in self.options}
o["presolve"] = False
res = linprog(c=c, bounds=bounds,
method=self.method,
options=o)
if self.method == "revised simplex":
# Revised simplex has a special pathway for no constraints.
assert_equal(res.status, 5)
else:
_assert_unbounded(res)
def test_unbounded_no_nontrivial_constraints_1(self):
"""
Test whether presolve pathway for detecting unboundedness after
constraint elimination is working.
"""
c = np.array([0, 0, 0, 1, -1, -1])
A_ub = np.array([[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, -1]])
b_ub = np.array([2, -2, 0])
bounds = [(None, None), (None, None), (None, None),
(-1, 1), (-1, 1), (0, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
assert_equal(res.x[-1], np.inf)
assert_equal(res.message[:36], "The problem is (trivially) unbounded")
def test_unbounded_no_nontrivial_constraints_2(self):
"""
Test whether presolve pathway for detecting unboundedness after
constraint elimination is working.
"""
c = np.array([0, 0, 0, 1, -1, 1])
A_ub = np.array([[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1]])
b_ub = np.array([2, -2, 0])
bounds = [(None, None), (None, None), (None, None),
(-1, 1), (-1, 1), (None, 0)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
assert_equal(res.x[-1], -np.inf)
assert_equal(res.message[:36], "The problem is (trivially) unbounded")
def test_cyclic_recovery(self):
# Test linprogs recovery from cycling using the Klee-Minty problem
# Klee-Minty https://www.math.ubc.ca/~israel/m340/kleemin3.pdf
c = np.array([100, 10, 1]) * -1 # maximize
A_ub = [[1, 0, 0],
[20, 1, 0],
[200, 20, 1]]
b_ub = [1, 100, 10000]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[0, 0, 10000], atol=5e-6, rtol=1e-7)
def test_cyclic_bland(self):
# Test the effect of Bland's rule on a cycling problem
c = np.array([-10, 57, 9, 24.])
A_ub = np.array([[0.5, -5.5, -2.5, 9],
[0.5, -1.5, -0.5, 1],
[1, 0, 0, 0]])
b_ub = [0, 0, 1]
# copy the existing options dictionary but change maxiter
maxiter = 100
o = {key: val for key, val in self.options.items()}
o['maxiter'] = maxiter
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=o)
if self.method == 'simplex' and not self.options.get('bland'):
# simplex cycles without Bland's rule
_assert_iteration_limit_reached(res, o['maxiter'])
else:
# other methods, including simplex with Bland's rule, succeed
_assert_success(res, desired_x=[1, 0, 1, 0])
# note that revised simplex skips this test because it may or may not
# cycle depending on the initial basis
def test_remove_redundancy_infeasibility(self):
# mostly a test of redundancy removal, which is carefully tested in
# test__remove_redundancy.py
m, n = 10, 10
c = np.random.rand(n)
A_eq = np.random.rand(m, n)
b_eq = np.random.rand(m)
A_eq[-1, :] = 2 * A_eq[-2, :]
b_eq[-1] *= -1
with suppress_warnings() as sup:
sup.filter(OptimizeWarning, "A_eq does not appear...")
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
#################
# General Tests #
#################
    def test_nontrivial_problem(self):
        # Problem involves all constraint types,
        # negative resource limits, and rounding issues.
        # The reference solution (x_star, f_star) is exact (rational), see
        # `nontrivial_problem` above.
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
def test_lpgen_problem(self):
# Test linprog with a rather large problem (400 variables,
# 40 constraints) generated by https://gist.github.com/denis-bz/8647461
A_ub, b_ub, c = lpgen_2d(20, 20)
with suppress_warnings() as sup:
sup.filter(OptimizeWarning, "Solving system with option 'sym_pos'")
sup.filter(RuntimeWarning, "invalid value encountered")
sup.filter(LinAlgWarning)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-64.049494229)
def test_network_flow(self):
# A network flow problem with supply and demand at nodes
# and with costs along directed edges.
# https://www.princeton.edu/~rvdb/542/lectures/lec10.pdf
c = [2, 4, 9, 11, 4, 3, 8, 7, 0, 15, 16, 18]
n, p = -1, 1
A_eq = [
[n, n, p, 0, p, 0, 0, 0, 0, p, 0, 0],
[p, 0, 0, p, 0, p, 0, 0, 0, 0, 0, 0],
[0, 0, n, n, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, p, p, 0, 0, p, 0],
[0, 0, 0, 0, n, n, n, 0, p, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, n, n, 0, 0, p],
[0, 0, 0, 0, 0, 0, 0, 0, 0, n, n, n]]
b_eq = [0, 19, -16, 33, 0, 0, -36]
with suppress_warnings() as sup:
sup.filter(LinAlgWarning)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=755, atol=1e-6, rtol=1e-7)
def test_network_flow_limited_capacity(self):
# A network flow problem with supply and demand at nodes
# and with costs and capacities along directed edges.
# http://blog.sommer-forst.de/2013/04/10/
c = [2, 2, 1, 3, 1]
bounds = [
[0, 4],
[0, 2],
[0, 2],
[0, 3],
[0, 5]]
n, p = -1, 1
A_eq = [
[n, n, 0, 0, 0],
[p, 0, n, n, 0],
[0, p, p, 0, n],
[0, 0, 0, p, p]]
b_eq = [-4, 0, 0, 4]
with suppress_warnings() as sup:
# this is an UmfpackWarning but I had trouble importing it
if has_umfpack:
sup.filter(UmfpackWarning)
sup.filter(RuntimeWarning, "scipy.linalg.solve\nIll...")
sup.filter(OptimizeWarning, "A_eq does not appear...")
sup.filter(OptimizeWarning, "Solving system with option...")
sup.filter(LinAlgWarning)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=14)
    def test_simplex_algorithm_wikipedia_example(self):
        # https://en.wikipedia.org/wiki/Simplex_algorithm#Example
        # Maximization posed as minimization of the negated costs; the
        # expected minimum of the negated objective is -20.
        c = [-2, -3, -4]
        A_ub = [
            [3, 2, 1],
            [2, 5, 3]]
        b_ub = [10, 15]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_fun=-20)
def test_enzo_example(self):
# https://github.com/scipy/scipy/issues/1779 lp2.py
#
# Translated from Octave code at:
# http://www.ecs.shimane-u.ac.jp/~kyoshida/lpeng.htm
# and placed under MIT licence by Enzo Michelangeli
# with permission explicitly granted by the original author,
# Prof. Kazunobu Yoshida
c = [4, 8, 3, 0, 0, 0]
A_eq = [
[2, 5, 3, -1, 0, 0],
[3, 2.5, 8, 0, -1, 0],
[8, 10, 4, 0, 0, -1]]
b_eq = [185, 155, 600]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=317.5,
desired_x=[66.25, 0, 17.5, 0, 183.75, 0],
atol=6e-6, rtol=1e-7)
def test_enzo_example_b(self):
# rescued from https://github.com/scipy/scipy/pull/218
c = [2.8, 6.3, 10.8, -2.8, -6.3, -10.8]
A_eq = [[-1, -1, -1, 0, 0, 0],
[0, 0, 0, 1, 1, 1],
[1, 0, 0, 1, 0, 0],
[0, 1, 0, 0, 1, 0],
[0, 0, 1, 0, 0, 1]]
b_eq = [-0.5, 0.4, 0.3, 0.3, 0.3]
with suppress_warnings() as sup:
sup.filter(OptimizeWarning, "A_eq does not appear...")
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-1.77,
desired_x=[0.3, 0.2, 0.0, 0.0, 0.1, 0.3])
def test_enzo_example_c_with_degeneracy(self):
# rescued from https://github.com/scipy/scipy/pull/218
m = 20
c = -np.ones(m)
tmp = 2 * np.pi * np.arange(1, m + 1) / (m + 1)
A_eq = np.vstack((np.cos(tmp) - 1, np.sin(tmp)))
b_eq = [0, 0]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=0, desired_x=np.zeros(m))
def test_enzo_example_c_with_unboundedness(self):
# rescued from https://github.com/scipy/scipy/pull/218
m = 50
c = -np.ones(m)
tmp = 2 * np.pi * np.arange(m) / (m + 1)
A_eq = np.vstack((np.cos(tmp) - 1, np.sin(tmp)))
b_eq = [0, 0]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_unbounded(res)
def test_enzo_example_c_with_infeasibility(self):
# rescued from https://github.com/scipy/scipy/pull/218
m = 50
c = -np.ones(m)
tmp = 2 * np.pi * np.arange(m) / (m + 1)
A_eq = np.vstack((np.cos(tmp) - 1, np.sin(tmp)))
b_eq = [1, 1]
o = {key: self.options[key] for key in self.options}
o["presolve"] = False
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=o)
_assert_infeasible(res)
def test_basic_artificial_vars(self):
# Problem is chosen to test two phase simplex methods when at the end
# of phase 1 some artificial variables remain in the basis.
# Also, for `method='simplex'`, the row in the tableau corresponding
# with the artificial variables is not all zero.
c = np.array([-0.1, -0.07, 0.004, 0.004, 0.004, 0.004])
A_ub = np.array([[1.0, 0, 0, 0, 0, 0], [-1.0, 0, 0, 0, 0, 0],
[0, -1.0, 0, 0, 0, 0], [0, 1.0, 0, 0, 0, 0],
[1.0, 1.0, 0, 0, 0, 0]])
b_ub = np.array([3.0, 3.0, 3.0, 3.0, 20.0])
A_eq = np.array([[1.0, 0, -1, 1, -1, 1], [0, -1.0, -1, 1, -1, 1]])
b_eq = np.array([0, 0])
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=0, desired_x=np.zeros_like(c),
atol=2e-6)
#################
# Bug Fix Tests #
#################
def test_bug_5400(self):
# https://github.com/scipy/scipy/issues/5400
bounds = [
(0, None),
(0, 100), (0, 100), (0, 100), (0, 100), (0, 100), (0, 100),
(0, 900), (0, 900), (0, 900), (0, 900), (0, 900), (0, 900),
(0, None), (0, None), (0, None), (0, None), (0, None), (0, None)]
f = 1 / 9
g = -1e4
h = -3.1
A_ub = np.array([
[1, -2.99, 0, 0, -3, 0, 0, 0, -1, -1, 0, -1, -1, 1, 1, 0, 0, 0, 0],
[1, 0, -2.9, h, 0, -3, 0, -1, 0, 0, -1, 0, -1, 0, 0, 1, 1, 0, 0],
[1, 0, 0, h, 0, 0, -3, -1, -1, 0, -1, -1, 0, 0, 0, 0, 0, 1, 1],
[0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, -1],
[0, 1.99, -1, -1, 0, 0, 0, -1, f, f, 0, 0, 0, g, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 2, -1, -1, 0, 0, 0, -1, f, f, 0, g, 0, 0, 0, 0],
[0, -1, 1.9, 2.1, 0, 0, 0, f, -1, -1, 0, 0, 0, 0, 0, g, 0, 0, 0],
[0, 0, 0, 0, -1, 2, -1, 0, 0, 0, f, -1, f, 0, 0, 0, g, 0, 0],
[0, -1, -1, 2.1, 0, 0, 0, f, f, -1, 0, 0, 0, 0, 0, 0, 0, g, 0],
[0, 0, 0, 0, -1, -1, 2, 0, 0, 0, f, f, -1, 0, 0, 0, 0, 0, g]])
b_ub = np.array([
0.0, 0, 0, 100, 100, 100, 100, 100, 100, 900, 900, 900, 900, 900,
900, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
c = np.array([-1.0, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 0, 0, 0, 0, 0, 0])
with suppress_warnings() as sup:
sup.filter(OptimizeWarning,
"Solving system with option 'sym_pos'")
sup.filter(RuntimeWarning, "invalid value encountered")
sup.filter(LinAlgWarning)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_fun=-106.63507541835018)
    def test_bug_6139(self):
        # linprog(method='simplex') fails to find a basic feasible solution
        # if phase 1 pseudo-objective function is outside the provided tol.
        # https://github.com/scipy/scipy/issues/6139
        # Note: This is not strictly a bug as the default tolerance determines
        # if a result is "close enough" to zero and should not be expected
        # to work for all cases.
        c = np.array([1, 1, 1])
        A_eq = np.array([[1., 0., 0.], [-1000., 0., - 1000.]])
        b_eq = np.array([5.00000000e+00, -1.00000000e+04])
        A_ub = -np.array([[0., 1000000., 1010000.]])
        b_ub = -np.array([10000000.])
        bounds = (None, None)
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        # Expected optimum: x = (5, 4.95, 5), so fun = 5 + 4.95 + 5 = 14.95.
        _assert_success(res, desired_fun=14.95,
                        desired_x=np.array([5, 4.95, 5]))
def test_bug_6690(self):
# linprog simplex used to violate bound constraint despite reporting
# success.
# https://github.com/scipy/scipy/issues/6690
A_eq = np.array([[0, 0, 0, 0.93, 0, 0.65, 0, 0, 0.83, 0]])
b_eq = np.array([0.9626])
A_ub = np.array([
[0, 0, 0, 1.18, 0, 0, 0, -0.2, 0, -0.22],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0.43, 0, 0, 0, 0, 0, 0],
[0, -1.22, -0.25, 0, 0, 0, -2.06, 0, 0, 1.37],
[0, 0, 0, 0, 0, 0, 0, -0.25, 0, 0]
])
b_ub = np.array([0.615, 0, 0.172, -0.869, -0.022])
bounds = np.array([
[-0.84, -0.97, 0.34, 0.4, -0.33, -0.74, 0.47, 0.09, -1.45, -0.73],
[0.37, 0.02, 2.86, 0.86, 1.18, 0.5, 1.76, 0.17, 0.32, -0.15]
]).T
c = np.array([
-1.64, 0.7, 1.8, -1.06, -1.16, 0.26, 2.13, 1.53, 0.66, 0.28
])
with suppress_warnings() as sup:
if has_umfpack:
sup.filter(UmfpackWarning)
sup.filter(OptimizeWarning,
"Solving system with option 'cholesky'")
sup.filter(OptimizeWarning, "Solving system with option 'sym_pos'")
sup.filter(RuntimeWarning, "invalid value encountered")
sup.filter(LinAlgWarning)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
desired_fun = -1.19099999999
desired_x = np.array([0.3700, -0.9700, 0.3400, 0.4000, 1.1800,
0.5000, 0.4700, 0.0900, 0.3200, -0.7300])
_assert_success(res, desired_fun=desired_fun, desired_x=desired_x)
# Add small tol value to ensure arrays are less than or equal.
atol = 1e-6
assert_array_less(bounds[:, 0] - atol, res.x)
assert_array_less(res.x, bounds[:, 1] + atol)
def test_bug_7044(self):
# linprog simplex failed to "identify correct constraints" (?)
# leading to a non-optimal solution if A is rank-deficient.
# https://github.com/scipy/scipy/issues/7044
A_eq, b_eq, c, N = magic_square(3)
with suppress_warnings() as sup:
sup.filter(OptimizeWarning, "A_eq does not appear...")
sup.filter(RuntimeWarning, "invalid value encountered")
sup.filter(LinAlgWarning)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
desired_fun = 1.730550597
_assert_success(res, desired_fun=desired_fun)
assert_allclose(A_eq.dot(res.x), b_eq)
assert_array_less(np.zeros(res.x.size) - 1e-5, res.x)
    def test_bug_7237(self):
        # https://github.com/scipy/scipy/issues/7237
        # linprog simplex "explodes" when the pivot value is very
        # close to zero.
        # Maximize x[0] (c minimizes -x[0]); the remaining variables are
        # constrained into [0, 1] by the explicit +/- identity rows below.
        c = np.array([-1, 0, 0, 0, 0, 0, 0, 0, 0])
        A_ub = np.array([
            [1., -724., 911., -551., -555., -896., 478., -80., -293.],
            [1., 566., 42., 937., 233., 883., 392., -909., 57.],
            [1., -208., -894., 539., 321., 532., -924., 942., 55.],
            [1., 857., -859., 83., 462., -265., -971., 826., 482.],
            [1., 314., -424., 245., -424., 194., -443., -104., -429.],
            [1., 540., 679., 361., 149., -827., 876., 633., 302.],
            # -I rows: enforce x[1:] >= 0
            [0., -1., -0., -0., -0., -0., -0., -0., -0.],
            [0., -0., -1., -0., -0., -0., -0., -0., -0.],
            [0., -0., -0., -1., -0., -0., -0., -0., -0.],
            [0., -0., -0., -0., -1., -0., -0., -0., -0.],
            [0., -0., -0., -0., -0., -1., -0., -0., -0.],
            [0., -0., -0., -0., -0., -0., -1., -0., -0.],
            [0., -0., -0., -0., -0., -0., -0., -1., -0.],
            [0., -0., -0., -0., -0., -0., -0., -0., -1.],
            # +I rows: enforce x[1:] <= 1
            [0., 1., 0., 0., 0., 0., 0., 0., 0.],
            [0., 0., 1., 0., 0., 0., 0., 0., 0.],
            [0., 0., 0., 1., 0., 0., 0., 0., 0.],
            [0., 0., 0., 0., 1., 0., 0., 0., 0.],
            [0., 0., 0., 0., 0., 1., 0., 0., 0.],
            [0., 0., 0., 0., 0., 0., 1., 0., 0.],
            [0., 0., 0., 0., 0., 0., 0., 1., 0.],
            [0., 0., 0., 0., 0., 0., 0., 0., 1.]
        ])
        b_ub = np.array([
            0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.,
            0., 0., 0., 1., 1., 1., 1., 1., 1., 1., 1.])
        # x[1:] must sum to 1 (a convex-combination constraint).
        A_eq = np.array([[0., 1., 1., 1., 1., 1., 1., 1., 1.]])
        b_eq = np.array([[1.]])
        bounds = [(None, None)] * 9
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options)
        _assert_success(res, desired_fun=108.568535, atol=1e-6)
    def test_bug_8174(self):
        # https://github.com/scipy/scipy/issues/8174
        # The simplex method sometimes "explodes" if the pivot value is very
        # close to zero.
        A_ub = np.array([
            [22714, 1008, 13380, -2713.5, -1116],
            [-4986, -1092, -31220, 17386.5, 684],
            [-4986, 0, 0, -2713.5, 0],
            [22714, 0, 0, 17386.5, 0]])
        b_ub = np.zeros(A_ub.shape[0])
        # Maximize the sum of the variables, each bounded to [0, 1].
        c = -np.ones(A_ub.shape[1])
        bounds = [(0, 1)] * A_ub.shape[1]
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=self.options)
        # With an unrealistically strict tolerance, simplex legitimately
        # fails to find a basic feasible solution; otherwise the known
        # optimum must be attained.
        if self.options.get('tol', 1e-9) < 1e-10 and self.method == 'simplex':
            _assert_unable_to_find_basic_feasible_sol(res)
        else:
            _assert_success(res, desired_fun=-2.0080717488789235, atol=1e-6)
    def test_bug_8174_2(self):
        # Test supplementary example from issue 8174.
        # https://github.com/scipy/scipy/issues/8174
        # https://stackoverflow.com/questions/47717012/linprog-in-scipy-optimize-checking-solution
        # Minimize x[0]; x[6] is a slack-like variable tied to the others
        # by the equality constraints below.
        c = np.array([1, 0, 0, 0, 0, 0, 0])
        # -I: every variable must be >= 2.
        A_ub = -np.identity(7)
        b_ub = np.array([[-2], [-2], [-2], [-2], [-2], [-2], [-2]])
        A_eq = np.array([
            [1, 1, 1, 1, 1, 1, 0],
            [0.3, 1.3, 0.9, 0, 0, 0, -1],
            [0.3, 0, 0, 0, 0, 0, -2/3],
            [0, 0.65, 0, 0, 0, 0, -1/15],
            [0, 0, 0.3, 0, 0, 0, -1/15]
        ])
        b_eq = np.array([[100], [0], [0], [0], [0]])
        with suppress_warnings() as sup:
            if has_umfpack:
                sup.filter(UmfpackWarning)
            # A_eq above is rank-deficient; silence the redundancy warning.
            sup.filter(OptimizeWarning, "A_eq does not appear...")
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=self.options)
        _assert_success(res, desired_fun=43.3333333331385)
def test_bug_8561(self):
# Test that pivot row is chosen correctly when using Bland's rule
# This was originally written for the simplex method with
# Bland's rule only, but it doesn't hurt to test all methods/options
# https://github.com/scipy/scipy/issues/8561
c = np.array([7, 0, -4, 1.5, 1.5])
A_ub = np.array([
[4, 5.5, 1.5, 1.0, -3.5],
[1, -2.5, -2, 2.5, 0.5],
[3, -0.5, 4, -12.5, -7],
[-1, 4.5, 2, -3.5, -2],
[5.5, 2, -4.5, -1, 9.5]])
b_ub = np.array([0, 0, 0, 0, 1])
res = linprog(c, A_ub=A_ub, b_ub=b_ub, options=self.options,
method=self.method)
_assert_success(res, desired_x=[0, 0, 19, 16/3, 29/3])
    def test_bug_8662(self):
        # linprog simplex used to report incorrect optimal results
        # https://github.com/scipy/scipy/issues/8662
        c = [-10, 10, 6, 3]
        A_ub = [[8, -8, -4, 6],
                [-8, 8, 4, -6],
                [-4, 4, 8, -4],
                [3, -3, -3, -10]]
        b_ub = [9, -9, -9, -4]
        bounds = [(0, None), (0, None), (0, None), (0, None)]
        desired_fun = 36.0000000000
        with suppress_warnings() as sup:
            if has_umfpack:
                sup.filter(UmfpackWarning)
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res1 = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                           method=self.method, options=self.options)
        # Re-solve an equivalent problem: express x[2] >= 0 as an explicit
        # inequality row (note: A_ub/b_ub/bounds are mutated in place here,
        # so the first solve above must happen before this point).
        # Set boundary condition as a constraint
        A_ub.append([0, 0, -1, 0])
        b_ub.append(0)
        bounds[2] = (None, None)
        with suppress_warnings() as sup:
            if has_umfpack:
                sup.filter(UmfpackWarning)
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res2 = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                           method=self.method, options=self.options)
        # Both formulations must reach the same optimum.
        rtol = 1e-5
        _assert_success(res1, desired_fun=desired_fun, rtol=rtol)
        _assert_success(res2, desired_fun=desired_fun, rtol=rtol)
def test_bug_8663(self):
# exposed a bug in presolve
# https://github.com/scipy/scipy/issues/8663
c = [1, 5]
A_eq = [[0, -7]]
b_eq = [-6]
bounds = [(0, None), (None, None)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[0, 6./7], desired_fun=5*6./7)
def test_bug_8664(self):
# interior-point has trouble with this when presolve is off
# tested for interior-point with presolve off in TestLinprogIPSpecific
# https://github.com/scipy/scipy/issues/8664
c = [4]
A_ub = [[2], [5]]
b_ub = [4, 4]
A_eq = [[0], [-8], [9]]
b_eq = [3, 2, 10]
with suppress_warnings() as sup:
sup.filter(RuntimeWarning)
sup.filter(OptimizeWarning, "Solving system with option...")
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_infeasible(res)
def test_bug_8973(self):
"""
Test whether bug described at:
https://github.com/scipy/scipy/issues/8973
was fixed.
"""
c = np.array([0, 0, 0, 1, -1])
A_ub = np.array([[1, 0, 0, 0, 0], [0, 1, 0, 0, 0]])
b_ub = np.array([2, -2])
bounds = [(None, None), (None, None), (None, None), (-1, 1), (-1, 1)]
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[2, -2, 0, -1, 1], desired_fun=-2)
def test_bug_8973_2(self):
"""
Additional test for:
https://github.com/scipy/scipy/issues/8973
suggested in
https://github.com/scipy/scipy/pull/8985
review by @antonior92
"""
c = np.zeros(1)
A_ub = np.array([[1]])
b_ub = np.array([-2])
bounds = (None, None)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[-2], desired_fun=0)
def test_bug_10124(self):
"""
Test for linprog docstring problem
'disp'=True caused revised simplex failure
"""
c = np.zeros(1)
A_ub = np.array([[1]])
b_ub = np.array([-2])
bounds = (None, None)
c = [-1, 4]
A_ub = [[-3, 1], [1, 2]]
b_ub = [6, 4]
bounds = [(None, None), (-3, None)]
o = {"disp": True}
o.update(self.options)
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=o)
_assert_success(res, desired_x=[10, -3], desired_fun=-22)
def test_bug_10349(self):
"""
Test for redundancy removal tolerance issue
https://github.com/scipy/scipy/issues/10349
"""
A_eq = np.array([[1, 1, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0],
[0, 0, 0, 0, 1, 1],
[1, 0, 1, 0, 0, 0],
[0, 0, 0, 1, 1, 0],
[0, 1, 0, 0, 0, 1]])
b_eq = np.array([221, 210, 10, 141, 198, 102])
c = np.concatenate((0, 1, np.zeros(4)), axis=None)
with suppress_warnings() as sup:
sup.filter(OptimizeWarning, "A_eq does not appear...")
res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
method=self.method, options=self.options)
_assert_success(res, desired_x=[129, 92, 12, 198, 0, 10], desired_fun=92)
    def test_bug_10466(self):
        """
        Test that autoscale fixes poorly-scaled problem
        """
        # Coefficients differ by ~9 orders of magnitude between c/A_eq
        # and b_eq, which defeats the solvers unless autoscaling is on.
        c = [-8., -0., -8., -0., -8., -0., -0., -0., -0., -0., -0., -0., -0.]
        A_eq = [[1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
                [0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0., 0., 0.],
                [0., 0., 0., 0., 1., 1., 0., 0., 0., 0., 0., 0., 0.],
                [1., 0., 1., 0., 1., 0., -1., 0., 0., 0., 0., 0., 0.],
                [1., 0., 1., 0., 1., 0., 0., 1., 0., 0., 0., 0., 0.],
                [1., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0., 0.],
                [1., 0., 0., 0., 0., 0., 0., 0., 0., 1., 0., 0., 0.],
                [1., 0., 1., 0., 1., 0., 0., 0., 0., 0., 1., 0., 0.],
                [0., 0., 1., 0., 1., 0., 0., 0., 0., 0., 0., 1., 0.],
                [0., 0., 1., 0., 1., 0., 0., 0., 0., 0., 0., 0., 1.]]
        b_eq = [3.14572800e+08, 4.19430400e+08, 5.24288000e+08,
                1.00663296e+09, 1.07374182e+09, 1.07374182e+09,
                1.07374182e+09, 1.07374182e+09, 1.07374182e+09,
                1.07374182e+09]
        # Enable autoscale on top of the per-class options.
        o = {"autoscale": True}
        o.update(self.options)
        with suppress_warnings() as sup:
            # Ill-conditioning produces a grab-bag of numerical warnings
            # depending on the method/backend; silence them all.
            sup.filter(OptimizeWarning, "Solving system with option...")
            if has_umfpack:
                sup.filter(UmfpackWarning)
            sup.filter(RuntimeWarning, "scipy.linalg.solve\nIll...")
            sup.filter(RuntimeWarning, "divide by zero encountered...")
            sup.filter(RuntimeWarning, "overflow encountered...")
            sup.filter(RuntimeWarning, "invalid value encountered...")
            sup.filter(LinAlgWarning, "Ill-conditioned matrix...")
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=o)
        assert_allclose(res.fun, -8589934560)
#########################
# Method-specific Tests #
#########################
class LinprogSimplexTests(LinprogCommonTests):
    """Base class running the common linprog tests with method='simplex'."""
    method = "simplex"
class LinprogIPTests(LinprogCommonTests):
    """Base class running the common linprog tests with method='interior-point'."""
    method = "interior-point"
class LinprogRSTests(LinprogCommonTests):
    """Base class running the common linprog tests with method='revised simplex'."""
    method = "revised simplex"

    # Revised simplex does not reliably solve these problems.
    # Failure is intermittent due to the random choice of elements to complete
    # the basis after phase 1 terminates. In any case, linprog exits
    # gracefully, reporting numerical difficulties. I do not think this should
    # prevent revised simplex from being merged, as it solves the problems
    # most of the time and solves a broader range of problems than the existing
    # simplex implementation.
    # I believe that the root cause is the same for all three and that this
    # same issue prevents revised simplex from solving many other problems
    # reliably. Somehow the pivoting rule allows the algorithm to pivot into
    # a singular basis. I haven't been able to find a reference that
    # acknowledges this possibility, suggesting that there is a bug. On the
    # other hand, the pivoting rule is quite simple, and I can't find a
    # mistake, which suggests that this is a possibility with the pivoting
    # rule. Hopefully a better pivoting rule will fix the issue.

    def test_bug_5400(self):
        pytest.skip("Intermittent failure acceptable.")

    def test_bug_8662(self):
        pytest.skip("Intermittent failure acceptable.")

    def test_network_flow(self):
        pytest.skip("Intermittent failure acceptable.")
################################
# Simplex Option-Specific Tests#
################################
class TestLinprogSimplexDefault(LinprogSimplexTests):
    """Simplex tests with default options."""

    def setup_method(self):
        self.options = {}

    def test_bug_5400(self):
        # With default options simplex cannot solve this problem;
        # the appropriate error must be raised.
        with pytest.raises(ValueError):
            super(TestLinprogSimplexDefault, self).test_bug_5400()

    def test_bug_7237_low_tol(self):
        # Fails if the tolerance is too strict. Here we test that
        # even if the solution is wrong, the appropriate error is raised.
        self.options.update({'tol': 1e-12})
        with pytest.raises(ValueError):
            super(TestLinprogSimplexDefault, self).test_bug_7237()

    def test_bug_8174_low_tol(self):
        # Fails if the tolerance is too strict. Here we test that
        # even if the solution is wrong, the appropriate warning is issued.
        self.options.update({'tol': 1e-12})
        with pytest.warns(OptimizeWarning):
            super(TestLinprogSimplexDefault, self).test_bug_8174()
class TestLinprogSimplexBland(LinprogSimplexTests):
    """Simplex tests with Bland's anti-cycling pivoting rule enabled."""

    def setup_method(self):
        self.options = {'bland': True}

    def test_bug_5400(self):
        with pytest.raises(ValueError):
            super(TestLinprogSimplexBland, self).test_bug_5400()

    def test_bug_8174_low_tol(self):
        # Fails if the tolerance is too strict. Here we test that
        # even if the solution is wrong, the appropriate error is raised.
        self.options.update({'tol': 1e-12})
        # With Bland's rule the warning is issued but the final assertion
        # in the base test still fails, hence the nested raises/warns.
        with pytest.raises(AssertionError):
            with pytest.warns(OptimizeWarning):
                super(TestLinprogSimplexBland, self).test_bug_8174()
class TestLinprogSimplexNoPresolve(LinprogSimplexTests):
    """Simplex tests with presolve (problem simplification) disabled."""

    def setup_method(self):
        self.options = {'presolve': False}

    # Platform flags used to mark a known 32-bit Linux failure below.
    is_32_bit = np.intp(0).itemsize < 8
    is_linux = sys.platform.startswith('linux')

    @pytest.mark.xfail(
        condition=is_32_bit and is_linux,
        reason='Fails with warning on 32-bit linux')
    def test_bug_5400(self):
        super(TestLinprogSimplexNoPresolve, self).test_bug_5400()

    def test_bug_6139_low_tol(self):
        # Linprog(method='simplex') fails to find a basic feasible solution
        # if phase 1 pseudo-objective function is outside the provided tol.
        # https://github.com/scipy/scipy/issues/6139
        # Without ``presolve`` eliminating such rows the result is incorrect.
        self.options.update({'tol': 1e-12})
        with pytest.raises(ValueError):
            return super(TestLinprogSimplexNoPresolve, self).test_bug_6139()

    def test_bug_7237_low_tol(self):
        # Fails if the tolerance is too strict. Here we test that
        # even if the solution is wrong, the appropriate error is raised.
        self.options.update({'tol': 1e-12})
        with pytest.raises(ValueError):
            super(TestLinprogSimplexNoPresolve, self).test_bug_7237()

    def test_bug_8174_low_tol(self):
        # Fails if the tolerance is too strict. Here we test that
        # even if the solution is wrong, the appropriate warning is issued.
        self.options.update({'tol': 1e-12})
        with pytest.warns(OptimizeWarning):
            super(TestLinprogSimplexNoPresolve, self).test_bug_8174()

    def test_unbounded_no_nontrivial_constraints_1(self):
        pytest.skip("Tests behavior specific to presolve")

    def test_unbounded_no_nontrivial_constraints_2(self):
        pytest.skip("Tests behavior specific to presolve")
#######################################
# Interior-Point Option-Specific Tests#
#######################################
class TestLinprogIPDense(LinprogIPTests):
    """Interior-point tests using dense linear algebra."""
    options = {"sparse": False}
# Only defined when scikit-sparse/CHOLMOD is importable in this environment.
if has_cholmod:
    class TestLinprogIPSparseCholmod(LinprogIPTests):
        # Sparse interior-point backed by a CHOLMOD Cholesky factorization.
        options = {"sparse": True, "cholesky": True}
# Only defined when scikit-umfpack is importable in this environment.
if has_umfpack:
    class TestLinprogIPSparseUmfpack(LinprogIPTests):
        # Sparse interior-point using UMFPACK (no Cholesky factorization).
        options = {"sparse": True, "cholesky": False}

        def test_bug_10466(self):
            pytest.skip("Autoscale doesn't fix everything, and that's OK.")
class TestLinprogIPSparse(LinprogIPTests):
    """Sparse interior-point with the generic (non-sym_pos) solver."""
    options = {"sparse": True, "cholesky": False, "sym_pos": False}

    @pytest.mark.xfail(reason='Fails with ATLAS, see gh-7877')
    def test_bug_6690(self):
        # Test defined in base class, but can't mark as xfail there
        super(TestLinprogIPSparse, self).test_bug_6690()

    def test_magic_square_sparse_no_presolve(self):
        # test linprog with a problem with a rank-deficient A_eq matrix
        A_eq, b_eq, c, N = magic_square(3)
        bounds = (0, 1)
        with suppress_warnings() as sup:
            if has_umfpack:
                sup.filter(UmfpackWarning)
            # Without presolve the redundant rows make the KKT system
            # singular; the solver recovers but warns along the way.
            sup.filter(MatrixRankWarning, "Matrix is exactly singular")
            sup.filter(OptimizeWarning, "Solving system with option...")
            # Shallow copy so the class-level options dict is not mutated.
            o = {key: self.options[key] for key in self.options}
            o["presolve"] = False
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options=o)
        _assert_success(res, desired_fun=1.730550597)

    def test_sparse_solve_options(self):
        # checking that problem is solved with all column permutation options
        A_eq, b_eq, c, N = magic_square(3)
        with suppress_warnings() as sup:
            sup.filter(OptimizeWarning, "A_eq does not appear...")
            sup.filter(OptimizeWarning, "Invalid permc_spec option")
            # Shallow copy so the class-level options dict is not mutated.
            o = {key: self.options[key] for key in self.options}
            permc_specs = ('NATURAL', 'MMD_ATA', 'MMD_AT_PLUS_A',
                           'COLAMD', 'ekki-ekki-ekki')
            # 'ekki-ekki-ekki' raises warning about invalid permc_spec option
            # and uses default
            for permc_spec in permc_specs:
                o["permc_spec"] = permc_spec
                res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                              method=self.method, options=o)
                _assert_success(res, desired_fun=1.730550597)
class TestLinprogIPSparsePresolve(LinprogIPTests):
    """Sparse interior-point exercising the sparse presolve path."""
    options = {"sparse": True, "_sparse_presolve": True}

    def test_enzo_example_c_with_infeasibility(self):
        pytest.skip('_sparse_presolve=True incompatible with presolve=False')

    @pytest.mark.xfail(reason='Fails with ATLAS, see gh-7877')
    def test_bug_6690(self):
        # Test defined in base class, but can't mark as xfail there
        super(TestLinprogIPSparsePresolve, self).test_bug_6690()
class TestLinprogIPSpecific(object):
    """Interior-point tests that are independent of the sparse/dense backend."""
    method = "interior-point"
    # the following tests don't need to be performed separately for
    # sparse presolve, sparse after presolve, and dense

    def test_solver_select(self):
        # check that default solver is selected as expected
        # The preferred backend depends on what is installed:
        # CHOLMOD > UMFPACK > generic sparse solver.
        if has_cholmod:
            options = {'sparse': True, 'cholesky': True}
        elif has_umfpack:
            options = {'sparse': True, 'cholesky': False}
        else:
            options = {'sparse': True, 'cholesky': False, 'sym_pos': False}
        A, b, c = lpgen_2d(20, 20)
        res1 = linprog(c, A_ub=A, b_ub=b, method=self.method, options=options)
        res2 = linprog(c, A_ub=A, b_ub=b, method=self.method)  # default solver
        assert_allclose(res1.fun, res2.fun,
                        err_msg="linprog default solver unexpected result",
                        rtol=1e-15, atol=1e-15)

    def test_unbounded_below_no_presolve_original(self):
        # formerly caused segfault in TravisCI w/ "cholesky":True
        c = [-1]
        bounds = [(None, 1)]
        res = linprog(c=c, bounds=bounds,
                      method=self.method,
                      options={"presolve": False, "cholesky": True})
        _assert_success(res, desired_fun=-1)

    def test_cholesky(self):
        # use cholesky factorization and triangular solves
        A, b, c = lpgen_2d(20, 20)
        res = linprog(c, A_ub=A, b_ub=b, method=self.method,
                      options={"cholesky": True})  # only for dense
        _assert_success(res, desired_fun=-64.049494229)

    def test_alternate_initial_point(self):
        # use "improved" initial point
        A, b, c = lpgen_2d(20, 20)
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning, "scipy.linalg.solve\nIll...")
            sup.filter(OptimizeWarning, "Solving system with option...")
            sup.filter(LinAlgWarning, "Ill-conditioned matrix...")
            res = linprog(c, A_ub=A, b_ub=b, method=self.method,
                          options={"ip": True, "disp": True})
            # ip code is independent of sparse/dense
        _assert_success(res, desired_fun=-64.049494229)

    def test_maxiter(self):
        # test iteration limit
        A, b, c = lpgen_2d(20, 20)
        # Choose a limit strictly below the 7 iterations the problem takes,
        # so termination must be due to maxiter.
        maxiter = np.random.randint(6) + 1  # problem takes 7 iterations
        res = linprog(c, A_ub=A, b_ub=b, method=self.method,
                      options={"maxiter": maxiter})
        # maxiter is independent of sparse/dense
        _assert_iteration_limit_reached(res, maxiter)
        assert_equal(res.nit, maxiter)

    def test_bug_8664(self):
        # interior-point has trouble with this when presolve is off
        c = [4]
        A_ub = [[2], [5]]
        b_ub = [4, 4]
        A_eq = [[0], [-8], [9]]
        b_eq = [3, 2, 10]
        with suppress_warnings() as sup:
            sup.filter(RuntimeWarning)
            sup.filter(OptimizeWarning, "Solving system with option...")
            res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                          method=self.method, options={"presolve": False})
        # Without presolve the method may not detect infeasibility cleanly,
        # but it must at least not claim success.
        assert_(not res.success, "Incorrectly reported success")
########################################
# Revised Simplex Option-Specific Tests#
########################################
class TestLinprogRSCommon(LinprogRSTests):
    """Revised simplex with default options, plus warm-start (x0) tests."""
    options = {}

    def test_cyclic_bland(self):
        pytest.skip("Intermittent failure acceptable.")

    def test_nontrivial_problem_with_guess(self):
        # Starting exactly at the optimum must terminate in 0 iterations.
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=x_star)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
        assert_equal(res.nit, 0)

    def test_nontrivial_problem_with_unbounded_variables(self):
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        bounds = [(None, None), (None, None), (0, None), (None, None)]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=x_star)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
        assert_equal(res.nit, 0)

    def test_nontrivial_problem_with_bounded_variables(self):
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        bounds = [(None, 1), (1, None), (0, None), (.4, .6)]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=x_star)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
        assert_equal(res.nit, 0)

    def test_nontrivial_problem_with_negative_unbounded_variable(self):
        # Modified problem: changing b_eq moves the optimum to a point with
        # a negative component, still reachable with a warm start.
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        b_eq = [4]
        x_star = np.array([-219/385, 582/385, 0, 4/10])
        f_star = 3951/385
        bounds = [(None, None), (1, None), (0, None), (.4, .6)]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=x_star)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
        assert_equal(res.nit, 0)

    def test_nontrivial_problem_with_bad_guess(self):
        # An infeasible guess must be reported via status code 6.
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        bad_guess = [1, 2, 3, .5]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=bad_guess)
        assert_equal(res.status, 6)

    def test_redundant_constraints_with_guess(self):
        A, b, c, N = magic_square(3)
        p = np.random.rand(*c.shape)
        with suppress_warnings() as sup:
            sup.filter(OptimizeWarning, "A_eq does not appear...")
            sup.filter(RuntimeWarning, "invalid value encountered")
            sup.filter(LinAlgWarning)
            res = linprog(c, A_eq=A, b_eq=b, method=self.method)
            res2 = linprog(c, A_eq=A, b_eq=b, method=self.method, x0=res.x)
            # A perturbed objective with the previous optimum as x0 should
            # converge faster than a cold start.
            res3 = linprog(c + p, A_eq=A, b_eq=b, method=self.method, x0=res.x)
        _assert_success(res2, desired_fun=1.730550597)
        assert_equal(res2.nit, 0)
        _assert_success(res3)
        assert_(res3.nit < res.nit)  # hot start reduces iterations
class TestLinprogRSBland(LinprogRSTests):
    """Revised simplex using Bland's pivoting rule."""
    options = {"pivot": "bland"}
###########################
# Autoscale-Specific Tests#
###########################
class AutoscaleTests(object):
    """Mixin re-running scaling-sensitive regression tests with autoscale on."""
    options = {"autoscale": True}

    # These three base-class tests are particularly sensitive to problem
    # scaling, so they are re-bound here to run under autoscale.
    test_bug_6139 = LinprogCommonTests.test_bug_6139
    test_bug_6690 = LinprogCommonTests.test_bug_6690
    test_bug_7237 = LinprogCommonTests.test_bug_7237
class TestAutoscaleIP(AutoscaleTests):
    """Autoscale tests with the interior-point method."""
    method = "interior-point"

    def test_bug_6139(self):
        # Interior-point needs a tighter tolerance for this problem.
        self.options['tol'] = 1e-10
        return AutoscaleTests.test_bug_6139(self)
class TestAutoscaleSimplex(AutoscaleTests):
    """Autoscale tests with the simplex method."""
    method = "simplex"
class TestAutoscaleRS(AutoscaleTests):
    """Autoscale tests with revised simplex, including warm starts."""
    method = "revised simplex"

    def test_nontrivial_problem_with_guess(self):
        # Starting at the known optimum must terminate in 0 iterations.
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=x_star)
        _assert_success(res, desired_fun=f_star, desired_x=x_star)
        assert_equal(res.nit, 0)

    def test_nontrivial_problem_with_bad_guess(self):
        # An infeasible guess must be reported via status code 6.
        c, A_ub, b_ub, A_eq, b_eq, x_star, f_star = nontrivial_problem()
        bad_guess = [1, 2, 3, .5]
        res = linprog(c, A_ub, b_ub, A_eq, b_eq, bounds,
                      method=self.method, options=self.options, x0=bad_guess)
        assert_equal(res.status, 6)
| bsd-3-clause |
Syrcon/servo | tests/wpt/web-platform-tests/tools/pytest/_pytest/skipping.py | 168 | 12742 | """ support for skip/xfail functions and markers. """
import os
import sys
import traceback
import py
import pytest
from _pytest.mark import MarkInfo, MarkDecorator
def pytest_addoption(parser):
    """Register the ``--runxfail`` flag and the ``xfail_strict`` ini option."""
    general = parser.getgroup("general")
    general.addoption(
        '--runxfail',
        action="store_true", dest="runxfail", default=False,
        help="run tests even if they are marked xfail")
    # Project-wide default for xfail(strict=...) when a marker does not
    # specify it explicitly.
    parser.addini(
        "xfail_strict",
        "default for the strict parameter of xfail "
        "markers when not given explicitly (default: "
        "False)",
        default=False,
        type="bool")
def pytest_configure(config):
    # With --runxfail, replace pytest.xfail with a no-op so xfail markers
    # are effectively ignored; the original is restored at session cleanup.
    if config.option.runxfail:
        old = pytest.xfail
        config._cleanup.append(lambda: setattr(pytest, "xfail", old))
        def nop(*args, **kwargs):
            pass
        # Keep the .Exception attribute so "except pytest.xfail.Exception"
        # in user code continues to work.
        nop.Exception = XFailed
        setattr(pytest, "xfail", nop)

    # Register the skipif/xfail markers so --strict-markers accepts them.
    config.addinivalue_line("markers",
        "skipif(condition): skip the given test function if eval(condition) "
        "results in a True value.  Evaluation happens within the "
        "module global context. Example: skipif('sys.platform == \"win32\"') "
        "skips the test if we are on the win32 platform. see "
        "http://pytest.org/latest/skipping.html"
    )
    config.addinivalue_line("markers",
        "xfail(condition, reason=None, run=True, raises=None): mark the the test function "
        "as an expected failure if eval(condition) has a True value. "
        "Optionally specify a reason for better reporting and run=False if "
        "you don't even want to execute the test function. If only specific "
        "exception(s) are expected, you can list them in raises, and if the test fails "
        "in other ways, it will be reported as a true failure. "
        "See http://pytest.org/latest/skipping.html"
    )
def pytest_namespace():
    """Publish :func:`xfail` as ``pytest.xfail``."""
    return {"xfail": xfail}
class XFailed(pytest.fail.Exception):
    """ raised from an explicit call to pytest.xfail() """
def xfail(reason=""):
    """ xfail an executing test or setup functions with the given reason."""
    # Hide this frame from pytest tracebacks.
    __tracebackhide__ = True
    raise XFailed(reason)

# Expose the exception type on the function, mirroring pytest.fail/skip.
xfail.Exception = XFailed
class MarkEvaluator:
    """Evaluate a skipif/xfail marker's condition(s) for a collected item.

    Wraps the marker stored under ``name`` in ``item.keywords`` and decides
    whether its condition holds, caching the outcome on ``self.result``.
    """

    def __init__(self, item, name):
        self.item = item
        self.name = name

    @property
    def holder(self):
        # The MarkInfo/MarkDecorator for this marker, or None if absent.
        return self.item.keywords.get(self.name)

    def __bool__(self):
        # Truthy iff the marker is present on the item at all.
        return bool(self.holder)
    __nonzero__ = __bool__  # Python 2 spelling of __bool__

    def wasvalid(self):
        # False if a previous istrue() call blew up evaluating the condition.
        return not hasattr(self, 'exc')

    def invalidraise(self, exc):
        """Return True if *exc* is not one of the expected ``raises=`` types.

        Returns None (falsy) when no ``raises=`` restriction was given.
        """
        raises = self.get('raises')
        if not raises:
            return
        return not isinstance(exc, raises)

    def istrue(self):
        """Evaluate the condition, turning evaluation errors into a fail."""
        try:
            return self._istrue()
        except Exception:
            self.exc = sys.exc_info()
            if isinstance(self.exc[1], SyntaxError):
                # Point a caret at the offending column of the expression.
                msg = [" " * (self.exc[1].offset + 4) + "^",]
                msg.append("SyntaxError: invalid syntax")
            else:
                msg = traceback.format_exception_only(*self.exc[:2])
            pytest.fail("Error evaluating %r expression\n"
                        "    %s\n"
                        "%s"
                        %(self.name, self.expr, "\n".join(msg)),
                        pytrace=False)

    def _getglobals(self):
        # Namespace for eval(): os/sys/config plus the test function's
        # module globals, so conditions can reference module-level names.
        d = {'os': os, 'sys': sys, 'config': self.item.config}
        func = self.item.obj
        try:
            d.update(func.__globals__)
        except AttributeError:
            d.update(func.func_globals)  # Python 2 fallback
        return d

    def _istrue(self):
        # Cached from an earlier call?
        if hasattr(self, 'result'):
            return self.result
        if self.holder:
            d = self._getglobals()
            if self.holder.args:
                self.result = False
                # "holder" might be a MarkInfo or a MarkDecorator; only
                # MarkInfo keeps track of all parameters it received in an
                # _arglist attribute
                if hasattr(self.holder, '_arglist'):
                    arglist = self.holder._arglist
                else:
                    arglist = [(self.holder.args, self.holder.kwargs)]
                for args, kwargs in arglist:
                    for expr in args:
                        self.expr = expr
                        if isinstance(expr, py.builtin._basestring):
                            # String conditions are eval'd in the module
                            # global context (results are cached per-config).
                            result = cached_eval(self.item.config, expr, d)
                        else:
                            if "reason" not in kwargs:
                                # XXX better be checked at collection time
                                msg = "you need to specify reason=STRING " \
                                      "when using booleans as conditions."
                                pytest.fail(msg)
                            result = bool(expr)
                        if result:
                            # First true condition wins; remember its reason.
                            self.result = True
                            self.reason = kwargs.get('reason', None)
                            self.expr = expr
                            return self.result
            else:
                # Marker without arguments counts as unconditionally true.
                self.result = True
        return getattr(self, 'result', False)

    def get(self, attr, default=None):
        # Look up a keyword argument given to the marker.
        return self.holder.kwargs.get(attr, default)

    def getexplanation(self):
        """Return the reason string, or a description of the condition."""
        expl = getattr(self, 'reason', None) or self.get('reason', None)
        if not expl:
            if not hasattr(self, 'expr'):
                return ""
            else:
                return "condition: " + str(self.expr)
        return expl
@pytest.hookimpl(tryfirst=True)
def pytest_runtest_setup(item):
    # Check if skip or skipif are specified as pytest marks
    # (pytest.skip() raises, so a matching condition aborts setup here).

    skipif_info = item.keywords.get('skipif')
    if isinstance(skipif_info, (MarkInfo, MarkDecorator)):
        eval_skipif = MarkEvaluator(item, 'skipif')
        if eval_skipif.istrue():
            item._evalskip = eval_skipif
            pytest.skip(eval_skipif.getexplanation())

    skip_info = item.keywords.get('skip')
    if isinstance(skip_info, (MarkInfo, MarkDecorator)):
        item._evalskip = True
        # Reason may be given as a keyword, a positional, or not at all.
        if 'reason' in skip_info.kwargs:
            pytest.skip(skip_info.kwargs['reason'])
        elif skip_info.args:
            pytest.skip(skip_info.args[0])
        else:
            pytest.skip("unconditional skip")

    # Stash the xfail evaluator for later hooks, then honor xfail(run=False).
    item._evalxfail = MarkEvaluator(item, 'xfail')
    check_xfail_no_run(item)
@pytest.mark.hookwrapper
def pytest_pyfunc_call(pyfuncitem):
    # Re-check xfail(run=False) right before the call (markers may have
    # been added after setup), then let the test run via `yield`.
    check_xfail_no_run(pyfuncitem)
    outcome = yield
    passed = outcome.excinfo is None
    if passed:
        # A passing test under xfail(strict=True) must be turned into a fail.
        check_strict_xfail(pyfuncitem)
def check_xfail_no_run(item):
    """check xfail(run=False)"""
    # --runxfail disables all xfail handling, including run=False.
    if item.config.option.runxfail:
        return
    evalxfail = item._evalxfail
    if evalxfail.istrue() and not evalxfail.get('run', True):
        pytest.xfail("[NOTRUN] " + evalxfail.getexplanation())
def check_strict_xfail(pyfuncitem):
    """check xfail(strict=True) for the given PASSING test"""
    evalxfail = pyfuncitem._evalxfail
    if not evalxfail.istrue():
        return
    # The marker's own strict= wins; otherwise fall back to the ini default.
    strict_default = pyfuncitem.config.getini('xfail_strict')
    if not evalxfail.get('strict', strict_default):
        return
    # Drop the evaluator so the report hook treats this as a plain failure.
    del pyfuncitem._evalxfail
    pytest.fail('[XPASS(strict)] ' + evalxfail.getexplanation(),
                pytrace=False)
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    # Post-process the test report to encode xfail/xpass/skip outcomes.
    # Branch order matters: unittest's unexpected success, then --runxfail,
    # then explicit pytest.xfail(), then marker-driven xfail, then skips.
    outcome = yield
    rep = outcome.get_result()
    evalxfail = getattr(item, '_evalxfail', None)
    evalskip = getattr(item, '_evalskip', None)
    # unittest special case, see setting of _unexpectedsuccess
    if hasattr(item, '_unexpectedsuccess') and rep.when == "call":
        # we need to translate into how pytest encodes xpass
        rep.wasxfail = "reason: " + repr(item._unexpectedsuccess)
        rep.outcome = "failed"
    elif item.config.option.runxfail:
        pass   # don't interfere
    elif call.excinfo and call.excinfo.errisinstance(pytest.xfail.Exception):
        # imperative pytest.xfail() call -> report as xfail (skipped).
        rep.wasxfail = "reason: " + call.excinfo.value.msg
        rep.outcome = "skipped"
    elif evalxfail and not rep.skipped and evalxfail.wasvalid() and \
        evalxfail.istrue():
        if call.excinfo:
            # Failure with raises= restriction: a non-matching exception
            # is a real failure, not an expected one.
            if evalxfail.invalidraise(call.excinfo.value):
                rep.outcome = "failed"
            else:
                rep.outcome = "skipped"
                rep.wasxfail = evalxfail.getexplanation()
        elif call.when == "call":
            rep.outcome = "failed"  # xpass outcome
            rep.wasxfail = evalxfail.getexplanation()
    elif evalskip is not None and rep.skipped and type(rep.longrepr) is tuple:
        # skipped by mark.skipif; change the location of the failure
        # to point to the item definition, otherwise it will display
        # the location of where the skip exception was raised within pytest
        filename, line, reason = rep.longrepr
        filename, line = item.location[:2]
        rep.longrepr = filename, line, reason
# called by terminalreporter progress reporting
def pytest_report_teststatus(report):
    """Map xfail-annotated reports to their category/shortletter/verbose word."""
    if not hasattr(report, "wasxfail"):
        return None  # not an xfail-related report; let others decide
    if report.skipped:
        return "xfailed", "x", "xfail"
    if report.failed:
        return "xpassed", "X", ("XPASS", {'yellow': True})
    return None
# called by the terminalreporter instance/plugin
def pytest_terminal_summary(terminalreporter):
    """Emit the "short test summary info" section for the -r report chars."""
    tr = terminalreporter
    if not tr.reportchars:
        return

    lines = []
    # One renderer per -r character; upper/lower case pairs share renderers.
    renderers = {
        "x": lambda: show_xfailed(terminalreporter, lines),
        "X": lambda: show_xpassed(terminalreporter, lines),
        "f": lambda: show_simple(terminalreporter, lines, 'failed', "FAIL %s"),
        "F": lambda: show_simple(terminalreporter, lines, 'failed', "FAIL %s"),
        "s": lambda: show_skipped(terminalreporter, lines),
        "S": lambda: show_skipped(terminalreporter, lines),
        "E": lambda: show_simple(terminalreporter, lines, 'error', "ERROR %s"),
        "p": lambda: show_simple(terminalreporter, lines, 'passed', "PASSED %s"),
    }
    for char in tr.reportchars:
        render = renderers.get(char)
        if render is not None:
            render()

    if lines:
        tr._tw.sep("=", "short test summary info")
        for line in lines:
            tr._tw.line(line)
def show_simple(terminalreporter, lines, stat, format):
    """Append one ``format % nodeid`` line for each report stored under *stat*."""
    reports = terminalreporter.stats.get(stat)
    if not reports:
        return
    for rep in reports:
        pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
        lines.append(format % (pos,))
def show_xfailed(terminalreporter, lines):
    """Append an "XFAIL <nodeid>" line, plus an indented reason, per report."""
    xfailed = terminalreporter.stats.get("xfailed")
    if not xfailed:
        return
    for rep in xfailed:
        pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
        lines.append("XFAIL %s" % (pos,))
        reason = rep.wasxfail
        if reason:
            lines.append("  " + str(reason))
def show_xpassed(terminalreporter, lines):
    """Append an "XPASS <nodeid> <reason>" line per unexpectedly-passing report."""
    xpassed = terminalreporter.stats.get("xpassed")
    if not xpassed:
        return
    for rep in xpassed:
        pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
        reason = rep.wasxfail
        lines.append("XPASS %s %s" % (pos, reason))
def cached_eval(config, expr, d):
    """Evaluate the expression string *expr* in namespace *d*.

    Results are memoized on the config object keyed by the expression text,
    so identical marker expressions are compiled and evaluated only once.
    """
    if not hasattr(config, '_evalcache'):
        config._evalcache = {}
    cache = config._evalcache
    if expr in cache:
        return cache[expr]
    import _pytest._code
    code = _pytest._code.compile(expr, mode="eval")
    result = cache[expr] = eval(code, d)
    return result
def folded_skips(skipped):
    """Collapse skip reports sharing (fspath, lineno, reason) into counted tuples.

    Returns a list of ``(count, fspath, lineno, reason)`` tuples, in first-seen
    order of the distinct keys.
    """
    counts = {}
    for report in skipped:
        key = report.longrepr
        assert len(key) == 3, (report, key)
        counts[key] = counts.get(key, 0) + 1
    return [(count,) + key for key, count in counts.items()]
def show_skipped(terminalreporter, lines):
    """Append 'SKIP [count] file:line: reason' lines for every folded skip group."""
    skipped = terminalreporter.stats.get('skipped', [])
    if not skipped:
        return
    for num, fspath, lineno, reason in folded_skips(skipped):
        # drop the redundant prefix added by the Skipped exception repr
        if reason.startswith("Skipped: "):
            reason = reason[9:]
        lines.append("SKIP [%d] %s:%d: %s" % (num, fspath, lineno, reason))
| mpl-2.0 |
mmnelemane/nova | nova/api/openstack/compute/legacy_v2/contrib/floating_ip_pools.py | 79 | 2131 | # Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import extensions
from nova import network
authorize = extensions.extension_authorizer('compute', 'floating_ip_pools')
def _translate_floating_ip_view(pool_name):
return {
'name': pool_name,
}
def _translate_floating_ip_pools_view(pools):
    """Wrap an iterable of pool names in the API response structure."""
    return {'floating_ip_pools': [{'name': pool_name} for pool_name in pools]}
class FloatingIPPoolsController(object):
    """The Floating IP Pool API controller for the OpenStack API."""

    def __init__(self):
        super(FloatingIPPoolsController, self).__init__()
        self.network_api = network.API()

    def index(self, req):
        """Return the list of floating IP pools visible to the caller."""
        ctxt = req.environ['nova.context']
        authorize(ctxt)
        return _translate_floating_ip_pools_view(
            self.network_api.get_floating_ip_pools(ctxt))
class Floating_ip_pools(extensions.ExtensionDescriptor):
    """Floating IPs support."""

    name = "FloatingIpPools"
    alias = "os-floating-ip-pools"
    namespace = ("http://docs.openstack.org/compute/ext/"
                 "floating_ip_pools/api/v1.1")
    updated = "2012-01-04T00:00:00Z"

    def get_resources(self):
        """Expose the read-only os-floating-ip-pools resource collection."""
        resource = extensions.ResourceExtension('os-floating-ip-pools',
                                                FloatingIPPoolsController(),
                                                member_actions={})
        return [resource]
EduardoMolina/SU2 | SU2_PY/package_tests.py | 1 | 9332 | #!/usr/bin/env python
## \file package_tests.py
# \brief _____________.
# \author T. Lukaczyk
# \version 7.0.3 "Blackbird"
#
# SU2 Project Website: https://su2code.github.io
#
# The SU2 Project is maintained by the SU2 Foundation
# (http://su2foundation.org)
#
# Copyright 2012-2020, SU2 Contributors (cf. AUTHORS.md)
#
# SU2 is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# SU2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with SU2. If not, see <http://www.gnu.org/licenses/>.
# make print(*args) function available in PY2.6+, does'nt work on PY < 2.6
from __future__ import print_function
import os, sys, copy
# The SU2 python package ships with the binaries in $SU2_RUN; make it
# importable before 'import SU2' below.
sys.path.append(os.environ['SU2_RUN'])
import SU2
from collections import OrderedDict
# todo:
# verify command line interface
# commenting
# verify optimization, gradients, flow solutions
# verbosity
# plotting
# verbose redirection
# pyopt optimizers
# needed config options
# OPT_CONSTRAINT
# OPT_OBJECTIVE
# CONSOLE
# OUTPUT_WEIGHT
# FINDIFF_STEP
# DOT_FINDIFF_STEP
# GRADIENT_METHOD= FINITE_DIFFERENCING, CONTINUOUS_ADJOINT, DISCRETE_ADJOINT
# ADAPTATION= DIRECT, ADJOINT
def main():
    """Run the currently enabled smoke tests.

    The commented-out entries are kept on purpose: they are enabled one at a
    time by hand when exercising a specific layer of the SU2 python package.
    """
    #io0() # working
    #io1()
    #level0() # working
    #level1() # working
    #level2() # working
    #level3() # working
    #level4() # working
    #level5() # working
    mesh0()
    print('DONE!')
def io0():
    """Exercise SU2.io.Config: load, attribute access, dump, deepcopy, diff."""
    folder='test_io0'; pull='config_NACA0012.cfg'; link='mesh_NACA0012.su2'
    with SU2.io.redirect_folder(folder,pull,link):
        config_name = 'config_NACA0012.cfg'
        config = SU2.io.Config(filename=config_name)
        print(config)
        # attribute-style and key-style access are equivalent on Config
        config.ADAPT_CYCLES
        config['ADAPT_CYCLES']
        config.dump('out.cfg')
        konfig = copy.deepcopy(config)
        konfig['TASKS'] = ['TEST']
        konfig['NUMBER_PART'] = 0
        # diff() reports only the keys whose values differ between configs
        config_diff = config.diff(konfig)
        print(config_diff)
        wait = 0
def io1():
    """Exercise the MathProblem config-option wrapper (construction + assignment)."""
    option = SU2.io.config.MathProblem()
    # plain-string assignment rebinds the name, replacing the option object
    option = 'DIRECT'
    wait = 0
def level0():
    """Lowest layer: invoke the CFD solver directly via SU2.run.CFD."""
    folder='test_level0'; pull='config_NACA0012.cfg'; link='mesh_NACA0012.su2'
    with SU2.io.redirect_folder(folder,pull,link):

        # Setup
        config_name = 'config_NACA0012.cfg'
        config = SU2.io.Config(config_name)
        config.EXT_ITER = 9
        config.NUMBER_PART = 2

        SU2.run.CFD(config)
def level1():
    """Run the deform / direct / adjoint / projection chain via SU2.run helpers,
    accumulating results into a State and round-tripping it through pickle."""
    folder='test_level1'; pull='config_NACA0012.cfg'; link='mesh_NACA0012.su2'
    with SU2.io.redirect_folder(folder,pull,link):

        # Setup
        config_name = 'config_NACA0012.cfg'
        config = SU2.io.Config(config_name)
        config['NUMBER_PART'] = 2
        config['EXT_ITER'] = 9
        state = SU2.io.State()

        # Deformation
        dv_new = [0.002]*38
        info = SU2.run.deform(config,dv_new)
        state.update(info)

        # Direct Solution
        info = SU2.run.direct(config)
        state.update(info)
        SU2.io.restart2solution(config,state)

        # Adjoint Solution
        info = SU2.run.adjoint(config)
        state.update(info)
        SU2.io.restart2solution(config,state)

        # Gradient Projection
        info = SU2.run.projection(config)
        state.update(info)

        print(state)

        # state and config objects must survive a pickle round-trip
        SU2.io.save_data('state.pkl',state)
        data = SU2.io.load_data('state.pkl')
        SU2.io.save_data('config.pkl',config)
        data = SU2.io.load_data('config.pkl')

        wait = 0
def level2():
    """Evaluate functions/gradients through the SU2.eval caching layer; the
    inline comments record which calls are expected to reuse cached results."""
    folder='test_level2'; pull='config_NACA0012.cfg'; link='mesh_NACA0012.su2'
    with SU2.io.redirect_folder(folder,pull,link):

        # Setup
        config_name = 'config_NACA0012.cfg'
        config = SU2.io.Config(config_name)
        config['NUMBER_PART'] = 2
        config['EXT_ITER'] = 9
        dv_new = [0.0]*38
        #dv_new[10] = 0.05
        config.unpack_dvs(dv_new)
        state = SU2.io.State()

        #with SU2.io.redirect.folder(folder='JOB_001',link='mesh_NACA0012.su2'):
        #    grad = SU2.eval.grad( 'DRAG', 'FINDIFF', config, state )

        with SU2.io.redirect_folder(folder='JOB_001',link='mesh_NACA0012.su2'):
            func = SU2.eval.func( 'LIFT', config, state )
            grads = SU2.eval.grad( 'LIFT', 'CONTINUOUS_ADJOINT', config, state )

        with SU2.io.redirect_folder(folder='JOB_001',link='mesh_NACA0012.su2'):
            func = SU2.eval.func( 'DRAG', config, state ) # will not run direct
            grads = SU2.eval.grad( 'LIFT', 'CONTINUOUS_ADJOINT', config, state ) # will not run adjoint
            grads = SU2.eval.grad( 'DRAG', 'CONTINUOUS_ADJOINT', config, state ) # will run adjoint

        wait = 0
def level3():
    """Drive a SU2.eval.Design object: objective, gradient and constraint
    evaluations for one design-variable vector, then pickle round-trip."""
    folder='test_level3'; pull='config_NACA0012.cfg'; link='mesh_NACA0012.su2'
    with SU2.io.redirect_folder(folder,pull,link):

        # Setup
        config_name = 'config_NACA0012.cfg'
        config = SU2.io.Config(config_name)
        config['NUMBER_PART'] = 2
        config['EXT_ITER'] = 9

        # initialize design state
        state = SU2.io.State()
        state.find_files(config)

        # start design
        design = SU2.eval.Design(config,state)

        # run design with dv change
        dv_new = [0.0]*38
        vals = design.obj_f(dv_new)
        vals = design.obj_df(dv_new)
        vals = design.con_ceq(dv_new)
        vals = design.con_dceq(dv_new)
        vals = design.con_cieq(dv_new)
        vals = design.con_dcieq(dv_new)
        vals = design.func('LIFT')
        vals = design.grad('LIFT','CONTINUOUS_ADJOINT')

        # design objects must survive a pickle round-trip
        SU2.io.save_data('design.pkl',design)
        data = SU2.io.load_data('design.pkl')

        wait = 0
def level4():
    """Drive a SU2.opt.Project: repeated objective evaluations for several DV
    vectors, exercising the project's solution re-use, then pickle round-trip."""
    folder='test_level4'; pull='config_NACA0012.cfg'; link='mesh_NACA0012.su2'
    with SU2.io.redirect_folder(folder,pull,link):

        # Setup
        config_name = 'config_NACA0012.cfg'
        config = SU2.io.Config(config_name)
        config['NUMBER_PART'] = 2
        config['EXT_ITER'] = 9
        config.CONSOLE = 'QUIET'

        # initialize design state
        state = SU2.io.State()
        state.find_files(config)

        # initialize project
        project = SU2.opt.Project(config,state)

        # run project with dv changes
        dv_new = [0.0]*38
        vals = project.obj_f(dv_new)
        vals = project.obj_df(dv_new)

        dv_new = [-0.005]*38
        vals = project.obj_f(dv_new)

        dv_new = [0.0]*38
        dv_new[9] = -0.02
        vals = project.obj_f(dv_new)

        dv_new = [0.005]*38
        vals = project.obj_f(dv_new) # will not rerun solutions

        SU2.io.save_data('project.pkl',project)
        data = SU2.io.load_data('project.pkl')
        data = project.data

        wait = 0

        print("Done!")
def level5():
    """Full optimization test: SLSQP drag minimization on the NACA0012 with
    lift and pitching-moment inequality constraints.

    Runs the external SU2 toolchain inside a scratch folder; produces no
    return value.
    """
    folder='test_level5'; pull='config_NACA0012.cfg'; link='mesh_NACA0012.su2'
    with SU2.io.redirect_folder(folder,pull,link):

        # Setup
        config_name = 'config_NACA0012.cfg'
        config = SU2.io.Config(config_name)
        config['NUMBER_PART'] = 2
        config['EXT_ITER'] = 9
        config['CONSOLE'] = 'CONCISE'

        # set optimization problem
        obj = {}
        obj['DRAG'] = {'SCALE':1.e-2}

        cons = {}
        cons['EQUALITY'] = {}
        cons['INEQUALITY'] = {}
        cons['INEQUALITY']['LIFT'] = {'SIGN':'>','VALUE':0.328188,'SCALE':1e-1}
        cons['INEQUALITY']['MOMENT_Z'] = {'SIGN':'>','VALUE':0.034068,'SCALE':1e-2}

        def_dv = config.DEFINITION_DV
        # Bugfix: the total number of design variables is the sum of the
        # per-group sizes. The previous sum(def_dv['KIND']) summed a list of
        # kind-name strings and raised TypeError.
        n_dv = sum(def_dv['SIZE'])
        def_dv['SCALE'] = [1.e0]*n_dv
        config.OPT_OBJECTIVE = obj
        config.OPT_CONSTRAINT = cons

        # initialize design state
        state = SU2.io.State()
        state.find_files(config)

        # initialize project
        project = SU2.opt.Project(config,state)

        # optimization setup
        x0 = [0.0]*n_dv
        xb = [] #[[-0.02,0.02]]*n_dv
        its = 20

        # optimize
        SU2.opt.SLSQP(project,x0,xb,its)

        wait = 0
def mesh0():
    """Run CFD, convert the restart file to a solution file, then invoke the
    mesh adaptation tool (SU2.run.MSH)."""
    folder='mesh_level0'; pull='config_NACA0012.cfg'; link='mesh_NACA0012.su2'
    with SU2.io.redirect_folder(folder,pull,link):

        # Setup
        config_name = 'config_NACA0012.cfg'
        config = SU2.io.Config(config_name)
        config.EXT_ITER = 9
        config.NUMBER_PART = 2

        SU2.run.CFD(config)
        SU2.io.restart2solution(config)
        SU2.run.MSH(config)
# Execute the smoke-test driver when invoked as a script.
if __name__ == '__main__':
    main()
| lgpl-2.1 |
SCSSG/Odoo-SCS | addons/edi/models/edi.py | 277 | 31944 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011-2014 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
import hashlib
import simplejson as json
import logging
import re
import time
import urllib2
import openerp
import openerp.release as release
from openerp.osv import osv, fields
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval as eval
_logger = logging.getLogger(__name__)

# Full external IDs look like ``module(:db_uuid)?.identifier``; the db_uuid
# group is optional for purely local identifiers.
EXTERNAL_ID_PATTERN = re.compile(r'^([^.:]+)(?::([^.]+))?\.(\S+)$')
# %s placeholders: (base URL, database name, access token)
EDI_VIEW_WEB_URL = '%s/edi/view?db=%s&token=%s'
EDI_PROTOCOL_VERSION = 1 # arbitrary ever-increasing version number
EDI_GENERATOR = 'Odoo' + release.major_version
EDI_GENERATOR_VERSION = release.version_info
def split_external_id(ext_id):
    """Split a full EDI external ID into its components.

    ``ext_id`` has the form ``module(:db_uuid)?.identifier``; returns a dict
    with keys ``module``, ``db_uuid`` (None when absent), ``id`` and ``full``.
    Raises AssertionError for malformed IDs.
    """
    match = EXTERNAL_ID_PATTERN.match(ext_id)
    assert match, \
        _("'%s' is an invalid external ID") % (ext_id)
    module, db_uuid, identifier = match.groups()
    return {'module': module,
            'db_uuid': db_uuid,
            'id': identifier,
            'full': match.group(0)}
def safe_unique_id(database_id, model, record_id):
    """Generate a unique string to represent a (database_uuid,model,record_id) pair
    without being too long, and with a very low probability of collisions.
    """
    # Python 2 only: hashlib/chr/ord operate on byte strings here.
    msg = "%s-%s-%s-%s" % (time.time(), database_id, model, record_id)
    digest = hashlib.sha1(msg).digest()
    # fold the sha1 20 bytes digest to 9 bytes
    digest = ''.join(chr(ord(x) ^ ord(y)) for (x,y) in zip(digest[:9], digest[9:-2]))
    # b64-encode the 9-bytes folded digest to a reasonable 12 chars ASCII ID
    digest = base64.urlsafe_b64encode(digest)
    return '%s-%s' % (model.replace('.','_'), digest)
def last_update_for(record):
    """Return the record's last modification timestamp, or False.

    Only models that keep log-access columns expose it; the creation date is
    used as a fallback when the record was never written after creation.
    """
    if not record._log_access:
        return False
    metadata = record.get_metadata()[0]
    return metadata.get('write_date') or metadata.get('create_date') or False
class edi(osv.AbstractModel):
    # Stateless EDI service model: (de)serializes EDI documents as JSON and
    # dispatches imports to the target models' edi_import() implementations.
    _name = 'edi.edi'
    _description = 'EDI Subsystem'
    def new_edi_token(self, cr, uid, record):
        """Return a new, random unique token to identify this model record,
        and to be used as token when exporting it as an EDI document.
        :param browse_record record: model record for which a token is needed
        """
        db_uuid = self.pool.get('ir.config_parameter').get_param(cr, uid, 'database.uuid')
        # entropy sources: wall-clock time + per-database uuid + model + record id
        edi_token = hashlib.sha256('%s-%s-%s-%s' % (time.time(), db_uuid, record._name, record.id)).hexdigest()
        return edi_token
    def serialize(self, edi_documents):
        """Serialize the given EDI document structures (Python dicts holding EDI data),
        using JSON serialization.
        :param [dict] edi_documents: list of EDI document structures to serialize
        :return: UTF-8 encoded string containing the serialized document
        """
        serialized_list = json.dumps(edi_documents)
        return serialized_list
    def generate_edi(self, cr, uid, records, context=None):
        """Generates a final EDI document containing the EDI serialization
        of the given records, which should all be instances of a Model
        that has the :meth:`~.edi` mixin. The document is not saved in the
        database.
        :param list(browse_record) records: records to export as EDI
        :return: UTF-8 encoded string containing the serialized records
        """
        edi_list = []
        for record in records:
            # records may belong to different models; each model exports itself
            record_model = record._model
            edi_list += record_model.edi_export(cr, uid, [record], context=context)
        return self.serialize(edi_list)
    def load_edi(self, cr, uid, edi_documents, context=None):
        """Import the given EDI document structures into the system, using
        :meth:`~.import_edi`.
        :param edi_documents: list of Python dicts containing the deserialized
                              version of EDI documents
        :return: list of (model, id, action) tuple containing the model and database ID
                 of all records that were imported in the system, plus a suggested
                 action definition dict for displaying each document.
        """
        ir_module = self.pool.get('ir.module.module')
        res = []
        for edi_document in edi_documents:
            module = edi_document.get('__import_module') or edi_document.get('__module')
            assert module, 'a `__module` or `__import_module` attribute is required in each EDI document.'
            # the exporting instance may rely on an addon not installed locally
            if module != 'base' and not ir_module.search(cr, uid, [('name','=',module),('state','=','installed')]):
                raise osv.except_osv(_('Missing Application.'),
                            _("The document you are trying to import requires the Odoo `%s` application. "
                              "You can install it by connecting as the administrator and opening the configuration assistant.")%(module,))
            model = edi_document.get('__import_model') or edi_document.get('__model')
            assert model, 'a `__model` or `__import_model` attribute is required in each EDI document.'
            assert model in self.pool, 'model `%s` cannot be found, despite module `%s` being available - '\
                                       'this EDI document seems invalid or unsupported.' % (model,module)
            model_obj = self.pool[model]
            record_id = model_obj.edi_import(cr, uid, edi_document, context=context)
            record_action = model_obj._edi_record_display_action(cr, uid, record_id, context=context)
            res.append((model, record_id, record_action))
        return res
    def deserialize(self, edi_documents_string):
        """Return deserialized version of the given EDI Document string.
        :param str|unicode edi_documents_string: UTF-8 string (or unicode) containing
                                                 JSON-serialized EDI document(s)
        :return: Python object representing the EDI document(s) (usually a list of dicts)
        """
        return json.loads(edi_documents_string)
    def import_edi(self, cr, uid, edi_document=None, edi_url=None, context=None):
        """Import a JSON serialized EDI Document string into the system, first retrieving it
        from the given ``edi_url`` if provided.
        :param str|unicode edi: UTF-8 string or unicode containing JSON-serialized
                                EDI Document to import. Must not be provided if
                                ``edi_url`` is given.
        :param str|unicode edi_url: URL where the EDI document (same format as ``edi``)
                                    may be retrieved, without authentication.
        """
        if edi_url:
            assert not edi_document, 'edi must not be provided if edi_url is given.'
            # NOTE(review): the URL is fetched without authentication, by design
            edi_document = urllib2.urlopen(edi_url).read()
        assert edi_document, 'EDI Document is empty!'
        edi_documents = self.deserialize(edi_document)
        return self.load_edi(cr, uid, edi_documents, context=context)
class EDIMixin(object):
"""Mixin class for Model objects that want be exposed as EDI documents.
Classes that inherit from this mixin class should override the
``edi_import()`` and ``edi_export()`` methods to implement their
specific behavior, based on the primitives provided by this mixin."""
    def _edi_requires_attributes(self, attributes, edi):
        # Validate that every listed attribute is present and truthy in the EDI
        # dict, raising AssertionError naming the document's model otherwise.
        model_name = edi.get('__imported_model') or edi.get('__model') or self._name
        for attribute in attributes:
            assert edi.get(attribute),\
                 'Attribute `%s` is required in %s EDI documents.' % (attribute, model_name)
    # private method, not RPC-exposed as it creates ir.model.data entries as
    # SUPERUSER based on its parameters
    def _edi_external_id(self, cr, uid, record, existing_id=None, existing_module=None,
                        context=None):
        """Generate/Retrieve unique external ID for ``record``.
        Each EDI record and each relationship attribute in it is identified by a
        unique external ID, which includes the database's UUID, as a way to
        refer to any record within any Odoo instance, without conflict.
        For Odoo records that have an existing "External ID" (i.e. an entry in
        ir.model.data), the EDI unique identifier for this record will be made of
        "%s:%s:%s" % (module, database UUID, ir.model.data ID). The database's
        UUID MUST NOT contain a colon characters (this is guaranteed by the
        UUID algorithm).
        For records that have no existing ir.model.data entry, a new one will be
        created during the EDI export. It is recommended that the generated external ID
        contains a readable reference to the record model, plus a unique value that
        hides the database ID. If ``existing_id`` is provided (because it came from
        an import), it will be used instead of generating a new one.
        If ``existing_module`` is provided (because it came from
        an import), it will be used instead of using local values.
        :param browse_record record: any browse_record needing an EDI external ID
        :param string existing_id: optional existing external ID value, usually coming
                                   from a just-imported EDI record, to be used instead
                                   of generating a new one
        :param string existing_module: optional existing module name, usually in the
                                       format ``module:db_uuid`` and coming from a
                                       just-imported EDI record, to be used instead
                                       of local values
        :return: the full unique External ID to use for record
        """
        ir_model_data = self.pool.get('ir.model.data')
        db_uuid = self.pool.get('ir.config_parameter').get_param(cr, uid, 'database.uuid')
        ext_id = record.get_external_id()[record.id]
        if not ext_id:
            ext_id = existing_id or safe_unique_id(db_uuid, record._name, record.id)
            # ID is unique cross-db thanks to db_uuid (already included in existing_module)
            module = existing_module or "%s:%s" % (record._original_module, db_uuid)
            _logger.debug("%s: Generating new external ID `%s.%s` for %r.", self._name,
                          module, ext_id, record)
            # created as SUPERUSER on purpose: regular users may not write ir.model.data
            ir_model_data.create(cr, openerp.SUPERUSER_ID,
                                 {'name': ext_id,
                                  'model': record._name,
                                  'module': module,
                                  'res_id': record.id})
        else:
            module, ext_id = ext_id.split('.')
            if not ':' in module:
                # this record was not previously EDI-imported
                if not module == record._original_module:
                    # this could happen for data records defined in a module that depends
                    # on the module that owns the model, e.g. purchase defines
                    # product.pricelist records.
                    _logger.debug('Mismatching module: expected %s, got %s, for %s.',
                                  module, record._original_module, record)
                # ID is unique cross-db thanks to db_uuid
                module = "%s:%s" % (module, db_uuid)
        return '%s.%s' % (module, ext_id)
    def _edi_record_display_action(self, cr, uid, id, context=None):
        """Returns an appropriate action definition dict for displaying
        the record with ID ``rec_id``.
        :param int id: database ID of record to display
        :return: action definition dict
        """
        # generic form-first window action on the record's own model
        return {'type': 'ir.actions.act_window',
                'view_mode': 'form,tree',
                'view_type': 'form',
                'res_model': self._name,
                'res_id': id}
    def edi_metadata(self, cr, uid, records, context=None):
        """Return a list containing the boilerplate EDI structures for
        exporting ``records`` as EDI, including
        the metadata fields
        The metadata fields always include::
            {
               '__model': 'some.model',                # record model
               '__module': 'module',                   # require module
               '__id': 'module:db-uuid:model.id',      # unique global external ID for the record
               '__last_update': '2011-01-01 10:00:00', # last update date in UTC!
               '__version': 1,                         # EDI spec version
               '__generator' : 'Odoo',                 # EDI generator
               '__generator_version' : [6,1,0],        # server version, to check compatibility.
               '__attachments_':
           }
        :param list(browse_record) records: records to export
        :return: list of dicts containing boilerplate EDI metadata for each record,
                 at the corresponding index from ``records``.
        """
        ir_attachment = self.pool.get('ir.attachment')
        results = []
        for record in records:
            ext_id = self._edi_external_id(cr, uid, record, context=context)
            edi_dict = {
                '__id': ext_id,
                '__last_update': last_update_for(record),
                '__model' : record._name,
                '__module' : record._original_module,
                '__version': EDI_PROTOCOL_VERSION,
                '__generator': EDI_GENERATOR,
                '__generator_version': EDI_GENERATOR_VERSION,
            }
            # embed any attachments linked to the record (contents stay base64)
            attachment_ids = ir_attachment.search(cr, uid, [('res_model','=', record._name), ('res_id', '=', record.id)])
            if attachment_ids:
                attachments = []
                for attachment in ir_attachment.browse(cr, uid, attachment_ids, context=context):
                    attachments.append({
                            'name' : attachment.name,
                            'content': attachment.datas, # already base64 encoded!
                            'file_name': attachment.datas_fname,
                    })
                edi_dict.update(__attachments=attachments)
            results.append(edi_dict)
        return results
    def edi_m2o(self, cr, uid, record, context=None):
        """Return a m2o EDI representation for the given record.
        The EDI format for a many2one is::
            ['unique_external_id', 'Document Name']
        """
        edi_ext_id = self._edi_external_id(cr, uid, record, context=context)
        relation_model = record._model
        # name_get() returns [(id, display_name)]; keep only the display name
        name = relation_model.name_get(cr, uid, [record.id], context=context)
        name = name and name[0][1] or False
        return [edi_ext_id, name]
    def edi_o2m(self, cr, uid, records, edi_struct=None, context=None):
        """Return a list representing a O2M EDI relationship containing
        all the given records, according to the given ``edi_struct``.
        This is basically the same as exporting all the record using
        :meth:`~.edi_export` with the given ``edi_struct``, and wrapping
        the results in a list.
        Example::
           [                                # O2M fields would be a list of dicts, with their
             { '__id': 'module:db-uuid.id', # own __id.
               '__last_update': 'iso date', # update date
               'name': 'some name',
               #...
             },
             # ...
           ],
        """
        result = []
        for record in records:
            # each child is exported by its own model, yielding one dict per record
            result += record._model.edi_export(cr, uid, [record], edi_struct=edi_struct, context=context)
        return result
    def edi_m2m(self, cr, uid, records, context=None):
        """Return a list representing a M2M EDI relationship directed towards
        all the given records.
        This is basically the same as exporting all the record using
        :meth:`~.edi_m2o` and wrapping the results in a list.
        Example::
            # M2M fields are exported as a list of pairs, like a list of M2O values
            [
                ['module:db-uuid.id1', 'Task 01: bla bla'],
                ['module:db-uuid.id2', 'Task 02: bla bla']
            ]
        """
        return [self.edi_m2o(cr, uid, r, context=context) for r in records]
    def edi_export(self, cr, uid, records, edi_struct=None, context=None):
        """Returns a list of dicts representing EDI documents containing the
        records, and matching the given ``edi_struct``, if provided.
        :param edi_struct: if provided, edi_struct should be a dictionary
                           with a skeleton of the fields to export.
                           Basic fields can have any key as value, but o2m
                           values should have a sample skeleton dict as value,
                           to act like a recursive export.
                           For example, for a res.partner record::
                              edi_struct: {
                                   'name': True,
                                   'company_id': True,
                                   'address': {
                                       'name': True,
                                       'street': True,
                                   }
                              }
                           Any field not specified in the edi_struct will not
                           be included in the exported data. Fields with no
                           value (False) will be omitted in the EDI struct.
                           If edi_struct is omitted, no fields will be exported
        """
        if edi_struct is None:
            edi_struct = {}
        fields_to_export = edi_struct.keys()
        results = []
        for record in records:
            # start from the metadata boilerplate, then add the requested fields
            edi_dict = self.edi_metadata(cr, uid, [record], context=context)[0]
            for field_name in fields_to_export:
                field = self._fields[field_name]
                value = getattr(record, field_name)
                # skip empty values, but keep the meaningful falsy '' and 0
                if not value and value not in ('', 0):
                    continue
                elif field.type == 'many2one':
                    value = self.edi_m2o(cr, uid, value, context=context)
                elif field.type == 'many2many':
                    value = self.edi_m2m(cr, uid, value, context=context)
                elif field.type == 'one2many':
                    value = self.edi_o2m(cr, uid, value, edi_struct=edi_struct.get(field_name, {}), context=context)
                edi_dict[field_name] = value
            results.append(edi_dict)
        return results
    def _edi_get_object_by_name(self, cr, uid, name, model_name, context=None):
        """Return the single record of *model_name* whose display name matches
        *name* exactly, or False when there is no (or an ambiguous) match."""
        model = self.pool[model_name]
        search_results = model.name_search(cr, uid, name, operator='=', context=context)
        # only trust an unambiguous match; 0 or 2+ results yield False
        if len(search_results) == 1:
            return model.browse(cr, uid, search_results[0][0], context=context)
        return False
    def _edi_generate_report_attachment(self, cr, uid, record, context=None):
        """Utility method to generate the first PDF-type report declared for the
        current model with ``usage`` attribute set to ``default``.
        This must be called explicitly by models that need it, usually
        at the beginning of ``edi_export``, before the call to ``super()``."""
        ir_actions_report = self.pool.get('ir.actions.report.xml')
        matching_reports = ir_actions_report.search(cr, uid, [('model','=',self._name),
                                                              ('report_type','=','pdf'),
                                                              ('usage','=','default')])
        if matching_reports:
            report = ir_actions_report.browse(cr, uid, matching_reports[0])
            result, format = openerp.report.render_report(cr, uid, [record.id], report.report_name, {'model': self._name}, context=context)
            # 'eval' here is openerp's safe_eval (aliased at the module imports)
            eval_context = {'time': time, 'object': record}
            if not report.attachment or not eval(report.attachment, eval_context):
                # no auto-saving of report as attachment, need to do it manually
                result = base64.b64encode(result)
                file_name = record.name_get()[0][1]
                # sanitize the display name into a safe file name
                file_name = re.sub(r'[^a-zA-Z0-9_-]', '_', file_name)
                file_name += ".pdf"
                self.pool.get('ir.attachment').create(cr, uid,
                                                      {
                                                       'name': file_name,
                                                       'datas': result,
                                                       'datas_fname': file_name,
                                                       'res_model': self._name,
                                                       'res_id': record.id,
                                                       'type': 'binary'
                                                      },
                                                      context=context)
def _edi_import_attachments(self, cr, uid, record_id, edi, context=None):
ir_attachment = self.pool.get('ir.attachment')
for attachment in edi.get('__attachments', []):
# check attachment data is non-empty and valid
file_data = None
try:
file_data = base64.b64decode(attachment.get('content'))
except TypeError:
pass
assert file_data, 'Incorrect/Missing attachment file content.'
assert attachment.get('name'), 'Incorrect/Missing attachment name.'
assert attachment.get('file_name'), 'Incorrect/Missing attachment file name.'
assert attachment.get('file_name'), 'Incorrect/Missing attachment file name.'
ir_attachment.create(cr, uid, {'name': attachment['name'],
'datas_fname': attachment['file_name'],
'res_model': self._name,
'res_id': record_id,
# should be pure 7bit ASCII
'datas': str(attachment['content']),
}, context=context)
    def _edi_get_object_by_external_id(self, cr, uid, external_id, model, context=None):
        """Returns browse_record representing object identified by the model and external_id,
        or None if no record was found with this external id.
        :param external_id: fully qualified external id, in the EDI form
                            ``module:db_uuid:identifier``.
        :param model: model name the record belongs to.
        """
        ir_model_data = self.pool.get('ir.model.data')
        # external_id is expected to have the form: ``module:db_uuid:model.random_name``
        ext_id_members = split_external_id(external_id)
        db_uuid = self.pool.get('ir.config_parameter').get_param(cr, uid, 'database.uuid')
        module = ext_id_members['module']
        ext_id = ext_id_members['id']
        modules = []
        ext_db_uuid = ext_id_members['db_uuid']
        if ext_db_uuid:
            modules.append('%s:%s' % (module, ext_id_members['db_uuid']))
        if ext_db_uuid is None or ext_db_uuid == db_uuid:
            # local records may also be registered without the db_uuid
            modules.append(module)
        data_ids = ir_model_data.search(cr, uid, [('model','=',model),
                                                  ('name','=',ext_id),
                                                  ('module','in',modules)])
        if data_ids:
            model = self.pool[model]
            data = ir_model_data.browse(cr, uid, data_ids[0], context=context)
            if model.exists(cr, uid, [data.res_id]):
                return model.browse(cr, uid, data.res_id, context=context)
            # stale external-id, cleanup to allow re-import, as the corresponding record is gone
            # (unlink runs as uid 1 because regular users may not delete ir.model.data)
            ir_model_data.unlink(cr, 1, [data_ids[0]])
    def edi_import_relation(self, cr, uid, model, value, external_id, context=None):
        """Imports a M2O/M2M relation EDI specification ``[external_id,value]`` for the
        given model, returning the corresponding database ID:
        * First, checks if the ``external_id`` is already known, in which case the corresponding
          database ID is directly returned, without doing anything else;
        * If the ``external_id`` is unknown, attempts to locate an existing record
          with the same ``value`` via name_search(). If found, the given external_id will
          be assigned to this local record (in addition to any existing one)
        * If previous steps gave no result, create a new record with the given
          value in the target model, assign it the given external_id, and return
          the new database ID
        :param str value: display name of the record to import
        :param str external_id: fully-qualified external ID of the record
        :return: database id of newly-imported or pre-existing record
        """
        _logger.debug("%s: Importing EDI relationship [%r,%r]", model, external_id, value)
        target = self._edi_get_object_by_external_id(cr, uid, external_id, model, context=context)
        need_new_ext_id = False
        if not target:
            _logger.debug("%s: Importing EDI relationship [%r,%r] - ID not found, trying name_get.",
                          self._name, external_id, value)
            target = self._edi_get_object_by_name(cr, uid, value, model, context=context)
            need_new_ext_id = True
        if not target:
            _logger.debug("%s: Importing EDI relationship [%r,%r] - name not found, creating it.",
                          self._name, external_id, value)
            # also need_new_ext_id here, but already been set above
            model = self.pool[model]
            res_id, _ = model.name_create(cr, uid, value, context=context)
            target = model.browse(cr, uid, res_id, context=context)
        else:
            _logger.debug("%s: Importing EDI relationship [%r,%r] - record already exists with ID %s, using it",
                          self._name, external_id, value, target.id)
        if need_new_ext_id:
            ext_id_members = split_external_id(external_id)
            # module name is never used bare when creating ir.model.data entries, in order
            # to avoid being taken as part of the module's data, and cleanup up at next update
            module = "%s:%s" % (ext_id_members['module'], ext_id_members['db_uuid'])
            # create a new ir.model.data entry for this value
            self._edi_external_id(cr, uid, target, existing_id=ext_id_members['id'], existing_module=module, context=context)
        return target.id
    def edi_import(self, cr, uid, edi, context=None):
        """Imports a dict representing an EDI document into the system.

           The document must target this model (via its ``__import_model`` or
           ``__model`` metadata keys). Many2one/many2many values are resolved
           through :meth:`edi_import_relation`; one2many lines are deferred
           until the parent record exists, then imported recursively with a
           back-link to the parent. The record itself goes through
           ``ir.model.data._update`` so re-importing the same external ID
           updates the existing record instead of duplicating it.

           :param dict edi: EDI document to import
           :return: the database ID of the imported record
        """
        assert self._name == edi.get('__import_model') or \
            ('__import_model' not in edi and self._name == edi.get('__model')), \
            "EDI Document Model and current model do not match: '%s' (EDI) vs '%s' (current)." % \
            (edi.get('__model'), self._name)
        # First check the record is now already known in the database, in which case it is ignored
        ext_id_members = split_external_id(edi['__id'])
        existing = self._edi_get_object_by_external_id(cr, uid, ext_id_members['full'], self._name, context=context)
        if existing:
            _logger.info("'%s' EDI Document with ID '%s' is already known, skipping import!", self._name, ext_id_members['full'])
            return existing.id
        record_values = {}
        o2m_todo = {}  # o2m values are processed after their parent already exists
        for field_name, field_value in edi.iteritems():
            # skip metadata and empty fields
            if field_name.startswith('__') or field_value is None or field_value is False:
                continue
            field = self._fields.get(field_name)
            if not field:
                _logger.warning('Ignoring unknown field `%s` when importing `%s` EDI document.', field_name, self._name)
                continue
            # skip function/related fields
            if not field.store:
                _logger.warning("Unexpected function field value is found in '%s' EDI document: '%s'." % (self._name, field_name))
                continue
            relation_model = field.comodel_name
            if field.type == 'many2one':
                # m2o values arrive as (external_id, display_name) pairs
                record_values[field_name] = self.edi_import_relation(cr, uid, relation_model,
                                                                     field_value[1], field_value[0],
                                                                     context=context)
            elif field.type == 'many2many':
                record_values[field_name] = [self.edi_import_relation(cr, uid, relation_model, m2m_value[1],
                                                                     m2m_value[0], context=context)
                                             for m2m_value in field_value]
            elif field.type == 'one2many':
                # must wait until parent report is imported, as the parent relationship
                # is often required in o2m child records
                o2m_todo[field_name] = field_value
            else:
                record_values[field_name] = field_value
        # Namespaced module ref ("module:db_uuid") so the entry is not mistaken
        # for the module's own data and cleaned up at next module update.
        module_ref = "%s:%s" % (ext_id_members['module'], ext_id_members['db_uuid'])
        record_id = self.pool.get('ir.model.data')._update(cr, uid, self._name, module_ref, record_values,
                                                           xml_id=ext_id_members['id'], context=context)
        record_display, = self.name_get(cr, uid, [record_id], context=context)
        # process o2m values, connecting them to their parent on-the-fly
        for o2m_field, o2m_value in o2m_todo.iteritems():
            field = self._fields[o2m_field]
            dest_model = self.pool[field.comodel_name]
            dest_field = field.inverse_name
            for o2m_line in o2m_value:
                # link to parent record: expects an (ext_id, name) pair
                o2m_line[dest_field] = (ext_id_members['full'], record_display[1])
                dest_model.edi_import(cr, uid, o2m_line, context=context)
        # process the attachments, if any
        self._edi_import_attachments(cr, uid, record_id, edi, context=context)
        return record_id
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
RossBrunton/django | tests/check_framework/test_model_field_deprecation.py | 322 | 2584 | from django.core import checks
from django.db import models
from django.test import SimpleTestCase
from .tests import IsolateModelsMixin
class TestDeprecatedField(IsolateModelsMixin, SimpleTestCase):
    """Warnings emitted for fields declaring ``system_check_deprecated_details``."""

    def test_default_details(self):
        # An empty details dict falls back to the generic message and check id.
        class MyField(models.Field):
            system_check_deprecated_details = {}

        class Model(models.Model):
            name = MyField()

        expected = checks.Warning(
            msg='MyField has been deprecated.',
            hint=None,
            obj=Model._meta.get_field('name'),
            id='fields.WXXX',
        )
        self.assertEqual(Model().check(), [expected])

    def test_user_specified_details(self):
        # Each key of the details dict overrides the corresponding default.
        class MyField(models.Field):
            system_check_deprecated_details = {
                'msg': 'This field is deprecated and will be removed soon.',
                'hint': 'Use something else.',
                'id': 'fields.W999',
            }

        class Model(models.Model):
            name = MyField()

        expected = checks.Warning(
            msg='This field is deprecated and will be removed soon.',
            hint='Use something else.',
            obj=Model._meta.get_field('name'),
            id='fields.W999',
        )
        self.assertEqual(Model().check(), [expected])
class TestRemovedField(IsolateModelsMixin, SimpleTestCase):
    """Errors emitted for fields declaring ``system_check_removed_details``."""

    def test_default_details(self):
        # An empty details dict falls back to the generic message and check id.
        class MyField(models.Field):
            system_check_removed_details = {}

        class Model(models.Model):
            name = MyField()

        expected = checks.Error(
            msg='MyField has been removed except for support in historical migrations.',
            hint=None,
            obj=Model._meta.get_field('name'),
            id='fields.EXXX',
        )
        self.assertEqual(Model().check(), [expected])

    def test_user_specified_details(self):
        # Each key of the details dict overrides the corresponding default.
        class MyField(models.Field):
            system_check_removed_details = {
                'msg': 'Support for this field is gone.',
                'hint': 'Use something else.',
                'id': 'fields.E999',
            }

        class Model(models.Model):
            name = MyField()

        expected = checks.Error(
            msg='Support for this field is gone.',
            hint='Use something else.',
            obj=Model._meta.get_field('name'),
            id='fields.E999',
        )
        self.assertEqual(Model().check(), [expected])
| bsd-3-clause |
elemson/codeacademyfinalproj | cc_markov.py | 4 | 2430 | import re
import random
from collections import defaultdict, deque
"""
Codecademy Pro Final Project supplementary code
Markov Chain generator
This is a text generator that uses Markov Chains to generate text
using a uniform distribution.
num_key_words is the number of words that compose a key (suggested: 2 or 3)
"""
class MarkovChain:
    """Markov-chain text generator with uniform next-word choice.

    Keys are tuples of ``num_key_words`` consecutive words (2 or 3 is a
    sensible choice); each key maps to the list of words observed to follow
    that word sequence in the source text. Fixes over the previous version:
    removed the unused ``content`` local in :meth:`add_file`, renamed
    parameters that shadowed the ``str`` builtin, and replaced the
    Python-2-only ``xrange`` / ``dict.keys()[idx]`` constructs with forms
    that behave identically on Python 2 and also run on Python 3.
    """

    def __init__(self, num_key_words=2):
        self.num_key_words = num_key_words
        self.lookup_dict = defaultdict(list)
        # Punctuation/newlines are collapsed to spaces so keys hold bare words.
        self._punctuation_regex = re.compile('[,.!;\?\:\-\[\]\n]+')
        self._seeded = False
        self.__seed_me()

    def __seed_me(self, rand_seed=None):
        # Seed the RNG at most once; some platforms lack seeding support.
        if self._seeded is not True:
            try:
                if rand_seed is not None:
                    random.seed(rand_seed)
                else:
                    random.seed()
                self._seeded = True
            except NotImplementedError:
                self._seeded = False

    """
    " Build Markov Chain from data source.
    " Use add_file() or add_string() to add the appropriate format source
    """

    def add_file(self, file_path):
        """Feed the chain from the contents of a text file."""
        with open(file_path, 'r') as fh:
            self.__add_source_data(fh.read())

    def add_string(self, text):
        """Feed the chain from a string."""
        self.__add_source_data(text)

    def __add_source_data(self, text):
        clean_text = self._punctuation_regex.sub(' ', text).lower()
        for key, follower in self.__generate_tuple_keys(clean_text.split()):
            self.lookup_dict[key].append(follower)

    def __generate_tuple_keys(self, words):
        # Yield (key_tuple, next_word) pairs; nothing if the input is too short.
        if len(words) < self.num_key_words:
            return
        for i in range(len(words) - self.num_key_words):
            yield (tuple(words[i:i + self.num_key_words]),
                   words[i + self.num_key_words])

    """
    " Generates text based on the data the Markov Chain contains
    " max_length is the maximum number of words to generate
    """

    def generate_text(self, max_length=20):
        """Return up to ``max_length`` words generated from the chain."""
        context = deque()
        output = []
        if len(self.lookup_dict) > 0:
            self.__seed_me(rand_seed=len(self.lookup_dict))
            # Pick a random chain head to start from.
            idx = random.randint(0, len(self.lookup_dict) - 1)
            chain_head = list(list(self.lookup_dict)[idx])
            context.extend(chain_head)
            while len(output) < (max_length - self.num_key_words):
                next_choices = self.lookup_dict[tuple(context)]
                if len(next_choices) > 0:
                    next_word = random.choice(next_choices)
                    context.append(next_word)
                    output.append(context.popleft())
                else:
                    # Dead end: no observed follower for the current context.
                    break
            output.extend(list(context))
        return output
| gpl-2.0 |
Anaethelion/Geotrek | geotrek/core/models.py | 1 | 20663 | # -*- coding: utf-8 -*-
import logging
import functools
from django.contrib.gis.db import models
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.contrib.gis.geos import fromstr, LineString
from mapentity.models import MapEntityMixin
from geotrek.authent.models import StructureRelated
from geotrek.common.mixins import (TimeStampedModelMixin, NoDeleteMixin,
AddPropertyMixin)
from geotrek.common.utils import classproperty
from geotrek.common.utils.postgresql import debug_pg_notices
from geotrek.altimetry.models import AltimetryMixin
from .helpers import PathHelper, TopologyHelper
from django.db import connections, DEFAULT_DB_ALIAS
logger = logging.getLogger(__name__)
class PathManager(models.GeoManager):
    """Default ``Path`` manager: exposes only records flagged as visible."""

    # Use this manager when walking through FK/M2M relationships
    use_for_related_fields = True

    def get_queryset(self):
        """Hide all ``Path`` records that are not marked as visible."""
        qs = super(PathManager, self).get_queryset()
        return qs.filter(visible=True)
# GeoDjango note:
# Django automatically creates indexes on geometry fields but it uses a
# syntax which is not compatible with PostGIS 2.0. That's why index creation
# is explicitly disbaled here (see manual index creation in custom SQL files).
class Path(AddPropertyMixin, MapEntityMixin, AltimetryMixin,
           TimeStampedModelMixin, StructureRelated):
    """A section (*troncon*) of the paths network.

    Paths are the linear referential onto which topologies are draped.
    Geometry-derived values (length, altimetry, timestamps) are computed by
    database triggers, hence the ``reload()`` calls after each ``save()``.
    """
    geom = models.LineStringField(srid=settings.SRID, spatial_index=False)
    # Reference geometry from the cadastre; never edited through the app.
    geom_cadastre = models.LineStringField(null=True, srid=settings.SRID, spatial_index=False,
                                           editable=False)
    valid = models.BooleanField(db_column='valide', default=True, verbose_name=_(u"Validity"),
                                help_text=_(u"Approved by manager"))
    visible = models.BooleanField(db_column='visible', default=True, verbose_name=_(u"Visible"),
                                  help_text=_(u"Shown in lists and maps"))
    name = models.CharField(null=True, blank=True, max_length=20, db_column='nom', verbose_name=_(u"Name"),
                            help_text=_(u"Official name"))
    comments = models.TextField(null=True, blank=True, db_column='remarques', verbose_name=_(u"Comments"),
                                help_text=_(u"Remarks"))
    departure = models.CharField(null=True, blank=True, default="", max_length=250, db_column='depart', verbose_name=_(u"Departure"),
                                 help_text=_(u"Departure place"))
    arrival = models.CharField(null=True, blank=True, default="", max_length=250, db_column='arrivee', verbose_name=_(u"Arrival"),
                               help_text=_(u"Arrival place"))
    comfort = models.ForeignKey('Comfort',
                                null=True, blank=True, related_name='paths',
                                verbose_name=_("Comfort"), db_column='confort')
    source = models.ForeignKey('PathSource',
                               null=True, blank=True, related_name='paths',
                               verbose_name=_("Source"), db_column='source')
    stake = models.ForeignKey('Stake',
                              null=True, blank=True, related_name='paths',
                              verbose_name=_("Maintenance stake"), db_column='enjeu')
    usages = models.ManyToManyField('Usage',
                                    blank=True, null=True, related_name="paths",
                                    verbose_name=_(u"Usages"), db_table="l_r_troncon_usage")
    networks = models.ManyToManyField('Network',
                                      blank=True, null=True, related_name="paths",
                                      verbose_name=_(u"Networks"), db_table="l_r_troncon_reseau")
    eid = models.CharField(verbose_name=_(u"External id"), max_length=128, blank=True, db_column='id_externe')

    objects = PathManager()

    # Set to True by reverse(); consumed and reset by save().
    is_reversed = False

    @property
    def length_2d(self):
        """Planimetric length in SRID units (None if no geometry)."""
        if self.geom:
            return round(self.geom.length, 1)
        else:
            return None

    @classproperty
    def length_2d_verbose_name(cls):
        return _(u"2D Length")

    @property
    def length_2d_display(self):
        return self.length_2d

    def __unicode__(self):
        return self.name or _('path %d') % self.pk

    class Meta:
        db_table = 'l_t_troncon'
        verbose_name = _(u"Path")
        verbose_name_plural = _(u"Paths")

    @classmethod
    def closest(cls, point):
        """
        Returns the closest path of the point.
        Will fail if no path in database.
        """
        # TODO: move to custom manager
        if point.srid != settings.SRID:
            point = point.transform(settings.SRID, clone=True)
        return cls.objects.all().distance(point).order_by('distance')[0]

    def is_overlap(self):
        """True if this path's geometry is not disjoint from other paths."""
        return not PathHelper.disjoint(self.geom, self.pk)

    def reverse(self):
        """
        Reverse the geometry.
        We keep track of this, since we will have to work on topologies at save()
        """
        reversed_coord = self.geom.coords[-1::-1]
        self.geom = LineString(reversed_coord)
        self.is_reversed = True
        return self

    def interpolate(self, point):
        """
        Returns position ([0.0-1.0]) and offset (distance) of the point
        along this path.
        """
        return PathHelper.interpolate(self, point)

    def snap(self, point):
        """
        Returns the point snapped (i.e closest) to the path line geometry.
        """
        return PathHelper.snap(self, point)

    def reload(self, fromdb=None):
        """Refresh trigger-computed values (geometry, altimetry, stamps) from DB."""
        if self.pk and self.visible:
            fromdb = self.__class__.objects.get(pk=self.pk)
            self.geom = fromdb.geom
            AltimetryMixin.reload(self, fromdb)
            TimeStampedModelMixin.reload(self, fromdb)
        return self

    @debug_pg_notices
    def save(self, *args, **kwargs):
        # If the path was reversed, we have to invert related topologies
        if self.is_reversed:
            for aggr in self.aggregations.all():
                aggr.start_position = 1 - aggr.start_position
                aggr.end_position = 1 - aggr.end_position
                aggr.save()
            # Bugfix: reset the actual ``is_reversed`` flag. The previous code
            # assigned ``self._is_reversed``, leaving ``is_reversed`` True, so
            # every subsequent save() re-inverted all the aggregations again.
            self.is_reversed = False
        super(Path, self).save(*args, **kwargs)
        self.reload()

    @property
    def name_display(self):
        """HTML link to the detail page (used in MapEntity lists)."""
        return u'<a data-pk="%s" href="%s" title="%s" >%s</a>' % (self.pk,
                                                                  self.get_detail_url(),
                                                                  self,
                                                                  self)

    @property
    def name_csv_display(self):
        return unicode(self)

    @classproperty
    def trails_verbose_name(cls):
        return _("Trails")

    @property
    def trails_display(self):
        trails = getattr(self, '_trails', self.trails)
        if trails:
            return ", ".join([t.name_display for t in trails])
        return _("None")

    @property
    def trails_csv_display(self):
        trails = getattr(self, '_trails', self.trails)
        if trails:
            return ", ".join([unicode(t) for t in trails])
        return _("None")

    @classmethod
    def get_create_label(cls):
        return _(u"Add a new path")

    @property
    def checkbox(self):
        """Checkbox widget used for multi-path actions (e.g. merge)."""
        return u'<input type="checkbox" name="{}[]" value="{}" />'.format('path',
                                                                          self.pk)

    @classproperty
    def checkbox_verbose_name(cls):
        return _("Action")

    @property
    def checkbox_display(self):
        return self.checkbox

    def merge_path(self, path_to_merge):
        """
        Path unification
        :param path_to path_to_merge: Path instance to merge
        :return: Boolean
        """
        # Bugfix: ``result`` was previously unbound (NameError) when the
        # guard below failed (unsaved path, no other path, or self-merge).
        result = False
        if (self.pk and path_to_merge) and (self.pk != path_to_merge.pk):
            conn = connections[DEFAULT_DB_ALIAS]
            cursor = conn.cursor()
            sql = "SELECT ft_merge_path({}, {});".format(self.pk, path_to_merge.pk)
            cursor.execute(sql)
            result = cursor.fetchall()[0][0]
            if result:
                # reload object after unification
                self.reload()
        return result
class Topology(AddPropertyMixin, AltimetryMixin, TimeStampedModelMixin, NoDeleteMixin):
    """An event (*evenement*) located on the paths network.

    When ``TREKKING_TOPOLOGY_ENABLED`` a topology is defined by its
    ``PathAggregation`` records (relative positions along paths) and its
    geometry is computed by database triggers; otherwise ``geom`` is edited
    directly. This explains the systematic ``reload()`` after writes.
    """
    paths = models.ManyToManyField(Path, db_column='troncons', through='PathAggregation', verbose_name=_(u"Path"))
    offset = models.FloatField(default=0.0, db_column='decallage', verbose_name=_(u"Offset"))  # in SRID units
    kind = models.CharField(editable=False, verbose_name=_(u"Kind"), max_length=32)

    # Override default manager
    objects = NoDeleteMixin.get_manager_cls(models.GeoManager)()

    geom = models.GeometryField(editable=(not settings.TREKKING_TOPOLOGY_ENABLED),
                                srid=settings.SRID, null=True,
                                default=None, spatial_index=False)

    """ Fake srid attribute, that prevents transform() calls when using Django map widgets. """
    srid = settings.API_SRID

    class Meta:
        db_table = 'e_t_evenement'
        verbose_name = _(u"Topology")
        verbose_name_plural = _(u"Topologies")

    def __init__(self, *args, **kwargs):
        super(Topology, self).__init__(*args, **kwargs)
        if not self.pk:
            # New instances record their concrete model name as ``kind``.
            self.kind = self.__class__.KIND

    @property
    def length_2d(self):
        # Planimetric length; points have no meaningful length.
        if self.geom and not self.ispoint():
            return round(self.geom.length, 1)
        else:
            return None

    @classproperty
    def length_2d_verbose_name(cls):
        return _(u"2D Length")

    @property
    def length_2d_display(self):
        return self.length_2d

    @classproperty
    def KIND(cls):
        # Kind is simply the uppercased concrete model name (e.g. 'TRAIL').
        return cls._meta.object_name.upper()

    def __unicode__(self):
        return u"%s (%s)" % (_(u"Topology"), self.pk)

    def ispoint(self):
        """True if this topology is punctual (all aggregations zero-length)."""
        if not settings.TREKKING_TOPOLOGY_ENABLED or not self.pk:
            return self.geom and self.geom.geom_type == 'Point'
        return all([a.start_position == a.end_position for a in self.aggregations.all()])

    def add_path(self, path, start=0.0, end=1.0, order=0, reload=True):
        """
        Shortcut function to add paths into this topology.
        """
        from .factories import PathAggregationFactory
        aggr = PathAggregationFactory.create(topo_object=self,
                                             path=path,
                                             start_position=start,
                                             end_position=end,
                                             order=order)
        # Adding a path resurrects a soft-deleted topology.
        if self.deleted:
            self.deleted = False
            self.save(update_fields=['deleted'])
        # Since a trigger modifies geom, we reload the object
        if reload:
            self.reload()
        return aggr

    @classmethod
    def overlapping(cls, topologies):
        """ Return a Topology queryset overlapping specified topologies.
        """
        return TopologyHelper.overlapping(cls, topologies)

    def mutate(self, other, delete=True):
        """
        Take alls attributes of the other topology specified and
        save them into this one. Optionnally deletes the other.
        """
        self.offset = other.offset
        self.save(update_fields=['offset'])
        PathAggregation.objects.filter(topo_object=self).delete()
        # The previous operation has put deleted = True (in triggers)
        # and NULL in geom (see update_geometry_of_evenement:: IF t_count = 0)
        self.deleted = False
        self.geom = other.geom
        self.save(update_fields=['deleted', 'geom'])
        # Now copy all agregations from other to self
        aggrs = other.aggregations.all()
        # A point has only one aggregation, except if it is on an intersection.
        # In this case, the trigger will create them, so ignore them here.
        if other.ispoint():
            aggrs = aggrs[:1]
        for aggr in aggrs:
            self.add_path(aggr.path, aggr.start_position, aggr.end_position, aggr.order, reload=False)
        self.reload()
        if delete:
            other.delete(force=True)  # Really delete it from database
        return self

    def reload(self, fromdb=None):
        """
        Reload into instance all computed attributes in triggers.
        """
        if self.pk:
            # Update computed values
            fromdb = self.__class__.objects.get(pk=self.pk)
            self.geom = fromdb.geom
            # /!\ offset may be set by a trigger OR in
            # the django code, reload() will override
            # any unsaved value
            self.offset = fromdb.offset
            AltimetryMixin.reload(self, fromdb)
            TimeStampedModelMixin.reload(self, fromdb)
            NoDeleteMixin.reload(self, fromdb)
        return self

    @debug_pg_notices
    def save(self, *args, **kwargs):
        # HACK: these fields are readonly from the Django point of view
        # but they can be changed at DB level. Since Django write all fields
        # to DB anyway, it is important to update it before writting
        if self.pk and settings.TREKKING_TOPOLOGY_ENABLED:
            existing = self.__class__.objects.get(pk=self.pk)
            self.length = existing.length
            # In the case of points, the geom can be set by Django. Don't override.
            point_geom_not_set = self.ispoint() and self.geom is None
            geom_already_in_db = not self.ispoint() and existing.geom is not None
            if (point_geom_not_set or geom_already_in_db):
                self.geom = existing.geom
        else:
            if not self.deleted and self.geom is None:
                # We cannot have NULL geometry. So we use an empty one,
                # it will be computed or overwritten by triggers.
                self.geom = fromstr('POINT (0 0)')
        if not self.kind:
            if self.KIND == "TOPOLOGYMIXIN":
                raise Exception("Cannot save abstract topologies")
            self.kind = self.__class__.KIND
        # Static value for Topology offset, if any
        shortmodelname = self._meta.object_name.lower().replace('edge', '')
        self.offset = settings.TOPOLOGY_STATIC_OFFSETS.get(shortmodelname, self.offset)
        # Save into db
        super(Topology, self).save(*args, **kwargs)
        self.reload()

    def serialize(self, **kwargs):
        # Delegated to TopologyHelper (JSON-friendly representation).
        return TopologyHelper.serialize(self, **kwargs)

    @classmethod
    def deserialize(cls, serialized):
        # Delegated to TopologyHelper (inverse of serialize()).
        return TopologyHelper.deserialize(serialized)

    def distance(self, to_cls):
        """Distance to associate this topology to another topology class"""
        return None
class PathAggregationManager(models.GeoManager):
    """Manager returning aggregations sorted by their ``order`` column."""

    def get_queryset(self):
        qs = super(PathAggregationManager, self).get_queryset()
        return qs.order_by('order')
class PathAggregation(models.Model):
    """Through-model linking a ``Topology`` to a ``Path``.

    Positions are fractions of the path length, in [0.0 ; 1.0].
    """
    path = models.ForeignKey(Path, null=False, db_column='troncon',
                             verbose_name=_(u"Path"),
                             related_name="aggregations",
                             on_delete=models.DO_NOTHING)  # The CASCADE behavior is enforced at DB-level (see file ../sql/20_evenements_troncons.sql)
    topo_object = models.ForeignKey(Topology, null=False, related_name="aggregations",
                                    db_column='evenement', verbose_name=_(u"Topology"))
    start_position = models.FloatField(db_column='pk_debut', verbose_name=_(u"Start position"), db_index=True)
    end_position = models.FloatField(db_column='pk_fin', verbose_name=_(u"End position"), db_index=True)
    order = models.IntegerField(db_column='ordre', default=0, blank=True, null=True, verbose_name=_(u"Order"))

    # Override default manager
    objects = PathAggregationManager()

    def __unicode__(self):
        return u"%s (%s-%s: %s - %s)" % (_("Path aggregation"), self.path.pk, self.path.name, self.start_position, self.end_position)

    @property
    def start_meter(self):
        """Start position converted to meters along the path (-1 on error)."""
        try:
            return 0 if self.start_position == 0.0 else int(self.start_position * self.path.length)
        except ValueError:
            return -1

    @property
    def end_meter(self):
        """End position converted to meters along the path (-1 on error)."""
        try:
            return 0 if self.end_position == 0.0 else int(self.end_position * self.path.length)
        except ValueError:
            return -1

    @property
    def is_full(self):
        # True when the aggregation covers the whole path, in either direction.
        return (self.start_position == 0.0 and self.end_position == 1.0 or
                self.start_position == 1.0 and self.end_position == 0.0)

    @debug_pg_notices
    def save(self, *args, **kwargs):
        # debug_pg_notices surfaces PostgreSQL NOTICEs raised by DB triggers.
        return super(PathAggregation, self).save(*args, **kwargs)

    class Meta:
        db_table = 'e_r_evenement_troncon'
        verbose_name = _(u"Path aggregation")
        verbose_name_plural = _(u"Path aggregations")
        # Important - represent the order of the path in the Topology path list
        ordering = ['order', ]
class PathSource(StructureRelated):
    """Data source (provider) of a path."""
    source = models.CharField(verbose_name=_(u"Source"), max_length=50)

    class Meta:
        db_table = 'l_b_source_troncon'
        verbose_name = _(u"Path source")
        verbose_name_plural = _(u"Path sources")
        ordering = ['source']

    def __unicode__(self):
        return self.source
@functools.total_ordering
class Stake(StructureRelated):
    """Maintenance stake level; orderable by primary key via total_ordering."""
    stake = models.CharField(verbose_name=_(u"Stake"), max_length=50, db_column='enjeu')

    class Meta:
        db_table = 'l_b_enjeu'
        verbose_name = _(u"Maintenance stake")
        verbose_name_plural = _(u"Maintenance stakes")
        ordering = ['id']

    def __lt__(self, other):
        # Nothing is "less than" None: any stake compares greater than None.
        if other is None:
            return False
        return self.pk < other.pk

    def __eq__(self, other):
        # NOTE(review): __eq__ without an explicit __hash__ — fine on Python 2
        # (hash is inherited) but would make the class unhashable on Python 3.
        return isinstance(other, Stake) \
            and self.pk == other.pk

    def __unicode__(self):
        return self.stake
class Comfort(StructureRelated):
    """Comfort level of a path (surface quality)."""
    comfort = models.CharField(verbose_name=_(u"Comfort"), max_length=50, db_column='confort')

    class Meta:
        db_table = 'l_b_confort'
        verbose_name = _(u"Comfort")
        verbose_name_plural = _(u"Comforts")
        ordering = ['comfort']

    def __unicode__(self):
        return self.comfort
class Usage(StructureRelated):
    """Usage category of a path (e.g. hiking, biking)."""
    usage = models.CharField(verbose_name=_(u"Usage"), max_length=50, db_column='usage')

    class Meta:
        db_table = 'l_b_usage'
        verbose_name = _(u"Usage")
        verbose_name_plural = _(u"Usages")
        ordering = ['usage']

    def __unicode__(self):
        return self.usage
class Network(StructureRelated):
    """Network a path belongs to."""
    network = models.CharField(verbose_name=_(u"Network"), max_length=50, db_column='reseau')

    class Meta:
        db_table = 'l_b_reseau'
        verbose_name = _(u"Network")
        verbose_name_plural = _(u"Networks")
        ordering = ['network']

    def __unicode__(self):
        return self.network
class Trail(MapEntityMixin, Topology, StructureRelated):
    """A named trail, implemented as a topology over the paths network."""
    topo_object = models.OneToOneField(Topology, parent_link=True,
                                       db_column='evenement')
    name = models.CharField(verbose_name=_(u"Name"), max_length=64, db_column='nom')
    departure = models.CharField(verbose_name=_(u"Departure"), max_length=64, db_column='depart')
    arrival = models.CharField(verbose_name=_(u"Arrival"), max_length=64, db_column='arrivee')
    comments = models.TextField(default="", blank=True, verbose_name=_(u"Comments"), db_column='commentaire')

    class Meta:
        db_table = 'l_t_sentier'
        verbose_name = _(u"Trail")
        verbose_name_plural = _(u"Trails")
        ordering = ['name']

    # Exclude soft-deleted topologies, like the parent model does.
    objects = Topology.get_manager_cls(models.GeoManager)()

    def __unicode__(self):
        return self.name

    @property
    def name_display(self):
        # HTML link to the detail page (used in MapEntity lists).
        return u'<a data-pk="%s" href="%s" title="%s" >%s</a>' % (self.pk,
                                                                  self.get_detail_url(),
                                                                  self,
                                                                  self)

    @classmethod
    def path_trails(cls, path):
        """Trails having at least one aggregation on the given path."""
        return cls.objects.existing().filter(aggregations__path=path)
# Reverse accessors added at module level: ``path.trails`` lists the trails
# passing through a path, ``topology.trails`` those overlapping a topology.
Path.add_property('trails', lambda self: Trail.path_trails(self), _(u"Trails"))
Topology.add_property('trails', lambda self: Trail.overlapping(self), _(u"Trails"))
| bsd-2-clause |
dispansible/dispansible | ansible/library/disp_homebrew_tap.py | 1 | 5742 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Daniel Jaouen <[email protected]>
# Based on homebrew (Andrew Dunham <[email protected]>)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import re
DOCUMENTATION = '''
---
module: homebrew_tap
author: Daniel Jaouen
short_description: Tap a Homebrew repository.
description:
- Tap external Homebrew repositories.
version_added: "1.6"
options:
tap:
description:
- The repository to tap.
required: true
state:
description:
- state of the repository.
choices: [ 'present', 'absent' ]
required: false
default: 'present'
requirements: [ homebrew ]
'''
EXAMPLES = '''
homebrew_tap: tap=homebrew/dupes state=present
homebrew_tap: tap=homebrew/dupes state=absent
homebrew_tap: tap=homebrew/dupes,homebrew/science state=present
'''
def a_valid_tap(tap):
    '''Returns True if the tap is valid.'''
    # "user/repo", optionally with the conventional "homebrew-" repo prefix.
    valid_tap_re = re.compile(r'^([\w-]+)/(homebrew-)?([\w-]+)$')
    return valid_tap_re.match(tap)
def already_tapped(module, brew_path, tap):
    '''Returns True if already tapped.'''
    # `brew tap` with no arguments prints one tap name per line.
    rc, out, err = module.run_command([
        brew_path,
        'tap',
    ])
    current_taps = [line.strip().lower() for line in out.split('\n') if line]
    return tap.lower() in current_taps
def add_tap(module, brew_path, tap):
    '''Adds a single tap.

    Returns a (failed, changed, msg) tuple.
    '''
    if not a_valid_tap(tap):
        return (True, False, 'not a valid tap: %s' % tap)

    if already_tapped(module, brew_path, tap):
        return (False, False, 'already tapped: %s' % tap)

    if module.check_mode:
        module.exit_json(changed=True)

    module.run_command([
        brew_path,
        'tap',
        tap,
    ])
    if already_tapped(module, brew_path, tap):
        return (False, True, 'successfully tapped: %s' % tap)
    return (True, False, 'failed to tap: %s' % tap)
def add_taps(module, brew_path, taps):
    '''Adds one or more taps.

    Returns a (failed, changed, msg) tuple summarizing the whole batch.
    '''
    # Bugfix: ``changed`` was previously unbound when ``taps`` was empty,
    # raising NameError at the final return.
    failed, changed, unchanged, added, msg = False, False, 0, 0, ''

    for tap in taps:
        (failed, changed, msg) = add_tap(module, brew_path, tap)
        if failed:
            break
        if changed:
            added += 1
        else:
            unchanged += 1

    if failed:
        # Bugfix: format the counts first, then append the error text; the old
        # ``('... error: ' + msg) % (...)`` crashed if msg contained a '%'.
        msg = ('added: %d, unchanged: %d, error: ' % (added, unchanged)) + msg
    elif added:
        changed = True
        msg = 'added: %d, unchanged: %d' % (added, unchanged)
    else:
        msg = 'added: %d, unchanged: %d' % (added, unchanged)

    return (failed, changed, msg)
def remove_tap(module, brew_path, tap):
    '''Removes a single tap.

    Returns a (failed, changed, msg) tuple.
    '''
    if not a_valid_tap(tap):
        return (True, False, 'not a valid tap: %s' % tap)

    if not already_tapped(module, brew_path, tap):
        return (False, False, 'already untapped: %s' % tap)

    if module.check_mode:
        module.exit_json(changed=True)

    module.run_command([
        brew_path,
        'untap',
        tap,
    ])
    if not already_tapped(module, brew_path, tap):
        return (False, True, 'successfully untapped: %s' % tap)
    return (True, False, 'failed to untap: %s' % tap)
def remove_taps(module, brew_path, taps):
    '''Removes one or more taps.

    Returns a (failed, changed, msg) tuple summarizing the whole batch.
    '''
    # Bugfix: ``changed`` was previously unbound when ``taps`` was empty,
    # raising NameError at the final return.
    failed, changed, unchanged, removed, msg = False, False, 0, 0, ''

    for tap in taps:
        (failed, changed, msg) = remove_tap(module, brew_path, tap)
        if failed:
            break
        if changed:
            removed += 1
        else:
            unchanged += 1

    if failed:
        # Bugfix: format the counts first, then append the error text; the old
        # ``('... error: ' + msg) % (...)`` crashed if msg contained a '%'.
        msg = ('removed: %d, unchanged: %d, error: ' % (removed, unchanged)) + msg
    elif removed:
        changed = True
        msg = 'removed: %d, unchanged: %d' % (removed, unchanged)
    else:
        msg = 'removed: %d, unchanged: %d' % (removed, unchanged)

    return (failed, changed, msg)
def main():
    """Ansible entry point: tap or untap the requested Homebrew repositories."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(aliases=['tap'], required=True),
            state=dict(default='present', choices=['present', 'absent']),
        ),
        supports_check_mode=True,
    )

    brew_path = module.get_bin_path(
        'brew',
        required=True,
        opt_dirs=['/usr/local/bin', '~/.linuxbrew/bin']
    )

    # Several taps may be given as a comma-separated list.
    taps = module.params['name'].split(',')
    state = module.params['state']

    if state == 'present':
        failed, changed, msg = add_taps(module, brew_path, taps)
    else:
        # argument_spec restricts choices, so this is necessarily 'absent'.
        failed, changed, msg = remove_taps(module, brew_path, taps)

    if failed:
        module.fail_json(msg=msg)
    module.exit_json(changed=changed, msg=msg)
# this is magic, see lib/ansible/module_common.py
#<<INCLUDE_ANSIBLE_MODULE_COMMON>>

# Run only when executed as a script (Ansible executes modules as
# ``__main__``); importing this file for tests no longer triggers a run.
if __name__ == '__main__':
    main()
| mit |
tuxfux-hlp-notes/python-batches | archieves/batch-62/files/myenv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/connectionpool.py | 359 | 33591 | from __future__ import absolute_import
import errno
import logging
import sys
import warnings
from socket import error as SocketError, timeout as SocketTimeout
import socket
try: # Python 3
from queue import LifoQueue, Empty, Full
except ImportError:
from Queue import LifoQueue, Empty, Full
# Queue is imported for side effects on MS Windows
import Queue as _unused_module_Queue # noqa: unused
from .exceptions import (
ClosedPoolError,
ProtocolError,
EmptyPoolError,
HeaderParsingError,
HostChangedError,
LocationValueError,
MaxRetryError,
ProxyError,
ReadTimeoutError,
SSLError,
TimeoutError,
InsecureRequestWarning,
NewConnectionError,
)
from .packages.ssl_match_hostname import CertificateError
from .packages import six
from .connection import (
port_by_scheme,
DummyConnection,
HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
HTTPException, BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse
from .util.connection import is_connection_dropped
from .util.response import assert_header_parsing
from .util.retry import Retry
from .util.timeout import Timeout
from .util.url import get_host, Url
# Python 2/3 compatible range iterator.
xrange = six.moves.xrange

log = logging.getLogger(__name__)

# Sentinel default, to distinguish "argument not passed" from an explicit None.
_Default = object()
# Pool objects
class ConnectionPool(object):
    """
    Base class for all connection pools, such as
    :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
    """

    scheme = None
    QueueCls = LifoQueue

    def __init__(self, host, port=None):
        if not host:
            raise LocationValueError("No host specified.")

        # httplib doesn't like it when we include brackets in ipv6 addresses
        # Specifically, if we include brackets but also pass the port then
        # httplib crazily doubles up the square brackets on the Host header.
        # Instead, we need to make sure we never pass ``None`` as the port.
        # However, for backward compatibility reasons we can't actually
        # *assert* that.
        self.host = host.strip('[]')
        self.port = port

    def __str__(self):
        cls_name = type(self).__name__
        return '%s(host=%r, port=%r)' % (cls_name, self.host, self.port)

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.close()
        # Never swallow exceptions: a falsy return re-raises them.
        return False

    def close(self):
        """
        Close all pooled connections and disable the pool.
        """
        # Subclasses override this with real cleanup; the base pool owns nothing.
        pass
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
# errnos signalling "no data ready yet" on a non-blocking socket (not real
# errors); ``set([...])`` rather than a set literal for Python 2.6 support.
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
Thread-safe connection pool for one host.
:param host:
Host used for this HTTP Connection (e.g. "localhost"), passed into
:class:`httplib.HTTPConnection`.
:param port:
Port used for this HTTP Connection (None is equivalent to 80), passed
into :class:`httplib.HTTPConnection`.
:param strict:
Causes BadStatusLine to be raised if the status line can't be parsed
as a valid HTTP/1.0 or 1.1 status line, passed into
:class:`httplib.HTTPConnection`.
.. note::
Only works in Python 2. This parameter is ignored in Python 3.
:param timeout:
Socket timeout in seconds for each individual connection. This can
be a float or integer, which sets the timeout for the HTTP request,
or an instance of :class:`urllib3.util.Timeout` which gives you more
fine-grained control over request timeouts. After the constructor has
been parsed, this is always a `urllib3.util.Timeout` object.
:param maxsize:
Number of connections to save that can be reused. More than 1 is useful
in multithreaded situations. If ``block`` is set to False, more
connections will be created but they will not be saved once they've
been used.
:param block:
If set to True, no more than ``maxsize`` connections will be used at
a time. When no free connections are available, the call will block
until a connection has been released. This is a useful side effect for
particular multithreaded situations where one does not want to use more
than maxsize connections per host to prevent flooding.
:param headers:
Headers to include with all requests, unless other headers are given
explicitly.
:param retries:
Retry configuration to use by default with requests in this pool.
:param _proxy:
Parsed proxy URL, should not be used directly, instead, see
:class:`urllib3.connectionpool.ProxyManager`"
:param _proxy_headers:
A dictionary with proxy headers, should not be used directly,
instead, see :class:`urllib3.connectionpool.ProxyManager`"
:param \**conn_kw:
Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
:class:`urllib3.connection.HTTPSConnection` instances.
"""
scheme = 'http'
ConnectionCls = HTTPConnection
ResponseCls = HTTPResponse
def __init__(self, host, port=None, strict=False,
timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1, block=False,
headers=None, retries=None,
_proxy=None, _proxy_headers=None,
**conn_kw):
ConnectionPool.__init__(self, host, port)
RequestMethods.__init__(self, headers)
self.strict = strict
if not isinstance(timeout, Timeout):
timeout = Timeout.from_float(timeout)
if retries is None:
retries = Retry.DEFAULT
self.timeout = timeout
self.retries = retries
self.pool = self.QueueCls(maxsize)
self.block = block
self.proxy = _proxy
self.proxy_headers = _proxy_headers or {}
# Fill the queue up so that doing get() on it will block properly
for _ in xrange(maxsize):
self.pool.put(None)
# These are mostly for testing and debugging purposes.
self.num_connections = 0
self.num_requests = 0
self.conn_kw = conn_kw
if self.proxy:
# Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
# We cannot know if the user has added default socket options, so we cannot replace the
# list.
self.conn_kw.setdefault('socket_options', [])
def _new_conn(self):
"""
Return a fresh :class:`HTTPConnection`.
"""
self.num_connections += 1
log.info("Starting new HTTP connection (%d): %s",
self.num_connections, self.host)
conn = self.ConnectionCls(host=self.host, port=self.port,
timeout=self.timeout.connect_timeout,
strict=self.strict, **self.conn_kw)
return conn
    def _get_conn(self, timeout=None):
        """
        Get a connection. Will return a pooled connection if one is available.
        If no connections are available and :prop:`.block` is ``False``, then a
        fresh connection is returned.
        :param timeout:
            Seconds to wait before giving up and raising
            :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
            :prop:`.block` is ``True``.
        """
        conn = None
        try:
            conn = self.pool.get(block=self.block, timeout=timeout)
        except AttributeError:  # self.pool is None -- pool has been close()d
            raise ClosedPoolError(self, "Pool is closed.")
        except Empty:
            if self.block:
                raise EmptyPoolError(self,
                                     "Pool reached maximum size and no more "
                                     "connections are allowed.")
            pass  # Oh well, we'll create a new connection then
        # If this is a persistent connection, check if it got disconnected
        if conn and is_connection_dropped(conn):
            log.info("Resetting dropped connection: %s", self.host)
            conn.close()
            if getattr(conn, 'auto_open', 1) == 0:
                # This is a proxied connection that has been mutated by
                # httplib._tunnel() and cannot be reused (since it would
                # attempt to bypass the proxy)
                conn = None
        # A ``None`` from the queue is just a free slot: make a fresh one.
        return conn or self._new_conn()
def _put_conn(self, conn):
"""
Put a connection back into the pool.
:param conn:
Connection object for the current host and port as returned by
:meth:`._new_conn` or :meth:`._get_conn`.
If the pool is already full, the connection is closed and discarded
because we exceeded maxsize. If connections are discarded frequently,
then maxsize should be increased.
If the pool is closed, then the connection will be closed and discarded.
"""
try:
self.pool.put(conn, block=False)
return # Everything is dandy, done.
except AttributeError:
# self.pool is None.
pass
except Full:
# This should never happen if self.block == True
log.warning(
"Connection pool is full, discarding connection: %s",
self.host)
# Connection never got put back into the pool, close it.
if conn:
conn.close()
def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
pass
def _prepare_proxy(self, conn):
# Nothing to do for HTTP connections.
pass
def _get_timeout(self, timeout):
""" Helper that always returns a :class:`urllib3.util.Timeout` """
if timeout is _Default:
return self.timeout.clone()
if isinstance(timeout, Timeout):
return timeout.clone()
else:
# User passed us an int/float. This is for backwards compatibility,
# can be removed later
return Timeout.from_float(timeout)
def _raise_timeout(self, err, url, timeout_value):
"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""
if isinstance(err, SocketTimeout):
raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
# See the above comment about EAGAIN in Python 3. In Python 2 we have
# to specifically catch it and throw the timeout error
if hasattr(err, 'errno') and err.errno in _blocking_errnos:
raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
# Catch possible read timeouts thrown as SSL errors. If not the
# case, rethrow the original. We need to do this because of:
# http://bugs.python.org/issue10272
if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6
raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
    def _make_request(self, conn, method, url, timeout=_Default, chunked=False,
                      **httplib_request_kw):
        """
        Perform a request on a given urllib connection object taken from our
        pool.
        :param conn:
            a connection from one of our connection pools
        :param method:
            HTTP request method (e.g. 'GET'), forwarded to the connection.
        :param url:
            Request target, forwarded to the connection unchanged.
        :param timeout:
            Socket timeout in seconds for the request. This can be a
            float or integer, which will set the same timeout value for
            the socket connect and the socket read, or an instance of
            :class:`urllib3.util.Timeout`, which gives you more fine-grained
            control over your timeouts.
        :param chunked:
            If True, send the body with chunked transfer encoding via
            ``conn.request_chunked``.
        """
        self.num_requests += 1
        timeout_obj = self._get_timeout(timeout)
        timeout_obj.start_connect()
        conn.timeout = timeout_obj.connect_timeout
        # Trigger any extra validation we need to do.
        try:
            self._validate_conn(conn)
        except (SocketTimeout, BaseSSLError) as e:
            # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
            self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
            raise
        # conn.request() calls httplib.*.request, not the method in
        # urllib3.request. It also calls makefile (recv) on the socket.
        if chunked:
            conn.request_chunked(method, url, **httplib_request_kw)
        else:
            conn.request(method, url, **httplib_request_kw)
        # Reset the timeout for the recv() on the socket
        read_timeout = timeout_obj.read_timeout
        # App Engine doesn't have a sock attr
        if getattr(conn, 'sock', None):
            # In Python 3 socket.py will catch EAGAIN and return None when you
            # try and read into the file pointer created by http.client, which
            # instead raises a BadStatusLine exception. Instead of catching
            # the exception and assuming all BadStatusLine exceptions are read
            # timeouts, check for a zero timeout before making the request.
            if read_timeout == 0:
                raise ReadTimeoutError(
                    self, url, "Read timed out. (read timeout=%s)" % read_timeout)
            if read_timeout is Timeout.DEFAULT_TIMEOUT:
                conn.sock.settimeout(socket.getdefaulttimeout())
            else:  # None or a value
                conn.sock.settimeout(read_timeout)
        # Receive the response from the server
        try:
            try:  # Python 2.7, use buffering of HTTP responses
                httplib_response = conn.getresponse(buffering=True)
            except TypeError:  # Python 2.6 and older, Python 3
                try:
                    httplib_response = conn.getresponse()
                except Exception as e:
                    # Remove the TypeError from the exception chain in Python 3;
                    # otherwise it looks like a programming error was the cause.
                    six.raise_from(e, None)
        except (SocketTimeout, BaseSSLError, SocketError) as e:
            self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
            raise
        # AppEngine doesn't have a version attr.
        http_version = getattr(conn, '_http_vsn_str', 'HTTP/?')
        log.debug("\"%s %s %s\" %s %s", method, url, http_version,
                  httplib_response.status, httplib_response.length)
        # Malformed headers are logged but deliberately not fatal.
        try:
            assert_header_parsing(httplib_response.msg)
        except HeaderParsingError as hpe:  # Platform-specific: Python 3
            log.warning(
                'Failed to parse headers (url=%s): %s',
                self._absolute_url(url), hpe, exc_info=True)
        return httplib_response
def _absolute_url(self, path):
return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
def close(self):
"""
Close all pooled connections and disable the pool.
"""
# Disable access to the pool
old_pool, self.pool = self.pool, None
try:
while True:
conn = old_pool.get(block=False)
if conn:
conn.close()
except Empty:
pass # Done.
    def is_same_host(self, url):
        """
        Check if the given ``url`` is a member of the same host as this
        connection pool.
        :param url: Absolute or host-relative URL string.
        :returns: True if ``url`` targets this pool's scheme/host/port.
        """
        # A relative URL can only refer to this pool's own host.
        if url.startswith('/'):
            return True
        # TODO: Add optional support for socket.gethostbyname checking.
        scheme, host, port = get_host(url)
        # Use explicit default port for comparison when none is given
        if self.port and not port:
            port = port_by_scheme.get(scheme)
        elif not self.port and port == port_by_scheme.get(scheme):
            port = None
        return (scheme, host, port) == (self.scheme, self.host, self.port)
    def urlopen(self, method, url, body=None, headers=None, retries=None,
                redirect=True, assert_same_host=True, timeout=_Default,
                pool_timeout=None, release_conn=None, chunked=False,
                **response_kw):
        """
        Get a connection from the pool and perform an HTTP request. This is the
        lowest level call for making a request, so you'll need to specify all
        the raw details.
        .. note::
            More commonly, it's appropriate to use a convenience method provided
            by :class:`.RequestMethods`, such as :meth:`request`.
        .. note::
            `release_conn` will only behave as expected if
            `preload_content=False` because we want to make
            `preload_content=False` the default behaviour someday soon without
            breaking backwards compatibility.
        :param method:
            HTTP request method (such as GET, POST, PUT, etc.)
        :param body:
            Data to send in the request body (useful for creating
            POST requests, see HTTPConnectionPool.post_url for
            more convenience).
        :param headers:
            Dictionary of custom headers to send, such as User-Agent,
            If-None-Match, etc. If None, pool headers are used. If provided,
            these headers completely replace any pool-specific headers.
        :param retries:
            Configure the number of retries to allow before raising a
            :class:`~urllib3.exceptions.MaxRetryError` exception.
            Pass ``None`` to retry until you receive a response. Pass a
            :class:`~urllib3.util.retry.Retry` object for fine-grained control
            over different types of retries.
            Pass an integer number to retry connection errors that many times,
            but no other types of errors. Pass zero to never retry.
            If ``False``, then retries are disabled and any exception is raised
            immediately. Also, instead of raising a MaxRetryError on redirects,
            the redirect response will be returned.
        :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
        :param redirect:
            If True, automatically handle redirects (status codes 301, 302,
            303, 307, 308). Each redirect counts as a retry. Disabling retries
            will disable redirect, too.
        :param assert_same_host:
            If ``True``, will make sure that the host of the pool requests is
            consistent else will raise HostChangedError. When False, you can
            use the pool on an HTTP proxy and request foreign hosts.
        :param timeout:
            If specified, overrides the default timeout for this one
            request. It may be a float (in seconds) or an instance of
            :class:`urllib3.util.Timeout`.
        :param pool_timeout:
            If set and the pool is set to block=True, then this method will
            block for ``pool_timeout`` seconds and raise EmptyPoolError if no
            connection is available within the time period.
        :param release_conn:
            If False, then the urlopen call will not release the connection
            back into the pool once a response is received (but will release if
            you read the entire contents of the response such as when
            `preload_content=True`). This is useful if you're not preloading
            the response's content immediately. You will need to call
            ``r.release_conn()`` on the response ``r`` to return the connection
            back into the pool. If None, it takes the value of
            ``response_kw.get('preload_content', True)``.
        :param chunked:
            If True, urllib3 will send the body using chunked transfer
            encoding. Otherwise, urllib3 will send the body using the standard
            content-length form. Defaults to False.
        :param \**response_kw:
            Additional parameters are passed to
            :meth:`urllib3.response.HTTPResponse.from_httplib`
        """
        # Fall back to pool-level defaults where the caller gave nothing.
        if headers is None:
            headers = self.headers
        if not isinstance(retries, Retry):
            retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
        if release_conn is None:
            release_conn = response_kw.get('preload_content', True)
        # Check host
        if assert_same_host and not self.is_same_host(url):
            raise HostChangedError(self, url, retries)
        conn = None
        # Track whether `conn` needs to be released before
        # returning/raising/recursing. Update this variable if necessary, and
        # leave `release_conn` constant throughout the function. That way, if
        # the function recurses, the original value of `release_conn` will be
        # passed down into the recursive call, and its value will be respected.
        #
        # See issue #651 [1] for details.
        #
        # [1] <https://github.com/shazow/urllib3/issues/651>
        release_this_conn = release_conn
        # Merge the proxy headers. Only do this in HTTP. We have to copy the
        # headers dict so we can safely change it without those changes being
        # reflected in anyone else's copy.
        if self.scheme == 'http':
            headers = headers.copy()
            headers.update(self.proxy_headers)
        # Must keep the exception bound to a separate variable or else Python 3
        # complains about UnboundLocalError.
        err = None
        # Keep track of whether we cleanly exited the except block. This
        # ensures we do proper cleanup in finally.
        clean_exit = False
        try:
            # Request a connection from the queue.
            timeout_obj = self._get_timeout(timeout)
            conn = self._get_conn(timeout=pool_timeout)
            conn.timeout = timeout_obj.connect_timeout
            is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
            if is_new_proxy_conn:
                self._prepare_proxy(conn)
            # Make the request on the httplib connection object.
            httplib_response = self._make_request(conn, method, url,
                                                  timeout=timeout_obj,
                                                  body=body, headers=headers,
                                                  chunked=chunked)
            # If we're going to release the connection in ``finally:``, then
            # the response doesn't need to know about the connection. Otherwise
            # it will also try to release it and we'll have a double-release
            # mess.
            response_conn = conn if not release_conn else None
            # Import httplib's response into our own wrapper object
            response = self.ResponseCls.from_httplib(httplib_response,
                                                     pool=self,
                                                     connection=response_conn,
                                                     **response_kw)
            # Everything went great!
            clean_exit = True
        except Empty:
            # Timed out by queue.
            raise EmptyPoolError(self, "No pool connections are available.")
        except (BaseSSLError, CertificateError) as e:
            # Close the connection. If a connection is reused on which there
            # was a Certificate error, the next request will certainly raise
            # another Certificate error.
            clean_exit = False
            raise SSLError(e)
        except SSLError:
            # Treat SSLError separately from BaseSSLError to preserve
            # traceback.
            clean_exit = False
            raise
        except (TimeoutError, HTTPException, SocketError, ProtocolError) as e:
            # Discard the connection for these exceptions. It will be
            # be replaced during the next _get_conn() call.
            clean_exit = False
            if isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
                e = ProxyError('Cannot connect to proxy.', e)
            elif isinstance(e, (SocketError, HTTPException)):
                e = ProtocolError('Connection aborted.', e)
            retries = retries.increment(method, url, error=e, _pool=self,
                                        _stacktrace=sys.exc_info()[2])
            retries.sleep()
            # Keep track of the error for the retry warning.
            err = e
        finally:
            if not clean_exit:
                # We hit some kind of exception, handled or otherwise. We need
                # to throw the connection away unless explicitly told not to.
                # Close the connection, set the variable to None, and make sure
                # we put the None back in the pool to avoid leaking it.
                conn = conn and conn.close()
                release_this_conn = True
            if release_this_conn:
                # Put the connection back to be reused. If the connection is
                # expired then it will be None, which will get replaced with a
                # fresh connection during _get_conn.
                self._put_conn(conn)
        if not conn:
            # Try again
            log.warning("Retrying (%r) after connection "
                        "broken by '%r': %s", retries, err, url)
            return self.urlopen(method, url, body, headers, retries,
                                redirect, assert_same_host,
                                timeout=timeout, pool_timeout=pool_timeout,
                                release_conn=release_conn, **response_kw)
        # Handle redirect?
        redirect_location = redirect and response.get_redirect_location()
        if redirect_location:
            if response.status == 303:
                method = 'GET'
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_redirect:
                    # Release the connection for this response, since we're not
                    # returning it to be released manually.
                    response.release_conn()
                    raise
                return response
            log.info("Redirecting %s -> %s", url, redirect_location)
            return self.urlopen(
                method, redirect_location, body, headers,
                retries=retries, redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout, pool_timeout=pool_timeout,
                release_conn=release_conn, **response_kw)
        # Check if we should retry the HTTP response.
        if retries.is_forced_retry(method, status_code=response.status):
            try:
                retries = retries.increment(method, url, response=response, _pool=self)
            except MaxRetryError:
                if retries.raise_on_status:
                    # Release the connection for this response, since we're not
                    # returning it to be released manually.
                    response.release_conn()
                    raise
                return response
            retries.sleep()
            log.info("Forced retry: %s", url)
            return self.urlopen(
                method, url, body, headers,
                retries=retries, redirect=redirect,
                assert_same_host=assert_same_host,
                timeout=timeout, pool_timeout=pool_timeout,
                release_conn=release_conn, **response_kw)
        # Success with no redirect or forced retry required.
        return response
class HTTPSConnectionPool(HTTPConnectionPool):
    """
    Same as :class:`.HTTPConnectionPool`, but HTTPS.
    When Python is compiled with the :mod:`ssl` module, then
    :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
    instead of :class:`.HTTPSConnection`.
    :class:`.VerifiedHTTPSConnection` uses one of ``assert_fingerprint``,
    ``assert_hostname`` and ``host`` in this order to verify connections.
    If ``assert_hostname`` is False, no verification is done.
    The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
    ``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
    available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
    the connection socket into an SSL socket.
    """
    scheme = 'https'
    # TLS-capable connection class; _new_conn() refuses to run when only
    # DummyConnection is available (no ssl module).
    ConnectionCls = HTTPSConnection
    def __init__(self, host, port=None,
                 strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
                 block=False, headers=None, retries=None,
                 _proxy=None, _proxy_headers=None,
                 key_file=None, cert_file=None, cert_reqs=None,
                 ca_certs=None, ssl_version=None,
                 assert_hostname=None, assert_fingerprint=None,
                 ca_cert_dir=None, **conn_kw):
        HTTPConnectionPool.__init__(self, host, port, strict, timeout, maxsize,
                                    block, headers, retries, _proxy, _proxy_headers,
                                    **conn_kw)
        # Supplying a CA bundle implies the caller wants verification on.
        if ca_certs and cert_reqs is None:
            cert_reqs = 'CERT_REQUIRED'
        self.key_file = key_file
        self.cert_file = cert_file
        self.cert_reqs = cert_reqs
        self.ca_certs = ca_certs
        self.ca_cert_dir = ca_cert_dir
        self.ssl_version = ssl_version
        self.assert_hostname = assert_hostname
        self.assert_fingerprint = assert_fingerprint
    def _prepare_conn(self, conn):
        """
        Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
        and establish the tunnel if proxy is used.
        """
        # Only verified connections carry certificate settings.
        if isinstance(conn, VerifiedHTTPSConnection):
            conn.set_cert(key_file=self.key_file,
                          cert_file=self.cert_file,
                          cert_reqs=self.cert_reqs,
                          ca_certs=self.ca_certs,
                          ca_cert_dir=self.ca_cert_dir,
                          assert_hostname=self.assert_hostname,
                          assert_fingerprint=self.assert_fingerprint)
            conn.ssl_version = self.ssl_version
        return conn
    def _prepare_proxy(self, conn):
        """
        Establish tunnel connection early, because otherwise httplib
        would improperly set Host: header to proxy's IP:port.
        """
        # Python 2.7+
        try:
            set_tunnel = conn.set_tunnel
        except AttributeError:  # Platform-specific: Python 2.6
            set_tunnel = conn._set_tunnel
        if sys.version_info <= (2, 6, 4) and not self.proxy_headers:  # Python 2.6.4 and older
            set_tunnel(self.host, self.port)
        else:
            set_tunnel(self.host, self.port, self.proxy_headers)
        conn.connect()
    def _new_conn(self):
        """
        Return a fresh :class:`httplib.HTTPSConnection`.
        """
        self.num_connections += 1
        log.info("Starting new HTTPS connection (%d): %s",
                 self.num_connections, self.host)
        if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
            raise SSLError("Can't connect to HTTPS URL because the SSL "
                           "module is not available.")
        # When tunnelling, the TCP connection goes to the proxy;
        # _prepare_proxy() later issues the CONNECT to the real host.
        actual_host = self.host
        actual_port = self.port
        if self.proxy is not None:
            actual_host = self.proxy.host
            actual_port = self.proxy.port
        conn = self.ConnectionCls(host=actual_host, port=actual_port,
                                  timeout=self.timeout.connect_timeout,
                                  strict=self.strict, **self.conn_kw)
        return self._prepare_conn(conn)
    def _validate_conn(self, conn):
        """
        Called right before a request is made, after the socket is created.
        """
        super(HTTPSConnectionPool, self)._validate_conn(conn)
        # Force connect early to allow us to validate the connection.
        if not getattr(conn, 'sock', None):  # AppEngine might not have `.sock`
            conn.connect()
        if not conn.is_verified:
            warnings.warn((
                'Unverified HTTPS request is being made. '
                'Adding certificate verification is strongly advised. See: '
                'https://urllib3.readthedocs.io/en/latest/security.html'),
                InsecureRequestWarning)
def connection_from_url(url, **kw):
    """
    Build a :class:`.ConnectionPool` for the host that *url* points at.
    This is a shortcut for not having to parse out the scheme, host, and
    port of the url before creating a pool instance.
    :param url:
        Absolute URL string that must include the scheme. Port is optional.
    :param \**kw:
        Extra keyword arguments forwarded to the pool constructor
        (timeout, maxsize, headers, ...).
    Example::
        >>> conn = connection_from_url('http://google.com/')
        >>> r = conn.request('GET', '/')
    """
    scheme, host, port = get_host(url)
    # Default to the scheme's well-known port, or 80 for unknown schemes.
    port = port or port_by_scheme.get(scheme, 80)
    pool_cls = HTTPSConnectionPool if scheme == 'https' else HTTPConnectionPool
    return pool_cls(host, port=port, **kw)
| gpl-3.0 |
minorua/QGIS | tests/src/python/test_qgsdatetimeedit.py | 12 | 1643 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsDateTimeEdit
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Denis Rouzaud'
__date__ = '2018-01-04'
__copyright__ = 'Copyright 2017, The QGIS Project'
import qgis # NOQA
from qgis.gui import QgsDateTimeEdit
from qgis.PyQt.QtCore import Qt, QDateTime
from qgis.testing import start_app, unittest
start_app()  # a QGIS application instance must exist before widgets are built
# Fixed reference timestamp shared by the tests below.
DATE = QDateTime.fromString('2018-01-01 01:02:03', Qt.ISODate)
class TestQgsDateTimeEdit(unittest.TestCase):
    """Tests for QgsDateTimeEdit NULL/invalid value handling.

    Uses the directly imported ``QgsDateTimeEdit`` name (the module imports
    it explicitly) instead of reaching through ``qgis.gui`` each time.
    """

    def testSettersGetters(self):
        """A valid value sticks, and an invalid value does not clobber it."""
        w = QgsDateTimeEdit()
        w.setAllowNull(False)
        w.setDateTime(DATE)
        self.assertEqual(DATE, w.dateTime())
        # date should remain when setting an invalid date
        w.setDateTime(QDateTime())
        self.assertEqual(DATE, w.dateTime())

    def testNullValueHandling(self):
        """NULL is only representable while allowNull is enabled."""
        w = QgsDateTimeEdit()
        w.setAllowNull(True)
        # date should be valid again when not allowing NULL values
        w.setDateTime(QDateTime())
        w.setAllowNull(False)
        self.assertTrue(w.dateTime().isValid())
        w.setAllowNull(True)
        w.setDateTime(QDateTime())
        self.assertFalse(w.dateTime().isValid())
        w.setAllowNull(False)
        self.assertTrue(w.dateTime().isValid())
if __name__ == '__main__':
    # Allow the test module to be executed directly.
    unittest.main()
| gpl-2.0 |
WolfgangAxel/ConGames | Python-Jeopardy/Editor.py | 1 | 18974 | #!/usr/bin/python
# -*- coding: iso-8859-1 -*-
import Tkinter,tkFont
import math
import glob
import os
class simpleapp_tk(Tkinter.Tk):
    def __init__(self,parent):
        """Create the editor window and build all of its widgets."""
        Tkinter.Tk.__init__(self,parent)
        self.parent = parent  # kept for reference only; Tk does not store it
        self.initialize()
    def initialize(self):
        """Build the whole UI: the default board file, the menu bar, and the
        question grids for rounds 1/2 plus the Final Jeopardy frame.

        Widget attributes are created dynamically through ``exec`` string
        building (e.g. ``self.R1.box0x1``); the strings below are the only
        record of those attribute names.
        """
        self.grid()
        self.HOME = os.getcwd()
        # First run: create ~/.PythonJeopardy and a "Default" board file.
        if not os.path.exists(self.HOME+'/.PythonJeopardy'):
            os.mkdir(self.HOME+'/.PythonJeopardy')
        if not os.path.exists(self.HOME+'/.PythonJeopardy/Default'):
            f=open(self.HOME+'/.PythonJeopardy/Default','w')
            ABP=[]
            for i,cat in enumerate(["Category 1","Category 2","Category 3","Category 4","Category 5"]):
                for ques in range(1,6):
                    exec('q="A question"')
                    exec('a="An Answer"')
                    # NOTE(review): this exec rebinds the *loop variable* i to
                    # "" (Python 2 exec can write function locals), so the
                    # C'+str(i)+' names below all collapse to plain "C" --
                    # verify this is intentional before changing it.
                    exec('i=""')
                    exec('B'+str(ques)+'=[q,a,i]')
                exec('C'+str(i)+'=[B1,B2,B3,B4,B5]')
                exec('ABP.append(["'+cat+'",C'+str(i)+'])')
            P=[100,200,300,400,500]
            board = [ABP,P]
            f.write(str(board)+'\n')
            # Round 2 uses the same questions with doubled point values.
            for i,item in enumerate(P):
                P[i]=item*2
            board = [ABP,P]
            f.write(str(board)+'\n')
            f.write("['A Category', 'A Question', 'An Answer', '']")
            f.close()
        self.ent=Tkinter.StringVar()
        self.Font = tkFont.Font(family="system",size=12)
        ##############
        # Menu bar: New / Open / Save / Round selection / point values / font.
        self.menya = Tkinter.Menu(self)
        self.menya.add_command(label="New", command=self.StartAnew)
        self.menya.OpenMenu = Tkinter.Menu(self.menya,tearoff=0)
        self.menya.savemen = Tkinter.Menu(self.menya,tearoff=0)
        self.menya.savemen.board=Tkinter.Menu(self.menya,tearoff=0)
        self.menya.savemen.board.add_command(label="Save to new file",command=self.RawSave)
        self.menya.savemen.forread=Tkinter.Menu(self.menya,tearoff=0)
        self.menya.savemen.forread.add_command(label="Save to new file",command=self.ReadSave)
        # One Open/Save entry per existing .board file. The generated
        # callbacks reference the module-level global ``app``, not ``self``.
        for i,name in enumerate(glob.glob(self.HOME+'/.PythonJeopardy/*.board')):
            if name:
                exec('def Open'+str(i)+'():'+
                     '\n app.OpenFile ="'+name+'"'+
                     '\n app.Round=1'+
                     '\n app.fileName = app.OpenFile.replace(app.HOME+"/.PythonJeopardy/","")'+
                     '\n app.fileName = app.fileName.replace(".board","")'+
                     '\n app.arrayload()')
                exec('self.menya.OpenMenu.add_command(label="'+name+'", command=Open'+str(i)+')')
                exec('def Save'+str(i)+'():'+
                     '\n app.SaveFile ="'+name+'"'+
                     '\n app.RawSave()')
                exec('self.menya.savemen.board.add_command(label="'+name+'", command=Save'+str(i)+')')
            else:
                self.OpenMenu.add_command(label="{None Found}")
        for i,name in enumerate(glob.glob(self.HOME+'/.PythonJeopardy/*.txt')):
            if name:
                exec('def SaveR'+str(i)+'():'+
                     '\n app.SaveFile ="'+name+'"'+
                     '\n app.ReadSave()')
                exec('self.menya.savemen.forread.add_command(label="'+name+'", command=SaveR'+str(i)+')')
        self.menya.add_cascade(label="Open",menu=self.menya.OpenMenu)
        self.menya.savemen.add_cascade(label="Save .board file", menu=self.menya.savemen.board)
        self.menya.savemen.add_cascade(label="Export Readable .txt File", menu=self.menya.savemen.forread)
        self.menya.Round = Tkinter.Menu(self.menya,tearoff=0)
        # loadround1/2/3 handlers are presumably defined elsewhere in the
        # class (outside this view) -- confirm before renaming.
        for Round in [1,2,3]:
            exec('self.menya.Round.add_command(label="Round '+str(Round)+'",command=self.loadround'+str(Round)+')')
        self.menya.add_cascade(label="Save",menu=self.menya.savemen)
        self.menya.add_cascade(label="Round",menu=self.menya.Round)
        self.menya.add_command(label="Change Point Values", command=self.pointass)
        self.menya.add_command(label="Auto Font Size",command=self.fontadj)
        self.config(menu=self.menya)
        ##############
        # Rounds 1 and 2: a 5x5 grid of question buttons under category
        # entries, each with a horizontal scrollbar.
        for RND in ["R1","R2"]:
            exec('self.'+RND+'=Tkinter.Frame(self)')
            for cat in range(0,5):
                exec('self.'+RND+'.cat'+str(cat)+'=Tkinter.StringVar()')
                exec('self.'+RND+'.catscroll'+str(cat)+'=Tkinter.Scrollbar(self.'+RND+')')
                exec('self.'+RND+'.catlab'+str(cat)+' = Tkinter.Entry(self.'+RND+',textvariable=self.'+RND+'.cat'+str(cat)+',font=self.Font,width='+str(self.winfo_width()/5)+',xscrollcommand=self.'+RND+'.catscroll'+str(cat)+'.set)')
                exec('self.'+RND+'.catscroll'+str(cat)+'.config(command=self.'+RND+'.catlab'+str(cat)+'.xview)')
                exec('self.'+RND+'.catscroll'+str(cat)+'.grid()')
                exec('self.'+RND+'.catlab'+str(cat)+'.grid(column='+str(cat)+',row=0,sticky="NSEW")')
                for ques in range(1,6):
                    exec('self.'+RND+'.box'+str(cat)+'x'+str(ques)+' = Tkinter.Button(self.'+RND+',command=self.reveal'+str(cat)+'x'+str(ques)+',font=self.Font,width='+str(self.winfo_width()/5)+')')
                    exec('self.'+RND+'.box'+str(cat)+'x'+str(ques)+'.grid(column='+str(cat)+',row='+str(ques)+',sticky="NSEW")')
            for i in range(0,6):
                if i<5:
                    exec('self.'+RND+'.grid_columnconfigure('+str(i)+',weight=1)')
                exec('self.'+RND+'.grid_rowconfigure('+str(i)+',weight=1)')
        self.R3 = Tkinter.Frame(self)
        ##############
        self.StartAnew()
        self.grid_columnconfigure(0,weight=1)
        self.grid_rowconfigure(0,weight=1)
        self.resizable(True,True)
        self.update()
        self.geometry(self.geometry())
####################################################################
def savegame(self):
self.savename = Tkinter.Toplevel(self)
self.savename.wm_title("Enter a name to save the file as")
self.savename.entbox=Tkinter.Entry(self.savename,textvariable=self.ent)
self.savename.entbox.grid(column=0,row=0,sticky="NSEW")
self.savename.proceed=Tkinter.Button(self.savename,text="Save",command=self.arraysave)
self.savename.proceed.grid(column=1,row=0,sticky="NSEW")
    def arrayload(self):
        """Read ``self.OpenFile`` and populate all three rounds' widgets.

        The file contains three lines: round-1 board, round-2 board, and the
        Final Jeopardy entry, each a Python literal parsed with ``eval``.
        NOTE(review): eval/exec on file contents executes arbitrary code if
        the board file is untrusted -- confirm files are always local.
        """
        self.ent.set(self.fileName)
        f=open(self.OpenFile,'r')
        self.sepf = f.readlines()
        for Round in [1,2]:
            self.clusterfuck = {}
            # One [ABP, P] board literal per round.
            self.clusterfuck = eval(str(self.sepf[Round-1]))
            self.P=eval(str(self.clusterfuck[1]))
            for cat in range(0,5):
                exec('self.R'+str(Round)+'.cat'+str(cat)+'.set("'+str(eval(str(eval(str(self.clusterfuck[0]))[cat]))[0])+'")')
                for ques in range(1,6):
                    # Per-question editor frame (edCATxQUESRn) holding the
                    # question/answer entries and the image toggle.
                    exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+' = Tkinter.Frame(self)')
                    exec('self.R'+str(Round)+'.box'+str(cat)+'x'+str(ques)+'.config(text=eval(str(self.P['+str(ques-1)+'])))')
                    ##################################
                    exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.imagefile = Tkinter.StringVar()')
                    exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.imagecheck=Tkinter.Button(self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+',text="Add image",command=self.changerelief,font=self.Font)')
                    exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.imagecheck.grid(row=1,column=2,sticky="NSEW")')
                    for i,name in enumerate(["question","answer","image"]):
                        if name == "image":
                            # Stored image path present: flag the cell and let
                            # changerelief() restore the button state.
                            if eval('str(eval(str(eval(str(eval(str(eval(str(self.clusterfuck[0]))['+str(cat)+']))[1]))['+str(ques-1)+']))['+str(i)+'])'):
                                self.CAT,self.QUES,self.Round=cat,ques,Round
                                self.changerelief()
                        else:
                            exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.'+name+' = Tkinter.StringVar()')
                            exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.'+name+'.set(str(eval(str(eval(str(eval(str(eval(str(self.clusterfuck[0]))['+str(cat)+']))[1]))['+str(ques-1)+']))['+str(i)+']))')
                            exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.'+name+'entry = Tkinter.Entry(self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+',textvariable=self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.'+name+',font=self.Font)')
                            exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.'+name+'entry.grid(column='+str(i)+',row=0,sticky="NSEW")')
                    exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.RtB=Tkinter.Button(self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+',text="Return to board",command=self.returntoboard,font=self.Font)')
                    exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.RtB.grid(row=2,column=0,columnspan=3,sticky="NSEW")')
                    for x in range(0,2):
                        exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.columnconfigure('+str(x)+',weight=1)')
                        exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.rowconfigure('+str(x)+',weight=1)')
                    exec('self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.rowconfigure(2,weight=1)')
        ##################################
        # Final Jeopardy: third line is [category, question, answer, image].
        self.clusterfuck = {}
        self.clusterfuck = eval(str(self.sepf[2]))
        self.R3.category = Tkinter.StringVar()
        self.R3.category.set(self.clusterfuck[0])
        self.R3.categoryentry = Tkinter.Entry(self.R3,textvariable=self.R3.category,font=self.Font)
        self.R3.categoryentry.grid(column=0,columnspan=3,row=0,sticky="NSEW")
        for i,name in enumerate(["question","answer"]):
            exec('self.R3.'+name+' = Tkinter.StringVar()')
            exec('self.R3.'+name+'.set(self.clusterfuck['+str(i+1)+'])')
            exec('self.R3.'+name+'entry = Tkinter.Entry(self.R3,textvariable=self.R3.'+name+',font=self.Font)')
            exec('self.R3.'+name+'entry.grid(column='+str(i)+',row=1,sticky="NSEW")')
        self.R3.pointsentry = Tkinter.Label(self.R3,text="Final Jeopardy",font=self.Font)
        self.R3.pointsentry.grid(column=2,row=1,sticky="NSEW")
        self.R3.imagefile = Tkinter.StringVar()
        self.R3.imagecheck=Tkinter.Button(self.R3,text="Add image",command=self.changerelief,font=self.Font)
        if self.clusterfuck[3]:
            self.R3.imagecheck.config(relief="sunken")
            self.R3.imagefile.set(str(self.clusterfuck[3]))
        self.R3.imagecheck.grid(row=2,column=2,sticky="NSEW")
        for x in range(0,2):
            exec('self.R3.columnconfigure('+str(x)+',weight=1)')
            exec('self.R3.rowconfigure('+str(x)+',weight=1)')
        self.R3.rowconfigure(2,weight=1)
        # Start the editor on round 1 of the freshly loaded board.
        self.Round = 1
        self.clusterfuck = eval(str(self.sepf[0]))
        self.P=eval(str(self.clusterfuck[1]))
        self.roundload()
        f.close()
def roundload(self):
exec('self.R'+str(self.Round)+'.grid(column=0,row=0,sticky="NSEW")')
def RawSave(self):
if self.SaveFile:
self.fileName = self.SaveFile.replace(self.HOME+"/.PythonJeopardy/","")
self.fileName = self.fileName.replace(".board","")
self.ent.set(self.fileName)
self.extension = ".board"
self.newSaveName()
def ReadSave(self):
if self.SaveFile:
self.fileName = self.SaveFile.replace(self.HOME+"/.PythonJeopardy/","")
self.fileName = self.fileName.replace(".txt","")
self.ent.set(self.fileName)
self.extension = ".txt"
self.newSaveName()
    def newSaveName(self):
        """Pop up a one-row dialog asking for the file name to save under."""
        self.typebox = Tkinter.Toplevel(self)
        self.typebox.label = Tkinter.Label(self.typebox,text="Save file at: %s/.PythonJeopardy/" % self.HOME,font=self.Font)
        self.typebox.label.grid(row=0,column=0,sticky="NSEW")
        # Entry shares self.ent with preSave(), which reads it on "Save".
        self.typebox.entry = Tkinter.Entry(self.typebox,textvariable=self.ent)
        self.typebox.entry.grid(row=0,column=1,sticky="NSEW")
        self.typebox.labelEx = Tkinter.Label(self.typebox,text="%s" % self.extension)
        self.typebox.labelEx.grid(row=0,column=2,sticky="NSEW")
        self.typebox.button = Tkinter.Button(self.typebox,text="Save",command=self.preSave)
        self.typebox.button.grid(row=0,column=3,sticky="NSEW")
        self.typebox.update()
        # Freeze the geometry at its natural size so the dialog can't shrink.
        self.typebox.geometry(self.typebox.geometry())
def preSave(self):
self.fileName = self.ent.get() + self.extension
self.startSave()
def startSave(self):
self.SaveFile = self.HOME + "/.PythonJeopardy/" + self.fileName
self.arraysave()
try:
self.typebox.destroy()
except:
annoying = True
    def arraysave(self):
        """Serialize the edited board to self.SaveFile.

        A name without ".txt" gets the machine-readable .board format (one
        eval()-able Python literal per round); otherwise a human-readable
        text dump is written instead.
        """
        f=open(self.SaveFile,'w')
        if self.fileName == self.fileName.replace(".txt",""):
            for Round in [1,2]:
                ABP=[]
                for cat in range(0,5):
                    for ques in range(1,6):
                        # Read question/answer out of the per-cell editor
                        # widgets (attributes named edCxQRn) via exec.
                        exec('q=self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.question.get()')
                        q=q.replace('"','\"')
                        q=q.replace("'","\'")
                        exec('a=self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.answer.get()')
                        a=a.replace('"','\"')
                        a=a.replace("'","\'")
                        # The placeholder prompt text counts as "no image".
                        exec('if self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.imagefile.get():'+
                             '\n if self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.imagefile.get() != "Type full path to image here":'
                             '\n  i=self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.imagefile.get()'
                             '\n else:'+
                             '\n  i=""'+
                             '\nelse:'+
                             '\n i=""')
                        i=i.replace('"','\"')
                        i=i.replace("'","\'")
                        exec('B'+str(ques)+'=[q,a,i]')
                    # B1..B5 exist once the inner loop finishes.
                    exec('C'+str(cat)+'=[B1,B2,B3,B4,B5]')
                    exec('ABP.append([self.R'+str(Round)+'.cat'+str(cat)+'.get(),C'+str(cat)+'])')
                Pn=[]
                # Round 2 doubles every point value.
                for i,item in enumerate(self.P):
                    Pn.append(item*Round)
                board = [ABP,Pn]
                f.write(str(board)+'\n')
            # Final Jeopardy: single [category, question, answer, image] list.
            c=self.R3.category.get()
            c=c.replace('"','\"')
            c=c.replace("'","\'")
            q=self.R3.question.get()
            q=q.replace('"','\"')
            q=q.replace("'","\'")
            a=self.R3.answer.get()
            a=a.replace('"','\"')
            a=a.replace("'","\'")
            i=self.R3.imagefile.get()
            i=i.replace('"','\"')
            i=i.replace("'","\'")
            ABP=[c,q,a,i]
            f.write(str(ABP))
        else:
            ################### I spent entirely too much time making this.
            f.writelines( " ____ __ __ ______ __ __ ____ __ __"+"\n"+
                          " / O |\ \/ //_ __// /_/ // __ | / \/ /"+"\n"+
                          " / ___/ _\ / / / / __ // /_/ // /\ /"+"\n"+
                          " /_/ /___/ /_/ /_/ /_/ |____//_/ /_/"+"\n"+
                          " ________ ______ ____ _____ ___ _____ _____ __ ___"+"\n"+
                          " /__ __// ___// _ \ / __ \ / | / __ \ / __ \ \ \ / /"+"\n"+
                          " / / / /__ / / | | / /_/ |/ o | / /_/ | / / | | \ \/ /"+"\n"+
                          " __ / / / ___// / / / / ____// _ | / _ / / / / /_ \ /"+"\n"+
                          " / /_/ / / /__ | |_/ / / / / / | | / / | | / /__/ /| |_/ /"+"\n"+
                          " \_____/ /_____/ \____/ /__/ /__/ |_|/__/ |_|/_______/ \____/"+"\n\n\n")
            for Round in [1,2,3]:
                if Round <3:
                    f.writelines(" X><><><><><><><><X\n X Round #%s X\n X><><><><><><><><X\n\n" % Round)
                    for cat in range(0,5):
                        exec('f.writelines("Category: "+self.R'+str(Round)+'.cat'+str(cat)+'.get()+"\\n")')
                        for ques in range(1,6):
                            exec('f.writelines(str(self.P['+str(ques-1)+']*'+str(Round)+')+": "+self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.question.get()+"\\n")')
                            exec('f.writelines(" Answer: "+self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.answer.get()+"\\n")')
                            # NOTE(review): the inner check uses self.Round
                            # while the outer uses the loop's Round — looks
                            # like an inconsistency; confirm before changing.
                            exec('if self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.imagefile.get():'+
                                 '\n if self.ed'+str(cat)+'x'+str(ques)+'R'+str(self.Round)+'.imagefile.get() != "Type full path to image here":'
                                 '\n  f.writelines(" Image: "+self.ed'+str(cat)+'x'+str(ques)+'R'+str(Round)+'.imagefile.get()+"\\n")')
                        f.writelines("\n")
                else:
                    f.writelines(" X><><><><><><><><X\n X FINAL JEOPARDY X\n X><><><><><><><><X\n\nCategory: %s\n Question: %s\n Answer: %s" % (self.R3.category.get(),self.R3.question.get(),self.R3.answer.get()))
                    if self.R3.imagefile.get():
                        if self.R3.imagefile.get() != "Type full path to image here":
                            f.writelines("\n Image: %s" % self.R3.imagefile.get())
        f.close()
    def pointass(self):
        """Open a popup with five entry boxes for per-row point values."""
        self.pointbox = Tkinter.Toplevel(self)
        self.pointbox.wm_title("Enter point values")
        self.pointbox.menya = Tkinter.Menu(self.pointbox)
        self.pointbox.menya.add_command(label="Update Points", command=self.pointsave)
        self.pointbox.menya.add_command(label="Close", command=self.annoying)
        self.pointbox.config(menu=self.pointbox.menya)
        # Build one IntVar + Entry per question row (points0..points4) via exec.
        for i in range(0,5):
            exec('self.pointbox.points'+str(i)+'=Tkinter.IntVar()')
            exec('self.pointbox.points'+str(i)+'entry=Tkinter.Entry(self.pointbox,textvariable=self.pointbox.points'+str(i)+')')
            exec('self.pointbox.points'+str(i)+'entry.grid(column=0,row='+str(i)+',sticky="NSEW")')
    def annoying(self):
        """Close the point-entry popup (menu 'Close' callback)."""
        self.pointbox.destroy()
    def pointsave(self):
        """Read the five point entries back into self.P and refresh the board labels."""
        # NOTE(review): the j-loop re-reads all five entries once per column
        # i, which looks redundant but harmless — confirm before simplifying.
        for i in range(1,6):
            for j in range(0,5):
                exec('self.P['+str(j)+']=int(self.pointbox.points'+str(j)+'entry.get())')
                # Displayed value is scaled by the current round number.
                exec('self.R'+str(self.Round)+'.box'+str(j)+'x'+str(i)+'.config(text=eval(str(self.P['+str(i-1)+']*'+str(self.Round)+')))')
    # Generate loadround1/loadround2 at class-creation time via exec: each
    # hides the current round's frame, re-reads that round's data from the
    # parsed save file (self.sepf), restores the point list and shows the
    # newly selected round.
    for Round in [1,2]:
        exec('def loadround'+str(Round)+'(self):'+
             '\n exec("self.R"+str(self.Round)+".grid_remove()")'+
             '\n self.clusterfuck = eval(str(self.sepf['+str(Round-1)+']))'+
             '\n self.P=eval(str(self.clusterfuck[1]))'+
             '\n self.Round = '+str(Round)+
             '\n self.roundload()')
def loadround3(self):
exec("self.R"+str(self.Round)+".grid_remove()")
self.clusterfuck = eval(str(self.sepf[2]))
self.Round = 3
self.roundload()
    def fontadj(self):
        """Scale the shared font and label wrap widths to the window width."""
        ws=self.winfo_width()
        self.Font.config(size=int(math.ceil(ws/60)))
        # Final Jeopardy (round 3) has no board grid to adjust.
        if self.Round <3:
            for cat in range(0,5):
                for ques in range(1,6):
                    exec('self.R'+str(self.Round)+'.box'+str(cat)+'x'+str(ques)+'.config(wraplength='+str(int(math.ceil(ws/5)))+')')
                exec('self.R'+str(self.Round)+'.catlab'+str(cat)+'.config(width='+str(int(math.ceil(ws/5)))+')')
    # Generate revealCxQ callbacks (one per board cell) at class-creation
    # time via exec: each records the selected category/question/points and
    # then calls the shared reveal() to show that cell's editor frame.
    for cat in range(0,5):
        for ques in range(1,6):
            exec('def reveal'+str(cat)+'x'+str(ques)+'(self):'+
                 '\n self.CAT='+str(cat)+
                 '\n self.QUES='+str(ques)+
                 '\n self.PTS=self.P['+str(ques-1)+']'+
                 '\n self.reveal()')
    def reveal(self):
        """Hide the board and show the editor frame for the selected cell."""
        exec('self.R'+str(self.Round)+'.grid_remove()')
        # Attach a points label to the cell's editor frame, then display it.
        exec('self.ed'+str(self.CAT)+'x'+str(self.QUES)+'R'+str(self.Round)+'.pointsentry = Tkinter.Label(self.ed'+str(self.CAT)+'x'+str(self.QUES)+'R'+str(self.Round)+',text=eval(str(self.P['+str(self.QUES-1)+'])),font=self.Font)')
        exec('self.ed'+str(self.CAT)+'x'+str(self.QUES)+'R'+str(self.Round)+'.pointsentry.grid(column=2,row=0,sticky="NSEW")')
        exec('self.ed'+str(self.CAT)+'x'+str(self.QUES)+'R'+str(self.Round)+'.grid(row=0,column=0,sticky="NSEW")')
    def changerelief(self):
        """Toggle the 'Add image'/'Remove image' button and its path entry.

        FRAME names (as a string, for eval/exec) either the current cell's
        editor frame or the Final Jeopardy frame R3.
        """
        if self.Round<3:
            exec('FRAME = "self.ed'+str(self.CAT)+'x'+str(self.QUES)+'R'+str(self.Round)+'"')
        else:
            FRAME = "self.R3"
        # "raised" means no image yet: sink the button and show the entry.
        if eval(FRAME+'.imagecheck.config("relief")[-1]') == "raised":
            exec(FRAME+'.imagecheck.config(relief="sunken",text="Remove image")')
            # Pre-fill the entry from the saved board data (the deeply nested
            # eval chain digs the image path out of self.clusterfuck), or use
            # the placeholder prompt when no path was saved.
            exec('if not str(eval(str(eval(str(eval(str(eval(str(self.clusterfuck[0]))['+str(self.CAT)+']))[1]))['+str(self.QUES-1)+']))[2]):'+
                 '\n '+FRAME+'.imagefile.set("Type full path to image here")'+
                 '\nelse:'+
                 '\n '+FRAME+'.imagefile.set(str(eval(str(eval(str(eval(str(eval(str(self.clusterfuck[0]))['+str(self.CAT)+']))[1]))['+str(self.QUES-1)+']))[2]))')
            exec(FRAME+'.imageentry = Tkinter.Entry('+FRAME+',textvariable='+FRAME+'.imagefile,font=self.Font)')
            if self.Round<3:
                exec(FRAME+'.imageentry.grid(row=1,column=0,columnspan=2,sticky="NSEW")')
            else:
                exec(FRAME+'.imageentry.grid(row=2,column=0,columnspan=2,sticky="NSEW")')
        else:
            # Button was sunken: clear the image and hide the entry again.
            exec(FRAME+'.imagecheck.config(relief="raised",text="Add image")')
            exec(FRAME+'.imageentry.grid_remove()')
            exec(FRAME+'.imagefile.set("")')
def returntoboard(self):
exec('self.ed'+str(self.CAT)+'x'+str(self.QUES)+'R'+str(self.Round)+'.grid_remove()')
exec('self.R'+str(self.Round)+'.grid(column=0,row=0,sticky="NSEW")')
def StartAnew(self):
self.OpenFile = self.HOME+"/.PythonJeopardy/Default"
self.fileName=""
self.SaveFile = ""
self.arrayload()
if __name__ == "__main__":
    # Launch the board editor GUI; blocks in the Tk event loop until closed.
    app = simpleapp_tk(None)
    app.title('Jeopardy Editor')
    app.mainloop()
| mit |
ltilve/ChromiumGStreamerBackend | third_party/mojo/src/mojo/public/tools/bindings/mojom_bindings_generator.py | 3 | 8427 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""The frontend for the Mojo bindings system."""
import argparse
import imp
import os
import pprint
import sys
# Disable lint check for finding modules:
# pylint: disable=F0401
def _GetDirAbove(dirname):
  """Returns the directory "above" this file containing |dirname| (which must
  also be "above" this file)."""
  path = os.path.abspath(__file__)
  while True:
    path, tail = os.path.split(path)
    # Fires if we hit the filesystem root (tail == '') without finding
    # |dirname|, rather than looping forever on ('', '').
    assert tail
    if tail == dirname:
      return path
# Manually check for the command-line flag. (This isn't quite right, since it
# ignores, e.g., "--", but it's close enough.)
if "--use_bundled_pylibs" in sys.argv[1:]:
  sys.path.insert(0, os.path.join(_GetDirAbove("public"), "public/third_party"))

# The bundled pylib dir is inserted after the third_party path above, so it
# takes precedence on sys.path (insert(0) puts it first).
sys.path.insert(0, os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                "pylib"))
from mojom.error import Error
import mojom.fileutil as fileutil
from mojom.generate.data import OrderedModuleFromData
from mojom.parse.parser import Parse
from mojom.parse.translate import Translate
def LoadGenerators(generators_string):
  """Load generator modules named in the comma-separated |generators_string|.

  Each name is either one of the known built-in generators or a path to a
  .py file; anything else is a fatal error. Returns the list of loaded
  modules (empty for an empty/None string).
  """
  if not generators_string:
    return []  # No generators.

  script_dir = os.path.dirname(os.path.abspath(__file__))
  # Table of "built-in" generator names -> implementation module file.
  # (Replaces the previous repetitive if/elif chain.)
  builtin_generators = {
      "c++": "mojom_cpp_generator.py",
      "dart": "mojom_dart_generator.py",
      "go": "mojom_go_generator.py",
      "javascript": "mojom_js_generator.py",
      "java": "mojom_java_generator.py",
      "python": "mojom_python_generator.py",
  }
  generators = []
  for generator_name in [s.strip() for s in generators_string.split(",")]:
    lowered = generator_name.lower()
    if lowered in builtin_generators:
      generator_name = os.path.join(script_dir, "generators",
                                    builtin_generators[lowered])
    elif not generator_name.endswith(".py"):
      # Not built-in and not an explicit python module path.
      print("Unknown generator name %s" % generator_name)
      sys.exit(1)
    generator_module = imp.load_source(os.path.basename(generator_name)[:-3],
                                       generator_name)
    generators.append(generator_module)
  return generators
def MakeImportStackMessage(imported_filename_stack):
  """Render a human-readable import chain from a stack of filenames.

  The result begins with a newline when non-empty and never ends with one.
  """
  lines = []
  for child, parent in zip(imported_filename_stack[1:],
                           imported_filename_stack):
    lines.append("\n  %s was imported by %s" % (child, parent))
  lines.reverse()
  return ''.join(lines)
def FindImportFile(dir_name, file_name, search_dirs):
  """Locate |file_name| next to |dir_name| or in any of |search_dirs|.

  Falls back to the path relative to |dir_name| even when no candidate
  exists, so the caller reports a sensible missing-file error.
  """
  candidates = (os.path.join(d, file_name) for d in [dir_name] + search_dirs)
  for path in candidates:
    if os.path.isfile(path):
      return path
  return os.path.join(dir_name, file_name)
class MojomProcessor(object):
  """Parses mojom files (plus their imports) and drives code generation.

  Caches both parse trees and generated modules so each file is handled
  exactly once, even when imported from several places.
  """
  def __init__(self, should_generate):
    # Predicate deciding whether code is emitted for a given filename
    # (imports are parsed but typically not generated).
    self._should_generate = should_generate
    self._processed_files = {}  # filename -> generated module
    self._parsed_files = {}     # filename -> parse tree

  def ProcessFile(self, args, remaining_args, generator_modules, filename):
    """Parse |filename| and all imports, then generate its module."""
    self._ParseFileAndImports(filename, args.import_directories, [])

    return self._GenerateModule(args, remaining_args, generator_modules,
        filename)

  def _GenerateModule(self, args, remaining_args, generator_modules, filename):
    """Translate a parsed file to a module, recursing into imports first."""
    # Return the already-generated module.
    if filename in self._processed_files:
      return self._processed_files[filename]

    tree = self._parsed_files[filename]

    dirname, name = os.path.split(filename)
    mojom = Translate(tree, name)
    if args.debug_print_intermediate:
      pprint.PrettyPrinter().pprint(mojom)

    # Process all our imports first and collect the module object for each.
    # We use these to generate proper type info.
    for import_data in mojom['imports']:
      import_filename = FindImportFile(dirname,
                                       import_data['filename'],
                                       args.import_directories)
      import_data['module'] = self._GenerateModule(
          args, remaining_args, generator_modules, import_filename)

    module = OrderedModuleFromData(mojom)

    # Set the path as relative to the source root.
    module.path = os.path.relpath(os.path.abspath(filename),
                                  os.path.abspath(args.depth))

    # Normalize to unix-style path here to keep the generators simpler.
    module.path = module.path.replace('\\', '/')

    if self._should_generate(filename):
      for generator_module in generator_modules:
        generator = generator_module.Generator(module, args.output_dir)
        filtered_args = []
        # Generators may claim a prefix; only pass them their own flags.
        if hasattr(generator_module, 'GENERATOR_PREFIX'):
          prefix = '--' + generator_module.GENERATOR_PREFIX + '_'
          filtered_args = [arg for arg in remaining_args
                           if arg.startswith(prefix)]
        generator.GenerateFiles(filtered_args)

    # Save result.
    self._processed_files[filename] = module
    return module

  def _ParseFileAndImports(self, filename, import_directories,
      imported_filename_stack):
    """Parse |filename| and, recursively, every file it imports.

    |imported_filename_stack| tracks the active import chain so circular
    dependencies are reported with a readable trace.
    """
    # Ignore already-parsed files.
    if filename in self._parsed_files:
      return
    if filename in imported_filename_stack:
      print "%s: Error: Circular dependency" % filename + \
          MakeImportStackMessage(imported_filename_stack + [filename])
      sys.exit(1)

    try:
      with open(filename) as f:
        source = f.read()
    except IOError as e:
      print "%s: Error: %s" % (e.filename, e.strerror) + \
          MakeImportStackMessage(imported_filename_stack + [filename])
      sys.exit(1)

    try:
      tree = Parse(source, filename)
    except Error as e:
      full_stack = imported_filename_stack + [filename]
      print str(e) + MakeImportStackMessage(full_stack)
      sys.exit(1)

    dirname = os.path.split(filename)[0]
    for imp_entry in tree.import_list:
      import_filename = FindImportFile(dirname,
          imp_entry.import_filename, import_directories)
      self._ParseFileAndImports(import_filename, import_directories,
          imported_filename_stack + [filename])

    self._parsed_files[filename] = tree
def main():
  """Parse command-line args, load generators, and process each mojom file."""
  parser = argparse.ArgumentParser(
      description="Generate bindings from mojom files.")
  parser.add_argument("filename", nargs="+",
                      help="mojom input file")
  parser.add_argument("-d", "--depth", dest="depth", default=".",
                      help="depth from source root")
  parser.add_argument("-o", "--output_dir", dest="output_dir", default=".",
                      help="output directory for generated files")
  parser.add_argument("-g", "--generators", dest="generators_string",
                      metavar="GENERATORS",
                      default="c++,go,javascript,java,python",
                      help="comma-separated list of generators")
  parser.add_argument("--debug_print_intermediate", action="store_true",
                      help="print the intermediate representation")
  parser.add_argument("-I", dest="import_directories", action="append",
                      metavar="directory", default=[],
                      help="add a directory to be searched for import files")
  parser.add_argument("--use_bundled_pylibs", action="store_true",
                      help="use Python modules bundled in the SDK")
  # parse_known_args leaves generator-specific flags in remaining_args.
  (args, remaining_args) = parser.parse_known_args()

  generator_modules = LoadGenerators(args.generators_string)

  fileutil.EnsureDirectoryExists(args.output_dir)

  # Only emit code for files named on the command line; their imports are
  # parsed for type info but not generated.
  processor = MojomProcessor(lambda filename: filename in args.filename)
  for filename in args.filename:
    processor.ProcessFile(args, remaining_args, generator_modules, filename)

  return 0

if __name__ == "__main__":
  sys.exit(main())
| bsd-3-clause |
sam-m888/gramps | gramps/gen/filters/rules/family/_regexpfathername.py | 5 | 1856 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2002-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# Standard Python modules
#
#-------------------------------------------------------------------------
from ....const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
#-------------------------------------------------------------------------
#
# Gramps modules
#
#-------------------------------------------------------------------------
from ..person import RegExpName
from ._memberbase import father_base
#-------------------------------------------------------------------------
#
# RegExpFatherName
#
#-------------------------------------------------------------------------
class RegExpFatherName(RegExpName):
    """Rule that checks for full or partial name matches"""

    name = _('Families with father matching the <regex_name>')
    description = _("Matches families whose father has a name "
                    "matching a specified regular expression")
    category = _('Father filters')
    base_class = RegExpName
    # Re-use the person-level RegExpName matching, but apply it to the
    # family's father via the shared father_base helper from _memberbase.
    apply = father_base
| gpl-2.0 |
Shivam-Miglani/AndroidViewClient | examples/monkeyrunner-issue-36544-workaround.py | 9 | 2846 | #! /usr/bin/env python
'''
Copyright (C) 2012 Diego Torres Milano
Created on Sep 8, 2012
@author: diego
@see: http://code.google.com/p/android/issues/detail?id=36544
'''
import re
import sys
import os
# This must be imported before MonkeyRunner and MonkeyDevice,
# otherwise the import fails.
# PyDev sets PYTHONPATH, use it
try:
for p in os.environ['PYTHONPATH'].split(':'):
if not p in sys.path:
sys.path.append(p)
except:
pass
try:
sys.path.append(os.path.join(os.environ['ANDROID_VIEW_CLIENT_HOME'], 'src'))
except:
pass
from com.dtmilano.android.viewclient import ViewClient, View
device, serialno = ViewClient.connectToDeviceOrExit()
FLAG_ACTIVITY_NEW_TASK = 0x10000000
# We are not using Settings as the bug describes because there's no WiFi dialog in emulator
#componentName = 'com.android.settings/.Settings'
componentName = 'com.dtmilano.android.sampleui/.MainActivity'
device.startActivity(component=componentName, flags=FLAG_ACTIVITY_NEW_TASK)
ViewClient.sleep(3)
# Set it to True or False to decide if AndroidViewClient or plain monkeyrunner is used
USE_AVC = True
if USE_AVC:
# AndroidViewClient
vc = ViewClient(device=device, serialno=serialno)
showDialogButton = vc.findViewById('id/show_dialog_button')
if showDialogButton:
showDialogButton.touch()
vc.dump()
vc.findViewById('id/0x123456').type('Donald')
ok = vc.findViewWithText('OK')
if ok:
# 09-08 20:17:47.860: D/MonkeyStub(2033): translateCommand: tap 265 518
ok.touch()
vc.dump()
hello = vc.findViewById('id/hello')
if hello:
if hello.getText() == "Hello Donald":
print "OK"
else:
print "FAIL"
else:
print >> sys.stderr, "'hello' not found"
else:
print >> sys.stderr, "'Show Dialog' button not found"
else:
# MonkeyRunner
from com.android.monkeyrunner.easy import EasyMonkeyDevice
from com.android.monkeyrunner.easy import By
easyDevice = EasyMonkeyDevice(device)
showDialogButton = By.id('id/show_dialog_button')
if showDialogButton:
easyDevice.touch(showDialogButton, MonkeyDevice.DOWN_AND_UP)
ViewClient.sleep(3)
editText = By.id('id/0x123456')
print editText
easyDevice.type(editText, 'Donald')
ViewClient.sleep(3)
ok = By.id('id/button1')
if ok:
# 09-08 20:16:41.119: D/MonkeyStub(1992): translateCommand: tap 348 268
easyDevice.touch(ok, MonkeyDevice.DOWN_AND_UP)
hello = By.id('id/hello')
if hello:
if easyDevice.getText(hello) == "Hello Donald":
print "OK"
else:
print "FAIL"
else:
print >> sys.stderr, "'hello' not found"
| apache-2.0 |
atramos/facebook-photo-sync | facebook/requests/packages/urllib3/util/request.py | 1008 | 2089 | from base64 import b64encode
from ..packages.six import b
ACCEPT_ENCODING = 'gzip,deflate'


def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
                 basic_auth=None, proxy_basic_auth=None, disable_cache=None):
    """
    Build a dict of common request headers.

    :param keep_alive:
        If ``True``, adds 'connection: keep-alive' header.

    :param accept_encoding:
        ``True`` maps to the default 'gzip,deflate'; a list is joined with
        commas; a string is used verbatim.

    :param user_agent:
        String to send as the user-agent header.

    :param basic_auth:
        Colon-separated username:password string for 'authorization: basic ...'
        auth header.

    :param proxy_basic_auth:
        Colon-separated username:password string for 'proxy-authorization: basic ...'
        auth header.

    :param disable_cache:
        If ``True``, adds 'cache-control: no-cache' header.

    Example::

        >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
        {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
        >>> make_headers(accept_encoding=True)
        {'accept-encoding': 'gzip,deflate'}
    """
    headers = {}

    if accept_encoding:
        if isinstance(accept_encoding, list):
            accept_encoding = ','.join(accept_encoding)
        elif not isinstance(accept_encoding, str):
            # Any other truthy value (e.g. True) selects the default.
            accept_encoding = ACCEPT_ENCODING
        headers['accept-encoding'] = accept_encoding

    if user_agent:
        headers['user-agent'] = user_agent

    if keep_alive:
        headers['connection'] = 'keep-alive'

    if basic_auth:
        headers['authorization'] = 'Basic ' + \
            b64encode(b(basic_auth)).decode('utf-8')

    if proxy_basic_auth:
        headers['proxy-authorization'] = 'Basic ' + \
            b64encode(b(proxy_basic_auth)).decode('utf-8')

    if disable_cache:
        headers['cache-control'] = 'no-cache'

    return headers
| gpl-3.0 |
sankhesh/VTK | Examples/Infovis/Python/tables1.py | 13 | 1485 | #!/usr/bin/env python
"""
This file contains Python code illustrating the creation and manipulation of
vtkTable objects.
"""
from __future__ import print_function
from vtk import *
#------------------------------------------------------------------------------
# Script Entry Point (i.e., main() )
#------------------------------------------------------------------------------
if __name__ == "__main__":
    """ Main entry point of this python script """
    # NOTE: the string above is an ordinary no-op statement, not a docstring
    # (docstrings only attach to modules/classes/functions).
    print("vtkTable Example 1: Building a vtkTable from scratch.")

    #----------------------------------------------------------
    # Create an empty table
    T = vtkTable()

    #----------------------------------------------------------
    # Create Column 1 (IDs) — seven integer rows, 1..7
    col1 = vtkIntArray()
    col1.SetName("ID")
    for i in range(1, 8):
        col1.InsertNextValue(i)
    T.AddColumn(col1)

    #----------------------------------------------------------
    # Create Column 2 (Names) — one string per row, same row count
    namesList = ['Bob', 'Ann', 'Sue', 'Bill', 'Joe', 'Jill', 'Rick']
    col2 = vtkStringArray()
    col2.SetName("Name")
    for val in namesList:
        col2.InsertNextValue(val)
    T.AddColumn(col2)

    #----------------------------------------------------------
    # Create Column 3 (Ages)
    agesList = [12, 25, 72, 11, 31, 36, 32]
    col3 = vtkIntArray()
    col3.SetName("Age")
    for val in agesList:
        col3.InsertNextValue(val)
    T.AddColumn(col3)

    # Print the table contents (column width 6) to stdout.
    T.Dump(6)

    print("vtkTable Example 1: Finished.")
| bsd-3-clause |
rjschof/gem5 | src/arch/micro_asm.py | 86 | 14724 | # Copyright (c) 2003-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
import os
import sys
import re
import string
import traceback
# get type names
from types import *
from ply import lex
from ply import yacc
##########################################################################
#
# Base classes for use outside of the assembler
#
##########################################################################
class Micro_Container(object):
    """Ordered collection of microops plus the labels/directives that apply
    to them; base class for macroops and the microcode ROM."""

    def __init__(self, name):
        self.name = name
        self.microops = []
        self.directives = {}
        self.micro_classes = {}
        self.labels = {}

    def add_microop(self, mnemonic, microop):
        # The mnemonic is accepted for interface compatibility but only the
        # microop object itself is stored.
        self.microops.append(microop)

    def __str__(self):
        pieces = ["%s:\n" % self.name]
        for microop in self.microops:
            pieces.append("  %s\n" % microop)
        return "".join(pieces)
class Combinational_Macroop(Micro_Container):
    """Macroop expanded inline from its own microop list (not ROM-based)."""
    pass
class Rom_Macroop(object):
    """Macroop that simply jumps to a target label in the microcode ROM."""

    def __init__(self, name, target):
        # Jump target label in the ROM, plus this macroop's own name.
        self.target = target
        self.name = name

    def __str__(self):
        return "%s: %s\n" % (self.name, self.target)
class Rom(Micro_Container):
    """The microcode ROM: a microop container that also tracks labels
    exported ("extern") for macroops to jump to."""
    def __init__(self, name):
        super(Rom, self).__init__(name)
        # label text -> microop, for labels visible outside the ROM.
        self.externs = {}
##########################################################################
#
# Support classes
#
##########################################################################
class Label(object):
    """A label attached to a microop; extern labels are exported from the
    containing ROM/macroop."""
    def __init__(self):
        self.extern = False
        self.name = ""
class Block(object):
    """A brace-delimited list of parsed statements."""
    def __init__(self):
        self.statements = []
class Statement(object):
    """Base class for assembler statements; subclasses flip exactly one of
    the is_microop / is_directive flags."""
    def __init__(self):
        self.is_microop = False
        self.is_directive = False
        self.params = ""
class Microop(Statement):
    """A microop statement: a mnemonic with params and attached labels."""
    def __init__(self):
        super(Microop, self).__init__()
        self.mnemonic = ""
        self.labels = []
        self.is_microop = True
class Directive(Statement):
    """An assembler directive statement: a directive name with params."""
    def __init__(self):
        super(Directive, self).__init__()
        self.name = ""
        self.is_directive = True
##########################################################################
#
# Functions that handle common tasks
#
##########################################################################
def print_error(message):
    """Print an error message set off by blank lines and a '***' marker."""
    print
    print "*** %s" % message
    print
def handle_statement(parser, container, statement):
    """Execute one parsed statement against |container|.

    Microops are instantiated by eval-ing their mnemonic's registered class
    with the statement's raw parameter text; directives call the registered
    directive function the same way. parser.symbols supplies the namespace
    for both evals.
    """
    if statement.is_microop:
        if statement.mnemonic not in parser.microops.keys():
            raise Exception, "Unrecognized mnemonic: %s" % statement.mnemonic
        # Stash the class under a collision-proof name so the eval below can
        # call it with the user-written parameter list.
        parser.symbols["__microopClassFromInsideTheAssembler"] = \
            parser.microops[statement.mnemonic]
        try:
            microop = eval('__microopClassFromInsideTheAssembler(%s)' %
                    statement.params, {}, parser.symbols)
        except:
            print_error("Error creating microop object with mnemonic %s." % \
                    statement.mnemonic)
            raise
        try:
            # Register the microop under each of its labels before adding it.
            for label in statement.labels:
                container.labels[label.text] = microop
                if label.is_extern:
                    container.externs[label.text] = microop
            container.add_microop(statement.mnemonic, microop)
        except:
            print_error("Error adding microop.")
            raise
    elif statement.is_directive:
        if statement.name not in container.directives.keys():
            raise Exception, "Unrecognized directive: %s" % statement.name
        parser.symbols["__directiveFunctionFromInsideTheAssembler"] = \
            container.directives[statement.name]
        try:
            eval('__directiveFunctionFromInsideTheAssembler(%s)' %
                    statement.params, {}, parser.symbols)
        except:
            print_error("Error executing directive.")
            print container.directives
            raise
    else:
        raise Exception, "Didn't recognize the type of statement", statement
##########################################################################
#
# Lexer specification
#
##########################################################################
# Error handler. Just call exit. Output formatted to work under
# Emacs compile-mode. Optional 'print_traceback' arg, if set to True,
# prints a Python stack backtrace too (can be handy when trying to
# debug the parser itself).
def error(lineno, string, print_traceback = False):
    """Report a fatal error in Emacs compile-mode format and exit.

    When print_traceback is True a Python backtrace is dumped first, which
    helps when debugging the parser itself. A lineno of 0 suppresses the
    line-number prefix.
    """
    if print_traceback:
        traceback.print_exc()
    prefix = "%d:" % lineno if lineno != 0 else ""
    sys.exit("%s %s" % (prefix, string))
# Keywords of the micro assembler; each also becomes a token type below.
reserved = ('DEF', 'MACROOP', 'ROM', 'EXTERN')

tokens = reserved + (
        # identifier
        'ID',
        # arguments for microops and directives
        'PARAMS',
        'LPAREN', 'RPAREN',
        'LBRACE', 'RBRACE',
        'COLON', 'SEMI', 'DOT',
        'NEWLINE'
        )

# New lines are ignored at the top level, but they end statements in the
# assembler
# ply "exclusive" states: while in 'asm' or 'params' only that state's
# t_<state>_* rules (and t_ANY_*) apply.
states = (
    ('asm', 'exclusive'),
    ('params', 'exclusive'),
)

# Map lowercase keyword spellings back to their token types for t_*_ID.
reserved_map = { }
for r in reserved:
    reserved_map[r.lower()] = r
def t_ANY_COMMENT(t):
r'\#[^\n]*(?=\n)'
def t_ANY_MULTILINECOMMENT(t):
r'/\*([^/]|((?<!\*)/))*\*/'
# A colon marks the end of a label. It should follow an ID which will
# put the lexer in the "params" state. Seeing the colon will put it back
# in the "asm" state since it knows it saw a label and not a mnemonic.
def t_params_COLON(t):
r':'
t.lexer.begin('asm')
return t
# Parameters are a string of text which don't contain an unescaped statement
# statement terminator, ie a newline or semi colon.
def t_params_PARAMS(t):
r'([^\n;\\]|(\\[\n;\\]))+'
t.lineno += t.value.count('\n')
unescapeParamsRE = re.compile(r'(\\[\n;\\])')
def unescapeParams(mo):
val = mo.group(0)
return val[1]
t.value = unescapeParamsRE.sub(unescapeParams, t.value)
t.lexer.begin('asm')
return t
# An "ID" in the micro assembler is either a label, directive, or mnemonic
# If it's either a directive or a mnemonic, it will be optionally followed by
# parameters. If it's a label, the following colon will make the lexer stop
# looking for parameters.
def t_asm_ID(t):
r'[A-Za-z_]\w*'
t.type = reserved_map.get(t.value, 'ID')
# If the ID is really "extern", we shouldn't start looking for parameters
# yet. The real ID, the label itself, is coming up.
if t.type != 'EXTERN':
t.lexer.begin('params')
return t
# If there is a label and you're -not- in the assembler (which would be caught
# above), don't start looking for parameters.
def t_ANY_ID(t):
r'[A-Za-z_]\w*'
t.type = reserved_map.get(t.value, 'ID')
return t
# Braces enter and exit micro assembly
def t_INITIAL_LBRACE(t):
r'\{'
t.lexer.begin('asm')
return t
def t_asm_RBRACE(t):
r'\}'
t.lexer.begin('INITIAL')
return t
# At the top level, keep track of newlines only for line counting.
def t_INITIAL_NEWLINE(t):
r'\n+'
t.lineno += t.value.count('\n')
# In the micro assembler, do line counting but also return a token. The
# token is needed by the parser to detect the end of a statement.
def t_asm_NEWLINE(t):
r'\n+'
t.lineno += t.value.count('\n')
return t
# A newline or semi colon when looking for params signals that the statement
# is over and the lexer should go back to looking for regular assembly.
def t_params_NEWLINE(t):
r'\n+'
t.lineno += t.value.count('\n')
t.lexer.begin('asm')
return t
def t_params_SEMI(t):
r';'
t.lexer.begin('asm')
return t
# Basic regular expressions to pick out simple tokens
t_ANY_LPAREN = r'\('
t_ANY_RPAREN = r'\)'
t_ANY_SEMI = r';'
t_ANY_DOT = r'\.'
t_ANY_ignore = ' \t\x0c'
def t_ANY_error(t):
error(t.lineno, "illegal character '%s'" % t.value[0])
t.skip(1)
##########################################################################
#
# Parser specification
#
##########################################################################
# Start symbol for a file which may have more than one macroop or rom
# specification.
# Structural grammar rules. Each p_* function's docstring is the yacc
# production it implements; these rules carry no semantic values, so their
# bodies are intentionally empty.
def p_file(t):
    'file : opt_rom_or_macros'
def p_opt_rom_or_macros_0(t):
    'opt_rom_or_macros : '
def p_opt_rom_or_macros_1(t):
    'opt_rom_or_macros : rom_or_macros'
def p_rom_or_macros_0(t):
    'rom_or_macros : rom_or_macro'
def p_rom_or_macros_1(t):
    'rom_or_macros : rom_or_macros rom_or_macro'
def p_rom_or_macro_0(t):
    '''rom_or_macro : rom_block
                    | macroop_def'''
# Defines a section of microcode that should go in the current ROM
def p_rom_block(t):
    'rom_block : DEF ROM block SEMI'
    # The docstring above is the yacc production; it must stay unchanged.
    # A rom block appends its statements to the single Rom object supplied
    # when the MicroAssembler was constructed.
    if not t.parser.rom:
        print_error("Rom block found, but no Rom object specified.")
        # Fix: `raise TypeError, msg` is Python-2-only syntax; the call form
        # behaves identically on Python 2 and also parses on Python 3.
        raise TypeError("Rom block found, but no Rom object was specified.")
    for statement in t[3].statements:
        handle_statement(t.parser, t.parser.rom, statement)
    t[0] = t.parser.rom
# Defines a macroop that jumps to an external label in the ROM
def p_macroop_def_0(t):
    'macroop_def : DEF MACROOP ID LPAREN ID RPAREN SEMI'
    # ROM-based macroop: t[3] is the macroop name, t[5] the external ROM
    # label it refers to.
    if not t.parser.rom_macroop_type:
        print_error("ROM based macroop found, but no ROM macroop class was specified.")
        # Fix: `raise TypeError, msg` is Python-2-only syntax; the call form
        # is equivalent and portable to Python 3.
        raise TypeError("ROM based macroop found, but no ROM macroop class was specified.")
    macroop = t.parser.rom_macroop_type(t[3], t[5])
    t.parser.macroops[t[3]] = macroop
# Defines a macroop that is combinationally generated
def p_macroop_def_1(t):
    'macroop_def : DEF MACROOP ID block SEMI'
    # Combinationally generated macroop: create a macroop object named t[3]
    # and feed it every statement from the brace-delimited block t[4].
    try:
        curop = t.parser.macro_type(t[3])
    except TypeError:
        print_error("Error creating macroop object.")
        raise
    for statement in t[4].statements:
        handle_statement(t.parser, curop, statement)
    t.parser.macroops[t[3]] = curop
# A block of statements
def p_block(t):
    'block : LBRACE statements RBRACE'
    # Wrap the parsed statement list in a Block container object
    # (Block is defined elsewhere in this file).
    block = Block()
    block.statements = t[2]
    t[0] = block
def p_statements_0(t):
    'statements : statement'
    # Start a statement list. Empty statements come through as None and are
    # filtered out here and in p_statements_1.
    if t[1]:
        t[0] = [t[1]]
    else:
        t[0] = []
def p_statements_1(t):
    'statements : statements statement'
    if t[2]:
        t[1].append(t[2])
    t[0] = t[1]
def p_statement(t):
    'statement : content_of_statement end_of_statement'
    # A statement's value is its content; the terminator carries no value.
    t[0] = t[1]
# A statement can be a microop or an assembler directive
def p_content_of_statement_0(t):
    '''content_of_statement : microop
                            | directive'''
    t[0] = t[1]
# Ignore empty statements
def p_content_of_statement_1(t):
    'content_of_statement : '
    # Empty production: yields None, which the p_statements_* rules drop.
    pass
# Statements are ended by newlines or a semi colon
def p_end_of_statement(t):
    '''end_of_statement : NEWLINE
                        | SEMI'''
    pass
# Different flavors of microop to avoid shift/reduce errors
def p_microop_0(t):
    'microop : labels ID'
    # Mnemonic preceded by one or more labels, no parameter string.
    microop = Microop()
    microop.labels = t[1]
    microop.mnemonic = t[2]
    t[0] = microop
def p_microop_1(t):
    'microop : ID'
    # Bare mnemonic.
    microop = Microop()
    microop.mnemonic = t[1]
    t[0] = microop
def p_microop_2(t):
    'microop : labels ID PARAMS'
    # Labels, mnemonic, and a raw parameter string.
    microop = Microop()
    microop.labels = t[1]
    microop.mnemonic = t[2]
    microop.params = t[3]
    t[0] = microop
def p_microop_3(t):
    'microop : ID PARAMS'
    # Mnemonic with parameters but no labels.
    microop = Microop()
    microop.mnemonic = t[1]
    microop.params = t[2]
    t[0] = microop
# Labels in the microcode
def p_labels_0(t):
    'labels : label'
    # Start a label list with a single label.
    t[0] = [t[1]]
def p_labels_1(t):
    'labels : labels label'
    t[1].append(t[2])
    t[0] = t[1]
# labels on lines by themselves are attached to the following instruction.
def p_labels_2(t):
    'labels : labels NEWLINE'
    t[0] = t[1]
def p_label_0(t):
    'label : ID COLON'
    # Ordinary (non-extern) label.
    label = Label()
    label.is_extern = False
    label.text = t[1]
    t[0] = label
def p_label_1(t):
    'label : EXTERN ID COLON'
    # Label declared with the "extern" keyword; flagged accordingly.
    label = Label()
    label.is_extern = True
    label.text = t[2]
    t[0] = label
# Directives for the macroop
def p_directive_0(t):
    'directive : DOT ID'
    # Dot-prefixed assembler directive without a parameter string.
    directive = Directive()
    directive.name = t[2]
    t[0] = directive
def p_directive_1(t):
    'directive : DOT ID PARAMS'
    # Dot-prefixed assembler directive carrying a raw parameter string.
    directive = Directive()
    directive.name = t[2]
    directive.params = t[3]
    t[0] = directive
# Parse error handler. Note that the argument here is the offending
# *token*, not a grammar symbol (hence the need to use t.value)
def p_error(t):
    # yacc parse-error handler. `t` is the offending *token* (or None at
    # end of input), not a grammar symbol, hence the use of t.value.
    # `error` is defined elsewhere in this file.
    if t:
        error(t.lineno, "syntax error at '%s'" % t.value)
    else:
        error(0, "unknown syntax error", True)
class MicroAssembler(object):
    """Facade tying the PLY lexer/parser to the microcode containers.

    The caller supplies the macroop factory, the microop table, and
    optionally a Rom object plus a ROM-macroop factory; the grammar actions
    above reach all of them through ``t.parser``.
    """
    def __init__(self, macro_type, microops,
                 rom = None, rom_macroop_type = None):
        self.lexer = lex.lex()
        self.parser = yacc.yacc()
        # Stash the caller-provided factories/containers on the parser object
        # so the p_* grammar rules can access them via t.parser.
        self.parser.macro_type = macro_type
        self.parser.macroops = {}
        self.parser.microops = microops
        self.parser.rom = rom
        self.parser.rom_macroop_type = rom_macroop_type
        self.parser.symbols = {}
        self.symbols = self.parser.symbols
    def assemble(self, asm):
        # Parse the microcode string and hand back the macroops that were
        # built, resetting the table so the assembler can be reused.
        self.parser.parse(asm, lexer=self.lexer)
        macroops = self.parser.macroops
        self.parser.macroops = {}
        return macroops
| bsd-3-clause |
UweFleis3/Uwe | py/openage/convert/hardcoded/langcodes.py | 46 | 8618 | # language codes, as used in PE file ressources
# this file is used by pefile.py
langcodes = {
1: 'ar',
2: 'bg',
3: 'ca',
4: 'zh_Hans',
5: 'cs',
6: 'da',
7: 'de',
8: 'el',
9: 'en',
10: 'es',
11: 'fi',
12: 'fr',
13: 'he',
14: 'hu',
15: 'is',
16: 'it',
17: 'ja',
18: 'ko',
19: 'nl',
20: 'no',
21: 'pl',
22: 'pt',
23: 'rm',
24: 'ro',
25: 'ru',
26: 'bs',
27: 'sk',
28: 'sq',
29: 'sv',
30: 'th',
31: 'tr',
32: 'ur',
33: 'id',
34: 'uk',
35: 'be',
36: 'sl',
37: 'et',
38: 'lv',
39: 'lt',
40: 'tg',
41: 'fa',
42: 'vi',
43: 'hy',
44: 'az',
45: 'eu',
46: 'dsb',
47: 'mk',
48: 'st',
49: 'ts',
50: 'tn',
51: 've',
52: 'xh',
53: 'zu',
54: 'af',
55: 'ka',
56: 'fo',
57: 'hi',
58: 'mt',
59: 'se',
60: 'ga',
61: 'yi',
62: 'ms',
63: 'kk',
64: 'ky',
65: 'sw',
66: 'tk',
67: 'uz',
68: 'tt',
69: 'bn',
70: 'pa',
71: 'gu',
72: 'or',
73: 'ta',
74: 'te',
75: 'kn',
76: 'ml',
77: 'as',
78: 'mr',
79: 'sa',
80: 'mn',
81: 'bo',
82: 'cy',
83: 'km',
84: 'lo',
85: 'my',
86: 'gl',
87: 'kok',
88: 'mni',
89: 'sd',
90: 'syr',
91: 'si',
92: 'chr',
93: 'iu',
94: 'am',
95: 'tzm',
96: 'ks',
97: 'ne',
98: 'fy',
99: 'ps',
100: 'fil',
101: 'dv',
102: 'bin',
103: 'ff',
104: 'ha',
105: 'ibb',
106: 'yo',
107: 'quz',
108: 'nso',
109: 'ba',
110: 'lb',
111: 'kl',
112: 'ig',
113: 'kr',
114: 'om',
115: 'ti',
116: 'gn',
117: 'haw',
118: 'la',
119: 'so',
120: 'ii',
121: 'pap',
122: 'arn',
124: 'moh',
126: 'br',
128: 'ug',
129: 'mi',
130: 'oc',
131: 'co',
132: 'gsw',
133: 'sah',
134: 'qut',
135: 'rw',
136: 'wo',
140: 'prs',
145: 'gd',
146: 'ku',
1025: 'ar_SA',
1026: 'bg_BG',
1027: 'ca_ES',
1028: 'zh_TW',
1029: 'cs_CZ',
1030: 'da_DK',
1031: 'de_DE',
1032: 'el_GR',
1033: 'en_US',
1034: 'es_ES_tradnl',
1035: 'fi_FI',
1036: 'fr_FR',
1037: 'he_IL',
1038: 'hu_HU',
1039: 'is_IS',
1040: 'it_IT',
1041: 'ja_JP',
1042: 'ko_KR',
1043: 'nl_NL',
1044: 'nb_NO',
1045: 'pl_PL',
1046: 'pt_BR',
1047: 'rm_CH',
1048: 'ro_RO',
1049: 'ru_RU',
1050: 'hr_HR',
1051: 'sk_SK',
1052: 'sq_AL',
1053: 'sv_SE',
1054: 'th_TH',
1055: 'tr_TR',
1056: 'ur_PK',
1057: 'id_ID',
1058: 'uk_UA',
1059: 'be_BY',
1060: 'sl_SI',
1061: 'et_EE',
1062: 'lv_LV',
1063: 'lt_LT',
1064: 'tg_Cyrl_TJ',
1065: 'fa_IR',
1066: 'vi_VN',
1067: 'hy_AM',
1068: 'az_Latn_AZ',
1069: 'eu_ES',
1070: 'hsb_DE',
1071: 'mk_MK',
1072: 'st_ZA',
1073: 'ts_ZA',
1074: 'tn_ZA',
1075: 've_ZA',
1076: 'xh_ZA',
1077: 'zu_ZA',
1078: 'af_ZA',
1079: 'ka_GE',
1080: 'fo_FO',
1081: 'hi_IN',
1082: 'mt_MT',
1083: 'se_NO',
1085: 'yi_Hebr',
1086: 'ms_MY',
1087: 'kk_KZ',
1088: 'ky_KG',
1089: 'sw_KE',
1090: 'tk_TM',
1091: 'uz_Latn_UZ',
1092: 'tt_RU',
1093: 'bn_IN',
1094: 'pa_IN',
1095: 'gu_IN',
1096: 'or_IN',
1097: 'ta_IN',
1098: 'te_IN',
1099: 'kn_IN',
1100: 'ml_IN',
1101: 'as_IN',
1102: 'mr_IN',
1103: 'sa_IN',
1104: 'mn_MN',
1105: 'bo_CN',
1106: 'cy_GB',
1107: 'km_KH',
1108: 'lo_LA',
1109: 'my_MM',
1110: 'gl_ES',
1111: 'kok_IN',
1112: 'mni_IN',
1113: 'sd_Deva_IN',
1114: 'syr_SY',
1115: 'si_LK',
1116: 'chr_Cher_US',
1117: 'iu_Cans_CA',
1118: 'am_ET',
1119: 'tzm_Arab_MA',
1120: 'ks_Arab',
1121: 'ne_NP',
1122: 'fy_NL',
1123: 'ps_AF',
1124: 'fil_PH',
1125: 'dv_MV',
1126: 'bin_NG',
1127: 'fuv_NG',
1128: 'ha_Latn_NG',
1129: 'ibb_NG',
1130: 'yo_NG',
1131: 'quz_BO',
1132: 'nso_ZA',
1133: 'ba_RU',
1134: 'lb_LU',
1135: 'kl_GL',
1136: 'ig_NG',
1137: 'kr_NG',
1138: 'om_ET',
1139: 'ti_ET',
1140: 'gn_PY',
1141: 'haw_US',
1142: 'la_Latn',
1143: 'so_SO',
1144: 'ii_CN',
1145: 'pap_029',
1146: 'arn_CL',
1148: 'moh_CA',
1150: 'br_FR',
1152: 'ug_CN',
1153: 'mi_NZ',
1154: 'oc_FR',
1155: 'co_FR',
1156: 'gsw_FR',
1157: 'sah_RU',
1158: 'qut_GT',
1159: 'rw_RW',
1160: 'wo_SN',
1164: 'prs_AF',
1165: 'plt_MG',
1166: 'zh_yue_HK',
1167: 'tdd_Tale_CN',
1168: 'khb_Talu_CN',
1169: 'gd_GB',
1170: 'ku_Arab_IQ',
1171: 'quc_CO',
1281: 'qps_ploc',
1534: 'qps_ploca',
2049: 'ar_IQ',
2051: 'ca_ES_valencia',
2052: 'zh_CN',
2055: 'de_CH',
2057: 'en_GB',
2058: 'es_MX',
2060: 'fr_BE',
2064: 'it_CH',
2065: 'ja_Ploc_JP',
2067: 'nl_BE',
2068: 'nn_NO',
2070: 'pt_PT',
2072: 'ro_MD',
2073: 'ru_MD',
2074: 'sr_Latn_CS',
2077: 'sv_FI',
2080: 'ur_IN',
2092: 'az_Cyrl_AZ',
2094: 'dsb_DE',
2098: 'tn_BW',
2107: 'se_SE',
2108: 'ga_IE',
2110: 'ms_BN',
2115: 'uz_Cyrl_UZ',
2117: 'bn_BD',
2118: 'pa_Arab_PK',
2121: 'ta_LK',
2128: 'mn_Mong_CN',
2129: 'bo_BT',
2137: 'sd_Arab_PK',
2141: 'iu_Latn_CA',
2143: 'tzm_Latn_DZ',
2144: 'ks_Deva',
2145: 'ne_IN',
2151: 'ff_Latn_SN',
2155: 'quz_EC',
2163: 'ti_ER',
2559: 'qps_plocm',
3073: 'ar_EG',
3076: 'zh_HK',
3079: 'de_AT',
3081: 'en_AU',
3082: 'es_ES',
3084: 'fr_CA',
3098: 'sr_Cyrl_CS',
3131: 'se_FI',
3152: 'mn_Mong_MN',
3167: 'tmz_MA',
3179: 'quz_PE',
4097: 'ar_LY',
4100: 'zh_SG',
4103: 'de_LU',
4105: 'en_CA',
4106: 'es_GT',
4108: 'fr_CH',
4122: 'hr_BA',
4155: 'smj_NO',
4191: 'tzm_Tfng_MA',
5121: 'ar_DZ',
5124: 'zh_MO',
5127: 'de_LI',
5129: 'en_NZ',
5130: 'es_CR',
5132: 'fr_LU',
5146: 'bs_Latn_BA',
5179: 'smj_SE',
6145: 'ar_MA',
6153: 'en_IE',
6154: 'es_PA',
6156: 'fr_MC',
6170: 'sr_Latn_BA',
6203: 'sma_NO',
7169: 'ar_TN',
7177: 'en_ZA',
7178: 'es_DO',
7194: 'sr_Cyrl_BA',
7227: 'sma_SE',
8193: 'ar_OM',
8201: 'en_JM',
8202: 'es_VE',
8204: 'fr_RE',
8218: 'bs_Cyrl_BA',
8251: 'sms_FI',
9217: 'ar_YE',
9225: 'en_029',
9226: 'es_CO',
9228: 'fr_CD',
9242: 'sr_Latn_RS',
9275: 'smn_FI',
10241: 'ar_SY',
10249: 'en_BZ',
10250: 'es_PE',
10252: 'fr_SN',
10266: 'sr_Cyrl_RS',
11265: 'ar_JO',
11273: 'en_TT',
11274: 'es_AR',
11276: 'fr_CM',
11290: 'sr_Latn_ME',
12289: 'ar_LB',
12297: 'en_ZW',
12298: 'es_EC',
12300: 'fr_CI',
12314: 'sr_Cyrl_ME',
13313: 'ar_KW',
13321: 'en_PH',
13322: 'es_CL',
13324: 'fr_ML',
14337: 'ar_AE',
14345: 'en_ID',
14346: 'es_UY',
14348: 'fr_MA',
15361: 'ar_BH',
15369: 'en_HK',
15370: 'es_PY',
15372: 'fr_HT',
16385: 'ar_QA',
16393: 'en_IN',
16394: 'es_BO',
17409: 'ar_Ploc_SA',
17417: 'en_MY',
17418: 'es_SV',
18433: 'ar_145',
18441: 'en_SG',
18442: 'es_HN',
19465: 'en_AE',
19466: 'es_NI',
20489: 'en_BH',
20490: 'es_PR',
21513: 'en_EG',
21514: 'es_US',
22537: 'en_JO',
22538: 'es_419',
23561: 'en_KW',
24585: 'en_TR',
25609: 'en_YE',
25626: 'bs_Cyrl',
26650: 'bs_Latn',
27674: 'sr_Cyrl',
28698: 'sr_Latn',
28731: 'smn',
29740: 'az_Cyrl',
29755: 'sms',
30724: 'zh',
30740: 'nn',
30746: 'bs',
30764: 'az_Latn',
30779: 'sma',
30787: 'uz_Cyrl',
30800: 'mn_Cyrl',
30813: 'iu_Cans',
30815: 'tzm_Tfng',
31748: 'zh_Hant',
31764: 'nb',
31770: 'sr',
31784: 'tg_Cyrl',
31790: 'dsb',
31803: 'smj',
31811: 'uz_Latn',
31814: 'pa_Arab',
31824: 'mn_Mong',
31833: 'sd_Arab',
31836: 'chr_Cher',
31837: 'iu_Latn',
31839: 'tzm_Latn',
31847: 'ff_Latn',
31848: 'ha_Latn',
31890: 'ku_Arab',
65663: 'x_IV_mathan',
66567: 'de_DE_phoneb',
66574: 'hu_HU_tchncl',
66615: 'ka_GE_modern',
133124: 'zh_CN_stroke',
135172: 'zh_SG_stroke',
136196: 'zh_MO_stroke',
197636: 'zh_TW_pronun',
263172: 'zh_TW_radstr',
263185: 'ja_JP_radstr',
265220: 'zh_HK_radstr',
267268: 'zh_MO_radstr'}
| gpl-3.0 |
rue89-tech/edx-analytics-dashboard | analytics_dashboard/settings/logger.py | 4 | 3377 | """Logging configuration"""
import os
import platform
import sys
from logging.handlers import SysLogHandler
def get_logger_config(log_dir='/var/tmp',
                      logging_env="no_env",
                      edx_filename="edx.log",
                      dev_env=False,
                      debug=False,
                      local_loglevel='INFO',
                      service_variant='insights'):
    """
    Build the logging configuration dictionary for this service.

    Assign the result to the LOGGING setting. When ``dev_env`` is true,
    application logs are written to rotating files named ``edx_filename``
    under ``log_dir``; otherwise they are sent to the local rsyslogd
    daemon and ``edx_filename`` is ignored.
    """
    # Guard against invalid level names by reverting to INFO.
    if local_loglevel not in ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'):
        local_loglevel = 'INFO'

    short_host = platform.node().split(".")[0]
    syslog_format = ("[service_variant={service_variant}]"
                     "[%(name)s][env:{logging_env}] %(levelname)s "
                     "[{hostname} %(process)d] [%(filename)s:%(lineno)d] "
                     "- %(message)s").format(
                         service_variant=service_variant,
                         logging_env=logging_env, hostname=short_host)

    # Loggers write to the console when debugging, to 'local' otherwise.
    logger_handlers = ['console'] if debug else ['local']

    config = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'standard': {
                'format': '%(asctime)s %(levelname)s %(process)d '
                          '[%(name)s] %(filename)s:%(lineno)d - %(message)s',
            },
            'syslog_format': {'format': syslog_format},
            'raw': {'format': '%(message)s'},
        },
        'handlers': {
            'console': {
                'level': 'DEBUG' if debug else 'INFO',
                'class': 'logging.StreamHandler',
                'formatter': 'standard',
                'stream': sys.stdout,
            },
        },
        'loggers': {
            'django': {
                'handlers': logger_handlers,
                'propagate': True,
                'level': 'INFO'
            },
            '': {
                'handlers': logger_handlers,
                'level': 'DEBUG',
                'propagate': False
            },
        }
    }

    if dev_env:
        # Development: rotate application logs on disk instead of syslog.
        config['handlers']['local'] = {
            'class': 'logging.handlers.RotatingFileHandler',
            'level': local_loglevel,
            'formatter': 'standard',
            'filename': os.path.join(log_dir, edx_filename),
            'maxBytes': 1024 * 1024 * 2,
            'backupCount': 5,
        }
    else:
        config['handlers']['local'] = {
            'level': local_loglevel,
            'class': 'logging.handlers.SysLogHandler',
            # Mac OS X exposes the syslog socket at a different path.
            'address': '/var/run/syslog' if sys.platform == "darwin" else '/dev/log',
            'formatter': 'syslog_format',
            'facility': SysLogHandler.LOG_LOCAL0,
        }

    return config
| agpl-3.0 |
rickerc/neutron_audit | neutron/db/routedserviceinsertion_db.py | 17 | 4553 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 VMware, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Kaiwei Fan, VMware, Inc
import sqlalchemy as sa
from sqlalchemy import event
from neutron.common import exceptions as qexception
from neutron.db import model_base
from neutron.extensions import routedserviceinsertion as rsi
class ServiceRouterBinding(model_base.BASEV2):
    """Associates an advanced-service resource with a logical router."""
    # Identifier of the bound resource; paired with resource_type as the PK.
    resource_id = sa.Column(sa.String(36),
                            primary_key=True)
    # Table name of the resource's model (see validate_resource_type below).
    resource_type = sa.Column(sa.String(36),
                              primary_key=True)
    # The router the resource is inserted on.
    router_id = sa.Column(sa.String(36),
                          sa.ForeignKey('routers.id'),
                          nullable=False)
class AttributeException(qexception.NeutronException):
    """Raised when a resource_type value exceeds the column length."""
    message = _("Resource type '%(resource_type)s' is longer "
                "than %(maxlen)d characters")
@event.listens_for(ServiceRouterBinding.resource_type, 'set', retval=True)
def validate_resource_type(target, value, oldvalue, initiator):
    """Make sure the resource type fit the resource_type column."""
    # SQLAlchemy attribute-set event; retval=True means the returned value is
    # what actually gets stored on the attribute.
    maxlen = ServiceRouterBinding.resource_type.property.columns[0].type.length
    if len(value) > maxlen:
        raise AttributeException(resource_type=value, maxlen=maxlen)
    return value
class RoutedServiceInsertionDbMixin(object):
    """Mixin class to add router service insertion."""

    def _process_create_resource_router_id(self, context, resource, model):
        """Persist the resource->router binding and return it as a dict."""
        with context.session.begin(subtransactions=True):
            db = ServiceRouterBinding(
                resource_id=resource['id'],
                resource_type=model.__tablename__,
                router_id=resource[rsi.ROUTER_ID])
            context.session.add(db)
        return self._make_resource_router_id_dict(db, model)

    def _extend_resource_router_id_dict(self, context, resource, model):
        """Populate *resource* with the router id it is bound to."""
        # Bug fix: _get_resource_router_id_binding takes (context, model,
        # resource_id=..., router_id=...). The previous call passed the
        # resource id positionally where `model` was expected (and the model
        # where `resource_id` was expected), so the filter evaluated
        # __tablename__ on a string and could never match a binding.
        binding = self._get_resource_router_id_binding(
            context, model, resource_id=resource['resource_id'])
        resource[rsi.ROUTER_ID] = binding['router_id']

    def _get_resource_router_id_binding(self, context, model,
                                        resource_id=None,
                                        router_id=None):
        """Return the first binding matching the given filters, or None."""
        query = self._model_query(context, ServiceRouterBinding)
        query = query.filter(
            ServiceRouterBinding.resource_type == model.__tablename__)
        if resource_id:
            query = query.filter(
                ServiceRouterBinding.resource_id == resource_id)
        if router_id:
            query = query.filter(
                ServiceRouterBinding.router_id == router_id)
        return query.first()

    def _get_resource_router_id_bindings(self, context, model,
                                         resource_ids=None,
                                         router_ids=None):
        """Return all bindings matching the given resource/router id sets."""
        query = self._model_query(context, ServiceRouterBinding)
        query = query.filter(
            ServiceRouterBinding.resource_type == model.__tablename__)
        if resource_ids:
            query = query.filter(
                ServiceRouterBinding.resource_id.in_(resource_ids))
        if router_ids:
            query = query.filter(
                ServiceRouterBinding.router_id.in_(router_ids))
        return query.all()

    def _make_resource_router_id_dict(self, resource_router_binding, model,
                                      fields=None):
        """Convert a binding row to its API dict, filtered to *fields*."""
        resource = {'resource_id': resource_router_binding['resource_id'],
                    'resource_type': model.__tablename__,
                    rsi.ROUTER_ID: resource_router_binding[rsi.ROUTER_ID]}
        return self._fields(resource, fields)

    def _delete_resource_router_id_binding(self, context, resource_id, model):
        """Delete the binding for *resource_id*, if one exists."""
        with context.session.begin(subtransactions=True):
            binding = self._get_resource_router_id_binding(
                context, model, resource_id=resource_id)
            if binding:
                context.session.delete(binding)
| apache-2.0 |
iheitlager/django-rest-framework | rest_framework/parsers.py | 78 | 7968 | """
Parsers are used to parse the content of incoming HTTP requests.
They give us a generic way of being able to handle various media types
on the request, such as form content or json encoded data.
"""
from __future__ import unicode_literals
import json
from django.conf import settings
from django.core.files.uploadhandler import StopFutureHandlers
from django.http import QueryDict
from django.http.multipartparser import \
MultiPartParser as DjangoMultiPartParser
from django.http.multipartparser import (
ChunkIter, MultiPartParserError, parse_header
)
from django.utils import six
from django.utils.encoding import force_text
from django.utils.six.moves.urllib import parse as urlparse
from rest_framework import renderers
from rest_framework.exceptions import ParseError
class DataAndFiles(object):
    """Container pairing parsed form data with any uploaded files."""
    def __init__(self, data, files):
        self.data = data
        self.files = files
class BaseParser(object):
    """
    All parsers should extend `BaseParser`, specifying a `media_type`
    attribute, and overriding the `.parse()` method.
    """
    # Subclasses set this to the media type they handle,
    # e.g. 'application/json'.
    media_type = None
    def parse(self, stream, media_type=None, parser_context=None):
        """
        Given a stream to read from, return the parsed representation.
        Should return parsed data, or a `DataAndFiles` object consisting of the
        parsed data and files.
        """
        raise NotImplementedError(".parse() must be overridden.")
class JSONParser(BaseParser):
    """
    Parses JSON-serialized data.
    """
    media_type = 'application/json'
    renderer_class = renderers.JSONRenderer
    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parses the incoming bytestream as JSON and returns the resulting data.

        Raises ParseError (wrapping the underlying ValueError) when the
        payload is not valid JSON in the requested encoding.
        """
        parser_context = parser_context or {}
        # Fall back to the project-wide default charset when the request did
        # not specify one.
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
        try:
            data = stream.read().decode(encoding)
            return json.loads(data)
        except ValueError as exc:
            raise ParseError('JSON parse error - %s' % six.text_type(exc))
class FormParser(BaseParser):
    """
    Parser for form data.
    """
    media_type = 'application/x-www-form-urlencoded'
    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parses the incoming bytestream as a URL encoded form,
        and returns the resulting QueryDict.
        """
        parser_context = parser_context or {}
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
        data = QueryDict(stream.read(), encoding=encoding)
        return data
class MultiPartParser(BaseParser):
    """
    Parser for multipart form data, which may include file data.
    """
    media_type = 'multipart/form-data'
    def parse(self, stream, media_type=None, parser_context=None):
        """
        Parses the incoming bytestream as a multipart encoded form,
        and returns a DataAndFiles object.
        `.data` will be a `QueryDict` containing all the form parameters.
        `.files` will be a `QueryDict` containing all the form files.

        Raises ParseError when Django's multipart parser rejects the body.
        """
        parser_context = parser_context or {}
        request = parser_context['request']
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
        # Copy META so the media type negotiated by the framework (which may
        # include the boundary parameter) overrides the raw header.
        meta = request.META.copy()
        meta['CONTENT_TYPE'] = media_type
        upload_handlers = request.upload_handlers
        try:
            # Delegate the heavy lifting to Django's own multipart parser.
            parser = DjangoMultiPartParser(meta, stream, upload_handlers, encoding)
            data, files = parser.parse()
            return DataAndFiles(data, files)
        except MultiPartParserError as exc:
            raise ParseError('Multipart form parse error - %s' % six.text_type(exc))
class FileUploadParser(BaseParser):
    """
    Parser for file upload data.
    """
    media_type = '*/*'
    def parse(self, stream, media_type=None, parser_context=None):
        """
        Treats the incoming bytestream as a raw file upload and returns
        a `DataAndFiles` object.
        `.data` will be None (we expect request body to be a file content).
        `.files` will be a `QueryDict` containing one 'file' element.
        """
        parser_context = parser_context or {}
        request = parser_context['request']
        encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
        meta = request.META
        upload_handlers = request.upload_handlers
        filename = self.get_filename(stream, media_type, parser_context)
        # Note that this code is extracted from Django's handling of
        # file uploads in MultiPartParser.
        content_type = meta.get('HTTP_CONTENT_TYPE',
                                meta.get('CONTENT_TYPE', ''))
        try:
            content_length = int(meta.get('HTTP_CONTENT_LENGTH',
                                          meta.get('CONTENT_LENGTH', 0)))
        except (ValueError, TypeError):
            content_length = None
        # See if the handler will want to take care of the parsing.
        for handler in upload_handlers:
            result = handler.handle_raw_input(None,
                                              meta,
                                              content_length,
                                              None,
                                              encoding)
            if result is not None:
                # A handler short-circuited the parse and produced the file.
                return DataAndFiles({}, {'file': result[1]})
        # This is the standard case.
        # Chunk size: the smallest handler preference, capped below 2**31.
        possible_sizes = [x.chunk_size for x in upload_handlers if x.chunk_size]
        chunk_size = min([2 ** 31 - 4] + possible_sizes)
        chunks = ChunkIter(stream, chunk_size)
        counters = [0] * len(upload_handlers)
        for index, handler in enumerate(upload_handlers):
            try:
                handler.new_file(None, filename, content_type,
                                 content_length, encoding)
            except StopFutureHandlers:
                # This handler claims the upload; drop the handlers after it.
                upload_handlers = upload_handlers[:index + 1]
                break
        for chunk in chunks:
            for index, handler in enumerate(upload_handlers):
                chunk_length = len(chunk)
                # A handler may transform the chunk or return None to stop
                # propagation to the remaining handlers.
                chunk = handler.receive_data_chunk(chunk, counters[index])
                counters[index] += chunk_length
                if chunk is None:
                    break
        for index, handler in enumerate(upload_handlers):
            file_obj = handler.file_complete(counters[index])
            if file_obj:
                return DataAndFiles({}, {'file': file_obj})
        raise ParseError("FileUpload parse error - "
                         "none of upload handlers can handle the stream")
    def get_filename(self, stream, media_type, parser_context):
        """
        Detects the uploaded file name. First searches a 'filename' url kwarg.
        Then tries to parse Content-Disposition header.

        Returns None implicitly when neither source yields a filename.
        """
        try:
            return parser_context['kwargs']['filename']
        except KeyError:
            pass
        try:
            meta = parser_context['request'].META
            disposition = parse_header(meta['HTTP_CONTENT_DISPOSITION'].encode('utf-8'))
            filename_parm = disposition[1]
            # RFC 6266 extended ("filename*") parameter takes precedence.
            if 'filename*' in filename_parm:
                return self.get_encoded_filename(filename_parm)
            return force_text(filename_parm['filename'])
        except (AttributeError, KeyError, ValueError):
            pass
    def get_encoded_filename(self, filename_parm):
        """
        Handle encoded filenames per RFC6266. See also:
        http://tools.ietf.org/html/rfc2231#section-4
        """
        encoded_filename = force_text(filename_parm['filename*'])
        try:
            # Format is: charset'language'percent-encoded-value
            charset, lang, filename = encoded_filename.split('\'', 2)
            filename = urlparse.unquote(filename)
        except (ValueError, LookupError):
            # Malformed extended value: fall back to the plain parameter.
            filename = force_text(filename_parm['filename'])
        return filename
| bsd-2-clause |
cntnboys/410Lab6 | build/django/build/lib.linux-x86_64-2.7/django/db/backends/postgresql_psycopg2/base.py | 21 | 8837 | """
PostgreSQL database backend for Django.
Requires psycopg 2: http://initd.org/projects/psycopg2
"""
from django.conf import settings
from django.db.backends import (BaseDatabaseFeatures, BaseDatabaseWrapper,
BaseDatabaseValidation)
from django.db.backends.postgresql_psycopg2.operations import DatabaseOperations
from django.db.backends.postgresql_psycopg2.client import DatabaseClient
from django.db.backends.postgresql_psycopg2.creation import DatabaseCreation
from django.db.backends.postgresql_psycopg2.version import get_version
from django.db.backends.postgresql_psycopg2.introspection import DatabaseIntrospection
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.utils import InterfaceError
from django.utils.encoding import force_str
from django.utils.functional import cached_property
from django.utils.safestring import SafeText, SafeBytes
from django.utils.timezone import utc
try:
import psycopg2 as Database
import psycopg2.extensions
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading psycopg2 module: %s" % e)
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError
psycopg2.extensions.register_type(psycopg2.extensions.UNICODE)
psycopg2.extensions.register_type(psycopg2.extensions.UNICODEARRAY)
psycopg2.extensions.register_adapter(SafeBytes, psycopg2.extensions.QuotedString)
psycopg2.extensions.register_adapter(SafeText, psycopg2.extensions.QuotedString)
def utc_tzinfo_factory(offset):
    """tzinfo factory for psycopg2 cursors: always UTC, reject anything else.

    The backend configures the session time zone to UTC, so a non-zero
    offset from the server indicates a misconfigured connection.
    """
    if offset == 0:
        return utc
    raise AssertionError("database connection isn't set to UTC")
class DatabaseFeatures(BaseDatabaseFeatures):
    """Feature flags describing what the PostgreSQL backend supports."""
    needs_datetime_string_cast = False
    can_return_id_from_insert = True
    requires_rollback_on_dirty_transaction = True
    has_real_datatype = True
    can_defer_constraint_checks = True
    has_select_for_update = True
    has_select_for_update_nowait = True
    has_bulk_insert = True
    uses_savepoints = True
    supports_tablespaces = True
    supports_transactions = True
    can_introspect_ip_address_field = True
    can_introspect_small_integer_field = True
    can_distinct_on_fields = True
    can_rollback_ddl = True
    supports_combined_alters = True
    nulls_order_largest = True
    # psycopg2 raises InterfaceError when a closed cursor is used.
    closed_cursor_error_class = InterfaceError
    has_case_insensitive_like = False
    requires_sqlparse_for_splitting = False
class DatabaseWrapper(BaseDatabaseWrapper):
    """Django database wrapper for PostgreSQL via psycopg2."""
    vendor = 'postgresql'
    # Mapping of Django lookup types to SQL operator templates.
    operators = {
        'exact': '= %s',
        'iexact': '= UPPER(%s)',
        'contains': 'LIKE %s',
        'icontains': 'LIKE UPPER(%s)',
        'regex': '~ %s',
        'iregex': '~* %s',
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': 'LIKE %s',
        'endswith': 'LIKE %s',
        'istartswith': 'LIKE UPPER(%s)',
        'iendswith': 'LIKE UPPER(%s)',
    }
    # Templates used when the pattern value is itself an expression; the
    # doubled '%%%%' survives two rounds of %-interpolation.
    pattern_ops = {
        'startswith': "LIKE %s || '%%%%'",
        'istartswith': "LIKE UPPER(%s) || '%%%%'",
    }
    # Expose the driver module on the wrapper (part of the backend API).
    Database = Database
    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = BaseDatabaseValidation(self)
    def get_connection_params(self):
        """Build the kwargs passed to psycopg2.connect() from settings."""
        settings_dict = self.settings_dict
        # None may be used to connect to the default 'postgres' db
        if settings_dict['NAME'] == '':
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured(
                "settings.DATABASES is improperly configured. "
                "Please supply the NAME value.")
        conn_params = {
            'database': settings_dict['NAME'] or 'postgres',
        }
        conn_params.update(settings_dict['OPTIONS'])
        # These OPTIONS keys configure the wrapper, not psycopg2.connect().
        if 'autocommit' in conn_params:
            del conn_params['autocommit']
        if 'isolation_level' in conn_params:
            del conn_params['isolation_level']
        if settings_dict['USER']:
            conn_params['user'] = settings_dict['USER']
        if settings_dict['PASSWORD']:
            conn_params['password'] = force_str(settings_dict['PASSWORD'])
        if settings_dict['HOST']:
            conn_params['host'] = settings_dict['HOST']
        if settings_dict['PORT']:
            conn_params['port'] = settings_dict['PORT']
        return conn_params
    def get_new_connection(self, conn_params):
        """Open a psycopg2 connection and record its isolation level."""
        connection = Database.connect(**conn_params)
        # self.isolation_level must be set:
        # - after connecting to the database in order to obtain the database's
        #   default when no value is explicitly specified in options.
        # - before calling _set_autocommit() because if autocommit is on, that
        #   will set connection.isolation_level to ISOLATION_LEVEL_AUTOCOMMIT;
        #   and if autocommit is off, on psycopg2 < 2.4.2, _set_autocommit()
        #   needs self.isolation_level.
        options = self.settings_dict['OPTIONS']
        try:
            self.isolation_level = options['isolation_level']
        except KeyError:
            self.isolation_level = connection.isolation_level
        else:
            # Set the isolation level to the value from OPTIONS. This isn't
            # needed on psycopg2 < 2.4.2 because it happens as a side-effect
            # of _set_autocommit(False).
            if (self.isolation_level != connection.isolation_level and
                    self.psycopg2_version >= (2, 4, 2)):
                connection.set_session(isolation_level=self.isolation_level)
        return connection
    def init_connection_state(self):
        """Set client encoding and align the session time zone with Django."""
        settings_dict = self.settings_dict
        self.connection.set_client_encoding('UTF8')
        tz = 'UTC' if settings.USE_TZ else settings_dict.get('TIME_ZONE')
        if tz:
            try:
                get_parameter_status = self.connection.get_parameter_status
            except AttributeError:
                # psycopg2 < 2.0.12 doesn't have get_parameter_status
                conn_tz = None
            else:
                conn_tz = get_parameter_status('TimeZone')
            if conn_tz != tz:
                cursor = self.connection.cursor()
                try:
                    cursor.execute(self.ops.set_time_zone_sql(), [tz])
                finally:
                    cursor.close()
                # Commit after setting the time zone (see #17062)
                if not self.get_autocommit():
                    self.connection.commit()
    def create_cursor(self):
        cursor = self.connection.cursor()
        # Force UTC tzinfo on returned datetimes when USE_TZ is enabled.
        cursor.tzinfo_factory = utc_tzinfo_factory if settings.USE_TZ else None
        return cursor
    def _set_isolation_level(self, isolation_level):
        assert isolation_level in range(1, 5) # Use set_autocommit for level = 0
        # psycopg2 2.4.2 introduced set_session(); older versions use the
        # set_isolation_level() API.
        if self.psycopg2_version >= (2, 4, 2):
            self.connection.set_session(isolation_level=isolation_level)
        else:
            self.connection.set_isolation_level(isolation_level)
    def _set_autocommit(self, autocommit):
        with self.wrap_database_errors:
            if self.psycopg2_version >= (2, 4, 2):
                # Native autocommit attribute is available.
                self.connection.autocommit = autocommit
            else:
                # Emulate autocommit via isolation levels on older psycopg2.
                if autocommit:
                    level = psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT
                else:
                    level = self.isolation_level
                self.connection.set_isolation_level(level)
    def check_constraints(self, table_names=None):
        """
        To check constraints, we set constraints to immediate. Then, when, we're done we must ensure they
        are returned to deferred.
        """
        self.cursor().execute('SET CONSTRAINTS ALL IMMEDIATE')
        self.cursor().execute('SET CONSTRAINTS ALL DEFERRED')
    def is_usable(self):
        """Return True if the connection can still execute a query."""
        try:
            # Use a psycopg cursor directly, bypassing Django's utilities.
            self.connection.cursor().execute("SELECT 1")
        except Database.Error:
            return False
        else:
            return True
    def schema_editor(self, *args, **kwargs):
        "Returns a new instance of this backend's SchemaEditor"
        return DatabaseSchemaEditor(self, *args, **kwargs)
    @cached_property
    def psycopg2_version(self):
        # Parse e.g. "2.5.4 (dt dec pq3 ext)" into the tuple (2, 5, 4).
        version = psycopg2.__version__.split(' ', 1)[0]
        return tuple(int(v) for v in version.split('.'))
    @cached_property
    def pg_version(self):
        """Server version number, fetched lazily over a connection."""
        with self.temporary_connection():
            return get_version(self.connection)
| apache-2.0 |
pypingou/pagure | dev-data.py | 1 | 21989 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Populate the pagure db with some dev data. """
from __future__ import print_function, unicode_literals, absolute_import
import argparse
import os
import tempfile
import pygit2
import shutil
import six
from sqlalchemy import create_engine, MetaData
import pagure
import tests
import pagure.lib.model
import pagure.lib.query
from pagure.lib.login import generate_hashed_value
from pagure.lib.model import create_default_status
from pagure.lib.repo import PagureRepo
'''
Usage:
python dev-data.py --init
python dev-data.py --clean
python dev-data.py --populate
python dev-data.py --all
'''
# Reload and cache the active Pagure configuration for use by this script.
_config = pagure.config.reload_config()
def empty_dev_db(session):
    """Delete every row from every table of the dev database.

    Asks for confirmation on stdin unless the FORCE_DELETE environment
    variable is set; aborts the process on anything but a "y..." answer.
    """
    print('')
    print('WARNING: Deleting all data from', _config['DB_URL'])
    confirmation = os.environ.get("FORCE_DELETE")
    if not confirmation:
        confirmation = six.moves.input('Do you want to continue? (yes/no) ')
    if not confirmation.lower().startswith('y'):
        exit("Aborting.")
    # Walk the tables children-first so foreign keys are never violated.
    for table in reversed(pagure.lib.model_base.BASE.metadata.sorted_tables):
        session.execute(table.delete())
def insert_data(session, username, user_email):
    """Populate the dev database with fixture data.

    Creates users (pingou, foo, and *username*), groups, projects with git
    content on disk, emails, issues, pull requests, flags, tokens and a
    fork, committing after each section. The sections are order-dependent:
    later sections look up rows created earlier.
    """
    _config['EMAIL_SEND'] = False
    _config['TESTING'] = True
    ######################################
    # tags
    item = pagure.lib.model.Tag(
        tag='tag1',
    )
    session.add(item)
    session.commit()
    ######################################
    # Users
    # Create a couple of users
    pingou = item = pagure.lib.model.User(
        user='pingou',
        fullname='PY C',
        password=generate_hashed_value(u'testing123'),
        token=None,
        default_email='[email protected]',
    )
    session.add(item)
    session.commit()
    print("User created: {} <{}>, {}".format(item.user, item.default_email, 'testing123'))
    foo = item = pagure.lib.model.User(
        user='foo',
        fullname='foo bar',
        password=generate_hashed_value(u'testing123'),
        token=None,
        default_email='[email protected]',
    )
    session.add(item)
    session.commit()
    print("User created: {} <{}>, {}".format(item.user, item.default_email, 'testing123'))
    you = item = pagure.lib.model.User(
        user=username,
        fullname=username,
        password=generate_hashed_value(u'testing123'),
        token=None,
        default_email=user_email,
    )
    session.add(item)
    session.commit()
    print("User created: {} <{}>, {}".format(item.user, item.default_email, 'testing123'))
    ######################################
    # pagure_group
    item = pagure.lib.model.PagureGroup(
        group_name='admin',
        group_type='admin',
        user_id=pingou.id,
        display_name='admin',
        description='Admin Group',
    )
    session.add(item)
    session.commit()
    print('Created "admin" group. Pingou is a member.')
    # Add a couple of groups so that we can list them
    item = pagure.lib.model.PagureGroup(
        group_name='group',
        group_type='user',
        user_id=pingou.id,
        display_name='group group',
        description='this is a group group',
    )
    session.add(item)
    session.commit()
    print('Created "group" group. Pingou is a member.')
    item = pagure.lib.model.PagureGroup(
        group_name='rel-eng',
        group_type='user',
        user_id=pingou.id,
        display_name='Release Engineering',
        description='The group of release engineers',
    )
    session.add(item)
    session.commit()
    print('Created "rel-eng" group. Pingou is a member.')
    ######################################
    # projects
    # NOTE(review): redundant re-import; shutil is already imported at
    # module level.
    import shutil
    # delete folder from local instance to start from a clean slate
    if os.path.exists(_config['GIT_FOLDER']):
        shutil.rmtree(_config['GIT_FOLDER'])
    # Create projects
    item = project1 = pagure.lib.model.Project(
        user_id=pingou.id,
        name='test',
        is_fork=False,
        parent_id=None,
        description='test project #1',
        hook_token='aaabbbccc',
    )
    item.close_status = ['Invalid', 'Insufficient data', 'Fixed', 'Duplicate']
    session.add(item)
    session.flush()
    tests.create_locks(session, item)
    item = project2 = pagure.lib.model.Project(
        user_id=pingou.id,
        name='test2',
        is_fork=False,
        parent_id=None,
        description='test project #2',
        hook_token='aaabbbddd',
    )
    item.close_status = ['Invalid', 'Insufficient data', 'Fixed', 'Duplicate']
    session.add(item)
    item = project3 = pagure.lib.model.Project(
        user_id=pingou.id,
        name='test3',
        is_fork=False,
        parent_id=None,
        description='namespaced test project',
        hook_token='aaabbbeee',
        namespace='somenamespace',
    )
    item.close_status = ['Invalid', 'Insufficient data', 'Fixed', 'Duplicate']
    session.add(item)
    session.commit()
    # Materialize the bare git repositories on disk for the projects above.
    tests.create_projects_git(_config['GIT_FOLDER'], bare=True)
    add_content_git_repo(
        os.path.join(_config['GIT_FOLDER'], 'test.git'))
    tests.add_readme_git_repo(
        os.path.join(_config['GIT_FOLDER'], 'test.git'))
    # Add some content to the git repo
    add_content_git_repo(
        os.path.join(_config['GIT_FOLDER'], 'forks', 'pingou',
                     'test.git'))
    tests.add_readme_git_repo(
        os.path.join(_config['GIT_FOLDER'], 'forks', 'pingou',
                     'test.git'))
    tests.add_commit_git_repo(
        os.path.join(_config['GIT_FOLDER'], 'forks', 'pingou',
                     'test.git'), ncommits=10)
    ######################################
    # user_emails
    item = pagure.lib.model.UserEmail(
        user_id=pingou.id,
        email='[email protected]')
    session.add(item)
    item = pagure.lib.model.UserEmail(
        user_id=pingou.id,
        email='[email protected]')
    session.add(item)
    item = pagure.lib.model.UserEmail(
        user_id=foo.id,
        email='[email protected]')
    session.add(item)
    item = pagure.lib.model.UserEmail(
        user_id=you.id,
        email=user_email)
    session.add(item)
    session.commit()
    ######################################
    # user_emails_pending
    email_pend = pagure.lib.model.UserEmailPending(
        user_id=pingou.id,
        email='[email protected]',
        token='abcdef',
    )
    session.add(email_pend)
    session.commit()
    ######################################
    # issues
    # Add an issue and tag it so that we can list them
    item = pagure.lib.model.Issue(
        id=1001,
        uid='foobar',
        project_id=project1.id,
        title='Problem with jenkins build',
        content='For some reason the tests fail at line:24',
        user_id=pingou.id,
    )
    session.add(item)
    session.commit()
    item = pagure.lib.model.Issue(
        id=1002,
        uid='foobar2',
        project_id=project1.id,
        title='Unit tests failing',
        content='Need to fix code for the unit tests to '
                'pass so jenkins build can complete.',
        user_id=pingou.id,
    )
    session.add(item)
    session.commit()
    item = pagure.lib.model.Issue(
        id=1003,
        uid='foobar3',
        project_id=project1.id,
        title='Segfault during execution',
        content='Index out of bounds for variable i?',
        user_id=you.id,
    )
    session.add(item)
    session.commit()
    ######################################
    # pagure_user_group
    group = pagure.lib.query.search_groups(session, pattern=None,
                                           group_name="rel-eng", group_type=None)
    item = pagure.lib.model.PagureUserGroup(
        user_id=pingou.id,
        group_id=group.id
    )
    session.add(item)
    session.commit()
    group = pagure.lib.query.search_groups(session, pattern=None,
                                           group_name="admin", group_type=None)
    item = pagure.lib.model.PagureUserGroup(
        user_id=you.id,
        group_id=group.id
    )
    session.add(item)
    session.commit()
    group = pagure.lib.query.search_groups(session, pattern=None,
                                           group_name="group", group_type=None)
    item = pagure.lib.model.PagureUserGroup(
        user_id=foo.id,
        group_id=group.id
    )
    session.add(item)
    session.commit()
    ######################################
    # projects_groups
    group = pagure.lib.query.search_groups(session, pattern=None,
                                           group_name="rel-eng", group_type=None)
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    item = pagure.lib.model.ProjectGroup(
        project_id=repo.id,
        group_id=group.id,
        access="commit"
    )
    session.add(item)
    session.commit()
    group = pagure.lib.query.search_groups(session, pattern=None,
                                           group_name="admin", group_type=None)
    repo = pagure.lib.query.get_authorized_project(session, 'test2')
    item = pagure.lib.model.ProjectGroup(
        project_id=repo.id,
        group_id=group.id,
        access="admin"
    )
    session.add(item)
    session.commit()
    ######################################
    # pull_requests
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    forked_repo = pagure.lib.query.get_authorized_project(session, 'test')
    req = pagure.lib.query.new_pull_request(
        session=session,
        repo_from=forked_repo,
        branch_from='master',
        repo_to=repo,
        branch_to='master',
        title='Fixing code for unittest',
        user=username,
        status="Open"
    )
    session.commit()
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    forked_repo = pagure.lib.query.get_authorized_project(session, 'test')
    req = pagure.lib.query.new_pull_request(
        session=session,
        repo_from=forked_repo,
        branch_from='master',
        repo_to=repo,
        branch_to='master',
        title='add very nice README',
        user=username,
        status="Open"
    )
    session.commit()
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    forked_repo = pagure.lib.query.get_authorized_project(session, 'test')
    req = pagure.lib.query.new_pull_request(
        session=session,
        repo_from=forked_repo,
        branch_from='master',
        repo_to=repo,
        branch_to='master',
        title='Add README',
        user=username,
        status="Closed"
    )
    session.commit()
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    forked_repo = pagure.lib.query.get_authorized_project(session, 'test')
    req = pagure.lib.query.new_pull_request(
        session=session,
        repo_from=forked_repo,
        branch_from='master',
        repo_to=repo,
        branch_to='master',
        title='Fix some containers',
        user=username,
        status="Merged"
    )
    session.commit()
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    forked_repo = pagure.lib.query.get_authorized_project(session, 'test')
    req = pagure.lib.query.new_pull_request(
        session=session,
        repo_from=forked_repo,
        branch_from='master',
        repo_to=repo,
        branch_to='master',
        title='Fix pull request statuses',
        user=username,
        status="Closed"
    )
    session.commit()
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    forked_repo = pagure.lib.query.get_authorized_project(session, 'test')
    req = pagure.lib.query.new_pull_request(
        session=session,
        repo_from=forked_repo,
        branch_from='master',
        repo_to=repo,
        branch_to='master',
        title='Fixing UI of issue',
        user=username,
        status="Merged"
    )
    session.commit()
    #####################################
    # tokens
    tests.create_tokens(session, user_id=pingou.id, project_id=project1.id)
    ######################################
    # user_projects
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    item = pagure.lib.model.ProjectUser(
        project_id=repo.id,
        user_id=foo.id,
        access="commit"
    )
    session.add(item)
    session.commit()
    repo = pagure.lib.query.get_authorized_project(session, 'test2')
    item = pagure.lib.model.ProjectUser(
        project_id=repo.id,
        user_id=you.id,
        access="commit"
    )
    session.add(item)
    session.commit()
    ######################################
    # issue_comments
    item = pagure.lib.model.IssueComment(
        user_id=pingou.id,
        issue_uid='foobar',
        comment='We may need to adjust the unittests instead of the code.',
    )
    session.add(item)
    session.commit()
    ######################################
    # issue_to_issue
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    all_issues = pagure.lib.query.search_issues(session, repo)
    pagure.lib.query.add_issue_dependency(session, all_issues[0],
                                          all_issues[1], 'pingou')
    ######################################
    # pull_request_comments
    user = pagure.lib.query.search_user(session, username='pingou')
    # only 1 pull request available atm
    pr = pagure.lib.query.get_pull_request_of_user(session, "pingou")[0]
    item = pagure.lib.model.PullRequestComment(
        pull_request_uid=pr.uid,
        user_id=user.id,
        comment="+1 for me. Btw, could you rebase before you merge?",
        notification=0
    )
    session.add(item)
    session.commit()
    ######################################
    # pull_request_flags
    # only 1 pull request available atm
    pr = pagure.lib.query.get_pull_request_of_user(session, "pingou")[0]
    item = pagure.lib.model.PullRequestFlag(
        uid="random_pr_flag_uid",
        pull_request_uid=pr.uid,
        user_id=pingou.id,
        username=pingou.user,
        percent=80,
        comment="Jenkins build passes",
        url=str(pr.id),
        status="success"
    )
    session.add(item)
    session.commit()
    pr = pagure.lib.query.get_pull_request_of_user(session, "foo")[1]
    item = pagure.lib.model.PullRequestFlag(
        uid="oink oink uid",
        pull_request_uid=pr.uid,
        user_id=pingou.id,
        username=pingou.user,
        percent=80,
        comment="Jenkins does not pass",
        url=str(pr.id),
        status="failure"
    )
    session.add(item)
    session.commit()
    ######################################
    # pull_request_assignee
    pr = pagure.lib.query.search_pull_requests(session, requestid='1006')
    pr.assignee_id = pingou.id
    session.commit()
    pr = pagure.lib.query.search_pull_requests(session, requestid='1007')
    pr.assignee_id = you.id
    session.commit()
    pr = pagure.lib.query.search_pull_requests(session, requestid='1004')
    pr.assignee_id = foo.id
    session.commit()
    ######################################
    # tags_issues
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    issues = pagure.lib.query.search_issues(session, repo)
    item = pagure.lib.model.TagIssue(
        issue_uid=issues[0].uid,
        tag='tag1',
    )
    session.add(item)
    session.commit()
    ######################################
    # tokens_acls
    tests.create_tokens_acl(session)
    ######################################
    # Fork a project
    # delete fork data
    # NOTE(review): the bare excepts below deliberately treat "already
    # deleted" as success, but they also hide any other OSError.
    fork_proj_location = "forks/foo/test.git"
    try:
        shutil.rmtree(os.path.join(_config['GIT_FOLDER'],
                                   fork_proj_location))
    except:
        print('git folder already deleted')
    try:
        shutil.rmtree(os.path.join(_config['DOCS_FOLDER'],
                                   fork_proj_location))
    except:
        print('docs folder already deleted')
    try:
        shutil.rmtree(os.path.join(_config['TICKETS_FOLDER'],
                                   fork_proj_location))
    except:
        print('tickets folder already deleted')
    try:
        shutil.rmtree(os.path.join(_config['REQUESTS_FOLDER'],
                                   fork_proj_location))
    except:
        print('requests folder already deleted')
    repo = pagure.lib.query.get_authorized_project(session, 'test')
    result = pagure.lib.query.fork_project(session, 'foo', repo)
    if result == 'Repo "test" cloned to "foo/test"':
        session.commit()
def _branch_parents(repo, branch):
    """Return the parent list for a new commit on *branch* ([] if unborn)."""
    try:
        tip = repo.revparse_single(
            'HEAD' if branch == 'master' else branch)
    except KeyError:
        # Fresh repository: the branch has no commits yet.
        return []
    return [tip.oid.hex]


def _commit_index(repo, branch, message):
    """Commit whatever is currently staged in *repo*'s index onto *branch*."""
    tree = repo.index.write_tree()
    author = pygit2.Signature(
        'Alice Author', '[email protected]')
    committer = pygit2.Signature(
        'Cecil Committer', '[email protected]')
    repo.create_commit(
        'refs/heads/%s' % branch,  # the name of the reference to update
        author,
        committer,
        message,
        # binary string representing the tree object ID
        tree,
        # list of binary strings representing parents of the new commit
        _branch_parents(repo, branch),
    )


def add_content_git_repo(folder, branch='master'):
    """ Create some content for the specified git repo. """
    if not os.path.exists(folder):
        os.makedirs(folder)
    pygit2.init_repository(folder, bare=True)
    # Work in a throw-away clone, then push the commits back to *folder*.
    newfolder = tempfile.mkdtemp(prefix='pagure-tests')
    repo = pygit2.clone_repository(folder, newfolder)
    # First commit: a simple "sources" file at the repository root.
    with open(os.path.join(newfolder, 'sources'), 'w') as stream:
        stream.write('foo\n bar')
    repo.index.add('sources')
    repo.index.write()
    _commit_index(repo, branch, 'Add sources file for testing')
    # Second commit: a file nested inside folder1/folder2/.
    subfolder = os.path.join('folder1', 'folder2')
    if not os.path.exists(os.path.join(newfolder, subfolder)):
        os.makedirs(os.path.join(newfolder, subfolder))
    with open(os.path.join(newfolder, subfolder, 'file'), 'w') as stream:
        stream.write('foo\n bar\nbaz')
    repo.index.add(os.path.join(subfolder, 'file'))
    repo.index.write()
    _commit_index(
        repo, branch, 'Add some directory and a file for more testing')
    # Push to origin
    ori_remote = repo.remotes[0]
    master_ref = repo.lookup_reference(
        'HEAD' if branch == 'master' else 'refs/heads/%s' % branch).resolve()
    refname = '%s:%s' % (master_ref.name, master_ref.name)
    PagureRepo.push(ori_remote, refname)
    shutil.rmtree(newfolder)
def _get_username():
invalid_option = ['pingou', 'foo']
user_name = os.environ.get("USER_NAME")
if not user_name:
print("")
user_name = six.moves.input(
"Enter your username so we can add you into the test data: ")
cnt = 0
while not user_name.strip() or user_name in invalid_option:
print("Reserved names: " + str(invalid_option))
user_name = six.moves.input(
"Enter your username so we can add you into the "
"test data: ")
cnt += 1
if cnt == 4:
print("We asked too many times, bailing")
sys.exit(1)
return user_name
def _get_user_email():
invalid_option = ['[email protected]', '[email protected]']
user_email = os.environ.get("USER_EMAIL")
if not user_email:
print("")
user_email = six.moves.input("Enter your user email: ")
cnt = 0
while not user_email.strip() or user_email in invalid_option:
print("Reserved names: " + str(invalid_option))
user_email = six.moves.input("Enter your user email: ")
cnt += 1
if cnt == 4:
print("We asked too many times, bailing")
sys.exit(1)
return user_email
if __name__ == "__main__":
    # Command-line driver: -i/--init creates the schema, -p/--populate
    # inserts the fixtures, -d/--delete wipes all rows, -a/--all does the
    # three in sequence.
    desc = "Run the dev database initialization/insertion/deletion " \
           "script for db located " + str(_config['DB_URL'])
    parser = argparse.ArgumentParser(prog="dev-data", description=desc)
    parser.add_argument('-i', '--init', action="store_true",
                        help="Create the dev db")
    parser.add_argument('-p', '--populate', action="store_true",
                        help="Add test data to the db")
    parser.add_argument('-d', '--delete', action="store_true",
                        help="Wipe the dev db")
    parser.add_argument('-a', '--all', action="store_true",
                        help="Create, Populate then Wipe the dev db")
    args = parser.parse_args()
    # forcing the user to choose
    if not any(vars(args).values()):
        parser.error('No arguments provided.')
    session = None
    if args.init or args.all:
        session = pagure.lib.model.create_tables(
            db_url=_config["DB_URL"],
            alembic_ini=None,
            acls=_config["ACLS"],
            debug=False)
        print("Database created")
    if args.populate or args.all:
        # Reuse the session from --init when both were requested.
        if not session:
            session = pagure.lib.query.create_session(_config['DB_URL'])
        user_name = _get_username()
        user_email = _get_user_email()
        insert_data(session, user_name, user_email)
    if args.delete or args.all:
        empty_dev_db(session)
| gpl-2.0 |
simonwydooghe/ansible | lib/ansible/modules/packaging/os/slackpkg.py | 95 | 6148 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2014, Kim Nørgaard
# Written by Kim Nørgaard <[email protected]>
# Based on pkgng module written by bleader <[email protected]>
# that was based on pkgin module written by Shaun Zinck <shaun.zinck at gmail.com>
# that was based on pacman module written by Afterburn <https://github.com/afterburn>
# that was based on apt module written by Matthew Williams <[email protected]>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: slackpkg
short_description: Package manager for Slackware >= 12.2
description:
- Manage binary packages for Slackware using 'slackpkg' which
is available in versions after 12.2.
version_added: "2.0"
options:
name:
description:
- name of package to install/remove
required: true
state:
description:
- state of the package, you can use "installed" as an alias for C(present) and removed as one for C(absent).
choices: [ 'present', 'absent', 'latest' ]
required: false
default: present
update_cache:
description:
- update the package database first
required: false
default: false
type: bool
author: Kim Nørgaard (@KimNorgaard)
requirements: [ "Slackware >= 12.2" ]
'''
EXAMPLES = '''
# Install package foo
- slackpkg:
name: foo
state: present
# Remove packages foo and bar
- slackpkg:
name: foo,bar
state: absent
# Make sure that it is the most updated package
- slackpkg:
name: foo
state: latest
'''
from ansible.module_utils.basic import AnsibleModule
def query_package(module, slackpkg_path, name):
    """Return True if package *name* is installed (native arch or noarch).

    Slackware records installed packages as files named
    ``<name>-<version>-<arch>-<build>`` under /var/log/packages.
    """
    # Local imports kept for parity with the original module layout.
    import glob
    import platform

    machine = platform.machine()
    # Bug fix: the previous pattern used "[%s|noarch]", but in glob syntax
    # square brackets are a character class matching a SINGLE character,
    # not an alternation -- it failed for arches such as i586. Match the
    # two alternatives explicitly instead.
    patterns = [
        "/var/log/packages/%s-*-%s-*" % (name, machine),
        "/var/log/packages/%s-*-noarch-*" % name,
    ]
    return any(glob.glob(pattern) for pattern in patterns)
def remove_packages(module, slackpkg_path, packages):
    """Remove every installed package in *packages*.

    Skips packages that are already absent. In check mode no command runs,
    but the would-be change count is still reported. Always exits the
    module via exit_json/fail_json.
    """
    remove_c = 0
    # Using a for loop in case of error, we can report the package that failed
    for package in packages:
        # Query the package first, to see if we even need to remove
        if not query_package(module, slackpkg_path, package):
            continue
        if not module.check_mode:
            # Bug fix: the original embedded a line continuation inside the
            # string literal, leaving a long run of spaces in the command.
            rc, out, err = module.run_command(
                "%s -default_answer=y -batch=on remove %s"
                % (slackpkg_path, package))
        if not module.check_mode and query_package(module, slackpkg_path,
                                                   package):
            module.fail_json(msg="failed to remove %s: %s" % (package, out))
        remove_c += 1
    if remove_c > 0:
        module.exit_json(changed=True, msg="removed %s package(s)" % remove_c)
    module.exit_json(changed=False, msg="package(s) already absent")
def install_packages(module, slackpkg_path, packages):
    """Install every missing package in *packages*.

    Skips packages already installed. In check mode no command runs, but
    the would-be change count is still reported. Always exits the module
    via exit_json/fail_json.
    """
    install_c = 0
    for package in packages:
        if query_package(module, slackpkg_path, package):
            continue
        if not module.check_mode:
            # Bug fix: the original embedded a line continuation inside the
            # string literal, leaving a long run of spaces in the command.
            rc, out, err = module.run_command(
                "%s -default_answer=y -batch=on install %s"
                % (slackpkg_path, package))
        if not module.check_mode and not query_package(module, slackpkg_path,
                                                       package):
            module.fail_json(msg="failed to install %s: %s" % (package, out),
                             stderr=err)
        install_c += 1
    if install_c > 0:
        module.exit_json(changed=True, msg="present %s package(s)"
                         % (install_c))
    module.exit_json(changed=False, msg="package(s) already present")
def upgrade_packages(module, slackpkg_path, packages):
    """Upgrade every package in *packages* via 'slackpkg upgrade'.

    Note: slackpkg output is not inspected, so each package counts as a
    change even when it was already current. Always exits the module via
    exit_json/fail_json.
    """
    install_c = 0
    for package in packages:
        if not module.check_mode:
            # Bug fix: the original embedded a line continuation inside the
            # string literal, leaving a long run of spaces in the command.
            rc, out, err = module.run_command(
                "%s -default_answer=y -batch=on upgrade %s"
                % (slackpkg_path, package))
        if not module.check_mode and not query_package(module, slackpkg_path,
                                                       package):
            module.fail_json(msg="failed to install %s: %s" % (package, out),
                             stderr=err)
        install_c += 1
    if install_c > 0:
        module.exit_json(changed=True, msg="present %s package(s)"
                         % (install_c))
    module.exit_json(changed=False, msg="package(s) already present")
def update_cache(module, slackpkg_path):
    """Refresh the slackpkg package database; fail the module on error."""
    rc, _out, _err = module.run_command("%s -batch=on update" % (slackpkg_path))
    if rc != 0:
        module.fail_json(msg="Could not update package cache")
def main():
    # Ansible entry point: parse the module parameters and dispatch to the
    # requested operation. Every branch terminates the process through
    # module.exit_json()/fail_json().
    module = AnsibleModule(
        argument_spec=dict(
            state=dict(default="installed", choices=['installed', 'removed', 'absent', 'present', 'latest']),
            name=dict(aliases=["pkg"], required=True, type='list'),
            update_cache=dict(default=False, aliases=["update-cache"],
                              type='bool'),
        ),
        supports_check_mode=True)
    slackpkg_path = module.get_bin_path('slackpkg', True)
    p = module.params
    pkgs = p['name']
    # Optionally refresh the slackpkg database before acting on packages.
    if p["update_cache"]:
        update_cache(module, slackpkg_path)
    if p['state'] == 'latest':
        upgrade_packages(module, slackpkg_path, pkgs)
    elif p['state'] in ['present', 'installed']:
        install_packages(module, slackpkg_path, pkgs)
    elif p["state"] in ['removed', 'absent']:
        remove_packages(module, slackpkg_path, pkgs)
if __name__ == '__main__':
    main()
| gpl-3.0 |
simonwydooghe/ansible | lib/ansible/modules/network/nxos/nxos_vxlan_vtep_vni.py | 8 | 14044 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_vxlan_vtep_vni
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Creates a Virtual Network Identifier member (VNI)
description:
- Creates a Virtual Network Identifier member (VNI) for an NVE
overlay interface.
author: Gabriele Gerbino (@GGabriele)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- default, where supported, restores params default value.
options:
interface:
description:
- Interface name for the VXLAN Network Virtualization Endpoint.
required: true
vni:
description:
- ID of the Virtual Network Identifier.
required: true
assoc_vrf:
description:
- This attribute is used to identify and separate processing VNIs
that are associated with a VRF and used for routing. The VRF
and VNI specified with this command must match the configuration
of the VNI under the VRF.
type: bool
ingress_replication:
description:
- Specifies mechanism for host reachability advertisement.
choices: ['bgp','static', 'default']
multicast_group:
description:
- The multicast group (range) of the VNI. Valid values are
string and keyword 'default'.
peer_list:
description:
- Set the ingress-replication static peer list. Valid values
are an array, a space-separated string of ip addresses,
or the keyword 'default'.
suppress_arp:
description:
- Suppress arp under layer 2 VNI.
type: bool
suppress_arp_disable:
description:
- Overrides the global ARP suppression config.
This is available on NX-OS 9K series running 9.2.x or higher.
type: bool
version_added: "2.8"
state:
description:
- Determines whether the config should be present or not
on the device.
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- nxos_vxlan_vtep_vni:
interface: nve1
vni: 6000
ingress_replication: default
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["interface nve1", "member vni 6000"]
'''
import re
from ansible.module_utils.network.nxos.nxos import get_config, load_config
from ansible.module_utils.network.nxos.nxos import nxos_argument_spec
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.common.config import CustomNetworkConfig
# Module arguments whose presence in the config is a bare keyword (boolean).
BOOL_PARAMS = [
    'assoc_vrf',
    'suppress_arp',
    'suppress_arp_disable',
]
# Values that the keyword 'default' maps back to for each argument.
PARAM_TO_DEFAULT_KEYMAP = {
    'multicast_group': '',
    'peer_list': [],
    'ingress_replication': '',
}
# Module argument name -> NX-OS CLI keyword.
PARAM_TO_COMMAND_KEYMAP = {
    'assoc_vrf': 'associate-vrf',
    'interface': 'interface',
    'vni': 'member vni',
    'ingress_replication': 'ingress-replication protocol',
    'multicast_group': 'mcast-group',
    'peer_list': 'peer-ip',
    'suppress_arp': 'suppress-arp',
    'suppress_arp_disable': 'suppress-arp disable',
}


def get_value(arg, config, module):
    """Extract the configured value of module argument *arg* from *config*.

    Booleans report the presence of the bare keyword, 'peer_list' collects
    every 'peer-ip' line, and other arguments return the first value found
    ('' when absent).
    """
    command = PARAM_TO_COMMAND_KEYMAP[arg]
    command_val_re = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(command), re.M)
    if arg in BOOL_PARAMS:
        command_re = re.compile(r'\s+{0}\s*$'.format(command), re.M)
        value = False
        if command_re.search(config):
            value = True
    elif arg == 'peer_list':
        # Bug fix: re.M was previously passed as the second positional
        # argument of findall()/search() on an already-compiled pattern.
        # For compiled patterns that argument is *pos* (the start offset),
        # so matching silently skipped the first 8 characters of *config*.
        # The flag is already baked into the compiled pattern above.
        has_command_val = command_val_re.findall(config)
        value = []
        if has_command_val:
            value = has_command_val
    else:
        value = ''
        has_command_val = command_val_re.search(config)
        if has_command_val:
            value = has_command_val.group('value')
    return value
def check_interface(module, netcfg):
    """Return the configured NVE interface name, or '' if none exists."""
    match = re.search(r'(?:interface nve)(?P<value>.*)$', str(netcfg), re.M)
    if match:
        return 'nve{0}'.format(match.group('value'))
    return ''
def get_existing(module, args):
    # Read the device's running config and return a tuple of
    # (current values of *args* for the requested VNI, NVE interface name).
    # The dict is empty when no NVE interface or matching VNI exists.
    existing = {}
    netcfg = CustomNetworkConfig(indent=2, contents=get_config(module))
    interface_exist = check_interface(module, netcfg)
    if interface_exist:
        parents = ['interface {0}'.format(interface_exist)]
        temp_config = netcfg.get_section(parents)
        # An L3 VNI renders as "member vni X associate-vrf"; descend into
        # whichever form of the member line is present.
        if 'member vni {0} associate-vrf'.format(module.params['vni']) in temp_config:
            parents.append('member vni {0} associate-vrf'.format(module.params['vni']))
            config = netcfg.get_section(parents)
        elif "member vni {0}".format(module.params['vni']) in temp_config:
            parents.append('member vni {0}'.format(module.params['vni']))
            config = netcfg.get_section(parents)
        else:
            config = {}
        if config:
            # 'interface' and 'vni' are known already; parse the rest out
            # of the member-vni section text.
            for arg in args:
                if arg not in ['interface', 'vni']:
                    existing[arg] = get_value(arg, config, module)
            existing['interface'] = interface_exist
            existing['vni'] = module.params['vni']
    return existing, interface_exist
def apply_key_map(key_map, table):
    """Rename *table*'s keys through *key_map*, dropping unmapped entries."""
    return {
        key_map[key]: value
        for key, value in table.items()
        if key_map.get(key)
    }
def state_present(module, existing, proposed, candidate):
    # Translate the proposed argument changes into NX-OS CLI commands and
    # queue them on *candidate*. The ordering and parent/child grouping of
    # the generated commands is load-bearing; see the grouping logic below.
    commands = list()
    proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
    existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)
    for key, value in proposed_commands.items():
        if key == 'associate-vrf':
            command = 'member vni {0} {1}'.format(module.params['vni'], key)
            if not value:
                command = 'no {0}'.format(command)
            commands.append(command)
        elif key == 'peer-ip' and value != []:
            # One command per static ingress-replication peer.
            for peer in value:
                commands.append('{0} {1}'.format(key, peer))
        elif key == 'mcast-group' and value != existing_commands.get(key):
            # Clear the old group first; re-set only when a non-default
            # value was requested.
            commands.append('no {0}'.format(key))
            vni_command = 'member vni {0}'.format(module.params['vni'])
            if vni_command not in commands:
                commands.append('member vni {0}'.format(module.params['vni']))
            if value != PARAM_TO_DEFAULT_KEYMAP.get('multicast_group', 'default'):
                commands.append('{0} {1}'.format(key, value))
        elif key == 'ingress-replication protocol' and value != existing_commands.get(key):
            evalue = existing_commands.get(key)
            dvalue = PARAM_TO_DEFAULT_KEYMAP.get('ingress_replication', 'default')
            if value != dvalue:
                # Switching protocols requires removing the old one first.
                if evalue and evalue != dvalue:
                    commands.append('no {0} {1}'.format(key, evalue))
                commands.append('{0} {1}'.format(key, value))
            else:
                if evalue:
                    commands.append('no {0} {1}'.format(key, evalue))
        elif value is True:
            commands.append(key)
        elif value is False:
            commands.append('no {0}'.format(key))
        elif value == 'default' or value == []:
            if existing_commands.get(key):
                existing_value = existing_commands.get(key)
                if key == 'peer-ip':
                    for peer in existing_value:
                        commands.append('no {0} {1}'.format(key, peer))
                else:
                    commands.append('no {0} {1}'.format(key, existing_value))
            else:
                # Nothing configured: booleans get a plain "no <keyword>".
                if key.replace(' ', '_').replace('-', '_') in BOOL_PARAMS:
                    commands.append('no {0}'.format(key.lower()))
                else:
                    command = '{0} {1}'.format(key, value.lower())
                    commands.append(command)
    if commands:
        vni_command = 'member vni {0}'.format(module.params['vni'])
        ingress_replications_command = 'ingress-replication protocol static'
        ingress_replicationb_command = 'ingress-replication protocol bgp'
        ingress_replicationns_command = 'no ingress-replication protocol static'
        ingress_replicationnb_command = 'no ingress-replication protocol bgp'
        interface_command = 'interface {0}'.format(module.params['interface'])
        # peer-ip commands must be nested under the ingress-replication
        # line; everything else becomes part of the parent hierarchy.
        if any(c in commands for c in (ingress_replications_command, ingress_replicationb_command,
                                       ingress_replicationnb_command, ingress_replicationns_command)):
            static_level_cmds = [cmd for cmd in commands if 'peer' in cmd]
            parents = [interface_command, vni_command]
            commands = [cmd for cmd in commands if 'peer' not in cmd]
            for cmd in commands:
                parents.append(cmd)
            candidate.add(static_level_cmds, parents=parents)
        elif 'peer-ip' in commands[0]:
            # Only peers changed: reuse the already-configured static mode.
            static_level_cmds = [cmd for cmd in commands]
            parents = [interface_command, vni_command, ingress_replications_command]
            candidate.add(static_level_cmds, parents=parents)
        if vni_command in commands:
            parents = [interface_command]
            commands.remove(vni_command)
            if module.params['assoc_vrf'] is None:
                parents.append(vni_command)
            candidate.add(commands, parents=parents)
def state_absent(module, existing, proposed, candidate):
    """Queue removal of the member VNI (with associate-vrf when configured)."""
    vni = module.params['vni']
    if existing['assoc_vrf']:
        command = 'no member vni {0} associate-vrf'.format(vni)
    else:
        command = 'no member vni {0}'.format(vni)
    parents = ['interface {0}'.format(module.params['interface'])]
    candidate.add([command], parents=parents)
def main():
    """Ansible entry point: manage a VXLAN VTEP (NVE interface) VNI member.

    Builds the argument spec, validates the NVE interface state on the
    device, computes the delta between existing and proposed settings, and
    pushes any required configuration (unless running in check mode).
    """
    argument_spec = dict(
        interface=dict(required=True, type='str'),
        vni=dict(required=True, type='str'),
        assoc_vrf=dict(required=False, type='bool'),
        multicast_group=dict(required=False, type='str'),
        peer_list=dict(required=False, type='list'),
        suppress_arp=dict(required=False, type='bool'),
        suppress_arp_disable=dict(required=False, type='bool'),
        ingress_replication=dict(required=False, type='str', choices=['bgp', 'static', 'default']),
        state=dict(choices=['present', 'absent'], default='present', required=False),
    )
    argument_spec.update(nxos_argument_spec)
    # Option combinations the device cannot accept together.
    mutually_exclusive = [('suppress_arp', 'suppress_arp_disable'),
                          ('assoc_vrf', 'multicast_group'),
                          ('assoc_vrf', 'suppress_arp'),
                          ('assoc_vrf', 'suppress_arp_disable'),
                          ('assoc_vrf', 'ingress_replication')]
    module = AnsibleModule(
        argument_spec=argument_spec,
        mutually_exclusive=mutually_exclusive,
        supports_check_mode=True,
    )
    warnings = list()
    result = {'changed': False, 'commands': [], 'warnings': warnings}
    # peer_list is only valid with static ingress replication; normalize the
    # list in place ('default' sentinel, or whitespace-stripped peer IPs).
    if module.params['peer_list']:
        if module.params['peer_list'][0] != 'default' and module.params['ingress_replication'] != 'static':
            module.fail_json(msg='ingress_replication=static is required '
                                 'when using peer_list param')
        else:
            peer_list = module.params['peer_list']
            if peer_list[0] == 'default':
                module.params['peer_list'] = 'default'
            else:
                stripped_peer_list = list(map(str.strip, peer_list))
                module.params['peer_list'] = stripped_peer_list
    state = module.params['state']
    args = PARAM_TO_COMMAND_KEYMAP.keys()
    existing, interface_exist = get_existing(module, args)
    # Sanity-check the NVE interface: it must pre-exist for 'present', and
    # only one NVE interface is supported per switch.
    if state == 'present':
        if not interface_exist:
            module.fail_json(msg="The proposed NVE interface does not exist. Use nxos_interface to create it first.")
        elif interface_exist != module.params['interface']:
            module.fail_json(msg='Only 1 NVE interface is allowed on the switch.')
    elif state == 'absent':
        if interface_exist != module.params['interface']:
            # Nothing to remove; report no change.
            module.exit_json(**result)
        elif existing and existing['vni'] != module.params['vni']:
            module.fail_json(
                msg="ERROR: VNI delete failed: Could not find vni node for {0}".format(module.params['vni']),
                existing_vni=existing['vni']
            )
    # Keep only user-supplied params that map to device commands.
    proposed_args = dict((k, v) for k, v in module.params.items()
                         if v is not None and k in args)
    proposed = {}
    for key, value in proposed_args.items():
        # Translate the 'default' sentinel into the device default value.
        if key in ['multicast_group', 'peer_list', 'ingress_replication']:
            if str(value).lower() == 'default':
                value = PARAM_TO_DEFAULT_KEYMAP.get(key, 'default')
        # Only propose values that differ from what is already configured.
        if key != 'interface' and existing.get(key) != value:
            proposed[key] = value
    candidate = CustomNetworkConfig(indent=3)
    if state == 'present':
        state_present(module, existing, proposed, candidate)
    elif existing and state == 'absent':
        state_absent(module, existing, proposed, candidate)
    if candidate:
        candidate = candidate.items_text()
        result['changed'] = True
        result['commands'] = candidate
        # Check mode reports the commands without applying them.
        if not module.check_mode:
            load_config(module, candidate)
    module.exit_json(**result)
| gpl-3.0 |
chris4795/u-boot-novena | tools/patman/get_maintainer.py | 8 | 1239 | # Copyright (c) 2012 The Chromium OS Authors.
#
# SPDX-License-Identifier: GPL-2.0+
#
import command
import gitutil
import os
def FindGetMaintainer():
    """Locate the get_maintainer.pl helper script.

    Returns:
        Full path to the script if it exists, otherwise None.
    """
    candidate_dirs = [
        os.path.join(gitutil.GetTopLevel(), 'scripts'),
    ]
    for directory in candidate_dirs:
        script = os.path.join(directory, 'get_maintainer.pl')
        if os.path.isfile(script):
            return script
    return None
def GetMaintainer(fname, verbose=False):
    """Run get_maintainer.pl against a patch file, if the script exists.

    The script is searched for in the 'scripts' directory at the top of the
    git tree.  When it cannot be found the lookup fails silently, optionally
    printing a warning, and no addresses are returned.

    Args:
        fname: Path of the patch file to run get_maintainer.pl on.
        verbose: If True, print a warning when the script is missing.

    Returns:
        A list of email addresses to CC to.
    """
    script = FindGetMaintainer()
    if not script:
        if verbose:
            print("WARNING: Couldn't find get_maintainer.pl")
        return []
    output = command.Output(script, '--norolestats', fname)
    return output.splitlines()
| gpl-2.0 |
sanjuro/RCJK | vendor/gdata/Crypto/Hash/HMAC.py | 226 | 3330 | """HMAC (Keyed-Hashing for Message Authentication) Python module.
Implements the HMAC algorithm as described by RFC 2104.
This is just a copy of the Python 2.2 HMAC module, modified to work when
used on versions of Python before 2.2.
"""
__revision__ = "$Id: HMAC.py,v 1.5 2002/07/25 17:19:02 z3p Exp $"
import string
def _strxor(s1, s2):
"""Utility method. XOR the two strings s1 and s2 (must have same length).
"""
return "".join(map(lambda x, y: chr(ord(x) ^ ord(y)), s1, s2))
# The size of the digests returned by HMAC depends on the underlying
# hashing module used.
digest_size = None
class HMAC:
    """RFC2104 HMAC class.

    This supports the API for Cryptographic Hash Functions (PEP 247).

    NOTE(review): this is Python 2 era code -- it defaults to the removed
    ``md5`` module and treats keys/messages as byte strings of type ``str``.
    It will not run unmodified on Python 3; confirm before reuse.
    """
    def __init__(self, key, msg = None, digestmod = None):
        """Create a new HMAC object.

        key: key for the keyed hash object.
        msg: Initial input for the hash, if provided.
        digestmod: A module supporting PEP 247. Defaults to the md5 module.
        """
        if digestmod == None:
            import md5
            digestmod = md5
        self.digestmod = digestmod
        # Two running hash states, per RFC 2104: inner hashes
        # (key XOR ipad) + message, outer hashes (key XOR opad) + inner digest.
        self.outer = digestmod.new()
        self.inner = digestmod.new()
        try:
            self.digest_size = digestmod.digest_size
        except AttributeError:
            # Module doesn't advertise its digest size; measure an empty digest.
            self.digest_size = len(self.outer.digest())
        # 64 bytes is the input block size of MD5/SHA-1, the hashes this
        # module was written for.
        blocksize = 64
        ipad = "\x36" * blocksize
        opad = "\x5C" * blocksize
        # RFC 2104: keys longer than one block are first hashed down;
        # shorter keys are zero-padded up to the block size.
        if len(key) > blocksize:
            key = digestmod.new(key).digest()
        key = key + chr(0) * (blocksize - len(key))
        self.outer.update(_strxor(key, opad))
        self.inner.update(_strxor(key, ipad))
        if (msg):
            self.update(msg)

    ## def clear(self):
    ##     raise NotImplementedError, "clear() method not available in HMAC."

    def update(self, msg):
        """Update this hashing object with the string msg.
        """
        self.inner.update(msg)

    def copy(self):
        """Return a separate copy of this hashing object.

        An update to this copy won't affect the original object.
        """
        # NOTE(review): HMAC("") runs __init__ with the default digestmod
        # (importing md5) before every field is overwritten below.
        other = HMAC("")
        other.digestmod = self.digestmod
        other.inner = self.inner.copy()
        other.outer = self.outer.copy()
        return other

    def digest(self):
        """Return the hash value of this hashing object.

        This returns a string containing 8-bit data. The object is
        not altered in any way by this function; you can continue
        updating the object after calling this function.
        """
        # Finalize on a copy of the outer state so this object stays usable.
        h = self.outer.copy()
        h.update(self.inner.digest())
        return h.digest()

    def hexdigest(self):
        """Like digest(), but returns a string of hexadecimal digits instead.
        """
        # NOTE(review): string.zfill() was removed in Python 3; as written
        # this method is Python 2 only.
        return "".join([string.zfill(hex(ord(x))[2:], 2)
                        for x in tuple(self.digest())])
def new(key, msg = None, digestmod = None):
    """Return a fresh HMAC hashing object.

    key: The starting key for the hash.
    msg: If given, this string is immediately hashed into the object's
        starting state.
    digestmod: A PEP 247 module providing the underlying hash (md5 default).

    Feed more data with the object's update() method and read the value at
    any time via digest() or hexdigest().
    """
    return HMAC(key, msg, digestmod)
| apache-2.0 |
wayetender/whip | whip/src/adapter/frontends/rest.py | 1 | 3325 | from . import ProxyTerminus
from flask import Flask
from flask import request
from flask import make_response
import urllib2
import socket
import json
import threading
import datetime
import ssl
import logging
log = logging.getLogger('werkzeug')
log.setLevel(logging.ERROR)
network_times = open('times', 'w')
network_times.truncate()
class RestProxyTerminus(ProxyTerminus):
    """ProxyTerminus that fronts a REST/HTTPS service.

    Incoming requests are served through a background Flask app and relayed
    to the actual backend endpoint with urllib2, with round-trip timings
    appended to the module-level 'times' file.

    NOTE(review): Python 2 code (urllib2 and print statements); will not run
    on Python 3 as written.
    """
    def __init__(self, ip, port):
        # Address of the real backing service requests are relayed to.
        self.actual_ip = ip
        self.actual_port = port

    def serve_requests(self, client_proxy, endpoint = None):
        '''Start an HTTPS Flask listener in a daemon thread.

        returns: endpoint it is listening on'''
        # Self-signed cert/key pair used for the HTTPS listener.
        context = ('server.pem', 'server.pem')
        if not endpoint:
            # No endpoint requested: discover a free port by binding port 0,
            # then release it immediately for Flask to re-bind.
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            sock.bind(('localhost', 0))
            port = sock.getsockname()[1]
            sock.close()
            host = '0.0.0.0'
        else:
            host = endpoint[0]
            port = endpoint[1]
        self.app = Flask(__name__)
        def handle(p):
            # Catch-all view: repackage the Flask request for the client
            # proxy, then mirror the proxied response headers/body back.
            try:
                arg = {
                    'args': dict(request.args.items()),
                    'headers': dict(request.headers.items())
                }
                result = client_proxy.on_unproxied_request('/%s' % p, [arg])
                resp = make_response(json.dumps(result['body']))
                for (header, v) in result['headers'].items():
                    # Content-length is recomputed for the re-serialized body.
                    if header == 'content-length': continue
                    resp.headers[header] = v
                return resp
            except:
                # Best-effort logging; errors are printed, not re-raised
                # (Python 2 print statements).
                import sys, traceback
                print traceback.print_exc(file=sys.stdout)
                print sys.exc_info()
        self.app.add_url_rule("/<path:p>", 'handle', handle)
        self.app.config['PROPAGATE_EXCEPTIONS'] =True
        # Run Flask in a daemon thread so this call returns immediately.
        t = threading.Thread(target=lambda: self.app.run(host=host, port=port, ssl_context=context, threaded=True, debug=False, ))
        t.setDaemon(True)
        t.start()
        return ('127.0.0.1', port)

    def execute_request(self, callsite):
        """Forward a captured call to the actual backend and return its response."""
        # Rebuild the URL against the backend, keeping the Host header's own
        # port when it carries one, else using the configured actual port.
        h = callsite.args[0]['headers']['Host']
        apath = 'https://%s:%s%s' % (h, self.actual_port, callsite.opname) if ':' not in h else "https://%s%s" % (h, callsite.opname)
        # Certificate verification disabled (self-signed backend certs).
        context = ssl._create_unverified_context()
        nrequest = urllib2.Request(apath)
        for (header, v) in callsite.args[0]['headers'].items():
            # Drop length/encoding headers; urllib2 sets its own.
            if header == 'Content-Length' or header == 'Accept-Encoding': continue
            nrequest.add_header(header, v)
        # Time the proxied round trip (milliseconds) and log it.
        startTime = datetime.datetime.now()
        proxy_resp = urllib2.urlopen(nrequest, context=context)
        body = str(proxy_resp.read()).encode('ascii', 'ignore')
        code = proxy_resp.getcode()
        tempTime = (datetime.datetime.now() - startTime).total_seconds() * 1000
        network_times.write("%s\n" % tempTime)
        network_times.flush()
        res = {
            'headers': dict(proxy_resp.info()),
            'body': json.loads(body),
            'code': code
        }
        return res
def generate(config, terminal, serviceconfig):
    """Build a RestProxyTerminus from an adapter service configuration.

    Args:
        config: Global adapter configuration (unused here, part of the
            generator interface).
        terminal: Terminal descriptor (unused here, part of the interface).
        serviceconfig: Per-service settings; must contain 'mapsto' and an
            'actual' (ip, port) tuple for the backing service.

    Returns:
        A RestProxyTerminus bound to the actual backend endpoint.

    Raises:
        ValueError: If the required 'mapsto' key is missing.
    """
    if 'mapsto' not in serviceconfig:
        raise ValueError("mapstoservice must be set")
    (ip, port) = serviceconfig['actual']
    # The optional 'fromhttppath' key was read into an unused local here;
    # it is currently ignored by this terminus, so the dead read is removed.
    return RestProxyTerminus(ip, port)
| gpl-2.0 |
Zhongqilong/kbengine | kbe/res/scripts/common/Lib/test/test_email/test_policy.py | 79 | 13417 | import io
import types
import textwrap
import unittest
import email.policy
import email.parser
import email.generator
from email import headerregistry
def make_defaults(base_defaults, differences):
    """Return a copy of *base_defaults* with *differences* overriding it."""
    merged = dict(base_defaults)
    merged.update(differences)
    return merged
class PolicyAPITests(unittest.TestCase):
    """Exercise the public Policy API: default values, immutability,
    cloning, policy addition, defect handling, and header factories."""

    longMessage = True

    # Base default values.
    compat32_defaults = {
        'max_line_length': 78,
        'linesep': '\n',
        'cte_type': '8bit',
        'raise_on_defect': False,
        }
    # These default values are the ones set on email.policy.default.
    # If any of these defaults change, the docs must be updated.
    policy_defaults = compat32_defaults.copy()
    policy_defaults.update({
        'raise_on_defect': False,
        'header_factory': email.policy.EmailPolicy.header_factory,
        'refold_source': 'long',
        'content_manager': email.policy.EmailPolicy.content_manager,
        })

    # For each policy under test, we give here what we expect the defaults to
    # be for that policy.  The second argument to make defaults is the
    # difference between the base defaults and that for the particular policy.
    new_policy = email.policy.EmailPolicy()
    policies = {
        email.policy.compat32: make_defaults(compat32_defaults, {}),
        email.policy.default: make_defaults(policy_defaults, {}),
        email.policy.SMTP: make_defaults(policy_defaults,
                                         {'linesep': '\r\n'}),
        email.policy.HTTP: make_defaults(policy_defaults,
                                         {'linesep': '\r\n',
                                          'max_line_length': None}),
        email.policy.strict: make_defaults(policy_defaults,
                                           {'raise_on_defect': True}),
        new_policy: make_defaults(policy_defaults, {}),
        }
    # Creating a new policy creates a new header factory.  There is a test
    # later that proves this.
    policies[new_policy]['header_factory'] = new_policy.header_factory

    def test_defaults(self):
        # Every policy's attributes must match the table above.
        for policy, expected in self.policies.items():
            for attr, value in expected.items():
                self.assertEqual(getattr(policy, attr), value,
                                 ("change {} docs/docstrings if defaults have "
                                  "changed").format(policy))

    def test_all_attributes_covered(self):
        # Guard against new policy attributes escaping the tables above.
        for policy, expected in self.policies.items():
            for attr in dir(policy):
                if (attr.startswith('_') or
                        isinstance(getattr(email.policy.EmailPolicy, attr),
                                   types.FunctionType)):
                    continue
                else:
                    self.assertIn(attr, expected,
                                  "{} is not fully tested".format(attr))

    def test_abc(self):
        # Policy is abstract; instantiating it must name every abstract method.
        with self.assertRaises(TypeError) as cm:
            email.policy.Policy()
        msg = str(cm.exception)
        abstract_methods = ('fold',
                            'fold_binary',
                            'header_fetch_parse',
                            'header_source_parse',
                            'header_store_parse')
        for method in abstract_methods:
            self.assertIn(method, msg)

    def test_policy_is_immutable(self):
        # All attributes (known and unknown) are read-only after creation.
        for policy, defaults in self.policies.items():
            for attr in defaults:
                with self.assertRaisesRegex(AttributeError, attr+".*read-only"):
                    setattr(policy, attr, None)
            with self.assertRaisesRegex(AttributeError, 'no attribute.*foo'):
                policy.foo = None

    def test_set_policy_attrs_when_cloned(self):
        # None of the attributes has a default value of None, so we set them
        # all to None in the clone call and check that it worked.
        for policyclass, defaults in self.policies.items():
            testattrdict = {attr: None for attr in defaults}
            policy = policyclass.clone(**testattrdict)
            for attr in defaults:
                self.assertIsNone(getattr(policy, attr))

    def test_reject_non_policy_keyword_when_called(self):
        # Unknown keywords (including near-miss typos) must be rejected.
        for policyclass in self.policies:
            with self.assertRaises(TypeError):
                policyclass(this_keyword_should_not_be_valid=None)
            with self.assertRaises(TypeError):
                policyclass(newtline=None)

    def test_policy_addition(self):
        # p1 + p2 yields a policy whose right operand wins on conflicts.
        expected = self.policy_defaults.copy()
        p1 = email.policy.default.clone(max_line_length=100)
        p2 = email.policy.default.clone(max_line_length=50)
        added = p1 + p2
        expected.update(max_line_length=50)
        for attr, value in expected.items():
            self.assertEqual(getattr(added, attr), value)
        added = p2 + p1
        expected.update(max_line_length=100)
        for attr, value in expected.items():
            self.assertEqual(getattr(added, attr), value)
        added = added + email.policy.default
        for attr, value in expected.items():
            self.assertEqual(getattr(added, attr), value)

    def test_register_defect(self):
        # register_defect appends to the object's defects list, in order.
        class Dummy:
            def __init__(self):
                self.defects = []
        obj = Dummy()
        defect = object()
        policy = email.policy.EmailPolicy()
        policy.register_defect(obj, defect)
        self.assertEqual(obj.defects, [defect])
        defect2 = object()
        policy.register_defect(obj, defect2)
        self.assertEqual(obj.defects, [defect, defect2])

    # Helper fixtures for the defect-handling tests below.
    class MyObj:
        def __init__(self):
            self.defects = []

    class MyDefect(Exception):
        pass

    def test_handle_defect_raises_on_strict(self):
        foo = self.MyObj()
        defect = self.MyDefect("the telly is broken")
        with self.assertRaisesRegex(self.MyDefect, "the telly is broken"):
            email.policy.strict.handle_defect(foo, defect)

    def test_handle_defect_registers_defect(self):
        # Non-strict policies record defects instead of raising.
        foo = self.MyObj()
        defect1 = self.MyDefect("one")
        email.policy.default.handle_defect(foo, defect1)
        self.assertEqual(foo.defects, [defect1])
        defect2 = self.MyDefect("two")
        email.policy.default.handle_defect(foo, defect2)
        self.assertEqual(foo.defects, [defect1, defect2])

    # Policy subclass that records defects on itself rather than the object.
    class MyPolicy(email.policy.EmailPolicy):
        defects = None
        def __init__(self, *args, **kw):
            super().__init__(*args, defects=[], **kw)
        def register_defect(self, obj, defect):
            self.defects.append(defect)

    def test_overridden_register_defect_still_raises(self):
        foo = self.MyObj()
        defect = self.MyDefect("the telly is broken")
        with self.assertRaisesRegex(self.MyDefect, "the telly is broken"):
            self.MyPolicy(raise_on_defect=True).handle_defect(foo, defect)

    def test_overriden_register_defect_works(self):
        # With the override, defects land on the policy, not the object.
        foo = self.MyObj()
        defect1 = self.MyDefect("one")
        my_policy = self.MyPolicy()
        my_policy.handle_defect(foo, defect1)
        self.assertEqual(my_policy.defects, [defect1])
        self.assertEqual(foo.defects, [])
        defect2 = self.MyDefect("two")
        my_policy.handle_defect(foo, defect2)
        self.assertEqual(my_policy.defects, [defect1, defect2])
        self.assertEqual(foo.defects, [])

    def test_default_header_factory(self):
        # Unknown header names parse as UnstructuredHeader by default.
        h = email.policy.default.header_factory('Test', 'test')
        self.assertEqual(h.name, 'Test')
        self.assertIsInstance(h, headerregistry.UnstructuredHeader)
        self.assertIsInstance(h, headerregistry.BaseHeader)

    # Minimal custom header class used by the factory tests below.
    class Foo:
        parse = headerregistry.UnstructuredHeader.parse

    def test_each_Policy_gets_unique_factory(self):
        # Registering a type on one policy's factory must not leak to another.
        policy1 = email.policy.EmailPolicy()
        policy2 = email.policy.EmailPolicy()
        policy1.header_factory.map_to_type('foo', self.Foo)
        h = policy1.header_factory('foo', 'test')
        self.assertIsInstance(h, self.Foo)
        self.assertNotIsInstance(h, headerregistry.UnstructuredHeader)
        h = policy2.header_factory('foo', 'test')
        self.assertNotIsInstance(h, self.Foo)
        self.assertIsInstance(h, headerregistry.UnstructuredHeader)

    def test_clone_copies_factory(self):
        # A clone shares its parent's header factory (it is copied by
        # reference, so registrations are visible to both).
        policy1 = email.policy.EmailPolicy()
        policy2 = policy1.clone()
        policy1.header_factory.map_to_type('foo', self.Foo)
        h = policy1.header_factory('foo', 'test')
        self.assertIsInstance(h, self.Foo)
        h = policy2.header_factory('foo', 'test')
        self.assertIsInstance(h, self.Foo)

    def test_new_factory_overrides_default(self):
        # A custom factory survives policy addition in either operand order.
        mypolicy = email.policy.EmailPolicy()
        myfactory = mypolicy.header_factory
        newpolicy = mypolicy + email.policy.strict
        self.assertEqual(newpolicy.header_factory, myfactory)
        newpolicy = email.policy.strict + mypolicy
        self.assertEqual(newpolicy.header_factory, myfactory)

    def test_adding_default_policies_preserves_default_factory(self):
        newpolicy = email.policy.default + email.policy.strict
        self.assertEqual(newpolicy.header_factory,
                         email.policy.EmailPolicy.header_factory)
        self.assertEqual(newpolicy.__dict__, {'raise_on_defect': True})
# For adding subclassed objects, make sure the usual rules apply (subclass
# wins), but that the order still works (right overrides left).
class TestPolicyPropagation(unittest.TestCase):
    """Verify that a policy passed to the parse entry points actually reaches
    the feedparser and the resulting Message objects."""

    # The abstract methods are used by the parser but not by the wrapper
    # functions that call it, so if the exception gets raised we know that the
    # policy was actually propagated all the way to feedparser.
    class MyPolicy(email.policy.Policy):
        def badmethod(self, *args, **kw):
            raise Exception("test")
        # NOTE(review): 'header_fetch_parser' does not match the abstract
        # method name 'header_fetch_parse'; confirm this is intentional.
        fold = fold_binary = header_fetch_parser = badmethod
        header_source_parse = header_store_parse = badmethod

    def test_message_from_string(self):
        with self.assertRaisesRegex(Exception, "^test$"):
            email.message_from_string("Subject: test\n\n",
                                      policy=self.MyPolicy)

    def test_message_from_bytes(self):
        with self.assertRaisesRegex(Exception, "^test$"):
            email.message_from_bytes(b"Subject: test\n\n",
                                     policy=self.MyPolicy)

    def test_message_from_file(self):
        f = io.StringIO('Subject: test\n\n')
        with self.assertRaisesRegex(Exception, "^test$"):
            email.message_from_file(f, policy=self.MyPolicy)

    def test_message_from_binary_file(self):
        f = io.BytesIO(b'Subject: test\n\n')
        with self.assertRaisesRegex(Exception, "^test$"):
            email.message_from_binary_file(f, policy=self.MyPolicy)

    # These are redundant, but we need them for black-box completeness.

    def test_parser(self):
        p = email.parser.Parser(policy=self.MyPolicy)
        with self.assertRaisesRegex(Exception, "^test$"):
            p.parsestr('Subject: test\n\n')

    def test_bytes_parser(self):
        p = email.parser.BytesParser(policy=self.MyPolicy)
        with self.assertRaisesRegex(Exception, "^test$"):
            p.parsebytes(b'Subject: test\n\n')

    # Now that we've established that all the parse methods get the
    # policy in to feedparser, we can use message_from_string for
    # the rest of the propagation tests.

    def _make_msg(self, source='Subject: test\n\n', policy=None):
        # Remember the policy used so tests can assert identity against it.
        self.policy = email.policy.default.clone() if policy is None else policy
        return email.message_from_string(source, policy=self.policy)

    def test_parser_propagates_policy_to_message(self):
        msg = self._make_msg()
        self.assertIs(msg.policy, self.policy)

    def test_parser_propagates_policy_to_sub_messages(self):
        # Every part of a multipart message must carry the same policy.
        msg = self._make_msg(textwrap.dedent("""\
            Subject: mime test
            MIME-Version: 1.0
            Content-Type: multipart/mixed, boundary="XXX"

            --XXX
            Content-Type: text/plain

            test
            --XXX
            Content-Type: text/plain

            test2
            --XXX--
            """))
        for part in msg.walk():
            self.assertIs(part.policy, self.policy)

    def test_message_policy_propagates_to_generator(self):
        # The message's own policy (here with a custom linesep) drives
        # serialization by Generator.
        msg = self._make_msg("Subject: test\nTo: foo\n\n",
                             policy=email.policy.default.clone(linesep='X'))
        s = io.StringIO()
        g = email.generator.Generator(s)
        g.flatten(msg)
        self.assertEqual(s.getvalue(), "Subject: testXTo: fooXX")

    def test_message_policy_used_by_as_string(self):
        msg = self._make_msg("Subject: test\nTo: foo\n\n",
                             policy=email.policy.default.clone(linesep='X'))
        self.assertEqual(msg.as_string(), "Subject: testXTo: fooXX")
class TestConcretePolicies(unittest.TestCase):
def test_header_store_parse_rejects_newlines(self):
instance = email.policy.EmailPolicy()
self.assertRaises(ValueError,
instance.header_store_parse,
'From', 'spam\[email protected]')
if __name__ == '__main__':
unittest.main()
| lgpl-3.0 |
drawks/ansible | lib/ansible/modules/database/aerospike/aerospike_migrations.py | 25 | 18758 | #!/usr/bin/python
"""short_description: Check or wait for migrations between nodes"""
# Copyright: (c) 2018, Albert Autin
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: aerospike_migrations
short_description: Check or wait for migrations between nodes
description:
- This can be used to check for migrations in a cluster.
This makes it easy to do a rolling upgrade/update on Aerospike nodes.
- If waiting for migrations is not desired, simply just poll until
port 3000 if available or asinfo -v status returns ok
version_added: 2.8
author: "Albert Autin (@Alb0t)"
options:
host:
description:
- Which host do we use as seed for info connection
required: False
type: str
default: localhost
port:
description:
- Which port to connect to Aerospike on (service port)
required: False
type: int
default: 3000
connect_timeout:
description:
- How long to try to connect before giving up (milliseconds)
required: False
type: int
default: 1000
consecutive_good_checks:
description:
- How many times should the cluster report "no migrations"
consecutively before returning OK back to ansible?
required: False
type: int
default: 3
sleep_between_checks:
description:
- How long to sleep between each check (seconds).
required: False
type: int
default: 60
tries_limit:
description:
- How many times do we poll before giving up and failing?
default: 300
required: False
type: int
local_only:
description:
- Do you wish to only check for migrations on the local node
before returning, or do you want all nodes in the cluster
to finish before returning?
required: True
type: bool
min_cluster_size:
description:
- Check will return bad until cluster size is met
or until tries is exhausted
required: False
type: int
default: 1
fail_on_cluster_change:
description:
- Fail if the cluster key changes
if something else is changing the cluster, we may want to fail
required: False
type: bool
default: True
migrate_tx_key:
description:
- The metric key used to determine if we have tx migrations
remaining. Changeable due to backwards compatibility.
required: False
type: str
default: migrate_tx_partitions_remaining
migrate_rx_key:
description:
- The metric key used to determine if we have rx migrations
remaining. Changeable due to backwards compatibility.
required: False
type: str
default: migrate_rx_partitions_remaining
target_cluster_size:
description:
- When all aerospike builds in the cluster are greater than
version 4.3, then the C(cluster-stable) info command will be used.
Inside this command, you can optionally specify what the target
cluster size is - but it is not necessary. You can still rely on
min_cluster_size if you don't want to use this option.
- If this option is specified on a cluster that has at least 1
host <4.3 then it will be ignored until the min version reaches
4.3.
required: False
type: int
'''
EXAMPLES = '''
# check for migrations on local node
- name: wait for migrations on local node before proceeding
aerospike_migrations:
host: "localhost"
connect_timeout: 2000
consecutive_good_checks: 5
sleep_between_checks: 15
tries_limit: 600
local_only: False
# example playbook:
---
- name: upgrade aerospike
hosts: all
become: true
serial: 1
tasks:
- name: Install dependencies
apt:
name:
- python
- python-pip
- python-setuptools
state: latest
- name: setup aerospike
pip:
name: aerospike
# check for migrations every (sleep_between_checks)
# If at least (consecutive_good_checks) checks come back OK in a row, then return OK.
# Will exit if any exception, which can be caused by bad nodes,
# nodes not returning data, or other reasons.
# Maximum runtime before giving up in this case will be:
# Tries Limit * Sleep Between Checks * delay * retries
- name: wait for aerospike migrations
aerospike_migrations:
local_only: True
sleep_between_checks: 1
tries_limit: 5
consecutive_good_checks: 3
fail_on_cluster_change: true
min_cluster_size: 3
target_cluster_size: 4
register: migrations_check
until: migrations_check is succeeded
changed_when: false
delay: 60
retries: 120
- name: another thing
shell: |
echo foo
- name: reboot
reboot:
'''
RETURN = '''
# Returns only a success/failure result. Changed is always false.
'''
import traceback
from ansible.module_utils.basic import AnsibleModule, missing_required_lib
LIB_FOUND_ERR = None
try:
import aerospike
from time import sleep
import re
except ImportError as ie:
LIB_FOUND = False
LIB_FOUND_ERR = traceback.format_exc()
else:
LIB_FOUND = True
def run_module():
    """Run the Ansible module: check or wait for Aerospike migrations.

    Builds the argument spec, instantiates AnsibleModule, and (unless in
    check mode) polls the cluster via Migrations.has_migs, failing the task
    if migrations are still outstanding or any error occurs.
    """
    module_args = dict(
        host=dict(type='str', required=False, default='localhost'),
        port=dict(type='int', required=False, default=3000),
        connect_timeout=dict(type='int', required=False, default=1000),
        consecutive_good_checks=dict(type='int', required=False, default=3),
        sleep_between_checks=dict(type='int', required=False, default=60),
        # Bug fix: the spec key was misspelled 'requires'; the valid
        # AnsibleModule argument-spec key is 'required'.
        tries_limit=dict(type='int', required=False, default=300),
        local_only=dict(type='bool', required=True),
        min_cluster_size=dict(type='int', required=False, default=1),
        target_cluster_size=dict(type='int', required=False, default=None),
        fail_on_cluster_change=dict(type='bool', required=False, default=True),
        migrate_tx_key=dict(type='str', required=False,
                            default="migrate_tx_partitions_remaining"),
        migrate_rx_key=dict(type='str', required=False,
                            default="migrate_rx_partitions_remaining")
    )
    result = dict(
        changed=False,
    )
    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )
    # Fail with a standard "missing library" message if the aerospike client
    # could not be imported at module load time.
    if not LIB_FOUND:
        module.fail_json(msg=missing_required_lib('aerospike'),
                         exception=LIB_FOUND_ERR)
    try:
        if module.check_mode:
            # Check mode: report success without contacting the cluster.
            has_migrations, skip_reason = False, None
        else:
            migrations = Migrations(module)
            has_migrations, skip_reason = migrations.has_migs(
                module.params['local_only']
            )
        if has_migrations:
            module.fail_json(msg="Failed.", skip_reason=skip_reason)
    except Exception as e:
        # Any failure (connection errors, bad nodes, etc.) fails the task.
        module.fail_json(msg="Error: {0}".format(e))
    module.exit_json(**result)
class Migrations:
""" Check or wait for migrations between nodes """
def __init__(self, module):
self.module = module
self._client = self._create_client().connect()
self._nodes = {}
self._update_nodes_list()
self._cluster_statistics = {}
self._update_cluster_statistics()
self._namespaces = set()
self._update_cluster_namespace_list()
self._build_list = set()
self._update_build_list()
self._start_cluster_key = \
self._cluster_statistics[self._nodes[0]]['cluster_key']
def _create_client(self):
""" TODO: add support for auth, tls, and other special features
I won't use those features, so I'll wait until somebody complains
or does it for me (Cross fingers)
create the client object"""
config = {
'hosts': [
(self.module.params['host'], self.module.params['port'])
],
'policies': {
'timeout': self.module.params['connect_timeout']
}
}
return aerospike.client(config)
def _info_cmd_helper(self, cmd, node=None, delimiter=';'):
"""delimiter is for seperate stats that come back, NOT for kv
separation which is ="""
if node is None: # If no node passed, use the first one (local)
node = self._nodes[0]
data = self._client.info_node(cmd, node)
data = data.split("\t")
if len(data) != 1 and len(data) != 2:
self.module.fail_json(
msg="Unexpected number of values returned in info command: " +
str(len(data))
)
# data will be in format 'command\touput'
data = data[-1]
data = data.rstrip("\n\r")
data_arr = data.split(delimiter)
# some commands don't return in kv format
# so we dont want a dict from those.
if '=' in data:
retval = dict(
metric.split("=", 1) for metric in data_arr
)
else:
# if only 1 element found, and not kv, return just the value.
if len(data_arr) == 1:
retval = data_arr[0]
else:
retval = data_arr
return retval
def _update_build_list(self):
"""creates self._build_list which is a unique list
of build versions."""
self._build_list = set()
for node in self._nodes:
build = self._info_cmd_helper('build', node)
self._build_list.add(build)
# just checks to see if the version is 4.3 or greater
def _can_use_cluster_stable(self):
# if version <4.3 we can't use cluster-stable info cmd
# regex hack to check for versions beginning with 0-3 or
# beginning with 4.0,4.1,4.2
if re.search(R'^([0-3]\.|4\.[0-2])', min(self._build_list)):
return False
return True
def _update_cluster_namespace_list(self):
""" make a unique list of namespaces
TODO: does this work on a rolling namespace add/deletion?
thankfully if it doesnt, we dont need this on builds >=4.3"""
self._namespaces = set()
for node in self._nodes:
namespaces = self._info_cmd_helper('namespaces', node)
for namespace in namespaces:
self._namespaces.add(namespace)
def _update_cluster_statistics(self):
"""create a dict of nodes with their related stats """
self._cluster_statistics = {}
for node in self._nodes:
self._cluster_statistics[node] = \
self._info_cmd_helper('statistics', node)
def _update_nodes_list(self):
"""get a fresh list of all the nodes"""
self._nodes = self._client.get_nodes()
if not self._nodes:
self.module.fail_json("Failed to retrieve at least 1 node.")
def _namespace_has_migs(self, namespace, node=None):
"""returns a True or False.
Does the namespace have migrations for the node passed?
If no node passed, uses the local node or the first one in the list"""
namespace_stats = self._info_cmd_helper("namespace/" + namespace, node)
try:
namespace_tx = \
int(namespace_stats[self.module.params['migrate_tx_key']])
namespace_rx = \
int(namespace_stats[self.module.params['migrate_tx_key']])
except KeyError:
self.module.fail_json(
msg="Did not find partition remaining key:" +
self.module.params['migrate_tx_key'] +
" or key:" +
self.module.params['migrate_rx_key'] +
" in 'namespace/" +
namespace +
"' output."
)
except TypeError:
self.module.fail_json(
msg="namespace stat returned was not numerical"
)
return namespace_tx != 0 or namespace_rx != 0
def _node_has_migs(self, node=None):
"""just calls namespace_has_migs and
if any namespace has migs returns true"""
migs = 0
self._update_cluster_namespace_list()
for namespace in self._namespaces:
if self._namespace_has_migs(namespace, node):
migs += 1
return migs != 0
def _cluster_key_consistent(self):
"""create a dictionary to store what each node
returns the cluster key as. we should end up with only 1 dict key,
with the key being the cluster key."""
cluster_keys = {}
for node in self._nodes:
cluster_key = self._cluster_statistics[node][
'cluster_key']
if cluster_key not in cluster_keys:
cluster_keys[cluster_key] = 1
else:
cluster_keys[cluster_key] += 1
if len(cluster_keys.keys()) == 1 and \
self._start_cluster_key in cluster_keys:
return True
return False
def _cluster_migrates_allowed(self):
"""ensure all nodes have 'migrate_allowed' in their stats output"""
for node in self._nodes:
node_stats = self._info_cmd_helper('statistics', node)
allowed = node_stats['migrate_allowed']
if allowed == "false":
return False
return True
def _cluster_has_migs(self):
"""calls node_has_migs for each node"""
migs = 0
for node in self._nodes:
if self._node_has_migs(node):
migs += 1
if migs == 0:
return False
return True
def _has_migs(self, local):
if local:
return self._local_node_has_migs()
return self._cluster_has_migs()
def _local_node_has_migs(self):
return self._node_has_migs(None)
def _is_min_cluster_size(self):
"""checks that all nodes in the cluster are returning the
mininimum cluster size specified in their statistics output"""
sizes = set()
for node in self._cluster_statistics:
sizes.add(int(self._cluster_statistics[node]['cluster_size']))
if (len(sizes)) > 1: # if we are getting more than 1 size, lets say no
return False
if (min(sizes)) >= self.module.params['min_cluster_size']:
return True
return False
def _cluster_stable(self):
"""Added 4.3:
cluster-stable:size=<target-cluster-size>;ignore-migrations=<yes/no>;namespace=<namespace-name>
Returns the current 'cluster_key' when the following are satisfied:
If 'size' is specified then the target node's 'cluster-size'
must match size.
If 'ignore-migrations' is either unspecified or 'false' then
the target node's migrations counts must be zero for the provided
'namespace' or all namespaces if 'namespace' is not provided."""
cluster_key = set()
cluster_key.add(self._info_cmd_helper('statistics')['cluster_key'])
cmd = "cluster-stable:"
target_cluster_size = self.module.params['target_cluster_size']
if target_cluster_size is not None:
cmd = cmd + "size=" + str(target_cluster_size) + ";"
for node in self._nodes:
cluster_key.add(self._info_cmd_helper(cmd, node))
if len(cluster_key) == 1:
return True
return False
def _cluster_good_state(self):
"""checks a few things to make sure we're OK to say the cluster
has no migs. It could be in a unhealthy condition that does not allow
migs, or a split brain"""
if self._cluster_key_consistent() is not True:
return False, "Cluster key inconsistent."
if self._is_min_cluster_size() is not True:
return False, "Cluster min size not reached."
if self._cluster_migrates_allowed() is not True:
return False, "migrate_allowed is false somewhere."
return True, "OK."
def has_migs(self, local=True):
"""returns a boolean, False if no migrations otherwise True"""
consecutive_good = 0
try_num = 0
skip_reason = list()
while \
try_num < int(self.module.params['tries_limit']) and \
consecutive_good < \
int(self.module.params['consecutive_good_checks']):
self._update_nodes_list()
self._update_cluster_statistics()
# These checks are outside of the while loop because
# we probably want to skip & sleep instead of failing entirely
stable, reason = self._cluster_good_state()
if stable is not True:
skip_reason.append(
"Skipping on try#" + str(try_num) +
" for reason:" + reason
)
else:
if self._can_use_cluster_stable():
if self._cluster_stable():
consecutive_good += 1
else:
consecutive_good = 0
skip_reason.append(
"Skipping on try#" + str(try_num) +
" for reason:" + " cluster_stable"
)
elif self._has_migs(local):
# print("_has_migs")
skip_reason.append(
"Skipping on try#" + str(try_num) +
" for reason:" + " migrations"
)
consecutive_good = 0
else:
consecutive_good += 1
if consecutive_good == self.module.params[
'consecutive_good_checks']:
break
try_num += 1
sleep(self.module.params['sleep_between_checks'])
# print(skip_reason)
if consecutive_good == self.module.params['consecutive_good_checks']:
return False, None
return True, skip_reason
def main():
    """Ansible module entry point; delegates all work to run_module()."""
    run_module()
# Standard Ansible boilerplate: run the module when executed as a script.
if __name__ == '__main__':
    main()
| gpl-3.0 |
beswarm/django-allauth | allauth/socialaccount/providers/vk/tests.py | 71 | 1253 | from __future__ import absolute_import
from allauth.socialaccount.tests import create_oauth2_tests
from allauth.socialaccount.providers import registry
from allauth.tests import MockedResponse
from .provider import VKProvider
class VKTests(create_oauth2_tests(registry.by_id(VKProvider.id))):
    # OAuth2 login-flow tests for the VK provider; the base TestCase is
    # generated dynamically for the registered provider by
    # create_oauth2_tests().
    def get_mocked_response(self, verified_email=True):
        # Canned VK users.get payload returned instead of a live API call.
        # NOTE(review): verified_email is part of the shared provider-test
        # interface but is unused by this mock — confirm intentional.
        return MockedResponse(200, """
{"response": [{"last_name": "Penners", "university_name": "", "photo": "http://vk.com/images/camera_c.gif", "sex": 2, "photo_medium": "http://vk.com/images/camera_b.gif", "relation": "0", "timezone": 1, "photo_big": "http://vk.com/images/camera_a.gif", "uid": 219004864, "universities": [], "city": "1430", "first_name": "Raymond", "faculty_name": "", "online": 1, "counters": {"videos": 0, "online_friends": 0, "notes": 0, "audios": 0, "photos": 0, "followers": 0, "groups": 0, "user_videos": 0, "albums": 0, "friends": 0}, "home_phone": "", "faculty": 0, "nickname": "", "screen_name": "id219004864", "has_mobile": 1, "country": "139", "university": 0, "graduation": 0, "activity": "", "last_seen": {"time": 1377805189}}]}
""")
    def get_login_response_json(self, with_refresh_token=True):
        # Minimal token-endpoint response; VK issues no refresh token, so
        # with_refresh_token is ignored.
        return '{"user_id": 219004864, "access_token":"testac"}'
| mit |
demisto/demisto-py | demisto_client/demisto_api/models/playbook_task.py | 1 | 26685 | # coding: utf-8
"""
Demisto API
This is the public REST API to integrate with the demisto server. HTTP request can be sent using any HTTP-client. For an example dedicated client take a look at: https://github.com/demisto/demisto-py. Requests must include API-key that can be generated in the Demisto web client under 'Settings' -> 'Integrations' -> 'API keys' Optimistic Locking and Versioning\\: When using Demisto REST API, you will need to make sure to work on the latest version of the item (incident, entry, etc.), otherwise, you will get a DB version error (which not allow you to override a newer item). In addition, you can pass 'version\\: -1' to force data override (make sure that other users data might be lost). Assume that Alice and Bob both read the same data from Demisto server, then they both changed the data, and then both tried to write the new versions back to the server. Whose changes should be saved? Alice’s? Bob’s? To solve this, each data item in Demisto has a numeric incremental version. If Alice saved an item with version 4 and Bob trying to save the same item with version 3, Demisto will rollback Bob request and returns a DB version conflict error. Bob will need to get the latest item and work on it so Alice work will not get lost. Example request using 'curl'\\: ``` curl 'https://hostname:443/incidents/search' -H 'content-type: application/json' -H 'accept: application/json' -H 'Authorization: <API Key goes here>' --data-binary '{\"filter\":{\"query\":\"-status:closed -category:job\",\"period\":{\"by\":\"day\",\"fromValue\":7}}}' --compressed ``` # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from demisto_client.demisto_api.models.advance_arg import AdvanceArg # noqa: F401,E501
from demisto_client.demisto_api.models.data_collection_form import DataCollectionForm # noqa: F401,E501
from demisto_client.demisto_api.models.evidence_data import EvidenceData # noqa: F401,E501
from demisto_client.demisto_api.models.field_mapping import FieldMapping # noqa: F401,E501
from demisto_client.demisto_api.models.form_display import FormDisplay # noqa: F401,E501
from demisto_client.demisto_api.models.notifiable_item import NotifiableItem # noqa: F401,E501
from demisto_client.demisto_api.models.quiet_mode import QuietMode # noqa: F401,E501
from demisto_client.demisto_api.models.reputation_calc_alg import ReputationCalcAlg # noqa: F401,E501
from demisto_client.demisto_api.models.sla import SLA # noqa: F401,E501
from demisto_client.demisto_api.models.task import Task # noqa: F401,E501
from demisto_client.demisto_api.models.task_condition import TaskCondition # noqa: F401,E501
from demisto_client.demisto_api.models.task_loop import TaskLoop # noqa: F401,E501
from demisto_client.demisto_api.models.task_type import TaskType # noqa: F401,E501
from demisto_client.demisto_api.models.task_view import TaskView # noqa: F401,E501
from demisto_client.demisto_api.models.timer_trigger import TimerTrigger # noqa: F401,E501
class PlaybookTask(object):
    """Swagger model for a single playbook task.

    NOTE: This class was originally auto generated by the swagger code
    generator program.  The 28 byte-identical get/set property pairs it
    emitted (~450 lines) have been collapsed into the loop below the class,
    which installs an equivalent plain property — backed by a ``_``-prefixed
    slot — for every attribute declared in ``swagger_types``.  The public
    interface (constructor signature, property names, class-level dicts and
    dunder methods) is unchanged.

    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    swagger_types = {
        'conditions': 'list[TaskCondition]',
        'continue_on_error': 'bool',
        'default_assignee': 'str',
        'default_assignee_complex': 'AdvanceArg',
        'default_reminder': 'int',
        'evidence_data': 'EvidenceData',
        'field_mapping': 'list[FieldMapping]',
        'form': 'DataCollectionForm',
        'form_display': 'FormDisplay',
        'id': 'str',
        'ignore_worker': 'bool',
        'loop': 'TaskLoop',
        'message': 'NotifiableItem',
        'next_tasks': 'dict(str, list[str])',
        'note': 'bool',
        'quiet_mode': 'QuietMode',
        'reputation_calc': 'ReputationCalcAlg',
        'restricted_completion': 'bool',
        'script_arguments': 'dict(str, AdvanceArg)',
        'separate_context': 'bool',
        'skip_unavailable': 'bool',
        'sla': 'SLA',
        'sla_reminder': 'SLA',
        'task': 'Task',
        'task_id': 'str',
        'timer_triggers': 'list[TimerTrigger]',
        'type': 'TaskType',
        'view': 'TaskView'
    }

    attribute_map = {
        'conditions': 'conditions',
        'continue_on_error': 'continueOnError',
        'default_assignee': 'defaultAssignee',
        'default_assignee_complex': 'defaultAssigneeComplex',
        'default_reminder': 'defaultReminder',
        'evidence_data': 'evidenceData',
        'field_mapping': 'fieldMapping',
        'form': 'form',
        'form_display': 'formDisplay',
        'id': 'id',
        'ignore_worker': 'ignoreWorker',
        'loop': 'loop',
        'message': 'message',
        'next_tasks': 'nextTasks',
        'note': 'note',
        'quiet_mode': 'quietMode',
        'reputation_calc': 'reputationCalc',
        'restricted_completion': 'restrictedCompletion',
        'script_arguments': 'scriptArguments',
        'separate_context': 'separateContext',
        'skip_unavailable': 'skipUnavailable',
        'sla': 'sla',
        'sla_reminder': 'slaReminder',
        'task': 'task',
        'task_id': 'taskId',
        'timer_triggers': 'timerTriggers',
        'type': 'type',
        'view': 'view'
    }

    def __init__(self, conditions=None, continue_on_error=None, default_assignee=None, default_assignee_complex=None, default_reminder=None, evidence_data=None, field_mapping=None, form=None, form_display=None, id=None, ignore_worker=None, loop=None, message=None, next_tasks=None, note=None, quiet_mode=None, reputation_calc=None, restricted_completion=None, script_arguments=None, separate_context=None, skip_unavailable=None, sla=None, sla_reminder=None, task=None, task_id=None, timer_triggers=None, type=None, view=None):  # noqa: E501
        """PlaybookTask - a model defined in Swagger.

        Every argument is optional.  As in the generated code, each private
        backing slot is created as None and a non-None argument is then
        assigned through the corresponding public property.
        """
        supplied = locals()
        self.discriminator = None
        for attr_name in self.swagger_types:
            setattr(self, '_' + attr_name, None)
            if supplied[attr_name] is not None:
                setattr(self, attr_name, supplied[attr_name])

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr in self.swagger_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize model objects held in lists.
                result[attr] = [
                    item.to_dict() if hasattr(item, "to_dict") else item
                    for item in value
                ]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize model objects held as dict values.
                result[attr] = dict(
                    (key, val.to_dict() if hasattr(val, "to_dict") else val)
                    for key, val in value.items()
                )
            else:
                result[attr] = value
        if issubclass(PlaybookTask, dict):
            for key, value in self.items():
                result[key] = value
        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, PlaybookTask):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other


def _make_playbook_task_property(attr_name):
    """Build the plain get/set property for one PlaybookTask attribute."""
    private_name = '_' + attr_name

    def _getter(self):
        return getattr(self, private_name)

    def _setter(self, value):
        setattr(self, private_name, value)

    doc = "Gets or sets the %s of this PlaybookTask." % attr_name
    return property(_getter, _setter, doc=doc)


# Install one identical accessor pair per declared attribute, replacing the
# generated per-attribute boilerplate.
for _attr_name in PlaybookTask.swagger_types:
    setattr(PlaybookTask, _attr_name, _make_playbook_task_property(_attr_name))
del _attr_name
| apache-2.0 |
grivescorbett/heroku-buildpack-python | vendor/pip-pop/pip/baseparser.py | 424 | 10465 | """Base option parser setup"""
from __future__ import absolute_import
import sys
import optparse
import os
import re
import textwrap
from distutils.util import strtobool
from pip._vendor.six import string_types
from pip._vendor.six.moves import configparser
from pip.locations import (
legacy_config_file, config_basename, running_under_virtualenv,
site_config_files
)
from pip.utils import appdirs, get_terminal_size
_environ_prefix_re = re.compile(r"^PIP_", re.I)
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
    """A prettier/less verbose help formatter for optparse."""

    def __init__(self, *args, **kwargs):
        # The help position must stay aligned with __init__.parseopts
        # .description; width tracks the terminal, minus a small margin.
        kwargs.update(
            max_help_position=30,
            indent_increment=1,
            width=get_terminal_size()[0] - 2,
        )
        optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)

    def format_option_strings(self, option):
        return self._format_option_strings(option, ' <%s>', ', ')

    def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
        """
        Return a comma-separated list of option strings and metavars.

        :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
        :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
        :param optsep: separator
        """
        pieces = []
        if option._short_opts:
            pieces.append(option._short_opts[0])
        if option._long_opts:
            pieces.append(option._long_opts[0])
        if len(pieces) > 1:
            pieces.insert(1, optsep)
        if option.takes_value():
            metavar = (option.metavar or option.dest).lower()
            pieces.append(mvarfmt % metavar)
        return ''.join(pieces)

    def format_heading(self, heading):
        # The generic "Options" heading is suppressed entirely.
        return '' if heading == 'Options' else heading + ':\n'

    def format_usage(self, usage):
        """
        Ensure there is only one newline between usage and the first heading
        if there is no description.
        """
        return '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), "  ")

    def format_description(self, description):
        # leave full control over description to us
        if not description:
            return ''
        label = 'Commands' if hasattr(self.parser, 'main') else 'Description'
        # Some doc strings have initial newlines and/or trailing whitespace;
        # normalize, dedent, then re-indent under the label.
        body = self.indent_lines(
            textwrap.dedent(description.lstrip('\n').rstrip()), "  ")
        return '%s:\n%s\n' % (label, body)

    def format_epilog(self, epilog):
        # leave full control over epilog to us
        return epilog if epilog else ''

    def indent_lines(self, text, indent):
        return "\n".join(indent + line for line in text.split('\n'))
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
    """Custom help formatter for use in ConfigOptionParser.
    This updates the defaults before expanding them, allowing
    them to show up correctly in the help listing.
    """
    def expand_default(self, option):
        # Refresh defaults from config files / environment first so the
        # "%default" placeholders in help text show the effective values.
        if self.parser is not None:
            self.parser._update_defaults(self.parser.defaults)
        return optparse.IndentedHelpFormatter.expand_default(self, option)
class CustomOptionParser(optparse.OptionParser):
    """OptionParser that supports positional group insertion and exposes a
    flattened view of all options (top-level plus grouped)."""

    def insert_option_group(self, idx, *args, **kwargs):
        """Insert an OptionGroup at a given position."""
        # add_option_group always appends; pop the freshly added group off
        # the end and re-insert it at the requested index.
        group = self.add_option_group(*args, **kwargs)
        self.option_groups.pop()
        self.option_groups.insert(idx, group)
        return group

    @property
    def option_list_all(self):
        """Get a list of all options, including those in option groups."""
        flattened = list(self.option_list)
        for group in self.option_groups:
            flattened.extend(group.option_list)
        return flattened
class ConfigOptionParser(CustomOptionParser):
    """Custom option parser which updates its defaults by checking the
    configuration files and environmental variables.

    Precedence (lowest to highest): site-wide config, user config (legacy
    then new location), virtualenv config, environment variables. The
    ordering of statements below encodes that precedence — do not reorder.
    """
    # Class-level default; instances override it from the 'isolated' kwarg.
    isolated = False
    def __init__(self, *args, **kwargs):
        self.config = configparser.RawConfigParser()
        # 'name' selects which [section] of the config files applies to this
        # command (in addition to [global]); it is mandatory.
        self.name = kwargs.pop('name')
        self.isolated = kwargs.pop("isolated", False)
        self.files = self.get_config_files()
        if self.files:
            self.config.read(self.files)
        assert self.name
        optparse.OptionParser.__init__(self, *args, **kwargs)
    def get_config_files(self):
        # the files returned by this method will be parsed in order with the
        # first files listed being overridden by later files in standard
        # ConfigParser fashion
        config_file = os.environ.get('PIP_CONFIG_FILE', False)
        # Setting PIP_CONFIG_FILE to os.devnull disables user config entirely
        # (site-wide and virtualenv configs are also skipped via the early
        # return here).
        if config_file == os.devnull:
            return []
        # at the base we have any site-wide configuration
        files = list(site_config_files)
        # per-user configuration next
        if not self.isolated:
            if config_file and os.path.exists(config_file):
                files.append(config_file)
            else:
                # This is the legacy config file, we consider it to be a lower
                # priority than the new file location.
                files.append(legacy_config_file)
                # This is the new config file, we consider it to be a higher
                # priority than the legacy file.
                files.append(
                    os.path.join(
                        appdirs.user_config_dir("pip"),
                        config_basename,
                    )
                )
        # finally virtualenv configuration first trumping others
        if running_under_virtualenv():
            venv_config_file = os.path.join(
                sys.prefix,
                config_basename,
            )
            if os.path.exists(venv_config_file):
                files.append(venv_config_file)
        return files
    def check_default(self, option, key, val):
        """Validate a config-sourced value; exit(3) on a bad value so config
        errors are distinguishable from usage errors (exit 2)."""
        try:
            return option.check_value(key, val)
        except optparse.OptionValueError as exc:
            print("An error occurred during configuration: %s" % exc)
            sys.exit(3)
    def _update_defaults(self, defaults):
        """Updates the given defaults with values from the config files and
        the environ. Does a little special handling for certain types of
        options (lists)."""
        # Then go and look for the other sources of configuration:
        config = {}
        # 1. config files -- self.name's section overrides [global]
        for section in ('global', self.name):
            config.update(
                self.normalize_keys(self.get_config_section(section))
            )
        # 2. environmental variables
        if not self.isolated:
            config.update(self.normalize_keys(self.get_environ_vars()))
        # Accumulate complex default state.
        # self.values is consulted by callback options while they run; it is
        # torn down again at the end of this method.
        self.values = optparse.Values(self.defaults)
        late_eval = set()
        # Then set the options with those values
        for key, val in config.items():
            # ignore empty values
            if not val:
                continue
            option = self.get_option(key)
            # Ignore options not present in this parser. E.g. non-globals put
            # in [global] by users that want them to apply to all applicable
            # commands.
            if option is None:
                continue
            if option.action in ('store_true', 'store_false', 'count'):
                val = strtobool(val)
            elif option.action == 'append':
                # Whitespace-separated list in config; validate each element.
                val = val.split()
                val = [self.check_default(option, key, v) for v in val]
            elif option.action == 'callback':
                # Callbacks write into self.values; resolve their dests after
                # the whole pass (hence late_eval).
                late_eval.add(option.dest)
                opt_str = option.get_opt_string()
                val = option.convert_value(opt_str, val)
                # From take_action
                args = option.callback_args or ()
                kwargs = option.callback_kwargs or {}
                option.callback(option, opt_str, val, self, *args, **kwargs)
            else:
                val = self.check_default(option, key, val)
            defaults[option.dest] = val
        # Pull the values that callback options stored on self.values.
        for key in late_eval:
            defaults[key] = getattr(self.values, key)
        self.values = None
        return defaults
    def normalize_keys(self, items):
        """Return a config dictionary with normalized keys regardless of
        whether the keys were specified in environment variables or in config
        files"""
        normalized = {}
        for key, val in items:
            key = key.replace('_', '-')
            if not key.startswith('--'):
                key = '--%s' % key  # only prefer long opts
            normalized[key] = val
        return normalized
    def get_config_section(self, name):
        """Get a section of a configuration"""
        if self.config.has_section(name):
            return self.config.items(name)
        return []
    def get_environ_vars(self):
        """Returns a generator with all environmental vars with prefix PIP_"""
        for key, val in os.environ.items():
            if _environ_prefix_re.search(key):
                yield (_environ_prefix_re.sub("", key).lower(), val)
    def get_default_values(self):
        """Overridding to make updating the defaults after instantiation of
        the option parser possible, _update_defaults() does the dirty work."""
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return optparse.Values(self.defaults)
        defaults = self._update_defaults(self.defaults.copy())  # ours
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, string_types):
                # String defaults may still need type conversion (e.g. int
                # options configured via environment variables).
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)
        return optparse.Values(defaults)
    def error(self, msg):
        # Match optparse convention: usage on stderr, exit code 2.
        self.print_usage(sys.stderr)
        self.exit(2, "%s\n" % msg)
| mit |
robovm/robovm-studio | python/lib/Lib/site-packages/django/contrib/admin/helpers.py | 78 | 13324 | from django import forms
from django.conf import settings
from django.contrib.admin.util import flatten_fieldsets, lookup_field
from django.contrib.admin.util import display_for_field, label_for_field
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.db.models.fields.related import ManyToManyRel
from django.forms.util import flatatt
from django.template.defaultfilters import capfirst
from django.utils.encoding import force_unicode, smart_unicode
from django.utils.html import escape, conditional_escape
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
ACTION_CHECKBOX_NAME = '_selected_action'
class ActionForm(forms.Form):
    # Form rendered above admin changelists for bulk actions.
    # NOTE: Django renders fields in declaration order (creation_counter),
    # so the order of the attributes below is part of the behavior.
    action = forms.ChoiceField(label=_('Action:'))
    # Hidden flag toggled by JS when "select all N across pages" is used.
    select_across = forms.BooleanField(label='', required=False, initial=0,
        widget=forms.HiddenInput({'class': 'select-across'}))
    # Per-row checkbox widget; the check_test lambda always returns False so
    # boxes render unchecked regardless of value.
    checkbox = forms.CheckboxInput({'class': 'action-select'}, lambda value: False)
class AdminForm(object):
    """Template-facing wrapper that pairs a form with its fieldset layout,
    prepopulation rules, and read-only field configuration."""
    def __init__(self, form, fieldsets, prepopulated_fields, readonly_fields=None, model_admin=None):
        self.form = form
        self.fieldsets = normalize_fieldsets(fieldsets)
        # Resolve each prepopulated field name, and the names it watches,
        # to bound fields on the form.
        prepopulated = []
        for field_name, dependencies in prepopulated_fields.items():
            prepopulated.append({
                'field': form[field_name],
                'dependencies': [form[dep] for dep in dependencies],
            })
        self.prepopulated_fields = prepopulated
        self.model_admin = model_admin
        self.readonly_fields = () if readonly_fields is None else readonly_fields
    def __iter__(self):
        """Yield one Fieldset wrapper per configured fieldset."""
        for title, options in self.fieldsets:
            yield Fieldset(self.form, title,
                readonly_fields=self.readonly_fields,
                model_admin=self.model_admin,
                **options)
    def first_field(self):
        """Return the first bound field, or None if the form has none."""
        try:
            title, options = self.fieldsets[0]
            name = options['fields'][0]
            if not isinstance(name, basestring):
                # A line may group several fields in a tuple; take the first.
                name = name[0]
            return self.form[name]
        except (KeyError, IndexError):
            pass
        try:
            return iter(self.form).next()
        except StopIteration:
            return None
    def _media(self):
        combined = self.form.media
        for fieldset in self:
            combined = combined + fieldset.media
        return combined
    media = property(_media)
class Fieldset(object):
    """One named group of fields, as declared in ModelAdmin.fieldsets."""
    def __init__(self, form, name=None, readonly_fields=(), fields=(), classes=(),
            description=None, model_admin=None):
        self.form = form
        self.name = name
        self.fields = fields
        self.classes = u' '.join(classes)
        self.description = description
        self.model_admin = model_admin
        self.readonly_fields = readonly_fields
    def _media(self):
        # Only collapsible fieldsets pull in extra JS.
        if 'collapse' not in self.classes:
            return forms.Media()
        scripts = ['js/jquery.min.js', 'js/jquery.init.js', 'js/collapse.min.js']
        return forms.Media(js=['%s%s' % (settings.ADMIN_MEDIA_PREFIX, path) for path in scripts])
    media = property(_media)
    def __iter__(self):
        for field in self.fields:
            yield Fieldline(self.form, field, self.readonly_fields,
                model_admin=self.model_admin)
class Fieldline(object):
    """A single row in a fieldset: one field name, or an iterable of names
    rendered side by side."""
    def __init__(self, form, field, readonly_fields=None, model_admin=None):
        self.form = form  # A django.forms.Form instance
        # Plain strings (no __iter__ on py2 str) are wrapped in a list;
        # iterables of names are used as given.
        self.fields = field if hasattr(field, "__iter__") else [field]
        self.model_admin = model_admin
        self.readonly_fields = readonly_fields if readonly_fields is not None else ()
    def __iter__(self):
        for index, field in enumerate(self.fields):
            first = index == 0
            if field in self.readonly_fields:
                yield AdminReadonlyField(self.form, field, is_first=first,
                    model_admin=self.model_admin)
            else:
                yield AdminField(self.form, field, is_first=first)
    def errors(self):
        error_lists = [self.form[f].errors.as_ul()
                       for f in self.fields if f not in self.readonly_fields]
        return mark_safe(u'\n'.join(error_lists).strip('\n'))
class AdminField(object):
    """Wraps an editable bound field with admin-specific label rendering."""
    def __init__(self, form, field, is_first):
        self.field = form[field]  # A django.forms.BoundField instance
        self.is_first = is_first  # Whether this field is first on the line
        self.is_checkbox = isinstance(self.field.field.widget, forms.CheckboxInput)
    def label_tag(self):
        css = []
        label_text = force_unicode(escape(self.field.label))
        if self.is_checkbox:
            # Checkbox labels get a dedicated class and no trailing colon.
            css.append(u'vCheckboxLabel')
            contents = label_text
        else:
            contents = label_text + u':'
        if self.field.field.required:
            css.append(u'required')
        if not self.is_first:
            css.append(u'inline')
        attrs = {'class': u' '.join(css)} if css else {}
        return self.field.label_tag(contents=contents, attrs=attrs)
    def errors(self):
        return mark_safe(self.field.errors.as_ul())
class AdminReadonlyField(object):
    # Renders a read-only field: a label plus a display-only value, with the
    # same template interface as AdminField (is_first, is_checkbox, ...).
    def __init__(self, form, field, is_first, model_admin=None):
        label = label_for_field(field, form._meta.model, model_admin)
        # Make self.field look a little bit like a field. This means that
        # {{ field.name }} must be a useful class name to identify the field.
        # For convenience, store other field-related data here too.
        if callable(field):
            # Lambdas have no useful name; fall back to the empty string.
            class_name = field.__name__ != '<lambda>' and field.__name__ or ''
        else:
            class_name = field
        self.field = {
            'name': class_name,
            'label': label,
            'field': field,
        }
        self.form = form
        self.model_admin = model_admin
        self.is_first = is_first
        self.is_checkbox = False
        self.is_readonly = True
    def label_tag(self):
        attrs = {}
        if not self.is_first:
            attrs["class"] = "inline"
        label = self.field['label']
        contents = capfirst(force_unicode(escape(label))) + u":"
        return mark_safe('<label%(attrs)s>%(contents)s</label>' % {
            "attrs": flatatt(attrs),
            "contents": contents,
        })
    def contents(self):
        # Imported here (not module level) to avoid circular imports with
        # the admin templatetags/views modules.
        from django.contrib.admin.templatetags.admin_list import _boolean_icon
        from django.contrib.admin.views.main import EMPTY_CHANGELIST_VALUE
        field, obj, model_admin = self.field['field'], self.form.instance, self.model_admin
        try:
            f, attr, value = lookup_field(field, obj, model_admin)
        except (AttributeError, ValueError, ObjectDoesNotExist):
            result_repr = EMPTY_CHANGELIST_VALUE
        else:
            if f is None:
                # Not a model field: a callable or an admin/model attribute.
                boolean = getattr(attr, "boolean", False)
                if boolean:
                    result_repr = _boolean_icon(value)
                else:
                    result_repr = smart_unicode(value)
                    if getattr(attr, "allow_tags", False):
                        result_repr = mark_safe(result_repr)
            else:
                if value is None:
                    result_repr = EMPTY_CHANGELIST_VALUE
                elif isinstance(f.rel, ManyToManyRel):
                    result_repr = ", ".join(map(unicode, value.all()))
                else:
                    result_repr = display_for_field(value, f)
        return conditional_escape(result_repr)
class InlineAdminFormSet(object):
    """
    A wrapper around an inline formset for use in the admin system.
    """
    def __init__(self, inline, formset, fieldsets, readonly_fields=None, model_admin=None):
        self.opts = inline
        self.formset = formset
        self.fieldsets = fieldsets
        self.model_admin = model_admin
        if readonly_fields is None:
            readonly_fields = ()
        self.readonly_fields = readonly_fields
    def __iter__(self):
        # Yield order is part of the template contract: existing (initial)
        # forms first, then extra blank forms, then the empty "template" form
        # that admin JS clones to add rows client-side.
        for form, original in zip(self.formset.initial_forms, self.formset.get_queryset()):
            yield InlineAdminForm(self.formset, form, self.fieldsets,
                self.opts.prepopulated_fields, original, self.readonly_fields,
                model_admin=self.model_admin)
        for form in self.formset.extra_forms:
            yield InlineAdminForm(self.formset, form, self.fieldsets,
                self.opts.prepopulated_fields, None, self.readonly_fields,
                model_admin=self.model_admin)
        yield InlineAdminForm(self.formset, self.formset.empty_form,
            self.fieldsets, self.opts.prepopulated_fields, None,
            self.readonly_fields, model_admin=self.model_admin)
    def fields(self):
        # Yields header metadata for tabular inlines: a plain dict for
        # read-only fields, the form field object for editable ones.
        fk = getattr(self.formset, "fk", None)
        for i, field in enumerate(flatten_fieldsets(self.fieldsets)):
            if fk and fk.name == field:
                # The FK back to the parent object is managed by the formset.
                continue
            if field in self.readonly_fields:
                yield {
                    'label': label_for_field(field, self.opts.model, self.model_admin),
                    'widget': {
                        'is_hidden': False
                    },
                    'required': False
                }
            else:
                yield self.formset.form.base_fields[field]
    def _media(self):
        media = self.opts.media + self.formset.media
        for fs in self:
            media = media + fs.media
        return media
    media = property(_media)
class InlineAdminForm(AdminForm):
    """
    A wrapper around an inline form for use in the admin system.
    """
    def __init__(self, formset, form, fieldsets, prepopulated_fields, original,
            readonly_fields=None, model_admin=None):
        self.formset = formset
        self.model_admin = model_admin
        # 'original' is the saved instance for initial forms, None for
        # extra/empty forms.
        self.original = original
        if original is not None:
            self.original_content_type_id = ContentType.objects.get_for_model(original).pk
        # NOTE: truthy-falsy rather than strict bool (original may be None).
        self.show_url = original and hasattr(original, 'get_absolute_url')
        super(InlineAdminForm, self).__init__(form, fieldsets, prepopulated_fields,
            readonly_fields, model_admin)
    def __iter__(self):
        for name, options in self.fieldsets:
            yield InlineFieldset(self.formset, self.form, name,
                self.readonly_fields, model_admin=self.model_admin, **options)
    def has_auto_field(self):
        # True if the model (or any concrete parent) has an AutoField pk.
        if self.form._meta.model._meta.has_auto_field:
            return True
        # Also search any parents for an auto field.
        for parent in self.form._meta.model._meta.get_parent_list():
            if parent._meta.has_auto_field:
                return True
        return False
    def field_count(self):
        # tabular.html uses this function for colspan value.
        num_of_fields = 0
        if self.has_auto_field():
            num_of_fields += 1
        num_of_fields += len(self.fieldsets[0][1]["fields"])
        if self.formset.can_order:
            num_of_fields += 1
        if self.formset.can_delete:
            num_of_fields += 1
        return num_of_fields
    def pk_field(self):
        return AdminField(self.form, self.formset._pk_field.name, False)
    def fk_field(self):
        # Empty string (not None) when there is no FK, so templates can
        # render it directly.
        fk = getattr(self.formset, "fk", None)
        if fk:
            return AdminField(self.form, fk.name, False)
        else:
            return ""
    def deletion_field(self):
        from django.forms.formsets import DELETION_FIELD_NAME
        return AdminField(self.form, DELETION_FIELD_NAME, False)
    def ordering_field(self):
        from django.forms.formsets import ORDERING_FIELD_NAME
        return AdminField(self.form, ORDERING_FIELD_NAME, False)
class InlineFieldset(Fieldset):
    """Fieldset variant for inlines that hides the foreign key pointing
    back to the parent object (the formset manages that field itself)."""
    def __init__(self, formset, *args, **kwargs):
        self.formset = formset
        super(InlineFieldset, self).__init__(*args, **kwargs)
    def __iter__(self):
        foreign_key = getattr(self.formset, "fk", None)
        fk_name = foreign_key.name if foreign_key else None
        for field in self.fields:
            if field == fk_name:
                continue
            yield Fieldline(self.form, field, self.readonly_fields,
                model_admin=self.model_admin)
class AdminErrorList(forms.util.ErrorList):
    """Aggregates every error from the main form and all inline formsets
    for display at the top of an add/change admin view."""
    def __init__(self, form, inline_formsets):
        # Unbound forms cannot have errors; collect nothing in that case.
        if form.is_bound:
            self.extend(form.errors.values())
            for formset in inline_formsets:
                self.extend(formset.non_form_errors())
                for inline_form_errors in formset.errors:
                    self.extend(inline_form_errors.values())
def normalize_fieldsets(fieldsets):
    """
    Make sure the keys in fieldset dictionaries are strings. Returns the
    normalized data as a list of (name, options) pairs.
    """
    return [(name, normalize_dictionary(options))
            for name, options in fieldsets]
def normalize_dictionary(data_dict):
    """
    Convert all keys in ``data_dict`` to strings, in place, and return the
    same dict. The keys must be convertible using str().

    Fix: iterate over a snapshot of the items. The original deleted and
    inserted keys while iterating the live ``items()`` view, which raises
    RuntimeError on Python 3 and is fragile in general.
    """
    for key, value in list(data_dict.items()):
        if not isinstance(key, str):
            del data_dict[key]
            data_dict[str(key)] = value
    return data_dict
| apache-2.0 |
turtleloveshoes/kitsune | kitsune/community/tests/test_api.py | 13 | 8753 | from datetime import datetime, timedelta
from nose.tools import eq_
from django.test.client import RequestFactory
from kitsune.community import api
from kitsune.products.tests import product
from kitsune.questions.tests import answer, answervote, question
from kitsune.search.tests import ElasticTestCase
from kitsune.users.tests import profile
from kitsune.wiki.tests import document, revision
class TestTopContributorsBase(ElasticTestCase):
    """Tests for the Community Hub top users API."""
    def setUp(self):
        super(TestTopContributorsBase, self).setUp()
        self.factory = RequestFactory()
        self.api = api.TopContributorsBase()
        # The base API has no concrete data source; stub it out so the
        # filter-parsing path can be exercised in isolation.
        self.api.get_data = lambda request: {}
    def test_invalid_filter_name(self):
        """An unrecognized querystring filter should produce a warning."""
        request = self.factory.get('/', {'not_valid': 'wrong'})
        self.api.request = request
        self.api.get_filters()
        eq_(self.api.warnings, ['Unknown filter not_valid'])
class TestTopContributorsQuestions(ElasticTestCase):
    # Exercises the questions top-contributors API against a live Elastic
    # index; self.refresh() flushes fixtures into the index before querying.
    def setUp(self):
        super(TestTopContributorsQuestions, self).setUp()
        self.factory = RequestFactory()
        self.api = api.TopContributorsQuestions()
    def test_it_works(self):
        # u1: two answers, one marked as solution; u2: one answer with a
        # helpful vote. u1 should rank first (more answers).
        u1 = profile().user
        u2 = profile().user
        a1 = answer(creator=u1, save=True)  # noqa
        a2 = answer(creator=u1, save=True)
        a3 = answer(creator=u2, save=True)
        a1.question.solution = a1
        a1.question.save()
        answervote(answer=a3, helpful=True, save=True)
        self.refresh()
        req = self.factory.get('/')
        data = self.api.get_data(req)
        eq_(data['count'], 2)
        eq_(data['results'][0]['user']['username'], u1.username)
        eq_(data['results'][0]['rank'], 1)
        eq_(data['results'][0]['answer_count'], 2)
        eq_(data['results'][0]['solution_count'], 1)
        eq_(data['results'][0]['helpful_vote_count'], 0)
        # Dates are compared at second precision (microseconds stripped).
        eq_(data['results'][0]['last_contribution_date'], a2.created.replace(microsecond=0))
        eq_(data['results'][1]['user']['username'], u2.username)
        eq_(data['results'][1]['rank'], 2)
        eq_(data['results'][1]['answer_count'], 1)
        eq_(data['results'][1]['solution_count'], 0)
        eq_(data['results'][1]['helpful_vote_count'], 1)
        eq_(data['results'][1]['last_contribution_date'], a3.created.replace(microsecond=0))
    def test_filter_by_product(self):
        # Only u1 answered a p1 question, so filtering by p1 excludes u2
        # and counts only u1's p1 answer.
        u1 = profile().user
        u2 = profile().user
        p1 = product(save=True)
        p2 = product(save=True)
        q1 = question(product=p1, save=True)
        answer(question=q1, creator=u1, save=True)
        q2 = question(product=p2, save=True)
        answer(question=q2, creator=u1, save=True)
        q3 = question(product=p2, save=True)
        answer(question=q3, creator=u2, save=True)
        self.refresh()
        req = self.factory.get('/', {'product': p1.slug})
        data = self.api.get_data(req)
        eq_(data['count'], 1)
        eq_(data['results'][0]['user']['username'], u1.username)
        eq_(data['results'][0]['answer_count'], 1)
    def test_page_size(self):
        # 'count' reflects total contributors; page_size only limits the
        # 'results' page.
        u1 = profile().user
        u2 = profile().user
        q1 = question(save=True)
        answer(question=q1, creator=u1, save=True)
        q2 = question(save=True)
        answer(question=q2, creator=u2, save=True)
        self.refresh()
        req = self.factory.get('/', {'page_size': 2})
        data = self.api.get_data(req)
        eq_(data['count'], 2)
        eq_(len(data['results']), 2)
        req = self.factory.get('/', {'page_size': 1})
        data = self.api.get_data(req)
        eq_(data['count'], 2)
        eq_(len(data['results']), 1)
    def test_filter_last_contribution(self):
        u1 = profile().user
        u2 = profile().user
        today = datetime.now()
        yesterday = today - timedelta(days=1)
        day_before_yesterday = yesterday - timedelta(days=1)
        answer(creator=u1, created=today, save=True)
        answer(creator=u1, created=day_before_yesterday, save=True)
        answer(creator=u2, created=day_before_yesterday, save=True)
        self.refresh()
        # Test 1
        req = self.factory.get('/', {'last_contribution_date__gt': yesterday.strftime('%Y-%m-%d')})
        data = self.api.get_data(req)
        eq_(data['count'], 1)
        eq_(data['results'][0]['user']['username'], u1.username)
        # Even though only 1 contribution was made in the time range, this filter
        # is only checking the last contribution time, so both are included.
        eq_(data['results'][0]['answer_count'], 2)
        # Test 2
        req = self.factory.get('/', {'last_contribution_date__lt': yesterday.strftime('%Y-%m-%d')})
        data = self.api.get_data(req)
        eq_(data['count'], 1)
        eq_(data['results'][0]['user']['username'], u2.username)
        eq_(data['results'][0]['answer_count'], 1)
class TestTopContributorsLocalization(ElasticTestCase):
    # Mirror of the questions tests above, for the localization (wiki
    # revisions/reviews) top-contributors API.
    def setUp(self):
        super(TestTopContributorsLocalization, self).setUp()
        self.factory = RequestFactory()
        self.api = api.TopContributorsLocalization()
    def test_it_works(self):
        # u1: two revisions; u2: one revision plus one review (of r2).
        u1 = profile().user
        u2 = profile().user
        r1 = revision(creator=u1, save=True)  # noqa
        r2 = revision(creator=u1, save=True)
        r3 = revision(creator=u2, save=True)
        r2.reviewer = u2
        r2.save()
        self.refresh()
        req = self.factory.get('/')
        data = self.api.get_data(req)
        eq_(data['count'], 2)
        eq_(data['results'][0]['user']['username'], u1.username)
        eq_(data['results'][0]['rank'], 1)
        eq_(data['results'][0]['revision_count'], 2)
        eq_(data['results'][0]['review_count'], 0)
        # Dates are compared at second precision (microseconds stripped).
        eq_(data['results'][0]['last_contribution_date'], r2.created.replace(microsecond=0))
        eq_(data['results'][1]['user']['username'], u2.username)
        eq_(data['results'][1]['rank'], 2)
        eq_(data['results'][1]['revision_count'], 1)
        eq_(data['results'][1]['review_count'], 1)
        eq_(data['results'][1]['last_contribution_date'], r3.created.replace(microsecond=0))
    def test_filter_by_product(self):
        # Documents carry the product relation; only d1 belongs to p1.
        u1 = profile().user
        u2 = profile().user
        p1 = product(save=True)
        p2 = product(save=True)
        d1 = document(save=True)
        d1.products.add(p1)
        revision(document=d1, creator=u1, save=True)
        d2 = document(save=True)
        d2.products.add(p2)
        revision(document=d2, creator=u1, save=True)
        d3 = document(save=True)
        d3.products.add(p2)
        revision(document=d3, creator=u2, save=True)
        self.refresh()
        req = self.factory.get('/', {'product': p1.slug})
        data = self.api.get_data(req)
        eq_(data['count'], 1)
        eq_(data['results'][0]['user']['username'], u1.username)
        eq_(data['results'][0]['revision_count'], 1)
    def test_page_size(self):
        # 'count' is the total; page_size only limits the 'results' page.
        u1 = profile().user
        u2 = profile().user
        d1 = document(save=True)
        revision(document=d1, creator=u1, save=True)
        d2 = document(save=True)
        revision(document=d2, creator=u2, save=True)
        self.refresh()
        req = self.factory.get('/', {'page_size': 2})
        data = self.api.get_data(req)
        eq_(data['count'], 2)
        eq_(len(data['results']), 2)
        req = self.factory.get('/', {'page_size': 1})
        data = self.api.get_data(req)
        eq_(data['count'], 2)
        eq_(len(data['results']), 1)
    def test_filter_last_contribution(self):
        u1 = profile().user
        u2 = profile().user
        today = datetime.now()
        yesterday = today - timedelta(days=1)
        day_before_yesterday = yesterday - timedelta(days=1)
        revision(creator=u1, created=today, save=True)
        revision(creator=u1, created=day_before_yesterday, save=True)
        revision(creator=u2, created=day_before_yesterday, save=True)
        self.refresh()
        # Test 1
        req = self.factory.get('/', {'last_contribution_date__gt': yesterday.strftime('%Y-%m-%d')})
        data = self.api.get_data(req)
        eq_(data['count'], 1)
        eq_(data['results'][0]['user']['username'], u1.username)
        # Even though only 1 contribution was made in the time range, this filter
        # is only checking the last contribution time, so both are included.
        eq_(data['results'][0]['revision_count'], 2)
        # Test 2
        req = self.factory.get('/', {'last_contribution_date__lt': yesterday.strftime('%Y-%m-%d')})
        data = self.api.get_data(req)
        eq_(data['count'], 1)
        eq_(data['results'][0]['user']['username'], u2.username)
        eq_(data['results'][0]['revision_count'], 1)
| bsd-3-clause |
ISIFoundation/influenzanet-website | apps/survey/api/resources.py | 4 | 2676 | from tastypie.resources import ModelResource
from apps.survey.models import Profile, SurveyUser, Survey
from apps.survey.survey import parse_specification
from apps.survey.spec import Question, Branch, Else
from pickle import loads
from inspect import isclass
class EpiwebModelResource(ModelResource):
    # Shared base for the EIP API resources below: JSON by default,
    # read-only (GET), and no resource_uri field in payloads.
    class Meta:
        default_format = 'application/json'
        include_resource_uri = False
        allowed_methods = ['get']
def xmlify_spec(spec):
    """Render a parsed survey specification as a crude XML string.

    Fixes over the original:
    * the fallback branch for unknown node types built the <unknown> element
      but never returned it, so xs() yielded None and the surrounding string
      concatenation crashed (or leaked 'None' into the output);
    * Branch/Else child rules were serialized with str(), leaking Python
      list reprs into the XML instead of recursing with xs().
    """
    p = parse_specification(spec)
    def a(s):
        # Coerce anything to str so the tag helpers can concatenate it.
        return str(s)
    def t(tag, s):
        # Wrap s in <tag>...</tag>, one newline after each tag.
        return a('<%s>\n' % tag) + a(s) + a('</%s>\n' % tag)
    def xo(options):
        # Serialize a sequence of (code, text) option pairs.
        return reduce(lambda s, o: s + t('option', t('code', o[0]) + t('text', o[1])),
                      options, '')
    def xs(f):
        # Recursively serialize a rule, a string, or a (nested) rule list.
        if not f:
            return ''
        if isinstance(f, str):
            return f + '\n'
        if isinstance(f, list) or isinstance(f, tuple):
            return xs(f[0]) + xs(f[1:])
        elif isinstance(f, Else):
            # Recurse into the child rules rather than str()-ing the list.
            return t('else', xs(f.rules))
        elif isinstance(f, Branch):
            # Condition is emitted verbatim; child rules are recursed.
            return t('branch', t('condition', f.condition) + t('rules', xs(f.rules)))
        elif isclass(f) and issubclass(f, Question):
            x = t('type', f.type)
            x += t('question', f.question)
            if 'options' in dir(f):
                x += xo(f.options)
            return t('item', x)
        else:
            # Fix: this branch previously computed the tag but dropped it.
            return t('unknown', type(f))
    return t('survey', xs(p.rules))
## EIP resources
class GetUserProfile(EpiwebModelResource):
    """Takes global_id
    Returns name, a_uids, code, report_ts
    """
    class Meta:
        resource_name = 'GetUserProfile'
        queryset = Profile.objects.all()
        filtering = ['user__global_id']
    def dehydrate(self, bundle):
        # Replace the whole response payload with the unpickled profile blob.
        # Fix: dropped the unused local that shadowed the builtin `id`.
        # SECURITY NOTE: pickle loads() on stored data executes arbitrary
        # code if the database contents are ever attacker-controlled; keep
        # this data source trusted or switch to a safe serializer.
        return loads(str(bundle.data['data']))
class GetReportSurvey(ModelResource):
    """Takes language int
    Returns survey in XML format
    """
    class Meta:
        resource_name = 'GetReportSurvey'
        queryset = Survey.objects.all()
        fields = ['specification']
    def dehydrate(self, bundle):
        # Serialize the stored specification to XML for the response body.
        return xmlify_spec(bundle.data['specification'])
class Report(ModelResource):
    """Takes uid and report.
    Returns status.
    """
    # Write-only endpoint stub: accepts PUT submissions for a survey user;
    # no custom dehydrate/hydrate logic is defined here.
    class Meta:
        queryset = SurveyUser.objects.all()
        allowed_methods = ['put']
| agpl-3.0 |
lizardsystem/lizard-damage | lizard_damage/migrations/0025_auto__add_field_damagescenario_ahn_version.py | 1 | 9142 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    # Auto-generated South schema migration: adds the 'ahn_version' column
    # to DamageScenario. The 'models' dict below is South's frozen ORM
    # snapshot and must not be edited by hand.
    def forwards(self, orm):
        # Adding field 'DamageScenario.ahn_version'
        # NOTE(review): default=2 is an int for a CharField — mirrors the
        # model's default at generation time; presumably coerced on write,
        # verify against the model definition.
        db.add_column(u'lizard_damage_damagescenario', 'ahn_version',
                      self.gf('django.db.models.fields.CharField')(default=2, max_length=2),
                      keep_default=False)
    def backwards(self, orm):
        # Deleting field 'DamageScenario.ahn_version'
        db.delete_column(u'lizard_damage_damagescenario', 'ahn_version')
    models = {
        u'lizard_damage.benefitscenario': {
            'Meta': {'object_name': 'BenefitScenario'},
            'datetime_created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '128'}),
            'expiration_date': ('django.db.models.fields.DateTimeField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'zip_result': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'zip_risk_a': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'zip_risk_b': ('django.db.models.fields.files.FileField', [], {'max_length': '100'})
        },
        u'lizard_damage.benefitscenarioresult': {
            'Meta': {'object_name': 'BenefitScenarioResult'},
            'benefit_scenario': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lizard_damage.BenefitScenario']"}),
            'east': ('django.db.models.fields.FloatField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'north': ('django.db.models.fields.FloatField', [], {}),
            'south': ('django.db.models.fields.FloatField', [], {}),
            'west': ('django.db.models.fields.FloatField', [], {})
        },
        u'lizard_damage.damageevent': {
            'Meta': {'object_name': 'DamageEvent'},
            'floodmonth': ('django.db.models.fields.IntegerField', [], {'default': '9'}),
            'floodtime': ('django.db.models.fields.FloatField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'max_height': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'min_height': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
            'repairtime_buildings': ('django.db.models.fields.FloatField', [], {'default': '432000'}),
            'repairtime_roads': ('django.db.models.fields.FloatField', [], {'default': '432000'}),
            'repetition_time': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            'scenario': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lizard_damage.DamageScenario']"}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
            'table': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
        },
        u'lizard_damage.damageeventresult': {
            'Meta': {'object_name': 'DamageEventResult'},
            'damage_event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lizard_damage.DamageEvent']"}),
            'east': ('django.db.models.fields.FloatField', [], {}),
            'geotransform_json': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'north': ('django.db.models.fields.FloatField', [], {}),
            'relative_path': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'result_type': ('django.db.models.fields.CharField', [], {'max_length': '10'}),
            'south': ('django.db.models.fields.FloatField', [], {}),
            'west': ('django.db.models.fields.FloatField', [], {})
        },
        u'lizard_damage.damageeventwaterlevel': {
            'Meta': {'ordering': "(u'index',)", 'object_name': 'DamageEventWaterlevel'},
            'event': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lizard_damage.DamageEvent']"}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'index': ('django.db.models.fields.IntegerField', [], {'default': '100'}),
            'waterlevel_path': ('django.db.models.fields.FilePathField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'})
        },
        u'lizard_damage.damagescenario': {
            'Meta': {'object_name': 'DamageScenario'},
            'ahn_version': ('django.db.models.fields.CharField', [], {'default': '2', 'max_length': '2'}),
            'calc_type': ('django.db.models.fields.IntegerField', [], {'default': '2'}),
            'customheights': ('django.db.models.fields.FilePathField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'customlanduse': ('django.db.models.fields.FilePathField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'customlandusegeoimage': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lizard_damage.GeoImage']", 'null': 'True', 'blank': 'True'}),
            'damagetable_file': ('django.db.models.fields.FilePathField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
            'datetime_created': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '128'}),
            'expiration_date': ('django.db.models.fields.DateTimeField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'scenario_type': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'status': ('django.db.models.fields.IntegerField', [], {'default': '1'})
        },
        u'lizard_damage.geoimage': {
            'Meta': {'object_name': 'GeoImage'},
            'east': ('django.db.models.fields.FloatField', [], {}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'image': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
            'north': ('django.db.models.fields.FloatField', [], {}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
            'south': ('django.db.models.fields.FloatField', [], {}),
            'west': ('django.db.models.fields.FloatField', [], {})
        },
        u'lizard_damage.riskresult': {
            'Meta': {'object_name': 'RiskResult'},
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'scenario': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['lizard_damage.DamageScenario']"}),
            'zip_risk': ('django.db.models.fields.files.FileField', [], {'max_length': '100'})
        },
        u'lizard_damage.roads': {
            'Meta': {'object_name': 'Roads', 'db_table': "u'data_roads'"},
            'gid': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True'}),
            'gridcode': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
            'the_geom': ('django.contrib.gis.db.models.fields.MultiPolygonField', [], {'srid': '28992', 'null': 'True', 'blank': 'True'}),
            'typeinfr_1': ('django.db.models.fields.CharField', [], {'max_length': '25', 'blank': 'True'}),
            'typeweg': ('django.db.models.fields.CharField', [], {'max_length': '120', 'blank': 'True'})
        },
        u'lizard_damage.unit': {
            'Meta': {'object_name': 'Unit'},
            'factor': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'})
        }
    }
    complete_apps = ['lizard_damage']
KamranMackey/readthedocs.org | readthedocs/projects/migrations/0041_add_programming_language.py | 8 | 12231 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        # Adding field 'Project.programming_language'
        # Existing rows are backfilled with 'none'; keep_default=False means
        # the default is applied once during migration, not kept in schema.
        db.add_column(u'projects_project', 'programming_language',
                      self.gf('django.db.models.fields.CharField')(default='none', max_length=20),
                      keep_default=False)
def backwards(self, orm):
# Deleting field 'Project.programming_language'
db.delete_column(u'projects_project', 'programming_language')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'builds.version': {
'Meta': {'ordering': "['-verbose_name']", 'unique_together': "[('project', 'slug')]", 'object_name': 'Version'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'built': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'identifier': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'machine': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'privacy_level': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '20'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'versions'", 'to': u"orm['projects.Project']"}),
'slug': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'supported': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'unknown'", 'max_length': '20'}),
'uploaded': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'verbose_name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'projects.emailhook': {
'Meta': {'object_name': 'EmailHook'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'emailhook_notifications'", 'to': u"orm['projects.Project']"})
},
u'projects.importedfile': {
'Meta': {'object_name': 'ImportedFile'},
'commit': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'md5': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'imported_files'", 'to': u"orm['projects.Project']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'version': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'imported_filed'", 'null': 'True', 'to': u"orm['builds.Version']"})
},
u'projects.project': {
'Meta': {'ordering': "('slug',)", 'object_name': 'Project'},
'analytics_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'canonical_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'conf_py_file': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'copyright': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'default_branch': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'default_version': ('django.db.models.fields.CharField', [], {'default': "'latest'", 'max_length': '255'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'django_packages_url': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'documentation_type': ('django.db.models.fields.CharField', [], {'default': "'sphinx'", 'max_length': '20'}),
'featured': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'default': "'en'", 'max_length': '20'}),
'main_language_project': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'translations'", 'null': 'True', 'to': u"orm['projects.Project']"}),
'mirror': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'modified_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'num_major': ('django.db.models.fields.IntegerField', [], {'default': '2', 'max_length': '3', 'null': 'True', 'blank': 'True'}),
'num_minor': ('django.db.models.fields.IntegerField', [], {'default': '2', 'max_length': '3', 'null': 'True', 'blank': 'True'}),
'num_point': ('django.db.models.fields.IntegerField', [], {'default': '2', 'max_length': '3', 'null': 'True', 'blank': 'True'}),
'path': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'privacy_level': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '20'}),
'programming_language': ('django.db.models.fields.CharField', [], {'default': "'none'", 'max_length': '20'}),
'project_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'pub_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'python_interpreter': ('django.db.models.fields.CharField', [], {'default': "'python'", 'max_length': '20'}),
'related_projects': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': u"orm['projects.Project']", 'null': 'True', 'through': u"orm['projects.ProjectRelationship']", 'blank': 'True'}),
'repo': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'repo_type': ('django.db.models.fields.CharField', [], {'default': "'git'", 'max_length': '10'}),
'requirements_file': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'null': 'True', 'blank': 'True'}),
'single_version': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'skip': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '255'}),
'suffix': ('django.db.models.fields.CharField', [], {'default': "'.rst'", 'max_length': '10'}),
'theme': ('django.db.models.fields.CharField', [], {'default': "'default'", 'max_length': '20'}),
'use_system_packages': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'use_virtualenv': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'projects'", 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'version_privacy_level': ('django.db.models.fields.CharField', [], {'default': "'public'", 'max_length': '20'})
},
u'projects.projectrelationship': {
'Meta': {'object_name': 'ProjectRelationship'},
'child': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'superprojects'", 'to': u"orm['projects.Project']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'subprojects'", 'to': u"orm['projects.Project']"})
},
u'projects.webhook': {
'Meta': {'object_name': 'WebHook'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'webhook_notifications'", 'to': u"orm['projects.Project']"}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
}
}
complete_apps = ['projects'] | mit |
teampopong/crawlers | election_commission/crawlers/assembly/base.py | 3 | 3547 | #!/usr/bin/python2.7
# -*- encoding=utf-8 -*-
import gevent
from gevent import monkey
import itertools
from urlparse import urljoin
from utils import flatten, get_json, get_xpath, parse_cell, sanitize, split
monkey.patch_all()
class BaseCrawler(object):
    """Shared scraping logic for assembly-election result pages.

    Concrete subclasses are expected to provide ``nth`` (the assembly
    number), ``attrs`` (column names in the same order as the page's
    <td> cells) and the URL attributes used by their ``crawl()``
    implementations -- TODO confirm against the subclass definitions.
    """

    # Image paths on the result pages are relative to this host.
    url_image_base = 'http://info.nec.go.kr'
    # Ordered per-member column names; one table cell per entry.
    attrs = []
    # Attrs whose raw cell element is consumed as-is instead of going
    # through parse_cell() (the image cell keeps its lxml element).
    attrs_exclude_parse_cell = ['image']

    def parse(self, url, city_name=None):
        """Fetch ``url`` and return a list of parsed member dicts.

        The page is assumed to lay members out as consecutive runs of
        <td> cells, one cell per entry of ``self.attrs``.
        """
        elems = get_xpath(url, '//td')
        num_attrs = len(self.attrs)
        # Chunk the flat <td> list into one raw dict per member.
        members = (dict(zip(self.attrs, elems[i*num_attrs:(i+1)*num_attrs]))\
                for i in xrange(len(elems) / num_attrs))
        members = [self.parse_member(member, city_name=city_name) for member in members]
        print 'crawled #%d - %s(%d)...' % (self.nth, city_name or '비례대표', len(members))
        return members

    def parse_record(self, record):
        """Run parse_cell over every attr except the excluded ones, in place."""
        for attr in self.attrs:
            if attr not in self.attrs_exclude_parse_cell:
                record[attr] = parse_cell(record[attr])

    def parse_member(self, member, city_name=None):
        """Normalize one raw member dict in place and return it."""
        self.parse_record(member)
        # never change the order
        member['assembly_no'] = self.nth
        member['elected'] = self.__class__.__name__.startswith('Elected')
        self.parse_member_image(member)
        self.parse_member_name(member)
        self.parse_member_birth(member)
        self.parse_member_district(member, city_name)
        self.parse_member_vote(member)
        return member

    def parse_member_image(self, member):
        """Resolve the relative portrait path into an absolute URL."""
        if 'image' not in member: return
        rel_path = member['image'].find("./input[@type='image']").attrib['src']
        member['image'] = urljoin(self.url_image_base, rel_path)

    def parse_member_name(self, member):
        """Split 'name' into Korean (name_kr) and Chinese-character (name_cn) spellings."""
        if 'name' not in member: return
        member['name_kr'], member['name_cn'] = map(sanitize, member['name'][:2])
        del member['name']

    def parse_member_birth(self, member):
        """Split the first 'birth' cell into year/month/day fields."""
        if 'birth' not in member: return
        member['birthyear'], member['birthmonth'], member['birthday'] =\
                split(member['birth'][0])
        del member['birth']

    def parse_member_district(self, member, city_name):
        """Prefix the district with its city name when one is given."""
        if city_name:
            member['district'] = '%s %s' % (city_name, member['district'])

    def parse_member_vote(self, member):
        """Split 'vote' into a comma-free vote count and a vote rate."""
        if 'vote' not in member: return
        member['votenum'], member['voterate'] = map(sanitize, member['vote'][:2])
        member['votenum'] = member['votenum'].replace(',', '')
        del member['vote']
class MultiCityCrawler(BaseCrawler):
    """Crawler that scrapes one list page per city concurrently and,
    when a ``prop_crawler`` is attached, appends the
    proportional-representation members to the result."""

    def city_codes(self):
        """Return (code, name) pairs for every city in the remote JSON."""
        body = get_json(self.url_city_codes_json)['body']
        return [(entry['CODE'], entry['NAME']) for entry in body]

    def url_list(self, city_code):
        """Build the list-page URL for a single city code."""
        return '%s%s' % (self.url_list_base, city_code)

    def crawl(self):
        """Scrape all district members in parallel, then any
        proportional-representation members."""
        # District representatives: one greenlet per city list page.
        jobs = [gevent.spawn(self.parse, self.url_list(code), name)
                for code, name in self.city_codes()]
        gevent.joinall(jobs)
        people = flatten(job.get() for job in jobs)

        # Proportional representatives, when a sub-crawler is configured.
        if hasattr(self, 'prop_crawler'):
            extra = self.prop_crawler.crawl()
            for person in extra:
                person['district'] = '비례대표'
            people.extend(extra)
        return people
class SinglePageCrawler(BaseCrawler):
    """Crawler for elections whose members all appear on one list page."""

    def crawl(self):
        """Parse the single list page and return its members."""
        return self.parse(self.url_list)
| agpl-3.0 |
axinging/sky_engine | sky/tools/webkitpy/common/system/filesystem_mock.py | 11 | 16806 | # Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import StringIO
import errno
import hashlib
import os
import re
from webkitpy.common.system import path
class MockFileSystem(object):
    """In-memory stand-in for webkitpy's FileSystem wrapper.

    Paths are POSIX-style strings.  File contents live in ``self.files``
    (a value of None marks a nonexistent/deleted file); directories live
    in the ``self.dirs`` set.  Writes are additionally mirrored into
    ``self.written_files`` so tests can inspect what changed.
    """

    sep = '/'
    pardir = '..'

    def __init__(self, files=None, dirs=None, cwd='/'):
        """Initializes a "mock" filesystem that can be used to completely
        stub out a filesystem.

        Args:
            files: a dict of filenames -> file contents. A file contents
                value of None is used to indicate that the file should
                not exist.
            dirs: an iterable of directory paths that exist even if empty.
            cwd: the initial current working directory.
        """
        self.files = files or {}
        self.written_files = {}
        self.last_tmpdir = None
        self.current_tmpno = 0
        self.cwd = cwd
        self.dirs = set(dirs or [])
        self.dirs.add(cwd)
        # Every ancestor directory of a known file implicitly exists.
        for f in self.files:
            d = self.dirname(f)
            while not d in self.dirs:
                self.dirs.add(d)
                d = self.dirname(d)

    def clear_written_files(self):
        # This function can be used to track what is written between steps in a test.
        self.written_files = {}

    def _raise_not_found(self, path):
        """Raise the same IOError a real filesystem would for a missing path."""
        raise IOError(errno.ENOENT, path, os.strerror(errno.ENOENT))

    def _split(self, path):
        # This is not quite a full implementation of os.path.split
        # http://docs.python.org/library/os.path.html#os.path.split
        if self.sep in path:
            return path.rsplit(self.sep, 1)
        return ('', path)

    def abspath(self, path):
        """Return ``path`` made absolute against the mock cwd."""
        if os.path.isabs(path):
            return self.normpath(path)
        return self.abspath(self.join(self.cwd, path))

    def realpath(self, path):
        # The mock has no symlinks, so realpath is just abspath.
        return self.abspath(path)

    def basename(self, path):
        """Return the final path component."""
        return self._split(path)[1]

    def expanduser(self, path):
        """Expand a leading '~' to the fixed mock home directory.

        An empty string is returned unchanged, matching
        os.path.expanduser('') (the previous code raised IndexError).
        """
        if not path or path[0] != "~":
            return path
        parts = path.split(self.sep, 1)
        home_directory = self.sep + "Users" + self.sep + "mock"
        if len(parts) == 1:
            return home_directory
        return home_directory + self.sep + parts[1]

    def path_to_module(self, module_name):
        """Map a dotted module name to its mock checkout path."""
        return "/mock-checkout/third_party/WebKit/tools/" + module_name.replace('.', '/') + ".py"

    def chdir(self, path):
        """Change the mock cwd; raises OSError if ``path`` is not a directory."""
        path = self.normpath(path)
        if not self.isdir(path):
            raise OSError(errno.ENOENT, path, os.strerror(errno.ENOENT))
        self.cwd = path

    def copyfile(self, source, destination):
        """Copy file contents, mimicking shutil.copyfile's error cases."""
        if not self.exists(source):
            self._raise_not_found(source)
        if self.isdir(source):
            raise IOError(errno.EISDIR, source, os.strerror(errno.EISDIR))
        if self.isdir(destination):
            raise IOError(errno.EISDIR, destination, os.strerror(errno.EISDIR))
        if not self.exists(self.dirname(destination)):
            raise IOError(errno.ENOENT, destination, os.strerror(errno.ENOENT))

        self.files[destination] = self.files[source]
        self.written_files[destination] = self.files[source]

    def dirname(self, path):
        """Return everything before the final path component."""
        return self._split(path)[0]

    def exists(self, path):
        """True if ``path`` is a live file or a known directory."""
        return self.isfile(path) or self.isdir(path)

    def files_under(self, path, dirs_to_skip=None, file_filter=None):
        """Return live files under ``path``, skipping named subdirectories.

        ``dirs_to_skip`` now defaults to None instead of a shared
        mutable [] (the classic mutable-default pitfall); callers that
        pass a list see identical behavior.
        """
        dirs_to_skip = dirs_to_skip or []

        def filter_all(fs, dirpath, basename):
            return True

        file_filter = file_filter or filter_all
        files = []
        if self.isfile(path):
            if file_filter(self, self.dirname(path), self.basename(path)) and self.files[path] is not None:
                files.append(path)
            return files

        if self.basename(path) in dirs_to_skip:
            return []

        if not path.endswith(self.sep):
            path += self.sep

        dir_substrings = [self.sep + d + self.sep for d in dirs_to_skip]
        for filename in self.files:
            if not filename.startswith(path):
                continue

            # Keep the leading separator so skipped-dir substrings match
            # even when the skipped dir is the first component.
            suffix = filename[len(path) - 1:]
            if any(dir_substring in suffix for dir_substring in dir_substrings):
                continue

            dirpath, basename = self._split(filename)
            if file_filter(self, dirpath, basename) and self.files[filename] is not None:
                files.append(filename)

        return files

    def getcwd(self):
        """Return the mock current working directory."""
        return self.cwd

    def glob(self, glob_string):
        """Match live files and known dirs against a '*'-only glob."""
        # FIXME: This handles '*', but not '?', '[', or ']'.
        glob_string = re.escape(glob_string)
        glob_string = glob_string.replace('\\*', '[^\\/]*') + '$'
        glob_string = glob_string.replace('\\/', '/')
        path_filter = lambda path: re.match(glob_string, path)

        # We could use fnmatch.fnmatch, but that might not do the right thing on windows.
        existing_files = [path for path, contents in self.files.items() if contents is not None]
        # List comprehensions instead of the old `filter(...) + filter(...)`:
        # identical on Python 2 and still correct on Python 3, where
        # filter() returns an iterator and `+` would raise TypeError.
        return ([path for path in existing_files if path_filter(path)] +
                [path for path in self.dirs if path_filter(path)])

    def isabs(self, path):
        """True if ``path`` starts with the separator."""
        return path.startswith(self.sep)

    def isfile(self, path):
        """True if ``path`` names a file whose contents are not None."""
        return path in self.files and self.files[path] is not None

    def isdir(self, path):
        """True if the normalized ``path`` is a known directory."""
        return self.normpath(path) in self.dirs

    def _slow_but_correct_join(self, *comps):
        return re.sub(re.escape(os.path.sep), self.sep, os.path.join(*comps))

    def join(self, *comps):
        # This function is called a lot, so we optimize it; there are
        # unittests to check that we match _slow_but_correct_join(), above.
        path = ''
        sep = self.sep
        for comp in comps:
            if not comp:
                continue
            if comp[0] == sep:
                # An absolute component resets the accumulated path,
                # matching os.path.join semantics.
                path = comp
                continue
            if path:
                path += sep
            path += comp
        if comps[-1] == '' and path:
            path += '/'
        path = path.replace(sep + sep, sep)
        return path

    def listdir(self, path):
        """Return the immediate children (dirs then files) of ``path``."""
        root, dirs, files = list(self.walk(path))[0]
        return dirs + files

    def walk(self, top):
        """Single-level os.walk() equivalent: [(top, dirs, files)]."""
        sep = self.sep
        if not self.isdir(top):
            raise OSError("%s is not a directory" % top)

        if not top.endswith(sep):
            top += sep

        dirs = []
        files = []
        for f in self.files:
            if self.exists(f) and f.startswith(top):
                remaining = f[len(top):]
                if sep in remaining:
                    # Only the first component below `top` is a child dir.
                    child = remaining[:remaining.index(sep)]
                    if not child in dirs:
                        dirs.append(child)
                else:
                    files.append(remaining)
        return [(top[:-1], dirs, files)]

    def mtime(self, path):
        """Return a fixed mtime of 0 for existing paths; raise otherwise."""
        if self.exists(path):
            return 0
        self._raise_not_found(path)

    def _mktemp(self, suffix='', prefix='tmp', dir=None, **kwargs):
        """Generate (and remember) a unique temp-dir path; does not create it."""
        if dir is None:
            dir = self.sep + '__im_tmp'
        curno = self.current_tmpno
        self.current_tmpno += 1
        self.last_tmpdir = self.join(dir, '%s_%u_%s' % (prefix, curno, suffix))
        return self.last_tmpdir

    def mkdtemp(self, **kwargs):
        """Create a temp directory; usable directly or as a context manager."""
        class TemporaryDirectory(object):
            def __init__(self, fs, **kwargs):
                self._kwargs = kwargs
                self._filesystem = fs
                self._directory_path = fs._mktemp(**kwargs)
                fs.maybe_make_directory(self._directory_path)

            def __str__(self):
                return self._directory_path

            def __enter__(self):
                return self._directory_path

            def __exit__(self, type, value, traceback):
                # Only self-delete if necessary.

                # FIXME: Should we delete non-empty directories?
                if self._filesystem.exists(self._directory_path):
                    self._filesystem.rmtree(self._directory_path)

        return TemporaryDirectory(fs=self, **kwargs)

    def maybe_make_directory(self, *path):
        """Create the directory (and all ancestors) if it doesn't exist."""
        norm_path = self.normpath(self.join(*path))
        while norm_path and not self.isdir(norm_path):
            self.dirs.add(norm_path)
            norm_path = self.dirname(norm_path)

    def move(self, source, destination):
        """Move a file or tree, recording the change in written_files."""
        if not self.exists(source):
            self._raise_not_found(source)
        if self.isfile(source):
            self.files[destination] = self.files[source]
            self.written_files[destination] = self.files[destination]
            self.files[source] = None
            self.written_files[source] = None
            return
        self.copytree(source, destination)
        self.rmtree(source)

    def _slow_but_correct_normpath(self, path):
        return re.sub(re.escape(os.path.sep), self.sep, os.path.normpath(path))

    def normpath(self, path):
        # This function is called a lot, so we try to optimize the common cases
        # instead of always calling _slow_but_correct_normpath(), above.
        if '..' in path or '/./' in path:
            # This doesn't happen very often; don't bother trying to optimize it.
            return self._slow_but_correct_normpath(path)
        if not path:
            return '.'
        if path == '/':
            return path
        if path == '/.':
            return '/'
        if path.endswith('/.'):
            return path[:-2]
        if path.endswith('/'):
            return path[:-1]
        return path

    def open_binary_tempfile(self, suffix=''):
        """Return (writable file object, path) for a fresh temp file."""
        path = self._mktemp(suffix)
        return (WritableBinaryFileObject(self, path), path)

    def open_binary_file_for_reading(self, path):
        if self.files[path] is None:
            self._raise_not_found(path)
        return ReadableBinaryFileObject(self, path, self.files[path])

    def read_binary_file(self, path):
        # Intentionally raises KeyError if we don't recognize the path.
        if self.files[path] is None:
            self._raise_not_found(path)
        return self.files[path]

    def write_binary_file(self, path, contents):
        # FIXME: should this assert if dirname(path) doesn't exist?
        self.maybe_make_directory(self.dirname(path))
        self.files[path] = contents
        self.written_files[path] = contents

    def open_text_file_for_reading(self, path):
        if self.files[path] is None:
            self._raise_not_found(path)
        return ReadableTextFileObject(self, path, self.files[path])

    def open_text_file_for_writing(self, path):
        return WritableTextFileObject(self, path)

    def read_text_file(self, path):
        """Read ``path`` and decode its bytes as UTF-8."""
        return self.read_binary_file(path).decode('utf-8')

    def write_text_file(self, path, contents):
        """Encode ``contents`` as UTF-8 and write it to ``path``."""
        return self.write_binary_file(path, contents.encode('utf-8'))

    def sha1(self, path):
        """Return the hex SHA-1 digest of the file's contents."""
        contents = self.read_binary_file(path)
        return hashlib.sha1(contents).hexdigest()

    def relpath(self, path, start='.'):
        # Since os.path.relpath() calls os.path.normpath()
        # (see http://docs.python.org/library/os.path.html#os.path.abspath )
        # it also removes trailing slashes and converts forward and backward
        # slashes to the preferred slash os.sep.
        start = self.abspath(start)
        path = self.abspath(path)

        common_root = start
        dot_dot = ''
        while not common_root == '':
            if path.startswith(common_root):
                break
            common_root = self.dirname(common_root)
            dot_dot += '..' + self.sep

        rel_path = path[len(common_root):]

        if not rel_path:
            return '.'

        if rel_path[0] == self.sep:
            # It is probably sufficient to remove just the first character
            # since os.path.normpath() collapses separators, but we use
            # lstrip() just to be sure.
            rel_path = rel_path.lstrip(self.sep)
        elif not common_root == '/':
            # We are in the case typified by the following example:
            # path = "/tmp/foobar", start = "/tmp/foo" -> rel_path = "bar"
            common_root = self.dirname(common_root)
            dot_dot += '..' + self.sep
            rel_path = path[len(common_root) + 1:]

        return dot_dot + rel_path

    def remove(self, path):
        """Delete a file by setting its contents to None."""
        if self.files[path] is None:
            self._raise_not_found(path)
        self.files[path] = None
        self.written_files[path] = None

    def rmtree(self, path):
        """Delete a directory tree: its files become None, its dirs vanish."""
        path = self.normpath(path)

        for f in self.files:
            # We need to add a trailing separator to path to avoid matching
            # cases like path='/foo/b' and f='/foo/bar/baz'.
            if f == path or f.startswith(path + self.sep):
                self.files[f] = None

        self.dirs = set(filter(lambda d: not (d == path or d.startswith(path + self.sep)), self.dirs))

    def copytree(self, source, destination):
        """Copy every file under ``source`` to the same relative spot under ``destination``."""
        source = self.normpath(source)
        destination = self.normpath(destination)

        for source_file in list(self.files):
            if source_file.startswith(source):
                destination_path = self.join(destination, self.relpath(source_file, source))
                self.maybe_make_directory(self.dirname(destination_path))
                self.files[destination_path] = self.files[source_file]

    def split(self, path):
        """Split on the last separator; ('', path) when there is none."""
        idx = path.rfind(self.sep)
        if idx == -1:
            return ('', path)
        return (path[:idx], path[(idx + 1):])

    def splitext(self, path):
        """Split off the extension; ('path', '') when there is no dot."""
        idx = path.rfind('.')
        if idx == -1:
            idx = len(path)
        return (path[0:idx], path[idx:])
class WritableBinaryFileObject(object):
    """File-like writer that appends into a MockFileSystem's in-memory
    ``files`` dict and mirrors every write into ``written_files``."""

    def __init__(self, fs, path):
        self.fs = fs
        self.path = path
        self.closed = False
        # Opening for write truncates, as open(path, 'wb') would.
        self.fs.files[path] = ""

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def close(self):
        self.closed = True

    def write(self, str):
        # Append, then mirror the full contents so tests can inspect
        # exactly what was written during this step.
        updated = self.fs.files[self.path] + str
        self.fs.files[self.path] = updated
        self.fs.written_files[self.path] = updated
class WritableTextFileObject(WritableBinaryFileObject):
    """Text-mode variant: encodes each write as UTF-8 before delegating
    to the binary writer."""

    def write(self, str):
        super(WritableTextFileObject, self).write(str.encode('utf-8'))
class ReadableBinaryFileObject(object):
    """Read-only file-like object over an in-memory string.

    Tracks a read offset into ``data`` so partial reads behave like a
    real binary file handle.
    """

    def __init__(self, fs, path, data):
        self.fs = fs
        self.path = path
        self.closed = False
        self.data = data
        self.offset = 0

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def close(self):
        self.closed = True

    def read(self, bytes=None):
        """Read and return up to ``bytes`` bytes from the current offset.

        Matches real file semantics: ``bytes`` of None or a negative
        value reads to EOF (and advances the offset there); ``bytes`` of
        0 returns an empty string.  The previous implementation treated
        any falsy value -- including 0 -- as "read everything" and never
        advanced the offset on a read-to-EOF.
        """
        if bytes is None or bytes < 0:
            start = self.offset
            self.offset = len(self.data)
            return self.data[start:]
        start = self.offset
        self.offset += bytes
        return self.data[start:self.offset]
class ReadableTextFileObject(ReadableBinaryFileObject):
    """Text-mode reader: decodes the raw bytes as UTF-8 into a StringIO
    and forwards every read operation to it."""

    def __init__(self, fs, path, data):
        text_stream = StringIO.StringIO(data.decode("utf-8"))
        super(ReadableTextFileObject, self).__init__(fs, path, text_stream)

    def close(self):
        # Close the underlying StringIO before flipping the closed flag.
        self.data.close()
        super(ReadableTextFileObject, self).close()

    def read(self, bytes=-1):
        return self.data.read(bytes)

    def readline(self, length=None):
        return self.data.readline(length)

    def __iter__(self):
        return iter(self.data)

    def next(self):
        # Python 2 iterator protocol; delegates to the StringIO.
        return self.data.next()

    def seek(self, offset, whence=os.SEEK_SET):
        self.data.seek(offset, whence)
| bsd-3-clause |
eayunstack/nova | nova/tests/objects/test_instance.py | 4 | 54406 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import iso8601
import mock
import mox
import netaddr
from nova.cells import rpcapi as cells_rpcapi
from nova.compute import flavors
from nova import db
from nova import exception
from nova.network import model as network_model
from nova import notifications
from nova.objects import instance
from nova.objects import instance_info_cache
from nova.objects import instance_numa_topology
from nova.objects import pci_device
from nova.objects import security_group
from nova.openstack.common import timeutils
from nova import test
from nova.tests.api.openstack import fakes
from nova.tests import fake_instance
from nova.tests.objects import test_instance_fault
from nova.tests.objects import test_instance_info_cache
from nova.tests.objects import test_instance_numa_topology
from nova.tests.objects import test_objects
from nova.tests.objects import test_security_group
from nova import utils
class _TestInstanceObject(object):
@property
def fake_instance(self):
fake_instance = fakes.stub_instance(id=2,
access_ipv4='1.2.3.4',
access_ipv6='::1')
fake_instance['cell_name'] = 'api!child'
fake_instance['scheduled_at'] = None
fake_instance['terminated_at'] = None
fake_instance['deleted_at'] = None
fake_instance['created_at'] = None
fake_instance['updated_at'] = None
fake_instance['launched_at'] = (
fake_instance['launched_at'].replace(
tzinfo=iso8601.iso8601.Utc(), microsecond=0))
fake_instance['deleted'] = False
fake_instance['info_cache']['instance_uuid'] = fake_instance['uuid']
fake_instance['security_groups'] = []
fake_instance['pci_devices'] = []
fake_instance['user_id'] = self.context.user_id
fake_instance['project_id'] = self.context.project_id
return fake_instance
def test_datetime_deserialization(self):
red_letter_date = timeutils.parse_isotime(
timeutils.isotime(datetime.datetime(1955, 11, 5)))
inst = instance.Instance(uuid='fake-uuid', launched_at=red_letter_date)
primitive = inst.obj_to_primitive()
expected = {'nova_object.name': 'Instance',
'nova_object.namespace': 'nova',
'nova_object.version': '1.15',
'nova_object.data':
{'uuid': 'fake-uuid',
'launched_at': '1955-11-05T00:00:00Z'},
'nova_object.changes': ['launched_at', 'uuid']}
self.assertEqual(primitive, expected)
inst2 = instance.Instance.obj_from_primitive(primitive)
self.assertIsInstance(inst2.launched_at, datetime.datetime)
self.assertEqual(inst2.launched_at, red_letter_date)
def test_ip_deserialization(self):
inst = instance.Instance(uuid='fake-uuid', access_ip_v4='1.2.3.4',
access_ip_v6='::1')
primitive = inst.obj_to_primitive()
expected = {'nova_object.name': 'Instance',
'nova_object.namespace': 'nova',
'nova_object.version': '1.15',
'nova_object.data':
{'uuid': 'fake-uuid',
'access_ip_v4': '1.2.3.4',
'access_ip_v6': '::1'},
'nova_object.changes': ['uuid', 'access_ip_v6',
'access_ip_v4']}
self.assertEqual(primitive, expected)
inst2 = instance.Instance.obj_from_primitive(primitive)
self.assertIsInstance(inst2.access_ip_v4, netaddr.IPAddress)
self.assertIsInstance(inst2.access_ip_v6, netaddr.IPAddress)
self.assertEqual(inst2.access_ip_v4, netaddr.IPAddress('1.2.3.4'))
self.assertEqual(inst2.access_ip_v6, netaddr.IPAddress('::1'))
def test_get_without_expected(self):
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
db.instance_get_by_uuid(self.context, 'uuid',
columns_to_join=[],
use_slave=False
).AndReturn(self.fake_instance)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(self.context, 'uuid',
expected_attrs=[])
for attr in instance.INSTANCE_OPTIONAL_ATTRS:
self.assertFalse(inst.obj_attr_is_set(attr))
self.assertRemotes()
def test_get_with_expected(self):
self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids')
self.mox.StubOutWithMock(
db, 'instance_extra_get_by_instance_uuid')
exp_cols = instance.INSTANCE_OPTIONAL_ATTRS[:]
exp_cols.remove('fault')
exp_cols.remove('numa_topology')
db.instance_get_by_uuid(
self.context, 'uuid',
columns_to_join=exp_cols,
use_slave=False
).AndReturn(self.fake_instance)
fake_faults = test_instance_fault.fake_faults
db.instance_fault_get_by_instance_uuids(
self.context, [self.fake_instance['uuid']]
).AndReturn(fake_faults)
fake_topology = test_instance_numa_topology.fake_db_topology
db.instance_extra_get_by_instance_uuid(
self.context, self.fake_instance['uuid']
).AndReturn(fake_topology)
self.mox.ReplayAll()
inst = instance.Instance.get_by_uuid(
self.context, 'uuid',
expected_attrs=instance.INSTANCE_OPTIONAL_ATTRS)
for attr in instance.INSTANCE_OPTIONAL_ATTRS:
self.assertTrue(inst.obj_attr_is_set(attr))
self.assertRemotes()
    def test_get_by_id(self):
        """get_by_id fetches via db.instance_get with the default joins."""
        self.mox.StubOutWithMock(db, 'instance_get')
        db.instance_get(self.context, 'instid',
                        columns_to_join=['info_cache',
                                         'security_groups']
                        ).AndReturn(self.fake_instance)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_id(self.context, 'instid')
        self.assertEqual(inst.uuid, self.fake_instance['uuid'])
        self.assertRemotes()
    def test_load(self):
        """Accessing an unset attribute triggers exactly one lazy-load
        round trip; a second access uses the cached value.
        """
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        fake_uuid = self.fake_instance['uuid']
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(self.fake_instance)
        # second fetch joins only the attribute being lazy-loaded
        fake_inst2 = dict(self.fake_instance,
                          system_metadata=[{'key': 'foo', 'value': 'bar'}])
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['system_metadata'],
                                use_slave=False
                                ).AndReturn(fake_inst2)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        self.assertFalse(hasattr(inst, '_system_metadata'))
        sys_meta = inst.system_metadata
        self.assertEqual(sys_meta, {'foo': 'bar'})
        self.assertTrue(hasattr(inst, '_system_metadata'))
        # Make sure we don't run load again
        sys_meta2 = inst.system_metadata
        self.assertEqual(sys_meta2, {'foo': 'bar'})
        self.assertRemotes()
def test_load_invalid(self):
inst = instance.Instance(context=self.context, uuid='fake-uuid')
self.assertRaises(exception.ObjectActionError,
inst.obj_load_attr, 'foo')
    def test_get_remote(self):
        """Fields survive a (conductor-style) remote fetch intact."""
        # isotime doesn't have microseconds and is always UTC
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        fake_instance = self.fake_instance
        db.instance_get_by_uuid(self.context, 'fake-uuid',
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_instance)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, 'fake-uuid')
        self.assertEqual(inst.id, fake_instance['id'])
        self.assertEqual(inst.launched_at, fake_instance['launched_at'])
        # IP fields come back as IPAddress objects; compare as strings
        self.assertEqual(str(inst.access_ip_v4),
                         fake_instance['access_ip_v4'])
        self.assertEqual(str(inst.access_ip_v6),
                         fake_instance['access_ip_v6'])
        self.assertRemotes()
    def test_refresh(self):
        """refresh() re-fetches from the DB (picking up the new host),
        refreshes the nested info_cache, and clears pending changes.
        """
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        fake_uuid = self.fake_instance['uuid']
        # first fetch: initial load; second fetch: the refresh
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(dict(self.fake_instance,
                                                 host='orig-host'))
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(dict(self.fake_instance,
                                                 host='new-host'))
        self.mox.StubOutWithMock(instance_info_cache.InstanceInfoCache,
                                 'refresh')
        instance_info_cache.InstanceInfoCache.refresh()
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        self.assertEqual(inst.host, 'orig-host')
        inst.refresh()
        self.assertEqual(inst.host, 'new-host')
        self.assertRemotes()
        self.assertEqual(set([]), inst.obj_what_changed())
    def test_refresh_does_not_recurse(self):
        """refresh() re-fetches only set attrs; the contextless copy it
        gets back makes the attribute copy raise OrphanedObjectError
        instead of recursing into further loads.
        """
        inst = instance.Instance(context=self.context, uuid='fake-uuid',
                                 metadata={})
        inst_copy = instance.Instance()
        inst_copy.uuid = inst.uuid
        self.mox.StubOutWithMock(instance.Instance, 'get_by_uuid')
        instance.Instance.get_by_uuid(self.context, uuid=inst.uuid,
                                      expected_attrs=['metadata'],
                                      use_slave=False
                                      ).AndReturn(inst_copy)
        self.mox.ReplayAll()
        self.assertRaises(exception.OrphanedObjectError, inst.refresh)
    def _save_test_helper(self, cell_type, save_kwargs):
        """Common code for testing save() for cells/non-cells.

        :param cell_type: None, 'api' or 'compute'; selects which cells
            RPC call (if any) is expected after the DB update.
        :param save_kwargs: kwargs forwarded to Instance.save(); may also
            carry 'instance_version' to simulate an older object version.
        """
        if cell_type:
            self.flags(enable=True, cell_type=cell_type, group='cells')
        else:
            self.flags(enable=False, group='cells')
        old_ref = dict(self.fake_instance, host='oldhost', user_data='old',
                       vm_state='old', task_state='old')
        fake_uuid = old_ref['uuid']
        expected_updates = dict(vm_state='meow', task_state='wuff',
                                user_data='new')
        new_ref = dict(old_ref, host='newhost', **expected_updates)
        exp_vm_state = save_kwargs.get('expected_vm_state')
        exp_task_state = save_kwargs.get('expected_task_state')
        admin_reset = save_kwargs.get('admin_state_reset', False)
        if exp_vm_state:
            expected_updates['expected_vm_state'] = exp_vm_state
        if exp_task_state:
            # v1.9 objects expand image_snapshot to also accept the
            # image_snapshot_pending task state
            if (exp_task_state == 'image_snapshot' and
                    'instance_version' in save_kwargs and
                    save_kwargs['instance_version'] == '1.9'):
                expected_updates['expected_task_state'] = [
                    'image_snapshot', 'image_snapshot_pending']
            else:
                expected_updates['expected_task_state'] = exp_task_state
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.StubOutWithMock(db, 'instance_info_cache_update')
        cells_api_mock = self.mox.CreateMock(cells_rpcapi.CellsAPI)
        self.mox.StubOutWithMock(cells_api_mock,
                                 'instance_update_at_top')
        self.mox.StubOutWithMock(cells_api_mock,
                                 'instance_update_from_api')
        self.mox.StubOutWithMock(cells_rpcapi, 'CellsAPI',
                                 use_mock_anything=True)
        self.mox.StubOutWithMock(notifications, 'send_update')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(old_ref)
        db.instance_update_and_get_original(
            self.context, fake_uuid, expected_updates,
            update_cells=False,
            columns_to_join=['info_cache', 'security_groups',
                             'system_metadata']
            ).AndReturn((old_ref, new_ref))
        # which cells RPC (if any) fires depends on the cell type
        if cell_type == 'api':
            cells_rpcapi.CellsAPI().AndReturn(cells_api_mock)
            cells_api_mock.instance_update_from_api(
                    self.context, mox.IsA(instance.Instance),
                    exp_vm_state, exp_task_state, admin_reset)
        elif cell_type == 'compute':
            cells_rpcapi.CellsAPI().AndReturn(cells_api_mock)
            cells_api_mock.instance_update_at_top(self.context, new_ref)
        notifications.send_update(self.context, mox.IgnoreArg(),
                                  mox.IgnoreArg())
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, old_ref['uuid'])
        if 'instance_version' in save_kwargs:
            inst.VERSION = save_kwargs.pop('instance_version')
        self.assertEqual('old', inst.task_state)
        self.assertEqual('old', inst.vm_state)
        self.assertEqual('old', inst.user_data)
        inst.vm_state = 'meow'
        inst.task_state = 'wuff'
        inst.user_data = 'new'
        inst.save(**save_kwargs)
        self.assertEqual('newhost', inst.host)
        self.assertEqual('meow', inst.vm_state)
        self.assertEqual('wuff', inst.task_state)
        self.assertEqual('new', inst.user_data)
        self.assertEqual(set([]), inst.obj_what_changed())
    def test_save(self):
        """save() without cells enabled."""
        self._save_test_helper(None, {})
    def test_save_in_api_cell(self):
        """save() in an API cell goes through instance_update_from_api."""
        self._save_test_helper('api', {})
    def test_save_in_compute_cell(self):
        """save() in a compute cell syncs via instance_update_at_top."""
        self._save_test_helper('compute', {})
    def test_save_exp_vm_state(self):
        """expected_vm_state is passed through to the DB update."""
        self._save_test_helper(None, {'expected_vm_state': ['meow']})
    def test_save_exp_task_state(self):
        """expected_task_state is passed through to the DB update."""
        self._save_test_helper(None, {'expected_task_state': ['meow']})
    def test_save_exp_task_state_havana(self):
        """v1.9 objects expand image_snapshot expected_task_state."""
        self._save_test_helper(None, {
                'expected_task_state': 'image_snapshot',
                'instance_version': '1.9'})
    def test_save_exp_vm_state_api_cell(self):
        """expected_vm_state is forwarded to the API cell update."""
        self._save_test_helper('api', {'expected_vm_state': ['meow']})
    def test_save_exp_task_state_api_cell(self):
        """expected_task_state is forwarded to the API cell update."""
        self._save_test_helper('api', {'expected_task_state': ['meow']})
    def test_save_exp_task_state_api_cell_admin_reset(self):
        """admin_state_reset is forwarded to the API cell update."""
        self._save_test_helper('api', {'admin_state_reset': True})
    def test_save_rename_sends_notification(self):
        """Changing only display_name still emits an update notification."""
        # Tests that simply changing the 'display_name' on the instance
        # will send a notification.
        self.flags(enable=False, group='cells')
        old_ref = dict(self.fake_instance, display_name='hello')
        fake_uuid = old_ref['uuid']
        expected_updates = dict(display_name='goodbye')
        new_ref = dict(old_ref, **expected_updates)
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.StubOutWithMock(notifications, 'send_update')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(old_ref)
        db.instance_update_and_get_original(
            self.context, fake_uuid, expected_updates, update_cells=False,
            columns_to_join=['info_cache', 'security_groups',
                             'system_metadata']
            ).AndReturn((old_ref, new_ref))
        notifications.send_update(self.context, mox.IgnoreArg(),
                                  mox.IgnoreArg())
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, old_ref['uuid'],
                                             use_slave=False)
        self.assertEqual('hello', inst.display_name)
        inst.display_name = 'goodbye'
        inst.save()
        self.assertEqual('goodbye', inst.display_name)
        self.assertEqual(set([]), inst.obj_what_changed())
    @mock.patch('nova.db.instance_update_and_get_original')
    @mock.patch('nova.objects.Instance._from_db_object')
    def test_save_does_not_refresh_pci_devices(self, mock_fdo, mock_update):
        """save() must not include pci_devices in the attrs re-hydrated
        from the DB result.
        """
        # NOTE(danms): This tests that we don't update the pci_devices
        # field from the contents of the database. This is not because we
        # don't necessarily want to, but because the way pci_devices is
        # currently implemented it causes versioning issues. When that is
        # resolved, this test should go away.
        mock_update.return_value = None, None
        inst = instance.Instance(context=self.context, id=123)
        inst.uuid = 'foo'
        inst.pci_devices = pci_device.PciDeviceList()
        inst.save()
        self.assertNotIn('pci_devices',
                         mock_fdo.call_args_list[0][1]['expected_attrs'])
    def test_get_deleted(self):
        """A non-zero DB 'deleted' value surfaces as boolean True."""
        fake_inst = dict(self.fake_instance, id=123, deleted=123)
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        # NOTE(danms): Make sure it's actually a bool
        self.assertEqual(inst.deleted, True)
    def test_get_not_cleaned(self):
        """A NULL DB 'cleaned' value surfaces as boolean False."""
        fake_inst = dict(self.fake_instance, id=123, cleaned=None)
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        # NOTE(mikal): Make sure it's actually a bool
        self.assertEqual(inst.cleaned, False)
    def test_get_cleaned(self):
        """A DB 'cleaned' value of 1 surfaces as boolean True."""
        fake_inst = dict(self.fake_instance, id=123, cleaned=1)
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        # NOTE(mikal): Make sure it's actually a bool
        self.assertEqual(inst.cleaned, True)
    def test_with_info_cache(self):
        """info_cache hydrates from the joined row, and saving a changed
        network_info updates the cache via instance_info_cache_update.
        """
        fake_inst = dict(self.fake_instance)
        fake_uuid = fake_inst['uuid']
        nwinfo1 = network_model.NetworkInfo.hydrate([{'address': 'foo'}])
        nwinfo2 = network_model.NetworkInfo.hydrate([{'address': 'bar'}])
        nwinfo1_json = nwinfo1.json()
        nwinfo2_json = nwinfo2.json()
        fake_inst['info_cache'] = dict(
            test_instance_info_cache.fake_info_cache,
            network_info=nwinfo1_json,
            instance_uuid=fake_uuid)
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.StubOutWithMock(db, 'instance_info_cache_update')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        db.instance_info_cache_update(self.context, fake_uuid,
                                      {'network_info': nwinfo2_json})
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        self.assertEqual(inst.info_cache.network_info, nwinfo1)
        self.assertEqual(inst.info_cache.instance_uuid, fake_uuid)
        inst.info_cache.network_info = nwinfo2
        # saving the instance cascades the info_cache change
        inst.save()
    def test_with_info_cache_none(self):
        """A NULL info_cache row results in info_cache being None."""
        fake_inst = dict(self.fake_instance, info_cache=None)
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
                                             ['info_cache'])
        self.assertIsNone(inst.info_cache)
    def test_with_security_groups(self):
        """security_groups hydrate as SecurityGroup objects, and a change
        to one of them is persisted by instance save().
        """
        fake_inst = dict(self.fake_instance)
        fake_uuid = fake_inst['uuid']
        fake_inst['security_groups'] = [
            {'id': 1, 'name': 'secgroup1', 'description': 'fake-desc',
             'user_id': 'fake-user', 'project_id': 'fake_project',
             'created_at': None, 'updated_at': None, 'deleted_at': None,
             'deleted': False},
            {'id': 2, 'name': 'secgroup2', 'description': 'fake-desc',
             'user_id': 'fake-user', 'project_id': 'fake_project',
             'created_at': None, 'updated_at': None, 'deleted_at': None,
             'deleted': False},
            ]
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.StubOutWithMock(db, 'security_group_update')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        # only the modified group should be written back
        db.security_group_update(self.context, 1, {'description': 'changed'}
                                 ).AndReturn(fake_inst['security_groups'][0])
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        self.assertEqual(len(inst.security_groups), 2)
        for index, group in enumerate(fake_inst['security_groups']):
            for key in group:
                self.assertEqual(group[key],
                                 inst.security_groups[index][key])
                self.assertIsInstance(inst.security_groups[index],
                                      security_group.SecurityGroup)
        self.assertEqual(inst.security_groups.obj_what_changed(), set())
        inst.security_groups[0].description = 'changed'
        inst.save()
        self.assertEqual(inst.security_groups.obj_what_changed(), set())
    def test_with_empty_security_groups(self):
        """An empty security_groups join yields an empty list."""
        fake_inst = dict(self.fake_instance, security_groups=[])
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['info_cache',
                                                 'security_groups'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid)
        self.assertEqual(0, len(inst.security_groups))
    def test_with_empty_pci_devices(self):
        """An empty pci_devices join yields an empty PciDeviceList."""
        fake_inst = dict(self.fake_instance, pci_devices=[])
        fake_uuid = fake_inst['uuid']
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['pci_devices'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
                                             ['pci_devices'])
        self.assertEqual(len(inst.pci_devices), 0)
    def test_with_pci_devices(self):
        """Joined pci_devices rows hydrate into the object's device list
        with the instance uuid carried through.
        """
        fake_inst = dict(self.fake_instance)
        fake_uuid = fake_inst['uuid']
        fake_inst['pci_devices'] = [
            {'created_at': None,
             'updated_at': None,
             'deleted_at': None,
             'deleted': None,
             'id': 2,
             'compute_node_id': 1,
             'address': 'a1',
             'vendor_id': 'v1',
             'product_id': 'p1',
             'dev_type': 't',
             'status': 'allocated',
             'dev_id': 'i',
             'label': 'l',
             'instance_uuid': fake_uuid,
             'request_id': None,
             'extra_info': '{}'},
            {
             'created_at': None,
             'updated_at': None,
             'deleted_at': None,
             'deleted': None,
             'id': 1,
             'compute_node_id': 1,
             'address': 'a',
             'vendor_id': 'v',
             'product_id': 'p',
             'dev_type': 't',
             'status': 'allocated',
             'dev_id': 'i',
             'label': 'l',
             'instance_uuid': fake_uuid,
             'request_id': None,
             'extra_info': '{}'},
            ]
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=['pci_devices'],
                                use_slave=False
                                ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
                                             ['pci_devices'])
        self.assertEqual(len(inst.pci_devices), 2)
        self.assertEqual(inst.pci_devices[0].instance_uuid, fake_uuid)
        self.assertEqual(inst.pci_devices[1].instance_uuid, fake_uuid)
    def test_with_fault(self):
        """expected_attrs=['fault'] loads the newest fault via the
        dedicated fault query.
        """
        fake_inst = dict(self.fake_instance)
        fake_uuid = fake_inst['uuid']
        fake_faults = [dict(x, instance_uuid=fake_uuid)
                       for x in test_instance_fault.fake_faults['fake-uuid']]
        self.mox.StubOutWithMock(db, 'instance_get_by_uuid')
        self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids')
        db.instance_get_by_uuid(self.context, fake_uuid,
                                columns_to_join=[],
                                use_slave=False
                                ).AndReturn(self.fake_instance)
        db.instance_fault_get_by_instance_uuids(
            self.context, [fake_uuid]).AndReturn({fake_uuid: fake_faults})
        self.mox.ReplayAll()
        inst = instance.Instance.get_by_uuid(self.context, fake_uuid,
                                             expected_attrs=['fault'])
        # the first (most relevant) fault from the list is attached
        self.assertEqual(fake_faults[0], dict(inst.fault.items()))
        self.assertRemotes()
    def test_iteritems_with_extra_attrs(self):
        """Iterating the object includes extra (non-field) attrs like
        'name' alongside set fields.
        """
        self.stubs.Set(instance.Instance, 'name', 'foo')
        inst = instance.Instance(uuid='fake-uuid')
        self.assertEqual(inst.items(),
                         {'uuid': 'fake-uuid',
                          'name': 'foo',
                          }.items())
def _test_metadata_change_tracking(self, which):
inst = instance.Instance(uuid='fake-uuid')
setattr(inst, which, {})
inst.obj_reset_changes()
getattr(inst, which)['foo'] = 'bar'
self.assertEqual(set([which]), inst.obj_what_changed())
inst.obj_reset_changes()
self.assertEqual(set(), inst.obj_what_changed())
def test_metadata_change_tracking(self):
self._test_metadata_change_tracking('metadata')
def test_system_metadata_change_tracking(self):
self._test_metadata_change_tracking('system_metadata')
    def test_create_stubbed(self):
        """create() passes set fields (including system_metadata) to
        db.instance_create.
        """
        self.mox.StubOutWithMock(db, 'instance_create')
        vals = {'host': 'foo-host',
                'memory_mb': 128,
                'system_metadata': {'foo': 'bar'}}
        fake_inst = fake_instance.fake_db_instance(**vals)
        db.instance_create(self.context, vals).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance(host='foo-host', memory_mb=128,
                                 system_metadata={'foo': 'bar'})
        inst.create(self.context)
    def test_create(self):
        """create() with no fields set sends an empty updates dict and
        hydrates from the DB result.
        """
        self.mox.StubOutWithMock(db, 'instance_create')
        db.instance_create(self.context, {}).AndReturn(self.fake_instance)
        self.mox.ReplayAll()
        inst = instance.Instance()
        inst.create(self.context)
        self.assertEqual(self.fake_instance['id'], inst.id)
def test_create_with_values(self):
inst1 = instance.Instance(user_id=self.context.user_id,
project_id=self.context.project_id,
host='foo-host')
inst1.create(self.context)
self.assertEqual(inst1.host, 'foo-host')
inst2 = instance.Instance.get_by_uuid(self.context, inst1.uuid)
self.assertEqual(inst2.host, 'foo-host')
    def test_create_with_numa_topology(self):
        """A numa_topology set before create() is persisted and can be
        fetched back by instance uuid.
        """
        inst = instance.Instance(uuid=self.fake_instance['uuid'],
                numa_topology=instance_numa_topology.InstanceNUMATopology
                    .obj_from_topology(
                        test_instance_numa_topology.fake_numa_topology))
        inst.create(self.context)
        self.assertIsNotNone(inst.numa_topology)
        got_numa_topo = (
            instance_numa_topology.InstanceNUMATopology
            .get_by_instance_uuid(self.context, inst.uuid))
        self.assertEqual(inst.numa_topology.id, got_numa_topo.id)
def test_recreate_fails(self):
inst = instance.Instance(user_id=self.context.user_id,
project_id=self.context.project_id,
host='foo-host')
inst.create(self.context)
self.assertRaises(exception.ObjectActionError, inst.create,
self.context)
    def test_create_with_special_things(self):
        """Nested objects (security groups, info_cache) are flattened to
        their primitive forms for db.instance_create.
        """
        self.mox.StubOutWithMock(db, 'instance_create')
        fake_inst = fake_instance.fake_db_instance()
        db.instance_create(self.context,
                           {'host': 'foo-host',
                            'security_groups': ['foo', 'bar'],
                            'info_cache': {'network_info': '[]'},
                            }
                           ).AndReturn(fake_inst)
        self.mox.ReplayAll()
        secgroups = security_group.SecurityGroupList()
        secgroups.objects = []
        for name in ('foo', 'bar'):
            secgroup = security_group.SecurityGroup()
            secgroup.name = name
            secgroups.objects.append(secgroup)
        info_cache = instance_info_cache.InstanceInfoCache()
        info_cache.network_info = network_model.NetworkInfo()
        inst = instance.Instance(host='foo-host', security_groups=secgroups,
                                 info_cache=info_cache)
        inst.create(self.context)
    def test_destroy_stubbed(self):
        """destroy() calls db.instance_destroy and reflects the deleted
        state/time back onto the object.
        """
        self.mox.StubOutWithMock(db, 'instance_destroy')
        deleted_at = datetime.datetime(1955, 11, 6)
        fake_inst = fake_instance.fake_db_instance(deleted_at=deleted_at,
                                                   deleted=True)
        db.instance_destroy(self.context, 'fake-uuid',
                            constraint=None).AndReturn(fake_inst)
        self.mox.ReplayAll()
        inst = instance.Instance(id=1, uuid='fake-uuid', host='foo')
        inst.destroy(self.context)
        self.assertEqual(timeutils.normalize_time(inst.deleted_at),
                         timeutils.normalize_time(deleted_at))
        self.assertTrue(inst.deleted)
def test_destroy(self):
values = {'user_id': self.context.user_id,
'project_id': self.context.project_id}
db_inst = db.instance_create(self.context, values)
inst = instance.Instance(id=db_inst['id'], uuid=db_inst['uuid'])
inst.destroy(self.context)
self.assertRaises(exception.InstanceNotFound,
db.instance_get_by_uuid, self.context,
db_inst['uuid'])
def test_destroy_host_constraint(self):
values = {'user_id': self.context.user_id,
'project_id': self.context.project_id,
'host': 'foo'}
db_inst = db.instance_create(self.context, values)
inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
inst.host = None
self.assertRaises(exception.ObjectActionError,
inst.destroy)
def test_name_does_not_trigger_lazy_loads(self):
values = {'user_id': self.context.user_id,
'project_id': self.context.project_id,
'host': 'foo'}
db_inst = db.instance_create(self.context, values)
inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
self.assertFalse(inst.obj_attr_is_set('fault'))
self.flags(instance_name_template='foo-%(uuid)s')
self.assertEqual('foo-%s' % db_inst['uuid'], inst.name)
self.assertFalse(inst.obj_attr_is_set('fault'))
def test_from_db_object_not_overwrite_info_cache(self):
info_cache = instance_info_cache.InstanceInfoCache()
inst = instance.Instance(context=self.context,
info_cache=info_cache)
db_inst = fake_instance.fake_db_instance()
db_inst['info_cache'] = dict(
test_instance_info_cache.fake_info_cache)
inst._from_db_object(self.context, inst, db_inst,
expected_attrs=['info_cache'])
self.assertIs(info_cache, inst.info_cache)
def test_compat_strings(self):
unicode_attributes = ['user_id', 'project_id', 'image_ref',
'kernel_id', 'ramdisk_id', 'hostname',
'key_name', 'key_data', 'host', 'node',
'user_data', 'availability_zone',
'display_name', 'display_description',
'launched_on', 'locked_by', 'os_type',
'architecture', 'vm_mode', 'root_device_name',
'default_ephemeral_device',
'default_swap_device', 'config_drive',
'cell_name']
inst = instance.Instance()
expected = {}
for key in unicode_attributes:
inst[key] = u'\u2603'
expected[key] = '?'
primitive = inst.obj_to_primitive(target_version='1.6')
self.assertEqual(expected, primitive['nova_object.data'])
self.assertEqual('1.6', primitive['nova_object.version'])
def test_compat_pci_devices(self):
inst = instance.Instance()
inst.pci_devices = pci_device.PciDeviceList()
primitive = inst.obj_to_primitive(target_version='1.5')
self.assertNotIn('pci_devices', primitive)
def test_compat_info_cache(self):
inst = instance.Instance()
inst.info_cache = instance_info_cache.InstanceInfoCache()
primitive = inst.obj_to_primitive(target_version='1.9')
self.assertEqual(
'1.4',
primitive['nova_object.data']['info_cache']['nova_object.version'])
    def _test_get_flavor(self, namespace):
        """get_flavor() reads flavor info back out of system_metadata
        stored under the (optionally namespaced) prefix.
        """
        prefix = '%s_' % namespace if namespace is not None else ''
        db_inst = db.instance_create(self.context, {
            'user_id': self.context.user_id,
            'project_id': self.context.project_id,
            'system_metadata': flavors.save_flavor_info(
                {}, flavors.get_default_flavor(), prefix)})
        db_flavor = flavors.extract_flavor(db_inst, prefix)
        inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
        flavor = inst.get_flavor(namespace)
        self.assertEqual(db_flavor['flavorid'], flavor.flavorid)
    def test_get_flavor(self):
        """get_flavor works both without and with a namespace."""
        self._test_get_flavor(None)
        self._test_get_flavor('foo')
    def _test_set_flavor(self, namespace):
        """set_flavor() persists flavor info into system_metadata under
        the (optionally namespaced) prefix.
        """
        prefix = '%s_' % namespace if namespace is not None else ''
        db_inst = db.instance_create(self.context, {
            'user_id': self.context.user_id,
            'project_id': self.context.project_id,
            })
        inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
        db_flavor = flavors.get_default_flavor()
        inst.set_flavor(db_flavor, namespace)
        db_inst = db.instance_get(self.context, db_inst['id'])
        self.assertEqual(
            db_flavor['flavorid'], flavors.extract_flavor(
                db_inst, prefix)['flavorid'])
    def test_set_flavor(self):
        """set_flavor works both without and with a namespace."""
        self._test_set_flavor(None)
        self._test_set_flavor('foo')
    def test_delete_flavor(self):
        """delete_flavor() removes the namespaced flavor keys from
        system_metadata.
        """
        namespace = 'foo'
        prefix = '%s_' % namespace
        db_inst = db.instance_create(self.context, {
            'user_id': self.context.user_id,
            'project_id': self.context.project_id,
            'system_metadata': flavors.save_flavor_info(
                {}, flavors.get_default_flavor(), prefix)})
        inst = instance.Instance.get_by_uuid(self.context, db_inst['uuid'])
        inst.delete_flavor(namespace)
        db_inst = db.instance_get(self.context, db_inst['id'])
        self.assertEqual({}, utils.instance_sys_meta(db_inst))
def test_delete_flavor_no_namespace_fails(self):
inst = instance.Instance(system_metadata={})
self.assertRaises(KeyError, inst.delete_flavor, None)
self.assertRaises(KeyError, inst.delete_flavor, '')
    @mock.patch.object(db, 'instance_metadata_delete')
    def test_delete_metadata_key(self, db_delete):
        """delete_metadata_key removes the key locally and in the DB,
        without leaving 'metadata' marked as changed.
        """
        inst = instance.Instance(context=self.context,
                                 id=1, uuid='fake-uuid')
        inst.metadata = {'foo': '1', 'bar': '2'}
        inst.obj_reset_changes()
        inst.delete_metadata_key('foo')
        self.assertEqual({'bar': '2'}, inst.metadata)
        self.assertEqual({}, inst.obj_get_changes())
        db_delete.assert_called_once_with(self.context, inst.uuid, 'foo')
def test_reset_changes(self):
inst = instance.Instance()
inst.metadata = {'1985': 'present'}
inst.system_metadata = {'1955': 'past'}
self.assertEqual({}, inst._orig_metadata)
inst.obj_reset_changes(['metadata'])
self.assertEqual({'1985': 'present'}, inst._orig_metadata)
self.assertEqual({}, inst._orig_system_metadata)
    def test_load_generic_calls_handler(self):
        """Touching an unset attribute routes through _load_generic with
        the attribute name.
        """
        inst = instance.Instance(context=self.context,
                                 uuid='fake-uuid')
        with mock.patch.object(inst, '_load_generic') as mock_load:
            def fake_load(name):
                # the loader must actually set the attr to stop the
                # lazy-load machinery
                inst.system_metadata = {}
            mock_load.side_effect = fake_load
            inst.system_metadata
            mock_load.assert_called_once_with('system_metadata')
    def test_load_fault_calls_handler(self):
        """Touching an unset 'fault' routes through the dedicated
        _load_fault handler (no args).
        """
        inst = instance.Instance(context=self.context,
                                 uuid='fake-uuid')
        with mock.patch.object(inst, '_load_fault') as mock_load:
            def fake_load():
                inst.fault = None
            mock_load.side_effect = fake_load
            inst.fault
            mock_load.assert_called_once_with()
    @mock.patch('nova.objects.Instance.get_by_uuid')
    def test_load_generic(self, mock_get):
        """Generic lazy-load re-fetches the instance with just the
        missing attr and copies it over without marking a change.
        """
        inst2 = instance.Instance(metadata={'foo': 'bar'})
        mock_get.return_value = inst2
        inst = instance.Instance(context=self.context,
                                 uuid='fake-uuid')
        inst.metadata
        self.assertEqual({'foo': 'bar'}, inst.metadata)
        mock_get.assert_called_once_with(self.context,
                                         uuid='fake-uuid',
                                         expected_attrs=['metadata'])
        self.assertNotIn('metadata', inst.obj_what_changed())
    @mock.patch('nova.db.instance_fault_get_by_instance_uuids')
    def test_load_fault(self, mock_get):
        """Lazy-loading 'fault' queries faults by uuid and attaches the
        first one without marking a change.
        """
        fake_fault = test_instance_fault.fake_faults['fake-uuid'][0]
        mock_get.return_value = {'fake': [fake_fault]}
        inst = instance.Instance(context=self.context, uuid='fake')
        fault = inst.fault
        mock_get.assert_called_once_with(self.context, ['fake'])
        self.assertEqual(fake_fault['id'], fault.id)
        self.assertNotIn('metadata', inst.obj_what_changed())
class TestInstanceObject(test_objects._LocalTest,
                         _TestInstanceObject):
    """Run the Instance object tests with direct (local) object calls."""
    pass
class TestRemoteInstanceObject(test_objects._RemoteTest,
                               _TestInstanceObject):
    """Run the Instance object tests through the remoted (RPC) path."""
    pass
class _TestInstanceListObject(object):
def fake_instance(self, id, updates=None):
fake_instance = fakes.stub_instance(id=2,
access_ipv4='1.2.3.4',
access_ipv6='::1')
fake_instance['scheduled_at'] = None
fake_instance['terminated_at'] = None
fake_instance['deleted_at'] = None
fake_instance['created_at'] = None
fake_instance['updated_at'] = None
fake_instance['launched_at'] = (
fake_instance['launched_at'].replace(
tzinfo=iso8601.iso8601.Utc(), microsecond=0))
fake_instance['info_cache'] = {'network_info': '[]',
'instance_uuid': fake_instance['uuid']}
fake_instance['security_groups'] = []
fake_instance['deleted'] = 0
if updates:
fake_instance.update(updates)
return fake_instance
    def test_get_all_by_filters(self):
        """get_by_filters forwards filters/sort/attrs to the DB API and
        hydrates each row into an Instance.
        """
        fakes = [self.fake_instance(1), self.fake_instance(2)]
        self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
        db.instance_get_all_by_filters(self.context, {'foo': 'bar'}, 'uuid',
                                       'asc', limit=None, marker=None,
                                       columns_to_join=['metadata'],
                                       use_slave=False).AndReturn(fakes)
        self.mox.ReplayAll()
        inst_list = instance.InstanceList.get_by_filters(
            self.context, {'foo': 'bar'}, 'uuid', 'asc',
            expected_attrs=['metadata'], use_slave=False)
        for i in range(0, len(fakes)):
            self.assertIsInstance(inst_list.objects[i], instance.Instance)
            self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
        self.assertRemotes()
    def test_get_all_by_filters_works_for_cleaned(self):
        """The 'cleaned' filter is honored (with read_deleted='yes') and
        only matching rows come back.
        """
        fakes = [self.fake_instance(1),
                 self.fake_instance(2, updates={'deleted': 2,
                                                'cleaned': None})]
        self.context.read_deleted = 'yes'
        self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
        db.instance_get_all_by_filters(self.context,
                                       {'deleted': True, 'cleaned': False},
                                       'uuid', 'asc', limit=None, marker=None,
                                       columns_to_join=['metadata'],
                                       use_slave=False).AndReturn(
                                           [fakes[1]])
        self.mox.ReplayAll()
        inst_list = instance.InstanceList.get_by_filters(
            self.context, {'deleted': True, 'cleaned': False}, 'uuid', 'asc',
            expected_attrs=['metadata'], use_slave=False)
        self.assertEqual(1, len(inst_list))
        self.assertIsInstance(inst_list.objects[0], instance.Instance)
        self.assertEqual(inst_list.objects[0].uuid, fakes[1]['uuid'])
        self.assertRemotes()
    def test_get_by_host(self):
        """get_by_host hydrates each row and propagates the context to
        every resulting object.
        """
        fakes = [self.fake_instance(1),
                 self.fake_instance(2)]
        self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
        db.instance_get_all_by_host(self.context, 'foo',
                                    columns_to_join=None,
                                    use_slave=False).AndReturn(fakes)
        self.mox.ReplayAll()
        inst_list = instance.InstanceList.get_by_host(self.context, 'foo')
        for i in range(0, len(fakes)):
            self.assertIsInstance(inst_list.objects[i], instance.Instance)
            self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
            # each member carries the calling context
            self.assertEqual(inst_list.objects[i]._context, self.context)
        self.assertEqual(inst_list.obj_what_changed(), set())
        self.assertRemotes()
    def test_get_by_host_and_node(self):
        """get_by_host_and_node maps straight onto the DB API call."""
        fakes = [self.fake_instance(1),
                 self.fake_instance(2)]
        self.mox.StubOutWithMock(db, 'instance_get_all_by_host_and_node')
        db.instance_get_all_by_host_and_node(self.context, 'foo', 'bar'
                                             ).AndReturn(fakes)
        self.mox.ReplayAll()
        inst_list = instance.InstanceList.get_by_host_and_node(self.context,
                                                               'foo', 'bar')
        for i in range(0, len(fakes)):
            self.assertIsInstance(inst_list.objects[i], instance.Instance)
            self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
        self.assertRemotes()
    def test_get_by_host_and_not_type(self):
        """get_by_host_and_not_type passes the type id as a keyword."""
        fakes = [self.fake_instance(1),
                 self.fake_instance(2)]
        self.mox.StubOutWithMock(db, 'instance_get_all_by_host_and_not_type')
        db.instance_get_all_by_host_and_not_type(self.context, 'foo',
                                                 type_id='bar').AndReturn(
                                                     fakes)
        self.mox.ReplayAll()
        inst_list = instance.InstanceList.get_by_host_and_not_type(
            self.context, 'foo', 'bar')
        for i in range(0, len(fakes)):
            self.assertIsInstance(inst_list.objects[i], instance.Instance)
            self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
        self.assertRemotes()
    def test_get_hung_in_rebooting(self):
        """get_hung_in_rebooting passes the timeout timestamp through."""
        fakes = [self.fake_instance(1),
                 self.fake_instance(2)]
        dt = timeutils.isotime()
        self.mox.StubOutWithMock(db, 'instance_get_all_hung_in_rebooting')
        db.instance_get_all_hung_in_rebooting(self.context, dt).AndReturn(
            fakes)
        self.mox.ReplayAll()
        inst_list = instance.InstanceList.get_hung_in_rebooting(self.context,
                                                                dt)
        for i in range(0, len(fakes)):
            self.assertIsInstance(inst_list.objects[i], instance.Instance)
            self.assertEqual(inst_list.objects[i].uuid, fakes[i]['uuid'])
        self.assertRemotes()
    def test_get_active_by_window_joined(self):
        """A naive 'begin' datetime is made tz-aware before the DB call."""
        fakes = [self.fake_instance(1), self.fake_instance(2)]
        # NOTE(mriedem): Send in a timezone-naive datetime since the
        # InstanceList.get_active_by_window_joined method should convert it
        # to tz-aware for the DB API call, which we'll assert with our stub.
        dt = timeutils.utcnow()
        def fake_instance_get_active_by_window_joined(context, begin, end,
                                                      project_id, host):
            # make sure begin is tz-aware
            self.assertIsNotNone(begin.utcoffset())
            self.assertIsNone(end)
            return fakes
        with mock.patch.object(db, 'instance_get_active_by_window_joined',
                               fake_instance_get_active_by_window_joined):
            inst_list = instance.InstanceList.get_active_by_window_joined(
                            self.context, dt)
        for fake, obj in zip(fakes, inst_list.objects):
            self.assertIsInstance(obj, instance.Instance)
            self.assertEqual(obj.uuid, fake['uuid'])
        self.assertRemotes()
def test_with_fault(self):
fake_insts = [
fake_instance.fake_db_instance(uuid='fake-uuid', host='host'),
fake_instance.fake_db_instance(uuid='fake-inst2', host='host'),
]
fake_faults = test_instance_fault.fake_faults
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids')
db.instance_get_all_by_host(self.context, 'host',
columns_to_join=[],
use_slave=False
).AndReturn(fake_insts)
db.instance_fault_get_by_instance_uuids(
self.context, [x['uuid'] for x in fake_insts]
).AndReturn(fake_faults)
self.mox.ReplayAll()
instances = instance.InstanceList.get_by_host(self.context, 'host',
expected_attrs=['fault'],
use_slave=False)
self.assertEqual(2, len(instances))
self.assertEqual(fake_faults['fake-uuid'][0],
dict(instances[0].fault.iteritems()))
self.assertIsNone(instances[1].fault)
def test_fill_faults(self):
self.mox.StubOutWithMock(db, 'instance_fault_get_by_instance_uuids')
inst1 = instance.Instance(uuid='uuid1')
inst2 = instance.Instance(uuid='uuid2')
insts = [inst1, inst2]
for inst in insts:
inst.obj_reset_changes()
db_faults = {
'uuid1': [{'id': 123,
'instance_uuid': 'uuid1',
'code': 456,
'message': 'Fake message',
'details': 'No details',
'host': 'foo',
'deleted': False,
'deleted_at': None,
'updated_at': None,
'created_at': None,
}
]}
db.instance_fault_get_by_instance_uuids(self.context,
[x.uuid for x in insts],
).AndReturn(db_faults)
self.mox.ReplayAll()
inst_list = instance.InstanceList()
inst_list._context = self.context
inst_list.objects = insts
faulty = inst_list.fill_faults()
self.assertEqual(faulty, ['uuid1'])
self.assertEqual(inst_list[0].fault.message,
db_faults['uuid1'][0]['message'])
self.assertIsNone(inst_list[1].fault)
for inst in inst_list:
self.assertEqual(inst.obj_what_changed(), set())
def test_get_by_security_group(self):
fake_secgroup = dict(test_security_group.fake_secgroup)
fake_secgroup['instances'] = [
fake_instance.fake_db_instance(id=1,
system_metadata={'foo': 'bar'}),
fake_instance.fake_db_instance(id=2),
]
with mock.patch.object(db, 'security_group_get') as sgg:
sgg.return_value = fake_secgroup
secgroup = security_group.SecurityGroup()
secgroup.id = fake_secgroup['id']
instances = instance.InstanceList.get_by_security_group(
self.context, secgroup)
self.assertEqual(2, len(instances))
self.assertEqual([1, 2], [x.id for x in instances])
self.assertTrue(instances[0].obj_attr_is_set('system_metadata'))
self.assertEqual({'foo': 'bar'}, instances[0].system_metadata)
class TestInstanceListObject(test_objects._LocalTest,
_TestInstanceListObject):
pass
class TestRemoteInstanceListObject(test_objects._RemoteTest,
_TestInstanceListObject):
pass
class TestInstanceObjectMisc(test.NoDBTestCase):
def test_expected_cols(self):
self.stubs.Set(instance, '_INSTANCE_OPTIONAL_JOINED_FIELDS', ['bar'])
self.assertEqual(['bar'], instance._expected_cols(['foo', 'bar']))
self.assertIsNone(instance._expected_cols(None))
| apache-2.0 |
Vagab0nd/SiCKRAGE | sickchill/oldbeard/providers/kat.py | 1 | 7549 | import re
import traceback
import urllib
from collections import OrderedDict
from urllib.parse import urljoin
import validators
from sickchill import logger
from sickchill.helper.common import convert_size, try_int
from sickchill.oldbeard import tvcache
from sickchill.oldbeard.bs4_parser import BS4Parser
from sickchill.providers.torrent.TorrentProvider import TorrentProvider
class Provider(TorrentProvider):
    """KickAssTorrents torrent provider with automatic mirror fail-over.

    The canonical domain changes frequently, so ``find_domain`` scrapes a
    mirror list and ``search`` retries through mirrors that still respond.
    """

    def __init__(self):
        super().__init__("KickAssTorrents")

        self.public = True
        # FIX: 'confirmed' was assigned True twice in the original __init__;
        # the redundant second assignment has been removed.
        self.confirmed = True
        self.minseed = 0
        self.minleech = 0

        self.mirrors = []
        self.disabled_mirrors = []

        # https://kickasskat.org/tv?field=time_add&sorder=desc
        # https://kickasskat.org/usearch/{query}/?category=tv&field=seeders&sorder=desc

        self.url = "https://kickasskat.org"
        self.urls = None

        self.custom_url = None

        self.cache = tvcache.TVCache(self)

        # Result rows alternate class 'even'/'odd' and carry a torrent id.
        self.rows_selector = dict(class_=re.compile(r"even|odd"), id=re.compile(r"torrent_.*_torrents"))

    def search(self, search_strings, age=0, ep_obj=None):
        """Search the active mirror, falling back to other mirrors when the
        current one stops responding.

        :param search_strings: dict mapping search mode (e.g. 'RSS') to a
            list of query strings.
        :param age: unused; kept for interface compatibility.
        :param ep_obj: episode being searched; used to detect anime shows.
        :returns: list of result dicts sorted by seeders, descending.
        """
        results = []
        if not (self.url and self.urls):
            self.find_domain()
            if not (self.url and self.urls):
                return results

        # NOTE(review): self.show presumably comes from the TorrentProvider
        # base class -- confirm against the base implementation.
        anime = (self.show and self.show.anime) or (ep_obj and ep_obj.show and ep_obj.show.anime) or False
        search_params = {
            "field": "seeders",
            "sorder": "desc",
            "category": ("tv", "anime")[anime]
        }

        for mode in search_strings:
            items = []
            logger.debug(_("Search Mode: {mode}".format(mode=mode)))
            for search_string in {*search_strings[mode]}:
                # search_params["q"] = (search_string, None)[mode == "RSS"]
                # RSS mode lists the newest torrents; other modes sort by seeders.
                search_params["field"] = ("seeders", "time_add")[mode == "RSS"]

                if mode != "RSS":
                    if anime:
                        continue

                    logger.debug(_("Search String: {search_string}".format(search_string=search_string)))
                    search_url = self.urls["search"].format(q=search_string)
                else:
                    search_url = self.urls["rss"]

                if self.custom_url:
                    if not validators.url(self.custom_url):
                        logger.warning("Invalid custom url: {0}".format(self.custom_url))
                        return results
                    search_url = urljoin(self.custom_url, search_url.split(self.url)[1])

                # Sort params for a stable query string (cache friendliness).
                data = self.get_url(search_url, params=OrderedDict(sorted(list(search_params.items()), key=lambda x: x[0])), returns="text")
                if not data:
                    logger.info("{url} did not return any data, it may be disabled. Trying to get a new domain".format(url=self.url))
                    self.disabled_mirrors.append(self.url)
                    self.find_domain()
                    if self.url in self.disabled_mirrors:
                        logger.info("Could not find a better mirror to try.")
                        logger.info("The search did not return data, if the results are on the site maybe try a custom url, or a different one")
                        return results

                    # This will recurse a few times until all of the mirrors are exhausted if none of them work.
                    return self.search(search_strings, age, ep_obj)

                with BS4Parser(data, "html5lib") as html:
                    labels = [cell.get_text() for cell in html.find(class_="firstr")("th")]
                    logger.info("Found {} results".format(len(html("tr", **self.rows_selector))))
                    for result in html("tr", **self.rows_selector):
                        try:
                            download_url = urllib.parse.unquote_plus(result.find(title="Torrent magnet link")["href"].split("url=")[1]) + self._custom_trackers
                            parsed_magnet = urllib.parse.parse_qs(download_url)
                            torrent_hash = self.hash_from_magnet(download_url)
                            title = result.find(class_="torrentname").find(class_="cellMainLink").get_text(strip=True)
                            if title.endswith("..."):
                                # Truncated on the page; fall back to the
                                # display name embedded in the magnet link.
                                title = parsed_magnet['dn'][0]

                            if not (title and download_url):
                                if mode != "RSS":
                                    logger.debug("Discarding torrent because We could not parse the title and url")
                                continue

                            seeders = try_int(result.find(class_="green").get_text(strip=True))
                            leechers = try_int(result.find(class_="red").get_text(strip=True))

                            # Filter unseeded torrent
                            if seeders < self.minseed or leechers < self.minleech:
                                if mode != "RSS":
                                    logger.debug("Discarding torrent because it doesn't meet the minimum seeders or leechers: {0} (S:{1} L:{2})".format
                                                 (title, seeders, leechers))
                                continue

                            if self.confirmed and not result.find(class_="ka-green"):
                                if mode != "RSS":
                                    logger.debug("Found result " + title + " but that doesn't seem like a verified result so I'm ignoring it")
                                continue

                            torrent_size = result("td")[labels.index("size")].get_text(strip=True)
                            size = convert_size(torrent_size) or -1

                            item = {'title': title, 'link': download_url, 'size': size, 'seeders': seeders, 'leechers': leechers, 'hash': torrent_hash}
                            if mode != "RSS":
                                logger.debug("Found result: {0} with {1} seeders and {2} leechers".format(title, seeders, leechers))

                            items.append(item)
                        except (AttributeError, TypeError, KeyError, ValueError, Exception):
                            logger.info(traceback.format_exc())
                            continue

            # For each search mode sort all the items by seeders if available
            items.sort(key=lambda d: try_int(d.get('seeders', 0)), reverse=True)
            results += items

        return results

    def find_domain(self):
        """Scrape the public mirror list and pick the first mirror not in
        ``disabled_mirrors``; rebuild ``urls`` for the chosen domain.

        :returns: the selected base url (also stored on ``self.url``).
        """
        data = self.get_url("https://kickass2.help")
        if data:
            with BS4Parser(data, "html5lib") as html:
                mirrors = html(class_='domainLink')
                if mirrors:
                    self.mirrors = []
                for mirror in mirrors:
                    domain = mirror["href"]
                    if domain not in self.disabled_mirrors:
                        self.mirrors.append(mirror["href"])

        if self.mirrors:
            self.url = self.mirrors[0]
            logger.info("Setting mirror to use to {url}".format(url=self.url))
        else:
            logger.warning("Unable to get a working mirror for kickasstorrents, you might need to enable another provider and disable KAT until KAT starts working "
                           "again.")

        self.urls = {"search": urljoin(self.url, "/usearch/{q}/"), "rss": urljoin(self.url, "/tv/")}

        return self.url
| gpl-3.0 |
jackrzhang/zulip | zerver/tests/test_timestamp.py | 14 | 1871 |
from django.utils.timezone import utc as timezone_utc
from zerver.lib.test_classes import ZulipTestCase
from zerver.lib.timestamp import floor_to_hour, floor_to_day, ceiling_to_hour, \
ceiling_to_day, timestamp_to_datetime, datetime_to_timestamp, \
TimezoneNotUTCException, convert_to_UTC
from datetime import datetime, timedelta
from dateutil import parser
import pytz
class TestTimestamp(ZulipTestCase):
    def test_datetime_and_timestamp_conversions(self) -> None:
        """Round-trip a known epoch second through the converters for three
        equivalent UTC spellings, and reject non-UTC datetimes."""
        timestamp = 1483228800
        for dt in [
                parser.parse('2017-01-01 00:00:00.123 UTC'),
                parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=timezone_utc),
                parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=pytz.utc)]:
            # timestamp_to_datetime truncates sub-second precision, hence
            # the 123000-microsecond adjustment.
            self.assertEqual(timestamp_to_datetime(timestamp), dt-timedelta(microseconds=123000))
            self.assertEqual(datetime_to_timestamp(dt), timestamp)

        for dt in [
                parser.parse('2017-01-01 00:00:00.123+01:00'),
                parser.parse('2017-01-01 00:00:00.123')]:
            with self.assertRaises(TimezoneNotUTCException):
                datetime_to_timestamp(dt)

    def test_convert_to_UTC(self) -> None:
        """convert_to_UTC handles aware-UTC, naive, and non-UTC-offset input."""
        utc_datetime = parser.parse('2017-01-01 00:00:00.123 UTC')
        for dt in [
                parser.parse('2017-01-01 00:00:00.123').replace(tzinfo=timezone_utc),
                parser.parse('2017-01-01 00:00:00.123'),
                parser.parse('2017-01-01 05:00:00.123+05')]:
            self.assertEqual(convert_to_UTC(dt), utc_datetime)

    def test_enforce_UTC(self) -> None:
        """Every rounding helper must reject a timezone-naive datetime."""
        non_utc_datetime = parser.parse('2017-01-01 00:00:00.123')
        # BUG FIX: the original list repeated ceiling_to_hour and never
        # exercised ceiling_to_day, even though it is imported above.
        for function in [floor_to_hour, floor_to_day, ceiling_to_hour, ceiling_to_day]:
            with self.assertRaises(TimezoneNotUTCException):
                function(non_utc_datetime)
| apache-2.0 |
lnielsen/invenio | invenio/legacy/bibedit/webinterface.py | 2 | 13635 | ## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
# pylint: disable=C0103
"""Invenio BibEdit Administrator Interface."""
__revision__ = "$Id"
__lastupdated__ = """$Date: 2008/08/12 09:26:46 $"""
import cProfile
import cStringIO
import pstats
from flask.ext.login import current_user
from invenio.utils.json import json, json_unicode_to_utf8, CFG_JSON_AVAILABLE
from invenio.modules.access.engine import acc_authorize_action
from invenio.legacy.bibedit.engine import perform_request_ajax, perform_request_init, \
perform_request_newticket, perform_request_compare, \
perform_request_init_template_interface, \
perform_request_ajax_template_interface
from invenio.legacy.bibedit.utils import user_can_edit_record_collection
from invenio.config import CFG_SITE_LANG, CFG_SITE_SECURE_URL, CFG_SITE_RECORD
from invenio.base.i18n import gettext_set_language
from invenio.utils.url import redirect_to_url
from invenio.ext.legacy.handler import WebInterfaceDirectory, wash_urlargd
from invenio.legacy.webpage import page
from invenio.legacy.webuser import page_not_authorized
# Breadcrumb trails rendered at the top of the BibEdit admin pages.
navtrail = (' <a class="navtrail" href=\"%s/help/admin\">Admin Area</a> '
            ) % CFG_SITE_SECURE_URL

navtrail_bibedit = (' <a class="navtrail" href=\"%s/help/admin\">Admin Area</a> ' + \
                    ' > <a class="navtrail" href=\"%s/%s/edit\">Record Editor</a>'
                    ) % (CFG_SITE_SECURE_URL, CFG_SITE_SECURE_URL, CFG_SITE_RECORD)
def wrap_json_req_profiler(func):
    """Decorator for request handlers: when the request form contains an
    "ajaxProfile" key, run the handler under cProfile and attach the
    formatted statistics to the JSON response under "profilerStats".
    Otherwise the handler is called unchanged."""
    def json_req_profiler(self, req, form):
        if "ajaxProfile" in form:
            profiler = cProfile.Profile()
            return_val = profiler.runcall(func, self, req, form)

            results = cStringIO.StringIO()
            stats = pstats.Stats(profiler, stream=results)
            stats.sort_stats('cumulative')
            # Cap the report at the 100 most expensive entries.
            stats.print_stats(100)

            json_in = json.loads(str(form['jsondata']))
            # Deunicode all strings (Invenio doesn't have unicode
            # support).
            json_in = json_unicode_to_utf8(json_in)

            json_data = json.loads(return_val)
            json_data.update({"profilerStats": "<pre style='overflow: scroll'>" + json_in['requestType'] + results.getvalue() + "</pre>"})
            return json.dumps(json_data)
        else:
            return func(self, req, form)
    return json_req_profiler
class WebInterfaceEditPages(WebInterfaceDirectory):
    """Defines the set of /edit pages."""

    _exports = ['', 'new_ticket', 'compare_revisions', 'templates']

    def __init__(self, recid=None):
        """Initialize.

        :param recid: record id this editor instance is bound to, or None
            for the generic /edit entry point.
        """
        self.recid = recid

    @wrap_json_req_profiler
    def index(self, req, form):
        """Handle all BibEdit requests.
        The responsibilities of this function are:
        * JSON decoding and encoding.
        * Redirection, if necessary.
        * Authorization.
        * Calling the appropriate function from the engine.
        """
        uid = current_user.get_id()
        argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG)})

        # Abort if the simplejson module isn't available
        if not CFG_JSON_AVAILABLE:
            title = 'Record Editor'
            body = '''Sorry, the record editor cannot operate when the
                `simplejson' module is not installed. Please see the INSTALL
                file.'''
            return page(title = title,
                        body = body,
                        errors = [],
                        warnings = [],
                        uid = uid,
                        language = argd['ln'],
                        navtrail = navtrail,
                        lastupdated = __lastupdated__,
                        req = req,
                        body_css_classes = ['bibedit'])

        # If it is an Ajax request, extract any JSON data.
        ajax_request, recid = False, None
        if 'jsondata' in form:
            json_data = json.loads(str(form['jsondata']))
            # Deunicode all strings (Invenio doesn't have unicode
            # support).
            json_data = json_unicode_to_utf8(json_data)
            ajax_request = True
            if 'recID' in json_data:
                recid = json_data['recID']
            json_response = {'resultCode': 0, 'ID': json_data['ID']}

        # Authorization.
        if current_user.is_guest:
            # User is not logged in.
            if not ajax_request:
                # Do not display the introductory recID selection box to guest
                # users (as it used to be with v0.99.0):
                auth_code, auth_message = acc_authorize_action(req,
                                                               'runbibedit')
                referer = '/edit/'
                if self.recid:
                    referer = '/%s/%s/edit/' % (CFG_SITE_RECORD, self.recid)
                return page_not_authorized(req=req, referer=referer,
                                           text=auth_message, navtrail=navtrail)
            else:
                # Session has most likely timed out.
                json_response.update({'resultCode': 100})
                return json.dumps(json_response)

        elif self.recid:
            # Handle RESTful calls from logged in users by redirecting to
            # generic URL.
            redirect_to_url(req, '%s/%s/edit/#state=edit&recid=%s&recrev=%s' % (
                    CFG_SITE_SECURE_URL, CFG_SITE_RECORD, self.recid, ""))

        elif recid is not None:
            json_response.update({'recID': recid})
            if json_data['requestType'] == "getRecord":
                # Authorize access to record.
                if not user_can_edit_record_collection(req, recid):
                    json_response.update({'resultCode': 101})
                    return json.dumps(json_response)

        # Handle request.
        if not ajax_request:
            # Show BibEdit start page.
            body, errors, warnings = perform_request_init(uid, argd['ln'], req, __lastupdated__)
            title = 'Record Editor'
            return page(title = title,
                        body = body,
                        errors = errors,
                        warnings = warnings,
                        uid = uid,
                        language = argd['ln'],
                        navtrail = navtrail,
                        lastupdated = __lastupdated__,
                        req = req,
                        body_css_classes = ['bibedit'])
        else:
            # Handle AJAX request.
            json_response.update(perform_request_ajax(req, recid, uid,
                                                      json_data))
            return json.dumps(json_response)

    def compare_revisions(self, req, form):
        """Handle the compare revisions request: render a diff of two
        revisions (rev1, rev2) of the record identified by recid."""
        argd = wash_urlargd(form, { \
                'ln': (str, CFG_SITE_LANG), \
                'rev1' : (str, ''), \
                'rev2' : (str, ''), \
                'recid': (int, 0)})

        ln = argd['ln']
        uid = current_user.get_id()
        _ = gettext_set_language(ln)

        # Checking if currently logged user has permission to perform this request
        auth_code, auth_message = acc_authorize_action(req, 'runbibedit')
        if auth_code != 0:
            return page_not_authorized(req=req, referer="/edit",
                                       text=auth_message, navtrail=navtrail)
        recid = argd['recid']
        rev1 = argd['rev1']
        rev2 = argd['rev2']
        ln = argd['ln']

        body, errors, warnings = perform_request_compare(ln, recid, rev1, rev2)

        return page(title = _("Comparing two record revisions"),
                    body = body,
                    errors = errors,
                    warnings = warnings,
                    uid = uid,
                    language = ln,
                    navtrail = navtrail,
                    lastupdated = __lastupdated__,
                    req = req,
                    body_css_classes = ['bibedit'])

    def new_ticket(self, req, form):
        """handle a edit/new_ticket request: create a ticket for the record
        and redirect to it, or show an error page on failure."""
        argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG), 'recid': (int, 0)})
        ln = argd['ln']
        _ = gettext_set_language(ln)
        auth_code, auth_message = acc_authorize_action(req, 'runbibedit')
        if auth_code != 0:
            return page_not_authorized(req=req, referer="/edit",
                                       text=auth_message, navtrail=navtrail)
        uid = current_user.get_id()
        if argd['recid']:
            (errmsg, url) = perform_request_newticket(argd['recid'], uid)
            if errmsg:
                return page(title = _("Failed to create a ticket"),
                            body = _("Error")+": "+errmsg,
                            errors = [],
                            warnings = [],
                            uid = uid,
                            language = ln,
                            navtrail = navtrail,
                            lastupdated = __lastupdated__,
                            req = req,
                            body_css_classes = ['bibedit'])
            else:
                #redirect..
                redirect_to_url(req, url)

    def templates(self, req, form):
        """handle a edit/templates request: serve the template-manager page
        or dispatch its AJAX requests."""
        uid = current_user.get_id()
        argd = wash_urlargd(form, {'ln': (str, CFG_SITE_LANG)})

        # Abort if the simplejson module isn't available
        if not CFG_JSON_AVAILABLE:
            title = 'Record Editor Template Manager'
            body = '''Sorry, the record editor cannot operate when the
                `simplejson' module is not installed. Please see the INSTALL
                file.'''
            return page(title = title,
                        body = body,
                        errors = [],
                        warnings = [],
                        uid = uid,
                        language = argd['ln'],
                        navtrail = navtrail_bibedit,
                        lastupdated = __lastupdated__,
                        req = req,
                        body_css_classes = ['bibedit'])

        # If it is an Ajax request, extract any JSON data.
        ajax_request = False
        if 'jsondata' in form:
            json_data = json.loads(str(form['jsondata']))
            # Deunicode all strings (Invenio doesn't have unicode
            # support).
            json_data = json_unicode_to_utf8(json_data)
            ajax_request = True
            json_response = {'resultCode': 0}

        # Authorization.
        if current_user.is_guest:
            # User is not logged in.
            if not ajax_request:
                # Do not display the introductory recID selection box to guest
                # users (as it used to be with v0.99.0):
                dummy_auth_code, auth_message = acc_authorize_action(req,
                                                                     'runbibedit')
                referer = '/edit'
                return page_not_authorized(req=req, referer=referer,
                                           text=auth_message, navtrail=navtrail)
            else:
                # Session has most likely timed out.
                json_response.update({'resultCode': 100})
                return json.dumps(json_response)

        # Handle request.
        if not ajax_request:
            # Show BibEdit template management start page.
            body, errors, warnings = perform_request_init_template_interface()
            title = 'Record Editor Template Manager'
            return page(title = title,
                        body = body,
                        errors = errors,
                        warnings = warnings,
                        uid = uid,
                        language = argd['ln'],
                        navtrail = navtrail_bibedit,
                        lastupdated = __lastupdated__,
                        req = req,
                        body_css_classes = ['bibedit'])
        else:
            # Handle AJAX request.
            json_response.update(perform_request_ajax_template_interface(json_data))
            return json.dumps(json_response)
def __call__(self, req, form):
"""Redirect calls without final slash."""
if self.recid:
redirect_to_url(req, '%s/%s/%s/edit/' % (CFG_SITE_SECURE_URL,
CFG_SITE_RECORD,
self.recid))
else:
redirect_to_url(req, '%s/%s/edit/' % (CFG_SITE_SECURE_URL, CFG_SITE_RECORD))
| gpl-2.0 |
hbrls/weixin-api-mockup | appl/jssdk/views.py | 1 | 1060 | # -*- coding: utf-8 -*-
import logging
from uuid import uuid4
from flask import Blueprint, request, current_app, jsonify
_logger = logging.getLogger(__name__)
mod = Blueprint('jssdk', __name__, template_folder='templates')
@mod.route('/cgi-bin/ticket/getticket', methods=['GET'])
def getticket():
    """Mock endpoint for fetching a jsapi_ticket.

    See http://mp.weixin.qq.com/wiki/7/aaa137b55fb2e0456bf8dd9148dd613f.html#.E9.99.84.E5.BD.951-JS-SDK.E4.BD.BF.E7.94.A8.E6.9D.83.E9.99.90.E7.AD.BE.E5.90.8D.E7.AE.97.E6.B3.95
    """
    # Guard clauses: reject missing token, then wrong/missing type.
    if not request.args.get('access_token', None):
        return jsonify({
            'errcode': 40014,
            'errmsg': '不合法的 access_token'
        })

    if request.args.get('type', None) != 'jsapi':
        return jsonify({
            'errcode': 40014,
            'errmsg': '不合法的 type'
        })

    return jsonify({
        'errcode': 0,
        'errmsg': 'ok',
        'ticket': 'FAKE_JSAPI_TICKET',
        'expires_in': 7200,
    })
| mit |
rahiel/shellstats | shellstats.py | 1 | 3629 | # -*- coding: utf-8 -*-
from __future__ import division
from os import getenv
from os.path import isfile
from sys import exit
import click
@click.command()
@click.option("--n", default=10, help="How many commands to show.")
@click.option("--plot", is_flag=True, help="Plot command usage in pie chart.")
@click.option("--command", default=None,
              help="Most frequent subcommands for command, e.g. sudo, git.")
@click.option("--history-file", type=click.Path(exists=True, readable=True),
              default=None, help="Read shell history from history-file.")
@click.option("--shell", default=None,
              help="Specify shell history format: bash, fish or zsh.")
def main(n, plot, command, history_file, shell):
    """Print the most frequently used shell commands."""
    history = get_history(history_file, shell, command)

    # Tally the first word of every history line.
    tally = {}
    for entry in history:
        name = entry.split()[0]
        tally[name] = tally.get(name, 0) + 1

    total = len(history)
    # counts :: [(command, num_occurance)]
    counts = sorted(tally.items(), key=lambda pair: pair[1], reverse=True)
    print_top(n, counts, total)

    if plot:
        pie_top(n, counts, command)
    return counts
def pie_top(n, counts, command):
    """Show a pie chart of n most used commands.

    matplotlib is imported lazily so the rest of the tool works without it;
    a missing install aborts with a hint instead of a traceback.
    """
    try:
        import matplotlib.pyplot as plt
    except ImportError:
        click.echo(click.style("Please install matplotlib for plotting.", fg="red"))
        exit()
    label, x = zip(*counts[:n])
    fig = plt.figure()
    fig.canvas.set_window_title("ShellStats")
    # aspect=1 keeps the pie circular.
    plt.axes(aspect=1)
    if command:
        title = "Top {0} used {1} subcommands.".format(min(n, len(counts)), command)
    else:
        title = "Top {0} used shell commands.".format(min(n, len(counts)))
    plt.title(title)
    plt.pie(x, labels=label)
    plt.show()
def print_top(n, counts, total):
    """Print the top n used commands as an aligned table.

    :param n: maximum number of commands to print.
    :param counts: list of (command, occurrences) pairs, most frequent first.
    :param total: total number of history entries, used for percentages.
    """
    click.echo("{:>3} {:<20} {:<10} {:<3}"
               .format('', "Command", "Count", "Percentage"))
    # Clamp to the available number of results for short histories.
    # (Clearer than the original `min(range(n), range(len(counts)), key=len)`,
    # which picked the shorter of two ranges — same iteration, obscure form.)
    for i in range(min(n, len(counts))):
        cmd, count = counts[i]
        click.echo("{i:>3} {cmd:<20} {count:<10} {percent:<3.3}%"
                   .format(i=i+1, cmd=cmd, count=count,
                           percent=count / total * 100))
def get_history(history_file, shell, command):
    """Get usage history for the shell in use.

    :param history_file: explicit path to a history file, or None to
        auto-detect one in the user's home directory.
    :param shell: shell name ('bash', 'fish' or 'zsh'); when None it is
        derived from the SHELL environment variable.
    :param command: if given, return only the arguments of lines starting
        with this command (e.g. 'git status' -> 'status').
    :returns: list of non-empty, normalized history lines.
    """
    # FIX: getenv("SHELL") may return None (SHELL unset); previously this
    # raised AttributeError on .split(). Fall through to the
    # "history file not found" error path instead.
    shell = shell or (getenv("SHELL") or "").split('/')[-1]

    if history_file is None:
        home = getenv("HOME") + '/'
        hist_files = {"bash": [".bash_history"],
                      "fish": [".config/fish/fish_history"],
                      "zsh": [".zhistory", ".zsh_history"]}
        if shell in hist_files:
            for hist_file in hist_files[shell]:
                if isfile(home + hist_file):
                    history_file = home + hist_file
        if not history_file:
            click.echo(click.style("Shell history file not found.", fg="red"))
            exit()

    with open(history_file, 'r') as h:
        history = [l.strip() for l in h.readlines() if l.strip()]

    if shell == "fish":
        # fish stores YAML-ish entries; keep only the '- cmd: ' payloads.
        history = [l[7:] for l in history if l.startswith("- cmd:")]
    elif shell == "zsh":
        # zsh extended-history lines look like ': <ts>:<dur>;<cmd>'.
        hist = []
        for l in history:
            if l.startswith(": "):
                hist.append(l.split(';', 1)[-1])
            else:
                hist.append(l)
        history = hist

    if command:
        history = [l[len(command) + 1:] for l in history if l.startswith(str(command))]
    return history
| mit |
18padx08/PPTex | PPTexEnv_x86_64/lib/python2.7/site-packages/sympy/core/tests/test_eval_power.py | 9 | 11820 | from sympy.core import (Rational, Symbol, S, Float, Integer, Number, Pow,
Basic, I, nan, pi, symbols)
from sympy.core.tests.test_evalf import NS
from sympy.functions.elementary.miscellaneous import sqrt, cbrt
from sympy.functions.elementary.exponential import exp, log
from sympy.functions.elementary.trigonometric import sin, cos
from sympy.series.order import O
def test_rational():
    # sqrt and fractional powers of a Rational simplify to
    # radical-times-rational form.
    a = Rational(1, 5)

    r = sqrt(5)/5
    assert sqrt(a) == r
    assert 2*sqrt(a) == 2*r

    r = a*a**Rational(1, 2)
    assert a**Rational(3, 2) == r
    assert 2*a**Rational(3, 2) == 2*r

    r = a**5*a**Rational(2, 3)
    assert a**Rational(17, 3) == r
    assert 2 * a**Rational(17, 3) == 2*r


def test_large_rational():
    # Cube root of a huge exact rational still extracts the integer part.
    e = (Rational(123712**12 - 1, 7) + Rational(1, 7))**Rational(1, 3)
    assert e == 234232585392159195136 * (Rational(1, 7)**Rational(1, 3))


def test_negative_real():
    def feq(a, b):
        # Approximate float equality within 1e-10.
        return abs(a - b) < 1E-10

    assert feq(S.One / Float(-0.5), -Integer(2))


def test_expand():
    x = Symbol('x')
    assert (2**(-1 - x)).expand() == Rational(1, 2)*2**(-x)


def test_issue_3449():
    #test if powers are simplified correctly
    #see also issue 3995
    x = Symbol('x')
    assert ((x**Rational(1, 3))**Rational(2)) == x**Rational(2, 3)
    assert (
        (x**Rational(3))**Rational(2, 5)) == (x**Rational(3))**Rational(2, 5)

    a = Symbol('a', real=True)
    b = Symbol('b', real=True)
    assert (a**2)**b == (abs(a)**b)**2
    assert sqrt(1/a) != 1/sqrt(a)  # e.g. for a = -1
    assert (a**3)**Rational(1, 3) != a
    assert (x**a)**b != x**(a*b)  # e.g. x = -1, a=2, b=1/2
    assert (x**.5)**b == x**(.5*b)
    assert (x**.5)**.5 == x**.25
    assert (x**2.5)**.5 != x**1.25  # e.g. for x = 5*I

    k = Symbol('k', integer=True)
    m = Symbol('m', integer=True)
    # Integer exponents may always be combined.
    assert (x**k)**m == x**(k*m)
    assert Number(5)**Rational(2, 3) == Number(25)**Rational(1, 3)

    assert (x**.5)**2 == x**1.0
    assert (x**2)**k == (x**k)**2 == x**(2*k)

    a = Symbol('a', positive=True)
    assert (a**3)**Rational(2, 5) == a**Rational(6, 5)
    assert (a**2)**b == (a**b)**2
    assert (a**Rational(2, 3))**x == (a**(2*x/3)) != (a**x)**Rational(2, 3)


def test_issue_3866():
    # Double negation of a radical is a no-op.
    assert --sqrt(sqrt(5) - 1) == sqrt(sqrt(5) - 1)


def test_negative_one():
    x = Symbol('x', complex=True)
    y = Symbol('y', complex=True)
    assert 1/x**y == x**(-y)
def test_issue_4362():
    # as_numer_denom must only split (num/den)**pow when the signs allow
    # it; negative bases under fractional powers need the (-x)**pow form.
    neg = Symbol('neg', negative=True)
    nonneg = Symbol('nonneg', nonnegative=True)
    any = Symbol('any')
    num, den = sqrt(1/neg).as_numer_denom()
    assert num == sqrt(-1)
    assert den == sqrt(-neg)
    num, den = sqrt(1/nonneg).as_numer_denom()
    assert num == 1
    assert den == sqrt(nonneg)
    num, den = sqrt(1/any).as_numer_denom()
    assert num == sqrt(1/any)
    assert den == 1

    def eqn(num, den, pow):
        # Helper: build (num/den)**pow without simplifying the inputs.
        return (num/den)**pow

    npos = 1
    nneg = -1
    dpos = 2 - sqrt(3)
    dneg = 1 - sqrt(3)
    assert dpos > 0 and dneg < 0 and npos > 0 and nneg < 0
    # pos or neg integer
    eq = eqn(npos, dpos, 2)
    assert eq.is_Pow and eq.as_numer_denom() == (1, dpos**2)
    eq = eqn(npos, dneg, 2)
    assert eq.is_Pow and eq.as_numer_denom() == (1, dneg**2)
    eq = eqn(nneg, dpos, 2)
    assert eq.is_Pow and eq.as_numer_denom() == (1, dpos**2)
    eq = eqn(nneg, dneg, 2)
    assert eq.is_Pow and eq.as_numer_denom() == (1, dneg**2)
    eq = eqn(npos, dpos, -2)
    assert eq.is_Pow and eq.as_numer_denom() == (dpos**2, 1)
    eq = eqn(npos, dneg, -2)
    assert eq.is_Pow and eq.as_numer_denom() == (dneg**2, 1)
    eq = eqn(nneg, dpos, -2)
    assert eq.is_Pow and eq.as_numer_denom() == (dpos**2, 1)
    eq = eqn(nneg, dneg, -2)
    assert eq.is_Pow and eq.as_numer_denom() == (dneg**2, 1)
    # pos or neg rational
    pow = S.Half
    eq = eqn(npos, dpos, pow)
    assert eq.is_Pow and eq.as_numer_denom() == (npos**pow, dpos**pow)
    eq = eqn(npos, dneg, pow)
    assert eq.is_Pow is False and eq.as_numer_denom() == ((-npos)**pow, (-dneg)**pow)
    eq = eqn(nneg, dpos, pow)
    assert not eq.is_Pow or eq.as_numer_denom() == (nneg**pow, dpos**pow)
    eq = eqn(nneg, dneg, pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-nneg)**pow, (-dneg)**pow)
    eq = eqn(npos, dpos, -pow)
    assert eq.is_Pow and eq.as_numer_denom() == (dpos**pow, npos**pow)
    eq = eqn(npos, dneg, -pow)
    assert eq.is_Pow is False and eq.as_numer_denom() == (-(-npos)**pow*(-dneg)**pow, npos)
    eq = eqn(nneg, dpos, -pow)
    assert not eq.is_Pow or eq.as_numer_denom() == (dpos**pow, nneg**pow)
    eq = eqn(nneg, dneg, -pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-dneg)**pow, (-nneg)**pow)
    # unknown exponent
    pow = 2*any
    eq = eqn(npos, dpos, pow)
    assert eq.is_Pow and eq.as_numer_denom() == (npos**pow, dpos**pow)
    eq = eqn(npos, dneg, pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-npos)**pow, (-dneg)**pow)
    eq = eqn(nneg, dpos, pow)
    assert eq.is_Pow and eq.as_numer_denom() == (nneg**pow, dpos**pow)
    eq = eqn(nneg, dneg, pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-nneg)**pow, (-dneg)**pow)
    eq = eqn(npos, dpos, -pow)
    assert eq.as_numer_denom() == (dpos**pow, npos**pow)
    eq = eqn(npos, dneg, -pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-dneg)**pow, (-npos)**pow)
    eq = eqn(nneg, dpos, -pow)
    assert eq.is_Pow and eq.as_numer_denom() == (dpos**pow, nneg**pow)
    eq = eqn(nneg, dneg, -pow)
    assert eq.is_Pow and eq.as_numer_denom() == ((-dneg)**pow, (-nneg)**pow)

    x = Symbol('x')
    y = Symbol('y')
    assert ((1/(1 + x/3))**(-S.One)).as_numer_denom() == (3 + x, 3)
    notp = Symbol('notp', positive=False)  # not positive does not imply real
    b = ((1 + x/notp)**-2)
    assert (b**(-y)).as_numer_denom() == (1, b**y)
    assert (b**(-S.One)).as_numer_denom() == ((notp + x)**2, notp**2)
    nonp = Symbol('nonp', nonpositive=True)
    assert (((1 + x/nonp)**-2)**(-S.One)).as_numer_denom() == ((-nonp -
            x)**2, nonp**2)

    n = Symbol('n', negative=True)
    assert (x**n).as_numer_denom() == (1, x**-n)
    assert sqrt(1/n).as_numer_denom() == (S.ImaginaryUnit, sqrt(-n))
    n = Symbol('0 or neg', nonpositive=True)
    # if x and n are split up without negating each term and n is negative
    # then the answer might be wrong; if n is 0 it won't matter since
    # 1/oo and 1/zoo are both zero as is sqrt(0)/sqrt(-x) unless x is also
    # zero (in which case the negative sign doesn't matter):
    # 1/sqrt(1/-1) = -I but sqrt(-1)/sqrt(1) = I
    assert (1/sqrt(x/n)).as_numer_denom() == (sqrt(-n), sqrt(-x))
    c = Symbol('c', complex=True)
    e = sqrt(1/c)
    assert e.as_numer_denom() == (e, 1)
    i = Symbol('i', integer=True)
    assert (((1 + x/y)**i)).as_numer_denom() == ((x + y)**i, y**i)
def test_Pow_signs():
    """Cf. issues 4595 and 5250"""
    x = Symbol('x')
    y = Symbol('y')
    n = Symbol('n', even=True)
    # (a-b)**k and (b-a)**k must not be conflated for symbolic even k.
    assert (3 - y)**2 != (y - 3)**2
    assert (3 - y)**n != (y - 3)**n
    assert (-3 + y - x)**2 != (3 - y + x)**2
    assert (y - 3)**3 != -(3 - y)**3


def test_power_with_noncommutative_mul_as_base():
    x = Symbol('x', commutative=False)
    y = Symbol('y', commutative=False)
    # Powers of noncommutative products must not distribute over factors.
    assert not (x*y)**3 == x**3*y**3
    assert (2*x*y)**3 == 8*(x*y)**3


def test_zero():
    x = Symbol('x')
    y = Symbol('y')
    assert 0**x != 0
    assert 0**(2*x) == 0**x
    assert 0**(1.0*x) == 0**x
    assert 0**(2.0*x) == 0**x
    assert (0**(2 - x)).as_base_exp() == (0, 2 - x)
    assert 0**(x - 2) != S.Infinity**(2 - x)
    assert 0**(2*x*y) == 0**(x*y)
    assert 0**(-2*x*y) == S.ComplexInfinity**(x*y)
    # 0 to an imaginary power is undefined.
    assert 0**I == nan
    i = Symbol('i', imaginary=True)
    assert 0**i == nan


def test_pow_as_base_exp():
    x = Symbol('x')
    assert (S.Infinity**(2 - x)).as_base_exp() == (S.Infinity, 2 - x)
    assert (S.Infinity**(x - 2)).as_base_exp() == (S.Infinity, x - 2)
    p = S.Half**x
    assert p.base, p.exp == p.as_base_exp() == (S(2), -x)

    # issue 8344:
    assert Pow(1, 2, evaluate=False).as_base_exp() == (S(1), S(2))
def test_issue_6100():
x = Symbol('x')
y = Symbol('y')
assert x**1.0 == x
assert x == x**1.0
assert True != x**1.0
assert x**1.0 is not True
assert x is not True
assert x*y == (x*y)**1.0
assert (x**1.0)**1.0 == x
assert (x**1.0)**2.0 == x**2
b = Basic()
assert Pow(b, 1.0, evaluate=False) == b
# if the following gets distributed as a Mul (x**1.0*y**1.0 then
# __eq__ methods could be added to Symbol and Pow to detect the
# power-of-1.0 case.
assert ((x*y)**1.0).func is Pow
def test_issue_6208():
from sympy import root, Rational
I = S.ImaginaryUnit
assert sqrt(33**(9*I/10)) == -33**(9*I/20)
assert root((6*I)**(2*I), 3).as_base_exp()[1] == Rational(1, 3) # != 2*I/3
assert root((6*I)**(I/3), 3).as_base_exp()[1] == I/9
assert sqrt(exp(3*I)) == exp(3*I/2)
assert sqrt(-sqrt(3)*(1 + 2*I)) == sqrt(sqrt(3))*sqrt(-1 - 2*I)
assert sqrt(exp(5*I)) == -exp(5*I/2)
assert root(exp(5*I), 3).exp == Rational(1, 3)
def test_issue_6990():
    """Series expansion of the square root of a general quadratic in x."""
    x = Symbol('x')
    a = Symbol('a')
    b = Symbol('b')
    assert (sqrt(a + b*x + x**2)).series(x, 0, 3).removeO() == \
        b*x/(2*sqrt(a)) + x**2*(1/(2*sqrt(a)) - \
        b**2/(8*a**(S(3)/2))) + sqrt(a)
def test_issue_6068():
    """Fractional-power series of sqrt(sin(x)) and sqrt(sin(x**3)) around 0;
    checks that increasing the truncation order adds the expected terms.
    """
    x = Symbol('x')
    assert sqrt(sin(x)).series(x, 0, 7) == \
        sqrt(x) - x**(S(5)/2)/12 + x**(S(9)/2)/1440 - \
        x**(S(13)/2)/24192 + O(x**7)
    assert sqrt(sin(x)).series(x, 0, 9) == \
        sqrt(x) - x**(S(5)/2)/12 + x**(S(9)/2)/1440 - \
        x**(S(13)/2)/24192 - 67*x**(S(17)/2)/29030400 + O(x**9)
    assert sqrt(sin(x**3)).series(x, 0, 19) == \
        x**(S(3)/2) - x**(S(15)/2)/12 + x**(S(27)/2)/1440 + O(x**19)
    assert sqrt(sin(x**3)).series(x, 0, 20) == \
        x**(S(3)/2) - x**(S(15)/2)/12 + x**(S(27)/2)/1440 - \
        x**(S(39)/2)/24192 + O(x**20)
def test_issue_6782():
    """Truncated series of sqrt(sin(x**n)) near 0 keep only the leading term."""
    v = Symbol('x')
    assert sqrt(sin(v**4)).series(v, 0, 3) == v**2 + O(v**3)
    assert sqrt(sin(v**3)).series(v, 0, 7) == v**(S(3)/2) + O(v**7)
def test_issue_6653():
    """Series of 1/sqrt(1 + sin(x**2)) about x = 0, to third order."""
    z = Symbol('x')
    expected = 1 - z**2/2 + O(z**3)
    assert (1 / sqrt(1 + sin(z**2))).series(z, 0, 3) == expected
def test_issue_6429():
    """Series/Taylor terms of (c**2 + x)**0.5 keep float exponents unevaluated."""
    x = Symbol('x')
    c = Symbol('c')
    f = (c**2 + x)**(0.5)
    # (c**2)**0.5 must not be simplified to c: the sign of c is unknown.
    assert f.series(x, x0=0, n=1) == (c**2)**0.5 + O(x)
    assert f.taylor_term(0, x) == (c**2)**0.5
    assert f.taylor_term(1, x) == 0.5*x*(c**2)**(-0.5)
    assert f.taylor_term(2, x) == -0.125*x**2*(c**2)**(-1.5)
def test_issue_7638():
    """Extracting roots from complex powers must respect branch cuts:
    a root may only be pulled inside when the sign ambiguity is resolvable.
    """
    f = pi/log(sqrt(2))
    assert ((1 + I)**(I*f/2))**0.3 == (1 + I)**(0.15*I*f)
    # if 1/3 -> 1.0/3 this should fail since it cannot be shown that the
    # sign will be +/-1; for the previous "small arg" case, it didn't matter
    # that this could not be proved
    assert (1 + I)**(4*I*f) == ((1 + I)**(12*I*f))**(S(1)/3)
    assert (((1 + I)**(I*(1 + 7*f)))**(S(1)/3)).exp == S(1)/3
    r = symbols('r', real=True)
    # For real r the square root of r**2 is |r|, not r.
    assert sqrt(r**2) == abs(r)
    assert cbrt(r**3) != r
    assert sqrt(Pow(2*I, 5*S.Half)) != (2*I)**(5/S(4))
    p = symbols('p', positive=True)
    # Positivity of p makes the extraction unambiguous.
    assert cbrt(p**2) == p**(2/S(3))
    assert NS(((0.2 + 0.7*I)**(0.7 + 1.0*I))**(0.5 - 0.1*I), 1) == '0.4 + 0.2*I'
    assert sqrt(1/(1 + I)) == sqrt((1 - I)/2) # or 1/sqrt(1 + I)
    e = 1/(1 - sqrt(2))
    assert sqrt(e) == I/sqrt(-1 + sqrt(2))
    assert e**-S.Half == -I*sqrt(-1 + sqrt(2))
    assert sqrt((cos(1)**2 + sin(1)**2 - 1)**(3 + I)).exp == S.Half
    assert sqrt(r**(4/S(3))) != r**(2/S(3))
    assert sqrt((p + I)**(4/S(3))) == (p + I)**(2/S(3))
    assert sqrt((p - p**2*I)**2) == p - p**2*I
    assert sqrt((p + r*I)**2) != p + r*I
    e = (1 + I/5)
    assert sqrt(e**5) == e**(5*S.Half)
    assert sqrt(e**6) == e**3
    assert sqrt((1 + I*r)**6) != (1 + I*r)**3
| mit |
woltage/ansible-modules-core | web_infrastructure/django_manage.py | 5 | 11099 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Scott Anderson <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
DOCUMENTATION = '''
---
module: django_manage
short_description: Manages a Django application.
description:
- Manages a Django application using the I(manage.py) application frontend to I(django-admin). With the I(virtualenv) parameter, all management commands will be executed by the given I(virtualenv) installation.
version_added: "1.1"
options:
command:
choices: [ 'cleanup', 'collectstatic', 'flush', 'loaddata', 'migrate', 'runfcgi', 'syncdb', 'test', 'validate', ]
description:
- The name of the Django management command to run. Built in commands are cleanup, collectstatic, flush, loaddata, migrate, runfcgi, syncdb, test, and validate.
- Other commands can be entered, but will fail if they're unknown to Django. Other commands that may prompt for user input should be run with the I(--noinput) flag.
required: true
app_path:
description:
- The path to the root of the Django application where B(manage.py) lives.
required: true
settings:
description:
- The Python path to the application's settings module, such as 'myapp.settings'.
required: false
pythonpath:
description:
- A directory to add to the Python path. Typically used to include the settings module if it is located external to the application directory.
required: false
virtualenv:
description:
- An optional path to a I(virtualenv) installation to use while running the manage application.
required: false
apps:
description:
- A list of space-delimited apps to target. Used by the 'test' command.
required: false
cache_table:
description:
- The name of the table used for database-backed caching. Used by the 'createcachetable' command.
required: false
database:
description:
- The database to target. Used by the 'createcachetable', 'flush', 'loaddata', and 'syncdb' commands.
required: false
failfast:
description:
- Fail the command immediately if a test fails. Used by the 'test' command.
required: false
default: "no"
choices: [ "yes", "no" ]
fixtures:
description:
- A space-delimited list of fixture file names to load in the database. B(Required) by the 'loaddata' command.
required: false
skip:
description:
- Will skip over out-of-order missing migrations, you can only use this parameter with I(migrate)
required: false
version_added: "1.3"
merge:
description:
- Will run out-of-order or missing migrations as they are not rollback migrations, you can only use this parameter with 'migrate' command
required: false
version_added: "1.3"
link:
description:
- Will create links to the files instead of copying them, you can only use this parameter with 'collectstatic' command
required: false
version_added: "1.3"
notes:
- I(virtualenv) (U(http://www.virtualenv.org)) must be installed on the remote host if the virtualenv parameter is specified.
- This module will create a virtualenv if the virtualenv parameter is specified and a virtualenv does not already exist at the given location.
- This module assumes English error messages for the 'createcachetable' command to detect table existence, unfortunately.
- To be able to use the migrate command, you must have south installed and added as an app in your settings
- To be able to use the collectstatic command, you must have enabled staticfiles in your settings
requirements: [ "virtualenv", "django" ]
author: "Scott Anderson (@tastychutney)"
'''
EXAMPLES = """
# Run cleanup on the application installed in 'django_dir'.
- django_manage: command=cleanup app_path={{ django_dir }}
# Load the initial_data fixture into the application
- django_manage: command=loaddata app_path={{ django_dir }} fixtures={{ initial_data }}
# Run syncdb on the application
- django_manage: >
command=syncdb
app_path={{ django_dir }}
settings={{ settings_app_name }}
pythonpath={{ settings_dir }}
virtualenv={{ virtualenv_dir }}
# Run the SmokeTest test case from the main app. Useful for testing deploys.
- django_manage: command=test app_path={{ django_dir }} apps=main.SmokeTest
# Create an initial superuser.
- django_manage: command="createsuperuser --noinput --username=admin [email protected]" app_path={{ django_dir }}
"""
import os
def _fail(module, cmd, out, err, **kwargs):
    """Abort the module run, folding captured stdout/stderr into the message.

    Extra keyword arguments are forwarded verbatim to fail_json.
    """
    parts = []
    if out:
        parts.append("stdout: %s" % (out, ))
    if err:
        parts.append("\n:stderr: %s" % (err, ))
    module.fail_json(cmd=cmd, msg=''.join(parts), **kwargs)
def _ensure_virtualenv(module):
    """Create the configured virtualenv if it does not yet exist, then
    activate it for subsequent commands by prepending its bin directory
    to PATH and exporting VIRTUAL_ENV.

    No-op when the 'virtualenv' module parameter was not supplied.
    Fails the module (via _fail) if virtualenv creation exits non-zero.
    """
    venv_param = module.params['virtualenv']
    if venv_param is None:
        return
    vbin = os.path.join(os.path.expanduser(venv_param), 'bin')
    activate = os.path.join(vbin, 'activate')
    if not os.path.exists(activate):
        virtualenv = module.get_bin_path('virtualenv', True)
        # BUGFIX: removed a dead '%s %s' string assignment to vcmd that was
        # immediately overwritten by the list form below. Passing argv as a
        # list also avoids going through a shell.
        vcmd = [virtualenv, venv_param]
        rc, out_venv, err_venv = module.run_command(vcmd)
        if rc != 0:
            _fail(module, vcmd, out_venv, err_venv)
    os.environ["PATH"] = "%s:%s" % (vbin, os.environ["PATH"])
    os.environ["VIRTUAL_ENV"] = venv_param
def createcachetable_filter_output(line):
    """A createcachetable output line counts as a change unless the cache
    table already existed."""
    return line.find("Already exists") == -1
def flush_filter_output(line):
    """True for flush output lines reporting at least one installed object."""
    mentions_install = "Installed" in line
    return mentions_install and "Installed 0 object" not in line
def loaddata_filter_output(line):
    """True for loaddata output lines reporting at least one installed object."""
    if "Installed" not in line:
        return False
    return "Installed 0 object" not in line
def syncdb_filter_output(line):
    """True for syncdb lines that create a table or install nonzero objects."""
    if "Creating table " in line:
        return True
    return "Installed" in line and "Installed 0 object" not in line
def migrate_filter_output(line):
    """True for migrate lines showing a forward migration or installed objects."""
    if "Migrating forwards " in line:
        return True
    return "Installed" in line and "Installed 0 object" not in line
def main():
    """Entry point: validate parameters, build the manage.py command line,
    run it inside the app directory (and optional virtualenv), and report
    changed/out back to Ansible."""
    # Per-command whitelist of the optional parameters it may receive.
    command_allowed_param_map = dict(
        cleanup=(),
        createcachetable=('cache_table', 'database', ),
        flush=('database', ),
        loaddata=('database', 'fixtures', ),
        syncdb=('database', ),
        test=('failfast', 'testrunner', 'liveserver', 'apps', ),
        validate=(),
        migrate=('apps', 'skip', 'merge', 'database',),
        collectstatic=('link', ),
    )
    # Parameters that must be present for the given command.
    command_required_param_map = dict(
        loaddata=('fixtures', ),
        createcachetable=('cache_table', ),
    )
    # forces --noinput on every command that needs it
    noinput_commands = (
        'flush',
        'syncdb',
        'migrate',
        'test',
        'collectstatic',
    )
    # These params are allowed for certain commands only
    specific_params = ('apps', 'database', 'failfast', 'fixtures', 'liveserver', 'testrunner')
    # These params are automatically added to the command if present
    general_params = ('settings', 'pythonpath', 'database',)
    # Boolean flags are rendered as bare '--flag' options.
    specific_boolean_params = ('failfast', 'skip', 'merge', 'link')
    # These params are appended as positional arguments at the end.
    end_of_command_params = ('apps', 'cache_table', 'fixtures')
    module = AnsibleModule(
        argument_spec=dict(
            command     = dict(default=None, required=True),
            app_path    = dict(default=None, required=True),
            settings    = dict(default=None, required=False),
            pythonpath  = dict(default=None, required=False, aliases=['python_path']),
            virtualenv  = dict(default=None, required=False, aliases=['virtual_env']),
            apps        = dict(default=None, required=False),
            cache_table = dict(default=None, required=False),
            database    = dict(default=None, required=False),
            failfast    = dict(default='no', required=False, type='bool', aliases=['fail_fast']),
            fixtures    = dict(default=None, required=False),
            liveserver  = dict(default=None, required=False, aliases=['live_server']),
            testrunner  = dict(default=None, required=False, aliases=['test_runner']),
            skip        = dict(default=None, required=False, type='bool'),
            merge       = dict(default=None, required=False, type='bool'),
            link        = dict(default=None, required=False, type='bool'),
        ),
    )
    command = module.params['command']
    app_path = os.path.expanduser(module.params['app_path'])
    virtualenv = module.params['virtualenv']
    # Reject parameters that don't apply to the requested command.
    for param in specific_params:
        value = module.params[param]
        if param in specific_boolean_params:
            value = module.boolean(value)
        if value and param not in command_allowed_param_map[command]:
            module.fail_json(msg='%s param is incompatible with command=%s' % (param, command))
    # Ensure the command's mandatory parameters were supplied.
    for param in command_required_param_map.get(command, ()):
        if not module.params[param]:
            module.fail_json(msg='%s param is required for command=%s' % (param, command))
    _ensure_virtualenv(module)
    cmd = "./manage.py %s" % (command, )
    if command in noinput_commands:
        cmd = '%s --noinput' % cmd
    # Render --settings=X / --pythonpath=X / --database=X style options.
    for param in general_params:
        if module.params[param]:
            cmd = '%s --%s=%s' % (cmd, param, module.params[param])
    for param in specific_boolean_params:
        if module.boolean(module.params[param]):
            cmd = '%s --%s' % (cmd, param)
    # these params always get tacked on the end of the command
    for param in end_of_command_params:
        if module.params[param]:
            cmd = '%s %s' % (cmd, module.params[param])
    rc, out, err = module.run_command(cmd, cwd=os.path.expanduser(app_path))
    if rc != 0:
        # 'createcachetable' failing because the table exists is not an error.
        if command == 'createcachetable' and 'table' in err and 'already exists' in err:
            out = 'Already exists.'
        else:
            if "Unknown command:" in err:
                # NOTE(review): err is passed in _fail's 'out' slot and the
                # message string in its 'err' slot -- looks swapped; confirm.
                _fail(module, cmd, err, "Unknown django command: %s" % command)
            # 'sys' comes from the wildcard import of ansible.module_utils.basic.
            _fail(module, cmd, out, err, path=os.environ["PATH"], syspath=sys.path)
    changed = False
    lines = out.split('\n')  # NOTE(review): unused; the filter re-splits below.
    # Derive 'changed' from a command-specific output filter, if one exists.
    filt = globals().get(command + "_filter_output", None)
    if filt:
        # NOTE(review): under Python 3 filter() returns an iterator, so the
        # len() below would raise; this module targets Python 2 -- confirm.
        filtered_output = filter(filt, out.split('\n'))
        if len(filtered_output):
            changed = filtered_output
    module.exit_json(changed=changed, out=out, cmd=cmd, app_path=app_path, virtualenv=virtualenv,
                     settings=module.params['settings'], pythonpath=module.params['pythonpath'])
# import module snippets
from ansible.module_utils.basic import *
main()
| gpl-3.0 |
mdavid/cherokee-webserver-svnclone | admin/plugins/ssi.py | 3 | 1122 | # -*- coding: utf-8 -*-
#
# Cherokee-admin
#
# Authors:
# Alvaro Lopez Ortega <[email protected]>
#
# Copyright (C) 2010 Alvaro Lopez Ortega
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of version 2 of the GNU General Public
# License as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
import CTK
import Handler
HELPS = [('modules_handlers_ssi', _("Server Side Includes"))]
class Plugin_ssi (Handler.PluginHandler):
    """Cherokee-admin configuration plugin for the Server Side Includes
    (SSI) handler."""
    def __init__ (self, key, **kwargs):
        # Hide the generic "Document Root" widget for this handler.
        kwargs['show_document_root'] = False
        Handler.PluginHandler.__init__ (self, key, **kwargs)
        # Add the widgets shared by all handler plugins.
        Handler.PluginHandler.AddCommon (self)
snakeleon/YouCompleteMe-x86 | python/ycm/tests/event_notification_test.py | 1 | 18402 | # coding: utf-8
#
# Copyright (C) 2015-2016 YouCompleteMe contributors
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import * # noqa
from ycm.tests.test_utils import ( CurrentWorkingDirectory, ExtendedMock,
MockVimBuffers, MockVimModule, VimBuffer )
MockVimModule()
import contextlib
import os
from ycm.tests import PathToTestFile, YouCompleteMeInstance
from ycmd.responses import ( BuildDiagnosticData, Diagnostic, Location, Range,
UnknownExtraConf, ServerError )
from hamcrest import assert_that, contains, has_entries, has_item
from mock import call, MagicMock, patch
from nose.tools import eq_, ok_
def PresentDialog_Confirm_Call( message ):
  """Build the mock.call expected when vimsupport.Confirm shows |message|
  via vimsupport.PresentDialog (which always offers Ok/Cancel buttons)."""
  buttons = [ 'Ok', 'Cancel' ]
  return call( message, buttons )
def PlaceSign_Call( sign_id, line_num, buffer_num, is_error ):
  """Build the mock.call for the Vim 'sign place' command that places either
  an error or a warning sign."""
  if is_error:
    sign_name = 'YcmError'
  else:
    sign_name = 'YcmWarning'
  command = 'sign place {0} line={1} name={2} buffer={3}'.format(
      sign_id, line_num, sign_name, buffer_num )
  return call( command )
def UnplaceSign_Call( sign_id, buffer_num ):
  """Build the mock.call for the Vim command that removes a placed sign,
  swallowing E158 (invalid buffer name)."""
  template = ( 'try | exec "sign unplace {0} buffer={1}" |'
               ' catch /E158/ | endtry' )
  return call( template.format( sign_id, buffer_num ) )
@contextlib.contextmanager
def MockArbitraryBuffer( filetype ):
  """Used via the with statement, set up a single buffer with an arbitrary name
  and no contents. Its filetype is set to the supplied filetype."""
  # Arbitrary, but valid, single buffer open.
  current_buffer = VimBuffer( os.path.realpath( 'TEST_BUFFER' ),
                              window = 1,
                              filetype = filetype )
  # Install it as both the buffer list and the current buffer for the
  # duration of the with-block.
  with MockVimBuffers( [ current_buffer ], current_buffer ):
    yield
@contextlib.contextmanager
def MockEventNotification( response_method, native_filetype_completer = True ):
  """Mock out the EventNotification client request object, replacing the
  Response handler's JsonFromFuture with the supplied |response_method|.
  Additionally mock out YouCompleteMe's FiletypeCompleterExistsForFiletype
  method to return the supplied |native_filetype_completer| parameter, rather
  than querying the server"""
  # We don't want the event to actually be sent to the server, just have it
  # return success
  with patch( 'ycm.client.base_request.BaseRequest.PostDataToHandlerAsync',
              return_value = MagicMock( return_value=True ) ):
    # We set up a fake a Response (as called by EventNotification.Response)
    # which calls the supplied callback method. Generally this callback just
    # raises an apropriate exception, otherwise it would have to return a mock
    # future object.
    #
    # Note: JsonFromFuture is actually part of ycm.client.base_request, but we
    # must patch where an object is looked up, not where it is defined.
    # See https://docs.python.org/dev/library/unittest.mock.html#where-to-patch
    # for details.
    with patch( 'ycm.client.event_notification.JsonFromFuture',
                side_effect = response_method ):
      # Filetype available information comes from the server, so rather than
      # relying on that request, we mock out the check. The caller decides if
      # filetype completion is available
      with patch(
        'ycm.youcompleteme.YouCompleteMe.FiletypeCompleterExistsForFiletype',
        return_value = native_filetype_completer ):
        # All three patches active: the caller can now fire events and
        # observe only the mocked interactions.
        yield
@patch( 'ycm.vimsupport.PostVimMessage', new_callable = ExtendedMock )
@YouCompleteMeInstance()
def EventNotification_FileReadyToParse_NonDiagnostic_Error_test(
ycm, post_vim_message ):
# This test validates the behaviour of YouCompleteMe.HandleFileParseRequest
# in combination with YouCompleteMe.OnFileReadyToParse when the completer
# raises an exception handling FileReadyToParse event notification
ERROR_TEXT = 'Some completer response text'
def ErrorResponse( *args ):
raise ServerError( ERROR_TEXT )
with MockArbitraryBuffer( 'javascript' ):
with MockEventNotification( ErrorResponse ):
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
# The first call raises a warning
post_vim_message.assert_has_exact_calls( [
call( ERROR_TEXT, truncate = True )
] )
# Subsequent calls don't re-raise the warning
ycm.HandleFileParseRequest()
post_vim_message.assert_has_exact_calls( [
call( ERROR_TEXT, truncate = True )
] )
# But it does if a subsequent event raises again
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
post_vim_message.assert_has_exact_calls( [
call( ERROR_TEXT, truncate = True ),
call( ERROR_TEXT, truncate = True )
] )
@patch( 'vim.command' )
@YouCompleteMeInstance()
def EventNotification_FileReadyToParse_NonDiagnostic_Error_NonNative_test(
ycm, vim_command ):
with MockArbitraryBuffer( 'javascript' ):
with MockEventNotification( None, False ):
ycm.OnFileReadyToParse()
ycm.HandleFileParseRequest()
vim_command.assert_not_called()
@patch( 'ycm.client.base_request._LoadExtraConfFile',
new_callable = ExtendedMock )
@patch( 'ycm.client.base_request._IgnoreExtraConfFile',
new_callable = ExtendedMock )
@YouCompleteMeInstance()
def EventNotification_FileReadyToParse_NonDiagnostic_ConfirmExtraConf_test(
ycm, ignore_extra_conf, load_extra_conf ):
# This test validates the behaviour of YouCompleteMe.HandleFileParseRequest
# in combination with YouCompleteMe.OnFileReadyToParse when the completer
# raises the (special) UnknownExtraConf exception
FILE_NAME = 'a_file'
MESSAGE = ( 'Found ' + FILE_NAME + '. Load? \n\n(Question can be '
'turned off with options, see YCM docs)' )
def UnknownExtraConfResponse( *args ):
raise UnknownExtraConf( FILE_NAME )
with MockArbitraryBuffer( 'javascript' ):
with MockEventNotification( UnknownExtraConfResponse ):
# When the user accepts the extra conf, we load it
with patch( 'ycm.vimsupport.PresentDialog',
return_value = 0,
new_callable = ExtendedMock ) as present_dialog:
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE ),
] )
load_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
] )
# Subsequent calls don't re-raise the warning
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE )
] )
load_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
] )
# But it does if a subsequent event raises again
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE ),
PresentDialog_Confirm_Call( MESSAGE ),
] )
load_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
call( FILE_NAME ),
] )
# When the user rejects the extra conf, we reject it
with patch( 'ycm.vimsupport.PresentDialog',
return_value = 1,
new_callable = ExtendedMock ) as present_dialog:
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE ),
] )
ignore_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
] )
# Subsequent calls don't re-raise the warning
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE )
] )
ignore_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
] )
# But it does if a subsequent event raises again
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
present_dialog.assert_has_exact_calls( [
PresentDialog_Confirm_Call( MESSAGE ),
PresentDialog_Confirm_Call( MESSAGE ),
] )
ignore_extra_conf.assert_has_exact_calls( [
call( FILE_NAME ),
call( FILE_NAME ),
] )
@YouCompleteMeInstance()
def EventNotification_FileReadyToParse_Diagnostic_Error_Native_test( ycm ):
_Check_FileReadyToParse_Diagnostic_Error( ycm )
_Check_FileReadyToParse_Diagnostic_Warning( ycm )
_Check_FileReadyToParse_Diagnostic_Clean( ycm )
@patch( 'vim.command' )
def _Check_FileReadyToParse_Diagnostic_Error( ycm, vim_command ):
# Tests Vim sign placement and error/warning count python API
# when one error is returned.
def DiagnosticResponse( *args ):
start = Location( 1, 2, 'TEST_BUFFER' )
end = Location( 1, 4, 'TEST_BUFFER' )
extent = Range( start, end )
diagnostic = Diagnostic( [], start, extent, 'expected ;', 'ERROR' )
return [ BuildDiagnosticData( diagnostic ) ]
with MockArbitraryBuffer( 'cpp' ):
with MockEventNotification( DiagnosticResponse ):
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
vim_command.assert_has_calls( [
PlaceSign_Call( 1, 1, 1, True )
] )
eq_( ycm.GetErrorCount(), 1 )
eq_( ycm.GetWarningCount(), 0 )
# Consequent calls to HandleFileParseRequest shouldn't mess with
# existing diagnostics, when there is no new parse request.
vim_command.reset_mock()
ok_( not ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
vim_command.assert_not_called()
eq_( ycm.GetErrorCount(), 1 )
eq_( ycm.GetWarningCount(), 0 )
@patch( 'vim.command' )
def _Check_FileReadyToParse_Diagnostic_Warning( ycm, vim_command ):
# Tests Vim sign placement/unplacement and error/warning count python API
# when one warning is returned.
# Should be called after _Check_FileReadyToParse_Diagnostic_Error
def DiagnosticResponse( *args ):
start = Location( 2, 2, 'TEST_BUFFER' )
end = Location( 2, 4, 'TEST_BUFFER' )
extent = Range( start, end )
diagnostic = Diagnostic( [], start, extent, 'cast', 'WARNING' )
return [ BuildDiagnosticData( diagnostic ) ]
with MockArbitraryBuffer( 'cpp' ):
with MockEventNotification( DiagnosticResponse ):
ycm.OnFileReadyToParse()
ok_( ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
vim_command.assert_has_calls( [
PlaceSign_Call( 2, 2, 1, False ),
UnplaceSign_Call( 1, 1 )
] )
eq_( ycm.GetErrorCount(), 0 )
eq_( ycm.GetWarningCount(), 1 )
# Consequent calls to HandleFileParseRequest shouldn't mess with
# existing diagnostics, when there is no new parse request.
vim_command.reset_mock()
ok_( not ycm.FileParseRequestReady() )
ycm.HandleFileParseRequest()
vim_command.assert_not_called()
eq_( ycm.GetErrorCount(), 0 )
eq_( ycm.GetWarningCount(), 1 )
@patch( 'vim.command' )
def _Check_FileReadyToParse_Diagnostic_Clean( ycm, vim_command ):
# Tests Vim sign unplacement and error/warning count python API
# when there are no errors/warnings left.
# Should be called after _Check_FileReadyToParse_Diagnostic_Warning
with MockArbitraryBuffer( 'cpp' ):
with MockEventNotification( MagicMock( return_value = [] ) ):
ycm.OnFileReadyToParse()
ycm.HandleFileParseRequest()
vim_command.assert_has_calls( [
UnplaceSign_Call( 2, 1 )
] )
eq_( ycm.GetErrorCount(), 0 )
eq_( ycm.GetWarningCount(), 0 )
@patch( 'ycm.youcompleteme.YouCompleteMe._AddUltiSnipsDataIfNeeded' )
@YouCompleteMeInstance( { 'collect_identifiers_from_tags_files': 1 } )
def EventNotification_FileReadyToParse_TagFiles_UnicodeWorkingDirectory_test(
ycm, *args ):
unicode_dir = PathToTestFile( 'uni¢𐍈d€' )
current_buffer_file = PathToTestFile( 'uni¢𐍈d€', 'current_buffer' )
current_buffer = VimBuffer( name = current_buffer_file,
contents = [ 'current_buffer_contents' ],
filetype = 'some_filetype' )
with patch( 'ycm.client.base_request.BaseRequest.'
'PostDataToHandlerAsync' ) as post_data_to_handler_async:
with CurrentWorkingDirectory( unicode_dir ):
with MockVimBuffers( [ current_buffer ], current_buffer, ( 6, 5 ) ):
ycm.OnFileReadyToParse()
assert_that(
# Positional arguments passed to PostDataToHandlerAsync.
post_data_to_handler_async.call_args[ 0 ],
contains(
has_entries( {
'filepath': current_buffer_file,
'line_num': 6,
'column_num': 6,
'file_data': has_entries( {
current_buffer_file: has_entries( {
'contents': 'current_buffer_contents\n',
'filetypes': [ 'some_filetype' ]
} )
} ),
'event_name': 'FileReadyToParse',
'tag_files': has_item( PathToTestFile( 'uni¢𐍈d€', 'tags' ) )
} ),
'event_notification'
)
)
@patch( 'ycm.youcompleteme.YouCompleteMe._AddUltiSnipsDataIfNeeded' )
@YouCompleteMeInstance()
def EventNotification_BufferVisit_BuildRequestForCurrentAndUnsavedBuffers_test(
ycm, *args ):
current_buffer_file = os.path.realpath( 'current_buffer' )
current_buffer = VimBuffer( name = current_buffer_file,
number = 1,
contents = [ 'current_buffer_contents' ],
filetype = 'some_filetype',
modified = False )
modified_buffer_file = os.path.realpath( 'modified_buffer' )
modified_buffer = VimBuffer( name = modified_buffer_file,
number = 2,
contents = [ 'modified_buffer_contents' ],
filetype = 'some_filetype',
modified = True )
unmodified_buffer_file = os.path.realpath( 'unmodified_buffer' )
unmodified_buffer = VimBuffer( name = unmodified_buffer_file,
number = 3,
contents = [ 'unmodified_buffer_contents' ],
filetype = 'some_filetype',
modified = False )
with patch( 'ycm.client.base_request.BaseRequest.'
'PostDataToHandlerAsync' ) as post_data_to_handler_async:
with MockVimBuffers( [ current_buffer, modified_buffer, unmodified_buffer ],
current_buffer,
( 3, 5 ) ):
ycm.OnBufferVisit()
assert_that(
# Positional arguments passed to PostDataToHandlerAsync.
post_data_to_handler_async.call_args[ 0 ],
contains(
has_entries( {
'filepath': current_buffer_file,
'line_num': 3,
'column_num': 6,
'file_data': has_entries( {
current_buffer_file: has_entries( {
'contents': 'current_buffer_contents\n',
'filetypes': [ 'some_filetype' ]
} ),
modified_buffer_file: has_entries( {
'contents': 'modified_buffer_contents\n',
'filetypes': [ 'some_filetype' ]
} )
} ),
'event_name': 'BufferVisit'
} ),
'event_notification'
)
)
@YouCompleteMeInstance()
def EventNotification_BufferUnload_BuildRequestForDeletedAndUnsavedBuffers_test(
ycm ):
current_buffer_file = os.path.realpath( 'current_buffer' )
current_buffer = VimBuffer( name = current_buffer_file,
number = 1,
contents = [ 'current_buffer_contents' ],
filetype = 'some_filetype',
modified = True )
deleted_buffer_file = os.path.realpath( 'deleted_buffer' )
deleted_buffer = VimBuffer( name = deleted_buffer_file,
number = 2,
contents = [ 'deleted_buffer_contents' ],
filetype = 'some_filetype',
modified = False )
with patch( 'ycm.client.base_request.BaseRequest.'
'PostDataToHandlerAsync' ) as post_data_to_handler_async:
with MockVimBuffers( [ current_buffer, deleted_buffer ], current_buffer ):
ycm.OnBufferUnload( deleted_buffer_file )
assert_that(
# Positional arguments passed to PostDataToHandlerAsync.
post_data_to_handler_async.call_args[ 0 ],
contains(
has_entries( {
'filepath': deleted_buffer_file,
'line_num': 1,
'column_num': 1,
'file_data': has_entries( {
current_buffer_file: has_entries( {
'contents': 'current_buffer_contents\n',
'filetypes': [ 'some_filetype' ]
} ),
deleted_buffer_file: has_entries( {
'contents': 'deleted_buffer_contents\n',
'filetypes': [ 'some_filetype' ]
} )
} ),
'event_name': 'BufferUnload'
} ),
'event_notification'
)
)
| gpl-3.0 |
partofthething/home-assistant | tests/components/zwave/test_climate.py | 16 | 32535 | """Test Z-Wave climate devices."""
import pytest
from homeassistant.components.climate.const import (
ATTR_TARGET_TEMP_HIGH,
ATTR_TARGET_TEMP_LOW,
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
HVAC_MODE_OFF,
HVAC_MODES,
PRESET_AWAY,
PRESET_BOOST,
PRESET_ECO,
PRESET_NONE,
SUPPORT_AUX_HEAT,
SUPPORT_FAN_MODE,
SUPPORT_PRESET_MODE,
SUPPORT_SWING_MODE,
SUPPORT_TARGET_TEMPERATURE,
SUPPORT_TARGET_TEMPERATURE_RANGE,
)
from homeassistant.components.zwave import climate, const
from homeassistant.components.zwave.climate import (
AUX_HEAT_ZWAVE_MODE,
DEFAULT_HVAC_MODES,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS, TEMP_FAHRENHEIT
from tests.mock.zwave import MockEntityValues, MockNode, MockValue, value_changed
@pytest.fixture
def device(hass, mock_openzwave):
    """Fixture to provide a precreated climate device.

    Models a thermostat node supporting off/heat/cool/heat_cool with
    separate heating and cooling setpoints, a fan mode and an operating
    state.
    """
    node = MockNode()
    values = MockEntityValues(
        primary=MockValue(
            command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
            data=HVAC_MODE_HEAT,
            data_items=[
                HVAC_MODE_OFF,
                HVAC_MODE_HEAT,
                HVAC_MODE_COOL,
                HVAC_MODE_HEAT_COOL,
            ],
            node=node,
        ),
        setpoint_heating=MockValue(data=1, node=node),
        setpoint_cooling=MockValue(data=10, node=node),
        # units=None presumably exercises the unit-fallback path -- confirm.
        temperature=MockValue(data=5, node=node, units=None),
        fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
        operating_state=MockValue(data=CURRENT_HVAC_HEAT, node=node),
        fan_action=MockValue(data=7, node=node),
    )
    device = climate.get_device(hass, node=node, values=values, node_config={})
    yield device
@pytest.fixture
def device_zxt_120(hass, mock_openzwave):
"""Fixture to provide a precreated climate device."""
node = MockNode(manufacturer_id="5254", product_id="8377")
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data=HVAC_MODE_HEAT,
data_items=[
HVAC_MODE_OFF,
HVAC_MODE_HEAT,
HVAC_MODE_COOL,
HVAC_MODE_HEAT_COOL,
],
node=node,
),
setpoint_heating=MockValue(data=1, node=node),
setpoint_cooling=MockValue(data=10, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data=CURRENT_HVAC_HEAT, node=node),
fan_action=MockValue(data=7, node=node),
zxt_120_swing_mode=MockValue(data="test3", data_items=[6, 7, 8], node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_mapping(hass, mock_openzwave):
"""Fixture to provide a precreated climate device. Test state mapping."""
node = MockNode()
values = MockEntityValues(
primary=MockValue(
command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
data="Heat",
data_items=["Off", "Cool", "Heat", "Full Power", "Auto"],
node=node,
),
setpoint_heating=MockValue(data=1, node=node),
setpoint_cooling=MockValue(data=10, node=node),
temperature=MockValue(data=5, node=node, units=None),
fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
operating_state=MockValue(data="heating", node=node),
fan_action=MockValue(data=7, node=node),
)
device = climate.get_device(hass, node=node, values=values, node_config={})
yield device
@pytest.fixture
def device_unknown(hass, mock_openzwave):
    """Fixture: climate device exposing an unrecognized mode string."""
    node = MockNode()
    values = MockEntityValues(
        primary=MockValue(
            command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
            data="Heat",
            # "Abcdefg" is deliberately not a known hvac mode or preset.
            data_items=["Off", "Cool", "Heat", "heat_cool", "Abcdefg"],
            node=node,
        ),
        setpoint_heating=MockValue(data=1, node=node),
        setpoint_cooling=MockValue(data=10, node=node),
        temperature=MockValue(data=5, node=node, units=None),
        fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
        operating_state=MockValue(data="test4", node=node),
        fan_action=MockValue(data=7, node=node),
    )
    device = climate.get_device(hass, node=node, values=values, node_config={})
    yield device
@pytest.fixture
def device_heat_cool(hass, mock_openzwave):
    """Fixture: climate device with heat/cool modes plus eco preset variants."""
    node = MockNode()
    values = MockEntityValues(
        primary=MockValue(
            command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
            data=HVAC_MODE_HEAT,
            # "Heat Eco"/"Cool Eco" are device-specific presets layered on
            # top of the plain heat/cool modes.
            data_items=[
                HVAC_MODE_OFF,
                HVAC_MODE_HEAT,
                HVAC_MODE_COOL,
                "Heat Eco",
                "Cool Eco",
            ],
            node=node,
        ),
        setpoint_heating=MockValue(data=1, node=node),
        setpoint_cooling=MockValue(data=10, node=node),
        temperature=MockValue(data=5, node=node, units=None),
        fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
        operating_state=MockValue(data="test4", node=node),
        fan_action=MockValue(data=7, node=node),
    )
    device = climate.get_device(hass, node=node, values=values, node_config={})
    yield device
@pytest.fixture
def device_heat_cool_range(hass, mock_openzwave):
    """Fixture: climate device starting in heat_cool (target range) mode."""
    node = MockNode()
    values = MockEntityValues(
        primary=MockValue(
            command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
            # heat_cool is active, so the entity exposes a low/high target range.
            data=HVAC_MODE_HEAT_COOL,
            data_items=[
                HVAC_MODE_OFF,
                HVAC_MODE_HEAT,
                HVAC_MODE_COOL,
                HVAC_MODE_HEAT_COOL,
            ],
            node=node,
        ),
        setpoint_heating=MockValue(data=1, node=node),
        setpoint_cooling=MockValue(data=10, node=node),
        temperature=MockValue(data=5, node=node, units=None),
        fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
        operating_state=MockValue(data="test4", node=node),
        fan_action=MockValue(data=7, node=node),
    )
    device = climate.get_device(hass, node=node, values=values, node_config={})
    yield device
@pytest.fixture
def device_heat_cool_away(hass, mock_openzwave):
    """Fixture: heat_cool device with an away preset and away-specific setpoints."""
    node = MockNode()
    values = MockEntityValues(
        primary=MockValue(
            command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
            data=HVAC_MODE_HEAT_COOL,
            data_items=[
                HVAC_MODE_OFF,
                HVAC_MODE_HEAT,
                HVAC_MODE_COOL,
                HVAC_MODE_HEAT_COOL,
                PRESET_AWAY,
            ],
            node=node,
        ),
        # Normal setpoints (2/9) differ from the away setpoints (1/10) so
        # tests can tell which pair the entity is using.
        setpoint_heating=MockValue(data=2, node=node),
        setpoint_cooling=MockValue(data=9, node=node),
        setpoint_away_heating=MockValue(data=1, node=node),
        setpoint_away_cooling=MockValue(data=10, node=node),
        temperature=MockValue(data=5, node=node, units=None),
        fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
        operating_state=MockValue(data="test4", node=node),
        fan_action=MockValue(data=7, node=node),
    )
    device = climate.get_device(hass, node=node, values=values, node_config={})
    yield device
@pytest.fixture
def device_heat_eco(hass, mock_openzwave):
    """Fixture: climate device with heat mode plus a "heat econ" preset."""
    node = MockNode()
    values = MockEntityValues(
        primary=MockValue(
            command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
            data=HVAC_MODE_HEAT,
            data_items=[HVAC_MODE_OFF, HVAC_MODE_HEAT, "heat econ"],
            node=node,
        ),
        # Distinct normal (2) vs eco (1) heating setpoints for the tests.
        setpoint_heating=MockValue(data=2, node=node),
        setpoint_eco_heating=MockValue(data=1, node=node),
        temperature=MockValue(data=5, node=node, units=None),
        fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
        operating_state=MockValue(data="test4", node=node),
        fan_action=MockValue(data=7, node=node),
    )
    device = climate.get_device(hass, node=node, values=values, node_config={})
    yield device
@pytest.fixture
def device_aux_heat(hass, mock_openzwave):
    """Fixture: climate device exposing an "Aux Heat" Z-Wave mode."""
    node = MockNode()
    values = MockEntityValues(
        primary=MockValue(
            command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
            data=HVAC_MODE_HEAT,
            # "Aux Heat" in the mode list enables SUPPORT_AUX_HEAT.
            data_items=[HVAC_MODE_OFF, HVAC_MODE_HEAT, "Aux Heat"],
            node=node,
        ),
        setpoint_heating=MockValue(data=2, node=node),
        setpoint_eco_heating=MockValue(data=1, node=node),
        temperature=MockValue(data=5, node=node, units=None),
        fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
        operating_state=MockValue(data="test4", node=node),
        fan_action=MockValue(data=7, node=node),
    )
    device = climate.get_device(hass, node=node, values=values, node_config={})
    yield device
@pytest.fixture
def device_single_setpoint(hass, mock_openzwave):
    """Fixture to provide a precreated climate device.
    SETPOINT_THERMOSTAT device class: the primary value is the setpoint
    itself and there is no thermostat-mode value (mode=None).
    """
    node = MockNode()
    values = MockEntityValues(
        primary=MockValue(
            command_class=const.COMMAND_CLASS_THERMOSTAT_SETPOINT, data=1, node=node
        ),
        mode=None,
        temperature=MockValue(data=5, node=node, units=None),
        fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
        operating_state=MockValue(data=CURRENT_HVAC_HEAT, node=node),
        fan_action=MockValue(data=7, node=node),
    )
    device = climate.get_device(hass, node=node, values=values, node_config={})
    yield device
@pytest.fixture
def device_single_setpoint_with_mode(hass, mock_openzwave):
    """Fixture to provide a precreated climate device.
    SETPOINT_THERMOSTAT device class with an additional
    COMMAND_CLASS_THERMOSTAT_MODE value (off/heat only).
    """
    node = MockNode()
    values = MockEntityValues(
        primary=MockValue(
            command_class=const.COMMAND_CLASS_THERMOSTAT_SETPOINT, data=1, node=node
        ),
        mode=MockValue(
            command_class=const.COMMAND_CLASS_THERMOSTAT_MODE,
            data=HVAC_MODE_HEAT,
            data_items=[HVAC_MODE_OFF, HVAC_MODE_HEAT],
            node=node,
        ),
        temperature=MockValue(data=5, node=node, units=None),
        fan_mode=MockValue(data="test2", data_items=[3, 4, 5], node=node),
        operating_state=MockValue(data=CURRENT_HVAC_HEAT, node=node),
        fan_action=MockValue(data=7, node=node),
    )
    device = climate.get_device(hass, node=node, values=values, node_config={})
    yield device
def test_get_device_detects_none(hass, mock_openzwave):
    """Test get_device returns None."""
    node = MockNode()
    # A primary value without a thermostat command class yields no device.
    value = MockValue(data=0, node=node)
    values = MockEntityValues(primary=value)
    device = climate.get_device(hass, node=node, values=values, node_config={})
    assert device is None
def test_get_device_detects_multiple_setpoint_device(device):
    """Test get_device returns a Z-Wave multiple setpoint device."""
    # The plain `device` fixture must map to the multi-setpoint implementation.
    assert isinstance(device, climate.ZWaveClimateMultipleSetpoint)
def test_get_device_detects_single_setpoint_device(device_single_setpoint):
    """Test get_device returns a Z-Wave single setpoint device."""
    # A setpoint-class primary value must map to the single-setpoint implementation.
    assert isinstance(device_single_setpoint, climate.ZWaveClimateSingleSetpoint)
def test_default_hvac_modes():
    """Test whether all hvac modes are included in default_hvac_modes."""
    # DEFAULT_HVAC_MODES must be a superset of HA's core HVAC_MODES list.
    for hvac_mode in HVAC_MODES:
        assert hvac_mode in DEFAULT_HVAC_MODES
def test_supported_features(device):
    """Test supported features flags."""
    # Summing is equivalent to OR-ing here only because the SUPPORT_*
    # constants are distinct single-bit flags.
    assert (
        device.supported_features
        == SUPPORT_FAN_MODE
        + SUPPORT_TARGET_TEMPERATURE
        + SUPPORT_TARGET_TEMPERATURE_RANGE
    )
def test_supported_features_temp_range(device_heat_cool_range):
    """Test supported features flags with target temp range."""
    device = device_heat_cool_range
    assert (
        device.supported_features
        == SUPPORT_FAN_MODE
        + SUPPORT_TARGET_TEMPERATURE
        + SUPPORT_TARGET_TEMPERATURE_RANGE
    )
def test_supported_features_preset_mode(device_mapping):
    """Test supported features flags with preset mode."""
    device = device_mapping
    assert (
        device.supported_features
        == SUPPORT_FAN_MODE
        + SUPPORT_TARGET_TEMPERATURE
        + SUPPORT_TARGET_TEMPERATURE_RANGE
        + SUPPORT_PRESET_MODE
    )
def test_supported_features_preset_mode_away(device_heat_cool_away):
    """Test supported features flags with the away preset mode."""
    device = device_heat_cool_away
    assert (
        device.supported_features
        == SUPPORT_FAN_MODE
        + SUPPORT_TARGET_TEMPERATURE
        + SUPPORT_TARGET_TEMPERATURE_RANGE
        + SUPPORT_PRESET_MODE
    )
def test_supported_features_swing_mode(device_zxt_120):
    """Test supported features flags with swing mode."""
    device = device_zxt_120
    # Only the ZXT-120 fixture exposes a swing-mode value.
    assert (
        device.supported_features
        == SUPPORT_FAN_MODE
        + SUPPORT_TARGET_TEMPERATURE
        + SUPPORT_TARGET_TEMPERATURE_RANGE
        + SUPPORT_SWING_MODE
    )
def test_supported_features_aux_heat(device_aux_heat):
    """Test supported features flags with aux heat."""
    device = device_aux_heat
    assert (
        device.supported_features
        == SUPPORT_FAN_MODE + SUPPORT_TARGET_TEMPERATURE + SUPPORT_AUX_HEAT
    )
def test_supported_features_single_setpoint(device_single_setpoint):
    """Test supported features flags for SETPOINT_THERMOSTAT."""
    device = device_single_setpoint
    # No range support: a single-setpoint device has one target temperature.
    assert device.supported_features == SUPPORT_FAN_MODE + SUPPORT_TARGET_TEMPERATURE
def test_supported_features_single_setpoint_with_mode(device_single_setpoint_with_mode):
    """Test supported features flags for SETPOINT_THERMOSTAT with a mode value."""
    device = device_single_setpoint_with_mode
    assert device.supported_features == SUPPORT_FAN_MODE + SUPPORT_TARGET_TEMPERATURE
def test_zxt_120_swing_mode(device_zxt_120):
    """Test operation of the zxt 120 swing mode."""
    device = device_zxt_120
    assert device.swing_modes == [6, 7, 8]
    # Internal flag marking ZXT-120 specific handling is enabled.
    assert device._zxt_120 == 1
    # Test set mode
    assert device.values.zxt_120_swing_mode.data == "test3"
    device.set_swing_mode("test_swing_set")
    assert device.values.zxt_120_swing_mode.data == "test_swing_set"
    # Test mode changed
    value_changed(device.values.zxt_120_swing_mode)
    assert device.swing_mode == "test_swing_set"
    device.values.zxt_120_swing_mode.data = "test_swing_updated"
    value_changed(device.values.zxt_120_swing_mode)
    assert device.swing_mode == "test_swing_updated"
def test_temperature_unit(device):
    """Test temperature unit."""
    # Default units (None in the fixture) resolve to Celsius.
    assert device.temperature_unit == TEMP_CELSIUS
    device.values.temperature.units = "F"
    value_changed(device.values.temperature)
    assert device.temperature_unit == TEMP_FAHRENHEIT
    device.values.temperature.units = "C"
    value_changed(device.values.temperature)
    assert device.temperature_unit == TEMP_CELSIUS
def test_data_lists(device):
    """Test data lists from zwave value items."""
    assert device.fan_modes == [3, 4, 5]
    assert device.hvac_modes == [
        HVAC_MODE_OFF,
        HVAC_MODE_HEAT,
        HVAC_MODE_COOL,
        HVAC_MODE_HEAT_COOL,
    ]
    assert device.preset_modes == []
    # With the primary value gone, preset_modes must stay an empty list.
    device.values.primary = None
    assert device.preset_modes == []
def test_data_lists_single_setpoint(device_single_setpoint):
    """Test data lists from zwave value items."""
    device = device_single_setpoint
    assert device.fan_modes == [3, 4, 5]
    # No mode value -> no hvac modes and no presets.
    assert device.hvac_modes == []
    assert device.preset_modes == []
def test_data_lists_single_setpoint_with_mode(device_single_setpoint_with_mode):
    """Test data lists from zwave value items."""
    device = device_single_setpoint_with_mode
    assert device.fan_modes == [3, 4, 5]
    assert device.hvac_modes == [HVAC_MODE_OFF, HVAC_MODE_HEAT]
    assert device.preset_modes == []
def test_data_lists_mapping(device_mapping):
    """Test data lists from zwave value items."""
    device = device_mapping
    # Native strings are mapped: "Full Power"/"Auto" become presets, the
    # rest become lowercase HA hvac modes.
    assert device.hvac_modes == ["off", "cool", "heat", "heat_cool"]
    assert device.preset_modes == ["boost", "none"]
    device.values.primary = None
    assert device.preset_modes == []
def test_target_value_set(device):
    """Test values changed for climate device."""
    assert device.values.setpoint_heating.data == 1
    assert device.values.setpoint_cooling.data == 10
    # No temperature kwarg -> no-op.
    device.set_temperature()
    assert device.values.setpoint_heating.data == 1
    assert device.values.setpoint_cooling.data == 10
    # In heat mode, ATTR_TEMPERATURE targets the heating setpoint.
    device.set_temperature(**{ATTR_TEMPERATURE: 2})
    assert device.values.setpoint_heating.data == 2
    assert device.values.setpoint_cooling.data == 10
    device.set_hvac_mode(HVAC_MODE_COOL)
    value_changed(device.values.primary)
    assert device.values.setpoint_heating.data == 2
    assert device.values.setpoint_cooling.data == 10
    # After switching to cool, ATTR_TEMPERATURE targets the cooling setpoint.
    device.set_temperature(**{ATTR_TEMPERATURE: 9})
    assert device.values.setpoint_heating.data == 2
    assert device.values.setpoint_cooling.data == 9
def test_target_value_set_range(device_heat_cool_range):
    """Test values changed for climate device."""
    device = device_heat_cool_range
    assert device.values.setpoint_heating.data == 1
    assert device.values.setpoint_cooling.data == 10
    # No kwargs -> no-op.
    device.set_temperature()
    assert device.values.setpoint_heating.data == 1
    assert device.values.setpoint_cooling.data == 10
    # Low/high kwargs map to heating/cooling setpoints respectively,
    # individually or together.
    device.set_temperature(**{ATTR_TARGET_TEMP_LOW: 2})
    assert device.values.setpoint_heating.data == 2
    assert device.values.setpoint_cooling.data == 10
    device.set_temperature(**{ATTR_TARGET_TEMP_HIGH: 9})
    assert device.values.setpoint_heating.data == 2
    assert device.values.setpoint_cooling.data == 9
    device.set_temperature(**{ATTR_TARGET_TEMP_LOW: 3, ATTR_TARGET_TEMP_HIGH: 8})
    assert device.values.setpoint_heating.data == 3
    assert device.values.setpoint_cooling.data == 8
def test_target_value_set_range_away(device_heat_cool_away):
    """Test values changed for climate device."""
    device = device_heat_cool_away
    assert device.values.setpoint_heating.data == 2
    assert device.values.setpoint_cooling.data == 9
    assert device.values.setpoint_away_heating.data == 1
    assert device.values.setpoint_away_cooling.data == 10
    # While the away preset is active, only the away setpoints change.
    device.set_preset_mode(PRESET_AWAY)
    device.set_temperature(**{ATTR_TARGET_TEMP_LOW: 0, ATTR_TARGET_TEMP_HIGH: 11})
    assert device.values.setpoint_heating.data == 2
    assert device.values.setpoint_cooling.data == 9
    assert device.values.setpoint_away_heating.data == 0
    assert device.values.setpoint_away_cooling.data == 11
def test_target_value_set_eco(device_heat_eco):
    """Test values changed for climate device."""
    device = device_heat_eco
    assert device.values.setpoint_heating.data == 2
    assert device.values.setpoint_eco_heating.data == 1
    # While the eco preset is active, only the eco setpoint changes.
    device.set_preset_mode("heat econ")
    device.set_temperature(**{ATTR_TEMPERATURE: 0})
    assert device.values.setpoint_heating.data == 2
    assert device.values.setpoint_eco_heating.data == 0
def test_target_value_set_single_setpoint(device_single_setpoint):
    """Test values changed for climate device."""
    device = device_single_setpoint
    # For SETPOINT_THERMOSTAT devices the primary value IS the setpoint.
    assert device.values.primary.data == 1
    device.set_temperature(**{ATTR_TEMPERATURE: 2})
    assert device.values.primary.data == 2
def test_operation_value_set(device):
    """Test values changed for climate device."""
    assert device.values.primary.data == HVAC_MODE_HEAT
    device.set_hvac_mode(HVAC_MODE_COOL)
    assert device.values.primary.data == HVAC_MODE_COOL
    device.set_preset_mode(PRESET_ECO)
    assert device.values.primary.data == PRESET_ECO
    # Clearing the preset falls back to heat_cool, not the previous mode.
    device.set_preset_mode(PRESET_NONE)
    assert device.values.primary.data == HVAC_MODE_HEAT_COOL
    # Without a primary value both setters must be no-ops, not raise.
    device.values.primary = None
    device.set_hvac_mode("test_set_failes")
    assert device.values.primary is None
    device.set_preset_mode("test_set_failes")
    assert device.values.primary is None
def test_operation_value_set_mapping(device_mapping):
    """Test values changed for climate device. Mapping."""
    device = device_mapping
    assert device.values.primary.data == "Heat"
    # HA modes/presets are written back as the device's native strings.
    device.set_hvac_mode(HVAC_MODE_COOL)
    assert device.values.primary.data == "Cool"
    device.set_hvac_mode(HVAC_MODE_OFF)
    assert device.values.primary.data == "Off"
    device.set_preset_mode(PRESET_BOOST)
    assert device.values.primary.data == "Full Power"
    device.set_preset_mode(PRESET_ECO)
    assert device.values.primary.data == "eco"
def test_operation_value_set_unknown(device_unknown):
    """Test values changed for climate device. Unknown."""
    device = device_unknown
    assert device.values.primary.data == "Heat"
    # An unrecognized preset string is written through verbatim.
    device.set_preset_mode("Abcdefg")
    assert device.values.primary.data == "Abcdefg"
    device.set_preset_mode(PRESET_NONE)
    assert device.values.primary.data == HVAC_MODE_HEAT_COOL
def test_operation_value_set_heat_cool(device_heat_cool):
    """Test values changed for climate device. Heat/Cool only."""
    device = device_heat_cool
    assert device.values.primary.data == HVAC_MODE_HEAT
    # Clearing an eco preset falls back to the matching base mode
    # (heat for "Heat Eco", cool for "Cool Eco").
    device.set_preset_mode("Heat Eco")
    assert device.values.primary.data == "Heat Eco"
    device.set_preset_mode(PRESET_NONE)
    assert device.values.primary.data == HVAC_MODE_HEAT
    device.set_preset_mode("Cool Eco")
    assert device.values.primary.data == "Cool Eco"
    device.set_preset_mode(PRESET_NONE)
    assert device.values.primary.data == HVAC_MODE_COOL
def test_fan_mode_value_set(device):
    """Test values changed for climate device."""
    assert device.values.fan_mode.data == "test2"
    device.set_fan_mode("test_fan_set")
    assert device.values.fan_mode.data == "test_fan_set"
    # Without a fan_mode value the setter must be a no-op, not raise.
    device.values.fan_mode = None
    device.set_fan_mode("test_fan_set_failes")
    assert device.values.fan_mode is None
def test_target_value_changed(device):
    """Test values changed for climate device."""
    assert device.target_temperature == 1
    device.values.setpoint_heating.data = 2
    value_changed(device.values.setpoint_heating)
    assert device.target_temperature == 2
    # In cool mode the target follows the cooling setpoint instead.
    device.values.primary.data = HVAC_MODE_COOL
    value_changed(device.values.primary)
    assert device.target_temperature == 10
    device.values.setpoint_cooling.data = 9
    value_changed(device.values.setpoint_cooling)
    assert device.target_temperature == 9
def test_target_range_changed(device_heat_cool_range):
    """Test values changed for climate device."""
    device = device_heat_cool_range
    # In heat_cool mode low/high track heating/cooling setpoints.
    assert device.target_temperature_low == 1
    assert device.target_temperature_high == 10
    device.values.setpoint_heating.data = 2
    value_changed(device.values.setpoint_heating)
    assert device.target_temperature_low == 2
    assert device.target_temperature_high == 10
    device.values.setpoint_cooling.data = 9
    value_changed(device.values.setpoint_cooling)
    assert device.target_temperature_low == 2
    assert device.target_temperature_high == 9
def test_target_changed_preset_range(device_heat_cool_away):
    """Test values changed for climate device."""
    device = device_heat_cool_away
    assert device.target_temperature_low == 2
    assert device.target_temperature_high == 9
    # Switching to the away preset swaps in the away setpoints.
    device.values.primary.data = PRESET_AWAY
    value_changed(device.values.primary)
    assert device.target_temperature_low == 1
    assert device.target_temperature_high == 10
    device.values.setpoint_away_heating.data = 0
    value_changed(device.values.setpoint_away_heating)
    device.values.setpoint_away_cooling.data = 11
    value_changed(device.values.setpoint_away_cooling)
    assert device.target_temperature_low == 0
    assert device.target_temperature_high == 11
    # Leaving away restores the normal setpoint pair.
    device.values.primary.data = HVAC_MODE_HEAT_COOL
    value_changed(device.values.primary)
    assert device.target_temperature_low == 2
    assert device.target_temperature_high == 9
def test_target_changed_eco(device_heat_eco):
    """Test values changed for climate device."""
    device = device_heat_eco
    assert device.target_temperature == 2
    # Eco preset switches the target to the eco setpoint and back.
    device.values.primary.data = "heat econ"
    value_changed(device.values.primary)
    assert device.target_temperature == 1
    device.values.setpoint_eco_heating.data = 0
    value_changed(device.values.setpoint_eco_heating)
    assert device.target_temperature == 0
    device.values.primary.data = HVAC_MODE_HEAT
    value_changed(device.values.primary)
    assert device.target_temperature == 2
def test_target_changed_with_mode(device):
    """Test values changed for climate device."""
    assert device.hvac_mode == HVAC_MODE_HEAT
    assert device.target_temperature == 1
    device.values.primary.data = HVAC_MODE_COOL
    value_changed(device.values.primary)
    assert device.target_temperature == 10
    # heat_cool exposes a low/high range instead of a single target.
    device.values.primary.data = HVAC_MODE_HEAT_COOL
    value_changed(device.values.primary)
    assert device.target_temperature_low == 1
    assert device.target_temperature_high == 10
def test_target_value_changed_single_setpoint(device_single_setpoint):
    """Test values changed for climate device."""
    device = device_single_setpoint
    assert device.target_temperature == 1
    device.values.primary.data = 2
    value_changed(device.values.primary)
    assert device.target_temperature == 2
def test_temperature_value_changed(device):
    """Test values changed for climate device."""
    assert device.current_temperature == 5
    device.values.temperature.data = 3
    value_changed(device.values.temperature)
    assert device.current_temperature == 3
def test_operation_value_changed(device):
    """Test values changed for climate device."""
    assert device.hvac_mode == HVAC_MODE_HEAT
    assert device.preset_mode == PRESET_NONE
    device.values.primary.data = HVAC_MODE_COOL
    value_changed(device.values.primary)
    assert device.hvac_mode == HVAC_MODE_COOL
    assert device.preset_mode == PRESET_NONE
    device.values.primary.data = HVAC_MODE_OFF
    value_changed(device.values.primary)
    assert device.hvac_mode == HVAC_MODE_OFF
    assert device.preset_mode == PRESET_NONE
    # Missing primary value falls back to heat_cool / no preset.
    device.values.primary = None
    assert device.hvac_mode == HVAC_MODE_HEAT_COOL
    assert device.preset_mode == PRESET_NONE
def test_operation_value_changed_preset(device_mapping):
    """Test preset changed for climate device."""
    device = device_mapping
    assert device.hvac_mode == HVAC_MODE_HEAT
    assert device.preset_mode == PRESET_NONE
    # A preset value reports hvac_mode heat_cool with the preset active.
    device.values.primary.data = PRESET_ECO
    value_changed(device.values.primary)
    assert device.hvac_mode == HVAC_MODE_HEAT_COOL
    assert device.preset_mode == PRESET_ECO
def test_operation_value_changed_mapping(device_mapping):
    """Test values changed for climate device. Mapping."""
    device = device_mapping
    assert device.hvac_mode == HVAC_MODE_HEAT
    assert device.preset_mode == PRESET_NONE
    # Native strings from the node map to the corresponding HA hvac modes.
    device.values.primary.data = "Off"
    value_changed(device.values.primary)
    assert device.hvac_mode == HVAC_MODE_OFF
    assert device.preset_mode == PRESET_NONE
    device.values.primary.data = "Cool"
    value_changed(device.values.primary)
    assert device.hvac_mode == HVAC_MODE_COOL
    assert device.preset_mode == PRESET_NONE
def test_operation_value_changed_mapping_preset(device_mapping):
    """Test values changed for climate device. Mapping with presets."""
    device = device_mapping
    assert device.hvac_mode == HVAC_MODE_HEAT
    assert device.preset_mode == PRESET_NONE
    # "Full Power" maps to the boost preset with hvac_mode heat_cool.
    device.values.primary.data = "Full Power"
    value_changed(device.values.primary)
    assert device.hvac_mode == HVAC_MODE_HEAT_COOL
    assert device.preset_mode == PRESET_BOOST
    device.values.primary = None
    assert device.hvac_mode == HVAC_MODE_HEAT_COOL
    assert device.preset_mode == PRESET_NONE
def test_operation_value_changed_unknown(device_unknown):
    """Test preset changed for climate device. Unknown."""
    device = device_unknown
    assert device.hvac_mode == HVAC_MODE_HEAT
    assert device.preset_mode == PRESET_NONE
    # Unrecognized mode strings surface as a preset, not an hvac mode.
    device.values.primary.data = "Abcdefg"
    value_changed(device.values.primary)
    assert device.hvac_mode == HVAC_MODE_HEAT_COOL
    assert device.preset_mode == "Abcdefg"
def test_operation_value_changed_heat_cool(device_heat_cool):
    """Test preset changed for climate device. Heat/Cool only."""
    device = device_heat_cool
    assert device.hvac_mode == HVAC_MODE_HEAT
    assert device.preset_mode == PRESET_NONE
    # Eco variants report the matching base hvac mode plus the raw preset.
    device.values.primary.data = "Cool Eco"
    value_changed(device.values.primary)
    assert device.hvac_mode == HVAC_MODE_COOL
    assert device.preset_mode == "Cool Eco"
    device.values.primary.data = "Heat Eco"
    value_changed(device.values.primary)
    assert device.hvac_mode == HVAC_MODE_HEAT
    assert device.preset_mode == "Heat Eco"
def test_fan_mode_value_changed(device):
    """Test values changed for climate device."""
    assert device.fan_mode == "test2"
    device.values.fan_mode.data = "test_updated_fan"
    value_changed(device.values.fan_mode)
    assert device.fan_mode == "test_updated_fan"
def test_hvac_action_value_changed(device):
    """Test values changed for climate device."""
    assert device.hvac_action == CURRENT_HVAC_HEAT
    device.values.operating_state.data = CURRENT_HVAC_COOL
    value_changed(device.values.operating_state)
    assert device.hvac_action == CURRENT_HVAC_COOL
def test_hvac_action_value_changed_mapping(device_mapping):
    """Test values changed for climate device."""
    device = device_mapping
    assert device.hvac_action == CURRENT_HVAC_HEAT
    # Native operating-state string maps to the HA hvac action constant.
    device.values.operating_state.data = "cooling"
    value_changed(device.values.operating_state)
    assert device.hvac_action == CURRENT_HVAC_COOL
def test_hvac_action_value_changed_unknown(device_unknown):
    """Test values changed for climate device."""
    device = device_unknown
    # Unmapped operating-state strings are passed through unchanged.
    assert device.hvac_action == "test4"
    device.values.operating_state.data = "another_hvac_action"
    value_changed(device.values.operating_state)
    assert device.hvac_action == "another_hvac_action"
def test_fan_action_value_changed(device):
    """Test values changed for climate device."""
    # Fan action is exposed via a state attribute, not a dedicated property.
    assert device.device_state_attributes[climate.ATTR_FAN_ACTION] == 7
    device.values.fan_action.data = 9
    value_changed(device.values.fan_action)
    assert device.device_state_attributes[climate.ATTR_FAN_ACTION] == 9
def test_aux_heat_unsupported_set(device):
    """Test aux heat for climate device."""
    # Device without an "Aux Heat" mode: both calls must be no-ops.
    assert device.values.primary.data == HVAC_MODE_HEAT
    device.turn_aux_heat_on()
    assert device.values.primary.data == HVAC_MODE_HEAT
    device.turn_aux_heat_off()
    assert device.values.primary.data == HVAC_MODE_HEAT
def test_aux_heat_unsupported_value_changed(device):
    """Test aux heat for climate device."""
    # Unsupported devices report None (not False) for is_aux_heat.
    assert device.is_aux_heat is None
    device.values.primary.data = HVAC_MODE_HEAT
    value_changed(device.values.primary)
    assert device.is_aux_heat is None
def test_aux_heat_set(device_aux_heat):
    """Test aux heat for climate device."""
    device = device_aux_heat
    assert device.values.primary.data == HVAC_MODE_HEAT
    # Turning aux heat on writes the device's aux-heat Z-Wave mode;
    # turning it off restores plain heat.
    device.turn_aux_heat_on()
    assert device.values.primary.data == AUX_HEAT_ZWAVE_MODE
    device.turn_aux_heat_off()
    assert device.values.primary.data == HVAC_MODE_HEAT
def test_aux_heat_value_changed(device_aux_heat):
    """Test aux heat for climate device."""
    device = device_aux_heat
    assert device.is_aux_heat is False
    device.values.primary.data = AUX_HEAT_ZWAVE_MODE
    value_changed(device.values.primary)
    assert device.is_aux_heat is True
    device.values.primary.data = HVAC_MODE_HEAT
    value_changed(device.values.primary)
    assert device.is_aux_heat is False
| mit |
inspirehep/sqlalchemy | lib/sqlalchemy/ext/associationproxy.py | 45 | 33253 | # ext/associationproxy.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Contain the ``AssociationProxy`` class.
The ``AssociationProxy`` is a Python property object which provides
transparent proxied access to the endpoint of an association object.
See the example ``examples/association/proxied_association.py``.
"""
import itertools
import operator
import weakref
from .. import exc, orm, util
from ..orm import collections, interfaces
from ..sql import not_, or_
def association_proxy(target_collection, attr, **kw):
    r"""Return a Python property implementing a view of a target
    attribute which references an attribute on members of the
    target.

    The returned value is an instance of :class:`.AssociationProxy`.

    The proxy presents a relationship as a collection of simpler
    values, or as a scalar value. The proxied property mimics the
    collection type of the target (list, dict or set) or, for a
    one-to-one relationship, a plain scalar value.

    :param target_collection: Name of the attribute to proxy to.
        Typically mapped by :func:`~sqlalchemy.orm.relationship`
        to a target collection, but a many-to-one or non-scalar
        relationship works as well.

    :param attr: Attribute on the associated instance(s) to proxy.
        Given a target collection of [obj1, obj2], the proxied list
        is [getattr(obj1, *attr*), getattr(obj2, *attr*)]; for a
        uselist=False relationship it is simply getattr(obj, *attr*).

    :param \*\*kw: Additional keyword arguments forwarded to the
        :class:`.AssociationProxy` constructor, notably ``creator``
        (a callable producing new associated instances when items
        are added; called with the value for list/set collections,
        or with key and value for dicts), ``getset_factory``,
        ``proxy_factory`` and ``proxy_bulk_set``.
    """
    # Thin factory: all behavior lives on the AssociationProxy descriptor.
    return AssociationProxy(target_collection, attr, **kw)
ASSOCIATION_PROXY = util.symbol('ASSOCIATION_PROXY')
"""Symbol indicating an :class:`InspectionAttr` that's
of type :class:`.AssociationProxy`.
Is assigned to the :attr:`.InspectionAttr.extension_type`
attibute.
"""
class AssociationProxy(interfaces.InspectionAttrInfo):
"""A descriptor that presents a read/write view of an object attribute."""
is_attribute = False
extension_type = ASSOCIATION_PROXY
def __init__(self, target_collection, attr, creator=None,
getset_factory=None, proxy_factory=None,
proxy_bulk_set=None):
"""Construct a new :class:`.AssociationProxy`.
The :func:`.association_proxy` function is provided as the usual
entrypoint here, though :class:`.AssociationProxy` can be instantiated
and/or subclassed directly.
:param target_collection: Name of the collection we'll proxy to,
usually created with :func:`.relationship`.
:param attr: Attribute on the collected instances we'll proxy
for. For example, given a target collection of [obj1, obj2], a
list created by this proxy property would look like
[getattr(obj1, attr), getattr(obj2, attr)]
:param creator: Optional. When new items are added to this proxied
collection, new instances of the class collected by the target
collection will be created. For list and set collections, the
target class constructor will be called with the 'value' for the
new instance. For dict types, two arguments are passed:
key and value.
If you want to construct instances differently, supply a 'creator'
function that takes arguments as above and returns instances.
:param getset_factory: Optional. Proxied attribute access is
automatically handled by routines that get and set values based on
the `attr` argument for this proxy.
If you would like to customize this behavior, you may supply a
`getset_factory` callable that produces a tuple of `getter` and
`setter` functions. The factory is called with two arguments, the
abstract type of the underlying collection and this proxy instance.
:param proxy_factory: Optional. The type of collection to emulate is
determined by sniffing the target collection. If your collection
type can't be determined by duck typing or you'd like to use a
different collection implementation, you may supply a factory
function to produce those collections. Only applicable to
non-scalar relationships.
:param proxy_bulk_set: Optional, use with proxy_factory. See
the _set() method for details.
"""
self.target_collection = target_collection
self.value_attr = attr
self.creator = creator
self.getset_factory = getset_factory
self.proxy_factory = proxy_factory
self.proxy_bulk_set = proxy_bulk_set
self.owning_class = None
self.key = '_%s_%s_%s' % (
type(self).__name__, target_collection, id(self))
self.collection_class = None
@property
def remote_attr(self):
"""The 'remote' :class:`.MapperProperty` referenced by this
:class:`.AssociationProxy`.
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.attr`
:attr:`.AssociationProxy.local_attr`
"""
return getattr(self.target_class, self.value_attr)
@property
def local_attr(self):
"""The 'local' :class:`.MapperProperty` referenced by this
:class:`.AssociationProxy`.
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.attr`
:attr:`.AssociationProxy.remote_attr`
"""
return getattr(self.owning_class, self.target_collection)
@property
def attr(self):
"""Return a tuple of ``(local_attr, remote_attr)``.
This attribute is convenient when specifying a join
using :meth:`.Query.join` across two relationships::
sess.query(Parent).join(*Parent.proxied.attr)
.. versionadded:: 0.7.3
See also:
:attr:`.AssociationProxy.local_attr`
:attr:`.AssociationProxy.remote_attr`
"""
return (self.local_attr, self.remote_attr)
def _get_property(self):
    # Look up the relationship property named ``target_collection`` on
    # the owning class's mapper.
    return (orm.class_mapper(self.owning_class).
            get_property(self.target_collection))
@util.memoized_property
def target_class(self):
    """The intermediary class handled by this :class:`.AssociationProxy`.

    Intercepted append/set/assignment events will result
    in the generation of new instances of this class.

    """
    # Memoized: resolved once from the relationship's mapper.
    return self._get_property().mapper.class_
@util.memoized_property
def scalar(self):
    """Return ``True`` if this :class:`.AssociationProxy` proxies a scalar
    relationship on the local side."""
    scalar = not self._get_property().uselist
    if scalar:
        # Side effect on first evaluation: install the
        # _scalar_get/_scalar_set accessors used by __get__/__set__.
        self._initialize_scalar_accessors()
    return scalar
@util.memoized_property
def _value_is_scalar(self):
    # True when the *remote* attribute (value_attr) is itself a scalar
    # (non-uselist) relationship/attribute on the target class.
    return not self._get_property().\
        mapper.get_property(self.value_attr).uselist
@util.memoized_property
def _target_is_object(self):
    # True when value_attr refers to mapped objects (relationship)
    # rather than a plain column attribute.
    return getattr(self.target_class, self.value_attr).impl.uses_objects
def __get__(self, obj, class_):
    # Descriptor protocol.  Class-level access returns the proxy itself
    # (for query expressions); instance access returns the proxied
    # scalar value or a caching collection wrapper.
    if self.owning_class is None:
        # First access wins: remember which class owns this proxy.
        self.owning_class = class_ and class_ or type(obj)
    if obj is None:
        return self

    if self.scalar:
        # Scalar relationship: fetch the related object and extract
        # value_attr from it (None-safe via _scalar_get).
        target = getattr(obj, self.target_collection)
        return self._scalar_get(target)
    else:
        try:
            # If the owning instance is reborn (orm session resurrect,
            # etc.), refresh the proxy cache.
            creator_id, proxy = getattr(obj, self.key)
            if id(obj) == creator_id:
                return proxy
        except AttributeError:
            pass
        # Build a new collection wrapper over a weakref-based lazy
        # accessor and cache it on the instance, keyed by id(obj).
        proxy = self._new(_lazy_collection(obj, self.target_collection))
        setattr(obj, self.key, (id(obj), proxy))
        return proxy
def __set__(self, obj, values):
    if self.owning_class is None:
        self.owning_class = type(obj)

    if self.scalar:
        # Scalar: create the association object on first assignment,
        # otherwise write through to its value_attr.
        creator = self.creator and self.creator or self.target_class
        target = getattr(obj, self.target_collection)
        if target is None:
            setattr(obj, self.target_collection, creator(values))
        else:
            self._scalar_set(target, values)
    else:
        # Collection: mutate the existing proxy in place so the backing
        # relationship collection object is preserved.
        proxy = self.__get__(obj, None)
        if proxy is not values:
            proxy.clear()
            self._set(proxy, values)
def __delete__(self, obj):
    if self.owning_class is None:
        self.owning_class = type(obj)
    # Removes only the cached proxy wrapper stored under self.key; the
    # underlying relationship attribute itself is not deleted here.
    delattr(obj, self.key)
def _initialize_scalar_accessors(self):
    # Resolve the (getter, setter) pair used for scalar proxying,
    # honoring a user-supplied getset_factory when present.
    if self.getset_factory:
        get, set = self.getset_factory(None, self)
    else:
        get, set = self._default_getset(None)
    self._scalar_get, self._scalar_set = get, set
def _default_getset(self, collection_class):
    """Build the default (getter, setter) closures over ``value_attr``.

    The getter is None-safe (returns None for a missing association
    object); dict collections receive an ``(obj, key, value)`` setter,
    all others an ``(obj, value)`` setter.
    """
    attr = self.value_attr
    _getter = operator.attrgetter(attr)
    getter = lambda target: _getter(target) if target is not None else None
    if collection_class is dict:
        setter = lambda o, k, v: setattr(o, attr, v)
    else:
        setter = lambda o, v: setattr(o, attr, v)
    return getter, setter
def _new(self, lazy_collection):
    """Create the collection wrapper appropriate for the backing
    collection's duck type (list, dict or set)."""
    creator = self.creator and self.creator or self.target_class
    # Sniff the backing collection to pick the proxy implementation.
    self.collection_class = util.duck_type_collection(lazy_collection())

    if self.proxy_factory:
        # User-supplied collection implementation takes precedence.
        return self.proxy_factory(
            lazy_collection, creator, self.value_attr, self)

    if self.getset_factory:
        getter, setter = self.getset_factory(self.collection_class, self)
    else:
        getter, setter = self._default_getset(self.collection_class)

    if self.collection_class is list:
        return _AssociationList(
            lazy_collection, creator, getter, setter, self)
    elif self.collection_class is dict:
        return _AssociationDict(
            lazy_collection, creator, getter, setter, self)
    elif self.collection_class is set:
        return _AssociationSet(
            lazy_collection, creator, getter, setter, self)
    else:
        raise exc.ArgumentError(
            'could not guess which interface to use for '
            'collection_class "%s" backing "%s"; specify a '
            'proxy_factory and proxy_bulk_set manually' %
            (self.collection_class.__name__, self.target_collection))
def _inflate(self, proxy):
    # Re-attach creator/getter/setter to a proxy restored from pickle
    # (these callables are not pickled; see
    # _AssociationCollection.__setstate__).
    creator = self.creator and self.creator or self.target_class

    if self.getset_factory:
        getter, setter = self.getset_factory(self.collection_class, self)
    else:
        getter, setter = self._default_getset(self.collection_class)

    proxy.creator = creator
    proxy.getter = getter
    proxy.setter = setter
def _set(self, proxy, values):
    """Bulk-load ``values`` into a (just-cleared) collection proxy."""
    if self.proxy_bulk_set:
        self.proxy_bulk_set(proxy, values)
    elif self.collection_class is list:
        proxy.extend(values)
    elif self.collection_class is dict:
        proxy.update(values)
    elif self.collection_class is set:
        proxy.update(values)
    else:
        raise exc.ArgumentError(
            'no proxy_bulk_set supplied for custom '
            'collection_class implementation')
@property
def _comparator(self):
    # Comparator of the local relationship; used to build the EXISTS
    # expressions in any()/has()/contains()/__eq__.
    return self._get_property().comparator
def any(self, criterion=None, **kwargs):
    """Produce a proxied 'any' expression using EXISTS.

    This expression will be a composed product
    using the :meth:`.RelationshipProperty.Comparator.any`
    and/or :meth:`.RelationshipProperty.Comparator.has`
    operators of the underlying proxied attributes.

    """
    if self._target_is_object:
        # value_attr is itself a relationship: nest has()/any()
        # depending on whether it is scalar on the remote side.
        if self._value_is_scalar:
            value_expr = getattr(
                self.target_class, self.value_attr).has(
                criterion, **kwargs)
        else:
            value_expr = getattr(
                self.target_class, self.value_attr).any(
                criterion, **kwargs)
    else:
        # Column-targeted proxy: apply the criterion directly.
        value_expr = criterion

    # check _value_is_scalar here, otherwise
    # we're scalar->scalar - call .any() so that
    # the "can't call any() on a scalar" msg is raised.
    if self.scalar and not self._value_is_scalar:
        return self._comparator.has(
            value_expr
        )
    else:
        return self._comparator.any(
            value_expr
        )
def has(self, criterion=None, **kwargs):
    """Produce a proxied 'has' expression using EXISTS.

    This expression will be a composed product
    using the :meth:`.RelationshipProperty.Comparator.any`
    and/or :meth:`.RelationshipProperty.Comparator.has`
    operators of the underlying proxied attributes.

    """
    if self._target_is_object:
        return self._comparator.has(
            getattr(self.target_class, self.value_attr).
            has(criterion, **kwargs)
        )
    else:
        # Column-targeted proxies only support the bare form; compare
        # with == instead of passing a criterion.
        if criterion is not None or kwargs:
            raise exc.ArgumentError(
                "Non-empty has() not allowed for "
                "column-targeted association proxy; use ==")
        return self._comparator.has()
def contains(self, obj):
    """Produce a proxied 'contains' expression using EXISTS.

    This expression will be a composed product
    using the :meth:`.RelationshipProperty.Comparator.any`
    , :meth:`.RelationshipProperty.Comparator.has`,
    and/or :meth:`.RelationshipProperty.Comparator.contains`
    operators of the underlying proxied attributes.
    """
    if self.scalar and not self._value_is_scalar:
        # scalar->collection: EXISTS on the scalar hop, contains() on
        # the remote collection.
        return self._comparator.has(
            getattr(self.target_class, self.value_attr).contains(obj)
        )
    else:
        return self._comparator.any(**{self.value_attr: obj})
def __eq__(self, obj):
    # note the has() here will fail for collections; eq_()
    # is only allowed with a scalar.
    if obj is None:
        # proxy == None must also match rows where the local
        # relationship itself is absent, hence the OR.
        return or_(
            self._comparator.has(**{self.value_attr: obj}),
            self._comparator == None
        )
    else:
        return self._comparator.has(**{self.value_attr: obj})
def __ne__(self, obj):
    # note the has() here will fail for collections; eq_()
    # is only allowed with a scalar.
    return self._comparator.has(
        getattr(self.target_class, self.value_attr) != obj)
class _lazy_collection(object):
def __init__(self, obj, target):
self.ref = weakref.ref(obj)
self.target = target
def __call__(self):
obj = self.ref()
if obj is None:
raise exc.InvalidRequestError(
"stale association proxy, parent object has gone out of "
"scope")
return getattr(obj, self.target)
def __getstate__(self):
return {'obj': self.ref(), 'target': self.target}
def __setstate__(self, state):
self.ref = weakref.ref(state['obj'])
self.target = state['target']
class _AssociationCollection(object):
def __init__(self, lazy_collection, creator, getter, setter, parent):
"""Constructs an _AssociationCollection.
This will always be a subclass of either _AssociationList,
_AssociationSet, or _AssociationDict.
lazy_collection
A callable returning a list-based collection of entities (usually an
object attribute managed by a SQLAlchemy relationship())
creator
A function that creates new target entities. Given one parameter:
value. This assertion is assumed::
obj = creator(somevalue)
assert getter(obj) == somevalue
getter
A function. Given an associated object, return the 'value'.
setter
A function. Given an associated object and a value, store that
value on the object.
"""
self.lazy_collection = lazy_collection
self.creator = creator
self.getter = getter
self.setter = setter
self.parent = parent
col = property(lambda self: self.lazy_collection())
def __len__(self):
return len(self.col)
def __bool__(self):
return bool(self.col)
__nonzero__ = __bool__
def __getstate__(self):
return {'parent': self.parent, 'lazy_collection': self.lazy_collection}
def __setstate__(self, state):
self.parent = state['parent']
self.lazy_collection = state['lazy_collection']
self.parent._inflate(self)
class _AssociationList(_AssociationCollection):
    """Generic, converting, list-to-list proxy.

    Presents the values extracted from a backing list of association
    objects as a mutable list; mutations create or update the backing
    association objects via creator/setter.
    """

    def _create(self, value):
        # Build a new association object wrapping ``value``.
        return self.creator(value)

    def _get(self, object):
        # Extract the proxied value from an association object.
        return self.getter(object)

    def _set(self, object, value):
        # Store ``value`` onto an existing association object.
        return self.setter(object, value)

    def __getitem__(self, index):
        if not isinstance(index, slice):
            return self._get(self.col[index])
        else:
            return [self._get(member) for member in self.col[index]]

    def __setitem__(self, index, value):
        if not isinstance(index, slice):
            self._set(self.col[index], value)
        else:
            # Normalize the slice's stop against the current length.
            if index.stop is None:
                stop = len(self)
            elif index.stop < 0:
                stop = len(self) + index.stop
            else:
                stop = index.stop
            step = index.step or 1
            start = index.start or 0
            rng = list(range(index.start or 0, stop, step))
            if step == 1:
                # Simple slice: delete the old run, then insert the new
                # values at the same position.
                for i in rng:
                    del self[start]
                i = start
                for item in value:
                    self.insert(i, item)
                    i += 1
            else:
                # Extended slice: like builtin list, sizes must match.
                if len(value) != len(rng):
                    raise ValueError(
                        "attempt to assign sequence of size %s to "
                        "extended slice of size %s" % (len(value),
                                                       len(rng)))
                for i, item in zip(rng, value):
                    self._set(self.col[i], item)

    def __delitem__(self, index):
        del self.col[index]

    def __contains__(self, value):
        for member in self.col:
            # testlib.pragma exempt:__eq__
            if self._get(member) == value:
                return True
        return False

    def __getslice__(self, start, end):
        # Python 2 only; Python 3 routes slicing through __getitem__.
        return [self._get(member) for member in self.col[start:end]]

    def __setslice__(self, start, end, values):
        # Python 2 only.
        members = [self._create(v) for v in values]
        self.col[start:end] = members

    def __delslice__(self, start, end):
        # Python 2 only.
        del self.col[start:end]

    def __iter__(self):
        """Iterate over proxied values.

        For the actual domain objects, iterate over .col instead or
        just use the underlying collection directly from its property
        on the parent.

        """
        for member in self.col:
            yield self._get(member)
        # Bug fix: the trailing ``raise StopIteration`` that used to sit
        # here raises RuntimeError inside a generator under PEP 479
        # (Python 3.7+); falling off the end is the correct way to
        # terminate iteration.

    def append(self, value):
        item = self._create(value)
        self.col.append(item)

    def count(self, value):
        return sum([1 for _ in
                    util.itertools_filter(lambda v: v == value, iter(self))])

    def extend(self, values):
        for v in values:
            self.append(v)

    def insert(self, index, value):
        self.col[index:index] = [self._create(value)]

    def pop(self, index=-1):
        return self.getter(self.col.pop(index))

    def remove(self, value):
        for i, val in enumerate(self):
            if val == value:
                del self.col[i]
                return
        raise ValueError("value not in list")

    def reverse(self):
        """Not supported, use reversed(mylist)"""
        raise NotImplementedError

    def sort(self):
        """Not supported, use sorted(mylist)"""
        raise NotImplementedError

    def clear(self):
        del self.col[0:len(self.col)]

    def __eq__(self, other):
        return list(self) == other

    def __ne__(self, other):
        return list(self) != other

    def __lt__(self, other):
        return list(self) < other

    def __le__(self, other):
        return list(self) <= other

    def __gt__(self, other):
        return list(self) > other

    def __ge__(self, other):
        return list(self) >= other

    def __cmp__(self, other):
        # Python 2 only.
        return cmp(list(self), other)

    def __add__(self, iterable):
        try:
            other = list(iterable)
        except TypeError:
            return NotImplemented
        return list(self) + other

    def __radd__(self, iterable):
        try:
            other = list(iterable)
        except TypeError:
            return NotImplemented
        return other + list(self)

    def __mul__(self, n):
        if not isinstance(n, int):
            return NotImplemented
        return list(self) * n
    __rmul__ = __mul__

    def __iadd__(self, iterable):
        self.extend(iterable)
        return self

    def __imul__(self, n):
        # unlike a regular list *=, proxied __imul__ will generate unique
        # backing objects for each copy. *= on proxied lists is a bit of
        # a stretch anyhow, and this interpretation of the __imul__ contract
        # is more plausibly useful than copying the backing objects.
        if not isinstance(n, int):
            return NotImplemented
        if n == 0:
            self.clear()
        elif n > 1:
            self.extend(list(self) * (n - 1))
        return self

    def copy(self):
        return list(self)

    def __repr__(self):
        return repr(list(self))

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # Copy docstrings for list-compatible methods from the builtin list.
    for func_name, func in list(locals().items()):
        if (util.callable(func) and func.__name__ == func_name and
                not func.__doc__ and hasattr(list, func_name)):
            func.__doc__ = getattr(list, func_name).__doc__
    del func_name, func
# Sentinel distinguishing "no default supplied" from an explicit None in
# _AssociationDict.pop().
_NotProvided = util.symbol('_NotProvided')
class _AssociationDict(_AssociationCollection):
    """Generic, converting, dict-to-dict proxy.

    Keys pass through unchanged; values are extracted from / stored on
    the association objects held in the backing dict.
    """

    def _create(self, key, value):
        return self.creator(key, value)

    def _get(self, object):
        return self.getter(object)

    def _set(self, object, key, value):
        return self.setter(object, key, value)

    def __getitem__(self, key):
        return self._get(self.col[key])

    def __setitem__(self, key, value):
        if key in self.col:
            # Existing association object: write through with setter.
            self._set(self.col[key], key, value)
        else:
            self.col[key] = self._create(key, value)

    def __delitem__(self, key):
        del self.col[key]

    def __contains__(self, key):
        # testlib.pragma exempt:__hash__
        return key in self.col

    def has_key(self, key):
        # testlib.pragma exempt:__hash__
        return key in self.col

    def __iter__(self):
        return iter(self.col.keys())

    def clear(self):
        self.col.clear()

    def __eq__(self, other):
        return dict(self) == other

    def __ne__(self, other):
        return dict(self) != other

    def __lt__(self, other):
        return dict(self) < other

    def __le__(self, other):
        return dict(self) <= other

    def __gt__(self, other):
        return dict(self) > other

    def __ge__(self, other):
        return dict(self) >= other

    def __cmp__(self, other):
        # Python 2 only.
        return cmp(dict(self), other)

    def __repr__(self):
        return repr(dict(self.items()))

    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default

    def setdefault(self, key, default=None):
        if key not in self.col:
            self.col[key] = self._create(key, default)
            return default
        else:
            return self[key]

    def keys(self):
        return self.col.keys()

    if util.py2k:
        def iteritems(self):
            return ((key, self._get(self.col[key])) for key in self.col)

        def itervalues(self):
            return (self._get(self.col[key]) for key in self.col)

        def iterkeys(self):
            return self.col.iterkeys()

        def values(self):
            return [self._get(member) for member in self.col.values()]

        def items(self):
            return [(k, self._get(self.col[k])) for k in self]
    else:
        def items(self):
            return ((key, self._get(self.col[key])) for key in self.col)

        def values(self):
            return (self._get(self.col[key]) for key in self.col)

    def pop(self, key, default=_NotProvided):
        if default is _NotProvided:
            member = self.col.pop(key)
        else:
            # Bug fix: previously a missing key fell through to
            # ``self._get(default)``, applying the value getter to the
            # caller's default.  Per dict semantics the default must be
            # returned as-is.
            if key not in self.col:
                return default
            member = self.col.pop(key)
        return self._get(member)

    def popitem(self):
        item = self.col.popitem()
        return (item[0], self._get(item[1]))

    def update(self, *a, **kw):
        if len(a) > 1:
            raise TypeError('update expected at most 1 arguments, got %i' %
                            len(a))
        elif len(a) == 1:
            seq_or_map = a[0]
            # discern dict from sequence - took the advice from
            # http://www.voidspace.org.uk/python/articles/duck_typing.shtml
            # still not perfect :(
            if hasattr(seq_or_map, 'keys'):
                for item in seq_or_map:
                    self[item] = seq_or_map[item]
            else:
                try:
                    for k, v in seq_or_map:
                        self[k] = v
                except ValueError:
                    raise ValueError(
                        "dictionary update sequence "
                        "requires 2-element tuples")

        # Bug fix: iterating ``kw`` directly yields only the keyword
        # *names*; ``for key, value in kw`` then tries to unpack each
        # name string and raises ValueError.  .items() yields the
        # (key, value) pairs that were intended.
        for key, value in kw.items():
            self[key] = value

    def copy(self):
        return dict(self.items())

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # Copy docstrings for dict-compatible methods from the builtin dict.
    for func_name, func in list(locals().items()):
        if (util.callable(func) and func.__name__ == func_name and
                not func.__doc__ and hasattr(dict, func_name)):
            func.__doc__ = getattr(dict, func_name).__doc__
    del func_name, func
class _AssociationSet(_AssociationCollection):
    """Generic, converting, set-to-set proxy.

    Presents the values extracted from a backing set of association
    objects as a mutable set.
    """

    def _create(self, value):
        return self.creator(value)

    def _get(self, object):
        return self.getter(object)

    def _set(self, object, value):
        return self.setter(object, value)

    def __len__(self):
        return len(self.col)

    def __bool__(self):
        if self.col:
            return True
        else:
            return False

    # Python 2 truth-protocol alias.
    __nonzero__ = __bool__

    def __contains__(self, value):
        for member in self.col:
            # testlib.pragma exempt:__eq__
            if self._get(member) == value:
                return True
        return False

    def __iter__(self):
        """Iterate over proxied values.

        For the actual domain objects, iterate over .col instead or just use
        the underlying collection directly from its property on the parent.

        """
        for member in self.col:
            yield self._get(member)
        # Bug fix: the trailing ``raise StopIteration`` that used to sit
        # here raises RuntimeError inside a generator under PEP 479
        # (Python 3.7+); falling off the end is the correct way to
        # terminate iteration.

    def add(self, value):
        if value not in self:
            self.col.add(self._create(value))

    # for discard and remove, choosing a more expensive check strategy rather
    # than call self.creator()
    def discard(self, value):
        for member in self.col:
            if self._get(member) == value:
                self.col.discard(member)
                break

    def remove(self, value):
        for member in self.col:
            if self._get(member) == value:
                self.col.discard(member)
                return
        raise KeyError(value)

    def pop(self):
        if not self.col:
            raise KeyError('pop from an empty set')
        member = self.col.pop()
        return self._get(member)

    def update(self, other):
        for value in other:
            self.add(value)

    def __ior__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        for value in other:
            self.add(value)
        return self

    def _set(self):
        # NOTE: this zero-argument helper shadows the three-argument
        # _set() setter defined above.  Nothing in this class invokes
        # the setter form, but the name collision is a latent trap;
        # kept as-is to preserve the existing (private) interface.
        return set(iter(self))

    def union(self, other):
        return set(self).union(other)

    __or__ = union

    def difference(self, other):
        return set(self).difference(other)

    __sub__ = difference

    def difference_update(self, other):
        for value in other:
            self.discard(value)

    def __isub__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        for value in other:
            self.discard(value)
        return self

    def intersection(self, other):
        return set(self).intersection(other)

    __and__ = intersection

    def intersection_update(self, other):
        want, have = self.intersection(other), set(self)
        remove, add = have - want, want - have

        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)

    def __iand__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        want, have = self.intersection(other), set(self)
        remove, add = have - want, want - have

        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)
        return self

    def symmetric_difference(self, other):
        return set(self).symmetric_difference(other)

    __xor__ = symmetric_difference

    def symmetric_difference_update(self, other):
        want, have = self.symmetric_difference(other), set(self)
        remove, add = have - want, want - have

        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)

    def __ixor__(self, other):
        if not collections._set_binops_check_strict(self, other):
            return NotImplemented
        want, have = self.symmetric_difference(other), set(self)
        remove, add = have - want, want - have

        for value in remove:
            self.remove(value)
        for value in add:
            self.add(value)
        return self

    def issubset(self, other):
        return set(self).issubset(other)

    def issuperset(self, other):
        return set(self).issuperset(other)

    def clear(self):
        self.col.clear()

    def copy(self):
        return set(self)

    def __eq__(self, other):
        return set(self) == other

    def __ne__(self, other):
        return set(self) != other

    def __lt__(self, other):
        return set(self) < other

    def __le__(self, other):
        return set(self) <= other

    def __gt__(self, other):
        return set(self) > other

    def __ge__(self, other):
        return set(self) >= other

    def __repr__(self):
        return repr(set(self))

    def __hash__(self):
        raise TypeError("%s objects are unhashable" % type(self).__name__)

    # Copy docstrings for set-compatible methods from the builtin set.
    for func_name, func in list(locals().items()):
        if (util.callable(func) and func.__name__ == func_name and
                not func.__doc__ and hasattr(set, func_name)):
            func.__doc__ = getattr(set, func_name).__doc__
    del func_name, func
| mit |
gautam1858/tensorflow | tensorflow/python/keras/applications/nasnet.py | 17 | 1814 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=invalid-name
"""NASNet-A models for Keras.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras_applications import nasnet
from tensorflow.python.keras.applications import keras_modules_injection
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.applications.nasnet.NASNetMobile',
              'keras.applications.NASNetMobile')
@keras_modules_injection
def NASNetMobile(*args, **kwargs):
    """Thin wrapper delegating to `keras_applications.nasnet.NASNetMobile`.

    `keras_modules_injection` injects the TF-Keras backend/layers/models
    modules expected by keras_applications into ``kwargs``.
    """
    return nasnet.NASNetMobile(*args, **kwargs)
@keras_export('keras.applications.nasnet.NASNetLarge',
              'keras.applications.NASNetLarge')
@keras_modules_injection
def NASNetLarge(*args, **kwargs):
    """Thin wrapper delegating to `keras_applications.nasnet.NASNetLarge`.

    `keras_modules_injection` injects the TF-Keras backend/layers/models
    modules expected by keras_applications into ``kwargs``.
    """
    return nasnet.NASNetLarge(*args, **kwargs)
@keras_export('keras.applications.nasnet.decode_predictions')
@keras_modules_injection
def decode_predictions(*args, **kwargs):
    """Thin wrapper for `keras_applications.nasnet.decode_predictions`."""
    return nasnet.decode_predictions(*args, **kwargs)
@keras_export('keras.applications.nasnet.preprocess_input')
@keras_modules_injection
def preprocess_input(*args, **kwargs):
    """Thin wrapper for `keras_applications.nasnet.preprocess_input`."""
    return nasnet.preprocess_input(*args, **kwargs)
| apache-2.0 |
aadrian/w2ui | server/python/django_w2ui/django_w2ui/demo/migrations/0001_initial.py | 25 | 4991 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South migration creating the demo ``Tipo_User`` and ``Users`` tables.

    NOTE(review): the ``date_registration`` field is declared with
    ``db_column='date_birthday'`` -- the same physical column as the
    ``date_birthday`` field above it, in both forwards() and the frozen
    ``models`` dict.  This looks like a copy-paste error; creating two
    fields on one table with the same db_column cannot work as intended.
    Confirm the intended column name before reusing this migration
    (historical migrations should not be edited once applied).
    """

    def forwards(self, orm):
        # Adding model 'Tipo_User'
        db.create_table(u'demo_tipo_user', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('codice', self.gf('django.db.models.fields.IntegerField')(null=True, db_column='codice', blank=True)),
            ('descri', self.gf('django.db.models.fields.CharField')(max_length=30L, db_column='descri', blank=True)),
        ))
        db.send_create_signal(u'demo', ['Tipo_User'])

        # Adding model 'Users'
        db.create_table('users', (
            ('userid', self.gf('django.db.models.fields.IntegerField')(primary_key=True, db_column='userid')),
            ('fname', self.gf('django.db.models.fields.CharField')(max_length=50, null=True, db_column='fname', blank=True)),
            ('lname', self.gf('django.db.models.fields.CharField')(max_length=50, null=True, db_column='lname', blank=True)),
            ('email', self.gf('django.db.models.fields.CharField')(max_length=75, null=True, db_column='email', blank=True)),
            ('login', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, db_column='login', blank=True)),
            ('password', self.gf('django.db.models.fields.CharField')(max_length=32, null=True, db_column='password', blank=True)),
            ('date_birthday', self.gf('django.db.models.fields.DateField')(null=True, db_column='date_birthday', blank=True)),
            # NOTE(review): db_column duplicates 'date_birthday' above.
            ('date_registration', self.gf('django.db.models.fields.DateField')(null=True, db_column='date_birthday', blank=True)),
            ('importo_registrato', self.gf('django.db.models.fields.DecimalField')(blank=True, null=True, db_column='importo_registrato', decimal_places=3, max_digits=15)),
            ('text', self.gf('django.db.models.fields.CharField')(default='', max_length=512, null=True, db_column='text', blank=True)),
            ('timestamp', self.gf('django.db.models.fields.DateTimeField')(null=True, db_column='timestamp', blank=True)),
            ('tipo_user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['demo.Tipo_User'], null=True, db_column='tipo_user', blank=True)),
        ))
        db.send_create_signal(u'demo', ['Users'])

    def backwards(self, orm):
        # Deleting model 'Tipo_User'
        db.delete_table(u'demo_tipo_user')

        # Deleting model 'Users'
        db.delete_table('users')

    models = {
        u'demo.tipo_user': {
            'Meta': {'ordering': "['descri']", 'object_name': 'Tipo_User'},
            'codice': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'db_column': "'codice'", 'blank': 'True'}),
            'descri': ('django.db.models.fields.CharField', [], {'max_length': '30L', 'db_column': "'descri'", 'blank': 'True'}),
            u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
        },
        u'demo.users': {
            'Meta': {'object_name': 'Users', 'db_table': "'users'"},
            'date_birthday': ('django.db.models.fields.DateField', [], {'null': 'True', 'db_column': "'date_birthday'", 'blank': 'True'}),
            # NOTE(review): db_column duplicates 'date_birthday' above.
            'date_registration': ('django.db.models.fields.DateField', [], {'null': 'True', 'db_column': "'date_birthday'", 'blank': 'True'}),
            'email': ('django.db.models.fields.CharField', [], {'max_length': '75', 'null': 'True', 'db_column': "'email'", 'blank': 'True'}),
            'fname': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_column': "'fname'", 'blank': 'True'}),
            'importo_registrato': ('django.db.models.fields.DecimalField', [], {'blank': 'True', 'null': 'True', 'db_column': "'importo_registrato'", 'decimal_places': '3', 'max_digits': '15'}),
            'lname': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'db_column': "'lname'", 'blank': 'True'}),
            'login': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'login'", 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'password'", 'blank': 'True'}),
            'text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '512', 'null': 'True', 'db_column': "'text'", 'blank': 'True'}),
            'timestamp': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_column': "'timestamp'", 'blank': 'True'}),
            'tipo_user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['demo.Tipo_User']", 'null': 'True', 'db_column': "'tipo_user'", 'blank': 'True'}),
            'userid': ('django.db.models.fields.IntegerField', [], {'primary_key': 'True', 'db_column': "'userid'"})
        }
    }

    complete_apps = ['demo']
reinhrst/hooks | checks/compile.py | 1 | 4934 | import os
import subprocess
from . import base
from . import filetype
from . import status
class CompileCheck(base.PerFileCheck):
    """Per-file check that runs an external compiler command.

    Subclasses set COMPILECOMMAND; the file under test is appended as
    the command's final argument.
    """

    COMPILECOMMAND = []
    # When True, failures are only reported if the pre-change version of
    # the file compiled cleanly (avoids blaming this change for
    # pre-existing breakage).
    ONLY_IF_OLDFILE_COMPILES = True

    def prepareOldFileDir(self, dirname):
        # Hook for subclasses to stage support files next to the old copy.
        return dirname

    def checkOldFile(self, changedFile):
        """Return True when the pre-change file contents compile."""
        with base.TempDir() as workdir:
            workdir = self.prepareOldFileDir(workdir)
            oldcopy = os.path.join(
                workdir,
                os.path.basename(changedFile.filename))
            with open(oldcopy, "w") as handle:
                handle.write("\n".join(changedFile.oldlines))
            command = list(self.COMPILECOMMAND)
            command.append(oldcopy)
            try:
                subprocess.check_output(command, stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError:
                return False
            return True

    def checkFile(self, changedFile):
        """Compile the changed file, returning a list of CheckErrors."""
        if changedFile.status != status.ADDED:
            if (self.ONLY_IF_OLDFILE_COMPILES and
                    not self.checkOldFile(changedFile)):
                # nothing to check, old file didn't compile
                return []
        command = list(self.COMPILECOMMAND)
        command.append(changedFile.filename)
        try:
            subprocess.check_output(command, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as cpe:
            # Compiler ran and rejected the file: report its output.
            return [base.CheckError(changedFile, self.__class__,
                                    cpe.output)]
        except OSError as e:
            # Compiler binary itself could not be executed.
            error = (
                "Trying to execute:\n%s\n. This failed (%s), possibly "
                "executable is not installed on your system." % (
                    repr(command)[1:-1], str(e)))
            return [base.CheckError(changedFile, self.__class__, error)]
        return []
class PythonCompileCheck(CompileCheck):
    """Checks that Python files byte-compile cleanly."""

    INTERESTED_IN_FILETYPES = [filetype.PYTHON]
    # Bug fix: this was ['python', '-m' 'py_compile'] -- the missing
    # comma relied on implicit string concatenation, producing the single
    # argument '-mpy_compile' (which CPython happens to accept).  Spell
    # the option and module as separate arguments.
    COMPILECOMMAND = ['python', '-m', 'py_compile']
class Pep8Check(CompileCheck):
    """Flake8 lint check that only fails on NEW error classes.

    Error codes already present in the pre-change version of the file
    are tolerated; removing error classes earns star emoji on stdout.
    """

    INTERESTED_IN_FILETYPES = [filetype.PYTHON]
    COMPILECOMMAND = ['flake8']

    def prepareOldFileDir(self, dirname):
        # Stage the project's flake8 config next to the old copy so both
        # versions are linted with identical settings.
        if os.path.exists("setup.cfg"):
            subprocess.check_call([
                "cp", "setup.cfg", dirname])
        return dirname

    def check_file_get_error_numbers(self, filename):
        """Run flake8 on *filename*.

        Returns ``(ok, errornos, output, returncode)`` where ``errornos``
        is the set of error codes found (e.g. {'E501', 'F401'}).
        """
        cmd = list(self.COMPILECOMMAND) + [filename]
        try:
            output = subprocess.check_output(
                cmd, stderr=subprocess.STDOUT).decode("UTF-8")
        except subprocess.CalledProcessError as e:
            errornos = set()
            for line in e.output.decode("UTF-8").split("\n"):
                if line == "":
                    continue
                # flake8 output format: "file:row:col: CODE message"
                filenameandline, errorno, error = line.split(" ", 2)
                errornos.add(errorno)
            return (False, errornos, e.output.decode("UTF-8"), e.returncode)
        return (True, set(), output, 0)

    def checkFile(self, changedFile):
        # Lint the old version (if any) to establish the baseline of
        # pre-existing error codes.
        if changedFile.status != status.ADDED:
            with base.TempDir() as dirname:
                dirname = self.prepareOldFileDir(dirname)
                tempfilename = os.path.join(
                    dirname,
                    os.path.basename(changedFile.filename))
                with open(tempfilename, "w") as f:
                    f.write("\n".join(changedFile.oldlines))
                _, old_errornos, _, _ = \
                    self.check_file_get_error_numbers(tempfilename)
        else:
            old_errornos = set()
        _, new_errornos, output, returncode = \
            self.check_file_get_error_numbers(changedFile.filename)
        cmd = list(self.COMPILECOMMAND) + [changedFile.filename]
        # 127 is the shell's "command not found" exit status.
        if returncode == 127:
            return [base.CheckError(
                changedFile, self.__class__,
                "Could not run %s, is it installed on the system?" % (
                    cmd, ))]
        extra_errornos = new_errornos - old_errornos
        if extra_errornos:
            # New error classes introduced by this change: fail.
            return [base.CheckError(
                changedFile, self.__class__,
                "Running %s resulted in new errors, number %s:\n%s" % (
                    cmd, ", ".join(extra_errornos), output))]
        killed_errornos = old_errornos - new_errornos
        if killed_errornos:
            # Praise on stdout for each error class removed.
            if new_errornos:
                print((
                    "You got rid of errors %s in %s, you deserve stars: " +
                    ("\U00002B50" * len(killed_errornos))) % (
                        ", ".join(killed_errornos),
                        changedFile.filename))  # noqa
            else:
                print((
                    "You got rid of all errors (%s) in %s, you deserve stars: "
                    "" + ("\U0001F31F" * len(killed_errornos))) % (
                        ", ".join(killed_errornos),
                        changedFile.filename))  # noqa
        return []
| mit |
davehunt/kuma | vendor/packages/nose/plugins/xunit.py | 48 | 11667 | """This plugin provides test results in the standard XUnit XML format.
It's designed for the `Jenkins`_ (previously Hudson) continuous build
system, but will probably work for anything else that understands an
XUnit-formatted XML representation of test results.
Add this shell command to your builder ::
nosetests --with-xunit
And by default a file named nosetests.xml will be written to the
working directory.
In a Jenkins builder, tick the box named "Publish JUnit test result report"
under the Post-build Actions and enter this value for Test report XMLs::
**/nosetests.xml
If you need to change the name or location of the file, you can set the
``--xunit-file`` option.
If you need to change the name of the test suite, you can set the
``--xunit-testsuite-name`` option.
Here is an abbreviated version of what an XML test report might look like::
<?xml version="1.0" encoding="UTF-8"?>
<testsuite name="nosetests" tests="1" errors="1" failures="0" skip="0">
<testcase classname="path_to_test_suite.TestSomething"
name="test_it" time="0">
<error type="exceptions.TypeError" message="oops, wrong type">
Traceback (most recent call last):
...
TypeError: oops, wrong type
</error>
</testcase>
</testsuite>
.. _Jenkins: http://jenkins-ci.org/
"""
import codecs
import doctest
import os
import sys
import traceback
import re
import inspect
from StringIO import StringIO
from time import time
from xml.sax import saxutils
from nose.plugins.base import Plugin
from nose.exc import SkipTest
from nose.pyversion import force_unicode, format_exception
# Invalid XML characters, control characters 0-31 sans \t, \n and \r
CONTROL_CHARACTERS = re.compile(r"[\000-\010\013\014\016-\037]")

# Matches a test id with a trailing parenthesized argument spec,
# e.g. "pkg.module.TestCase.test_it(1, 2)" -> groups (name, "(1, 2)").
TEST_ID = re.compile(r'^(.*?)(\(.*\))$')
def xml_safe(value):
    """Return *value* with XML-illegal control characters replaced by '?'."""
    # CONTROL_CHARACTERS covers C0 controls other than \t, \n and \r.
    return re.sub(CONTROL_CHARACTERS, '?', value)
def escape_cdata(cdata):
    """Escape a string for embedding in an XML CDATA section."""
    # A literal "]]>" would terminate the section; close it, write the
    # marker, and reopen a fresh CDATA section.
    cleaned = xml_safe(cdata)
    return cleaned.replace(']]>', ']]>]]><![CDATA[')
def id_split(idval):
    """Split a test id into ``[container, test-name]``.

    A trailing parenthesized argument spec (e.g. from generated tests)
    stays attached to the test-name part.
    """
    match = TEST_ID.match(idval)
    if match is None:
        return idval.rsplit(".", 1)
    name, fargs = match.groups()
    head, tail = name.rsplit(".", 1)
    return [head, tail + fargs]
def nice_classname(obj):
"""Returns a nice name for class object or class instance.
>>> nice_classname(Exception()) # doctest: +ELLIPSIS
'...Exception'
>>> nice_classname(Exception) # doctest: +ELLIPSIS
'...Exception'
"""
if inspect.isclass(obj):
cls_name = obj.__name__
else:
cls_name = obj.__class__.__name__
mod = inspect.getmodule(obj)
if mod:
name = mod.__name__
# jython
if name.startswith('org.python.core.'):
name = name[len('org.python.core.'):]
return "%s.%s" % (name, cls_name)
else:
return cls_name
def exc_message(exc_info):
    """Return the exception's message as XML-safe unicode.

    *exc_info* is a ``sys.exc_info()``-style tuple ``(type, value, tb)``.
    """
    exc = exc_info[1]
    if exc is None:
        # str exception
        # (Python 2 allowed raising bare strings; the "type" slot then
        # holds the message itself.)
        result = exc_info[0]
    else:
        try:
            result = str(exc)
        except UnicodeEncodeError:
            try:
                result = unicode(exc)
            except UnicodeError:
                # Fallback to args as neither str nor
                # unicode(Exception(u'\xe6')) work in Python < 2.6
                result = exc.args[0]
        result = force_unicode(result, 'UTF-8')
    return xml_safe(result)
class Tee(object):
    """File-like object that duplicates everything written to it onto
    several underlying streams, like the Unix ``tee`` utility."""

    def __init__(self, encoding, *args):
        self._encoding = encoding
        self._streams = args

    def write(self, data):
        data = force_unicode(data, self._encoding)
        for stream in self._streams:
            stream.write(data)

    def writelines(self, lines):
        for line in lines:
            self.write(line)

    def flush(self):
        for stream in self._streams:
            stream.flush()

    def isatty(self):
        # Never report being a terminal, even if an underlying stream is.
        return False
class Xunit(Plugin):
    """This plugin provides test results in the standard XUnit XML format."""
    # Plugin identity / priority (see nose.plugins.base.Plugin).
    name = 'xunit'
    score = 1500
    # Encoding used for the report file and for captured output.
    encoding = 'UTF-8'
    # Open handle to the XML report; created in report().
    error_report_file = None

    def __init__(self):
        super(Xunit, self).__init__()
        # Stack of (stdout, stderr) pairs saved by _startCapture() so
        # nested contexts can be unwound in order.
        self._capture_stack = []
        # StringIO buffers for the test currently running, or None.
        self._currentStdout = None
        self._currentStderr = None

    def _timeTaken(self):
        """Return seconds elapsed since beforeTest() started the timer."""
        if hasattr(self, '_timer'):
            taken = time() - self._timer
        else:
            # test died before it ran (probably error in setup())
            # or success/failure added before test started probably
            # due to custom TestResult munging
            taken = 0.0
        return taken

    def _quoteattr(self, attr):
        """Escape an XML attribute. Value can be unicode."""
        attr = xml_safe(attr)
        return saxutils.quoteattr(attr)

    def options(self, parser, env):
        """Sets additional command line options."""
        Plugin.options(self, parser, env)
        parser.add_option(
            '--xunit-file', action='store',
            dest='xunit_file', metavar="FILE",
            default=env.get('NOSE_XUNIT_FILE', 'nosetests.xml'),
            help=("Path to xml file to store the xunit report in. "
                  "Default is nosetests.xml in the working directory "
                  "[NOSE_XUNIT_FILE]"))
        parser.add_option(
            '--xunit-testsuite-name', action='store',
            dest='xunit_testsuite_name', metavar="PACKAGE",
            default=env.get('NOSE_XUNIT_TESTSUITE_NAME', 'nosetests'),
            help=("Name of the testsuite in the xunit xml, generated by plugin. "
                  "Default test suite name is nosetests."))

    def configure(self, options, config):
        """Configures the xunit plugin."""
        Plugin.configure(self, options, config)
        self.config = config
        if self.enabled:
            # Counters feeding the <testsuite> summary attributes.
            self.stats = {'errors': 0,
                          'failures': 0,
                          'passes': 0,
                          'skipped': 0
                          }
            # Pre-rendered <testcase> fragments, joined in report().
            self.errorlist = []
            self.error_report_file_name = os.path.realpath(options.xunit_file)
            self.xunit_testsuite_name = options.xunit_testsuite_name

    def report(self, stream):
        """Writes an Xunit-formatted XML file

        The file includes a report of test errors and failures.
        """
        self.error_report_file = codecs.open(self.error_report_file_name, 'w',
                                             self.encoding, 'replace')
        self.stats['encoding'] = self.encoding
        self.stats['testsuite_name'] = self.xunit_testsuite_name
        self.stats['total'] = (self.stats['errors'] + self.stats['failures']
                               + self.stats['passes'] + self.stats['skipped'])
        self.error_report_file.write(
            u'<?xml version="1.0" encoding="%(encoding)s"?>'
            u'<testsuite name="%(testsuite_name)s" tests="%(total)d" '
            u'errors="%(errors)d" failures="%(failures)d" '
            u'skip="%(skipped)d">' % self.stats)
        self.error_report_file.write(u''.join([force_unicode(e, self.encoding)
                                               for e in self.errorlist]))
        self.error_report_file.write(u'</testsuite>')
        self.error_report_file.close()
        if self.config.verbosity > 1:
            stream.writeln("-" * 70)
            stream.writeln("XML: %s" % self.error_report_file.name)

    def _startCapture(self):
        """Begin mirroring stdout/stderr into per-test buffers."""
        self._capture_stack.append((sys.stdout, sys.stderr))
        self._currentStdout = StringIO()
        self._currentStderr = StringIO()
        # Tee so output still reaches the real streams while captured.
        sys.stdout = Tee(self.encoding, self._currentStdout, sys.stdout)
        sys.stderr = Tee(self.encoding, self._currentStderr, sys.stderr)

    def startContext(self, context):
        # Capture around whole contexts (modules/classes) as well as tests.
        self._startCapture()

    def stopContext(self, context):
        self._endCapture()

    def beforeTest(self, test):
        """Initializes a timer before starting a test."""
        self._timer = time()
        self._startCapture()

    def _endCapture(self):
        """Restore the stdout/stderr saved by the matching _startCapture()."""
        if self._capture_stack:
            sys.stdout, sys.stderr = self._capture_stack.pop()

    def afterTest(self, test):
        self._endCapture()
        self._currentStdout = None
        self._currentStderr = None

    def finalize(self, test):
        # Unwind any capture levels left open by errored contexts.
        while self._capture_stack:
            self._endCapture()

    def _getCapturedStdout(self):
        """Return a <system-out> fragment for the current test, or ''."""
        if self._currentStdout:
            value = self._currentStdout.getvalue()
            if value:
                return '<system-out><![CDATA[%s]]></system-out>' % escape_cdata(
                    value)
        return ''

    def _getCapturedStderr(self):
        """Return a <system-err> fragment for the current test, or ''."""
        if self._currentStderr:
            value = self._currentStderr.getvalue()
            if value:
                return '<system-err><![CDATA[%s]]></system-err>' % escape_cdata(
                    value)
        return ''

    def addError(self, test, err, capt=None):
        """Add error output to Xunit report.
        """
        taken = self._timeTaken()
        # SkipTest is delivered through the error channel but reported
        # as a skip, not an error.
        if issubclass(err[0], SkipTest):
            type = 'skipped'
            self.stats['skipped'] += 1
        else:
            type = 'error'
            self.stats['errors'] += 1
        tb = format_exception(err, self.encoding)
        # NOTE(review): the locals `type` and `id` shadow builtins; kept
        # as-is to avoid touching behavior in a doc-only pass.
        id = test.id()
        self.errorlist.append(
            u'<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
            u'<%(type)s type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
            u'</%(type)s>%(systemout)s%(systemerr)s</testcase>' %
            {'cls': self._quoteattr(id_split(id)[0]),
             'name': self._quoteattr(id_split(id)[-1]),
             'taken': taken,
             'type': type,
             'errtype': self._quoteattr(nice_classname(err[0])),
             'message': self._quoteattr(exc_message(err)),
             'tb': escape_cdata(tb),
             'systemout': self._getCapturedStdout(),
             'systemerr': self._getCapturedStderr(),
             })

    def addFailure(self, test, err, capt=None, tb_info=None):
        """Add failure output to Xunit report.
        """
        taken = self._timeTaken()
        tb = format_exception(err, self.encoding)
        self.stats['failures'] += 1
        id = test.id()
        self.errorlist.append(
            u'<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
            u'<failure type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
            u'</failure>%(systemout)s%(systemerr)s</testcase>' %
            {'cls': self._quoteattr(id_split(id)[0]),
             'name': self._quoteattr(id_split(id)[-1]),
             'taken': taken,
             'errtype': self._quoteattr(nice_classname(err[0])),
             'message': self._quoteattr(exc_message(err)),
             'tb': escape_cdata(tb),
             'systemout': self._getCapturedStdout(),
             'systemerr': self._getCapturedStderr(),
             })

    def addSuccess(self, test, capt=None):
        """Add success output to Xunit report.
        """
        taken = self._timeTaken()
        self.stats['passes'] += 1
        id = test.id()
        self.errorlist.append(
            '<testcase classname=%(cls)s name=%(name)s '
            'time="%(taken).3f">%(systemout)s%(systemerr)s</testcase>' %
            {'cls': self._quoteattr(id_split(id)[0]),
             'name': self._quoteattr(id_split(id)[-1]),
             'taken': taken,
             'systemout': self._getCapturedStdout(),
             'systemerr': self._getCapturedStderr(),
             })
codeaudit/pattern-1 | pattern/web/json/encoder.py | 26 | 21771 | """Implementation of JSONEncoder
"""
import re
from decimal import Decimal
def _import_speedups():
    """Return the C-accelerated (encode_basestring_ascii, make_encoder)
    pair from the _speedups extension, or (None, None) if it is not
    installed."""
    try:
        import _speedups
    except ImportError:
        return None, None
    return _speedups.encode_basestring_ascii, _speedups.make_encoder
c_encode_basestring_ascii, c_make_encoder = _import_speedups()
from decoder import PosInf
# Characters that must be escaped in a JSON string: control characters,
# backslash, double quote, and the JS line separators U+2028/U+2029.
ESCAPE = re.compile(ur'[\x00-\x1f\\"\b\f\n\r\t\u2028\u2029]')
# Everything outside printable ASCII (plus \ and ") for ensure_ascii output.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Quick test for non-ASCII bytes in a str (which is assumed to be UTF-8).
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Short two-character escapes; the loop below fills in \uXXXX forms for
# the remaining control characters.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
    u'\u2028': '\\u2028',
    u'\u2029': '\\u2029',
}
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# repr() yields the shortest representation that round-trips a float.
FLOAT_REPR = repr
def encode_basestring(s):
    """Return a JSON string literal (quoted, escaped, unicode) for *s*."""
    # Byte strings with non-ASCII data are assumed to be UTF-8 encoded.
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        return ESCAPE_DCT[match.group(0)]
    escaped = ESCAPE.sub(replace, s)
    return u'"' + escaped + u'"'
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string

    Non-ASCII characters are emitted as \\uXXXX escapes; characters
    outside the BMP become a surrogate pair of two escapes.
    """
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                #return '\\u{0:04x}'.format(n)
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
# Prefer the C implementation from _speedups when available, falling
# back to the pure-Python version above.
encode_basestring_ascii = (
    c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict, namedtuple  | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Class-level defaults; __init__ may override per instance.
    item_separator = ', '
    key_separator = ': '

    def __init__(self, skipkeys=False, ensure_ascii=True,
                 check_circular=True, allow_nan=True, sort_keys=False,
                 indent=None, separators=None, encoding='utf-8', default=None,
                 use_decimal=True, namedtuple_as_object=True,
                 tuple_as_array=True, bigint_as_string=False,
                 item_sort_key=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None. If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped. If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such. This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a string, then JSON array elements and object members
        will be pretty-printed with a newline followed by that string repeated
        for each level of nesting. ``None`` (the default) selects the most
        compact representation without any newlines. For backwards
        compatibility with versions of simplejson earlier than 2.1.0, an
        integer is also accepted and is converted to a string with that many
        spaces.

        If specified, separators should be a (item_separator, key_separator)
        tuple. The default is (', ', ': '). To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized. It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.

        If use_decimal is true (not the default), ``decimal.Decimal`` will
        be supported directly by the encoder. For the inverse, decode JSON
        with ``parse_float=decimal.Decimal``.

        If namedtuple_as_object is true (the default), objects with
        ``_asdict()`` methods will be encoded as JSON objects.

        If tuple_as_array is true (the default), tuple (and subclasses) will
        be encoded as JSON arrays.

        If bigint_as_string is true (not the default), ints 2**53 and higher
        or lower than -2**53 will be encoded as strings. This is to avoid the
        rounding that happens in Javascript otherwise.

        If specified, item_sort_key is a callable used to sort the items in
        each dictionary. This is useful if you want to sort items other than
        in alphabetical order by key.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.use_decimal = use_decimal
        self.namedtuple_as_object = namedtuple_as_object
        self.tuple_as_array = tuple_as_array
        self.bigint_as_string = bigint_as_string
        self.item_sort_key = item_sort_key
        # Integer indent (pre-2.1.0 API) becomes that many spaces.
        if indent is not None and not isinstance(indent, basestring):
            indent = indent * ' '
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        elif indent is not None:
            # With pretty-printing the newline supplies the space.
            self.item_separator = ','
        if default is not None:
            self.default = default
        self.encoding = encoding

    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError(repr(o) + " is not JSON serializable")

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> from simplejson import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        if self.ensure_ascii:
            return ''.join(chunks)
        else:
            return u''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings are decoded with the
            # configured (non-UTF-8) encoding first.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)

        def floatstr(o, allow_nan=self.allow_nan,
                _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
            # Check for specials. Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on
            # the internals.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text

        key_memo = {}
        # Use the C encoder only for one-shot, compact (no-indent) output;
        # otherwise fall back to the pure-Python generator factory.
        if (_one_shot and c_make_encoder is not None
                and self.indent is None):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan, key_memo, self.use_decimal,
                self.namedtuple_as_object, self.tuple_as_array,
                self.bigint_as_string, self.item_sort_key,
                Decimal)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot, self.use_decimal,
                self.namedtuple_as_object, self.tuple_as_array,
                self.bigint_as_string, self.item_sort_key,
                Decimal=Decimal)
        try:
            return _iterencode(o, 0)
        finally:
            # The C encoder memoizes encoded keys; release them.
            key_memo.clear()
class JSONEncoderForHTML(JSONEncoder):
    """An encoder that produces JSON safe to embed in HTML.

    To embed JSON content in, say, a script tag on a web page, the
    characters &, < and > should be escaped. They cannot be escaped
    with the usual entities (e.g. &) because they are not expanded
    within <script> tags.
    """

    def encode(self, o):
        # Bypass JSONEncoder.encode()'s performance shortcuts so every
        # chunk flows through the escaping iterencode() below.
        chunks = self.iterencode(o, True)
        if self.ensure_ascii:
            return ''.join(chunks)
        return u''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        replacements = (('&', '\\u0026'), ('<', '\\u003c'), ('>', '\\u003e'))
        for chunk in super(JSONEncoderForHTML, self).iterencode(o, _one_shot):
            for raw, escaped in replacements:
                chunk = chunk.replace(raw, escaped)
            yield chunk
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        _use_decimal, _namedtuple_as_object, _tuple_as_array,
        _bigint_as_string, _item_sort_key,
        ## HACK: hand-optimized bytecode; turn globals into locals
        False=False,
        True=True,
        ValueError=ValueError,
        basestring=basestring,
        Decimal=Decimal,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
    ):
    """Build a pure-Python ``iterencode(obj, indent_level)`` generator
    function closed over the encoder's configuration.

    ``markers`` is the circular-reference dict (or None to disable the
    check); the remaining leading arguments mirror JSONEncoder settings.
    The keyword-default block above binds builtins as locals purely as a
    CPython lookup-speed optimization (Python 2 only).
    """
    if _item_sort_key and not callable(_item_sort_key):
        raise TypeError("item_sort_key must be None or callable")

    def _iterencode_list(lst, _current_indent_level):
        # Yield the JSON array representation of *lst*, chunk by chunk.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first item `buf` is just the separator to
                # prefix onto the next chunk (assignment, not +=).
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield ((buf + str(value))
                       if (not _bigint_as_string or
                           (-1 << 53) < value < (1 << 53))
                       else (buf + '"' + str(value) + '"'))
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield buf + str(value)
            else:
                # Container or custom object: emit the pending prefix,
                # then delegate to the appropriate sub-generator.
                yield buf
                if isinstance(value, list):
                    chunks = _iterencode_list(value, _current_indent_level)
                else:
                    _asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
                    if _asdict and callable(_asdict):
                        chunks = _iterencode_dict(_asdict(),
                                                  _current_indent_level)
                    elif _tuple_as_array and isinstance(value, tuple):
                        chunks = _iterencode_list(value, _current_indent_level)
                    elif isinstance(value, dict):
                        chunks = _iterencode_dict(value, _current_indent_level)
                    else:
                        chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _iterencode_dict(dct, _current_indent_level):
        # Yield the JSON object representation of *dct*, chunk by chunk.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _item_sort_key:
            items = dct.items()
            items.sort(key=_item_sort_key)
        elif _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them. Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield (str(value)
                       if (not _bigint_as_string or
                           (-1 << 53) < value < (1 << 53))
                       else ('"' + str(value) + '"'))
            elif isinstance(value, float):
                yield _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield str(value)
            else:
                if isinstance(value, list):
                    chunks = _iterencode_list(value, _current_indent_level)
                else:
                    _asdict = _namedtuple_as_object and getattr(value, '_asdict', None)
                    if _asdict and callable(_asdict):
                        chunks = _iterencode_dict(_asdict(),
                                                  _current_indent_level)
                    elif _tuple_as_array and isinstance(value, tuple):
                        chunks = _iterencode_list(value, _current_indent_level)
                    elif isinstance(value, dict):
                        chunks = _iterencode_dict(value, _current_indent_level)
                    else:
                        chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level):
        # Top-level dispatch on the type of *o*.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield (str(o)
                   if (not _bigint_as_string or
                       (-1 << 53) < o < (1 << 53))
                   else ('"' + str(o) + '"'))
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, list):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        else:
            _asdict = _namedtuple_as_object and getattr(o, '_asdict', None)
            if _asdict and callable(_asdict):
                for chunk in _iterencode_dict(_asdict(), _current_indent_level):
                    yield chunk
            elif (_tuple_as_array and isinstance(o, tuple)):
                for chunk in _iterencode_list(o, _current_indent_level):
                    yield chunk
            elif isinstance(o, dict):
                for chunk in _iterencode_dict(o, _current_indent_level):
                    yield chunk
            elif _use_decimal and isinstance(o, Decimal):
                yield str(o)
            else:
                # Unknown type: let the user-supplied default() convert it,
                # guarding against a default() that returns its argument.
                if markers is not None:
                    markerid = id(o)
                    if markerid in markers:
                        raise ValueError("Circular reference detected")
                    markers[markerid] = o
                o = _default(o)
                for chunk in _iterencode(o, _current_indent_level):
                    yield chunk
                if markers is not None:
                    del markers[markerid]
    return _iterencode
| bsd-3-clause |
mkhuthir/learnPython | Book_pythonlearn_com/code3/bs4/__init__.py | 20 | 18244 | """Beautiful Soup
Elixir and Tonic
"The Screen-Scraper's Friend"
http://www.crummy.com/software/BeautifulSoup/
Beautiful Soup uses a pluggable XML or HTML parser to parse a
(possibly invalid) document into a tree representation. Beautiful Soup
provides methods and Pythonic idioms that make it easy to
navigate, search, and modify the parse tree.
Beautiful Soup works with Python 2.6 and up. It works better if lxml
and/or html5lib is installed.
For more than you ever wanted to know about Beautiful Soup, see the
documentation:
http://www.crummy.com/software/BeautifulSoup/bs4/doc/
"""
__author__ = "Leonard Richardson ([email protected])"
__version__ = "4.4.1"
__copyright__ = "Copyright (c) 2004-2015 Leonard Richardson"
__license__ = "MIT"
__all__ = ['BeautifulSoup']
import os
import re
import warnings
from .builder import builder_registry, ParserRejectedMarkup
from .dammit import UnicodeDammit
from .element import (
CData,
Comment,
DEFAULT_OUTPUT_ENCODING,
Declaration,
Doctype,
NavigableString,
PageElement,
ProcessingInstruction,
ResultSet,
SoupStrainer,
Tag,
)
# The very first thing we do is give a useful error if someone is
# running this code under Python 3 without converting it.
# NOTE(review): in the original Python 2 source this line is a `print`
# statement, i.e. a *syntax error* under Python 3, so importing bs4
# fails with the helpful message below.  After 2to3 conversion it has
# become a harmless constant comparison whose result is discarded.
'You are trying to run the Python 2 version of Beautiful Soup under Python 3. This will not work.'!='You need to convert the code, either by installing it (`python setup.py install`) or by running 2to3 (`2to3 -w bs4`).'
class BeautifulSoup(Tag):
"""
This class defines the basic interface called by the tree builders.
These methods will be called by the parser:
reset()
feed(markup)
The tree builder may call these methods from its feed() implementation:
handle_starttag(name, attrs) # See note about return value
handle_endtag(name)
handle_data(data) # Appends to the current data node
endData(containerClass=NavigableString) # Ends the current data node
No matter how complicated the underlying parser is, you should be
able to build a tree using 'start tag' events, 'end tag' events,
'data' events, and "done with data" events.
If you encounter an empty-element tag (aka a self-closing tag,
like HTML's <br> tag), call handle_starttag and then
handle_endtag.
"""
ROOT_TAG_NAME = '[document]'
# If the end-user gives no indication which tree builder they
# want, look for one with these features.
DEFAULT_BUILDER_FEATURES = ['html', 'fast']
ASCII_SPACES = '\x20\x0a\x09\x0c\x0d'
NO_PARSER_SPECIFIED_WARNING = "No parser was explicitly specified, so I'm using the best available %(markup_type)s parser for this system (\"%(parser)s\"). This usually isn't a problem, but if you run this code on another system, or in a different virtual environment, it may use a different parser and behave differently.\n\nTo get rid of this warning, change this:\n\n BeautifulSoup([your markup])\n\nto this:\n\n BeautifulSoup([your markup], \"%(parser)s\")\n"
    def __init__(self, markup="", features=None, builder=None,
                 parse_only=None, from_encoding=None, exclude_encodings=None,
                 **kwargs):
        """The Soup object is initialized as the 'root tag', and the
        provided markup (which can be a string or a file-like object)
        is fed into the underlying parser.

        :param markup: markup string or file-like object to parse
        :param features: desired parser features (e.g. 'html', 'lxml')
        :param builder: an explicit TreeBuilder instance, overriding features
        :param parse_only: a SoupStrainer restricting what gets parsed
        :param from_encoding: declared encoding of the markup
        :param exclude_encodings: encodings to rule out during detection
        """
        # Reject / translate keyword arguments from the old BS3 API.
        if 'convertEntities' in kwargs:
            warnings.warn(
                "BS4 does not respect the convertEntities argument to the "
                "BeautifulSoup constructor. Entities are always converted "
                "to Unicode characters.")
        if 'markupMassage' in kwargs:
            del kwargs['markupMassage']
            warnings.warn(
                "BS4 does not respect the markupMassage argument to the "
                "BeautifulSoup constructor. The tree builder is responsible "
                "for any necessary markup massage.")
        if 'smartQuotesTo' in kwargs:
            del kwargs['smartQuotesTo']
            warnings.warn(
                "BS4 does not respect the smartQuotesTo argument to the "
                "BeautifulSoup constructor. Smart quotes are always converted "
                "to Unicode characters.")
        if 'selfClosingTags' in kwargs:
            del kwargs['selfClosingTags']
            warnings.warn(
                "BS4 does not respect the selfClosingTags argument to the "
                "BeautifulSoup constructor. The tree builder is responsible "
                "for understanding self-closing tags.")
        if 'isHTML' in kwargs:
            del kwargs['isHTML']
            warnings.warn(
                "BS4 does not respect the isHTML argument to the "
                "BeautifulSoup constructor. Suggest you use "
                "features='lxml' for HTML and features='lxml-xml' for "
                "XML.")
        def deprecated_argument(old_name, new_name):
            # Accept a renamed BS3 argument once, with a warning.
            if old_name in kwargs:
                warnings.warn(
                    'The "%s" argument to the BeautifulSoup constructor '
                    'has been renamed to "%s."' % (old_name, new_name))
                value = kwargs[old_name]
                del kwargs[old_name]
                return value
            return None
        parse_only = parse_only or deprecated_argument(
            "parseOnlyThese", "parse_only")
        from_encoding = from_encoding or deprecated_argument(
            "fromEncoding", "from_encoding")
        if len(kwargs) > 0:
            arg = list(kwargs.keys()).pop()
            raise TypeError(
                "__init__() got an unexpected keyword argument '%s'" % arg)
        # Pick a tree builder class from the requested features unless the
        # caller supplied a builder instance directly.
        if builder is None:
            original_features = features
            if isinstance(features, str):
                features = [features]
            if features is None or len(features) == 0:
                features = self.DEFAULT_BUILDER_FEATURES
            builder_class = builder_registry.lookup(*features)
            if builder_class is None:
                raise FeatureNotFound(
                    "Couldn't find a tree builder with the features you "
                    "requested: %s. Do you need to install a parser library?"
                    % ",".join(features))
            builder = builder_class()
            # Warn when the parser was chosen implicitly, since a different
            # system may silently pick a different one.
            if not (original_features == builder.NAME or
                    original_features in builder.ALTERNATE_NAMES):
                if builder.is_xml:
                    markup_type = "XML"
                else:
                    markup_type = "HTML"
                warnings.warn(self.NO_PARSER_SPECIFIED_WARNING % dict(
                    parser=builder.NAME,
                    markup_type=markup_type))
        self.builder = builder
        self.is_xml = builder.is_xml
        self.builder.soup = self
        self.parse_only = parse_only
        if hasattr(markup, 'read'):        # It's a file-type object.
            markup = markup.read()
        elif len(markup) <= 256:
            # Print out warnings for a couple beginner problems
            # involving passing non-markup to Beautiful Soup.
            # Beautiful Soup will still parse the input as markup,
            # just in case that's what the user really wants.
            if (isinstance(markup, str)
                and not os.path.supports_unicode_filenames):
                possible_filename = markup.encode("utf8")
            else:
                possible_filename = markup
            is_file = False
            try:
                is_file = os.path.exists(possible_filename)
            except Exception as e:
                # This is almost certainly a problem involving
                # characters not valid in filenames on this
                # system. Just let it go.
                pass
            if is_file:
                if isinstance(markup, str):
                    markup = markup.encode("utf8")
                warnings.warn(
                    '"%s" looks like a filename, not markup. You should probably open this file and pass the filehandle into Beautiful Soup.' % markup)
            if markup[:5] == "http:" or markup[:6] == "https:":
                # TODO: This is ugly but I couldn't get it to work in
                # Python 3 otherwise.
                if ((isinstance(markup, bytes) and not b' ' in markup)
                    or (isinstance(markup, str) and not ' ' in markup)):
                    if isinstance(markup, str):
                        markup = markup.encode("utf8")
                    warnings.warn(
                        '"%s" looks like a URL. Beautiful Soup is not an HTTP client. You should probably use an HTTP client to get the document behind the URL, and feed that document to Beautiful Soup.' % markup)
        # The builder may propose several (markup, encoding) candidates;
        # parse with each until one is not rejected.
        for (self.markup, self.original_encoding, self.declared_html_encoding,
         self.contains_replacement_characters) in (
             self.builder.prepare_markup(
                 markup, from_encoding, exclude_encodings=exclude_encodings)):
            self.reset()
            try:
                self._feed()
                break
            except ParserRejectedMarkup:
                pass
        # Clear out the markup and remove the builder's circular
        # reference to this object.
        self.markup = None
        self.builder.soup = None
def __copy__(self):
return type(self)(self.encode(), builder=self.builder)
def __getstate__(self):
# Frequently a tree builder can't be pickled.
d = dict(self.__dict__)
if 'builder' in d and not self.builder.picklable:
del d['builder']
return d
    def _feed(self):
        """Run the markup through the tree builder, then close out any
        pending data and unclosed tags."""
        # Convert the document to Unicode.
        self.builder.reset()
        self.builder.feed(self.markup)
        # Close out any unfinished strings and close all the open tags.
        self.endData()
        while self.currentTag.name != self.ROOT_TAG_NAME:
            self.popTag()
    def reset(self):
        """Reset this soup to a pristine pre-parse state."""
        # Re-initialize ourselves as the root tag of an empty document.
        Tag.__init__(self, self, self.builder, self.ROOT_TAG_NAME)
        self.hidden = 1
        self.builder.reset()
        # Buffered text that has not yet become a NavigableString.
        self.current_data = []
        self.currentTag = None
        self.tagStack = []
        self.preserve_whitespace_tag_stack = []
        # The root (this object) is always the bottom of the tag stack.
        self.pushTag(self)
    def new_tag(self, name, namespace=None, nsprefix=None, **attrs):
        """Create a new tag associated with this soup.

        The tag is not attached to the tree; insert it with append(),
        insert(), etc.
        """
        return Tag(None, self.builder, name, namespace, nsprefix, attrs)
    def new_string(self, s, subclass=NavigableString):
        """Create a new NavigableString associated with this soup.

        ``subclass`` may be a NavigableString subclass such as Comment.
        """
        return subclass(s)
    def insert_before(self, successor):
        # The document root has no parent, so it cannot be repositioned.
        raise NotImplementedError("BeautifulSoup objects don't support insert_before().")
    def insert_after(self, successor):
        # The document root has no parent, so it cannot be repositioned.
        raise NotImplementedError("BeautifulSoup objects don't support insert_after().")
def popTag(self):
tag = self.tagStack.pop()
if self.preserve_whitespace_tag_stack and tag == self.preserve_whitespace_tag_stack[-1]:
self.preserve_whitespace_tag_stack.pop()
#print "Pop", tag.name
if self.tagStack:
self.currentTag = self.tagStack[-1]
return self.currentTag
def pushTag(self, tag):
#print "Push", tag.name
if self.currentTag:
self.currentTag.contents.append(tag)
self.tagStack.append(tag)
self.currentTag = self.tagStack[-1]
if tag.name in self.builder.preserve_whitespace_tags:
self.preserve_whitespace_tag_stack.append(tag)
    def endData(self, containerClass=NavigableString):
        """Flush buffered text into the tree as a *containerClass* node."""
        if self.current_data:
            current_data = ''.join(self.current_data)
            # If whitespace is not preserved, and this string contains
            # nothing but ASCII spaces, replace it with a single space
            # or newline.
            if not self.preserve_whitespace_tag_stack:
                strippable = True
                for i in current_data:
                    if i not in self.ASCII_SPACES:
                        strippable = False
                        break
                if strippable:
                    if '\n' in current_data:
                        current_data = '\n'
                    else:
                        current_data = ' '
            # Reset the data collector.
            self.current_data = []
            # Should we add this string to the tree at all?
            # A SoupStrainer with no text filter drops top-level strings.
            if self.parse_only and len(self.tagStack) <= 1 and \
                   (not self.parse_only.text or \
                    not self.parse_only.search(current_data)):
                return
            o = containerClass(current_data)
            self.object_was_parsed(o)
    def object_was_parsed(self, o, parent=None, most_recent_element=None):
        """Add an object to the parse tree."""
        # Default attachment point is the currently open tag, and the
        # previous element in document order is the last thing parsed.
        parent = parent or self.currentTag
        previous_element = most_recent_element or self._most_recent_element
        next_element = previous_sibling = next_sibling = None
        if isinstance(o, Tag):
            # A Tag may already carry links (e.g. when re-inserted); keep them.
            next_element = o.next_element
            next_sibling = o.next_sibling
            previous_sibling = o.previous_sibling
            if not previous_element:
                previous_element = o.previous_element
        o.setup(parent, previous_element, next_element, previous_sibling, next_sibling)
        self._most_recent_element = o
        parent.contents.append(o)
        if parent.next_sibling:
            # This node is being inserted into an element that has
            # already been parsed. Deal with any dangling references.
            index = parent.contents.index(o)
            if index == 0:
                previous_element = parent
                previous_sibling = None
            else:
                previous_element = previous_sibling = parent.contents[index-1]
            if index == len(parent.contents)-1:
                next_element = parent.next_sibling
                next_sibling = None
            else:
                next_element = next_sibling = parent.contents[index+1]
            # Re-stitch the doubly-linked document-order and sibling chains
            # around the inserted node.
            o.previous_element = previous_element
            if previous_element:
                previous_element.next_element = o
            o.next_element = next_element
            if next_element:
                next_element.previous_element = o
            o.next_sibling = next_sibling
            if next_sibling:
                next_sibling.previous_sibling = o
            o.previous_sibling = previous_sibling
            if previous_sibling:
                previous_sibling.next_sibling = o
    def _popToTag(self, name, nsprefix=None, inclusivePop=True):
        """Pops the tag stack up to and including the most recent
        instance of the given tag. If inclusivePop is false, pops the tag
        stack up to but *not* including the most recent instance of
        the given tag."""
        #print "Popping to %s" % name
        if name == self.ROOT_TAG_NAME:
            # The BeautifulSoup object itself can never be popped.
            return
        most_recently_popped = None
        stack_size = len(self.tagStack)
        # Walk from the top of the stack down; index 0 is the soup itself.
        for i in range(stack_size - 1, 0, -1):
            t = self.tagStack[i]
            if (name == t.name and nsprefix == t.prefix):
                if inclusivePop:
                    most_recently_popped = self.popTag()
                break
            most_recently_popped = self.popTag()
        return most_recently_popped
    def handle_starttag(self, name, namespace, nsprefix, attrs):
        """Push a start tag on to the stack.
        If this method returns None, the tag was rejected by the
        SoupStrainer. You should proceed as if the tag had not occured
        in the document. For instance, if this was a self-closing tag,
        don't call handle_endtag.
        """
        # print "Start tag %s: %s" % (name, attrs)
        # Flush pending text before opening a new element.
        self.endData()
        # Top-level tag filtered out by the SoupStrainer?
        if (self.parse_only and len(self.tagStack) <= 1
            and (self.parse_only.text
                 or not self.parse_only.search_tag(name, attrs))):
            return None
        tag = Tag(self, self.builder, name, namespace, nsprefix, attrs,
                  self.currentTag, self._most_recent_element)
        # NOTE(review): Tag() should never return None; this guard looks
        # vestigial -- confirm before removing.
        if tag is None:
            return tag
        if self._most_recent_element:
            self._most_recent_element.next_element = tag
        self._most_recent_element = tag
        self.pushTag(tag)
        return tag
    def handle_endtag(self, name, nsprefix=None):
        """Close the most recent open tag matching *name* (and *nsprefix*)."""
        #print "End tag: " + name
        self.endData()
        self._popToTag(name, nsprefix)
    def handle_data(self, data):
        # Buffer raw text; endData() later turns it into a NavigableString.
        self.current_data.append(data)
    def decode(self, pretty_print=False,
               eventual_encoding=DEFAULT_OUTPUT_ENCODING,
               formatter="minimal"):
        """Returns a string or Unicode representation of this document.
        To get Unicode, pass None for encoding."""
        if self.is_xml:
            # Print the XML declaration
            encoding_part = ''
            if eventual_encoding != None:
                encoding_part = ' encoding="%s"' % eventual_encoding
            prefix = '<?xml version="1.0"%s?>\n' % encoding_part
        else:
            prefix = ''
        # An indent level of None presumably disables pretty-printing in
        # the superclass -- TODO confirm against Tag.decode.
        if not pretty_print:
            indent_level = None
        else:
            indent_level = 0
        return prefix + super(BeautifulSoup, self).decode(
            indent_level, eventual_encoding, formatter)
# Alias to make it easier to type import: 'from bs4 import _soup'
# Both `_s` and `_soup` are shorthand aliases for BeautifulSoup.
_s = BeautifulSoup
_soup = BeautifulSoup
class BeautifulStoneSoup(BeautifulSoup):
    """Deprecated interface to an XML parser."""

    def __init__(self, *args, **kwargs):
        # Warn, then delegate to BeautifulSoup with XML parsing forced on.
        warnings.warn(
            'The BeautifulStoneSoup class is deprecated. Instead of using '
            'it, pass features="xml" into the BeautifulSoup constructor.')
        kwargs['features'] = 'xml'
        super(BeautifulStoneSoup, self).__init__(*args, **kwargs)
class StopParsing(Exception):
    # Raised by a tree builder to abort parsing early.
    pass
class FeatureNotFound(ValueError):
    # Raised when no registered tree builder provides a requested feature.
    pass
#By default, act as an HTML pretty-printer.
# Reads a document from stdin and writes it back out indented.
if __name__ == '__main__':
    import sys
    soup = BeautifulSoup(sys.stdin)
    print(soup.prettify())
| mit |
pbaesse/Sissens | lib/python2.7/site-packages/sqlalchemy/orm/path_registry.py | 28 | 7751 | # orm/path_registry.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Path tracking utilities, representing mapper graph traversals.
"""
from .. import inspection
from .. import util
from .. import exc
from itertools import chain
from .base import class_mapper
import logging
log = logging.getLogger(__name__)
def _unreduce_path(path):
    # Pickle support: rebuild a PathRegistry from the serialized form
    # produced by PathRegistry.serialize() (see PathRegistry.__reduce__).
    return PathRegistry.deserialize(path)
# Token suffixes recognized by PathRegistry.token().
_WILDCARD_TOKEN = "*"
_DEFAULT_TOKEN = "_sa_default"
class PathRegistry(object):
    """Represent query load paths and registry functions.
    Basically represents structures like:
    (<User mapper>, "orders", <Order mapper>, "items", <Item mapper>)
    These structures are generated by things like
    query options (joinedload(), subqueryload(), etc.) and are
    used to compose keys stored in the query._attributes dictionary
    for various options.
    They are then re-composed at query compile/result row time as
    the query is formed and as rows are fetched, where they again
    serve to compose keys to look up options in the context.attributes
    dictionary, which is copied from query._attributes.
    The path structure has a limited amount of caching, where each
    "root" ultimately pulls from a fixed registry associated with
    the first mapper, that also contains elements for each of its
    property keys. However paths longer than two elements, which
    are the exception rather than the rule, are generated on an
    as-needed basis.
    """
    is_token = False
    is_root = False
    def __eq__(self, other):
        return other is not None and \
            self.path == other.path
    def __ne__(self, other):
        # BUG FIX: under Python 2, __ne__ is NOT derived from __eq__, so
        # "path_a != path_b" previously fell back to identity comparison
        # and could return True for equal paths. Mirror __eq__ explicitly.
        return not self.__eq__(other)
    def set(self, attributes, key, value):
        log.debug("set '%s' on path '%s' to '%s'", key, self, value)
        attributes[(key, self.path)] = value
    def setdefault(self, attributes, key, value):
        log.debug("setdefault '%s' on path '%s' to '%s'", key, self, value)
        attributes.setdefault((key, self.path), value)
    def get(self, attributes, key, value=None):
        key = (key, self.path)
        if key in attributes:
            return attributes[key]
        else:
            return value
    def __len__(self):
        return len(self.path)
    @property
    def length(self):
        return len(self.path)
    def pairs(self):
        # The path tuple alternates (entity, property); yield the pairs.
        path = self.path
        for i in range(0, len(path), 2):
            yield path[i], path[i + 1]
    def contains_mapper(self, mapper):
        # Check the even-indexed (entity) slots for a mapper that is
        # *mapper* or a subclass of it.
        for path_mapper in [
            self.path[i] for i in range(0, len(self.path), 2)
        ]:
            if path_mapper.is_mapper and \
                    path_mapper.isa(mapper):
                return True
        else:
            return False
    def contains(self, attributes, key):
        return (key, self.path) in attributes
    def __reduce__(self):
        return _unreduce_path, (self.serialize(), )
    def serialize(self):
        # Flatten to [(mapped class, property key), ...]; the final pair
        # gets a None key since a path may end on an entity.
        path = self.path
        return list(zip(
            [m.class_ for m in [path[i] for i in range(0, len(path), 2)]],
            [path[i].key for i in range(1, len(path), 2)] + [None]
        ))
    @classmethod
    def deserialize(cls, path):
        if path is None:
            return None
        p = tuple(chain(*[(class_mapper(mcls),
                           class_mapper(mcls).attrs[key]
                           if key is not None else None)
                          for mcls, key in path]))
        if p and p[-1] is None:
            p = p[0:-1]
        return cls.coerce(p)
    @classmethod
    def per_mapper(cls, mapper):
        return EntityRegistry(
            cls.root, mapper
        )
    @classmethod
    def coerce(cls, raw):
        # Build a registry by successively indexing from the root.
        return util.reduce(lambda prev, next: prev[next], raw, cls.root)
    def token(self, token):
        if token.endswith(':' + _WILDCARD_TOKEN):
            return TokenRegistry(self, token)
        elif token.endswith(":" + _DEFAULT_TOKEN):
            return TokenRegistry(self.root, token)
        else:
            raise exc.ArgumentError("invalid token: %s" % token)
    def __add__(self, other):
        return util.reduce(
            lambda prev, next: prev[next],
            other.path, self)
    def __repr__(self):
        return "%s(%r)" % (self.__class__.__name__, self.path, )
class RootRegistry(PathRegistry):
    """Root registry, defers to mappers so that
    paths are maintained per-root-mapper.
    """
    # The root has an empty path; indexing it with an entity delegates to
    # that entity's own per-mapper registry.
    path = ()
    has_entity = False
    is_aliased_class = False
    is_root = True
    def __getitem__(self, entity):
        return entity._path_registry
# Install the shared singleton root on the base class.
PathRegistry.root = RootRegistry()
class TokenRegistry(PathRegistry):
    """Path element for a string token (wildcard/default) appended to a path."""
    def __init__(self, parent, token):
        self.token = token
        self.parent = parent
        self.path = parent.path + (token,)
    has_entity = False
    is_token = True
    def generate_for_superclasses(self):
        # Yield an equivalent token path for each mapper up the inheritance
        # chain; aliased classes and the root have no such chain.
        if not self.parent.is_aliased_class and not self.parent.is_root:
            for ent in self.parent.mapper.iterate_to_root():
                yield TokenRegistry(self.parent.parent[ent], self.token)
        else:
            yield self
    def __getitem__(self, entity):
        # A token terminates a path; it cannot be extended further.
        raise NotImplementedError()
class PropRegistry(PathRegistry):
    """Path element for a MapperProperty appended to an entity path."""
    def __init__(self, parent, prop):
        # restate this path in terms of the
        # given MapperProperty's parent.
        insp = inspection.inspect(parent[-1])
        if not insp.is_aliased_class or insp._use_mapper_path:
            parent = parent.parent[prop.parent]
        elif insp.is_aliased_class and insp.with_polymorphic_mappers:
            # The property may belong to one of the aliased class's
            # with_polymorphic sub-mappers rather than its base mapper.
            if prop.parent is not insp.mapper and \
                    prop.parent in insp.with_polymorphic_mappers:
                subclass_entity = parent[-1]._entity_for_mapper(prop.parent)
                parent = parent.parent[subclass_entity]
        self.prop = prop
        self.parent = parent
        self.path = parent.path + (prop,)
        # Precomputed cache keys used by the loader-strategy lookup.
        self._wildcard_path_loader_key = (
            "loader",
            self.parent.path + self.prop._wildcard_token
        )
        self._default_path_loader_key = self.prop._default_path_loader_key
        self._loader_key = ("loader", self.path)
    def __str__(self):
        return " -> ".join(
            str(elem) for elem in self.path
        )
    @util.memoized_property
    def has_entity(self):
        # Relationship-like properties expose a target mapper.
        return hasattr(self.prop, "mapper")
    @util.memoized_property
    def entity(self):
        return self.prop.mapper
    @property
    def mapper(self):
        return self.entity
    @property
    def entity_path(self):
        return self[self.entity]
    def __getitem__(self, entity):
        # Integer/slice indexing reads the raw path tuple; an entity
        # extends the path by one element.
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        else:
            return EntityRegistry(
                self, entity
            )
class EntityRegistry(PathRegistry, dict):
    """Path element for a mapped entity; caches child PropRegistry objects
    in itself (it is a dict keyed by MapperProperty)."""
    is_aliased_class = False
    has_entity = True
    def __init__(self, parent, entity):
        self.key = entity
        self.parent = parent
        self.is_aliased_class = entity.is_aliased_class
        self.entity = entity
        self.path = parent.path + (entity,)
        self.entity_path = self
    @property
    def mapper(self):
        return inspection.inspect(self.entity).mapper
    def __bool__(self):
        # Always truthy even when the dict cache is empty.
        return True
    __nonzero__ = __bool__
    def __getitem__(self, entity):
        # Integer/slice indexing reads the raw path tuple; anything else
        # is a dict lookup (with __missing__ creating new PropRegistry).
        if isinstance(entity, (int, slice)):
            return self.path[entity]
        else:
            return dict.__getitem__(self, entity)
    def __missing__(self, key):
        self[key] = item = PropRegistry(self, key)
        return item
| gpl-3.0 |
EntityFXCode/arsenalsuite | cpp/lib/PyQt4/examples/demos/embeddeddialogs/embeddeddialogs.py | 15 | 9138 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2010 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:LGPL$
## Commercial Usage
## Licensees holding valid Qt Commercial licenses may use this file in
## accordance with the Qt Commercial License Agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and Nokia.
##
## GNU Lesser General Public License Usage
## Alternatively, this file may be used under the terms of the GNU Lesser
## General Public License version 2.1 as published by the Free Software
## Foundation and appearing in the file LICENSE.LGPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU Lesser General Public License version 2.1 requirements
## will be met: http://www.gnu.org/licenses/old-licenses/lgpl-2.1.html.
##
## In addition, as a special exception, Nokia gives you certain additional
## rights. These rights are described in the Nokia Qt LGPL Exception
## version 1.1, included in the file LGPL_EXCEPTION.txt in this package.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3.0 as published by the Free Software
## Foundation and appearing in the file LICENSE.GPL included in the
## packaging of this file. Please review the following information to
## ensure the GNU General Public License version 3.0 requirements will be
## met: http://www.gnu.org/copyleft/gpl.html.
##
## If you have questions regarding the use of this file, please contact
## Nokia at [email protected].
## $QT_END_LICENSE$
##
#############################################################################
# This is only needed for Python v2 but is harmless for Python v3.
import sip
sip.setapi('QString', 2)
from PyQt4 import QtCore, QtGui
from embeddeddialog import Ui_embeddedDialog
from embeddeddialogs_rc import *
class CustomProxy(QtGui.QGraphicsProxyWidget):
    """Graphics proxy that zooms its embedded widget on mouse hover.

    A 250 ms QTimeLine drives a rotate-and-scale animation; a popup opened
    by the embedded widget suspends the zoom-out until it closes.
    """
    def __init__(self, parent=None, wFlags=0):
        super(CustomProxy, self).__init__(parent, wFlags)
        self.popupShown = False
        self.timeLine = QtCore.QTimeLine(250, self)
        self.timeLine.valueChanged.connect(self.updateStep)
        self.timeLine.stateChanged.connect(self.stateChanged)
    def boundingRect(self):
        # Reserve 10px on the right/bottom edges for the drop shadow.
        return QtGui.QGraphicsProxyWidget.boundingRect(self).adjusted(0, 0, 10, 10)
    def paintWindowFrame(self, painter, option, widget):
        # Translucent drop shadow along the right and bottom edges,
        # painted only where the exposed region requires it.
        color = QtGui.QColor(0, 0, 0, 64)
        r = self.windowFrameRect()
        right = QtCore.QRectF(r.right(), r.top()+10, 10, r.height()-10)
        bottom = QtCore.QRectF(r.left()+10, r.bottom(), r.width(), 10)
        intersectsRight = right.intersects(option.exposedRect)
        intersectsBottom = bottom.intersects(option.exposedRect)
        if intersectsRight and intersectsBottom:
            path = QtGui.QPainterPath()
            path.addRect(right)
            path.addRect(bottom)
            painter.setPen(QtCore.Qt.NoPen)
            painter.setBrush(color)
            painter.drawPath(path)
        elif intersectsBottom:
            painter.fillRect(bottom, color)
        elif intersectsRight:
            painter.fillRect(right, color)
        super(CustomProxy, self).paintWindowFrame(painter, option, widget)
    def hoverEnterEvent(self, event):
        super(CustomProxy, self).hoverEnterEvent(event)
        self.scene().setActiveWindow(self)
        # BUG FIX: "currentValue" is a method; the original compared the
        # bound method object to 1, which is always True. Call it so we
        # only start zooming when not already fully zoomed in.
        if self.timeLine.currentValue() != 1:
            self.zoomIn()
    def hoverLeaveEvent(self, event):
        super(CustomProxy, self).hoverLeaveEvent(event)
        # Don't zoom out while one of the widget's popups is open, nor when
        # a zoom-out already ran to completion.
        if not self.popupShown and (self.timeLine.direction() != QtCore.QTimeLine.Backward or self.timeLine.currentValue() != 0):
            self.zoomOut()
    def sceneEventFilter(self, watched, event):
        # Track popup visibility via mouse-grab transitions on the
        # embedded widget's window-level children.
        if watched.isWindow() and (event.type() == QtCore.QEvent.UngrabMouse or event.type() == QtCore.QEvent.GrabMouse):
            self.popupShown = watched.isVisible()
            if not self.popupShown and not self.isUnderMouse():
                self.zoomOut()
        return super(CustomProxy, self).sceneEventFilter(watched, event)
    def itemChange(self, change, value):
        if change == self.ItemChildAddedChange or change == self.ItemChildRemovedChange:
            # how to translate this line to python?
            # QGraphicsItem *item = qVariantValue<QGraphicsItem *>(value);
            item = value
            try:
                if change == self.ItemChildAddedChange:
                    item.installSceneEventFilter(self)
                else:
                    item.removeSceneEventFilter(self)
            except:
                # NOTE(review): broad except kept from the original demo;
                # "value" may not be a QGraphicsItem here.
                pass
        return super(CustomProxy, self).itemChange(change, value)
    def updateStep(self, step):
        # Rotate around all three axes and scale up to 2.5x, pivoting
        # around the item's center.
        r = self.boundingRect()
        self.setTransform(QtGui.QTransform()
                          .translate(r.width() / 2, r.height() / 2)
                          .rotate(step * 30, QtCore.Qt.XAxis)
                          .rotate(step * 10, QtCore.Qt.YAxis)
                          .rotate(step * 5, QtCore.Qt.ZAxis)
                          .scale(1 + 1.5 * step, 1 + 1.5 * step)
                          .translate(-r.width() / 2, -r.height() / 2))
    def stateChanged(self, state):
        # Only cache the rendered widget while the animation is at rest.
        if state == QtCore.QTimeLine.Running:
            if self.timeLine.direction() == QtCore.QTimeLine.Forward:
                self.setCacheMode(self.NoCache)
        elif state == QtCore.QTimeLine.NotRunning:
            if self.timeLine.direction() == QtCore.QTimeLine.Backward:
                self.setCacheMode(self.DeviceCoordinateCache)
    def zoomIn(self):
        """Play the hover animation forward, starting it if stopped."""
        if self.timeLine.direction() != QtCore.QTimeLine.Forward:
            self.timeLine.setDirection(QtCore.QTimeLine.Forward)
        if self.timeLine.state() == QtCore.QTimeLine.NotRunning:
            self.timeLine.start()
    def zoomOut(self):
        """Play the hover animation backward, starting it if stopped."""
        if self.timeLine.direction() != QtCore.QTimeLine.Backward:
            self.timeLine.setDirection(QtCore.QTimeLine.Backward)
        if self.timeLine.state() == QtCore.QTimeLine.NotRunning:
            self.timeLine.start()
class EmbeddedDialog(QtGui.QDialog):
    """Demo dialog whose own controls restyle the dialog (direction,
    spacing, font, widget style)."""
    def __init__(self, parent=None):
        super(EmbeddedDialog, self).__init__(parent)
        self.ui = Ui_embeddedDialog()
        self.ui.setupUi(self)
        self.ui.layoutDirection.setCurrentIndex(self.layoutDirection() != QtCore.Qt.LeftToRight)
        # Offer every installed widget style and preselect the active one.
        for styleName in QtGui.QStyleFactory.keys():
            self.ui.style.addItem(styleName)
            if self.style().objectName().lower() == styleName.lower():
                self.ui.style.setCurrentIndex(self.ui.style.count() -1)
        self.ui.layoutDirection.activated.connect(self.layoutDirectionChanged)
        self.ui.spacing.valueChanged.connect(self.spacingChanged)
        self.ui.fontComboBox.currentFontChanged.connect(self.fontChanged)
        self.ui.style.activated[str].connect(self.styleChanged)
    def layoutDirectionChanged(self, index):
        # Combo index 0 = left-to-right; anything else = right-to-left.
        if index == 0:
            self.setLayoutDirection(QtCore.Qt.LeftToRight)
        else:
            self.setLayoutDirection(QtCore.Qt.RightToLeft)
    def spacingChanged(self, spacing):
        self.layout().setSpacing(spacing)
        self.adjustSize()
    def fontChanged(self, font):
        self.setFont(font)
    def setStyleHelper(self, widget, style):
        # Apply the style and its default palette to the whole subtree.
        widget.setStyle(style)
        widget.setPalette(style.standardPalette())
        for child in widget.children():
            if isinstance(child, QtGui.QWidget):
                self.setStyleHelper(child, style)
    def styleChanged(self, styleName):
        style=QtGui.QStyleFactory.create(styleName)
        if style:
            self.setStyleHelper(self, style)
        # Keep a reference to the style.
        self._style = style
if __name__ == '__main__':
    import sys
    app = QtGui.QApplication(sys.argv)
    # Build a 10x10 grid of animated proxy dialogs on one scene.
    scene = QtGui.QGraphicsScene()
    for y in range(10):
        for x in range(10):
            proxy = CustomProxy(None, QtCore.Qt.Window)
            proxy.setWidget(EmbeddedDialog())
            # Lay dialogs out with a 5% gap between neighbors.
            rect = proxy.boundingRect()
            proxy.setPos( x * rect.width()*1.05, y*rect.height()*1.05 )
            proxy.setCacheMode(QtGui.QGraphicsItem.DeviceCoordinateCache)
            scene.addItem(proxy)
    scene.setSceneRect(scene.itemsBoundingRect())
    view = QtGui.QGraphicsView(scene)
    view.scale(0.5, 0.5)
    view.setRenderHints(view.renderHints() | QtGui.QPainter.Antialiasing | QtGui.QPainter.SmoothPixmapTransform)
    view.setBackgroundBrush(QtGui.QBrush(QtGui.QPixmap(':/No-Ones-Laughing-3.jpg')))
    view.setCacheMode(QtGui.QGraphicsView.CacheBackground)
    view.setViewportUpdateMode(QtGui.QGraphicsView.BoundingRectViewportUpdate)
    view.show()
    view.setWindowTitle("Embedded Dialogs Demo")
    sys.exit(app.exec_())
| gpl-2.0 |
yencarnacion/jaikuengine | .google_appengine/lib/django-1.2/django/db/backends/sqlite3/introspection.py | 65 | 5857 | import re
from django.db.backends import BaseDatabaseIntrospection
# This light wrapper "fakes" a dictionary interface, because some SQLite data
# types include variables in them -- e.g. "varchar(30)" -- and can't be matched
# as a simple dictionary lookup.
class FlexibleFieldLookupDict:
    """Case-insensitive mapping of SQLite column types to Django field names.

    This light wrapper "fakes" a dictionary interface because some SQLite
    data types include variables in them -- e.g. "varchar(30)" -- and
    can't be matched as a simple dictionary lookup.
    """
    # Maps SQL types to Django Field types. Some of the SQL types have multiple
    # entries here because SQLite allows for anything and doesn't normalize the
    # field type; it uses whatever was given.
    base_data_types_reverse = {
        'bool': 'BooleanField',
        'boolean': 'BooleanField',
        'smallint': 'SmallIntegerField',
        'smallint unsigned': 'PositiveSmallIntegerField',
        'smallinteger': 'SmallIntegerField',
        'int': 'IntegerField',
        'integer': 'IntegerField',
        'bigint': 'BigIntegerField',
        'integer unsigned': 'PositiveIntegerField',
        'decimal': 'DecimalField',
        'real': 'FloatField',
        'text': 'TextField',
        'char': 'CharField',
        'date': 'DateField',
        'datetime': 'DateTimeField',
        'time': 'TimeField',
    }

    def __getitem__(self, key):
        key = key.lower()
        try:
            return self.base_data_types_reverse[key]
        except KeyError:
            # "char(N)" / "varchar(N)" carry their max_length inline.
            # (Redundant method-local "import re" removed; the module
            # already imports re at the top.)
            m = re.search(r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$', key)
            if m:
                return ('CharField', {'max_length': int(m.group(1))})
            # Re-raise the original KeyError so the unknown type name is
            # preserved in the exception (the old code raised a bare one).
            raise
class DatabaseIntrospection(BaseDatabaseIntrospection):
    """SQLite schema introspection built on PRAGMA calls and parsing the
    CREATE TABLE SQL stored in sqlite_master."""
    data_types_reverse = FlexibleFieldLookupDict()
    def get_table_list(self, cursor):
        "Returns a list of table names in the current database."
        # Skip the sqlite_sequence system table used for autoincrement key
        # generation.
        cursor.execute("""
            SELECT name FROM sqlite_master
            WHERE type='table' AND NOT name='sqlite_sequence'
            ORDER BY name""")
        return [row[0] for row in cursor.fetchall()]
    def get_table_description(self, cursor, table_name):
        "Returns a description of the table, with the DB-API cursor.description interface."
        # Only name, type and null_ok are populated; SQLite's PRAGMA does
        # not supply display size/precision/scale.
        return [(info['name'], info['type'], None, None, None, None,
                 info['null_ok']) for info in self._table_info(cursor, table_name)]
    def get_relations(self, cursor, table_name):
        """
        Returns a dictionary of {field_index: (field_index_other_table, other_table)}
        representing all relationships to the given table. Indexes are 0-based.
        """
        # Dictionary of relations to return
        relations = {}
        # Schema for this table
        cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"])
        results = cursor.fetchone()[0].strip()
        # Keep only the column definitions between the outermost parens.
        results = results[results.index('(')+1:results.rindex(')')]
        # Walk through and look for references to other tables. SQLite doesn't
        # really have enforced references, but since it echoes out the SQL used
        # to create the table we can look for REFERENCES statements used there.
        # NOTE(review): splitting on ',' breaks on types like decimal(10,2);
        # acceptable for Django-generated schemas -- confirm for hand-written ones.
        for field_index, field_desc in enumerate(results.split(',')):
            field_desc = field_desc.strip()
            if field_desc.startswith("UNIQUE"):
                continue
            m = re.search('references (.*) \(["|](.*)["|]\)', field_desc, re.I)
            if not m:
                continue
            table, column = [s.strip('"') for s in m.groups()]
            cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s", [table])
            result = cursor.fetchone()
            if not result:
                continue
            other_table_results = result[0].strip()
            li, ri = other_table_results.index('('), other_table_results.rindex(')')
            other_table_results = other_table_results[li+1:ri]
            # Find the referenced column's positional index in the other table.
            for other_index, other_desc in enumerate(other_table_results.split(',')):
                other_desc = other_desc.strip()
                if other_desc.startswith('UNIQUE'):
                    continue
                name = other_desc.split(' ', 1)[0].strip('"')
                if name == column:
                    relations[field_index] = (other_index, table)
                    break
        return relations
    def get_indexes(self, cursor, table_name):
        """
        Returns a dictionary of fieldname -> infodict for the given table,
        where each infodict is in the format:
            {'primary_key': boolean representing whether it's the primary key,
             'unique': boolean representing whether it's a unique index}
        """
        indexes = {}
        for info in self._table_info(cursor, table_name):
            indexes[info['name']] = {'primary_key': info['pk'] != 0,
                                     'unique': False}
        cursor.execute('PRAGMA index_list(%s)' % self.connection.ops.quote_name(table_name))
        # seq, name, unique
        for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]:
            if not unique:
                continue
            cursor.execute('PRAGMA index_info(%s)' % self.connection.ops.quote_name(index))
            info = cursor.fetchall()
            # Skip indexes across multiple fields
            if len(info) != 1:
                continue
            name = info[0][2] # seqno, cid, name
            indexes[name]['unique'] = True
        return indexes
    def _table_info(self, cursor, name):
        # PRAGMA table_info returns one row per column.
        cursor.execute('PRAGMA table_info(%s)' % self.connection.ops.quote_name(name))
        # cid, name, type, notnull, dflt_value, pk
        return [{'name': field[1],
                 'type': field[2],
                 'null_ok': not field[3],
                 'pk': field[5]     # undocumented
                 } for field in cursor.fetchall()]
| apache-2.0 |
mrbandrews/bitcoin | qa/rpc-tests/netutil.py | 8 | 4561 | #!/usr/bin/env python
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Linux network utilities
import sys
import socket
import fcntl
import struct
import array
import os
import binascii
# Roughly based on http://voorloopnul.com/blog/a-python-netstat-in-less-than-100-lines-of-code/ by Ricardo Pascal
# TCP connection states as hex strings, matching the "st" column of
# /proc/net/tcp (see Linux kernel include/net/tcp_states.h).
STATE_ESTABLISHED = '01'
STATE_SYN_SENT = '02'
STATE_SYN_RECV = '03'
STATE_FIN_WAIT1 = '04'
STATE_FIN_WAIT2 = '05'
STATE_TIME_WAIT = '06'
STATE_CLOSE = '07'
STATE_CLOSE_WAIT = '08'
STATE_LAST_ACK = '09'
STATE_LISTEN = '0A'
STATE_CLOSING = '0B'
def get_socket_inodes(pid):
    '''
    Get list of socket inodes for process pid.
    '''
    fd_dir = '/proc/%i/fd' % pid
    links = (os.readlink(os.path.join(fd_dir, entry))
             for entry in os.listdir(fd_dir))
    # A socket fd's symlink reads "socket:[<inode>]"; extract the number.
    return [int(link[8:-1]) for link in links if link.startswith('socket:')]
def _remove_empty(array):
return [x for x in array if x !='']
def _convert_ip_port(array):
host,port = array.split(':')
# convert host from mangled-per-four-bytes form as used by kernel
host = binascii.unhexlify(host)
host_out = ''
for x in range(0, len(host)/4):
(val,) = struct.unpack('=I', host[x*4:(x+1)*4])
host_out += '%08x' % val
return host_out,int(port,16)
def netstat(typ='tcp'):
    '''
    Function to return a list with status of tcp connections at linux systems
    To get pid of all network process running on system, you must run this script
    as superuser
    '''
    # Each row becomes [id, (local_host, local_port), (remote_host,
    # remote_port), state, inode]; typ selects /proc/net/tcp or tcp6.
    with open('/proc/net/'+typ,'r') as f:
        content = f.readlines()
        content.pop(0)  # drop the header line
    result = []
    for line in content:
        line_array = _remove_empty(line.split(' '))     # Split lines and remove empty spaces.
        tcp_id = line_array[0]
        l_addr = _convert_ip_port(line_array[1])
        r_addr = _convert_ip_port(line_array[2])
        state = line_array[3]
        inode = int(line_array[9])  # Need the inode to match with process pid.
        nline = [tcp_id, l_addr, r_addr, state, inode]
        result.append(nline)
    return result
def get_bind_addrs(pid):
    '''
    Get bind addresses as (host,port) tuples for process pid.
    '''
    # A connection is a listening bind of this process iff it is in
    # LISTEN state and its inode belongs to one of the process's fds.
    owned = set(get_socket_inodes(pid))
    return [conn[1]
            for conn in netstat('tcp') + netstat('tcp6')
            if conn[3] == STATE_LISTEN and conn[4] in owned]
# from: http://code.activestate.com/recipes/439093/
def all_interfaces():
    '''
    Return all interfaces that are up
    '''
    # SIOCGIFCONF fills a caller-provided buffer; struct ifreq is 40 bytes
    # on 64-bit and 32 bytes on 32-bit kernels.
    is_64bits = sys.maxsize > 2**32
    struct_size = 40 if is_64bits else 32
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    max_possible = 8 # initial value
    while True:
        bytes = max_possible * struct_size
        names = array.array('B', '\0' * bytes)
        outbytes = struct.unpack('iL', fcntl.ioctl(
            s.fileno(),
            0x8912,  # SIOCGIFCONF
            struct.pack('iL', bytes, names.buffer_info()[0])
        ))[0]
        # If the kernel filled the whole buffer, it may have truncated the
        # list; retry with a bigger buffer until there is slack.
        if outbytes == bytes:
            max_possible *= 2
        else:
            break
    namestr = names.tostring()
    # Interface name is at offset 0 of each record; the IPv4 address sits
    # at bytes 20..24 (inside the sockaddr). Python 2 only (tostring(),
    # str-based array init).
    return [(namestr[i:i+16].split('\0', 1)[0],
             socket.inet_ntoa(namestr[i+20:i+24]))
            for i in range(0, outbytes, struct_size)]
def addr_to_hex(addr):
    '''
    Convert string IPv4 or IPv6 address to binary address as returned by
    get_bind_addrs.
    Very naive implementation that certainly doesn't work for all IPv6 variants.
    '''
    if '.' in addr:  # IPv4: four decimal octets
        out = [int(part) for part in addr.split('.')]
    elif ':' in addr:  # IPv6: colon-separated 16-bit groups, "::" elides zeros
        halves = [[], []]  # bytes before and after a "::"
        section = 0        # 0 while filling the prefix, 1 after "::"
        groups = addr.split(':')
        last = len(groups) - 1
        for idx, group in enumerate(groups):
            if group == '':
                # A leading or trailing "::" produces an empty component at
                # the ends; those are ignored.
                if idx == 0 or idx == last:
                    continue
                section += 1  # the "::" switches us to the suffix half
                assert(section < 2)
            else:
                value = int(group, 16)
                halves[section].append(value >> 8)
                halves[section].append(value & 0xff)
        pad = 16 - len(halves[0]) - len(halves[1])
        assert((section == 0 and pad == 0) or (section == 1 and pad > 0))
        out = halves[0] + ([0] * pad) + halves[1]
    else:
        raise ValueError('Could not parse address %s' % addr)
    return binascii.hexlify(bytearray(out))
| mit |
chacoroot/planetary | addons/hr_expense/__init__.py | 436 | 1079 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_expense
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
chaubold/opengm | src/interfaces/python/test.py | 2 | 52311 | import numpy
import opengm
import os
import sys
import random
import opengm.learning
class TestAllExampes:
    """Would run every non-GUI example script under examples/.

    The body is currently disabled (pass) -- the execfile/subprocess calls
    are commented out, so this test only walks the directory.
    """
    def test_run(self):
        for r, d, f in os.walk("examples"):
            for files in f:
                if files.endswith(".py"):
                    if(not str(files).endswith('gui.py')):
                        pass
                        # execfile(filePath)
                        # subprocess.call([filePath, arg1, arg2])
                        #execfile("examples/" + files)
def lenOfGen(gen):
    """Return the number of items produced by an iterable/generator.

    Consumes the iterable. Uses a running count instead of materializing
    the whole sequence into a throwaway list.
    """
    return sum(1 for _ in gen)
def generate_grid(dimx, dimy, labels, beta1, beta2, operator="adder"):
    """Build a dimx*dimy grid model with random unaries and one shared
    pairwise function (beta1 on the diagonal, 1 elsewhere).

    beta2 is unused -- kept for backward compatibility.  # NOTE(review)
    """
    nos = numpy.ones(dimx * dimy, dtype=numpy.uint64) * labels
    gm = opengm.gm(nos, operator, 0)
    # Random first-order factors with values in [0.2, 0.8).
    for vi in range(dimx * dimy):
        f1 = numpy.random.random((labels,)).astype(numpy.float64) * 0.6 + 0.2
        assert len(f1.shape) == 1
        assert f1.shape[0] == labels
        fid1 = gm.addFunction(f1)
        gm.addFactor(fid1, (vi,))
    # One shared second-order function for every grid edge.
    f2 = numpy.ones([labels, labels], dtype=numpy.float64)
    for l in range(labels):
        f2[l, l] = beta1
    fid2 = gm.addFunction(f2)
    for y in range(dimy):
        for x in range(dimx):
            if x + 1 < dimx:
                vis = [x + y * dimx, x + 1 + y * dimx]
                # BUG FIX: the original wrote "vis.sort" without calling it
                # (a no-op attribute access); factor variable indices must
                # be sorted, so actually call sort().
                vis.sort()
                gm.addFactor(fid2, vis)
            if y + 1 < dimy:
                vis = [x + y * dimx, x + (y + 1) * dimx]
                vis.sort()
                gm.addFactor(fid2, vis)
    return gm
def generate_mc_grid(dimx, dimy, operator="adder"):
    """Build a multicut-style grid model: one variable per pixel, each with
    dimx*dimy labels, connected by Potts terms with random weights in [-1, 1)."""
    labels = dimx * dimy
    nos = numpy.ones(labels, dtype=numpy.uint64) * labels
    gm = opengm.gm(nos, operator, 0)
    for y in range(dimy):
        for x in range(dimx):
            if x + 1 < dimx:
                vis = [x + y * dimx, x + 1 + y * dimx]
                # BUG FIX: "vis.sort" without parentheses was a no-op;
                # factor variable indices must be sorted.
                vis.sort()
                l = random.random() * 2.0 - 1.0
                fr = opengm.pottsFunction([labels, labels], 0.0, l)
                fid2 = gm.addFunction(fr)
                gm.addFactor(fid2, vis)
            if y + 1 < dimy:
                vis = [x + y * dimx, x + (y + 1) * dimx]
                vis.sort()
                l = random.random() * 2.0 - 1.0
                fr = opengm.pottsFunction([labels, labels], 0.0, l)
                fid2 = gm.addFunction(fr)
                gm.addFactor(fid2, vis)
    return gm
def makeGrid(dimx, dimy, labels, beta, acc="min"):
    """Build a grid model with random unaries and a Potts-like pairwise
    (0 on the diagonal, beta elsewhere); acc picks adder vs multiplier."""
    nos = numpy.ones(dimx * dimy, dtype=numpy.uint64) * labels
    if acc == "min":
        gm = opengm.adder.GraphicalModel(nos)
    else:
        gm = opengm.multiplier.GraphicalModel(nos)
    for vi in range(dimx * dimy):
        f1 = numpy.random.random((labels,)).astype(numpy.float64)
        fid1 = gm.addFunction(f1)
        gm.addFactor(fid1, (vi,))
    f2 = numpy.ones(labels * labels, dtype=numpy.float64).reshape(
        labels, labels) * beta
    for l in range(labels):
        f2[l, l] = 0
    fid2 = gm.addFunction(f2)
    for y in range(dimy):
        for x in range(dimx):
            # NOTE(review): "< dimx - 1" skips edges touching the last
            # column/row, unlike generate_grid's "< dimx" -- confirm this
            # boundary is intentional.
            if x + 1 < dimx - 1:
                gm.addFactor(fid2, [x + y * dimx, x + 1 + y * dimx])
            if y + 1 < dimy - 1:
                gm.addFactor(fid2, [x + y * dimx, x + (y + 1) * dimx])
    return gm
def checkSolution(gm, argOpt, arg, acc="min", tolerance=None, check=True):
    """Compare a solver labeling against a known (near-)optimal one.

    Evaluates both labelings on ``gm`` and, when ``check`` is True,
    asserts that ``arg``'s value is within tolerance of the optimal
    value — from above for a minimizer, from below for a maximizer.

    Parameters
    ----------
    gm : object exposing ``evaluate(labeling) -> float``
    argOpt : labeling with the reference (optimal) value
    arg : labeling produced by the solver under test
    acc : 'min' or 'max' accumulation semantics
    tolerance : relative tolerance as a fraction of the optimal value;
        None means "numerically equal up to float rounding"
    check : when False the comparison is skipped entirely
    """
    valOpt = gm.evaluate(argOpt)
    val = gm.evaluate(arg)
    numtol = 0.00000000001  # absolute slack for float rounding
    if check:
        if acc == "min":
            # solver value must not exceed the optimum by more than tol
            if tolerance is None:
                tol = numtol
            else:
                tol = valOpt * tolerance
            assert(val - tol <= valOpt)
        if acc == "max":
            # Solver value must not fall below the optimum by more
            # than tol.
            # BUGFIX: the original asserted ``val - tol >= valOpt``,
            # which fails even when val == valOpt exactly (the slack
            # was subtracted instead of added).
            if tolerance is None:
                tol = numtol
            else:
                tol = valOpt * tolerance + numtol
            assert(val + tol >= valOpt)
def checkInference(gm, solver, argOpt, optimal=False, tolerance=None,
                   acc="min"):
    """Run ``solver`` on ``gm`` and compare its labeling to ``argOpt``.

    The value comparison inside ``checkSolution`` is only enforced when
    ``optimal`` is True.
    """
    solver.infer()
    inferredArg = solver.arg()
    checkSolution(gm, argOpt, inferredArg, acc, tolerance, optimal)
class TestUtilities:
    """Smoke tests for small opengm helper/conversion utilities."""
    def test_vector(self):
        # placeholder: the vector wrappers are exercised elsewhere
        assert(True)
    def test_enums(self):
        # placeholder: the enum bindings are exercised elsewhere
        assert(True)
    def test_is_build_in_simple_parameter(self):
        class MyClass(object):
            def __init__(self):
                pass
        checker = opengm._to_native_converter.is_build_in_simple_parameter
        # a user-defined class is not a simple built-in parameter
        assert(not checker(classType=MyClass))
        assert(not checker(instanceType=MyClass()))
        # a plain bool is
        assert(checker(classType=bool))
        assert(checker(instanceType=bool()))
        # so are the other scalar built-ins, as instances ...
        for simpleInstance in (1, 1.0, '1.0'):
            assert(checker(instanceType=simpleInstance))
        # ... and as types / default instances
        for simpleType in (int, long, float, bool, str):
            assert(checker(classType=simpleType))
            assert(checker(instanceType=simpleType()))
    def test_is_tribool(self):
        is_tribool = opengm._to_native_converter.is_tribool
        # opengm's Tribool is recognised both as a type and an instance
        assert(is_tribool(classType=opengm.Tribool))
        assert(is_tribool(instanceType=opengm.Tribool(0)))
        # a plain bool must not be mistaken for a Tribool
        assert(not is_tribool(classType=bool))
        assert(not is_tribool(
            instanceType=True))
class TestSparseFunction:
    """Tests for opengm.SparseFunction construction and element access."""
    def test_constructor(self):
        # list and tuple shape arguments must behave identically
        for shapeArg in ([2, 3, 4], (2, 3, 4)):
            f = opengm.SparseFunction(shapeArg, 1)
            assert(f.defaultValue == 1)
            assert(f.dimension == 3)
            assert(len(f.shape) == 3)
            for axis, extent in enumerate((2, 3, 4)):
                assert(f.shape[axis] == extent)
            assert(f.size == 2 * 3 * 4)
    def test_key_to_coordinate(self):
        f = opengm.SparseFunction([2, 3, 4], 0)
        coord = numpy.ones(3, dtype=numpy.uint64)
        # scalar keys must map back to the coordinates that the shape
        # walker enumerates, in the same order
        for key, expected in enumerate(opengm.shapeWalker(f.shape)):
            f.keyToCoordinate(key, coord)
            for want, got in zip(expected, coord):
                assert want == got
    def test_dense_assignment(self):
        f = opengm.SparseFunction()
        dense = numpy.zeros([3, 4])
        dense[0, 1] = 1
        dense[0, 2] = 2
        f.assignDense(dense, 0)
        assert f.dimension == 2
        assert f.shape[0] == 3
        assert f.shape[1] == 4
        assert f[[0, 0]] == 0
        assert f[[0, 1]] == 1
        assert f[[0, 2]] == 2
        # every entry must round-trip through the sparse view
        for c in opengm.shapeWalker(f.shape):
            assert f[c] == dense[c[0], c[1]]
        # only the two non-default values are actually stored
        assert len(f.container) == 2
class TestFunctions:
    """Tests for the vectorized opengm function factories."""
    def test_potts(self):
        nl1 = numpy.ones(10, dtype=numpy.uint64) * 2
        nl2 = numpy.ones(5, dtype=numpy.uint64) * 3
        valueEqual = numpy.zeros(1, dtype=numpy.float64)
        valueNotEqual = numpy.arange(0, 10, dtype=numpy.float64)
        fVec = opengm.PottsFunctionVector(nl1, nl2, valueEqual,
                                          valueNotEqual)
        assert len(fVec) == 10
        for i, f in enumerate(fVec):
            # shape is broadcast from nl1 / nl2
            assert f.shape[0] == 2
            assert f.shape[1] == 3
            # equal labels -> shared 'equal' value (0),
            # differing labels -> per-function 'not equal' value
            assert f[0, 0] == 0
            assert f[[1, 1]] == 0
            assert f[[0, 1]] == valueNotEqual[i]
class TestGm:
    """Tests for graphical-model construction, factor insertion,
    evaluation and the generic iteration helpers (variables / factors
    generators)."""
    def test_constructor_generic(self):
        # The label-count argument must be accepted from any iterable:
        # numpy array, list, tuple, generator expression, generator
        # function and opengm's own IndexVector.
        def mygen():
            yield 2
            yield 3
            yield 4
        nos_list = [
            numpy.arange(2, 5, dtype=numpy.uint64),
            [2, 3, 4],
            (2, 3, 4),
            (x for x in xrange(2, 5)),
            mygen(),
            opengm.IndexVector(x for x in xrange(2, 5))
        ]
        for i, nos in enumerate(nos_list):
            if(type(nos) != type(mygen())):
                pass
                # assert(len(nos)==3)
            gm = opengm.gm(nos, operator='adder')
            assert(gm.numberOfVariables == 3)
            assert(gm.numberOfLabels(0) == 2)
            assert(gm.numberOfLabels(1) == 3)
            assert(gm.numberOfLabels(2) == 4)
            assert(gm.space().numberOfVariables == 3)
            assert(gm.space()[0] == 2)
            assert(gm.space()[1] == 3)
            assert(gm.space()[2] == 4)
        # Same check via assign() on a default-constructed model; the
        # list is rebuilt because the generators above are exhausted.
        nos_list = [
            numpy.arange(2, 5, dtype=numpy.uint64),
            [2, 3, 4],
            (2, 3, 4),
            (x for x in xrange(2, 5)),
            mygen(),
            opengm.IndexVector(x for x in xrange(2, 5))
        ]
        for i, nos in enumerate(nos_list):
            if(type(nos) != type(mygen())):
                pass # assert(len(nos)==3)
            gm = opengm.adder.GraphicalModel()
            gm.assign(nos)
            assert(gm.numberOfVariables == 3)
            assert(gm.numberOfLabels(0) == 2)
            assert(gm.numberOfLabels(1) == 3)
            assert(gm.numberOfLabels(2) == 4)
            assert(gm.space().numberOfVariables == 3)
            assert(gm.space()[0] == 2)
            assert(gm.space()[1] == 3)
            assert(gm.space()[2] == 4)
    def test_add_factors_generic(self):
        # addFactor must accept the variable indices from any iterable.
        def mygen():
            yield 0
            yield 1
        gm = opengm.gm([2, 4])
        f = opengm.PottsFunction([2, 4], 0.0, 1.0)
        fid = gm.addFunction(f)
        vis_list = [
            [0, 1],
            (0, 1),
            (x for x in xrange(2)),
            mygen(),
            opengm.IndexVector(x for x in xrange(0, 2)),
            numpy.arange(0, 2, dtype=numpy.uint64)
        ]
        for i, vis in enumerate(vis_list):
            fIndex = gm.addFactor(fid, vis)
            assert(gm.numberOfFactors == i + 1)
            assert(fIndex == i)
            assert(gm[fIndex].numberOfVariables == 2)
            assert(gm[fIndex].shape[0] == 2)
            assert(gm[fIndex].shape[1] == 4)
            assert(gm[fIndex].variableIndices[0] == 0)
            assert(gm[fIndex].variableIndices[1] == 1)
    def test_add_function(self):
        # A dense numpy function must round-trip through the model:
        # reductions and per-entry access agree with the source array.
        numberOfStates = [2, 3, 4]
        gm = opengm.adder.GraphicalModel(numberOfStates)
        f1 = numpy.ones(6 * 4, numpy.float64)
        p = 1
        # fill with 0..23; note p stays 0 because the first entry is 0
        for i in range(2 * 3 * 4):
            f1[i] = i
            p *= i
        f1 = f1.reshape(2, 3, 4)
        idf = gm.addFunction(f1)
        gm.addFactor(idf, (0, 1, 2))
        assert(gm[0].min() == 0)
        assert(gm[0].max() == 2 * 3 * 4 - 1)
        assert(gm[0].sum() == sum(range(2 * 3 * 4)))
        assert(gm[0].product() == p)
        nf1 = gm[0].__array__()
        assert(len(f1.shape) == len(nf1.shape))
        for i in range(len(f1.shape)):
            assert(f1.shape[i] == nf1.shape[i])
        for k in range(f1.shape[2]):
            for j in range(f1.shape[1]):
                for i in range(f1.shape[0]):
                    assert(gm[0][numpy.array(
                        [i, j, k], dtype=numpy.uint64)] == f1[i, j, k])
                    assert(gm[0][(i, j, k)] == f1[i, j, k])
                    assert(gm[0][(i, j, k)] == nf1[i, j, k])
    def test_add_multiple_functions(self):
        # addFunctions on an (nFunctions, labels, ...) array must
        # return one function id per leading-axis slice.
        nVar = 10
        nLabels = 2
        for nFunctions in [1, 10]:
            for order in [1, 2, 3, 4]:
                gm = opengm.gm([nLabels] * nVar)
                # add functionS
                fShape = [nFunctions] + [nLabels] * order
                f = numpy.ones(fShape, dtype=opengm.value_type).reshape(-1)
                f[:] = numpy.random.rand(f.size)[:]
                f = f.reshape(fShape)
                fids = gm.addFunctions(f)
                # assertions
                assert len(fids) == nFunctions
    def test_add_multiple_functions_order1(self):
        nVar = 4
        nLabels = 2
        gm = opengm.gm([nLabels] * nVar)
        # add functionS
        fShape =[4,2]
        f = opengm.randomFunction(fShape)
        # one unary factor per variable, in order
        vis=numpy.ones([4,1])
        vis[0,0]=0
        vis[1,0]=1
        vis[2,0]=2
        vis[3,0]=3
        fids = gm.addFunctions(f)
        gm.addFactors(fids,vis)
        assert gm[1][(0,)]==f[1,0]
        assert gm[1][(1,)]==f[1,1]
        for x in xrange(4):
            assert gm[x][(0,)]==f[x,0]
            assert gm[x][(1,)]==f[x,1]
    def test_add_multiple_functions_order2a(self):
        nVar = 4
        nLabels = 2
        gm = opengm.gm([nLabels] * nVar)
        # add functionS
        fShape =[2,2,2]
        f = opengm.randomFunction(fShape)
        # NOTE(review): vis is built but never used in this test; the
        # single third-order factor is attached to [0,1,2] directly.
        vis=numpy.ones([4,2])
        vis[0,0]=0
        vis[0,1]=1
        vis[1,0]=1
        vis[1,1]=2
        fid = gm.addFunction(f)
        gm.addFactor(fid,[0,1,2])
        assert gm[0][0,0,0]==f[0,0,0]
        assert gm[0][1,0,0]==f[1,0,0]
        assert gm[0][0,1,0]==f[0,1,0]
        assert gm[0][1,1,0]==f[1,1,0]
    def test_add_multiple_functions_order2(self):
        nVar = 4
        nLabels = 2
        gm = opengm.gm([nLabels] * nVar)
        # add functionS
        fShape =[4,2,2]
        f = opengm.randomFunction(fShape)
        # four pairwise factors along a small cycle of variables
        vis=numpy.ones([4,2])
        vis[0,0]=0
        vis[0,1]=1
        vis[1,0]=1
        vis[1,1]=2
        vis[2,0]=2
        vis[2,1]=3
        vis[3,0]=0
        vis[3,1]=3
        fids = gm.addFunctions(f)
        gm.addFactors(fids,vis)
        assert gm[1][0,0]==f[1,0,0]
        assert gm[1][1,1]==f[1,1,1]
        assert gm[1][1,0]==f[1,1,0]
        assert gm[1][0,1]==f[1,0,1]
        for x in xrange(4):
            assert gm[x][0,0]==f[x,0,0]
            assert gm[x][1,1]==f[x,1,1]
            assert gm[x][1,0]==f[x,1,0]
            assert gm[x][0,1]==f[x,0,1]
    def test_add_multiple_functions_with_map(self):
        # function ids collected via map() must be usable in addFactors
        # (Python 2: map returns a list)
        gm = opengm.gm([2] * 10)
        def add_a_function(w):
            return gm.addFunction(opengm.differenceFunction(shape=[2, 2],
                                  weight=w))
        weights = [0.2, 0.3, 0.4]
        fidList = map(add_a_function, weights)
        assert isinstance(fidList, list)
        assert len(fidList) == len(weights)
        gm.addFactors(fidList, [[0, 1], [1, 2], [3, 4]])
    def test_evaluate(self):
        # With every function identically 1, the model value of any
        # labeling equals the number of factors.
        numberOfStates = [2, 2, 2, 2]
        gm = opengm.adder.GraphicalModel(numberOfStates)
        f1 = numpy.ones(2, dtype=numpy.float64).reshape(2)
        f2 = numpy.ones(4, dtype=numpy.float64).reshape(2, 2)
        for i in range(3):
            gm.addFactor(gm.addFunction(f1), [i])
        for i in range(2):
            gm.addFactor(gm.addFunction(f2), [i, i + 1])
        sequenceList = [0, 1, 0, 1]
        valueList = gm.evaluate(sequenceList)
        assert(float(valueList) == float(gm.numberOfFactors))
        sequenceNumpy = numpy.array([0, 1, 0, 1], dtype=numpy.uint64)
        valueNumpy = gm.evaluate(sequenceNumpy)
        assert(float(valueNumpy) == float(gm.numberOfFactors))
        assert(float(valueNumpy) == float(valueList))
    def test_variables_generator(self):
        # gm.variables() can filter by exact / min / max label counts
        nos = [2, 3, 4, 5, 6]
        gm = opengm.adder.GraphicalModel(nos)
        truevis = [0, 1, 2, 3, 4]
        myvis = [vi for vi in gm.variables()]
        assert (len(truevis) == len(myvis))
        for a, b in zip(truevis, myvis):
            assert a == b
        truevis = [2]
        myvis = [vi for vi in gm.variables(labels=4)]
        assert (len(truevis) == len(myvis))
        for a, b in zip(truevis, myvis):
            assert a == b
        truevis = [1, 2, 3, 4]
        myvis = [vi for vi in gm.variables(minLabels=3)]
        assert (len(truevis) == len(myvis))
        for a, b in zip(truevis, myvis):
            assert a == b
        truevis = [0, 1, 2]
        myvis = [vi for vi in gm.variables(maxLabels=4)]
        assert (len(truevis) == len(myvis))
        for a, b in zip(truevis, myvis):
            assert a == b
        truevis = [1, 2]
        myvis = [vi for vi in gm.variables(minLabels=3, maxLabels=4)]
        assert (len(truevis) == len(myvis))
        for a, b in zip(truevis, myvis):
            assert a == b
    def test_factor_generators(self):
        # Build one factor of each order 1..5, then check that
        # factors()/factorIds()/factorsAndIds() filter correctly by
        # order / minOrder / maxOrder.
        numberOfStates = [2, 2, 2, 2, 2]
        gm = opengm.adder.GraphicalModel(numberOfStates)
        functions = [numpy.ones(2, dtype=numpy.float64).reshape(2),
                     numpy.ones(4, dtype=numpy.float64).reshape(2, 2),
                     numpy.ones(8, dtype=numpy.float64).reshape(2, 2, 2),
                     numpy.ones(16, dtype=numpy.float64).reshape(2, 2, 2, 2),
                     numpy.ones(32,
                                dtype=numpy.float64).reshape(2, 2, 2, 2, 2)]
        for f in functions:
            fid = gm.addFunction(f)
            vis = [i for i in xrange(len(f.shape))]
            gm.addFactor(fid, vis)
        assert gm.numberOfVariables == 5
        # test generators
        # factor i (0-based) has order i+1 by construction
        for i, factor in enumerate(gm.factors(), start=1):
            assert factor.numberOfVariables == i
        for i, fId in enumerate(gm.factorIds()):
            assert fId == i
        for i, (factor, fId) in enumerate(gm.factorsAndIds()):
            assert fId == i
            assert factor.numberOfVariables == i + 1
        # with order
        # exact order: exactly one factor of each order exists
        for order in xrange(1, 6):
            gens = []
            gens.append(gm.factors(order=order))
            gens.append(gm.factorIds(order=order))
            gens.append(gm.factorsAndIds(order=order))
            for gen in gens:
                assert lenOfGen(gen) == 1
            # generators are exhausted by lenOfGen, so rebuild them
            gens = []
            gens.append(gm.factors(order=order))
            gens.append(gm.factorIds(order=order))
            gens.append(gm.factorsAndIds(order=order))
            for factor in gens[0]:
                assert factor.numberOfVariables == order
            for fId in gens[1]:
                assert gm[fId].numberOfVariables == order
            for factor, fId in gens[2]:
                assert factor.numberOfVariables == order
                assert gm[fId].numberOfVariables == order
        # with order
        # minOrder: orders order..5 remain -> 6 - order factors
        for order in xrange(1, 6):
            orderSets = [set(), set(), set()]
            gens = [gm.factors(minOrder=order), gm.factorIds(
                minOrder=order), gm.factorsAndIds(minOrder=order)]
            assert(len(gens) == 3)
            for gen in gens:
                print "len assert"
                assert lenOfGen(gen) == 6 - order
            gens = [gm.factors(minOrder=order), gm.factorIds(
                minOrder=order), gm.factorsAndIds(minOrder=order)]
            for factor in gens[0]:
                assert factor.numberOfVariables >= order
                orderSets[0].add(factor.numberOfVariables)
            for fId in gens[1]:
                assert gm[fId].numberOfVariables >= order
                orderSets[1].add(gm[fId].numberOfVariables)
            for factor, fId in gens[2]:
                assert factor.numberOfVariables >= order
                assert gm[fId].numberOfVariables >= order
                orderSets[2].add(factor.numberOfVariables)
            for oset in orderSets:
                assert len(oset) == 6 - order
        # maxOrder: orders 1..order remain -> order factors
        for order in xrange(2, 6):
            orderSets = [set(), set(), set()]
            gens = [gm.factors(maxOrder=order), gm.factorIds(
                maxOrder=order), gm.factorsAndIds(maxOrder=order)]
            assert(len(gens) == 3)
            for gen in gens:
                print "len assert"
                assert lenOfGen(gen) == order
            gens = [gm.factors(maxOrder=order), gm.factorIds(
                maxOrder=order), gm.factorsAndIds(maxOrder=order)]
            for factor in gens[0]:
                assert factor.numberOfVariables <= order
                orderSets[0].add(factor.numberOfVariables)
            for fId in gens[1]:
                assert gm[fId].numberOfVariables <= order
                orderSets[1].add(gm[fId].numberOfVariables)
            for factor, fId in gens[2]:
                assert factor.numberOfVariables <= order
                assert gm[fId].numberOfVariables <= order
                orderSets[2].add(factor.numberOfVariables)
            for oset in orderSets:
                assert len(oset) == order
        # combined minOrder..maxOrder=4 window
        for order in xrange(1, 6):
            orderSets = [set(), set(), set()]
            gens = [gm.factors(minOrder=order, maxOrder=4),
                    gm.factorIds(minOrder=order, maxOrder=4),
                    gm.factorsAndIds(minOrder=order, maxOrder=4)]
            assert(len(gens) == 3)
            for gen in gens:
                print "len assert"
                assert lenOfGen(gen) == 6 - order - 1
            gens = [gm.factors(minOrder=order, maxOrder=4),
                    gm.factorIds(minOrder=order, maxOrder=4),
                    gm.factorsAndIds(minOrder=order, maxOrder=4)]
            for factor in gens[0]:
                assert (factor.numberOfVariables >= order
                        and factor.numberOfVariables <= 4)
                orderSets[0].add(factor.numberOfVariables)
            for fId in gens[1]:
                assert gm[fId].numberOfVariables >= order and gm[
                    fId].numberOfVariables <= 4
                orderSets[1].add(gm[fId].numberOfVariables)
            for factor, fId in gens[2]:
                assert(factor.numberOfVariables >= order
                       and factor.numberOfVariables <= 4)
                assert gm[fId].numberOfVariables >= order and gm[
                    fId].numberOfVariables <= 4
                orderSets[2].add(factor.numberOfVariables)
            for oset in orderSets:
                assert len(oset) == 6 - order - 1
class TestFactor:
    """Tests for factor shape, variable-index and value accessors."""
    def test_factor_shape(self):
        numberOfStates = [2, 3, 4]
        gm = opengm.adder.GraphicalModel(numberOfStates)
        values = numpy.ones(6 * 4, numpy.float64).reshape(2, 3, 4)
        gm.addFactor(gm.addFunction(values), (0, 1, 2))
        factor = gm[0]
        asArray = factor.__array__()  # dense conversion must not raise
        # every representation of the shape proxy must agree with the
        # label counts of the attached variables
        for axis, nLabels in enumerate(numberOfStates):
            assert(factor.shape[axis] == nLabels)
            assert(factor.shape.__array__()[axis] == nLabels)
            assert(factor.shape.__list__()[axis] == nLabels)
            assert(factor.shape.__tuple__()[axis] == nLabels)
    def test_factor_vi(self):
        numberOfStates = [2, 3, 4]
        gm = opengm.adder.GraphicalModel(numberOfStates)
        values = numpy.ones(6 * 4, numpy.float64).reshape(2, 3, 4)
        gm.addFactor(gm.addFunction(values), (0, 1, 2))
        factor = gm[0]
        asArray = factor.__array__()  # dense conversion must not raise
        # the factor was attached to variables (0, 1, 2) in order, so
        # every representation of variableIndices is the identity
        for pos in range(3):
            assert(factor.variableIndices[pos] == pos)
            assert(factor.variableIndices.__array__()[pos] == pos)
            assert(factor.variableIndices.__list__()[pos] == pos)
            assert(factor.variableIndices.__tuple__()[pos] == pos)
    def test_factor_properties(self):
        numberOfStates = [2, 2, 2, 2]
        gm = opengm.adder.GraphicalModel(numberOfStates)
        assert(gm.space().numberOfVariables == 4)
        assert(gm.numberOfFactors == 0)
        f1 = numpy.array([2, 3], numpy.float64)
        f2 = numpy.array([1, 2, 3, 4], numpy.float64).reshape(2, 2)
        if1 = gm.addFunction(f1)
        if2 = gm.addFunction(f2)
        gm.addFactor(if1, (0,))
        gm.addFactor(if2, (0, 1))
        nf0 = gm[0].__array__()
        nf1 = gm[1].__array__()
        # the dense views must match both the factor accessor and the
        # source numpy arrays, entry by entry
        for i in range(f1.shape[0]):
            assert(nf0[i] == gm[0][(i,)])
            assert(nf0[i] == f1[i])
        for i in range(f2.shape[0]):
            for j in range(f2.shape[1]):
                assert(nf1[i, j] == gm[1][(i, j)])
                assert(nf1[i, j] == f2[i, j])
        # reductions over the unary factor [2, 3]
        assert(gm[0].min() == 2)
        assert(gm[0].max() == 3)
        assert(gm[0].sum() == 5)
        assert(gm[0].product() == 6)
        assert(gm[0][(0,)] == 2)
        assert(gm[0][(1,)] == 3)
        # reductions over the pairwise factor [[1, 2], [3, 4]]
        assert(gm[1].min() == 1)
        assert(gm[1].max() == 4)
        assert(gm[1].sum() == 1 + 2 + 3 + 4)
        assert(gm[1].product() == 1 * 2 * 3 * 4)
def genericSolverCheck(solverClass, params, gms, semiRings,checkPartial=False,checkMarginals=False,testPythonVisitor=True,testLpInterface=False):
    """Generic smoke test driver for an inference solver class.

    For every (operator, accumulator) pair in ``semiRings``, every model
    in ``gms`` (a list of dicts mapping operator name -> model) and every
    parameter set in ``params``, the solver is constructed and run, and
    the optional interfaces (LP variables/constraints, Python visitor,
    partial optimality, marginals) are exercised when enabled.
    """
    # tiny visitor recording which callbacks were fired
    class PyCallback(object):
        def __init__(self):
            self.inBegin=False
            self.inEnd=False
            self.inVisit=False
        def begin(self,inference):
            self.inBegin=True
        def end(self,inference):
            self.inEnd=True
        def visit(self,inference):
            self.inVisit=True
    for operator, accumulator in semiRings:
        for gmGen in gms:
            gm = gmGen[operator]
            for param in params:
                # start inference
                solver = solverClass(gm=gm, accumulator=accumulator, parameter=param)
                solver2 = solverClass(gm=gm, accumulator=accumulator, parameter=param)
                if (testLpInterface==True):
                    # node LP variables must be numbered consecutively
                    c=0
                    for vi in range(gm.numberOfVariables):
                        nl = gm.numberOfLabels(vi)
                        for l in range(nl):
                            assert c==solver.lpNodeVariableIndex(vi,l)
                            c+=1
                    cv=c
                    # factor LP variables come after all node variables
                    for fi in range(gm.numberOfFactors):
                        if gm[fi].numberOfVariables>1:
                            s = gm[fi].size
                            # NOTE(review): `nl` here is left over from the
                            # node loop above, and `s` (the factor size) is
                            # passed where a label/coordinate index is
                            # expected — both look suspicious; confirm
                            # against the C++ binding before relying on it.
                            for l in range(nl):
                                assert solver.lpFactorVariableIndex(fi,s)>0 or cv==0
                            sw = opengm.shapeWalker(gm[fi].shape)
                            for c in sw:
                                assert solver.lpFactorVariableIndex(fi,c)>0 or cv==0
                    # adding single and batched LP constraints must not raise
                    solver2.addConstraint(lpVariableIndices=[0,1],coefficients=[1,1],lowerBound=0.0,upperBound=1.0)
                    solver2.addConstraints(lpVariableIndices=[ [0,1],[0,2] ,[1,2]],coefficients=[ [1,1],[2,2],[1,2]],lowerBounds=[0,0,0],upperBounds=[1,1,1])
                solver.infer()
                # results are only fetched to check the calls succeed;
                # their values are not inspected here
                arg = solver.arg() # no used?
                value = solver.value()
                bound = solver.bound()
                if testPythonVisitor==True:
                    # a fresh solver, driven through the Python visitor
                    solver = solverClass(gm=gm, accumulator=accumulator, parameter=param)
                    callback=PyCallback()
                    pvisitor=solver.pythonVisitor(callback,1)
                    solver.infer(pvisitor)
                    assert callback.inBegin == True
                    assert callback.inEnd == True
                if checkPartial :
                    # partial optimality mask must cover all variables
                    pOptimal = solver.partialOptimality()
                    assert len(pOptimal)==gm.numberOfVariables
                    #assert len(numpy.where(pOptimal==True)[0]) == gm.numberOfVariables
                if checkMarginals :
                    # variable marginals: one row per variable
                    visRange=numpy.arange(gm.numberOfVariables)
                    marginal = solver.marginals(visRange)
                    assert marginal.shape[0]==gm.numberOfVariables
                    assert marginal.shape[1]==gm.numberOfLabels(0)
                    # factor marginals for first- and second-order factors
                    fis1 = gm.factorSubset(order=1).factorIndices
                    fis2 = gm.factorSubset(order=2).factorIndices
                    assert len(fis1)!=0
                    assert len(fis2)!=0
                    factorMarginal1 = solver.factorMarginals(fis1)
                    assert factorMarginal1.ndim==2
                    assert factorMarginal1.shape[0]==len(fis1)
                    assert factorMarginal1.shape[1]==gm.numberOfLabels(0)
                    factorMarginal2 = solver.factorMarginals(fis2)
                    assert factorMarginal2.ndim==3
                    assert factorMarginal2.shape[0]==len(fis2)
                    assert factorMarginal2.shape[1]==gm.numberOfLabels(0)
                    assert factorMarginal2.shape[2]==gm.numberOfLabels(0)
class Test_Inference():
    """Smoke tests for the inference solver bindings.

    A set of small grid / chain / multicut models is built once per
    test run; each test_* method drives one solver family through
    genericSolverCheck on the semirings it supports.  Solvers backed
    by optional third-party libraries are guarded by the corresponding
    opengm.configuration flags.
    """
    def __init__(self):
        # each model dict maps operator name -> prebuilt model
        self.gridGm = {
            'adder': generate_grid(dimx=2, dimy=2, labels=2, beta1=0.1,
                                   beta2=0.2, operator='adder'),
            'multiplier': generate_grid(dimx=2, dimy=2, labels=2, beta1=0.1,
                                        beta2=0.2, operator='multiplier'),
        }
        self.gridGm3 = {
            'adder': generate_grid(dimx=3, dimy=2, labels=3, beta1=0.1,
                                   beta2=0.2, operator='adder'),
            'multiplier': generate_grid(dimx=3, dimy=2, labels=3, beta1=0.1,
                                        beta2=0.2, operator='multiplier'),
        }
        # beta1=0.0 variant (no unary regularization offset)
        self.gridGm30 = {
            'adder': generate_grid(dimx=3, dimy=2, labels=3, beta1=0.0,
                                   beta2=0.2, operator='adder'),
            'multiplier': generate_grid(dimx=3, dimy=2, labels=3, beta1=0.0,
                                        beta2=0.2, operator='multiplier'),
        }
        # dimy=1 turns the grid into a chain
        self.chainGm = {
            'adder': generate_grid(dimx=4, dimy=1, labels=2, beta1=0.1,
                                   beta2=0.2, operator='adder'),
            'multiplier': generate_grid(dimx=4, dimy=1, labels=2, beta1=0.1,
                                        beta2=0.2, operator='multiplier')
        }
        self.chainGm3 = {
            'adder': generate_grid(dimx=4, dimy=1, labels=3, beta1=0.1,
                                   beta2=0.2, operator='adder'),
            'multiplier': generate_grid(dimx=4, dimy=1, labels=3, beta1=0.1,
                                        beta2=0.2, operator='multiplier')
        }
        # multicut-style model (one label per node)
        self.mcGm={
            'adder' : generate_mc_grid(dimx=5,dimy=5,operator='adder'),
            'multiplier' : generate_mc_grid(dimx=5,dimy=5,operator='multiplier')
        }
        # semiring selections handed to genericSolverCheck
        self.all = [('adder', 'minimizer'), ('adder', 'maximizer'), (
            'multiplier', 'minimizer'), ('multiplier', 'maximizer')]
        self.minSum = [('adder', 'minimizer')]
        self.minSumMaxSum = [('adder', 'minimizer'),('adder', 'maximizer')]
        self.minSumMaxProd = [('adder', 'minimizer'), (
            'multiplier', 'maximizer')]
        self.allAndIntegrator= [('adder', 'minimizer'), ('adder', 'maximizer'),
                                ('multiplier', 'minimizer'), ('multiplier', 'maximizer'), ('multiplier','integrator')]
    def test_bruteforce(self):
        solverClass = opengm.inference.Bruteforce
        params = [None, opengm.InfParam()]
        genericSolverCheck(solverClass, params=params,
                           gms=[self.gridGm, self.chainGm, self.gridGm3,
                                self.chainGm3],
                           semiRings=self.all)
    def test_astar_fast(self):
        solverClass = opengm.inference.AStar
        params = [None, opengm.InfParam(heuristic='fast')]
        genericSolverCheck(solverClass, params=params,
                           gms=[self.gridGm, self.chainGm, self.gridGm3,
                                self.chainGm3],
                           semiRings=self.all)
    def test_icm(self):
        solverClass = opengm.inference.Icm
        params = [None, opengm.InfParam(moveType='variable'), opengm.InfParam(
            moveType='factor'), opengm.InfParam()]
        genericSolverCheck(solverClass, params=params,
                           gms=[self.gridGm, self.chainGm, self.gridGm3,
                                self.chainGm3],
                           semiRings=self.all)
    def test_lazyflipper(self):
        solverClass = opengm.inference.LazyFlipper
        params = [None, opengm.InfParam(
            maxSubgraphSize=2), opengm.InfParam()]
        genericSolverCheck(solverClass, params=params,
                           gms=[self.gridGm, self.chainGm, self.gridGm3,
                                self.chainGm3],
                           semiRings=self.all)
    def test_loc(self):
        if opengm.configuration.withAd3:
            solverClass = opengm.inference.Loc
            params = [None]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSumMaxSum)
    def test_dualdecompostion_subgradient(self):
        solverClass = opengm.inference.DualDecompositionSubgradient
        params = [opengm.InfParam()]
        # re-raise with context so a C++-side failure is identifiable
        try:
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.gridGm3, self.chainGm,
                                    self.chainGm3],
                               semiRings=self.minSum)
        except RuntimeError as detail:
            raise RuntimeError("Error In C++ Impl. of "
                               "DualDecompositionSubgradient:\n\nReason: %s"
                               % (str(detail),))
    def test_dualdecompostion_subgradient_dynamic_programming(self):
        solverClass = opengm.inference.DualDecompositionSubgradient
        params = [opengm.InfParam(
            subInference='dynamic-programming', subInfParam=opengm.InfParam()),
            opengm.InfParam(subInference='dynamic-programming',
                            decompositionId='tree',
                            subInfParam=opengm.InfParam())
        ]
        try:
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.gridGm3, self.chainGm,
                                    self.chainGm3],
                               semiRings=self.minSum)
        except RuntimeError as detail:
            raise RuntimeError("Error In C++ Impl. of "
                               "DualDecompositionSubgradient:\n\nReason: %s"
                               % (str(detail),))
    """
    def test_dualdecompostion_subgradient_graph_cut(self):
        solverClass = opengm.inference.DualDecompositionSubgradient
        params = [opengm.InfParam(subInference='graph-cut',
                                  decompositionId='blocks',
                                  subInfParam=opengm.InfParam())]
        try:
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm],
                               semiRings=self.minSum)
        except RuntimeError as detail:
            raise RuntimeError("Error In C++ Impl. of "
                               "DualDecompositionSubgradient:\n\nReason: %s" %
                               (str(detail),))
    """
    #def test_gibbs(self):
    #    solverClass = opengm.inference.Gibbs
    #    params = [opengm.InfParam(steps=10000)]
    #    genericSolverCheck(solverClass, params=params,
    #                       gms=[self.gridGm, self.chainGm, self.gridGm3,
    #                            self.chainGm3],
    #                       semiRings=self.minSumMaxProd)
    def test_bp(self):
        solverClass = opengm.inference.BeliefPropagation
        params = [opengm.InfParam(steps=10)]
        genericSolverCheck(solverClass,
                           params=params,
                           gms=[self.gridGm, self.chainGm, self.gridGm3,self.chainGm3],
                           semiRings=self.allAndIntegrator,checkMarginals=True)
    def test_trwbp(self):
        solverClass = opengm.inference.TreeReweightedBp
        params = [opengm.InfParam(steps=10)]
        genericSolverCheck(solverClass, params=params,
                           gms=[self.gridGm, self.chainGm, self.gridGm3,
                                self.chainGm3],
                           semiRings=self.allAndIntegrator,checkMarginals=True)
    def test_trws_external(self):
        if opengm.configuration.withTrws:
            solverClass = opengm.inference.TrwsExternal
            params = [None, opengm.InfParam(),
                      opengm.InfParam(steps=100, energyType='view'),
                      opengm.InfParam(steps=1, energyType='tables')]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
    def test_graphcut(self):
        solverClass = opengm.inference.GraphCut
        params = [None, opengm.InfParam(),
                  opengm.InfParam(minStCut='boost-kolmogorov'),
                  opengm.InfParam(minStCut='push-relabel')]
        if opengm.configuration.withMaxflow:
            params.append(opengm.InfParam(minStCut='kolmogorov'))
        genericSolverCheck(solverClass, params=params,gms=[self.gridGm, self.chainGm], semiRings=self.minSum,testPythonVisitor=False)
    def test_graphcut_maxflow_ibfs(self):
        if opengm.configuration.withMaxflowIbfs :
            solverClass = opengm.inference.GraphCut
            params=[ opengm.InfParam(minStCut='ibfs') ]
            genericSolverCheck(solverClass, params=params,gms=[self.gridGm, self.chainGm], semiRings=self.minSum,testPythonVisitor=False)
    def test_mqpbo(self):
        if opengm.configuration.withQpbo:
            solverClass = opengm.inference.Mqpbo
            params = [opengm.InfParam(useKovtunsMethod=True)]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm3, self.chainGm3],
                               semiRings=self.minSum, checkPartial = True,testPythonVisitor=False)
    def test_fastPd(self):
        # disabled: FastPd binding currently not exercised
        pass
        #if opengm.configuration.withFastPd:
        #    solverClass = opengm.inference.FastPd
        #    params = [ None, opengm.InfParam(steps=1000)]
        #    genericSolverCheck(solverClass, params=params,
        #                       gms=[self.gridGm3],
        #                       semiRings=self.minSum,testPythonVisitor=False)
    def test_ad3(self):
        if opengm.configuration.withAd3:
            solverClass = opengm.inference.Ad3
            params = [ None, opengm.InfParam(steps=1000,solverType='ad3_ilp')]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm3],
                               semiRings=self.minSumMaxSum,testPythonVisitor=False)
    def test_qpbo_external(self):
        if opengm.configuration.withQpbo:
            solverClass = opengm.inference.QpboExternal
            params = [
                opengm.InfParam(strongPersistency=True),
                opengm.InfParam(useImproveing=True)]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm],
                               semiRings=self.minSum, checkPartial = True,testPythonVisitor=False)
    def test_alpha_beta_swap(self):
        solverClass = opengm.inference.AlphaBetaSwap
        params = [None, opengm.InfParam(steps=10),
                  opengm.InfParam(minStCut='boost-kolmogorov', steps=10),
                  opengm.InfParam(minStCut='push-relabel', steps=10)]
        if opengm.configuration.withMaxflow:
            params.append(opengm.InfParam(minStCut='kolmogorov', steps=10))
        genericSolverCheck(solverClass, params=params, gms=[
                           self.gridGm3, self.chainGm3], semiRings=self.minSum)
    def test_alpha_expansion(self):
        solverClass = opengm.inference.AlphaExpansion
        params = [None, opengm.InfParam(steps=10),
                  opengm.InfParam(minStCut='boost-kolmogorov', steps=10),
                  opengm.InfParam(minStCut='push-relabel', steps=10)]
        if opengm.configuration.withMaxflow:
            params.append(opengm.InfParam(minStCut='kolmogorov', steps=10))
        genericSolverCheck(solverClass, params=params, gms=[
                           self.gridGm3, self.chainGm3], semiRings=self.minSum)
    def test_alpha_expansion_fusion(self):
        if opengm.configuration.withQpbo:
            solverClass = opengm.inference.AlphaExpansionFusion
            params = [None, opengm.InfParam(steps=10)]
            genericSolverCheck(solverClass, params=params, gms=[
                               self.gridGm3, self.chainGm3], semiRings=self.minSum)
    def test_partition_move(self):
        solverClass = opengm.inference.PartitionMove
        params = [None, opengm.InfParam()]
        genericSolverCheck(solverClass, params=params,
                           gms=[self.mcGm],
                           semiRings=self.minSum,testPythonVisitor=False)
    def test_multicut(self):
        if opengm.configuration.withCplex:
            solverClass = opengm.inference.Multicut
            params = [None, opengm.InfParam()]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.mcGm],
                               semiRings=self.minSum,testPythonVisitor=False)
    """
    def test_lpcplex(self):
        if opengm.configuration.withCplex:
            solverClass = opengm.inference.LpCplex
            params = [None, opengm.InfParam(),
                      opengm.InfParam(integerConstraint=True),
                      opengm.InfParam(integerConstraint=False)]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False,testLpInterface=True)
    def test_lpcplex2(self):
        if False and opengm.configuration.withCplex:
            solverClass = opengm.inference.LpCplex2
            params = [None, opengm.InfParam(),
                      opengm.InfParam(integerConstraint=True),
                      opengm.InfParam(integerConstraint=False)]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False,testLpInterface=True)
    def test_gurobi(self):
        if False and opengm.configuration.withGurobi:
            solverClass = opengm.inference.LpGurobi
            params = [None, opengm.InfParam(),
                      opengm.InfParam(integerConstraint=True),
                      opengm.InfParam(integerConstraint=False)]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False,testLpInterface=True)
    """
    def test_libdai_bp(self):
        if opengm.configuration.withLibdai:
            solverClass = opengm.inference.BeliefPropagationLibDai
            params = [None, opengm.InfParam(), opengm.InfParam(
                updateRule='parall'), opengm.InfParam(updateRule='seqrnd')]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
    def test_libdai_fractional_bp(self):
        if opengm.configuration.withLibdai:
            solverClass = opengm.inference.FractionalBpLibDai
            params = [None, opengm.InfParam(), opengm.InfParam(
                updateRule='parall'), opengm.InfParam(updateRule='seqrnd')]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
    def test_libdai_trw_bp(self):
        if opengm.configuration.withLibdai:
            solverClass = opengm.inference.TreeReweightedBpLibDai
            params = [None, opengm.InfParam(),
                      opengm.InfParam(updateRule='parall'),
                      opengm.InfParam(updateRule='seqrnd', ntrees=2)]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
    def test_libdai_gibbs(self):
        if opengm.configuration.withLibdai:
            solverClass = opengm.inference.GibbsLibDai
            params = [None, opengm.InfParam(),
                      opengm.InfParam(steps=100)]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
    def test_libdai_junction_tree(self):
        if opengm.configuration.withLibdai:
            solverClass = opengm.inference.JunctionTreeLibDai
            params = [None, opengm.InfParam()]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
    def test_libdai_decimation(self):
        if opengm.configuration.withLibdai:
            solverClass = opengm.inference.DecimationLibDai
            params = [None, opengm.InfParam()]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
    def test_libdai_decimation_bp(self):
        if opengm.configuration.withLibdai:
            solverClass = opengm.inference.DecimationLibDai
            params = [opengm.InfParam(subInference='bp')]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
    def test_libdai_decimation_trwbp(self):
        if opengm.configuration.withLibdai:
            solverClass = opengm.inference.DecimationLibDai
            params = [opengm.InfParam(subInference='trwBp')]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
    def test_libdai_decimation_fractional_bp(self):
        if opengm.configuration.withLibdai:
            solverClass = opengm.inference.DecimationLibDai
            params = [opengm.InfParam(subInference='fractionalBp')]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
    def test_libdai_decimation_gibbs(self):
        if opengm.configuration.withLibdai:
            solverClass = opengm.inference.DecimationLibDai
            params = [opengm.InfParam(subInference='gibbs')]
            genericSolverCheck(solverClass, params=params,
                               gms=[self.gridGm, self.chainGm, self.gridGm3,
                                    self.chainGm3],
                               semiRings=self.minSum,testPythonVisitor=False)
class Test_Learning:
    """Tests for opengm's structured-learning API (datasets, weights, serialization)."""

    def __init__(self):
        # number of weights used by the serialization test's dataset call
        self.__nWeights = 12
        # grid size (rows, cols) of the toy segmentation problem
        self.__shape = [10,10]

    # utility functions
    def __makeGt(self, shape):
        """Build a binary ground-truth image: top half 0, bottom half 1.

        NOTE(review): `shape[0]/2` relies on Python 2 integer division (the
        file uses `iteritems`, so it targets Python 2); under Python 3 this
        would be a float slice bound and fail.
        """
        gt=numpy.ones(shape,dtype='uint8')
        gt[0:shape[0]/2,:] = 0
        return gt

    def __create_dataset(self, functionType, numModels=1):
        """Create a learning dataset of `numModels` noisy grid models with GT.

        `functionType` is currently unused by this body (callers pass e.g.
        'potts' or even a weight count); only Potts pairwise functions are
        built below.
        """
        numWeights = 4
        dataset = opengm.learning.createDataset(numWeights=numWeights)
        weights = dataset.getWeights()
        gt = self.__makeGt(self.__shape)
        numVars = self.__shape[0] * self.__shape[1]
        numLabels = 2
        # weight ids: 0,1 for the unary features, 2,3 for the pairwise features
        uWeightIds = numpy.array([[0, 1]], dtype='uint64')
        bWeightIds = numpy.array([2, 3], dtype='uint64')
        for m in range(numModels):
            gm = opengm.gm(numpy.ones(numVars) * numLabels)
            # create noisy data: GT plus uniform noise in [-0.15, 0.15)
            random = (numpy.random.rand(*gt.shape)-0.5)*0.3
            noisyGt = random + gt
            # add unarias
            for x in range(self.__shape[0]):
                for y in range(self.__shape[1]):
                    # use noised GT input, and a constant feature
                    uFeat = numpy.array([[noisyGt[x,y], 1]], dtype='float64')
                    lu = opengm.learning.lUnaryFunction(weights=weights,numberOfLabels=numLabels,
                                                        features=uFeat, weightIds=uWeightIds)
                    fid = gm.addFunction(lu)
                    facIndex = gm.addFactor(fid, y+x*self.__shape[1])
            # add pairwise: Potts factors on right and down neighbors, with a
            # squared-gradient feature plus a constant feature
            for x in range(self.__shape[0]):
                for y in range(self.__shape[1]):
                    if x+1 < self.__shape[0]:
                        gradientMag = (noisyGt[x+1,y] - noisyGt[x,y])**2
                        bFeat = numpy.array([gradientMag, 1], dtype='float64')
                        pf = opengm.LPottsFunction(weights=weights,numberOfLabels=numLabels, features=bFeat, weightIds=bWeightIds)
                        fid= gm.addFunction(pf)
                        gm.addFactor(fid, [y+x*self.__shape[1], y+(x+1)*self.__shape[1]])
                    if y+1 < self.__shape[1]:
                        gradientMag = (noisyGt[x,y+1] - noisyGt[x,y])**2
                        bFeat = numpy.array([gradientMag, 1], dtype='float64')
                        pf = opengm.LPottsFunction(weights=weights,numberOfLabels=numLabels, features=bFeat, weightIds=bWeightIds)
                        fid= gm.addFunction(pf)
                        gm.addFactor(fid, [y+x*self.__shape[1], (y+1)+x*self.__shape[1]])
            # store GM and its GT
            dataset.pushBackInstance(gm, gt.reshape([-1]).astype(opengm.label_type))
        return dataset

    def __create_weights(self, numWeights):
        """Return an opengm Weights object initialized to all ones."""
        weightVals = numpy.ones(numWeights)
        weights = opengm.learning.Weights(weightVals)
        return weights

    # def __create_loss(self):
    def __generic_learner_test(self, learner):
        """Run learner.learn() with the best available inference backend."""
        if opengm.configuration.withTrws:
            learner.learn(infCls=opengm.inference.TrwsExternal, parameter=opengm.InfParam())
        elif opengm.configuration.withCplex:
            learner.learn(infCls=opengm.inference.LpCplex, parameter=opengm.InfParam())
        else:
            learner.learn(infCls=opengm.inference.Icm, parameter=opengm.InfParam())

    # tests
    def test_weights(self):
        """Weights expose len() and item get/set semantics."""
        weights = self.__create_weights(self.__nWeights)
        assert(len(weights) == self.__nWeights)
        value = 15
        weights[3] = value
        assert(weights[3] == value)

    def test_dataset(self):
        """A freshly built dataset reports the expected weight/model counts."""
        ds = self.__create_dataset('potts', 1)
        assert(ds.getNumberOfWeights() == 4)
        assert(ds.getNumberOfModels() == 1)

    def test_dataset_serialization(self):
        """Round-trip a dataset through save/load and compare basic counts.

        NOTE(review): this passes `self.__nWeights` (an int) where
        `__create_dataset` expects `functionType`; harmless today because
        the parameter is unused, but worth confirming the intent.
        """
        import tempfile
        import shutil
        ds = self.__create_dataset(self.__nWeights)
        # TODO: create temp directory
        temp_path = tempfile.mkdtemp()
        prefix = 'test'
        ds.save(temp_path, prefix)
        loaded_ds = opengm.learning.DatasetWithFlexibleLoss(0)
        loaded_ds.load(temp_path, prefix)
        shutil.rmtree(temp_path)
        assert(ds.getNumberOfWeights() == loaded_ds.getNumberOfWeights())
        assert(ds.getNumberOfModels() == loaded_ds.getNumberOfModels())
        assert(ds.getModel(0).numberOfVariables == loaded_ds.getModel(0).numberOfVariables)
        assert(ds.getModel(0).numberOfFactors == loaded_ds.getModel(0).numberOfFactors)
if __name__ == "__main__":
    # Ad-hoc entry point: runs only a single inference test, not the whole
    # suite -- use a test runner (e.g. nose/pytest) to execute everything.
    t = Test_Inference()
    t.test_trwbp()
| mit |
fengbeihong/tempest_automate_ironic | tempest/scenario/test_load_balancer_basic.py | 4 | 13056 | # Copyright 2014 Mirantis.inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import tempfile
import time
import urllib2
from tempest.common import commands
from tempest import config
from tempest import exceptions
from tempest.scenario import manager
from tempest.services.network import resources as net_resources
from tempest import test
config = config.CONF
class TestLoadBalancerBasic(manager.NetworkScenarioTest):

    """
    This test checks basic load balancing.

    The following is the scenario outline:
    1. Create an instance
    2. SSH to the instance and start two servers
    3. Create a load balancer with two members and with ROUND_ROBIN algorithm
       associate the VIP with a floating ip
    4. Send NUM requests to the floating ip and check that they are shared
       between the two servers.
    """

    @classmethod
    def skip_checks(cls):
        """Skip unless the LBaaS extension and a reachable network exist."""
        super(TestLoadBalancerBasic, cls).skip_checks()
        cfg = config.network
        if not test.is_extension_enabled('lbaas', 'network'):
            msg = 'LBaaS Extension is not enabled'
            raise cls.skipException(msg)
        if not (cfg.tenant_networks_reachable or cfg.public_network_id):
            msg = ('Either tenant_networks_reachable must be "true", or '
                   'public_network_id must be defined.')
            raise cls.skipException(msg)

    @classmethod
    def resource_setup(cls):
        """Initialize class-level bookkeeping and the test's fixed parameters."""
        super(TestLoadBalancerBasic, cls).resource_setup()
        cls.servers_keypairs = {}
        cls.members = []
        cls.floating_ips = {}
        cls.server_ips = {}
        # backend ports: a single server serves both when only one VM exists
        cls.port1 = 80
        cls.port2 = 88
        # number of HTTP requests used to verify balancing
        cls.num = 50

    def setUp(self):
        super(TestLoadBalancerBasic, self).setUp()
        self.server_ips = {}
        self.server_fixed_ips = {}
        self._create_security_group_for_test()
        self._set_net_and_subnet()

    def _set_net_and_subnet(self):
        """
        Query and set appropriate network and subnet attributes to be used
        for the test. Existing tenant networks are used if they are found.
        The configured private network and associated subnet is used as a
        fallback in absence of tenant networking.
        """
        try:
            tenant_net = self._list_networks(tenant_id=self.tenant_id)[0]
        except IndexError:
            tenant_net = None
        if tenant_net:
            tenant_subnet = self._list_subnets(tenant_id=self.tenant_id)[0]
            self.subnet = net_resources.DeletableSubnet(
                client=self.network_client,
                **tenant_subnet)
            self.network = tenant_net
        else:
            self.network = self._get_network_by_name(
                config.compute.fixed_network_name)
            # TODO(adam_g): We are assuming that the first subnet associated
            # with the fixed network is the one we want. In the future, we
            # should instead pull a subnet id from config, which is set by
            # devstack/admin/etc.
            subnet = self._list_subnets(network_id=self.network['id'])[0]
            self.subnet = net_resources.AttributeDict(subnet)

    def _create_security_group_for_test(self):
        """Create a security group allowing ingress on both backend ports."""
        self.security_group = self._create_security_group(
            tenant_id=self.tenant_id)
        self._create_security_group_rules_for_port(self.port1)
        self._create_security_group_rules_for_port(self.port2)

    def _create_security_group_rules_for_port(self, port):
        """Allow ingress TCP traffic to `port` in the test security group."""
        rule = {
            'direction': 'ingress',
            'protocol': 'tcp',
            'port_range_min': port,
            'port_range_max': port,
        }
        self._create_security_group_rule(
            secgroup=self.security_group,
            tenant_id=self.tenant_id,
            **rule)

    def _create_server(self, name):
        """Boot one server, remember its keypair and reachable/fixed IPs."""
        keypair = self.create_keypair()
        security_groups = [{'name': self.security_group['name']}]
        create_kwargs = {
            'networks': [
                {'uuid': self.network['id']},
            ],
            'key_name': keypair['name'],
            'security_groups': security_groups,
        }
        net_name = self.network['name']
        server = self.create_server(name=name, create_kwargs=create_kwargs)
        self.servers_keypairs[server['id']] = keypair
        # Use a floating IP only when the tenant network is not directly
        # reachable from the test host
        if (config.network.public_network_id and not
                config.network.tenant_networks_reachable):
            public_network_id = config.network.public_network_id
            floating_ip = self.create_floating_ip(
                server, public_network_id)
            self.floating_ips[floating_ip] = server
            self.server_ips[server['id']] = floating_ip.floating_ip_address
        else:
            self.server_ips[server['id']] =\
                server['addresses'][net_name][0]['addr']
        self.server_fixed_ips[server['id']] =\
            server['addresses'][net_name][0]['addr']
        self.assertTrue(self.servers_keypairs)
        return server

    def _create_servers(self):
        """Boot the two backend servers."""
        for count in range(2):
            self._create_server(name=("server%s" % (count + 1)))
        self.assertEqual(len(self.servers_keypairs), 2)

    def _start_servers(self):
        """
        Start two backends

        1. SSH to the instance
        2. Start two http backends listening on ports 80 and 88 respectively
        """
        for server_id, ip in self.server_ips.iteritems():
            private_key = self.servers_keypairs[server_id]['private_key']
            server_name = self.servers_client.get_server(server_id)['name']
            username = config.scenario.ssh_user
            ssh_client = self.get_remote_client(
                server_or_ip=ip,
                private_key=private_key)

            # Write a backend's response into a file; the backend simply
            # echoes a fixed HTTP response containing its server name
            resp = ('echo -ne "HTTP/1.1 200 OK\r\nContent-Length: 7\r\n'
                    'Connection: close\r\nContent-Type: text/html; '
                    'charset=UTF-8\r\n\r\n%s"; cat >/dev/null')

            with tempfile.NamedTemporaryFile() as script:
                script.write(resp % server_name)
                script.flush()
                with tempfile.NamedTemporaryFile() as key:
                    key.write(private_key)
                    key.flush()
                    commands.copy_file_to_host(script.name,
                                               "/tmp/script1",
                                               ip,
                                               username, key.name)

            # Start netcat serving the canned response in a loop
            start_server = ('while true; do '
                            'sudo nc -ll -p %(port)s -e sh /tmp/%(script)s; '
                            'done &')
            cmd = start_server % {'port': self.port1,
                                  'script': 'script1'}
            ssh_client.exec_command(cmd)

            # With a single VM, fake the second backend on another port
            if len(self.server_ips) == 1:
                with tempfile.NamedTemporaryFile() as script:
                    script.write(resp % 'server2')
                    script.flush()
                    with tempfile.NamedTemporaryFile() as key:
                        key.write(private_key)
                        key.flush()
                        commands.copy_file_to_host(script.name,
                                                   "/tmp/script2", ip,
                                                   username, key.name)
                cmd = start_server % {'port': self.port2,
                                      'script': 'script2'}
                ssh_client.exec_command(cmd)

    def _check_connection(self, check_ip, port=80):
        """Poll http://check_ip:port/ until it returns 200 or times out."""
        def try_connect(ip, port):
            try:
                resp = urllib2.urlopen("http://{0}:{1}/".format(ip, port))
                if resp.getcode() == 200:
                    return True
                return False
            except IOError:
                return False
            except urllib2.HTTPError:
                return False
        timeout = config.compute.ping_timeout
        start = time.time()
        while not try_connect(check_ip, port):
            if (time.time() - start) > timeout:
                message = "Timed out trying to connect to %s" % check_ip
                raise exceptions.TimeoutException(message)

    def _create_pool(self):
        """Create a pool with ROUND_ROBIN algorithm."""
        self.pool = super(TestLoadBalancerBasic, self)._create_pool(
            lb_method='ROUND_ROBIN',
            protocol='HTTP',
            subnet_id=self.subnet.id)
        self.assertTrue(self.pool)

    def _create_members(self):
        """
        Create two members.

        In case there is only one server, create both members with the same ip
        but with different ports to listen on.
        """
        for server_id, ip in self.server_fixed_ips.iteritems():
            if len(self.server_fixed_ips) == 1:
                member1 = self._create_member(address=ip,
                                              protocol_port=self.port1,
                                              pool_id=self.pool.id)
                member2 = self._create_member(address=ip,
                                              protocol_port=self.port2,
                                              pool_id=self.pool.id)
                self.members.extend([member1, member2])
            else:
                member = self._create_member(address=ip,
                                             protocol_port=self.port1,
                                             pool_id=self.pool.id)
                self.members.append(member)
        self.assertTrue(self.members)

    def _assign_floating_ip_to_vip(self, vip):
        """Attach a floating IP to the VIP port and wait for it to go ACTIVE.

        NOTE(review): here `floating_ips` is keyed by vip.id with list values,
        while _create_server keys it by floating-ip object -- two different
        usages of the same dict; confirm this mixing is intentional.
        """
        public_network_id = config.network.public_network_id
        port_id = vip.port_id
        floating_ip = self.create_floating_ip(vip, public_network_id,
                                              port_id=port_id)
        self.floating_ips.setdefault(vip.id, [])
        self.floating_ips[vip.id].append(floating_ip)
        # Check for floating ip status before you check load-balancer
        self.check_floating_ip_status(floating_ip, "ACTIVE")

    def _create_load_balancer(self):
        """Create pool, members and VIP; make the VIP reachable."""
        self._create_pool()
        self._create_members()
        self.vip = self._create_vip(protocol='HTTP',
                                    protocol_port=80,
                                    subnet_id=self.subnet.id,
                                    pool_id=self.pool.id)
        self.vip.wait_for_status('ACTIVE')
        if (config.network.public_network_id and not
                config.network.tenant_networks_reachable):
            self._assign_floating_ip_to_vip(self.vip)
            self.vip_ip = self.floating_ips[
                self.vip.id][0]['floating_ip_address']
        else:
            self.vip_ip = self.vip.address

        # Currently the ovs-agent is not enforcing security groups on the
        # vip port - see https://bugs.launchpad.net/neutron/+bug/1163569
        # However the linuxbridge-agent does, and it is necessary to add a
        # security group with a rule that allows tcp port 80 to the vip port.
        self.network_client.update_port(
            self.vip.port_id, security_groups=[self.security_group.id])

    def _check_load_balancing(self):
        """
        1. Send NUM requests on the floating ip associated with the VIP
        2. Check that the requests are shared between the two servers
        """
        self._check_connection(self.vip_ip)
        self._send_requests(self.vip_ip, ["server1", "server2"])

    def _send_requests(self, vip_ip, servers):
        """Send self.num requests to the VIP and count hits per backend."""
        counters = dict.fromkeys(servers, 0)
        for i in range(self.num):
            try:
                server = urllib2.urlopen("http://{0}/".format(vip_ip)).read()
                counters[server] += 1
            # HTTP exception means fail of server, so don't increase counter
            # of success and continue connection tries
            except urllib2.HTTPError:
                continue
        # Assert that each member of the pool gets balanced at least once
        for member, counter in counters.iteritems():
            self.assertGreater(counter, 0, 'Member %s never balanced' % member)

    @test.idempotent_id('c0c6f1ca-603b-4509-9c0f-2c63f0d838ee')
    @test.services('compute', 'network')
    def test_load_balancer_basic(self):
        """End-to-end scenario: boot, serve, balance, verify distribution."""
        self._create_server('server1')
        self._start_servers()
        self._create_load_balancer()
        self._check_load_balancing()
| apache-2.0 |
prospwro/odoo | addons/hr_evaluation/__init__.py | 432 | 1084 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_evaluation
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
yiannist/ganeti | test/py/ganeti.locking_unittest.py | 9 | 34568 | #!/usr/bin/python
#
# Copyright (C) 2006, 2007, 2010 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Script for unittesting the locking module"""
import os
import unittest
import time
import Queue
import threading
import random
import gc
import itertools
from ganeti import constants
from ganeti import locking
from ganeti import errors
from ganeti import utils
from ganeti import compat
from ganeti import objects
from ganeti import query
import testutils
# This is used to test the ssynchronize decorator.
# Since it's passed as input to a decorator it must be declared as a global.
_decoratorlock = locking.SharedLock("decorator lock")

#: List for looping tests; tests decorated with _Repeat run once per entry
ITERATIONS = range(8)
def _Repeat(fn):
"""Decorator for executing a function many times"""
def wrapper(*args, **kwargs):
for i in ITERATIONS:
fn(*args, **kwargs)
return wrapper
def SafeSleep(duration):
  """Sleep for at least C{duration} seconds.

  Unlike a bare C{time.sleep}, this keeps re-sleeping until the requested
  wall-clock deadline has actually passed, even if the underlying sleep
  call returns early.

  """
  deadline = time.time() + duration
  while True:
    remaining = deadline - time.time()
    if remaining <= 0.0:
      break
    time.sleep(remaining)
class _ThreadedTestCase(unittest.TestCase):
"""Test class that supports adding/waiting on threads"""
def setUp(self):
unittest.TestCase.setUp(self)
self.done = Queue.Queue(0)
self.threads = []
def _addThread(self, *args, **kwargs):
"""Create and remember a new thread"""
t = threading.Thread(*args, **kwargs)
self.threads.append(t)
t.start()
return t
def _waitThreads(self):
"""Wait for all our threads to finish"""
for t in self.threads:
t.join(60)
self.failIf(t.isAlive())
self.threads = []
class _ConditionTestCase(_ThreadedTestCase):
  """Common test case for conditions"""

  def setUp(self, cls):
    _ThreadedTestCase.setUp(self)
    self.lock = threading.Lock()
    # `cls` is the condition class under test, supplied by subclasses
    self.cond = cls(self.lock)

  def _testAcquireRelease(self):
    """Check ownership tracking and that wait/notify require the lock."""
    self.assertFalse(self.cond._is_owned())
    self.assertRaises(RuntimeError, self.cond.wait, None)
    self.assertRaises(RuntimeError, self.cond.notifyAll)

    self.cond.acquire()
    self.assert_(self.cond._is_owned())
    self.cond.notifyAll()
    self.assert_(self.cond._is_owned())
    self.cond.release()

    self.assertFalse(self.cond._is_owned())
    self.assertRaises(RuntimeError, self.cond.wait, None)
    self.assertRaises(RuntimeError, self.cond.notifyAll)

  def _testNotification(self):
    """Check that a waiter is woken up by notifyAll from another thread."""
    def _NotifyAll():
      # Runs in a helper thread; markers trace the interleaving:
      # NE = entered, NA = lock acquired, NN = notified
      self.done.put("NE")
      self.cond.acquire()
      self.done.put("NA")
      self.cond.notifyAll()
      self.done.put("NN")
      self.cond.release()

    self.cond.acquire()
    self._addThread(target=_NotifyAll)
    self.assertEqual(self.done.get(True, 1), "NE")
    # The helper cannot acquire the lock (and hence notify) until we wait
    self.assertRaises(Queue.Empty, self.done.get_nowait)
    self.cond.wait(None)
    self.assertEqual(self.done.get(True, 1), "NA")
    self.assertEqual(self.done.get(True, 1), "NN")
    self.assert_(self.cond._is_owned())
    self.cond.release()
    self.assertFalse(self.cond._is_owned())
class TestSingleNotifyPipeCondition(_ConditionTestCase):
  """SingleNotifyPipeCondition tests"""

  def setUp(self):
    _ConditionTestCase.setUp(self, locking.SingleNotifyPipeCondition)

  def testAcquireRelease(self):
    self._testAcquireRelease()

  def testNotification(self):
    self._testNotification()

  def testWaitReuse(self):
    # The condition may be waited on repeatedly as long as it was never
    # notified
    self.cond.acquire()
    self.cond.wait(0)
    self.cond.wait(0.1)
    self.cond.release()

  def testNoNotifyReuse(self):
    # Once notified, a single-notify condition may neither be waited on nor
    # notified again
    self.cond.acquire()
    self.cond.notifyAll()
    self.assertRaises(RuntimeError, self.cond.wait, None)
    self.assertRaises(RuntimeError, self.cond.notifyAll)
    self.cond.release()
class TestPipeCondition(_ConditionTestCase):
  """PipeCondition tests"""

  def setUp(self):
    _ConditionTestCase.setUp(self, locking.PipeCondition)

  def testAcquireRelease(self):
    self._testAcquireRelease()

  def testNotification(self):
    self._testNotification()

  def _TestWait(self, fn):
    """Start three waiters via `fn`, then notify and verify the protocol.

    `fn` must acquire the condition, put "A" on the done queue, wait, and
    put "W" after waking up.
    """
    threads = [
      self._addThread(target=fn),
      self._addThread(target=fn),
      self._addThread(target=fn),
      ]

    # Wait for threads to be waiting
    for _ in threads:
      self.assertEqual(self.done.get(True, 1), "A")

    self.assertRaises(Queue.Empty, self.done.get_nowait)

    self.cond.acquire()
    self.assertEqual(len(self.cond._waiters), 3)
    self.assertEqual(self.cond._waiters, set(threads))

    self.assertTrue(repr(self.cond).startswith("<"))
    self.assertTrue("waiters=" in repr(self.cond))

    # This new thread can't acquire the lock, and thus call wait, before we
    # release it
    self._addThread(target=fn)
    self.cond.notifyAll()
    self.assertRaises(Queue.Empty, self.done.get_nowait)
    self.cond.release()

    # We should now get 3 W and 1 A (for the new thread) in whatever order
    w = 0
    a = 0
    for i in range(4):
      got = self.done.get(True, 1)
      if got == "W":
        w += 1
      elif got == "A":
        a += 1
      else:
        self.fail("Got %s on the done queue" % got)

    self.assertEqual(w, 3)
    self.assertEqual(a, 1)

    # Wake up the last waiter as well
    self.cond.acquire()
    self.cond.notifyAll()
    self.cond.release()

    self._waitThreads()

    self.assertEqual(self.done.get_nowait(), "W")
    self.assertRaises(Queue.Empty, self.done.get_nowait)

  def testBlockingWait(self):
    def _BlockingWait():
      self.cond.acquire()
      self.done.put("A")
      self.cond.wait(None)
      self.cond.release()
      self.done.put("W")

    self._TestWait(_BlockingWait)

  def testLongTimeoutWait(self):
    # Timeout long enough that only notification can end the wait
    def _Helper():
      self.cond.acquire()
      self.done.put("A")
      self.cond.wait(15.0)
      self.cond.release()
      self.done.put("W")

    self._TestWait(_Helper)

  def _TimeoutWait(self, timeout, check):
    """Wait with `timeout`, then report `check` on the done queue."""
    self.cond.acquire()
    self.cond.wait(timeout)
    self.cond.release()
    self.done.put(check)

  def testShortTimeoutWait(self):
    # Waits must expire on their own; nobody notifies here
    self._addThread(target=self._TimeoutWait, args=(0.1, "T1"))
    self._addThread(target=self._TimeoutWait, args=(0.1, "T1"))

    self._waitThreads()

    self.assertEqual(self.done.get_nowait(), "T1")
    self.assertEqual(self.done.get_nowait(), "T1")

    self.assertRaises(Queue.Empty, self.done.get_nowait)

  def testZeroTimeoutWait(self):
    # Zero timeout must return immediately without blocking
    self._addThread(target=self._TimeoutWait, args=(0, "T0"))
    self._addThread(target=self._TimeoutWait, args=(0, "T0"))
    self._addThread(target=self._TimeoutWait, args=(0, "T0"))

    self._waitThreads()

    self.assertEqual(self.done.get_nowait(), "T0")
    self.assertEqual(self.done.get_nowait(), "T0")
    self.assertEqual(self.done.get_nowait(), "T0")

    self.assertRaises(Queue.Empty, self.done.get_nowait)
class TestSharedLock(_ThreadedTestCase):
"""SharedLock tests"""
  def setUp(self):
    """Create a fresh SharedLock and sanity-check its repr."""
    _ThreadedTestCase.setUp(self)
    self.sl = locking.SharedLock("TestSharedLock")
    self.assertTrue(repr(self.sl).startswith("<"))
    self.assertTrue("name=TestSharedLock" in repr(self.sl))
def testSequenceAndOwnership(self):
self.assertFalse(self.sl.is_owned())
self.sl.acquire(shared=1)
self.assert_(self.sl.is_owned())
self.assert_(self.sl.is_owned(shared=1))
self.assertFalse(self.sl.is_owned(shared=0))
self.sl.release()
self.assertFalse(self.sl.is_owned())
self.sl.acquire()
self.assert_(self.sl.is_owned())
self.assertFalse(self.sl.is_owned(shared=1))
self.assert_(self.sl.is_owned(shared=0))
self.sl.release()
self.assertFalse(self.sl.is_owned())
self.sl.acquire(shared=1)
self.assert_(self.sl.is_owned())
self.assert_(self.sl.is_owned(shared=1))
self.assertFalse(self.sl.is_owned(shared=0))
self.sl.release()
self.assertFalse(self.sl.is_owned())
def testBooleanValue(self):
# semaphores are supposed to return a true value on a successful acquire
self.assert_(self.sl.acquire(shared=1))
self.sl.release()
self.assert_(self.sl.acquire())
self.sl.release()
  # Re-acquiring in the same thread must always fail, whatever the mode
  # combination (S = shared, E = exclusive)
  def testDoubleLockingStoE(self):
    self.sl.acquire(shared=1)
    self.assertRaises(AssertionError, self.sl.acquire)

  def testDoubleLockingEtoS(self):
    self.sl.acquire()
    self.assertRaises(AssertionError, self.sl.acquire, shared=1)

  def testDoubleLockingStoS(self):
    self.sl.acquire(shared=1)
    self.assertRaises(AssertionError, self.sl.acquire, shared=1)

  def testDoubleLockingEtoE(self):
    self.sl.acquire()
    self.assertRaises(AssertionError, self.sl.acquire)
  # helper functions: called in a separate thread they acquire the lock, send
  # their identifier on the done queue, then release it. "ERR" is reported
  # when the lock was deleted underneath them.
  def _doItSharer(self):
    try:
      self.sl.acquire(shared=1)
      self.done.put("SHR")
      self.sl.release()
    except errors.LockError:
      self.done.put("ERR")

  def _doItExclusive(self):
    try:
      self.sl.acquire()
      self.done.put("EXC")
      self.sl.release()
    except errors.LockError:
      self.done.put("ERR")

  def _doItDelete(self):
    try:
      self.sl.delete()
      self.done.put("DEL")
    except errors.LockError:
      self.done.put("ERR")
  def testSharersCanCoexist(self):
    """A second sharer must get the lock while we hold it shared."""
    self.sl.acquire(shared=1)
    threading.Thread(target=self._doItSharer).start()
    self.assert_(self.done.get(True, 1))
    self.sl.release()
  # Each of the following checks that a pending operation is blocked while
  # the lock is held (nothing appears on the done queue), and proceeds only
  # after release()
  @_Repeat
  def testExclusiveBlocksExclusive(self):
    self.sl.acquire()
    self._addThread(target=self._doItExclusive)
    self.assertRaises(Queue.Empty, self.done.get_nowait)
    self.sl.release()
    self._waitThreads()
    self.failUnlessEqual(self.done.get_nowait(), "EXC")

  @_Repeat
  def testExclusiveBlocksDelete(self):
    self.sl.acquire()
    self._addThread(target=self._doItDelete)
    self.assertRaises(Queue.Empty, self.done.get_nowait)
    self.sl.release()
    self._waitThreads()
    self.failUnlessEqual(self.done.get_nowait(), "DEL")
    # the helper deleted the lock, so re-create it for the next iteration
    self.sl = locking.SharedLock(self.sl.name)

  @_Repeat
  def testExclusiveBlocksSharer(self):
    self.sl.acquire()
    self._addThread(target=self._doItSharer)
    self.assertRaises(Queue.Empty, self.done.get_nowait)
    self.sl.release()
    self._waitThreads()
    self.failUnlessEqual(self.done.get_nowait(), "SHR")

  @_Repeat
  def testSharerBlocksExclusive(self):
    self.sl.acquire(shared=1)
    self._addThread(target=self._doItExclusive)
    self.assertRaises(Queue.Empty, self.done.get_nowait)
    self.sl.release()
    self._waitThreads()
    self.failUnlessEqual(self.done.get_nowait(), "EXC")

  @_Repeat
  def testSharerBlocksDelete(self):
    self.sl.acquire(shared=1)
    self._addThread(target=self._doItDelete)
    self.assertRaises(Queue.Empty, self.done.get_nowait)
    self.sl.release()
    self._waitThreads()
    self.failUnlessEqual(self.done.get_nowait(), "DEL")
    # the helper deleted the lock, so re-create it for the next iteration
    self.sl = locking.SharedLock(self.sl.name)
  @_Repeat
  def testWaitingExclusiveBlocksSharer(self):
    """SKIPPED testWaitingExclusiveBlockSharer"""
    # Intentionally disabled via early return; everything below is dead code
    # kept for when the fairness behavior it documents is restored
    return

    self.sl.acquire(shared=1)
    # the lock is acquired in shared mode...
    self._addThread(target=self._doItExclusive)
    # ...but now an exclusive is waiting...
    self._addThread(target=self._doItSharer)
    # ...so the sharer should be blocked as well
    self.assertRaises(Queue.Empty, self.done.get_nowait)
    self.sl.release()
    self._waitThreads()
    # The exclusive passed before
    self.failUnlessEqual(self.done.get_nowait(), "EXC")
    self.failUnlessEqual(self.done.get_nowait(), "SHR")

  @_Repeat
  def testWaitingSharerBlocksExclusive(self):
    """SKIPPED testWaitingSharerBlocksExclusive"""
    # Intentionally disabled via early return; everything below is dead code
    return

    self.sl.acquire()
    # the lock is acquired in exclusive mode...
    self._addThread(target=self._doItSharer)
    # ...but now a sharer is waiting...
    self._addThread(target=self._doItExclusive)
    # ...the exclusive is waiting too...
    self.assertRaises(Queue.Empty, self.done.get_nowait)
    self.sl.release()
    self._waitThreads()
    # The sharer passed before
    self.assertEqual(self.done.get_nowait(), "SHR")
    self.assertEqual(self.done.get_nowait(), "EXC")
  def testDelete(self):
    """After delete(), every operation must raise LockError."""
    self.sl.delete()
    self.assertRaises(errors.LockError, self.sl.acquire)
    self.assertRaises(errors.LockError, self.sl.acquire, shared=1)
    self.assertRaises(errors.LockError, self.sl.delete)

  def testDeleteTimeout(self):
    """delete() with a timeout succeeds on an uncontended lock."""
    self.assertTrue(self.sl.delete(timeout=60))

  def testDeleteTimeoutFail(self):
    """delete() with a timeout fails while another thread holds the lock."""
    ready = threading.Event()
    finish = threading.Event()

    def fn():
      self.sl.acquire(shared=0)
      ready.set()
      finish.wait()
      self.sl.release()

    self._addThread(target=fn)
    ready.wait()

    # Test if deleting a lock owned in exclusive mode by another thread fails
    # to delete when a timeout is used
    self.assertFalse(self.sl.delete(timeout=0.02))

    finish.set()
    self._waitThreads()

    # Once the holder released, delete must succeed
    self.assertTrue(self.sl.delete())
    self.assertRaises(errors.LockError, self.sl.acquire)

  def testNoDeleteIfSharer(self):
    """A thread holding the lock shared may not delete it."""
    self.sl.acquire(shared=1)
    self.assertRaises(AssertionError, self.sl.delete)
  @_Repeat
  def testDeletePendingSharersExclusiveDelete(self):
    """Deleting the lock must fail all pending acquires/deletes."""
    self.sl.acquire()
    self._addThread(target=self._doItSharer)
    self._addThread(target=self._doItSharer)
    self._addThread(target=self._doItExclusive)
    self._addThread(target=self._doItDelete)
    self.sl.delete()
    self._waitThreads()
    # The threads who were pending return ERR
    for _ in range(4):
      self.assertEqual(self.done.get_nowait(), "ERR")
    # the lock was deleted, so re-create it for the next iteration
    self.sl = locking.SharedLock(self.sl.name)

  @_Repeat
  def testDeletePendingDeleteExclusiveSharers(self):
    """Same as above with a pending delete queued first."""
    self.sl.acquire()
    self._addThread(target=self._doItDelete)
    self._addThread(target=self._doItExclusive)
    self._addThread(target=self._doItSharer)
    self._addThread(target=self._doItSharer)
    self.sl.delete()
    self._waitThreads()
    # All four threads which were pending return ERR
    self.assertEqual(self.done.get_nowait(), "ERR")
    self.assertEqual(self.done.get_nowait(), "ERR")
    self.assertEqual(self.done.get_nowait(), "ERR")
    self.assertEqual(self.done.get_nowait(), "ERR")
    # the lock was deleted, so re-create it for the next iteration
    self.sl = locking.SharedLock(self.sl.name)
  @_Repeat
  def testExclusiveAcquireTimeout(self):
    """acquire(timeout=...) succeeds once the exclusive holder releases."""
    for shared in [0, 1]:
      on_queue = threading.Event()
      release_exclusive = threading.Event()

      def _LockExclusive():
        self.sl.acquire(shared=0, test_notify=on_queue.set)
        self.done.put("A: start wait")
        release_exclusive.wait()
        self.done.put("A: end wait")
        self.sl.release()

      # Start thread to hold lock in exclusive mode
      self._addThread(target=_LockExclusive)

      # Wait for wait to begin
      self.assertEqual(self.done.get(timeout=60), "A: start wait")

      # Wait up to 60s to get lock, but release exclusive lock as soon as we're
      # on the queue
      self.failUnless(self.sl.acquire(shared=shared, timeout=60,
                                      test_notify=release_exclusive.set))

      self.done.put("got 2nd")
      self.sl.release()

      self._waitThreads()

      self.assertEqual(self.done.get_nowait(), "A: end wait")
      self.assertEqual(self.done.get_nowait(), "got 2nd")
      self.assertRaises(Queue.Empty, self.done.get_nowait)
  @_Repeat
  def testAcquireExpiringTimeout(self):
    """Pending acquires with short timeouts must all expire and report it."""
    def _AcquireWithTimeout(shared, timeout):
      if not self.sl.acquire(shared=shared, timeout=timeout):
        self.done.put("timeout")

    for shared in [0, 1]:
      # Lock exclusively
      self.sl.acquire()

      # Start shared acquires with timeout between 0 and 20 ms
      for i in range(11):
        self._addThread(target=_AcquireWithTimeout,
                        args=(shared, i * 2.0 / 1000.0))

      # Wait for threads to finish (makes sure the acquire timeout expires
      # before releasing the lock)
      self._waitThreads()

      # Release lock
      self.sl.release()

      # Every single acquire must have timed out
      for _ in range(11):
        self.assertEqual(self.done.get_nowait(), "timeout")

      self.assertRaises(Queue.Empty, self.done.get_nowait)
@_Repeat
def testSharedSkipExclusiveAcquires(self):
# Tests whether shared acquires jump in front of exclusive acquires in the
# queue.
def _Acquire(shared, name, notify_ev, wait_ev):
if notify_ev:
notify_fn = notify_ev.set
else:
notify_fn = None
if wait_ev:
wait_ev.wait()
if not self.sl.acquire(shared=shared, test_notify=notify_fn):
return
self.done.put(name)
self.sl.release()
# Get exclusive lock while we fill the queue
self.sl.acquire()
shrcnt1 = 5
shrcnt2 = 7
shrcnt3 = 9
shrcnt4 = 2
# Add acquires using threading.Event for synchronization. They'll be
# acquired exactly in the order defined in this list.
acquires = (shrcnt1 * [(1, "shared 1")] +
3 * [(0, "exclusive 1")] +
shrcnt2 * [(1, "shared 2")] +
shrcnt3 * [(1, "shared 3")] +
shrcnt4 * [(1, "shared 4")] +
3 * [(0, "exclusive 2")])
ev_cur = None
ev_prev = None
for args in acquires:
ev_cur = threading.Event()
self._addThread(target=_Acquire, args=args + (ev_cur, ev_prev))
ev_prev = ev_cur
# Wait for last acquire to start
ev_prev.wait()
# Expect 6 pending exclusive acquires and 1 for all shared acquires
# together
self.assertEqual(self.sl._count_pending(), 7)
# Release exclusive lock and wait
self.sl.release()
self._waitThreads()
# Check sequence
for _ in range(shrcnt1 + shrcnt2 + shrcnt3 + shrcnt4):
# Shared locks aren't guaranteed to be notified in order, but they'll be
# first
tmp = self.done.get_nowait()
if tmp == "shared 1":
shrcnt1 -= 1
elif tmp == "shared 2":
shrcnt2 -= 1
elif tmp == "shared 3":
shrcnt3 -= 1
elif tmp == "shared 4":
shrcnt4 -= 1
self.assertEqual(shrcnt1, 0)
self.assertEqual(shrcnt2, 0)
self.assertEqual(shrcnt3, 0)
self.assertEqual(shrcnt3, 0)
for _ in range(3):
self.assertEqual(self.done.get_nowait(), "exclusive 1")
for _ in range(3):
self.assertEqual(self.done.get_nowait(), "exclusive 2")
self.assertRaises(Queue.Empty, self.done.get_nowait)
  def testIllegalDowngrade(self):
    """downgrade() requires ownership; on a shared lock it is a no-op."""
    # Not yet acquired
    self.assertRaises(AssertionError, self.sl.downgrade)

    # Acquire in shared mode, downgrade should be no-op
    self.assertTrue(self.sl.acquire(shared=1))
    self.assertTrue(self.sl.is_owned(shared=1))
    self.assertTrue(self.sl.downgrade())
    self.assertTrue(self.sl.is_owned(shared=1))
    self.sl.release()

  def testDowngrade(self):
    """downgrade() turns an exclusive lock into a shared one."""
    self.assertTrue(self.sl.acquire())
    self.assertTrue(self.sl.is_owned(shared=0))
    self.assertTrue(self.sl.downgrade())
    self.assertTrue(self.sl.is_owned(shared=1))
    self.sl.release()
@_Repeat
def testDowngradeJumpsAheadOfExclusive(self):
    """Pending shared acquires must be granted when an exclusive holder
    downgrades, even if an exclusive acquire has been waiting longer."""
    def _KeepExclusive(ev_got, ev_downgrade, ev_release):
        # First holder: takes the lock exclusively, downgrades on request,
        # then keeps it shared until told to release.
        self.assertTrue(self.sl.acquire())
        self.assertTrue(self.sl.is_owned(shared=0))
        ev_got.set()
        ev_downgrade.wait()
        self.assertTrue(self.sl.is_owned(shared=0))
        self.assertTrue(self.sl.downgrade())
        self.assertTrue(self.sl.is_owned(shared=1))
        ev_release.wait()
        self.assertTrue(self.sl.is_owned(shared=1))
        self.sl.release()
    def _KeepExclusive2(ev_started, ev_release):
        # Second exclusive acquirer; queues behind the first holder.
        self.assertTrue(self.sl.acquire(test_notify=ev_started.set))
        self.assertTrue(self.sl.is_owned(shared=0))
        ev_release.wait()
        self.assertTrue(self.sl.is_owned(shared=0))
        self.sl.release()
    def _KeepShared(ev_started, ev_got, ev_release):
        # Shared acquirer; should be let in as soon as the first holder
        # downgrades, ahead of the queued exclusive acquire.
        self.assertTrue(self.sl.acquire(shared=1, test_notify=ev_started.set))
        self.assertTrue(self.sl.is_owned(shared=1))
        ev_got.set()
        ev_release.wait()
        self.assertTrue(self.sl.is_owned(shared=1))
        self.sl.release()
    # Acquire lock in exclusive mode
    ev_got_excl1 = threading.Event()
    ev_downgrade_excl1 = threading.Event()
    ev_release_excl1 = threading.Event()
    th_excl1 = self._addThread(target=_KeepExclusive,
                               args=(ev_got_excl1, ev_downgrade_excl1,
                                     ev_release_excl1))
    ev_got_excl1.wait()
    # Start a second exclusive acquire
    ev_started_excl2 = threading.Event()
    ev_release_excl2 = threading.Event()
    th_excl2 = self._addThread(target=_KeepExclusive2,
                               args=(ev_started_excl2, ev_release_excl2))
    ev_started_excl2.wait()
    # Start shared acquires, will jump ahead of second exclusive acquire when
    # first exclusive acquire downgrades
    ev_shared = [(threading.Event(), threading.Event()) for _ in range(5)]
    ev_release_shared = threading.Event()
    th_shared = [self._addThread(target=_KeepShared,
                                 args=(ev_started, ev_got, ev_release_shared))
                 for (ev_started, ev_got) in ev_shared]
    # Wait for all shared acquires to start
    for (ev, _) in ev_shared:
        ev.wait()
    # Check lock information
    self.assertEqual(self.sl.GetLockInfo(set([query.LQ_MODE, query.LQ_OWNER])),
                     [(self.sl.name, "exclusive", [th_excl1.getName()], None)])
    [(_, _, _, pending), ] = self.sl.GetLockInfo(set([query.LQ_PENDING]))
    self.assertEqual([(pendmode, sorted(waiting))
                      for (pendmode, waiting) in pending],
                     [("exclusive", [th_excl2.getName()]),
                      ("shared", sorted(th.getName() for th in th_shared))])
    # Shared acquires won't start until the exclusive lock is downgraded
    ev_downgrade_excl1.set()
    # Wait for all shared acquires to be successful
    for (_, ev) in ev_shared:
        ev.wait()
    # Check lock information again
    self.assertEqual(self.sl.GetLockInfo(set([query.LQ_MODE,
                                              query.LQ_PENDING])),
                     [(self.sl.name, "shared", None,
                       [("exclusive", [th_excl2.getName()])])])
    [(_, _, owner, _), ] = self.sl.GetLockInfo(set([query.LQ_OWNER]))
    self.assertEqual(set(owner), set([th_excl1.getName()] +
                                     [th.getName() for th in th_shared]))
    ev_release_excl1.set()
    ev_release_excl2.set()
    ev_release_shared.set()
    self._waitThreads()
    self.assertEqual(self.sl.GetLockInfo(set([query.LQ_MODE, query.LQ_OWNER,
                                              query.LQ_PENDING])),
                     [(self.sl.name, None, None, [])])
@_Repeat
def testMixedAcquireTimeout(self):
    """Mixes shared holders, timed-out exclusive attempts and a blocking
    exclusive acquire, then verifies the completion order via the queue."""
    sync = threading.Event()
    def _AcquireShared(ev):
        # Blocking shared acquire; holds the lock until the main thread
        # sets `sync`.
        if not self.sl.acquire(shared=1, timeout=None):
            return
        self.done.put("shared")
        # Notify main thread
        ev.set()
        # Wait for notification from main thread
        sync.wait()
        # Release lock
        self.sl.release()
    acquires = []
    for _ in range(3):
        ev = threading.Event()
        self._addThread(target=_AcquireShared, args=(ev, ))
        acquires.append(ev)
    # Wait for all acquires to finish
    for i in acquires:
        i.wait()
    self.assertEqual(self.sl._count_pending(), 0)
    # Try to get exclusive lock; must time out while sharers hold the lock
    self.failIf(self.sl.acquire(shared=0, timeout=0.02))
    # Acquire exclusive without timeout
    exclsync = threading.Event()
    exclev = threading.Event()
    def _AcquireExclusive():
        if not self.sl.acquire(shared=0):
            return
        self.done.put("exclusive")
        # Notify main thread
        exclev.set()
        # Wait for notification from main thread
        exclsync.wait()
        self.sl.release()
    self._addThread(target=_AcquireExclusive)
    # Try to get exclusive lock; still blocked by the sharers
    self.failIf(self.sl.acquire(shared=0, timeout=0.02))
    # Make all shared holders release their locks
    sync.set()
    # Wait for exclusive acquire to succeed
    exclev.wait()
    self.assertEqual(self.sl._count_pending(), 0)
    # Try to get exclusive lock; now blocked by the exclusive holder
    self.failIf(self.sl.acquire(shared=0, timeout=0.02))
    def _AcquireSharedSimple():
        if self.sl.acquire(shared=1, timeout=None):
            self.done.put("shared2")
            self.sl.release()
    for _ in range(10):
        self._addThread(target=_AcquireSharedSimple)
    # Tell exclusive lock to release
    exclsync.set()
    # Wait for everything to finish
    self._waitThreads()
    self.assertEqual(self.sl._count_pending(), 0)
    # Check sequence
    for _ in range(3):
        self.assertEqual(self.done.get_nowait(), "shared")
    self.assertEqual(self.done.get_nowait(), "exclusive")
    for _ in range(10):
        self.assertEqual(self.done.get_nowait(), "shared2")
    self.assertRaises(Queue.Empty, self.done.get_nowait)
def testPriority(self):
    """Queues a large, randomized mix of prioritized shared/exclusive
    acquires and verifies they are granted strictly by priority."""
    # Acquire in exclusive mode
    self.assert_(self.sl.acquire(shared=0))
    # Queue acquires
    def _Acquire(prev, next, shared, priority, result):
        # Chained via events so acquires are queued in a deterministic order.
        prev.wait()
        self.sl.acquire(shared=shared, priority=priority, test_notify=next.set)
        try:
            self.done.put(result)
        finally:
            self.sl.release()
    counter = itertools.count(0)
    priorities = range(-20, 30)
    first = threading.Event()
    prev = first
    # Data structure:
    # {
    #   priority:
    #     [(shared/exclusive, set(acquire names), set(pending threads)),
    #      (shared/exclusive, ...),
    #      ...,
    #     ],
    # }
    perprio = {}
    # References shared acquire per priority in L{perprio}. Data structure:
    # {
    #   priority: (shared=1, set(acquire names), set(pending threads)),
    # }
    prioshared = {}
    for seed in [4979, 9523, 14902, 32440]:
        # Use a deterministic random generator
        rnd = random.Random(seed)
        for priority in [rnd.choice(priorities) for _ in range(30)]:
            modes = [0, 1]
            rnd.shuffle(modes)
            for shared in modes:
                # Unique name
                acqname = "%s/shr=%s/prio=%s" % (counter.next(), shared, priority)
                ev = threading.Event()
                thread = self._addThread(target=_Acquire,
                                         args=(prev, ev, shared, priority, acqname))
                prev = ev
                # Record expected acquire, see above for structure
                data = (shared, set([acqname]), set([thread]))
                priolist = perprio.setdefault(priority, [])
                if shared:
                    priosh = prioshared.get(priority, None)
                    if priosh:
                        # Shared acquires are merged
                        for i, j in zip(priosh[1:], data[1:]):
                            i.update(j)
                        assert data[0] == priosh[0]
                    else:
                        prioshared[priority] = data
                        priolist.append(data)
                else:
                    priolist.append(data)
    # Start all acquires and wait for them
    first.set()
    prev.wait()
    # Check lock information
    self.assertEqual(self.sl.GetLockInfo(set()),
                     [(self.sl.name, None, None, None)])
    self.assertEqual(self.sl.GetLockInfo(set([query.LQ_MODE, query.LQ_OWNER])),
                     [(self.sl.name, "exclusive",
                       [threading.currentThread().getName()], None)])
    self._VerifyPrioPending(self.sl.GetLockInfo(set([query.LQ_PENDING])),
                            perprio)
    # Let threads acquire the lock
    self.sl.release()
    # Wait for everything to finish
    self._waitThreads()
    self.assert_(self.sl._check_empty())
    # Check acquires by priority
    for acquires in [perprio[i] for i in sorted(perprio.keys())]:
        for (_, names, _) in acquires:
            # For shared acquires, the set will contain 1..n entries. For exclusive
            # acquires only one.
            while names:
                names.remove(self.done.get_nowait())
        self.assertFalse(compat.any(names for (_, names, _) in acquires))
    self.assertRaises(Queue.Empty, self.done.get_nowait)
def _VerifyPrioPending(self, ((name, mode, owner, pending), ), perprio):
    """Check GetLockInfo's pending list against the expected per-priority
    structure built by testPriority (Py2 tuple-unpacking parameter)."""
    self.assertEqual(name, self.sl.name)
    self.assert_(mode is None)
    self.assert_(owner is None)
    # Pending entries must appear sorted by priority, shared entries merged.
    self.assertEqual([(pendmode, sorted(waiting))
                      for (pendmode, waiting) in pending],
                     [(["exclusive", "shared"][int(bool(shared))],
                       sorted(t.getName() for t in threads))
                      for acquires in [perprio[i]
                                       for i in sorted(perprio.keys())]
                      for (shared, _, threads) in acquires])
class _FakeTimeForSpuriousNotifications:
def __init__(self, now, check_end):
self.now = now
self.check_end = check_end
# Deterministic random number generator
self.rnd = random.Random(15086)
def time(self):
# Advance time if the random number generator thinks so (this is to test
# multiple notifications without advancing the time)
if self.rnd.random() < 0.3:
self.now += self.rnd.random()
self.check_end(self.now)
return self.now
@_Repeat
def testAcquireTimeoutWithSpuriousNotifications(self):
    """A timed acquire must survive many spurious notifications without
    timing out early (uses the fake virtual clock above)."""
    ready = threading.Event()
    locked = threading.Event()
    req = Queue.Queue(0)
    epoch = 4000.0
    timeout = 60.0
    def check_end(now):
        self.assertFalse(locked.isSet())
        # If we waited long enough (in virtual time), tell main thread to release
        # lock, otherwise tell it to notify once more
        req.put(now < (epoch + (timeout * 0.8)))
    time_fn = self._FakeTimeForSpuriousNotifications(epoch, check_end).time
    sl = locking.SharedLock("test", _time_fn=time_fn)
    # Acquire in exclusive mode
    sl.acquire(shared=0)
    def fn():
        self.assertTrue(sl.acquire(shared=0, timeout=timeout,
                                   test_notify=ready.set))
        locked.set()
        sl.release()
        self.done.put("success")
    # Start acquire with timeout and wait for it to be ready
    self._addThread(target=fn)
    ready.wait()
    # The separate thread is now waiting to acquire the lock, so start sending
    # spurious notifications.
    # Wait for separate thread to ask for another notification
    count = 0
    while req.get():
        # After sending the notification, the lock will take a short amount of
        # time to notice and to retrieve the current time
        sl._notify_topmost()
        count += 1
    self.assertTrue(count > 100, "Not enough notifications were sent")
    self.assertFalse(locked.isSet())
    # Some notifications have been sent, now actually release the lock
    sl.release()
    # Wait for lock to be acquired
    locked.wait()
    self._waitThreads()
    self.assertEqual(self.done.get_nowait(), "success")
    self.assertRaises(Queue.Empty, self.done.get_nowait)
class TestSharedLockInCondition(_ThreadedTestCase):
    """SharedLock as a condition lock tests"""
    def setUp(self):
        _ThreadedTestCase.setUp(self)
        self.sl = locking.SharedLock("TestSharedLockInCondition")
        self.setCondition()

    def setCondition(self):
        # Overridden by TestSharedLockInPipeCondition to swap the
        # condition implementation while reusing the same tests.
        self.cond = threading.Condition(self.sl)

    def testKeepMode(self):
        # wait() must re-acquire the lock in the same mode it was held in,
        # both for shared and for exclusive ownership.
        self.cond.acquire(shared=1)
        self.assert_(self.sl.is_owned(shared=1))
        self.cond.wait(0)
        self.assert_(self.sl.is_owned(shared=1))
        self.cond.release()
        self.cond.acquire(shared=0)
        self.assert_(self.sl.is_owned(shared=0))
        self.cond.wait(0)
        self.assert_(self.sl.is_owned(shared=0))
        self.cond.release()
class TestSharedLockInPipeCondition(TestSharedLockInCondition):
    """SharedLock as a pipe condition lock tests"""
    def setCondition(self):
        # Same test suite as the parent class, but backed by PipeCondition.
        self.cond = locking.PipeCondition(self.sl)
class TestSSynchronizedDecorator(_ThreadedTestCase):
    """Shared Lock Synchronized decorator test"""
    def setUp(self):
        _ThreadedTestCase.setUp(self)

    @locking.ssynchronized(_decoratorlock)
    def _doItExclusive(self):
        # Runs with the module-level _decoratorlock held exclusively.
        self.assert_(_decoratorlock.is_owned())
        self.done.put("EXC")

    @locking.ssynchronized(_decoratorlock, shared=1)
    def _doItSharer(self):
        # Runs with the module-level _decoratorlock held in shared mode.
        self.assert_(_decoratorlock.is_owned(shared=1))
        self.done.put("SHR")

    def testDecoratedFunctions(self):
        # The decorator must release the lock after the call returns.
        self._doItExclusive()
        self.assertFalse(_decoratorlock.is_owned())
        self._doItSharer()
        self.assertFalse(_decoratorlock.is_owned())

    def testSharersCanCoexist(self):
        _decoratorlock.acquire(shared=1)
        threading.Thread(target=self._doItSharer).start()
        self.assert_(self.done.get(True, 1))
        _decoratorlock.release()

    @_Repeat
    def testExclusiveBlocksExclusive(self):
        _decoratorlock.acquire()
        self._addThread(target=self._doItExclusive)
        # give it a bit of time to check that it's not actually doing anything
        self.assertRaises(Queue.Empty, self.done.get_nowait)
        _decoratorlock.release()
        self._waitThreads()
        self.failUnlessEqual(self.done.get_nowait(), "EXC")

    @_Repeat
    def testExclusiveBlocksSharer(self):
        _decoratorlock.acquire()
        self._addThread(target=self._doItSharer)
        self.assertRaises(Queue.Empty, self.done.get_nowait)
        _decoratorlock.release()
        self._waitThreads()
        self.failUnlessEqual(self.done.get_nowait(), "SHR")

    @_Repeat
    def testSharerBlocksExclusive(self):
        _decoratorlock.acquire(shared=1)
        self._addThread(target=self._doItExclusive)
        self.assertRaises(Queue.Empty, self.done.get_nowait)
        _decoratorlock.release()
        self._waitThreads()
        self.failUnlessEqual(self.done.get_nowait(), "EXC")
# Run the tests through Ganeti's test program when executed directly.
if __name__ == "__main__":
    testutils.GanetiTestProgram()
| bsd-2-clause |
hamiltont/CouchPotatoServer | couchpotato/core/_base/_core/__init__.py | 16 | 3338 | from .main import Core
from uuid import uuid4
def start():
    """Plugin entry point: instantiate and return the Core plugin."""
    return Core()
# Settings descriptor consumed by the CouchPotato settings UI.
# Two groups on the "general" tab: "basics" (username/password, port, SSL,
# browser launch) and "advanced" (API key, debug flags, paths, permissions).
config = [{
    'name': 'core',
    'order': 1,
    'groups': [
        {
            'tab': 'general',
            'name': 'basics',
            'description': 'Needs restart before changes take effect.',
            'wizard': True,
            'options': [
                {
                    'name': 'username',
                    'default': '',
                },
                {
                    'name': 'password',
                    'default': '',
                    'type': 'password',
                },
                {
                    'name': 'port',
                    'default': 5050,
                    'type': 'int',
                    'description': 'The port I should listen to.',
                },
                {
                    'name': 'ssl_cert',
                    'description': 'Path to SSL server.crt',
                    'advanced': True,
                },
                {
                    'name': 'ssl_key',
                    'description': 'Path to SSL server.key',
                    'advanced': True,
                },
                {
                    'name': 'launch_browser',
                    'default': True,
                    'type': 'bool',
                    'description': 'Launch the browser when I start.',
                    'wizard': True,
                },
            ],
        },
        {
            'tab': 'general',
            'name': 'advanced',
            'description': "For those who know what they're doing",
            'advanced': True,
            'options': [
                {
                    # Generated once per install; evaluated at import time.
                    'name': 'api_key',
                    'default': uuid4().hex,
                    'readonly': 1,
                    'description': 'Let 3rd party app do stuff. <a target="_self" href="../../docs/">Docs</a>',
                },
                {
                    'name': 'debug',
                    'default': 0,
                    'type': 'bool',
                    'description': 'Enable debugging.',
                },
                {
                    'name': 'development',
                    'default': 0,
                    'type': 'bool',
                    'description': 'Enable this if you\'re developing, and NOT in any other case, thanks.',
                },
                {
                    'name': 'data_dir',
                    'type': 'directory',
                    'description': 'Where cache/logs/etc are stored. Keep empty for defaults.',
                },
                {
                    'name': 'url_base',
                    'default': '',
                    'description': 'When using mod_proxy use this to append the url with this.',
                },
                {
                    'name': 'permission_folder',
                    'default': '0755',
                    'label': 'Folder CHMOD',
                    'description': 'Can be either decimal (493) or octal (leading zero: 0755)',
                },
                {
                    'name': 'permission_file',
                    'default': '0755',
                    'label': 'File CHMOD',
                    'description': 'Same as Folder CHMOD but for files',
                },
            ],
        },
    ],
}]
| gpl-3.0 |
GREO/gnuradio-git | gnuradio-examples/python/usrp2/qt_wfm_interface.py | 10 | 6160 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'qt_wfm_interface.ui'
#
# Created: Thu Jun 18 23:41:03 2009
# by: PyQt4 UI code generator 4.4.3
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
class Ui_InterfaceWindow(object):
    """pyuic4-generated UI scaffold for the WFM receiver main window.

    Do not edit by hand beyond comments: regenerate from the .ui file
    (see the header warning). setupUi builds the widget tree; retranslateUi
    installs the translatable strings.
    """
    def setupUi(self, InterfaceWindow):
        # Main window and central widget
        InterfaceWindow.setObjectName("InterfaceWindow")
        InterfaceWindow.resize(909, 711)
        self.centralwidget = QtGui.QWidget(InterfaceWindow)
        self.centralwidget.setObjectName("centralwidget")
        self.closeButton = QtGui.QPushButton(self.centralwidget)
        self.closeButton.setGeometry(QtCore.QRect(790, 580, 101, 31))
        self.closeButton.setObjectName("closeButton")
        # Frame hosting the GNU Radio sink display (filled in at runtime)
        self.sinkFrame = QtGui.QFrame(self.centralwidget)
        self.sinkFrame.setGeometry(QtCore.QRect(10, 10, 891, 501))
        self.sinkFrame.setFrameShape(QtGui.QFrame.StyledPanel)
        self.sinkFrame.setFrameShadow(QtGui.QFrame.Raised)
        self.sinkFrame.setObjectName("sinkFrame")
        self.horizontalLayoutWidget = QtGui.QWidget(self.sinkFrame)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(10, 10, 871, 481))
        self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
        self.sinkLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)
        self.sinkLayout.setObjectName("sinkLayout")
        # USRP parameter box: frequency, gain, bandwidth
        self.channelModeBox = QtGui.QGroupBox(self.centralwidget)
        self.channelModeBox.setGeometry(QtCore.QRect(10, 520, 261, 131))
        self.channelModeBox.setObjectName("channelModeBox")
        self.bandwidthabel = QtGui.QLabel(self.channelModeBox)
        self.bandwidthabel.setGeometry(QtCore.QRect(10, 90, 101, 17))
        self.bandwidthabel.setObjectName("bandwidthabel")
        self.bandwidthEdit = QtGui.QLineEdit(self.channelModeBox)
        self.bandwidthEdit.setGeometry(QtCore.QRect(130, 90, 113, 23))
        self.bandwidthEdit.setObjectName("bandwidthEdit")
        self.gainEdit = QtGui.QLineEdit(self.channelModeBox)
        self.gainEdit.setGeometry(QtCore.QRect(130, 60, 113, 23))
        self.gainEdit.setObjectName("gainEdit")
        self.gainLabel = QtGui.QLabel(self.channelModeBox)
        self.gainLabel.setGeometry(QtCore.QRect(10, 60, 111, 20))
        self.gainLabel.setObjectName("gainLabel")
        self.freqEdit = QtGui.QLineEdit(self.channelModeBox)
        self.freqEdit.setGeometry(QtCore.QRect(130, 30, 113, 23))
        self.freqEdit.setObjectName("freqEdit")
        self.freqLabel = QtGui.QLabel(self.channelModeBox)
        self.freqLabel.setGeometry(QtCore.QRect(10, 30, 111, 17))
        self.freqLabel.setObjectName("freqLabel")
        self.pauseButton = QtGui.QPushButton(self.centralwidget)
        self.pauseButton.setGeometry(QtCore.QRect(790, 520, 101, 31))
        self.pauseButton.setObjectName("pauseButton")
        # FM tuner parameter box: volume
        self.fmBox = QtGui.QGroupBox(self.centralwidget)
        self.fmBox.setGeometry(QtCore.QRect(290, 520, 251, 131))
        self.fmBox.setObjectName("fmBox")
        self.volumeEdit = QtGui.QLineEdit(self.fmBox)
        self.volumeEdit.setGeometry(QtCore.QRect(130, 20, 113, 23))
        self.volumeEdit.setObjectName("volumeEdit")
        self.volumeLabel = QtGui.QLabel(self.fmBox)
        self.volumeLabel.setGeometry(QtCore.QRect(10, 20, 111, 17))
        self.volumeLabel.setObjectName("volumeLabel")
        InterfaceWindow.setCentralWidget(self.centralwidget)
        # Menu bar and status bar
        self.menubar = QtGui.QMenuBar(InterfaceWindow)
        self.menubar.setGeometry(QtCore.QRect(0, 0, 909, 24))
        self.menubar.setObjectName("menubar")
        self.menuFile = QtGui.QMenu(self.menubar)
        self.menuFile.setObjectName("menuFile")
        InterfaceWindow.setMenuBar(self.menubar)
        self.statusbar = QtGui.QStatusBar(InterfaceWindow)
        self.statusbar.setObjectName("statusbar")
        InterfaceWindow.setStatusBar(self.statusbar)
        self.actionExit = QtGui.QAction(InterfaceWindow)
        self.actionExit.setObjectName("actionExit")
        self.menuFile.addAction(self.actionExit)
        self.menubar.addAction(self.menuFile.menuAction())
        self.retranslateUi(InterfaceWindow)
        # Close button and File->Exit both close the window
        QtCore.QObject.connect(self.closeButton, QtCore.SIGNAL("clicked()"), InterfaceWindow.close)
        QtCore.QObject.connect(self.actionExit, QtCore.SIGNAL("triggered()"), InterfaceWindow.close)
        QtCore.QMetaObject.connectSlotsByName(InterfaceWindow)
        InterfaceWindow.setTabOrder(self.closeButton, self.gainEdit)
        InterfaceWindow.setTabOrder(self.gainEdit, self.freqEdit)
        InterfaceWindow.setTabOrder(self.freqEdit, self.bandwidthEdit)

    def retranslateUi(self, InterfaceWindow):
        # All user-visible strings, routed through Qt's translation layer
        InterfaceWindow.setWindowTitle(QtGui.QApplication.translate("InterfaceWindow", "MainWindow", None, QtGui.QApplication.UnicodeUTF8))
        self.closeButton.setText(QtGui.QApplication.translate("InterfaceWindow", "Close", None, QtGui.QApplication.UnicodeUTF8))
        self.channelModeBox.setTitle(QtGui.QApplication.translate("InterfaceWindow", "USRP Parameters", None, QtGui.QApplication.UnicodeUTF8))
        self.bandwidthabel.setText(QtGui.QApplication.translate("InterfaceWindow", "Bandwidth (Hz)", None, QtGui.QApplication.UnicodeUTF8))
        self.gainLabel.setText(QtGui.QApplication.translate("InterfaceWindow", "Gain (dB)", None, QtGui.QApplication.UnicodeUTF8))
        self.freqLabel.setText(QtGui.QApplication.translate("InterfaceWindow", "Frequency", None, QtGui.QApplication.UnicodeUTF8))
        self.pauseButton.setText(QtGui.QApplication.translate("InterfaceWindow", "Pause", None, QtGui.QApplication.UnicodeUTF8))
        self.fmBox.setTitle(QtGui.QApplication.translate("InterfaceWindow", "FM Tuner Parameters", None, QtGui.QApplication.UnicodeUTF8))
        self.volumeLabel.setText(QtGui.QApplication.translate("InterfaceWindow", "Volume", None, QtGui.QApplication.UnicodeUTF8))
        self.menuFile.setTitle(QtGui.QApplication.translate("InterfaceWindow", "&File", None, QtGui.QApplication.UnicodeUTF8))
        self.actionExit.setText(QtGui.QApplication.translate("InterfaceWindow", "E&xit", None, QtGui.QApplication.UnicodeUTF8))
mtr574/Midburn-ThemeCamps | midburn/models.py | 3 | 4096 | from django.db import models
from django.contrib.auth.models import User
# Camp lifecycle states; the integer codes are stored in Camp.camp_status.
CAMPSTATUS = (
    (0, 'Deleted'),
    (1, 'Accepting new members'),
    (2, 'Closed to new members'),
    (3, 'Camp will not come to Midburn 2016'),
)
# Create your models here.
class Camp(models.Model):
    """A theme camp with bilingual (Hebrew/English) name, description and
    contact details, plus its membership status."""
    users = models.ManyToManyField(User)
    camp_name_he = models.CharField(max_length=50, unique=True)
    camp_name_en = models.CharField(max_length=50, unique=True)
    camp_desc_he = models.TextField()
    camp_desc_en = models.TextField()
    camp_status = models.IntegerField(choices=CAMPSTATUS, default=1)
    is_published = models.BooleanField(default=False)
    contact_email = models.CharField(max_length=254, blank=True)
    contact_facebook = models.CharField(max_length=254, blank=True)
    contact_name_he = models.CharField(max_length=50, blank=True)
    contact_name_en = models.CharField(max_length=50, blank=True)
    contact_phone = models.CharField(max_length=50, blank=True)
    accepting_families = models.BooleanField(default=True)

    def get_status(self):
        """Attach the human-readable status label as `self.status` and
        return self (used for template chaining)."""
        # Look the label up by status *code*, not by tuple position: the
        # previous positional indexing only worked because the codes in
        # CAMPSTATUS happen to equal their indices.
        self.status = dict(CAMPSTATUS)[self.camp_status]
        return self

    def __str__(self):
        return self.camp_name_en
# Kind of camp (food, bar, music, ...); code 6 means camp_type_other is used.
CAMPTYPES = (
    (1, 'food'),
    (2, 'drinking/bar'),
    (3, 'music'),
    (4, 'workshops'),
    (5, 'art-supporting'),
    (6, 'other'),
)
# Times of day the camp is active; stored comma-separated on CampLocation.
CAMPTIMES = (
    (1, 'morning'),
    (2, 'noon'),
    (3, 'evening'),
    (4, 'night'),
)
# Expected noise output of the camp, used for placement decisions.
NOISE_LEVELS = (
    (1, 'quiet'),
    (2, 'medium'),
    (3, 'noisy'),
    (4, 'very noisy'),
)
class CampLocation(models.Model):  # Can be part of camp, but for better modularity
    """Placement-related data for a camp: activity profile, area needs,
    arrival logistics and neighbour preferences (one-to-one with Camp)."""
    camp = models.OneToOneField(Camp)
    camp_type = models.IntegerField(choices=CAMPTYPES)
    camp_type_other = models.TextField()
    camp_activity_time = models.CommaSeparatedIntegerField(choices=CAMPTIMES, max_length=64)
    child_friendly = models.BooleanField()
    noise_level = models.IntegerField(choices=NOISE_LEVELS)
    public_activity_area_sqm = models.IntegerField()
    public_activity_area_desc = models.TextField()
    support_art = models.BooleanField()
    location_comments = models.TextField()
    # These 3 will be set by mikumation
    camp_location_street = models.CharField(max_length=100)
    camp_location_street_time = models.CharField(max_length=100)
    camp_location_area = models.IntegerField()
    # Arrival
    arriving_at = models.DateTimeField()
    # Arrival Checklist
    has_construction_team = models.BooleanField()
    has_deconst_team = models.BooleanField()
    has_gifting = models.BooleanField()
    has_leds = models.BooleanField()
    # Neighbour Camps
    requested_nearby_camps = models.ManyToManyField(Camp, related_name="requested_nearby_camps")

    def __str__(self):
        return self.camp.camp_name_en
# Membership workflow states for a person joining a camp.
CAMP_MEMBERSHIP_STATUS = (
    (1, 'not a member'),
    (2, 'awaiting approval'),
    (3, 'approved'),
)
class CampMember(models.Model):
    """A single person's membership record within a camp."""
    camp = models.ForeignKey(Camp)
    status = models.IntegerField(choices=CAMP_MEMBERSHIP_STATUS)
    has_ticket = models.BooleanField()
    early_arrival = models.BooleanField()
    # Whether this member may edit the camp's details.
    is_editor = models.BooleanField()
class CampSafety(models.Model):
    """Safety declaration for a camp (one-to-one with Camp)."""
    camp = models.OneToOneField(Camp)
    have_art = models.BooleanField()
    installation_over_2m = models.BooleanField()
    # Safety checklist:
    is_gas_2m_from_stove = models.BooleanField()
    is_electricity_not_near_water = models.BooleanField()
# Activity categories for camp-hosted events; 8 means activity_type_other.
ACTIVITY_TYPES = (
    (1, 'workshop'),
    (2, 'party'),
    (3, 'lecture'),
    (4, 'show'),
    (5, 'parade/hike'),
    (6, 'game'),
    (7, 'movie'),
    (8, 'other'),
)
class Workshop(models.Model):
    """A scheduled activity (workshop/party/...) hosted by a camp, with
    bilingual name and description."""
    owner = models.ForeignKey(Camp)
    activity_name_he = models.CharField(max_length=50)
    activity_name_en = models.CharField(max_length=50)
    activity_desc_he = models.TextField()
    activity_desc_en = models.TextField()
    activity_datetime = models.DateTimeField()
    activity_type = models.IntegerField(choices=ACTIVITY_TYPES)
    activity_type_other = models.TextField()
    adult_only = models.BooleanField()
    child_friendly = models.BooleanField()
gartung/dxr | dxr/plugins/rust/__init__.py | 1 | 40098 | """DXR plugin for Rust. Relies on output from running rustc with -Zsave-analysis
It is somewhat painful dealing with the untyped-ness of the CSV input. We want
to treat all ids as ints rather than strings, getting this wrong causes annoying
bugs because Python will not check the type of things, but does distinguish between
`n: int` and `'n': string`, and thus dictionary lookups will mysteriously fail.
* All input is strings
* Anything placed into the hashtable data must have had all ids converted to ints
- that is mostly (but not all) done by convert_ids/find_id (since we convert
Rust NodeIds/DefIds to internal ids)
* Helper methods might take args which may or may not have been int-ified :-(
This will all go away when we convert to using JSON instead of CSV for the data
interchange format.
Line and column numbers are stored as strings though.
"""
import csv
import os
from itertools import chain
from dxr import indexers
from dxr.plugins import Plugin, filters_from_namespace, refs_from_namespace
from dxr.filters import LINE
from dxr.indexers import Extent, Position, iterable_per_line, with_start_and_end, split_into_lines, QUALIFIED_LINE_NEEDLE
from dxr.plugins.rust import filters
from dxr.plugins.rust import refs
# Compiler flag appended to rustc invocations to emit save-analysis data.
RUST_DXR_FLAG = " -Zsave-analysis"

# We know these crates come from the rust distribution (probably, the user could
# override that, but lets assume for now...).
std_libs = ['alloc', 'arena', 'backtrace', 'collections', 'core', 'coretest',
            'flate','fmt_macros', 'getopts', 'graphviz', 'libc', 'log', 'rand',
            'rbml', 'regex', 'rustc', 'rustc_bitflags', 'rustc_back', 'rustc_borrowck',
            'rustc_driver', 'rustc_llvm', 'rustc_privacy', 'rustc_resolve', 'rustc_trans',
            'rustc_typeck', 'rustdoc', 'serialize', 'std', 'syntax', 'term',
            'test', 'unicode']

# Module-level counter backing next_id(); note this shadows the builtin `id`.
id = 0
def next_id():
    """Return the next value of the module-wide internal id counter."""
    # NOTE(review): the counter variable shadows the builtin `id`;
    # renaming it would touch module-level state used elsewhere.
    global id
    id = id + 1
    return id
class FileToIndex(indexers.FileToIndex):
    """Emits search needles and hyperlink refs for one Rust source file,
    drawing on the per-tree data harvested into TreeToIndex."""
    def __init__(self, path, contents, plugin_name, tti):
        super(FileToIndex, self).__init__(path, contents, plugin_name, tti.tree)
        # Keep a handle on the TreeToIndex so we can query its tables.
        self.tree_index = tti

    def needles_by_line(self):
        #iterable of key/value mapping; one iterable per line
        return self.all_needles()

    def refs(self):
        """Yield (start, end, Ref) triples for every clickable item."""
        classes_and_tables = [(refs.FunctionRef, 'functions'),
                              (refs.FunctionRefRef, 'function_refs'),
                              (refs.VariableRef, 'variables'),
                              (refs.VariableRefRef, 'variable_refs'),
                              (refs.TypeRef, 'types'),
                              (refs.TypeRefRef, 'type_refs'),
                              (refs.ModuleRef, 'modules'),
                              (refs.ModuleRefRef, 'module_refs'),
                              (refs.ModuleAliasRef, 'module_aliases'),
                              (refs.UnknownRef, 'unknown_refs')]
        # Note there is no ref for impls since both the trait and struct parts
        # are covered as refs already. If you add this, then you will get overlapping
        # extents, which is bad. We have impl_defs in the db because we do want
        # to jump _to_ them.
        for make_ref, table_name in classes_and_tables:
            for datum in self.tree_index.by_file(table_name, self.path):
                ref = make_ref(self.tree, datum, tree_index=self.tree_index)
                if ref and 'extent_start' in datum:
                    yield (int(datum['extent_start']),
                           int(datum['extent_end']),
                           ref)

    def annotations_by_line(self):
        # FIXME(#4) links in the lefthand margin (warnings, etc.)
        return []

    def links(self):
        # FIXME(#16) RHS links
        #return (sort order, heading, [(icon, title, href), ...])
        return []

    def all_needles(self):
        """Chain every needle generator and regroup the stream per line."""
        return iterable_per_line(with_start_and_end(split_into_lines(chain(
            self.file_needles('function', 'functions'),
            self.file_needles('function_ref', 'function_refs'),
            self.file_needles('var', 'variables'),
            self.file_needles('var_ref', 'variable_refs'),
            self.file_needles('type', 'types'),
            self.file_needles('type_ref', 'type_refs'),
            self.file_needles('module', 'modules'),
            self.file_needles('module_ref', 'module_refs'),
            self.file_needles('module_alias_ref', 'module_aliases'),
            self.alias_needles(),
            self.module_use_needles(),
            self.file_needles('extern_ref', 'unknown_refs'),
            self.impl_needles(),
            self.fn_impls_needles(),
            self.inherit_needles(self.tree_index.super_traits, 'derived'),
            self.inherit_needles(self.tree_index.sub_traits, 'bases'),
            self.call_needles(self.tree_index.callers, 'called_by'),
            self.call_needles(self.tree_index.callees, 'callers'),
        ))))

    def file_needles(self, filter_name, table_name, keys=('name', 'qualname')):
        """Needles for all rows of `table_name` that belong to this file."""
        data = self.tree_index.by_file(table_name, self.path)
        return self.needles_for_table(filter_name, data)

    def needles_for_table(self, filter_name, data):
        # Each needle is a (needle name, needle value dict, Extent) triple.
        result = (('rust_{0}'.format(filter_name),
                   datum,
                   self.make_span(datum))
                  for datum in data if 'extent_start' in datum)
        return result

    def alias_needles(self):
        # When we have a reference to an alias, it is useful to have a needle for
        # both the alias and the aliased 'module'.
        refs = self.tree_index.by_file('module_refs', self.path)
        aliases = self.tree_index.data.module_aliases
        mods = self.tree_index.data.modules
        for datum in refs:
            if datum['aliasid'] in aliases:
                a_ref = aliases[datum['aliasid']]
                alias = {
                    'qualname': a_ref['qualname'],
                    'name': a_ref['name']
                }
                yield ('rust_module_alias_ref', alias, self.make_span(datum))
                if a_ref['refid'] in mods:
                    mod = {
                        'qualname': mods[a_ref['refid']]['qualname'],
                        'name': mods[a_ref['refid']]['name']
                    }
                    yield ('rust_module_ref', mod, self.make_span(datum))

    def module_use_needles(self):
        """Needles for `use`d modules, keyed by the aliased module itself."""
        aliases = self.tree_index.by_file('module_aliases', self.path)
        modules = self.tree_index.data.modules
        for datum in aliases:
            if datum['refid'] in modules:
                alias = {
                    'qualname': modules[datum['refid']]['qualname'],
                    'name': modules[datum['refid']]['name']
                }
                yield ('rust_module_use', alias, self.make_span(datum))

    def impl_needles(self):
        """Needles tying an `impl` block to both its self type and trait."""
        impls = self.tree_index.by_file('impl_defs', self.path)
        types = self.tree_index.data.types
        for datum in impls:
            if datum['refid'] in types:
                impl = {
                    'qualname': types[datum['refid']]['qualname'],
                    'name': types[datum['refid']]['name']
                }
                yield ('rust_impl', impl, self.make_span(datum))
            if datum['traitid'] in types:
                impl = {
                    'qualname': types[datum['traitid']]['qualname'],
                    'name': types[datum['traitid']]['name']
                }
                yield ('rust_impl', impl, self.make_span(datum))

    def fn_impls_needles(self):
        """Needles linking a method implementation to its trait declaration."""
        fns = self.tree_index.by_file('functions', self.path)
        all_fns = self.tree_index.data.functions
        for datum in fns:
            if 'declid' in datum and datum['declid'] in all_fns:
                fn = {
                    'qualname': all_fns[datum['declid']]['qualname'],
                    'name': all_fns[datum['declid']]['name']
                }
                yield ('rust_fn_impls', fn, self.make_span(datum))

    def inherit_needles(self, inheritance, filter_name):
        """Needles for a trait inheritance mapping (sub- or super-traits)."""
        types = self.tree_index.by_file('types', self.path)
        all_types = self.tree_index.data.types
        for datum in types:
            if datum['id'] not in inheritance:
                continue
            for s in inheritance[datum['id']]:
                t = {
                    'qualname': all_types[s]['qualname'],
                    'name': all_types[s]['name']
                }
                yield ('rust_{0}'.format(filter_name), t, self.make_span(datum))

    def call_needles(self, calls, filter_name):
        """Needles for a call-graph mapping (callers or callees)."""
        fns = self.tree_index.by_file('functions', self.path)
        all_fns = self.tree_index.data.functions
        for datum in fns:
            if datum['id'] not in calls:
                continue
            for s in calls[datum['id']]:
                fn = {
                    'qualname': all_fns[s]['qualname'],
                    'name': all_fns[s]['name']
                }
                yield ('rust_{0}'.format(filter_name), fn, self.make_span(datum))

    # Takes a row of data and returns an Extent.
    def make_span(self, row):
        return Extent(Position(int(row['file_line']), int(row['file_col'])),
                      Position(int(row['file_line_end']), int(row['file_col_end'])))
class RustLine:
    """Per-line record holding the Rust items defined on that line."""
    def __init__(self):
        # Definitions found on this line; populated during indexing.
        self.defs = []
class RustFile:
    """Per-file store mapping a line number to its RustLine record."""
    def __init__(self):
        # line number -> RustLine
        self.lines = {}

    def get_line(self, line):
        """Return the RustLine for `line`, creating it on first access."""
        # dict.setdefault replaces the manual check-then-insert idiom.
        return self.lines.setdefault(line, RustLine())
# Data for the tree: mappings for each kind of language item to the place
# it occurs and info about it.
class TreeData:
    """Container for all harvested analysis tables plus memoized indexes.

    Non-ref tables map id -> row dict; the *_refs tables are plain lists
    of row dicts.
    """
    def __init__(self):
        # non-refs are id->args, refs are lists
        self.unknowns = {}
        self.unknown_refs = []
        self.modules = {}
        # A module for each crate linked using extern crate, indexed by the module id for the crate
        self.extern_crate_mods = {}
        self.module_refs = []
        self.module_aliases = {}
        self.variables = {}
        self.variable_refs = []
        self.functions = {}
        self.function_refs = []
        self.types = {}
        self.type_refs = []
        self.impl_defs = {}
        # Memo cache for index(): (table_name, field_name) -> index dict.
        self.indices = {}

    def index(self, table_name, field_name):
        """Return (building and caching on first use) a mapping from each
        non-empty value of `field_name` to the list of rows holding it."""
        key = (table_name, field_name)
        if key in self.indices:
            return self.indices[key]
        table = getattr(self, table_name)
        # Ref tables are lists of rows; the others are id -> row dicts.
        rows = table if table_name.endswith('_refs') else table.values()
        index = {}
        for row in rows:
            value = row.get(field_name)
            # Skip rows where the field is missing or falsy, matching the
            # original `field_name in v and v[field_name]` check.
            if value:
                index.setdefault(value, []).append(row)
        self.indices[key] = index
        return index

    def delete_indices(self):
        """Drop all memoized indexes (call after mutating the tables)."""
        self.indices = {}
class TreeToIndex(indexers.TreeToIndex):
    """Index a whole tree of Rust source.

    Consumes the CSV files emitted by the DXR-instrumented rustc during
    the build (one set per crate, written to a temp folder) and folds
    them into a TreeData instance plus per-file RustFile line maps, then
    post-processes ids, scopes, inheritance and the call graph.
    """

    def __init__(self, plugin_name, tree, vcs_cache):
        super(TreeToIndex, self).__init__(plugin_name, tree, vcs_cache)
        self.tree = tree
        src_folder = self.tree.source_folder
        if not src_folder.endswith('/'):
            src_folder += '/'
        self.src_folder = src_folder
        # crate number (as used in the CSVs) -> (crate name, crate args).
        self.crate_map = {}
        self.crates_by_name = {}
        # (crate name, rust node id) -> (DXR id, 0); see find_id().
        self.id_map = {}
        # Names of the crates built locally (as opposed to external deps).
        self.local_libs = []
        self.files = {}  # map from file name to RustFile, which in turn stores all data
                         # mapping location -> info.
        self.data = TreeData()
        # Map from the id of a scope to the id of its parent (or 0), if there is no parent.
        # Note that these are Rust ids, not DXR ids
        self.mod_parents = {}
        # map from ctor_id to def_id for structs
        # The domains should be disjoint
        self.ctor_ids = {}
        # list of (base, derived) trait ids
        self.inheritance = []
        # convenience lookups for self.inheritance
        self.sub_traits = {}
        self.super_traits = {}
        # maps from a fn to its callers or callees (by id)
        self.callers = {}
        self.callees = {}
        # map from inner to outer scopes
        self.scope_inheritance = {}
        # URLs for std libs
        self.locations = {}
        # The name of the crate being processed
        self.crate_name = None
        self._temp_folder = os.path.join(self.tree.temp_folder, 'plugins', plugin_name)

    # return data by file, indexed by the file's path
    def by_file(self, table_name, file_path):
        table = self.data.index(table_name, 'file_name')
        if file_path not in table:
            return []
        return table[file_path]

    def environment(self, env):
        print("rust-dxr environment")
        # Setup environment variables for using the rust-dxr tool
        # We'll store all the harvested metadata in the plugins temporary folder.
        env['RUSTC'] = env.get('RUSTC', 'rustc') + RUST_DXR_FLAG
        if 'RUSTFLAGS_STAGE2' in env:
            env['RUSTFLAGS_STAGE2'] += RUST_DXR_FLAG
        else:
            env['RUSTFLAGS_STAGE2'] = RUST_DXR_FLAG
        env['DXR_RUST_OBJECT_FOLDER'] = self.tree.object_folder
        env['DXR_RUST_TEMP_FOLDER'] = self._temp_folder
        return env

    def post_build(self):
        """Run the whole two-pass CSV ingestion plus all fixup passes."""
        print "rust-dxr post_build"
        for root, dirs, files in os.walk(self._temp_folder):
            print " - Processing files - first pass"
            for f in [f for f in files if f.endswith('.csv')]:
                self.process_csv_first_pass(os.path.join(root, f))
                self.crate_name = None
            print " - Processing files - second pass"
            for f in [f for f in files if f.endswith('.csv')]:
                self.process_csv_second_pass(os.path.join(root, f))
                self.crate_name = None
            # don't need to look in sub-directories
            break
        print " - Updating references"
        self.fixup_struct_ids()
        self.fixup_sub_mods()
        print " - Generating inheritance graph"
        self.generate_inheritance()
        self.generate_callgraph()
        print " - Generating crate info"
        self.generate_locations()
        print " - Generating qualnames"
        self.generate_qualnames()

    def file_to_index(self, path, contents):
        return FileToIndex(path, contents, self.plugin_name, self)

    # Just record the crates we index (process_crate).
    def process_csv_first_pass(self, path):
        self.process_csv(path, True)

    # All the proper indexing.
    def process_csv_second_pass(self, path):
        self.process_csv(path, False)
        # We need to do this once per crate whilst the current crate is still current
        self.generate_scopes()
        self.std_hack()

    def process_csv(self, file_name, header_only):
        """Parse one CSV, dispatching each row to a module-level process_* handler.

        With `header_only` True, stop at the first handler that returns a
        truthy value (process_end_external_crates does, ending pass one).
        """
        try:
            f = open(file_name, 'rb')
            print 'processing ' + file_name
            parsed_iter = csv.reader(f)
            try:
                # the first item on a line is the kind of entity we are dealing with and so
                # we can use that to dispatch to the appropriate process_... function
                for line in parsed_iter:
                    # convert key:value pairs to a map
                    args = {}
                    for i in range(1, len(line), 2):
                        args[line[i]] = line[i + 1]
                    func = None
                    try:
                        func = globals()['process_' + line[0]]
                    except KeyError:
                        print " - 'process_" + line[0] + "' not implemented!"
                        continue
                    # The Rust compiler can output noncanonical paths, which
                    # don't match the ones DXR comes up with. Canonicalize
                    # them. We don't use relpath() because, in cases where the
                    # path doesn't end up starting with source_folder, we
                    # don't want to end up with an absolute path, since that
                    # certainly won't match what DXR constructs.
                    if 'file_name' in args:
                        file_name_value = os.path.normpath(args['file_name'])
                        if file_name_value.startswith(self.tree.source_folder):
                            file_name_value = file_name_value[len(self.tree.source_folder)+1:]
                        args['file_name'] = file_name_value
                    stop = func(args, self)
                    if stop and header_only:
                        break
            except Exception:
                print "error in", file_name, line
                raise
        except Exception:
            print "error in", file_name
            raise
        finally:
            # NOTE(review): if open() itself raised, `f` is unbound here and
            # this close raises NameError, masking the original error -- TODO confirm/fix upstream.
            f.close()

    def fixup_struct_ids(self):
        """ Sadness. Structs have an id for their definition and an id for their ctor.
        Sometimes, we get one, sometimes the other. This method fixes up any refs
        to the latter into refs to the former."""
        type_refs_by_ref = self.data.index('type_refs', 'refid')
        for ctor in self.ctor_ids.keys():
            if ctor in type_refs_by_ref:
                for ref in type_refs_by_ref[ctor]:
                    ref['refid'] = self.ctor_ids[ctor]
        # Indices are now out of date, need to delete them
        self.data.delete_indices()

    def fixup_sub_mods(self):
        """ When we have a path like a::b::c, we want to have info for a and a::b.
        Unfortunately Rust does not give us much info, so we have to
        construct it ourselves from the module info we have.
        We have the qualname for the module (e.g, a or a::b) but we do not have
        the refid. """
        self.fixup_sub_mods_impl('modules', 'module_refs')
        # paths leading up to a static method have a module path, then a type at the end,
        # so we have to fixup the type in the same way as we do modules.
        self.fixup_sub_mods_impl('types', 'type_refs')
        # Some module_refs are refs to types, e.g., enums in paths
        self.fixup_sub_mods_impl('types', 'module_refs')

    # FIXME - does not seem to work for external crates - refid = 0, crateid = 0
    # they must be in the same module crate as their parent though, and we can cache
    # module name and scope -> crate and always get a hit, so maybe we can win.
    def fixup_sub_mods_impl(self, table_name, table_ref_name):
        """ NOTE table_name and table_ref_name should not come from user input, otherwise
        there is potential for SQL injection attacks. """
        # First create refids for module refs whose qualnames match the qualname of
        # the module (i.e., no aliases).
        table_refs = getattr(self.data, table_ref_name)
        table_by_name = self.data.index(table_name, 'qualname')
        for v in table_refs:
            if v['refid'] > 0:
                continue
            if v['qualname'] and v['qualname'] in table_by_name:
                v['refid'] = table_by_name[v['qualname']][0]['id']
        # We do our own scoping of aliases and it is kinda nasty. We keep a record
        # of a reflexive, transitive 'inside' relation for scopes in impl. So we
        # check that the alias is outside the reference to the alias.
        # XXX This does not take into account overriding/shadowing, so if there is
        # an alias in a smaller scope which hides an outer alias, it is chance which
        # you will get.
        if table_name == 'modules':
            # Next account for where the path is an aliased modules e.g., alias::c,
            # where c is already accounted for.
            module_aliases_by_scope = self.data.index('module_aliases', 'scopeid')
            module_refs_0 = [item for item in self.data.module_refs if item['refid'] == -1]
            for mod_ref in module_refs_0:
                if mod_ref['scopeid'] not in self.scope_inheritance:
                    continue
                parent_ids = self.scope_inheritance[mod_ref['scopeid']]
                for parent_id in parent_ids:
                    if parent_id in module_aliases_by_scope:
                        for alias in module_aliases_by_scope[parent_id]:
                            if alias['name'] == mod_ref['qualname']:
                                # Aliases are scoped: qualname is "<scope>$<name>".
                                qualname = str(parent_id) +"$" + alias['name']
                                mod_ref['qualname'] = qualname
                                mod = None
                                id = alias['refid']
                                if id in self.data.modules:
                                    mod = self.data.modules[id]
                                elif id in self.data.extern_crate_mods:
                                    mod = self.data.extern_crate_mods[id]
                                if mod:
                                    mod_ref['refid'] = mod['id']
                                    mod_ref['aliasid'] = alias['id']

    def generate_inheritance(self):
        """Close the (base, derived) trait relation transitively."""
        direct = [(base, derived) for (base, derived) in self.inheritance]
        transitive = [(base, derived) for (base, derived) in self.closure(self.inheritance) if (base, derived) not in self.inheritance]
        self.inheritance = direct + transitive
        for (b, d) in self.inheritance:
            self.sub_traits.setdefault(b, []).append(d)
            self.super_traits.setdefault(d, []).append(b)

    def generate_callgraph(self):
        """Build caller/callee maps from function refs (static + dynamic dispatch)."""
        # staticaly dispatched call
        static_calls = [(value['refid'], value['scopeid']) for value in self.data.function_refs if value['refid'] and value['refid'] in self.data.functions and value['scopeid'] in self.data.functions]
        # dynamically dispatched call
        fns_by_declid = self.data.index('functions', 'declid')
        dynamic_calls = [(fns_by_declid[value['declid']][0]['id'], value['scopeid'])
                         for value in self.data.function_refs
                         if ('refid' not in value or not value['refid']) and 'declid' in value and value['declid'] in fns_by_declid and fns_by_declid[value['declid']][0]['id'] in self.data.functions and value['scopeid'] in self.data.functions]
        for (er, ee) in static_calls + dynamic_calls:
            self.callers.setdefault(er, []).append(ee)
            self.callees.setdefault(ee, []).append(er)

    def generate_locations(self):
        """Record doc/source/DXR URLs for the known standard library crates."""
        docurl = "http://static.rust-lang.org/doc/master/%s/index.html"
        srcurl = "https://github.com/rust-lang/rust/tree/master/src/lib%s"
        dxrurl = "http://dxr.mozilla.org/rust/source/lib%s/lib.rs.html"
        for l in std_libs:
            # If we are indexing the standard libs for some reason, then don't give
            # them special treatment.
            if l not in self.local_libs:
                self.locations[l] = (docurl%l, srcurl%l, dxrurl%l)

    def generate_qualnames(self):
        """Backfill missing qualname/name on refs from the items they point at."""
        def generate_qualname_for_table(ref_table, table):
            for datum in ref_table:
                if 'qualname' not in datum or not datum['qualname']:
                    if datum['refid'] and datum['refid'] in table:
                        datum['qualname'] = table[datum['refid']]['qualname']
                        datum['name'] = table[datum['refid']]['name']

        generate_qualname_for_table(self.data.type_refs, self.data.types)
        generate_qualname_for_table(self.data.module_refs, self.data.types)
        generate_qualname_for_table(self.data.variable_refs, self.data.variables)

        # function refs
        for f in self.data.function_refs:
            if 'qualname' not in f or not f['qualname']:
                if 'refid' in f and f['refid'] and f['refid'] in self.data.functions:
                    fn_def = self.data.functions[f['refid']]
                    f['qualname'] = fn_def['qualname']
                    f['name'] = fn_def['name']
                elif 'refid' in f and f['refid'] and f['refid'] in self.data.types:
                    fn_def = self.data.types[f['refid']]
                    f['qualname'] = fn_def['qualname']
                    f['name'] = fn_def['name']
                elif 'declid' in f and f['declid'] and f['declid'] in self.data.functions:
                    fn_decl = self.data.functions[f['declid']]
                    f['qualname'] = fn_decl['qualname']
                    f['name'] = fn_decl['name']

        # unknown refs
        for datum in self.data.unknown_refs:
            if 'qualname' not in datum or not datum['qualname']:
                if datum['refid']:
                    datum['qualname'] = datum['refid']
                    datum['name'] = datum['refid']

        # module aliases
        for datum in self.data.module_refs:
            if 'qualname' not in datum or not datum['qualname']:
                if datum['aliasid'] and datum['aliasid'] in self.data.module_aliases:
                    alias = self.data.module_aliases[datum['aliasid']]
                    datum['qualname'] = alias['qualname']
                    datum['name'] = alias['name']

    def generate_scopes(self):
        """Turn the per-crate mod_parents map into the reflexive/transitive
        scope_inheritance relation, then reset mod_parents for the next crate."""
        self.scope_inheritance[self.find_id_cur(0)] = [self.find_id_cur(0)]
        for (child, parent) in self.mod_parents.items():
            self.scope_inheritance.setdefault(child, []).append(parent)
            # reflexivity
            self.scope_inheritance.setdefault(child, []).append(child)
        # transitivity
        for (child, parent) in self.closure(self.mod_parents.items()):
            if (child, parent) not in self.mod_parents.items():
                self.scope_inheritance.setdefault(child, []).append(parent)
        self.mod_parents = {}

    def std_hack(self):
        # This is nasty - Rust implicitly includes the standard library,
        # crate `std`, but without generating an `extern crate` item, so we need
        # to do that. However, it is possible the project includes some other crate
        # called `std` (by building without the standard lib, we can't tell from
        # the indexing data which is the case), so we need to check in case there
        # is one already.
        # We probably wouldn't need this if we dealt with generated code properly
        # in the compiler indexing.
        if 'std' not in self.data.index('module_aliases', 'name').keys():
            id = next_id()
            scopeid = self.find_id_cur('0')
            args = {
                'name': 'std',
                'location': 'std',
                'id': id,
                'scopeid': scopeid,
                # Jesus, this is fragile
                'crate': '1',
                'qualname': str(scopeid) + '$std',
                'refid': self.crate_map[1][1]['id']
            }
            self.data.module_aliases[id] = args

    def closure(self, input):
        """ Compute the (non-reflexive) transitive closure of a list of pairs."""
        closure = set(input)
        while True:
            # Join step: (b, d) + (d, dd) => (b, dd).
            next_set = set([(b,dd) for (b,d) in closure for (bb,dd) in closure if d == bb])
            next_set |= closure
            if next_set == closure:
                return closure
            closure = next_set

    def find_id(self, crate, node):
        """ Maps a crate name and a node number to a globally unique id. """
        if node == None:
            return None
        # NOTE(review): `node` may still be a string here (CSV values); under
        # Python 2 a str compares greater than any int, so strings fall
        # through to int() below -- TODO confirm this is intended.
        if node < 0:
            return node
        node = int(node)
        if (crate, node) not in self.id_map:
            result = next_id()
            self.id_map[(crate, node)] = (result, 0)
            return result
        return self.id_map[(crate, node)][0]

    def add_external_item(self, args):
        """ Returns True if the refid in the args points to an item in an external crate. """
        node, crate = args['refid'], args['refidcrate']
        if not node:
            return False
        crate = self.crate_map[int(crate)][0]
        if crate in self.local_libs:
            return False
        id = self.find_id(crate, node)
        if id not in self.data.unknowns:
            self.data.unknowns[id] = {'id': id, 'crate': crate }
        args = self.convert_ids(args)
        self.data.unknown_refs.append(args)
        self.add_to_lines(args, ('unknowns', args))
        return True

    def add_external_decl(self, args):
        """Like add_external_item, but keyed on the decl id (trait method decls).

        Mutates `args` so the ref's refid points at the external decl."""
        decl_node, decl_crate = args['declid'], args['declidcrate']
        if not decl_node:
            return False
        decl_crate = self.crate_map[int(decl_crate)][0]
        if decl_crate in self.local_libs:
            return False
        id = self.find_id(decl_crate, decl_node)
        if id not in self.data.unknowns:
            self.data.unknowns[id] = {'id': id, 'crate': decl_crate }
        new_args = self.convert_ids(args)
        new_args['refid'] = new_args['declid']
        self.add_to_lines(new_args, ('unknowns', new_args))
        args['refid'] = new_args['declid']
        return True

    def add_to_lines(self, args, data):
        """Attach `data` to every RustLine the item's extent covers."""
        r_file = self.get_file(args['file_name'])
        start_line = args['file_line']
        end_line = args['file_line_end']
        for i in range(int(start_line), int(end_line) + 1):
            r_line = r_file.get_line(i)
            r_line.defs.append(data)

    def get_file(self, file_name):
        """Return (creating if needed) the RustFile for a tree-relative path."""
        if file_name.startswith(self.src_folder):
            file_name = file_name[len(self.src_folder):]
        if file_name in self.files:
            return self.files[file_name]
        r_file = RustFile()
        self.files[file_name] = r_file
        return r_file

    # XXX this feels a little bit fragile...
    def convert_ids(self, args):
        """Convert all Rust (crate-local) ids in a CSV row into DXR global ids."""
        def convert(k, v):
            # NOTE(review): this first branch is unreachable -- the dict
            # comprehension below filters out every '*crate' key before
            # convert() is called.
            if k.endswith('crate'):
                return -1
            elif k == 'ctor_id' or k == 'aliasid':
                return int(v)
            elif k == 'refid' and (not v or int(v) <= 0):
                return -1
            elif k == 'id' or k == 'scopeid':
                return self.find_id_cur(v)
            elif v == '' and (k.endswith('id') or k == 'base' or k == 'derived'):
                return None
            elif k.endswith('id') or k == 'base' or k == 'derived':
                # Cross-crate ids resolve via the companion '<key>crate' column.
                return self.find_id(self.crate_map[int(args[k + 'crate'])][0], v)
            else:
                return v

        new_args = {k: convert(k, v) for k, v in args.items() if not k.endswith('crate')}
        return new_args

    def find_id_cur(self, node):
        """ Shorthand for nodes in the current crate. """
        return self.find_id(self.crate_map[0][0], node)

    def fixup_qualname(self, datum):
        # FIXME(#19) we should not do this here, we should do it in the compiler
        if 'qualname' in datum and datum['qualname'] and datum['qualname'][:2] == '::':
            datum['qualname'] = self.crate_name + datum['qualname']
# FIXME(#15) all these process_* methods would be better off in TreeToIndex
def process_crate(args, tree):
    """ There should only be one of these per crate and it gives info about the current
    crate.
    Note that this gets called twice for the same crate line - once per pass. """
    if args['name'] not in tree.local_libs:
        tree.local_libs.append(args['name'])
    args = tree.convert_ids(args)
    args['id'] = next_id()
    # Crate slot 0 always refers to the crate currently being processed.
    tree.crate_map[0] = (args['name'], args)
    tree.crates_by_name[args['name']] = args
    tree.crate_name = args['name']
def process_external_crate(args, tree):
    """ These have to happen before anything else in the csv and have to be concluded
    by 'end_external_crate'. """
    # Synthesise a module to stand in for the external crate.
    mod_id = next_id()
    name = args['name']
    id = int(args['crate'])
    args = {'id': mod_id,
            'name': name,
            'qualname': "0$" + name,
            'def_file': args['file_name'],
            'kind': 'extern',
            'scopeid': 0,
            'extent_start': -1,
            'extent_end': -1}
    # don't need to convert_args because the args are all post-transform
    tree.data.extern_crate_mods[mod_id] = args
    tree.crate_map[id] = (name, args)
def process_type_ref(args, tree):
    """Record a reference to a type; external refs become 'unknowns'."""
    if tree.add_external_item(args):
        return;
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    if 'qualname' not in args:
        args['qualname'] = ''
    tree.data.type_refs.append(args)
    tree.add_to_lines(args, ('type_refs', args))
def process_variable(args, tree):
    """Record a variable definition."""
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.data.variables[args['id']] = args
    tree.add_to_lines(args, ('variables', args))
def process_function_impl(args, tree):
    """Shared body for function definitions and trait method declarations."""
    # The short name is the last path segment of the qualified name.
    args['name'] = args['qualname'].split('::')[-1]
    args['args'] = ''
    args['type'] = ''
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    # Functions open a scope; remember its parent for generate_scopes().
    tree.mod_parents[int(args['id'])] = int(args['scopeid'])
    tree.data.functions[args['id']] = args
    tree.add_to_lines(args, ('functions', args))
def process_function(args, tree):
    """Record a function definition (delegates to process_function_impl)."""
    process_function_impl(args, tree)
def process_method_decl(args, tree):
    """Record a trait method declaration (delegates to process_function_impl)."""
    process_function_impl(args, tree)
def process_enum(args, tree):
    """Record an enum definition as a type."""
    args['kind'] = 'enum'
    args['name'] = args['qualname'].split('::')[-1]
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.data.types[args['id']] = args
    tree.add_to_lines(args, ('types', args))
def process_struct(args, tree, kind = 'struct'):
    """Record a struct (or variant struct) definition as a type."""
    # Used for fixing up the refid in fixup_struct_ids
    if args['ctor_id'] != '-1':
        tree.ctor_ids[tree.find_id_cur(args['ctor_id'])] = tree.find_id_cur(args['id'])
    args['name'] = args['qualname'].split('::')[-1]
    tree.fixup_qualname(args)
    args['kind'] = kind
    # NOTE(review): scope_args is unused, but convert_ids() allocates DXR ids
    # as a side effect, so removing this call could change id assignment.
    scope_args = tree.convert_ids({'id': args['id'],
                                   'name' : args['name']})
    args = tree.convert_ids(args)
    tree.mod_parents[int(args['id'])] = int(args['scopeid'])
    tree.data.types[args['id']] = args
    tree.add_to_lines(args, ('types', args))
def process_impl(args, tree):
    """Record an impl block; impls open a scope like modules do."""
    # NOTE(review): scope_args is unused, but convert_ids() allocates DXR ids
    # as a side effect, so removing this call could change id assignment.
    scope_args = tree.convert_ids({'id': args['id'],
                                   'name' : 'impl'})
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.mod_parents[int(args['id'])] = int(args['scopeid'])
    tree.data.impl_defs[args['id']] = args
    tree.add_to_lines(args, ('impl_defs', args))
def process_fn_call(args, tree):
    """Record a statically dispatched function call."""
    if tree.add_external_item(args):
        return;
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.data.function_refs.append(args)
    tree.add_to_lines(args, ('function_refs', args))
def process_var_ref(args, tree):
    """Record a reference to a variable."""
    if tree.add_external_item(args):
        return;
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.data.variable_refs.append(args)
    tree.add_to_lines(args, ('variable_refs', args))
def process_struct_ref(args, tree):
    """Record a struct reference; structs are types, so reuse process_type_ref."""
    if 'qualname' not in args:
        args['qualname'] = ''
    process_type_ref(args, tree)
def process_method_call(args, tree):
    """Record a method call; may resolve via the def (refid) or decl (declid)."""
    if args['refid'] == '0':
        args['refid'] = None
    ex_def = tree.add_external_item(args)
    ex_decl = tree.add_external_decl(args)
    if ex_def and ex_decl:
        return;
    if (ex_def and not args['declid']) or (ex_decl and not args['refid']):
        # FIXME, I think this is meant to be an assertion, but not sure
        print "Unexpected(?) missing id in method call"
        return;
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.data.function_refs.append(args)
    tree.add_to_lines(args, ('function_refs', args))
def process_mod_ref(args, tree):
    """Record a reference to a module (alias resolution happens in fixup_sub_mods)."""
    args['name'] = args['qualname'].split('::')[-1]
    if tree.add_external_item(args):
        return;
    args['aliasid'] = 0
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.data.module_refs.append(args)
    tree.add_to_lines(args, ('module_refs', args))
def process_use_alias(args, tree):
    # module_aliases includes aliases to things other than modules
    args = tree.convert_ids(args)
    # Aliases are scoped: qualname is "<scope>$<name>".
    args['qualname'] = str(args['scopeid']) + "$" + args['name']
    tree.data.module_aliases[args['id']] = args
    tree.add_to_lines(args, ('module_aliases', args))
def process_typedef(args, tree):
    """Record a type alias (`type X = ...;`) as a type."""
    args['name'] = args['qualname'].split('::')[-1]
    args['kind'] = 'typedef'
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.data.types[args['id']] = args
    tree.add_to_lines(args, ('types', args))
def process_variant(args, tree):
    """Record an enum variant; treated like a variable."""
    process_variable(args, tree)
def process_variant_struct(args, tree):
    """Record a struct-like enum variant; treated like a struct."""
    process_struct(args, tree, 'variant_struct')
def process_trait(args, tree):
    """Record a trait definition as a type; traits open a scope."""
    args['name'] = args['qualname'].split('::')[-1]
    args['kind'] = 'trait'
    # NOTE(review): scope_args is unused, but convert_ids() allocates DXR ids
    # as a side effect, so removing this call could change id assignment.
    scope_args = tree.convert_ids({'id': args['id'],
                                   'name' : 'name'})
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.mod_parents[int(args['id'])] = int(args['scopeid'])
    tree.data.types[args['id']] = args
    tree.add_to_lines(args, ('types', args))
def process_module(args, tree):
    """Record a module definition; modules open a scope."""
    args['name'] = args['qualname'].split('::')[-1]
    # Need the file name for the menu, at least
    # args['def_file'] = tree.get_file(args['def_file'])
    args['kind'] = 'mod'
    # NOTE(review): scope_args is unused, but convert_ids() allocates DXR ids
    # as a side effect, so removing this call could change id assignment.
    scope_args = tree.convert_ids({'id': args['id'],
                                   'name' : 'name'})
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.mod_parents[int(args['id'])] = int(args['scopeid'])
    tree.data.modules[args['id']] = args
    tree.add_to_lines(args, ('modules', args))
# FIXME: hmm, I'm not exactly clear on the difference between a fn call and fn ref, some of the former
# are logically the latter and this is stupid code dup...
def process_fn_ref(args, tree):
    """Record a reference to a function that is not a direct call."""
    if tree.add_external_item(args):
        return;
    args = tree.convert_ids(args)
    tree.fixup_qualname(args)
    tree.data.function_refs.append(args)
    tree.add_to_lines(args, ('function_refs', args))
def process_extern_crate(args, tree):
    """Record an `extern crate` item as a module alias to the crate's module."""
    crate = int(args['crate'])
    args['refid'] = '0'
    args['refidcrate'] = '0'
    args = tree.convert_ids(args)
    # Aliases are scoped: qualname is "<scope>$<name>".
    args['qualname'] = str(args['scopeid']) + "$" + args['name']
    # Point the alias at the synthetic module made by process_external_crate.
    args['refid'] = tree.crate_map[crate][1]['id']
    tree.data.module_aliases[args['id']] = args
    tree.add_to_lines(args, ('module_aliases', args))
def process_inheritance(args, tree):
    """Record a (base, derived) trait inheritance edge."""
    args = tree.convert_ids(args)
    tree.inheritance.append((args['base'], args['derived']))
def process_use_glob(args, tree):
    """Glob imports (`use foo::*`) are not indexed yet."""
    # FIXME(#9)
    pass
def process_end_external_crates(args, tree):
    # We've got all the info we're going to get about external crates now.
    # Returning True stops the first (header-only) pass over this CSV.
    return True
# Elasticsearch line-needle mappings: one qualified-needle property per
# kind of Rust item/filter this plugin emits.
mappings = {
    LINE: {
        'properties': {
            'rust_function': QUALIFIED_LINE_NEEDLE,
            'rust_function_ref': QUALIFIED_LINE_NEEDLE,
            'rust_var': QUALIFIED_LINE_NEEDLE,
            'rust_var_ref': QUALIFIED_LINE_NEEDLE,
            'rust_type': QUALIFIED_LINE_NEEDLE,
            'rust_type_ref': QUALIFIED_LINE_NEEDLE,
            'rust_module': QUALIFIED_LINE_NEEDLE,
            'rust_module_ref': QUALIFIED_LINE_NEEDLE,
            'rust_module_alias_ref': QUALIFIED_LINE_NEEDLE,
            'rust_extern_ref': QUALIFIED_LINE_NEEDLE,
            'rust_module_use': QUALIFIED_LINE_NEEDLE,
            'rust_impl': QUALIFIED_LINE_NEEDLE,
            'rust_fn_impls': QUALIFIED_LINE_NEEDLE,
            'rust_bases': QUALIFIED_LINE_NEEDLE,
            'rust_derived': QUALIFIED_LINE_NEEDLE,
            'rust_callers': QUALIFIED_LINE_NEEDLE,
            'rust_called_by': QUALIFIED_LINE_NEEDLE,
        }
    }
}

# Plugin entry point: wires the filters, indexer, mappings and refs together.
plugin = Plugin(filters=filters_from_namespace(filters.__dict__),
                tree_to_index=TreeToIndex,
                mappings=mappings,
                refs=refs_from_namespace(refs.__dict__))
| mit |
olafhauk/mne-python | mne/io/bti/read.py | 14 | 2981 | # Authors: Denis A. Engemann <[email protected]>
# simplified BSD-3 license
import numpy as np
from ..utils import read_str
def _unpack_matrix(fid, rows, cols, dtype, out_dtype):
    """Read a ``rows`` x ``cols`` matrix of ``dtype`` items from ``fid``.

    The raw bytes are interpreted as ``dtype`` (including byte order),
    reshaped, and cast to ``out_dtype``.
    """
    item = np.dtype(dtype)
    n_bytes = int(item.itemsize * rows * cols)
    raw = fid.read(n_bytes)
    flat = np.frombuffer(raw, dtype=item)
    return flat.reshape(rows, cols).astype(out_dtype)
def _unpack_simple(fid, dtype, out_dtype):
    """Read one scalar of ``dtype`` from ``fid`` and cast it to ``out_dtype``.

    Returns the scalar, or an empty array if the stream was exhausted.
    """
    item = np.dtype(dtype)
    raw = fid.read(item.itemsize)
    arr = np.frombuffer(raw, dtype=item).astype(out_dtype)
    # A short read leaves an empty array; otherwise unwrap the scalar.
    return arr[0] if len(arr) > 0 else arr
def read_char(fid, count=1):
    """Read `count` characters (a big-endian byte string) from bti file."""
    return _unpack_simple(fid, '>S%s' % count, 'S')
def read_bool(fid):
    """Read a single boolean byte from bti file."""
    return _unpack_simple(fid, '>?', bool)
def read_uint8(fid):
    """Read an unsigned 8-bit integer from bti file."""
    return _unpack_simple(fid, '>u1', np.uint8)
def read_int8(fid):
    """Read a signed 8-bit integer from bti file."""
    return _unpack_simple(fid, '>i1', np.int8)
def read_uint16(fid):
    """Read a big-endian unsigned 16-bit integer from bti file."""
    return _unpack_simple(fid, '>u2', np.uint16)
def read_int16(fid):
    """Read a big-endian signed 16-bit integer from bti file."""
    return _unpack_simple(fid, '>i2', np.int16)
def read_uint32(fid):
    """Read a big-endian unsigned 32-bit integer from bti file."""
    return _unpack_simple(fid, '>u4', np.uint32)
def read_int32(fid):
    """Read a big-endian signed 32-bit integer from bti file."""
    return _unpack_simple(fid, '>i4', np.int32)
def read_uint64(fid):
    """Read a big-endian unsigned 64-bit integer from bti file."""
    return _unpack_simple(fid, '>u8', np.uint64)
def read_int64(fid):
    """Read a big-endian signed 64-bit integer from bti file.

    Bug fix: the source dtype was '>u8' (unsigned, apparently copy-pasted
    from read_uint64); a signed '>i8' read is required so that negative
    on-disk values decode correctly instead of relying on uint64->int64
    cast wrap-around.
    """
    return _unpack_simple(fid, '>i8', np.int64)
def read_float(fid):
    """Read a big-endian 32-bit float from bti file."""
    return _unpack_simple(fid, '>f4', np.float32)
def read_double(fid):
    """Read a big-endian 64-bit float from bti file."""
    return _unpack_simple(fid, '>f8', np.float64)
def read_int16_matrix(fid, rows, cols):
    """Read a rows-by-cols matrix of big-endian signed 16-bit ints from bti file."""
    return _unpack_matrix(fid, rows, cols, dtype='>i2',
                          out_dtype=np.int16)
def read_float_matrix(fid, rows, cols):
    """Read a rows-by-cols matrix of big-endian 32-bit floats from bti file."""
    return _unpack_matrix(fid, rows, cols, dtype='>f4',
                          out_dtype=np.float32)
def read_double_matrix(fid, rows, cols):
    """Read a rows-by-cols matrix of big-endian 64-bit floats from bti file."""
    return _unpack_matrix(fid, rows, cols, dtype='>f8',
                          out_dtype=np.float64)
def read_transform(fid):
    """Read a 4x4 float64 transformation matrix from bti file."""
    return read_double_matrix(fid, rows=4, cols=4)
def read_dev_header(x):
    """Read a device header: size, checksum and a 32-byte reserved string."""
    return dict(size=read_int32(x), checksum=read_int32(x),
                reserved=read_str(x, 32))
| bsd-3-clause |
briancoutinho0905/2dsampling | ext/ply/test/yacc_error3.py | 174 | 1485 | # -----------------------------------------------------------------------------
# yacc_error3.py
#
# Bad p_error() function
# -----------------------------------------------------------------------------
import sys
if ".." not in sys.path: sys.path.insert(0,"..")
import ply.yacc as yacc
from calclex import tokens
# Parsing rules
# Operator precedence, lowest first; UMINUS is a fictitious token giving
# unary minus a higher precedence than binary minus.
precedence = (
    ('left','PLUS','MINUS'),
    ('left','TIMES','DIVIDE'),
    ('right','UMINUS'),
    )

# dictionary of names
names = { }
def p_statement_assign(t):
    # NOTE: the docstring below is the yacc grammar rule -- do not edit it.
    'statement : NAME EQUALS expression'
    # Store the evaluated expression under the variable name.
    names[t[1]] = t[3]
def p_statement_expr(t):
    # NOTE: the docstring below is the yacc grammar rule -- do not edit it.
    'statement : expression'
    print(t[1])
def p_expression_binop(t):
    # NOTE: the docstring below is the yacc grammar rule -- do not edit it.
    '''expression : expression PLUS expression
                  | expression MINUS expression
                  | expression TIMES expression
                  | expression DIVIDE expression'''
    # t[1] and t[3] are the operand values, t[2] the operator lexeme.
    left, op, right = t[1], t[2], t[3]
    if op == '+':
        t[0] = left + right
    elif op == '-':
        t[0] = left - right
    elif op == '*':
        t[0] = left * right
    elif op == '/':
        t[0] = left / right
def p_expression_uminus(t):
    # NOTE: the docstring below is the yacc grammar rule -- do not edit it.
    'expression : MINUS expression %prec UMINUS'
    t[0] = -t[2]
def p_expression_group(t):
    # NOTE: the docstring below is the yacc grammar rule -- do not edit it.
    'expression : LPAREN expression RPAREN'
    t[0] = t[2]
def p_expression_number(t):
    # NOTE: the docstring below is the yacc grammar rule -- do not edit it.
    'expression : NUMBER'
    t[0] = t[1]
def p_expression_name(t):
    # NOTE: the docstring below is the yacc grammar rule -- do not edit it.
    'expression : NAME'
    try:
        t[0] = names[t[1]]
    except LookupError:
        # Undefined variables evaluate to 0 after warning.
        print("Undefined name '%s'" % t[1])
        t[0] = 0
# Deliberately invalid: p_error must be a callable, and binding a string
# here exercises yacc's validation of the error handler.  This file is a
# PLY regression test ("Bad p_error() function") -- do not "fix" this.
p_error = "blah"

yacc.yacc()
| bsd-3-clause |
ahmadRagheb/goldenHR | erpnext/config/hr.py | 19 | 6497 | from __future__ import unicode_literals
from frappe import _
def get_data():
	"""Return the HR module's desk sections for ERPNext.

	Each entry describes one card on the module page: a translated
	``label`` plus a list of ``items`` (doctype links, reports, pages or
	help videos) rendered by frappe's module view.
	"""
	return [
		{
			"label": _("Employee and Attendance"),
			"items": [
				{
					"type": "doctype",
					"name": "Employee",
					"description": _("Employee records."),
				},
				{
					"type": "doctype",
					"name": "Employee Attendance Tool",
					"label": _("Employee Attendance Tool"),
					"description":_("Mark Attendance for multiple employees"),
					"hide_count": True
				},
				{
					"type": "doctype",
					"name": "Attendance",
					"description": _("Attendance record."),
				},
				{
					"type": "doctype",
					"name": "Upload Attendance",
					"description":_("Upload attendance from a .csv file"),
					"hide_count": True
				},
			]
		},
		{
			"label": _("Recruitment"),
			"items": [
				{
					"type": "doctype",
					"name": "Job Applicant",
					"description": _("Applicant for a Job."),
				},
				{
					"type": "doctype",
					"name": "Job Opening",
					"description": _("Opening for a Job."),
				},
				{
					"type": "doctype",
					"name": "Offer Letter",
					"description": _("Offer candidate a Job."),
				},
			]
		},
		{
			"label": _("Leaves and Holiday"),
			"items": [
				{
					"type": "doctype",
					"name": "Leave Application",
					"description": _("Applications for leave."),
				},
				{
					"type": "doctype",
					"name":"Leave Type",
					"description": _("Type of leaves like casual, sick etc."),
				},
				{
					"type": "doctype",
					"name": "Holiday List",
					"description": _("Holiday master.")
				},
				{
					"type": "doctype",
					"name": "Leave Allocation",
					"description": _("Allocate leaves for a period.")
				},
				{
					"type": "doctype",
					"name": "Leave Control Panel",
					"label": _("Leave Allocation Tool"),
					"description":_("Allocate leaves for the year."),
					"hide_count": True
				},
				{
					"type": "doctype",
					"name": "Leave Block List",
					"description": _("Block leave applications by department.")
				},
			]
		},
		{
			"label": _("Payroll"),
			"items": [
				{
					"type": "doctype",
					"name": "Salary Slip",
					"description": _("Monthly salary statement."),
				},
				{
					"type": "doctype",
					"name": "Process Payroll",
					"label": _("Process Payroll"),
					"description":_("Generate Salary Slips"),
					"hide_count": True
				},
				{
					"type": "doctype",
					"name": "Salary Structure",
					"description": _("Salary template master.")
				},
				{
					"type": "doctype",
					"name": "Salary Component",
					"label": _("Salary Components"),
					"description": _("Earnings, Deductions and other Salary components")
				},
			]
		},
		{
			"label": _("Expense Claims"),
			"items": [
				{
					"type": "doctype",
					"name": "Expense Claim",
					"description": _("Claims for company expense."),
				},
				{
					"type": "doctype",
					"name": "Expense Claim Type",
					"description": _("Types of Expense Claim.")
				},
			]
		},
		{
			"label": _("Appraisals"),
			"items": [
				{
					"type": "doctype",
					"name": "Appraisal",
					"description": _("Performance appraisal."),
				},
				{
					"type": "doctype",
					"name": "Appraisal Template",
					"description": _("Template for performance appraisals.")
				},
				{
					"type": "page",
					"name": "team-updates",
					"label": _("Team Updates")
				},
			]
		},
		{
			"label": _("Employee Loan Management"),
			"icon": "icon-list",
			"items": [
				{
					"type": "doctype",
					"name": "Loan Type",
					"description": _("Define various loan types")
				},
				{
					"type": "doctype",
					"name": "Employee Loan Application",
					"description": _("Employee Loan Application")
				},
				{
					"type": "doctype",
					"name": "Employee Loan"
				},
			]
		},
		{
			"label": _("Training"),
			"items": [
				{
					"type": "doctype",
					"name": "Training Event"
				},
				{
					"type": "doctype",
					"name": "Training Result"
				},
				{
					"type": "doctype",
					"name": "Training Feedback"
				},
			]
		},
		{
			"label": _("Fleet Management"),
			"items": [
				{
					"type": "doctype",
					"name": "Vehicle"
				},
				{
					"type": "doctype",
					"name": "Vehicle Log"
				},
			]
		},
		{
			"label": _("Setup"),
			"icon": "fa fa-cog",
			"items": [
				{
					"type": "doctype",
					"name": "HR Settings",
					"description": _("Settings for HR Module")
				},
				{
					"type": "doctype",
					"name": "Employment Type",
					"description": _("Types of employment (permanent, contract, intern etc.).")
				},
				{
					"type": "doctype",
					"name": "Branch",
					"description": _("Organization branch master.")
				},
				{
					"type": "doctype",
					"name": "Department",
					"description": _("Organization unit (department) master.")
				},
				{
					"type": "doctype",
					"name": "Designation",
					"description": _("Employee designation (e.g. CEO, Director etc.).")
				},
				{
					"type": "doctype",
					"name": "Daily Work Summary Settings"
				},
			]
		},
		{
			"label": _("Reports"),
			"icon": "fa fa-list",
			"items": [
				{
					"type": "report",
					"is_query_report": True,
					"name": "Employee Leave Balance",
					"doctype": "Leave Application"
				},
				{
					"type": "report",
					"is_query_report": True,
					"name": "Employee Birthday",
					"doctype": "Employee"
				},
				{
					"type": "report",
					"is_query_report": True,
					"name": "Employees working on a holiday",
					"doctype": "Employee"
				},
				{
					"type": "report",
					"name": "Employee Information",
					"doctype": "Employee"
				},
				{
					"type": "report",
					"is_query_report": True,
					"name": "Salary Register",
					"doctype": "Salary Slip"
				},
				{
					"type": "report",
					"is_query_report": True,
					"name": "Monthly Attendance Sheet",
					"doctype": "Attendance"
				},
				{
					"type": "report",
					"is_query_report": True,
					"name": "Vehicle Expenses",
					"doctype": "Vehicle"
				},
			]
		},
		{
			"label": _("Help"),
			"icon": "fa fa-facetime-video",
			"items": [
				{
					"type": "help",
					"label": _("Setting up Employees"),
					"youtube_id": "USfIUdZlUhw"
				},
				{
					"type": "help",
					"label": _("Leave Management"),
					"youtube_id": "fc0p_AXebc8"
				},
				{
					"type": "help",
					"label": _("Expense Claims"),
					"youtube_id": "5SZHJF--ZFY"
				},
				{
					"type": "help",
					"label": _("Processing Payroll"),
					"youtube_id": "apgE-f25Rm0"
				},
			]
		}
	]
| gpl-3.0 |
fhaoquan/kbengine | kbe/src/lib/python/Lib/tkinter/_fix.py | 76 | 2897 | import sys, os
# Delay import _tkinter until we have set TCL_LIBRARY,
# so that Tcl_FindExecutable has a chance to locate its
# encoding directory.
# Unfortunately, we cannot know the TCL_LIBRARY directory
# if we don't know the tcl version, which we cannot find out
# without import Tcl. Fortunately, Tcl will itself look in
# <TCL_LIBRARY>\..\tcl<TCL_VERSION>, so anything close to
# the real Tcl library will do.
# Expand symbolic links on Vista
try:
import ctypes
ctypes.windll.kernel32.GetFinalPathNameByHandleW
except (ImportError, AttributeError):
def convert_path(s):
return s
else:
def convert_path(s):
if isinstance(s, bytes):
s = s.decode("mbcs")
hdir = ctypes.windll.kernel32.\
CreateFileW(s, 0x80, # FILE_READ_ATTRIBUTES
1, # FILE_SHARE_READ
None, 3, # OPEN_EXISTING
0x02000000, # FILE_FLAG_BACKUP_SEMANTICS
None)
if hdir == -1:
# Cannot open directory, give up
return s
buf = ctypes.create_unicode_buffer("", 32768)
res = ctypes.windll.kernel32.\
GetFinalPathNameByHandleW(hdir, buf, len(buf),
0) # VOLUME_NAME_DOS
ctypes.windll.kernel32.CloseHandle(hdir)
if res == 0:
# Conversion failed (e.g. network location)
return s
s = buf[:res]
# Ignore leading \\?\
if s.startswith("\\\\?\\"):
s = s[4:]
if s.startswith("UNC"):
s = "\\" + s[3:]
return s
prefix = os.path.join(sys.base_prefix,"tcl")
if not os.path.exists(prefix):
# devdir/../tcltk/lib
prefix = os.path.join(sys.base_prefix, os.path.pardir, "tcltk", "lib")
prefix = os.path.abspath(prefix)
# if this does not exist, no further search is needed
if os.path.exists(prefix):
prefix = convert_path(prefix)
if "TCL_LIBRARY" not in os.environ:
for name in os.listdir(prefix):
if name.startswith("tcl"):
tcldir = os.path.join(prefix,name)
if os.path.isdir(tcldir):
os.environ["TCL_LIBRARY"] = tcldir
# Compute TK_LIBRARY, knowing that it has the same version
# as Tcl
import _tkinter
ver = str(_tkinter.TCL_VERSION)
if "TK_LIBRARY" not in os.environ:
v = os.path.join(prefix, 'tk'+ver)
if os.path.exists(os.path.join(v, "tclIndex")):
os.environ['TK_LIBRARY'] = v
# We don't know the Tix version, so we must search the entire
# directory
if "TIX_LIBRARY" not in os.environ:
for name in os.listdir(prefix):
if name.startswith("tix"):
tixdir = os.path.join(prefix,name)
if os.path.isdir(tixdir):
os.environ["TIX_LIBRARY"] = tixdir
| lgpl-3.0 |
CG-F16-4-Rutgers/steersuite-rutgers | steerstats/steersuitedb/Test.py | 8 | 7607 | import psycopg2
import psycopg2.extras
from steersuitedb.Util import getTime
from Sequence import TestSequence
# this is not completely encapsulated by another transaction so it should
# be used by the client when inserting data
class Test(object):
"""A simple example class"""
__id_name = "test_id"
__table_name = "test"
__insert_order = "(test_id, algorithm_data_id, test_timestamp, test_comments, benchmark_type, test_case, test_status, scenario_group, num_agents, num_obstacles)"
#test
#(
# test_id integer NOT NULL primary key,
# algorithm_data_id int NOT NULL references algorithm_data(algorithm_data_id),
# test_timestamp timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP,
# test_comments text NOT NULL,
# benchmark_type int NOT NULL references benchmark(benchmark_id),
# test_case text NOT NULL,
# test_status int NOT NULL,
# scenario_group int Not NULL references scenario(scenario_group)
#)
def __init__(self, algorithm_data_id=0, test_comments="", benchmark_type=0,
test_case="", test_status=0, scenario_group=0, num_agents=0, num_obstacles=0):
self._algorithm_data_id = algorithm_data_id
self._test_comments = test_comments
self._benchmark_type = benchmark_type
self._test_case = test_case
self._test_status = test_status
self._scenario_group = scenario_group
self._num_agents = num_agents
self._num_obstacles = num_obstacles
# Returns a Test object
def getTestData(self, cur, n):
cur.execute("SELECT * FROM " + self.__table_name + " where " + self.__id_name + " = %s", (n,))
row = cur.fetchone()
testy = Test(row['algorithm_data_id'], row['test_comments'], row['benchmark_type'], row['test_case'], row['test_status'], row['scenario_group'], row['num_agents'], row['num_obstacles'])
return testy
def getTestXML(self):
return self._test_case
def setBenchmarkValuesFromDict(self, valuesDict):
self._algorithm_data_id=valuesDict['algorithm_data_id']
self._test_comments=valuesDict['test_comments']
self._benchmark_type=valuesDict['benchmark_type']
self._test_case=valuesDict['test_case']
self._test_status=valuesDict['test_status']
self._scenario_group=valuesDict['scenario_group']
self._num_agents=valuesDict['num_agents']
self._num_obstacles=valuesDict['num_obstacles']
# will Return -1 if insert did not work
def insertTest(self, cur, algorithm_data_id, test_comments, benchmark_type, test_case, test_status, scenario_group, num_agents, num_obstacles):
try:
testSeq = TestSequence()
next_id = testSeq.getNextVal(cur)
# print "obstacles: " + str(num_obstacles)
# print "agents: " + str(num_agents)
# get the current timestamp
timestamp = getTime(cur)
cur.execute("INSERT INTO " + self.__table_name + " " +
self.__insert_order + " " +
"VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)" ,
(
str(next_id) ,
str(algorithm_data_id) ,
str(timestamp) ,
test_comments ,
str(benchmark_type),
test_case ,
str(test_status) ,
str(scenario_group),
num_agents,
num_obstacles
)
)
return next_id
except psycopg2.DatabaseError, e:
print 'Error Test.py %s' % e
def insertTest2(self, cur):
return self.insertTest(cur, self._algorithm_data_id, self._test_comments, self._benchmark_type, self._test_case, self._test_status, self._scenario_group,
self._num_agents, self._num_obstacles)
def get_id_name(self):
return self._id_name
def get_table_name(self):
return self._table_name
def get_insert_order(self):
return self._insert_order
def get_algorithm_data_id(self):
return self._algorithm_data_id
def get_test_comments(self):
return self._test_comments
def get_benchmark_type(self):
return self._benchmark_type
def get_test_case(self):
return self._test_case
def get_test_status(self):
return self._test_status
def get_scenario_group(self):
return self._scenario_group
def get_num_agents(self):
return self._num_agents
def get_num_obstacles(self):
return self._num_obstacles
def set_id_name(self, value):
self._id_name = value
def set_table_name(self, value):
self._table_name = value
def set_insert_order(self, value):
self._insert_order = value
def set_algorithm_data_id(self, value):
self._algorithm_data_id = value
def set_test_comments(self, value):
self._test_comments = value
def set_benchmark_type(self, value):
self._benchmark_type = value
def set_test_case(self, value):
self._test_case = value
def set_test_status(self, value):
self._test_status = value
def set_scenario_group(self, value):
self._scenario_group = value
def set_num_agents(self, value):
self._num_agents = value
def set_num_obstacles(self, value):
self._num_obstacles = value
def del_id_name(self):
del self._id_name
def del_table_name(self):
del self._table_name
def del_insert_order(self):
del self._insert_order
def del_algorithm_data_id(self):
del self._algorithm_data_id
def del_test_comments(self):
del self._test_comments
def del_benchmark_type(self):
del self._benchmark_type
def del_test_case(self):
del self._test_case
def del_test_status(self):
del self._test_status
def del_scenario_group(self):
del self._scenario_group
def del_num_agents(self):
del self._num_agents
def del_num_obstacles(self):
del self._num_obstacles
id_name = property(get_id_name, set_id_name, del_id_name, "id_name's docstring")
table_name = property(get_table_name, set_table_name, del_table_name, "table_name's docstring")
insert_order = property(get_insert_order, set_insert_order, del_insert_order, "insert_order's docstring")
algorithm_data_id = property(get_algorithm_data_id, set_algorithm_data_id, del_algorithm_data_id, "algorithm_data_id's docstring")
test_comments = property(get_test_comments, set_test_comments, del_test_comments, "test_comments's docstring")
benchmark_type = property(get_benchmark_type, set_benchmark_type, del_benchmark_type, "benchmark_type's docstring")
test_case = property(get_test_case, set_test_case, del_test_case, "test_case's docstring")
test_status = property(get_test_status, set_test_status, del_test_status, "test_status's docstring")
scenario_group = property(get_scenario_group, set_scenario_group, del_scenario_group, "scenario_group's docstring")
num_agents = property(get_num_agents, set_num_agents, del_num_agents, "num_agents's docstring")
num_obstacles = property(get_num_obstacles, set_num_obstacles, del_num_obstacles, "num_obstacles's docstring")
# sys.exit(1)
| gpl-3.0 |
valkjsaaa/sl4a | python/gdata/tests/gdata_tests/youtube/service_test.py | 89 | 23513 | #!/usr/bin/python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = '[email protected] (Jochen Hartmann)'
import getpass
import time
import StringIO
import random
import unittest
import atom
import gdata.youtube
import gdata.youtube.service
YOUTUBE_TEST_CLIENT_ID = 'ytapi-pythonclientlibrary_servicetest'
class YouTubeServiceTest(unittest.TestCase):
def setUp(self):
self.client = gdata.youtube.service.YouTubeService()
self.client.email = username
self.client.password = password
self.client.source = YOUTUBE_TEST_CLIENT_ID
self.client.developer_key = developer_key
self.client.client_id = YOUTUBE_TEST_CLIENT_ID
self.client.ProgrammaticLogin()
def testRetrieveVideoFeed(self):
feed = self.client.GetYouTubeVideoFeed(
'http://gdata.youtube.com/feeds/api/standardfeeds/recently_featured');
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
for entry in feed.entry:
self.assert_(entry.title.text != '')
def testRetrieveTopRatedVideoFeed(self):
feed = self.client.GetTopRatedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostViewedVideoFeed(self):
feed = self.client.GetMostViewedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveRecentlyFeaturedVideoFeed(self):
feed = self.client.GetRecentlyFeaturedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveWatchOnMobileVideoFeed(self):
feed = self.client.GetWatchOnMobileVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveTopFavoritesVideoFeed(self):
feed = self.client.GetTopFavoritesVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostRecentVideoFeed(self):
feed = self.client.GetMostRecentVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostDiscussedVideoFeed(self):
feed = self.client.GetMostDiscussedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostLinkedVideoFeed(self):
feed = self.client.GetMostLinkedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveMostRespondedVideoFeed(self):
feed = self.client.GetMostRespondedVideoFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 10)
def testRetrieveVideoEntryByUri(self):
entry = self.client.GetYouTubeVideoEntry(
'http://gdata.youtube.com/feeds/videos/Ncakifd_16k')
self.assert_(isinstance(entry, gdata.youtube.YouTubeVideoEntry))
self.assert_(entry.title.text != '')
def testRetrieveVideoEntryByVideoId(self):
entry = self.client.GetYouTubeVideoEntry(video_id='Ncakifd_16k')
self.assert_(isinstance(entry, gdata.youtube.YouTubeVideoEntry))
self.assert_(entry.title.text != '')
def testRetrieveUserVideosbyUri(self):
feed = self.client.GetYouTubeUserFeed(
'http://gdata.youtube.com/feeds/users/gdpython/uploads')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveUserVideosbyUsername(self):
feed = self.client.GetYouTubeUserFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testSearchWithVideoQuery(self):
query = gdata.youtube.service.YouTubeVideoQuery()
query.vq = 'google'
query.max_results = 8
feed = self.client.YouTubeQuery(query)
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assertEquals(len(feed.entry), 8)
def testDirectVideoUploadStatusUpdateAndDeletion(self):
self.assertEquals(self.client.developer_key, developer_key)
self.assertEquals(self.client.client_id, YOUTUBE_TEST_CLIENT_ID)
self.assertEquals(self.client.additional_headers['X-GData-Key'],
'key=' + developer_key)
self.assertEquals(self.client.additional_headers['X-Gdata-Client'],
YOUTUBE_TEST_CLIENT_ID)
test_video_title = 'my cool video ' + str(random.randint(1000,5000))
test_video_description = 'description ' + str(random.randint(1000,5000))
my_media_group = gdata.media.Group(
title = gdata.media.Title(text=test_video_title),
description = gdata.media.Description(description_type='plain',
text=test_video_description),
keywords = gdata.media.Keywords(text='video, foo'),
category = gdata.media.Category(
text='Autos',
scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
label='Autos'),
player=None
)
self.assert_(isinstance(my_media_group, gdata.media.Group))
# Set Geo location to 37,-122 lat, long
where = gdata.geo.Where()
where.set_location((37.0,-122.0))
video_entry = gdata.youtube.YouTubeVideoEntry(media=my_media_group,
geo=where)
self.assert_(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
new_entry = self.client.InsertVideoEntry(video_entry, video_file_location)
self.assert_(isinstance(new_entry, gdata.youtube.YouTubeVideoEntry))
self.assertEquals(new_entry.title.text, test_video_title)
self.assertEquals(new_entry.media.description.text, test_video_description)
self.assert_(new_entry.id.text)
# check upload status also
upload_status = self.client.CheckUploadStatus(new_entry)
self.assert_(upload_status[0] != '')
# test updating entry meta-data
new_video_description = 'description ' + str(random.randint(1000,5000))
new_entry.media.description.text = new_video_description
updated_entry = self.client.UpdateVideoEntry(new_entry)
self.assert_(isinstance(updated_entry, gdata.youtube.YouTubeVideoEntry))
self.assertEquals(updated_entry.media.description.text,
new_video_description)
# sleep for 10 seconds
time.sleep(10)
# test to delete the entry
value = self.client.DeleteVideoEntry(updated_entry)
if not value:
# sleep more and try again
time.sleep(20)
# test to delete the entry
value = self.client.DeleteVideoEntry(updated_entry)
self.assert_(value == True)
def testDirectVideoUploadWithDeveloperTags(self):
self.assertEquals(self.client.developer_key, developer_key)
self.assertEquals(self.client.client_id, YOUTUBE_TEST_CLIENT_ID)
self.assertEquals(self.client.additional_headers['X-GData-Key'],
'key=' + developer_key)
self.assertEquals(self.client.additional_headers['X-Gdata-Client'],
YOUTUBE_TEST_CLIENT_ID)
test_video_title = 'my cool video ' + str(random.randint(1000,5000))
test_video_description = 'description ' + str(random.randint(1000,5000))
test_developer_tag_01 = 'tag' + str(random.randint(1000,5000))
test_developer_tag_02 = 'tag' + str(random.randint(1000,5000))
test_developer_tag_03 = 'tag' + str(random.randint(1000,5000))
my_media_group = gdata.media.Group(
title = gdata.media.Title(text=test_video_title),
description = gdata.media.Description(description_type='plain',
text=test_video_description),
keywords = gdata.media.Keywords(text='video, foo'),
category = [gdata.media.Category(
text='Autos',
scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
label='Autos')],
player=None
)
self.assert_(isinstance(my_media_group, gdata.media.Group))
video_entry = gdata.youtube.YouTubeVideoEntry(media=my_media_group)
original_developer_tags = [test_developer_tag_01, test_developer_tag_02,
test_developer_tag_03]
dev_tags = video_entry.AddDeveloperTags(original_developer_tags)
for dev_tag in dev_tags:
self.assert_(dev_tag.text in original_developer_tags)
self.assert_(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
new_entry = self.client.InsertVideoEntry(video_entry, video_file_location)
self.assert_(isinstance(new_entry, gdata.youtube.YouTubeVideoEntry))
self.assertEquals(new_entry.title.text, test_video_title)
self.assertEquals(new_entry.media.description.text, test_video_description)
self.assert_(new_entry.id.text)
developer_tags_from_new_entry = new_entry.GetDeveloperTags()
for dev_tag in developer_tags_from_new_entry:
self.assert_(dev_tag.text in original_developer_tags)
self.assertEquals(len(developer_tags_from_new_entry),
len(original_developer_tags))
# sleep for 10 seconds
time.sleep(10)
# test to delete the entry
value = self.client.DeleteVideoEntry(new_entry)
if not value:
# sleep more and try again
time.sleep(20)
# test to delete the entry
value = self.client.DeleteVideoEntry(new_entry)
self.assert_(value == True)
def testBrowserBasedVideoUpload(self):
self.assertEquals(self.client.developer_key, developer_key)
self.assertEquals(self.client.client_id, YOUTUBE_TEST_CLIENT_ID)
self.assertEquals(self.client.additional_headers['X-GData-Key'],
'key=' + developer_key)
self.assertEquals(self.client.additional_headers['X-Gdata-Client'],
YOUTUBE_TEST_CLIENT_ID)
test_video_title = 'my cool video ' + str(random.randint(1000,5000))
test_video_description = 'description ' + str(random.randint(1000,5000))
my_media_group = gdata.media.Group(
title = gdata.media.Title(text=test_video_title),
description = gdata.media.Description(description_type='plain',
text=test_video_description),
keywords = gdata.media.Keywords(text='video, foo'),
category = gdata.media.Category(
text='Autos',
scheme='http://gdata.youtube.com/schemas/2007/categories.cat',
label='Autos'),
player=None
)
self.assert_(isinstance(my_media_group, gdata.media.Group))
video_entry = gdata.youtube.YouTubeVideoEntry(media=my_media_group)
self.assert_(isinstance(video_entry, gdata.youtube.YouTubeVideoEntry))
response = self.client.GetFormUploadToken(video_entry)
self.assert_(response[0].startswith(
'http://uploads.gdata.youtube.com/action/FormDataUpload/'))
self.assert_(len(response[0]) > 55)
self.assert_(len(response[1]) > 100)
def testRetrieveRelatedVideoFeedByUri(self):
feed = self.client.GetYouTubeRelatedVideoFeed(
'http://gdata.youtube.com/feeds/videos/Ncakifd_16k/related')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveRelatedVideoFeedById(self):
feed = self.client.GetYouTubeRelatedVideoFeed(video_id = 'Ncakifd_16k')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveResponseVideoFeedByUri(self):
feed = self.client.GetYouTubeVideoResponseFeed(
'http://gdata.youtube.com/feeds/videos/Ncakifd_16k/responses')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoResponseFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveResponseVideoFeedById(self):
feed = self.client.GetYouTubeVideoResponseFeed(video_id='Ncakifd_16k')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoResponseFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveVideoCommentFeedByUri(self):
feed = self.client.GetYouTubeVideoCommentFeed(
'http://gdata.youtube.com/feeds/api/videos/Ncakifd_16k/comments')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoCommentFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveVideoCommentFeedByVideoId(self):
feed = self.client.GetYouTubeVideoCommentFeed(video_id='Ncakifd_16k')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoCommentFeed))
self.assert_(len(feed.entry) > 0)
def testAddComment(self):
video_id = '9g6buYJTt_g'
video_entry = self.client.GetYouTubeVideoEntry(video_id=video_id)
random_comment_text = 'test_comment_' + str(random.randint(1000,50000))
self.client.AddComment(comment_text=random_comment_text,
video_entry=video_entry)
comment_feed = self.client.GetYouTubeVideoCommentFeed(video_id=video_id)
comment_found = False
for item in comment_feed.entry:
if (item.content.text == random_comment_text):
comment_found = True
self.assertEquals(comment_found, True)
def testAddRating(self):
video_id_to_rate = 'Ncakifd_16k'
video_entry = self.client.GetYouTubeVideoEntry(video_id=video_id_to_rate)
response = self.client.AddRating(3, video_entry)
self.assert_(isinstance(response, gdata.GDataEntry))
def testRetrievePlaylistFeedByUri(self):
feed = self.client.GetYouTubePlaylistFeed(
'http://gdata.youtube.com/feeds/users/gdpython/playlists')
self.assert_(isinstance(feed, gdata.youtube.YouTubePlaylistFeed))
self.assert_(len(feed.entry) > 0)
def testRetrievePlaylistListFeedByUsername(self):
feed = self.client.GetYouTubePlaylistFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubePlaylistFeed))
self.assert_(len(feed.entry) > 0)
def testRetrievePlaylistVideoFeed(self):
feed = self.client.GetYouTubePlaylistVideoFeed(
'http://gdata.youtube.com/feeds/api/playlists/BCB3BB96DF51B505')
self.assert_(isinstance(feed, gdata.youtube.YouTubePlaylistVideoFeed))
self.assert_(len(feed.entry) > 0)
self.assert_(isinstance(feed.entry[0],
gdata.youtube.YouTubePlaylistVideoEntry))
def testAddUpdateAndDeletePlaylist(self):
test_playlist_title = 'my test playlist ' + str(random.randint(1000,3000))
test_playlist_description = 'test playlist '
response = self.client.AddPlaylist(test_playlist_title,
test_playlist_description)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistEntry))
new_playlist_title = 'my updated playlist ' + str(random.randint(1000,4000))
new_playlist_description = 'my updated playlist '
playlist_entry_id = response.id.text.split('/')[-1]
updated_playlist = self.client.UpdatePlaylist(playlist_entry_id,
new_playlist_title,
new_playlist_description)
playlist_feed = self.client.GetYouTubePlaylistFeed()
update_successful = False
for playlist_entry in playlist_feed.entry:
if playlist_entry.title.text == new_playlist_title:
update_successful = True
break
self.assertEquals(update_successful, True)
# wait
time.sleep(10)
# delete it
playlist_uri = updated_playlist.id.text
response = self.client.DeletePlaylist(playlist_uri)
self.assertEquals(response, True)
def testAddUpdateAndDeletePrivatePlaylist(self):
test_playlist_title = 'my test playlist ' + str(random.randint(1000,3000))
test_playlist_description = 'test playlist '
response = self.client.AddPlaylist(test_playlist_title,
test_playlist_description,
playlist_private=True)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistEntry))
new_playlist_title = 'my updated playlist ' + str(random.randint(1000,4000))
new_playlist_description = 'my updated playlist '
playlist_entry_id = response.id.text.split('/')[-1]
updated_playlist = self.client.UpdatePlaylist(playlist_entry_id,
new_playlist_title,
new_playlist_description,
playlist_private=True)
playlist_feed = self.client.GetYouTubePlaylistFeed()
update_successful = False
playlist_still_private = False
for playlist_entry in playlist_feed.entry:
if playlist_entry.title.text == new_playlist_title:
update_successful = True
if playlist_entry.private is not None:
playlist_still_private = True
self.assertEquals(update_successful, True)
self.assertEquals(playlist_still_private, True)
# wait
time.sleep(10)
# delete it
playlist_uri = updated_playlist.id.text
response = self.client.DeletePlaylist(playlist_uri)
self.assertEquals(response, True)
def testAddEditAndDeleteVideoFromPlaylist(self):
test_playlist_title = 'my test playlist ' + str(random.randint(1000,3000))
test_playlist_description = 'test playlist '
response = self.client.AddPlaylist(test_playlist_title,
test_playlist_description)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistEntry))
custom_video_title = 'my test video on my test playlist'
custom_video_description = 'this is a test video on my test playlist'
video_id = 'Ncakifd_16k'
playlist_uri = response.feed_link[0].href
time.sleep(10)
response = self.client.AddPlaylistVideoEntryToPlaylist(
playlist_uri, video_id, custom_video_title, custom_video_description)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistVideoEntry))
playlist_entry_id = response.id.text.split('/')[-1]
playlist_uri = response.id.text.split(playlist_entry_id)[0][:-1]
new_video_title = 'video number ' + str(random.randint(1000,3000))
new_video_description = 'test video'
time.sleep(10)
response = self.client.UpdatePlaylistVideoEntryMetaData(
playlist_uri,
playlist_entry_id,
new_video_title,
new_video_description,
1)
self.assert_(isinstance(response, gdata.youtube.YouTubePlaylistVideoEntry))
time.sleep(10)
playlist_entry_id = response.id.text.split('/')[-1]
# remove video from playlist
response = self.client.DeletePlaylistVideoEntry(playlist_uri,
playlist_entry_id)
self.assertEquals(response, True)
time.sleep(10)
# delete the playlist
response = self.client.DeletePlaylist(playlist_uri)
self.assertEquals(response, True)
def testRetrieveSubscriptionFeedByUri(self):
feed = self.client.GetYouTubeSubscriptionFeed(
'http://gdata.youtube.com/feeds/users/gdpython/subscriptions')
self.assert_(isinstance(feed, gdata.youtube.YouTubeSubscriptionFeed))
self.assert_(len(feed.entry) == 3)
subscription_to_channel_found = False
subscription_to_favorites_found = False
subscription_to_query_found = False
all_types_found = False
for entry in feed.entry:
self.assert_(isinstance(entry, gdata.youtube.YouTubeSubscriptionEntry))
subscription_type = entry.GetSubscriptionType()
if subscription_type == 'channel':
subscription_to_channel_found = True
elif subscription_type == 'favorites':
subscription_to_favorites_found = True
elif subscription_type == 'query':
subscription_to_query_found = True
if (subscription_to_channel_found and subscription_to_favorites_found and
subscription_to_query_found):
all_types_found = True
self.assertEquals(all_types_found, True)
def testRetrieveSubscriptionFeedByUsername(self):
feed = self.client.GetYouTubeSubscriptionFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeSubscriptionFeed))
self.assert_(len(feed.entry) == 3)
subscription_to_channel_found = False
subscription_to_favorites_found = False
subscription_to_query_found = False
all_types_found = False
for entry in feed.entry:
self.assert_(isinstance(entry, gdata.youtube.YouTubeSubscriptionEntry))
subscription_type = entry.GetSubscriptionType()
if subscription_type == 'channel':
subscription_to_channel_found = True
elif subscription_type == 'favorites':
subscription_to_favorites_found = True
elif subscription_type == 'query':
subscription_to_query_found = True
if (subscription_to_channel_found and subscription_to_favorites_found and
subscription_to_query_found):
all_types_found = True
self.assertEquals(all_types_found, True)
def testRetrieveUserProfileByUri(self):
user = self.client.GetYouTubeUserEntry(
'http://gdata.youtube.com/feeds/users/gdpython')
self.assert_(isinstance(user, gdata.youtube.YouTubeUserEntry))
self.assertEquals(user.location.text, 'US')
def testRetrieveUserProfileByUsername(self):
user = self.client.GetYouTubeUserEntry(username='gdpython')
self.assert_(isinstance(user, gdata.youtube.YouTubeUserEntry))
self.assertEquals(user.location.text, 'US')
def testRetrieveUserFavoritesFeed(self):
feed = self.client.GetUserFavoritesFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testRetrieveDefaultUserFavoritesFeed(self):
feed = self.client.GetUserFavoritesFeed()
self.assert_(isinstance(feed, gdata.youtube.YouTubeVideoFeed))
self.assert_(len(feed.entry) > 0)
def testAddAndDeleteVideoFromFavorites(self):
video_id = 'Ncakifd_16k'
video_entry = self.client.GetYouTubeVideoEntry(video_id=video_id)
response = self.client.AddVideoEntryToFavorites(video_entry)
self.assert_(isinstance(response, gdata.GDataEntry))
time.sleep(10)
response = self.client.DeleteVideoEntryFromFavorites(video_id)
self.assertEquals(response, True)
def testRetrieveContactFeedByUri(self):
feed = self.client.GetYouTubeContactFeed(
'http://gdata.youtube.com/feeds/users/gdpython/contacts')
self.assert_(isinstance(feed, gdata.youtube.YouTubeContactFeed))
self.assertEquals(len(feed.entry), 1)
def testRetrieveContactFeedByUsername(self):
feed = self.client.GetYouTubeContactFeed(username='gdpython')
self.assert_(isinstance(feed, gdata.youtube.YouTubeContactFeed))
self.assertEquals(len(feed.entry), 1)
if __name__ == '__main__':
print ('NOTE: Please run these tests only with a test account. '
'The tests may delete or update your data.')
username = raw_input('Please enter your username: ')
password = getpass.getpass()
developer_key = raw_input('Please enter your developer key: ')
video_file_location = raw_input(
'Please enter the absolute path to a video file: ')
unittest.main()
| apache-2.0 |
yanheven/console | openstack_dashboard/dashboards/project/network_topology/routers/tables.py | 8 | 1108 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 NTT Innovation Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from openstack_dashboard.dashboards.project.routers import\
tables as r_tables
class DeleteRouter(r_tables.DeleteRouter):
redirect_url = "horizon:project:network_topology:router"
class RoutersTable(r_tables.RoutersTable):
class Meta:
name = "Routers"
verbose_name = _("Routers")
status_columns = ["status"]
row_actions = (DeleteRouter,)
| apache-2.0 |
bartvm/pylearn2 | pylearn2/datasets/tests/test_dense_design_matrix.py | 12 | 3329 | import numpy as np
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrixPyTables
from pylearn2.datasets.dense_design_matrix import DefaultViewConverter
from pylearn2.datasets.dense_design_matrix import from_dataset
from pylearn2.utils import serial
def test_init_with_X_or_topo():
# tests that constructing with topo_view works
# tests that construction with design matrix works
# tests that conversion from topo_view to design matrix and back works
# tests that conversion the other way works too
rng = np.random.RandomState([1, 2, 3])
topo_view = rng.randn(5, 2, 2, 3)
d1 = DenseDesignMatrix(topo_view=topo_view)
X = d1.get_design_matrix()
d2 = DenseDesignMatrix(X=X, view_converter=d1.view_converter)
topo_view_2 = d2.get_topological_view()
assert np.allclose(topo_view, topo_view_2)
X = rng.randn(*X.shape)
topo_view_3 = d2.get_topological_view(X)
X2 = d2.get_design_matrix(topo_view_3)
assert np.allclose(X, X2)
def test_convert_to_one_hot():
rng = np.random.RandomState([2013, 11, 14])
m = 11
d = DenseDesignMatrix(
X=rng.randn(m, 4),
y=rng.randint(low=0, high=10, size=(m,)))
d.convert_to_one_hot()
def test_init_with_vc():
rng = np.random.RandomState([4, 5, 6])
d = DenseDesignMatrix(
X=rng.randn(12, 5),
view_converter=DefaultViewConverter([1, 2, 3]))
def get_rnd_design_matrix():
    """Build a small random DenseDesignMatrix (10 examples) for split tests."""
    rng = np.random.RandomState([1, 2, 3])
    topo_view = rng.randn(10, 2, 2, 3)
    d1 = DenseDesignMatrix(topo_view=topo_view)
    return d1


def test_split_datasets():
    # Test the split dataset function: holdout split must partition all
    # examples between train and valid.
    ddm = get_rnd_design_matrix()
    (train, valid) = ddm.split_dataset_holdout(train_prop=0.5)
    assert valid.shape[0] == np.ceil(ddm.get_num_examples() * 0.5)
    assert train.shape[0] == (ddm.get_num_examples() - valid.shape[0])


def test_split_nfold_datasets():
    # Random data (not a real dataset); checks the size of the first fold.
    # NOTE(review): `get_num_examples() / 10` floors under Python 2 integer
    # division; harmless here because 10 divides evenly — confirm if reused.
    ddm = get_rnd_design_matrix()
    folds = ddm.split_dataset_nfolds(10)
    assert folds[0].shape[0] == np.ceil(ddm.get_num_examples() / 10)
def test_pytables():
    """
    Tests whether DenseDesignMatrixPyTables can be loaded and
    can initialize an iterator.
    """
    # TODO: more thorough test
    x = np.ones((2, 3))
    y = np.ones(2)
    ds = DenseDesignMatrixPyTables(X=x, y=y)
    it = ds.iterator(mode='sequential', batch_size=1)
    # next(it) instead of it.next(): works on both Python 2.6+ and Python 3,
    # whereas the .next() method only exists on Python 2 iterators.
    next(it)
def test_from_dataset():
    """
    Tests whether it supports integer labels.
    """
    rng = np.random.RandomState([1, 2, 3])
    topo_view = rng.randn(12, 2, 3, 3)
    y = rng.randint(0, 5, (12, 1))
    # without y: slicing keeps feature width and returns 5 examples
    d1 = DenseDesignMatrix(topo_view=topo_view)
    slice_d = from_dataset(d1, 5)
    assert slice_d.X.shape[1] == d1.X.shape[1]
    assert slice_d.X.shape[0] == 5
    # with y: labels must be sliced along with the features
    d2 = DenseDesignMatrix(topo_view=topo_view, y=y)
    slice_d = from_dataset(d2, 5)
    assert slice_d.X.shape[1] == d2.X.shape[1]
    assert slice_d.X.shape[0] == 5
    assert slice_d.y.shape[0] == 5
    # without topo_view: plain design-matrix construction path
    x = topo_view.reshape(12, 18)
    d3 = DenseDesignMatrix(X=x, y=y)
    slice_d = from_dataset(d3, 5)
    assert slice_d.X.shape[1] == d3.X.shape[1]
    assert slice_d.X.shape[0] == 5
    assert slice_d.y.shape[0] == 5
| bsd-3-clause |
bopo/tablib | tablib/packages/xlwt3/ExcelFormulaParser.py | 46 | 22812 | ### $ANTLR 2.7.7 (20060930): "xlwt/excel-formula.g" -> "ExcelFormulaParser.py"$
### import antlr and other modules ..
import sys
from . import antlr
### header action >>>
import struct
from . import Utils
from .UnicodeUtils import upack1
from .ExcelMagic import *
# Opcode deltas: adding one of these to a base ptg opcode selects its
# Reference (R) / Value (V) / Array (A) operand-class variant.
_RVAdelta = {"R": 0, "V": 0x20, "A": 0x40}
# Variants for cell and area references; the extra "D" key is an additional
# argument class used by the grammar — presumably "destination"; confirm
# against the .g grammar file.
_RVAdeltaRef = {"R": 0, "V": 0x20, "A": 0x40, "D": 0x20}
_RVAdeltaArea = {"R": 0, "V": 0x20, "A": 0x40, "D": 0}


class FormulaParseException(Exception):
    """
    An exception indicating that a Formula could not be successfully parsed.
    """
### header action <<<
### preamble action>>>
### preamble action <<<
### import antlr.Token
from .antlr import Token
### >>>The Known Token Types <<<
# Token type codes shared by the generated lexer and this parser.
# The first six are antlr built-ins; codes 4..35 are the grammar's terminals
# and must stay aligned with the _tokenNames table below.
SKIP = antlr.SKIP
INVALID_TYPE = antlr.INVALID_TYPE
EOF_TYPE = antlr.EOF_TYPE
EOF = antlr.EOF
NULL_TREE_LOOKAHEAD = antlr.NULL_TREE_LOOKAHEAD
MIN_USER_TYPE = antlr.MIN_USER_TYPE
TRUE_CONST = 4
FALSE_CONST = 5
STR_CONST = 6
NUM_CONST = 7
INT_CONST = 8
FUNC_IF = 9
FUNC_CHOOSE = 10
NAME = 11
QUOTENAME = 12
EQ = 13
NE = 14
GT = 15
LT = 16
GE = 17
LE = 18
ADD = 19
SUB = 20
MUL = 21
DIV = 22
POWER = 23
PERCENT = 24
LP = 25
RP = 26
LB = 27
RB = 28
COLON = 29
COMMA = 30
SEMICOLON = 31
REF2D = 32
REF2D_R1C1 = 33
BANG = 34
CONCAT = 35
class Parser(antlr.LLkParser):
    """ANTLR-generated recursive-descent parser for Excel formulas.

    Parsing appends BIFF RPN bytecode to ``self.rpn`` as rules are matched.
    """
    ### user action >>>
    ### user action <<<

    def __init__(self, *args, **kwargs):
        """Initialise the LL(k) parser and the RPN accumulation state."""
        antlr.LLkParser.__init__(self, *args, **kwargs)
        self.tokenNames = _tokenNames
        ### __init__ header action >>>
        # Formula bytecode (RPN token stream), built up while parsing.
        self.rpn = b""
        # (sheet1, sheet2, rpn_offset) patch points for 3-D references.
        self.sheet_references = []
        # (function_name, rpn_offset) patch points for add-in (XCALL) calls.
        self.xcall_references = []
        ### __init__ header action <<<
    def formula(self):
        """Entry point: parse a whole formula as a Value-class expression."""
        self.expr("V")
    def expr(self, arg_type):
        """Comparison level: prec0_expr ((= | <> | > | < | >= | <=) prec0_expr)*."""
        self.prec0_expr(arg_type)
        while True:
            # EQ..LE are consecutive token codes, so a range test suffices.
            if ((self.LA(1) >= EQ and self.LA(1) <= LE)):
                pass
                la1 = self.LA(1)
                if False:
                    pass
                elif la1 and la1 in [EQ]:
                    pass
                    self.match(EQ)
                    op = struct.pack('B', ptgEQ)
                elif la1 and la1 in [NE]:
                    pass
                    self.match(NE)
                    op = struct.pack('B', ptgNE)
                elif la1 and la1 in [GT]:
                    pass
                    self.match(GT)
                    op = struct.pack('B', ptgGT)
                elif la1 and la1 in [LT]:
                    pass
                    self.match(LT)
                    op = struct.pack('B', ptgLT)
                elif la1 and la1 in [GE]:
                    pass
                    self.match(GE)
                    op = struct.pack('B', ptgGE)
                elif la1 and la1 in [LE]:
                    pass
                    self.match(LE)
                    op = struct.pack('B', ptgLE)
                else:
                    raise antlr.NoViableAltException(self.LT(1), self.getFilename())
                # RPN: operands first, then the operator opcode.
                self.prec0_expr(arg_type)
                self.rpn += op
            else:
                break
    def prec0_expr(self,
                   arg_type
                   ):
        """String concatenation level: prec1_expr (& prec1_expr)*."""
        pass
        self.prec1_expr(arg_type)
        while True:
            if (self.LA(1)==CONCAT):
                pass
                pass
                self.match(CONCAT)
                op = struct.pack('B', ptgConcat)
                self.prec1_expr(arg_type)
                self.rpn += op
            else:
                break
    def prec1_expr(self,
                   arg_type
                   ):
        """Additive level: prec2_expr ((+ | -) prec2_expr)*."""
        pass
        self.prec2_expr(arg_type)
        while True:
            if (self.LA(1)==ADD or self.LA(1)==SUB):
                pass
                la1 = self.LA(1)
                if False:
                    pass
                elif la1 and la1 in [ADD]:
                    pass
                    self.match(ADD)
                    op = struct.pack('B', ptgAdd)
                elif la1 and la1 in [SUB]:
                    pass
                    self.match(SUB)
                    op = struct.pack('B', ptgSub)
                else:
                    raise antlr.NoViableAltException(self.LT(1), self.getFilename())
                self.prec2_expr(arg_type)
                self.rpn += op;
                # print "**prec1_expr4 %s" % arg_type
            else:
                break
    def prec2_expr(self,
                   arg_type
                   ):
        """Multiplicative level: prec3_expr ((* | /) prec3_expr)*."""
        pass
        self.prec3_expr(arg_type)
        while True:
            if (self.LA(1)==MUL or self.LA(1)==DIV):
                pass
                la1 = self.LA(1)
                if False:
                    pass
                elif la1 and la1 in [MUL]:
                    pass
                    self.match(MUL)
                    op = struct.pack('B', ptgMul)
                elif la1 and la1 in [DIV]:
                    pass
                    self.match(DIV)
                    op = struct.pack('B', ptgDiv)
                else:
                    raise antlr.NoViableAltException(self.LT(1), self.getFilename())
                self.prec3_expr(arg_type)
                self.rpn += op
            else:
                break
    def prec3_expr(self,
                   arg_type
                   ):
        """Exponentiation level: prec4_expr (^ prec4_expr)*."""
        pass
        self.prec4_expr(arg_type)
        while True:
            if (self.LA(1)==POWER):
                pass
                pass
                self.match(POWER)
                op = struct.pack('B', ptgPower)
                self.prec4_expr(arg_type)
                self.rpn += op
            else:
                break
    def prec4_expr(self,
                   arg_type
                   ):
        """Postfix percent level: prec5_expr (%)?."""
        pass
        self.prec5_expr(arg_type)
        la1 = self.LA(1)
        if False:
            pass
        elif la1 and la1 in [PERCENT]:
            pass
            self.match(PERCENT)
            self.rpn += struct.pack('B', ptgPercent)
        elif la1 and la1 in [EOF,EQ,NE,GT,LT,GE,LE,ADD,SUB,MUL,DIV,POWER,RP,COMMA,SEMICOLON,CONCAT]:
            # No percent sign; any legal follower of an operand ends the rule.
            pass
        else:
            raise antlr.NoViableAltException(self.LT(1), self.getFilename())
    def prec5_expr(self,
                   arg_type
                   ):
        """Unary-minus level: primary | - primary."""
        la1 = self.LA(1)
        if False:
            pass
        elif la1 and la1 in [TRUE_CONST,FALSE_CONST,STR_CONST,NUM_CONST,INT_CONST,FUNC_IF,FUNC_CHOOSE,NAME,QUOTENAME,LP,REF2D]:
            pass
            self.primary(arg_type)
        elif la1 and la1 in [SUB]:
            pass
            self.match(SUB)
            self.primary(arg_type)
            self.rpn += struct.pack('B', ptgUminus)
        else:
            raise antlr.NoViableAltException(self.LT(1), self.getFilename())
def primary(self,
arg_type
):
str_tok = None
int_tok = None
num_tok = None
ref2d_tok = None
ref2d1_tok = None
ref2d2_tok = None
ref3d_ref2d = None
ref3d_ref2d2 = None
name_tok = None
func_tok = None
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [TRUE_CONST]:
pass
self.match(TRUE_CONST)
self.rpn += struct.pack("2B", ptgBool, 1)
elif la1 and la1 in [FALSE_CONST]:
pass
self.match(FALSE_CONST)
self.rpn += struct.pack("2B", ptgBool, 0)
elif la1 and la1 in [STR_CONST]:
pass
str_tok = self.LT(1)
self.match(STR_CONST)
self.rpn += struct.pack("B", ptgStr) + upack1(str_tok.text[1:-1].replace("\"\"", "\""))
elif la1 and la1 in [NUM_CONST]:
pass
num_tok = self.LT(1)
self.match(NUM_CONST)
self.rpn += struct.pack("<Bd", ptgNum, float(num_tok.text))
elif la1 and la1 in [FUNC_IF]:
pass
self.match(FUNC_IF)
self.match(LP)
self.expr("V")
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [SEMICOLON]:
pass
self.match(SEMICOLON)
elif la1 and la1 in [COMMA]:
pass
self.match(COMMA)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.rpn += struct.pack("<BBH", ptgAttr, 0x02, 0) # tAttrIf
pos0 = len(self.rpn) - 2
self.expr(arg_type)
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [SEMICOLON]:
pass
self.match(SEMICOLON)
elif la1 and la1 in [COMMA]:
pass
self.match(COMMA)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.rpn += struct.pack("<BBH", ptgAttr, 0x08, 0) # tAttrSkip
pos1 = len(self.rpn) - 2
self.rpn = self.rpn[:pos0] + struct.pack("<H", pos1-pos0) + self.rpn[pos0+2:]
self.expr(arg_type)
self.match(RP)
self.rpn += struct.pack("<BBH", ptgAttr, 0x08, 3) # tAttrSkip
self.rpn += struct.pack("<BBH", ptgFuncVarR, 3, 1) # 3 = nargs, 1 = IF func
pos2 = len(self.rpn)
self.rpn = self.rpn[:pos1] + struct.pack("<H", pos2-(pos1+2)-1) + self.rpn[pos1+2:]
elif la1 and la1 in [FUNC_CHOOSE]:
pass
self.match(FUNC_CHOOSE)
arg_type = b"R"
rpn_chunks = []
self.match(LP)
self.expr("V")
rpn_start = len(self.rpn)
ref_markers = [len(self.sheet_references)]
while True:
if (self.LA(1)==COMMA or self.LA(1)==SEMICOLON):
pass
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [SEMICOLON]:
pass
self.match(SEMICOLON)
elif la1 and la1 in [COMMA]:
pass
self.match(COMMA)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
mark = len(self.rpn)
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [TRUE_CONST,FALSE_CONST,STR_CONST,NUM_CONST,INT_CONST,FUNC_IF,FUNC_CHOOSE,NAME,QUOTENAME,SUB,LP,REF2D]:
pass
self.expr(arg_type)
elif la1 and la1 in [RP,COMMA,SEMICOLON]:
pass
self.rpn += struct.pack("B", ptgMissArg)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
rpn_chunks.append(self.rpn[mark:])
ref_markers.append(len(self.sheet_references))
else:
break
self.match(RP)
self.rpn = self.rpn[:rpn_start]
nc = len(rpn_chunks)
chunklens = [len(chunk) for chunk in rpn_chunks]
skiplens = [0] * nc
skiplens[-1] = 3
for ic in range(nc-1, 0, -1):
skiplens[ic-1] = skiplens[ic] + chunklens[ic] + 4
jump_pos = [2 * nc + 2]
for ic in range(nc):
jump_pos.append(jump_pos[-1] + chunklens[ic] + 4)
chunk_shift = 2 * nc + 6 # size of tAttrChoose
for ic in range(nc):
for refx in range(ref_markers[ic], ref_markers[ic+1]):
ref = self.sheet_references[refx]
self.sheet_references[refx] = (ref[0], ref[1], ref[2] + chunk_shift)
chunk_shift += 4 # size of tAttrSkip
choose_rpn = []
choose_rpn.append(struct.pack("<BBH", ptgAttr, 0x04, nc)) # 0x04 is tAttrChoose
choose_rpn.append(struct.pack("<%dH" % (nc+1), *jump_pos))
for ic in range(nc):
choose_rpn.append(rpn_chunks[ic])
choose_rpn.append(struct.pack("<BBH", ptgAttr, 0x08, skiplens[ic])) # 0x08 is tAttrSkip
choose_rpn.append(struct.pack("<BBH", ptgFuncVarV, nc+1, 100)) # 100 is CHOOSE fn
self.rpn += b"".join(choose_rpn)
elif la1 and la1 in [LP]:
pass
self.match(LP)
self.expr(arg_type)
self.match(RP)
self.rpn += struct.pack("B", ptgParen)
else:
if (self.LA(1)==INT_CONST) and (_tokenSet_0.member(self.LA(2))):
pass
int_tok = self.LT(1)
self.match(INT_CONST)
# print "**int_const", int_tok.text
int_value = int(int_tok.text)
if int_value <= 65535:
self.rpn += struct.pack("<BH", ptgInt, int_value)
else:
self.rpn += struct.pack("<Bd", ptgNum, float(int_value))
elif (self.LA(1)==REF2D) and (_tokenSet_0.member(self.LA(2))):
pass
ref2d_tok = self.LT(1)
self.match(REF2D)
# print "**ref2d %s %s" % (ref2d_tok.text, arg_type)
r, c = Utils.cell_to_packed_rowcol(ref2d_tok.text)
ptg = ptgRefR + _RVAdeltaRef[arg_type]
self.rpn += struct.pack("<B2H", ptg, r, c)
elif (self.LA(1)==REF2D) and (self.LA(2)==COLON):
pass
ref2d1_tok = self.LT(1)
self.match(REF2D)
self.match(COLON)
ref2d2_tok = self.LT(1)
self.match(REF2D)
r1, c1 = Utils.cell_to_packed_rowcol(ref2d1_tok.text)
r2, c2 = Utils.cell_to_packed_rowcol(ref2d2_tok.text)
ptg = ptgAreaR + _RVAdeltaArea[arg_type]
self.rpn += struct.pack("<B4H", ptg, r1, r2, c1, c2)
elif (self.LA(1)==INT_CONST or self.LA(1)==NAME or self.LA(1)==QUOTENAME) and (self.LA(2)==COLON or self.LA(2)==BANG):
pass
sheet1=self.sheet()
sheet2 = sheet1
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [COLON]:
pass
self.match(COLON)
sheet2=self.sheet()
elif la1 and la1 in [BANG]:
pass
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.match(BANG)
ref3d_ref2d = self.LT(1)
self.match(REF2D)
ptg = ptgRef3dR + _RVAdeltaRef[arg_type]
rpn_ref2d = b""
r1, c1 = Utils.cell_to_packed_rowcol(ref3d_ref2d.text)
rpn_ref2d = struct.pack("<3H", 0x0000, r1, c1)
la1 = self.LA(1)
if False:
pass
elif la1 and la1 in [COLON]:
pass
self.match(COLON)
ref3d_ref2d2 = self.LT(1)
self.match(REF2D)
ptg = ptgArea3dR + _RVAdeltaArea[arg_type]
r2, c2 = Utils.cell_to_packed_rowcol(ref3d_ref2d2.text)
rpn_ref2d = struct.pack("<5H", 0x0000, r1, r2, c1, c2)
elif la1 and la1 in [EOF,EQ,NE,GT,LT,GE,LE,ADD,SUB,MUL,DIV,POWER,PERCENT,RP,COMMA,SEMICOLON,CONCAT]:
pass
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
self.rpn += struct.pack("<B", ptg)
self.sheet_references.append((sheet1, sheet2, len(self.rpn)))
self.rpn += rpn_ref2d
elif (self.LA(1)==NAME) and (_tokenSet_0.member(self.LA(2))):
name_tok = self.LT(1)
self.match(NAME)
raise Exception("[formula] found unexpected NAME token (%r)" % name_tok.txt)
# #### TODO: handle references to defined names here
elif (self.LA(1)==NAME) and (self.LA(2)==LP):
func_tok = self.LT(1)
self.match(NAME)
func_toku = func_tok.text.upper()
if func_toku in all_funcs_by_name:
(opcode,
min_argc,
max_argc,
func_type,
arg_type_str) = all_funcs_by_name[func_toku]
arg_type_list = list(arg_type_str)
else:
raise Exception("[formula] unknown function (%s)" % func_tok.text)
# print "**func_tok1 %s %s" % (func_toku, func_type)
xcall = opcode < 0
if xcall:
# The name of the add-in function is passed as the 1st arg
# of the hidden XCALL function
self.xcall_references.append((func_toku, len(self.rpn) + 1))
self.rpn += struct.pack("<BHHH",
ptgNameXR,
0xadde, # ##PATCHME## index to REF entry in EXTERNSHEET record
0xefbe, # ##PATCHME## one-based index to EXTERNNAME record
0x0000) # unused
self.match(LP)
arg_count=self.expr_list(arg_type_list, min_argc, max_argc)
self.match(RP)
if arg_count > max_argc or arg_count < min_argc:
raise Exception("%d parameters for function: %s" % (arg_count, func_tok.text))
if xcall:
func_ptg = ptgFuncVarR + _RVAdelta[func_type]
self.rpn += struct.pack("<2BH", func_ptg, arg_count + 1, 255) # 255 is magic XCALL function
elif min_argc == max_argc:
func_ptg = ptgFuncR + _RVAdelta[func_type]
self.rpn += struct.pack("<BH", func_ptg, opcode)
elif arg_count == 1 and func_tok.text.upper() == "SUM":
self.rpn += struct.pack("<BBH", ptgAttr, 0x10, 0) # tAttrSum
else:
func_ptg = ptgFuncVarR + _RVAdelta[func_type]
self.rpn += struct.pack("<2BH", func_ptg, arg_count, opcode)
else:
raise antlr.NoViableAltException(self.LT(1), self.getFilename())
    def sheet(self):
        """Parse one sheet specifier and return its name as a string.

        Accepts a bare NAME, an integer (purely numeric sheet name), or a
        quoted name with doubled apostrophes collapsed.
        """
        ref = None
        sheet_ref_name = None
        sheet_ref_int = None
        sheet_ref_quote = None
        la1 = self.LA(1)
        if False:
            pass
        elif la1 and la1 in [NAME]:
            sheet_ref_name = self.LT(1)
            self.match(NAME)
            ref = sheet_ref_name.text
        elif la1 and la1 in [INT_CONST]:
            sheet_ref_int = self.LT(1)
            self.match(INT_CONST)
            ref = sheet_ref_int.text
        elif la1 and la1 in [QUOTENAME]:
            sheet_ref_quote = self.LT(1)
            self.match(QUOTENAME)
            # Strip the surrounding quotes; '' inside means a literal '.
            ref = sheet_ref_quote.text[1:-1].replace("''", "'")
        else:
            raise antlr.NoViableAltException(self.LT(1), self.getFilename())
        return ref
    def expr_list(self,
                  arg_type_list, min_argc, max_argc
                  ):
        """Parse a function's argument list and return the argument count.

        ``arg_type_list`` gives the expected R/V/A class per position; a
        trailing "+" means "repeat the previous class".  Empty slots between
        separators emit ptgMissArg.
        """
        arg_cnt = None
        arg_cnt = 0
        arg_type = arg_type_list[arg_cnt]
        # print "**expr_list1[%d] req=%s" % (arg_cnt, arg_type)
        la1 = self.LA(1)
        if False:
            pass
        elif la1 and la1 in [TRUE_CONST,FALSE_CONST,STR_CONST,NUM_CONST,INT_CONST,FUNC_IF,FUNC_CHOOSE,NAME,QUOTENAME,SUB,LP,REF2D]:
            pass
            self.expr(arg_type)
            arg_cnt += 1
            while True:
                if (self.LA(1)==COMMA or self.LA(1)==SEMICOLON):
                    pass
                    if arg_cnt < len(arg_type_list):
                        arg_type = arg_type_list[arg_cnt]
                    else:
                        arg_type = arg_type_list[-1]
                    if arg_type == "+":
                        arg_type = arg_type_list[-2]
                    # print "**expr_list2[%d] req=%s" % (arg_cnt, arg_type)
                    la1 = self.LA(1)
                    if False:
                        pass
                    elif la1 and la1 in [SEMICOLON]:
                        pass
                        self.match(SEMICOLON)
                    elif la1 and la1 in [COMMA]:
                        pass
                        self.match(COMMA)
                    else:
                        raise antlr.NoViableAltException(self.LT(1), self.getFilename())
                    la1 = self.LA(1)
                    if False:
                        pass
                    elif la1 and la1 in [TRUE_CONST,FALSE_CONST,STR_CONST,NUM_CONST,INT_CONST,FUNC_IF,FUNC_CHOOSE,NAME,QUOTENAME,SUB,LP,REF2D]:
                        pass
                        self.expr(arg_type)
                    elif la1 and la1 in [RP,COMMA,SEMICOLON]:
                        pass
                        self.rpn += struct.pack("B", ptgMissArg)
                    else:
                        raise antlr.NoViableAltException(self.LT(1), self.getFilename())
                    arg_cnt += 1
                else:
                    break
        elif la1 and la1 in [RP]:
            # Empty argument list.
            pass
        else:
            raise antlr.NoViableAltException(self.LT(1), self.getFilename())
        return arg_cnt
# Display name per token type code, indexed by the constants defined above.
_tokenNames = [
    "<0>",
    "EOF",
    "<2>",
    "NULL_TREE_LOOKAHEAD",
    "TRUE_CONST",
    "FALSE_CONST",
    "STR_CONST",
    "NUM_CONST",
    "INT_CONST",
    "FUNC_IF",
    "FUNC_CHOOSE",
    "NAME",
    "QUOTENAME",
    "EQ",
    "NE",
    "GT",
    "LT",
    "GE",
    "LE",
    "ADD",
    "SUB",
    "MUL",
    "DIV",
    "POWER",
    "PERCENT",
    "LP",
    "RP",
    "LB",
    "RB",
    "COLON",
    "COMMA",
    "SEMICOLON",
    "REF2D",
    "REF2D_R1C1",
    "BANG",
    "CONCAT"
]
### generate bit set
def mk_tokenSet_0():
    """Return the raw bitset words for token set 0.

    Bit positions set in the first word: 1 (EOF), 13-24 (EQ..PERCENT),
    26 (RP), 30 (COMMA), 31 (SEMICOLON) and 35 (CONCAT) - i.e. the tokens
    that may legally follow an operand.
    """
    return [37681618946, 0]
_tokenSet_0 = antlr.BitSet(mk_tokenSet_0())
| mit |
geometalab/osmaxx | tests/conversion/converters/inside_worker_test/nonop_way_test.py | 2 | 7148 | from contextlib import closing
import pytest
import sqlalchemy
from sqlalchemy.sql.schema import Table as DbTable
from osmaxx.utils.frozendict import frozendict
from tests.conftest import TagCombination
from tests.conversion.converters.inside_worker_test.conftest import slow
from tests.conversion.converters.inside_worker_test.declarative_schema import osm_models
# Tag keys whose value determines a way's primary classification.
MAJOR_KEYS = frozenset({'highway', 'railway'})

# Expected nonop_l sub_type when a lifecycle tag carries no detail value.
DEFAULT_EXPECTED_FALLBACK_SUBTYPE_FOR_MAJOR_KEY = frozendict(
    highway='road',
    railway='railway'
)

# OSM tag combination -> expected osmaxx sub_type for that way.
CORRESPONDING_OSMAXX_WAY_TYPES_FOR_OSM_TAG_COMBINATIONS = frozendict(
    {
        TagCombination(highway='track'): 'track',
        TagCombination(highway='track', tracktype='grade3'): 'grade3',
        TagCombination(highway='footway'): 'footway',
        TagCombination(highway='secondary', junction='roundabout'): 'secondary',
        TagCombination(highway='some bogus type of road', junction='roundabout'): 'roundabout',
        TagCombination(railway='rail'): 'rail',
        TagCombination(railway='platform'): 'railway',
    },
)

# OSM lifecycle prefix -> expected one-letter osmaxx status code.
CORRESPONDING_OSMAXX_STATUSES_FOR_OSM_STATUSES = frozendict(
    proposed='P',
    planned='P',
    construction='C',
    disused='D',
    abandoned='A',
)
@slow
def test_osm_object_without_status_does_not_end_up_in_nonop(non_lifecycle_data_import, nonop_l, road_l, railway_l):
    """A way without a lifecycle status lands in road_l/railway_l, never nonop_l."""
    engine = non_lifecycle_data_import
    with closing(engine.execute(sqlalchemy.select('*').select_from(road_l))) as road_result:
        with closing(engine.execute(sqlalchemy.select('*').select_from(railway_l))) as railway_result:
            # Exactly one row, in whichever of the two layers matches the tags.
            assert road_result.rowcount + railway_result.rowcount == 1
    with closing(engine.execute(sqlalchemy.select('*').select_from(nonop_l))) as nonop_result:
        assert nonop_result.rowcount == 0
@slow
def test_osm_object_with_status_ends_up_in_nonop_with_correct_attribute_values(
        lifecycle_data_import,
        nonop_l, road_l, railway_l,
        expected_osmaxx_status, osm_status, non_lifecycle_osm_tags, major_tag_key, expected_nonop_subtype,
):
    """A way with a lifecycle status lands only in nonop_l, with status, hstore tags and sub_type set."""
    engine = lifecycle_data_import
    with closing(engine.execute(sqlalchemy.select('*').select_from(road_l))) as road_result:
        assert road_result.rowcount == 0
    with closing(engine.execute(sqlalchemy.select('*').select_from(railway_l))) as railway_result:
        assert railway_result.rowcount == 0
    with closing(engine.execute(sqlalchemy.select('*').select_from(nonop_l))) as result:
        assert result.rowcount == 1
        row = result.fetchone()
        assert row['status'] == expected_osmaxx_status
        # tags column uses PostgreSQL hstore text syntax: "key"=>"value".
        assert row['tags'] == '"{key}"=>"{value}"'.format(key=osm_status, value=non_lifecycle_osm_tags[major_tag_key])
        assert row['sub_type'] == expected_nonop_subtype
@slow
def test_osm_object_with_status_without_details_ends_up_in_nonop_with_correct_status(
        incomplete_lifecycle_data_import, nonop_l, road_l, railway_l, expected_osmaxx_status,
        expected_fallback_subtype):
    """A lifecycle status with no detail value still reaches nonop_l, with NULL tags and a fallback sub_type."""
    engine = incomplete_lifecycle_data_import
    with closing(engine.execute(sqlalchemy.select('*').select_from(road_l))) as road_result:
        assert road_result.rowcount == 0
    with closing(engine.execute(sqlalchemy.select('*').select_from(railway_l))) as railway_result:
        assert railway_result.rowcount == 0
    with closing(engine.execute(sqlalchemy.select('*').select_from(nonop_l))) as result:
        assert result.rowcount == 1
        row = result.fetchone()
        assert row['status'] == expected_osmaxx_status
        assert row['tags'] is None
        assert row['sub_type'] == expected_fallback_subtype
@pytest.fixture
def nonop_l():
    # Non-operational (lifecycle) ways layer of the osmaxx view schema.
    return DbTable('nonop_l', osm_models.metadata, schema='view_osmaxx')


@pytest.fixture
def road_l():
    # Operational roads layer.
    return DbTable('road_l', osm_models.metadata, schema='view_osmaxx')


@pytest.fixture
def railway_l():
    # Operational railways layer.
    return DbTable('railway_l', osm_models.metadata, schema='view_osmaxx')
@pytest.fixture
def expected_fallback_subtype(major_tag_key, incomplete_lifecycle_osm_tags):
    # Roundabouts keep their sub_type even when the lifecycle tag has no value.
    # NOTE(review): .pop mutates the incomplete_lifecycle_osm_tags fixture
    # dict in place, so this depends on fixture instantiation order relative
    # to incomplete_lifecycle_data — confirm this is intentional.
    if major_tag_key == 'highway' and incomplete_lifecycle_osm_tags.pop('junction', None) == 'roundabout':
        return 'roundabout'
    return DEFAULT_EXPECTED_FALLBACK_SUBTYPE_FOR_MAJOR_KEY[major_tag_key]
@pytest.yield_fixture
def lifecycle_data_import(lifecycle_data, data_import):
    # DB engine with lifecycle-tagged data (full detail) imported.
    with data_import(lifecycle_data) as engine:
        yield engine


@pytest.yield_fixture
def incomplete_lifecycle_data_import(incomplete_lifecycle_data, data_import):
    # DB engine with lifecycle-tagged data lacking the detail value.
    with data_import(incomplete_lifecycle_data) as engine:
        yield engine


@pytest.yield_fixture
def non_lifecycle_data_import(non_lifecycle_data, data_import):
    # DB engine with plain (no lifecycle status) data imported.
    with data_import(non_lifecycle_data) as engine:
        yield engine
@pytest.fixture
def lifecycle_data(lifecycle_osm_tags):
    # One osm_line row carrying the lifecycle-tagged attributes.
    return {osm_models.t_osm_line: lifecycle_osm_tags}


@pytest.fixture
def incomplete_lifecycle_data(incomplete_lifecycle_osm_tags):
    return {osm_models.t_osm_line: incomplete_lifecycle_osm_tags}


@pytest.fixture
def non_lifecycle_data(non_lifecycle_osm_tags):
    return {osm_models.t_osm_line: non_lifecycle_osm_tags}
@pytest.fixture
def lifecycle_osm_tags(non_lifecycle_osm_tags, osm_status, major_tag_key):
    # Turn e.g. highway=track into highway=<status> plus
    # tags={'<status>': 'track'}, i.e. the lifecycle-tagging convention
    # with the original value preserved in the hstore column.
    osm_tags = dict(non_lifecycle_osm_tags)
    major_tag_value = osm_tags.pop(major_tag_key)
    osm_tags.update({major_tag_key: osm_status, 'tags': {osm_status: major_tag_value}})
    assert len(osm_tags) == len(non_lifecycle_osm_tags) + 1
    return osm_tags
@pytest.fixture
def incomplete_lifecycle_osm_tags(non_lifecycle_osm_tags, osm_status, major_tag_key):
    # Same as lifecycle_osm_tags but WITHOUT preserving the original value:
    # the major tag is overwritten with the bare status.
    osm_tags = dict(non_lifecycle_osm_tags)
    osm_tags.update({major_tag_key: osm_status})
    assert len(osm_tags) == len(non_lifecycle_osm_tags)
    return osm_tags
@pytest.fixture
def non_lifecycle_osm_tags(non_lifecycle_osm_tags_and_expected_nonop_subtype):
    # First element of the parametrized (tags, subtype) pair.
    osm_tags, _ = non_lifecycle_osm_tags_and_expected_nonop_subtype
    return osm_tags


@pytest.fixture
def major_tag_key(non_lifecycle_osm_tags):
    # The single major key ('highway' or 'railway') present in the tags.
    major_keys = MAJOR_KEYS.intersection(non_lifecycle_osm_tags)
    assert len(major_keys) == 1
    return next(iter(major_keys))


@pytest.fixture
def expected_nonop_subtype(non_lifecycle_osm_tags_and_expected_nonop_subtype):
    _, subtype = non_lifecycle_osm_tags_and_expected_nonop_subtype
    return subtype


@pytest.fixture
def osm_status(osm_status_and_expected_osmaxx_status):
    # OSM lifecycle prefix, e.g. 'proposed'.
    status, _ = osm_status_and_expected_osmaxx_status
    return status


@pytest.fixture
def expected_osmaxx_status(osm_status_and_expected_osmaxx_status):
    # One-letter osmaxx status code, e.g. 'P'.
    _, osmaxx_status = osm_status_and_expected_osmaxx_status
    return osmaxx_status
# Parametrize over every (tag combination, expected sub_type) pair.
@pytest.fixture(
    params=CORRESPONDING_OSMAXX_WAY_TYPES_FOR_OSM_TAG_COMBINATIONS.items(),
    ids=[str(tag_combination) for tag_combination in CORRESPONDING_OSMAXX_WAY_TYPES_FOR_OSM_TAG_COMBINATIONS.keys()],
)
def non_lifecycle_osm_tags_and_expected_nonop_subtype(request):
    return request.param


# Parametrize over every (OSM lifecycle prefix, osmaxx status code) pair.
@pytest.fixture(
    params=CORRESPONDING_OSMAXX_STATUSES_FOR_OSM_STATUSES.items(),
    ids=list(CORRESPONDING_OSMAXX_STATUSES_FOR_OSM_STATUSES.keys()),
)
def osm_status_and_expected_osmaxx_status(request):
    return request.param
| mit |
guewen/connector | connector/tests/test_mapper.py | 1 | 20300 | # -*- coding: utf-8 -*-
import unittest2
import mock
import openerp.tests.common as common
from openerp.addons.connector.unit.mapper import (
Mapper,
ImportMapper,
ImportMapChild,
MappingDefinition,
changed_by,
only_create,
convert,
m2o_to_backend,
backend_to_m2o,
none,
MapOptions,
mapping)
from openerp.addons.connector.exception import NoConnectorUnitError
from openerp.addons.connector.backend import Backend
from openerp.addons.connector.connector import Environment
from openerp.addons.connector.session import ConnectorSession
class test_mapper(unittest2.TestCase):
    """ Test Mapper """

    def test_mapping_decorator(self):
        # Methods decorated with @mapping (plus @changed_by / @only_create)
        # must be registered in the class's _map_methods registry.
        class KifKrokerMapper(Mapper):
            _model_name = 'res.users'

            @changed_by('name', 'city')
            @mapping
            @only_create
            def name(self):
                pass

            @changed_by('email')
            @mapping
            def email(self):
                pass

            @changed_by('street')
            @mapping
            def street(self):
                pass

        self.maxDiff = None
        name_def = MappingDefinition(changed_by=set(('name', 'city')),
                                     only_create=True)
        email_def = MappingDefinition(changed_by=set(('email',)),
                                      only_create=False)
        street_def = MappingDefinition(changed_by=set(('street',)),
                                       only_create=False)
        self.assertEqual(KifKrokerMapper._map_methods,
                         {'name': name_def,
                          'email': email_def,
                          'street': street_def,
                          })

    def test_mapping_decorator_cross_classes(self):
        """ Mappings should not propagate to other classes"""
        class MomMapper(Mapper):
            _model_name = 'res.users'

            @changed_by('name', 'city')
            @mapping
            def name(self):
                pass

        class ZappMapper(Mapper):
            _model_name = 'res.users'

            @changed_by('email')
            @only_create
            @mapping
            def email(self):
                pass

        mom_def = MappingDefinition(changed_by=set(('name', 'city')),
                                    only_create=False)
        zapp_def = MappingDefinition(changed_by=set(('email',)),
                                     only_create=True)
        self.assertEqual(MomMapper._map_methods,
                         {'name': mom_def})
        self.assertEqual(ZappMapper._map_methods,
                         {'email': zapp_def})

    def test_mapping_decorator_cumul(self):
        """ Mappings should cumulate the ``super`` mappings
        and the local mappings."""
        class FryMapper(Mapper):
            _model_name = 'res.users'

            @changed_by('name', 'city')
            @mapping
            def name(self):
                pass

        class FarnsworthMapper(FryMapper):
            _model_name = 'res.users'

            @changed_by('email')
            @mapping
            def email(self):
                pass

        name_def = MappingDefinition(changed_by=set(('name', 'city')),
                                     only_create=False)
        email_def = MappingDefinition(changed_by=set(('email',)),
                                      only_create=False)
        self.assertEqual(FarnsworthMapper._map_methods,
                         {'name': name_def,
                          'email': email_def})

    def test_mapping_decorator_cumul_changed_by(self):
        """ Mappings should cumulate the changed_by fields of the
        ``super`` mappings and the local mappings """
        class FryMapper(Mapper):
            _model_name = 'res.users'

            @changed_by('name', 'city')
            @mapping
            def name(self):
                pass

        class FarnsworthMapper(FryMapper):
            _model_name = 'res.users'

            # Overrides name(): its changed_by set merges with the parent's.
            @changed_by('email')
            @mapping
            def name(self):
                pass

        name_def = MappingDefinition(changed_by=set(('name', 'city', 'email')),
                                     only_create=False)
        self.assertEqual(FarnsworthMapper._map_methods,
                         {'name': name_def})
    def test_mapping_record(self):
        """ Map a record and check the result """
        class MyMapper(ImportMapper):

            direct = [('name', 'out_name')]

            @mapping
            def street(self, record):
                return {'out_street': record['street'].upper()}

        env = mock.MagicMock()
        record = {'name': 'Guewen',
                  'street': 'street'}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        expected = {'out_name': 'Guewen',
                    'out_street': 'STREET'}
        self.assertEqual(map_record.values(), expected)
        self.assertEqual(map_record.values(for_create=True), expected)

    def test_mapping_record_on_create(self):
        """ Map a record and check the result for creation of record """
        class MyMapper(ImportMapper):

            direct = [('name', 'out_name')]

            @mapping
            def street(self, record):
                return {'out_street': record['street'].upper()}

            # Only included in values(for_create=True).
            @only_create
            @mapping
            def city(self, record):
                return {'out_city': 'city'}

        env = mock.MagicMock()
        record = {'name': 'Guewen',
                  'street': 'street'}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        expected = {'out_name': 'Guewen',
                    'out_street': 'STREET'}
        self.assertEqual(map_record.values(), expected)
        expected = {'out_name': 'Guewen',
                    'out_street': 'STREET',
                    'out_city': 'city'}
        self.assertEqual(map_record.values(for_create=True), expected)

    def test_mapping_update(self):
        """ Force values on a map record """
        class MyMapper(ImportMapper):

            direct = [('name', 'out_name')]

            @mapping
            def street(self, record):
                return {'out_street': record['street'].upper()}

            @only_create
            @mapping
            def city(self, record):
                return {'out_city': 'city'}

        env = mock.MagicMock()
        record = {'name': 'Guewen',
                  'street': 'street'}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        # update() merges extra values and overrides mapped ones.
        map_record.update({'test': 1}, out_city='forced')
        expected = {'out_name': 'Guewen',
                    'out_street': 'STREET',
                    'out_city': 'forced',
                    'test': 1}
        self.assertEqual(map_record.values(), expected)
        expected = {'out_name': 'Guewen',
                    'out_street': 'STREET',
                    'out_city': 'forced',
                    'test': 1}
        self.assertEqual(map_record.values(for_create=True), expected)

    def test_finalize(self):
        """ Inherit finalize to modify values """
        class MyMapper(ImportMapper):

            direct = [('name', 'out_name')]

            def finalize(self, record, values):
                # Post-process the mapped values before they are returned.
                result = super(MyMapper, self).finalize(record, values)
                result['test'] = 'abc'
                return result

        env = mock.MagicMock()
        record = {'name': 'Guewen',
                  'street': 'street'}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        expected = {'out_name': 'Guewen',
                    'test': 'abc'}
        self.assertEqual(map_record.values(), expected)
        expected = {'out_name': 'Guewen',
                    'test': 'abc'}
        self.assertEqual(map_record.values(for_create=True), expected)
    def test_some_fields(self):
        """ Map only a selection of fields """
        class MyMapper(ImportMapper):

            direct = [('name', 'out_name'),
                      ('street', 'out_street'),
                      ]

            @changed_by('country')
            @mapping
            def country(self, record):
                return {'country': 'country'}

        env = mock.MagicMock()
        record = {'name': 'Guewen',
                  'street': 'street',
                  'country': 'country'}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        # 'street' is not in the requested fields, so out_street is omitted.
        expected = {'out_name': 'Guewen',
                    'country': 'country'}
        self.assertEqual(map_record.values(fields=['name', 'country']),
                         expected)
        expected = {'out_name': 'Guewen',
                    'country': 'country'}
        self.assertEqual(map_record.values(for_create=True,
                                           fields=['name', 'country']),
                         expected)

    def test_mapping_modifier(self):
        """ Map a direct record with a modifier function """
        def do_nothing(field):
            # A minimal modifier: returns the source value unchanged.
            def transform(self, record, to_attr):
                return record[field]
            return transform

        class MyMapper(ImportMapper):
            direct = [(do_nothing('name'), 'out_name')]

        env = mock.MagicMock()
        record = {'name': 'Guewen'}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        expected = {'out_name': 'Guewen'}
        self.assertEqual(map_record.values(), expected)
        self.assertEqual(map_record.values(for_create=True), expected)

    def test_mapping_convert(self):
        """ Map a direct record with the convert modifier function """
        class MyMapper(ImportMapper):
            # convert() casts the source value ('300') to int.
            direct = [(convert('name', int), 'out_name')]

        env = mock.MagicMock()
        record = {'name': '300'}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        expected = {'out_name': 300}
        self.assertEqual(map_record.values(), expected)
        self.assertEqual(map_record.values(for_create=True), expected)

    def test_mapping_modifier_none(self):
        """ Pipeline of modifiers """
        class MyMapper(ImportMapper):
            # none() converts False-ish source values to None, keeps True.
            direct = [(none('in_f'), 'out_f'),
                      (none('in_t'), 'out_t')]

        env = mock.MagicMock()
        record = {'in_f': False, 'in_t': True}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        expected = {'out_f': None, 'out_t': True}
        self.assertEqual(map_record.values(), expected)
        self.assertEqual(map_record.values(for_create=True), expected)

    def test_mapping_modifier_pipeline(self):
        """ Pipeline of modifiers """
        class MyMapper(ImportMapper):
            # Modifiers compose: convert to bool first, then 0 -> None.
            direct = [(none(convert('in_f', bool)), 'out_f'),
                      (none(convert('in_t', bool)), 'out_t')]

        env = mock.MagicMock()
        record = {'in_f': 0, 'in_t': 1}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        expected = {'out_f': None, 'out_t': True}
        self.assertEqual(map_record.values(), expected)
        self.assertEqual(map_record.values(for_create=True), expected)
    def test_mapping_custom_option(self):
        """ Usage of custom options in mappings """
        class MyMapper(ImportMapper):
            @mapping
            def any(self, record):
                # Keyword args passed to values() surface on self.options.
                if self.options.custom:
                    res = True
                else:
                    res = False
                return {'res': res}

        env = mock.MagicMock()
        record = {}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        expected = {'res': True}
        self.assertEqual(map_record.values(custom=True), expected)

    def test_mapping_custom_option_not_defined(self):
        """ Usage of custom options not defined raise AttributeError """
        class MyMapper(ImportMapper):
            @mapping
            def any(self, record):
                # Undefined options read as None rather than raising.
                if self.options.custom is None:
                    res = True
                else:
                    res = False
                return {'res': res}

        env = mock.MagicMock()
        record = {}
        mapper = MyMapper(env)
        map_record = mapper.map_record(record)
        expected = {'res': True}
        self.assertEqual(map_record.values(), expected)

    def test_map_options(self):
        """ Test MapOptions """
        # MapOptions supports dict-style and attribute-style access,
        # and returns None for missing keys instead of raising.
        options = MapOptions({'xyz': 'abc'}, k=1)
        options.l = 2
        self.assertEqual(options['xyz'], 'abc')
        self.assertEqual(options['k'], 1)
        self.assertEqual(options['l'], 2)
        self.assertEqual(options.xyz, 'abc')
        self.assertEqual(options.k, 1)
        self.assertEqual(options.l, 2)
        self.assertEqual(options['undefined'], None)
        self.assertEqual(options.undefined, None)
class test_mapper_binding(common.TransactionCase):
    """ Test Mapper with Bindings"""

    def setUp(self):
        super(test_mapper_binding, self).setUp()
        self.session = ConnectorSession(self.cr, self.uid)
        self.Partner = self.registry('res.partner')
        # Mocked backend whose get_class() always yields the country binder.
        self.backend = mock.Mock(wraps=Backend('x', version='y'),
                                 name='backend')
        backend_record = mock.Mock()
        backend_record.get_backend.return_value = self.backend
        self.env = Environment(backend_record, self.session, 'res.partner')
        # Binder mock; return_value = itself so instantiation is a no-op.
        self.country_binder = mock.Mock(name='country_binder')
        self.country_binder.return_value = self.country_binder
        self.backend.get_class.return_value = self.country_binder
def test_mapping_m2o_to_backend(self):
""" Map a direct record with the m2o_to_backend modifier function """
class MyMapper(ImportMapper):
_model_name = 'res.partner'
direct = [(m2o_to_backend('country_id'), 'country')]
partner_id = self.ref('base.main_partner')
self.Partner.write(self.cr, self.uid, partner_id,
{'country_id': self.ref('base.ch')})
partner = self.Partner.browse(self.cr, self.uid, partner_id)
self.country_binder.to_backend.return_value = 10
mapper = MyMapper(self.env)
map_record = mapper.map_record(partner)
self.assertEqual(map_record.values(), {'country': 10})
self.country_binder.to_backend.assert_called_once_with(
partner.country_id.id, wrap=False)
def test_mapping_backend_to_m2o(self):
""" Map a direct record with the backend_to_m2o modifier function """
class MyMapper(ImportMapper):
_model_name = 'res.partner'
direct = [(backend_to_m2o('country'), 'country_id')]
record = {'country': 10}
self.country_binder.to_openerp.return_value = 44
mapper = MyMapper(self.env)
map_record = mapper.map_record(record)
self.assertEqual(map_record.values(), {'country_id': 44})
self.country_binder.to_openerp.assert_called_once_with(
10, unwrap=False)
def test_mapping_record_children_no_map_child(self):
""" Map a record with children, using default MapChild """
backend = Backend('backend', '42')
@backend
class LineMapper(ImportMapper):
_model_name = 'res.currency.rate'
direct = [('name', 'name')]
@mapping
def price(self, record):
return {'rate': record['rate'] * 2}
@only_create
@mapping
def discount(self, record):
return {'test': .5}
@backend
class ObjectMapper(ImportMapper):
_model_name = 'res.currency'
direct = [('name', 'name')]
children = [('lines', 'line_ids', 'res.currency.rate')]
backend_record = mock.Mock()
backend_record.get_backend.side_effect = lambda *a: backend
env = Environment(backend_record, self.session, 'res.currency')
record = {'name': 'SO1',
'lines': [{'name': '2013-11-07',
'rate': 10},
{'name': '2013-11-08',
'rate': 20}]}
mapper = ObjectMapper(env)
map_record = mapper.map_record(record)
expected = {'name': 'SO1',
'line_ids': [(0, 0, {'name': '2013-11-07',
'rate': 20}),
(0, 0, {'name': '2013-11-08',
'rate': 40})]
}
self.assertEqual(map_record.values(), expected)
expected = {'name': 'SO1',
'line_ids': [(0, 0, {'name': '2013-11-07',
'rate': 20,
'test': .5}),
(0, 0, {'name': '2013-11-08',
'rate': 40,
'test': .5})]
}
self.assertEqual(map_record.values(for_create=True), expected)
def test_mapping_record_children(self):
""" Map a record with children, using defined MapChild """
backend = Backend('backend', '42')
@backend
class LineMapper(ImportMapper):
_model_name = 'res.currency.rate'
direct = [('name', 'name')]
@mapping
def price(self, record):
return {'rate': record['rate'] * 2}
@only_create
@mapping
def discount(self, record):
return {'test': .5}
@backend
class SaleLineImportMapChild(ImportMapChild):
_model_name = 'res.currency.rate'
def format_items(self, items_values):
return [('ABC', values) for values in items_values]
@backend
class ObjectMapper(ImportMapper):
_model_name = 'res.currency'
direct = [('name', 'name')]
children = [('lines', 'line_ids', 'res.currency.rate')]
backend_record = mock.Mock()
backend_record.get_backend.side_effect = lambda *a: backend
env = Environment(backend_record, self.session, 'res.currency')
record = {'name': 'SO1',
'lines': [{'name': '2013-11-07',
'rate': 10},
{'name': '2013-11-08',
'rate': 20}]}
mapper = ObjectMapper(env)
map_record = mapper.map_record(record)
expected = {'name': 'SO1',
'line_ids': [('ABC', {'name': '2013-11-07',
'rate': 20}),
('ABC', {'name': '2013-11-08',
'rate': 40})]
}
self.assertEqual(map_record.values(), expected)
expected = {'name': 'SO1',
'line_ids': [('ABC', {'name': '2013-11-07',
'rate': 20,
'test': .5}),
('ABC', {'name': '2013-11-08',
'rate': 40,
'test': .5})]
}
self.assertEqual(map_record.values(for_create=True), expected)
def test_modifier_filter_field(self):
""" A direct mapping with a modifier must still be considered from the list of fields """
class MyMapper(ImportMapper):
direct = [('field', 'field2'),
('no_field', 'no_field2'),
(convert('name', int), 'out_name')]
env = mock.MagicMock()
record = {'name': '300', 'field': 'value', 'no_field': 'no_value'}
mapper = MyMapper(env)
map_record = mapper.map_record(record)
expected = {'out_name': 300, 'field2': 'value'}
self.assertEqual(map_record.values(fields=['field', 'name']), expected)
self.assertEqual(map_record.values(for_create=True,
fields=['field', 'name']), expected)
| agpl-3.0 |
austinhyde/ansible-modules-core | cloud/openstack/os_nova_flavor.py | 93 | 6844 | #!/usr/bin/python
# Copyright (c) 2015 Hewlett-Packard Development Company, L.P.
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
DOCUMENTATION = '''
---
module: os_nova_flavor
short_description: Manage OpenStack compute flavors
extends_documentation_fragment: openstack
version_added: "2.0"
author: "David Shrewsbury (@Shrews)"
description:
- Add or remove flavors from OpenStack.
options:
state:
description:
- Indicate desired state of the resource. When I(state) is 'present',
then I(ram), I(vcpus), and I(disk) are all required. There are no
default values for those parameters.
choices: ['present', 'absent']
required: false
default: present
name:
description:
- Flavor name.
required: true
ram:
description:
- Amount of memory, in MB.
required: false
default: null
vcpus:
description:
- Number of virtual CPUs.
required: false
default: null
disk:
description:
- Size of local disk, in GB.
required: false
default: null
ephemeral:
description:
- Ephemeral space size, in GB.
required: false
default: 0
swap:
description:
- Swap space size, in MB.
required: false
default: 0
rxtx_factor:
description:
- RX/TX factor.
required: false
default: 1.0
is_public:
description:
- Make flavor accessible to the public.
required: false
default: true
flavorid:
description:
- ID for the flavor. This is optional as a unique UUID will be
assigned if a value is not specified.
required: false
default: "auto"
requirements: ["shade"]
'''
EXAMPLES = '''
# Create 'tiny' flavor with 1024MB of RAM, 1 virtual CPU, and 10GB of
# local disk, and 10GB of ephemeral.
- os_nova_flavor:
cloud=mycloud
state=present
name=tiny
ram=1024
vcpus=1
disk=10
ephemeral=10
# Delete 'tiny' flavor
- os_nova_flavor:
cloud=mycloud
state=absent
name=tiny
'''
RETURN = '''
flavor:
description: Dictionary describing the flavor.
returned: On success when I(state) is 'present'
type: dictionary
contains:
id:
description: Flavor ID.
returned: success
type: string
sample: "515256b8-7027-4d73-aa54-4e30a4a4a339"
name:
description: Flavor name.
returned: success
type: string
sample: "tiny"
disk:
description: Size of local disk, in GB.
returned: success
type: int
sample: 10
ephemeral:
description: Ephemeral space size, in GB.
returned: success
type: int
sample: 10
ram:
description: Amount of memory, in MB.
returned: success
type: int
sample: 1024
swap:
description: Swap space size, in MB.
returned: success
type: int
sample: 100
vcpus:
description: Number of virtual CPUs.
returned: success
type: int
sample: 2
is_public:
description: Make flavor accessible to the public.
returned: success
type: bool
sample: true
'''
def _system_state_change(module, flavor):
state = module.params['state']
if state == 'present' and not flavor:
return True
if state == 'absent' and flavor:
return True
return False
def main():
    """Ansible entry point: create or delete a Nova flavor via shade.

    Exits through module.exit_json()/fail_json(); never returns normally.
    """
    # Build the argument spec on top of the shared OpenStack options
    argument_spec = openstack_full_argument_spec(
        state = dict(required=False, default='present',
                     choices=['absent', 'present']),
        name = dict(required=False),

        # required when state is 'present'
        ram = dict(required=False, type='int'),
        vcpus = dict(required=False, type='int'),
        disk = dict(required=False, type='int'),

        ephemeral = dict(required=False, default=0, type='int'),
        swap = dict(required=False, default=0, type='int'),
        rxtx_factor = dict(required=False, default=1.0, type='float'),
        is_public = dict(required=False, default=True, type='bool'),
        flavorid = dict(required=False, default="auto"),
    )

    module_kwargs = openstack_module_kwargs()
    module = AnsibleModule(
        argument_spec,
        supports_check_mode=True,
        # ram/vcpus/disk only make sense (and are mandatory) on creation
        required_if=[
            ('state', 'present', ['ram', 'vcpus', 'disk'])
        ],
        **module_kwargs)

    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')

    state = module.params['state']
    name = module.params['name']

    try:
        # Flavor management needs admin rights, hence operator_cloud
        cloud = shade.operator_cloud(**module.params)
        flavor = cloud.get_flavor(name)

        if module.check_mode:
            # Report whether a change WOULD happen, without doing it
            module.exit_json(changed=_system_state_change(module, flavor))

        if state == 'present':
            if not flavor:
                flavor = cloud.create_flavor(
                    name=name,
                    ram=module.params['ram'],
                    vcpus=module.params['vcpus'],
                    disk=module.params['disk'],
                    flavorid=module.params['flavorid'],
                    ephemeral=module.params['ephemeral'],
                    swap=module.params['swap'],
                    rxtx_factor=module.params['rxtx_factor'],
                    is_public=module.params['is_public']
                )
                module.exit_json(changed=True, flavor=flavor)
            # Flavor already exists: idempotent no-op
            module.exit_json(changed=False, flavor=flavor)

        elif state == 'absent':
            if flavor:
                cloud.delete_flavor(name)
                module.exit_json(changed=True)
            module.exit_json(changed=False)

    except shade.OpenStackCloudException as e:
        module.fail_json(msg=e.message)
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 |
vivianli32/TravelConnect | flask/lib/python3.4/site-packages/whoosh/matching/binary.py | 94 | 24452 | # Copyright 2010 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
from whoosh.matching import mcore
class BiMatcher(mcore.Matcher):
    """Abstract base for matchers that merge the postings of two
    sub-matchers, held as ``self.a`` and ``self.b``. Subclasses decide
    how the two posting streams are combined.
    """

    def __init__(self, a, b):
        super(BiMatcher, self).__init__()
        self.a = a
        self.b = b

    def reset(self):
        # Rewind both sides to their first posting
        self.a.reset()
        self.b.reset()

    def __repr__(self):
        return "%s(%r, %r)" % (self.__class__.__name__, self.a, self.b)

    def children(self):
        return [self.a, self.b]

    def copy(self):
        return self.__class__(self.a.copy(), self.b.copy())

    def depth(self):
        # One level deeper than the deeper of the two sub-trees
        return 1 + max(self.a.depth(), self.b.depth())

    def skip_to(self, id):
        if not self.is_active():
            raise mcore.ReadTooFar
        moved_a = self.a.skip_to(id)
        moved_b = self.b.skip_to(id)
        # True if either side actually moved
        return moved_a or moved_b

    def supports_block_quality(self):
        return (self.a.supports_block_quality()
                and self.b.supports_block_quality())

    def supports(self, astype):
        return self.a.supports(astype) and self.b.supports(astype)
class AdditiveBiMatcher(BiMatcher):
    """Base class for binary matchers where the scores of the sub-matchers are
    added together.
    """

    def max_quality(self):
        # Sum of the max qualities of whichever sides are still active
        q = 0.0
        if self.a.is_active():
            q += self.a.max_quality()
        if self.b.is_active():
            q += self.b.max_quality()
        return q

    def block_quality(self):
        bq = 0.0
        if self.a.is_active():
            bq += self.a.block_quality()
        if self.b.is_active():
            bq += self.b.block_quality()
        return bq

    def weight(self):
        return (self.a.weight() + self.b.weight())

    def score(self):
        return (self.a.score() + self.b.score())

    # NOTE(review): both __eq__ and __lt__ are pure class-identity checks,
    # so for two instances of the same class __eq__ and __lt__ are both
    # True and __gt__ is always False -- this is not a consistent total
    # ordering. Confirm intended semantics before relying on comparisons.
    def __eq__(self, other):
        return self.__class__ is type(other)

    def __lt__(self, other):
        return type(other) is self.__class__

    def __ne__(self, other):
        return not self.__eq__(other)

    def __gt__(self, other):
        return not (self.__lt__(other) or self.__eq__(other))

    def __le__(self, other):
        return self.__eq__(other) or self.__lt__(other)

    def __ge__(self, other):
        return self.__eq__(other) or self.__gt__(other)
class UnionMatcher(AdditiveBiMatcher):
    """Matches the union (OR) of the postings in the two sub-matchers.
    """

    # Cache of the current document id; None means "not computed yet"
    # (invalidated whenever the matcher moves)
    _id = None

    def replace(self, minquality=0):
        a = self.a
        b = self.b
        a_active = a.is_active()
        b_active = b.is_active()
        # If neither sub-matcher on its own has a high enough max quality to
        # contribute, convert to an intersection matcher
        if minquality and a_active and b_active:
            a_max = a.max_quality()
            b_max = b.max_quality()
            if a_max < minquality and b_max < minquality:
                return IntersectionMatcher(a, b).replace(minquality)
            elif a_max < minquality:
                return AndMaybeMatcher(b, a)
            elif b_max < minquality:
                return AndMaybeMatcher(a, b)
        # If one or both of the sub-matchers are inactive, convert
        if not (a_active or b_active):
            return mcore.NullMatcher()
        elif not a_active:
            return b.replace(minquality)
        elif not b_active:
            return a.replace(minquality)
        a = a.replace(minquality - b.max_quality() if minquality else 0)
        b = b.replace(minquality - a.max_quality() if minquality else 0)
        # If one of the sub-matchers changed, return a new union
        if a is not self.a or b is not self.b:
            return self.__class__(a, b)
        else:
            self._id = None
            return self

    def is_active(self):
        return self.a.is_active() or self.b.is_active()

    def skip_to(self, id):
        self._id = None
        ra = rb = False
        if self.a.is_active():
            ra = self.a.skip_to(id)
        if self.b.is_active():
            rb = self.b.skip_to(id)
        return ra or rb

    def id(self):
        # Return the cached id if the matcher hasn't moved since the last call
        _id = self._id
        if _id is not None:
            return _id
        a = self.a
        b = self.b
        if not a.is_active():
            _id = b.id()
        elif not b.is_active():
            _id = a.id()
        else:
            # Union is positioned on the lower of the two current ids
            _id = min(a.id(), b.id())
        self._id = _id
        return _id

    # Using sets is faster in most cases, but could potentially use a lot of
    # memory. Comment out this method override to not use sets.
    #def all_ids(self):
    #    return iter(sorted(set(self.a.all_ids()) | set(self.b.all_ids())))

    def next(self):
        self._id = None
        a = self.a
        b = self.b
        a_active = a.is_active()
        b_active = b.is_active()
        # Shortcut when one matcher is inactive
        if not (a_active or b_active):
            raise mcore.ReadTooFar
        elif not a_active:
            return b.next()
        elif not b_active:
            return a.next()
        a_id = a.id()
        b_id = b.id()
        ar = br = None
        # After all that, here's the actual implementation
        # (advance whichever side(s) sit on the current minimum id)
        if a_id <= b_id:
            ar = a.next()
        if b_id <= a_id:
            br = b.next()
        return ar or br

    def spans(self):
        if not self.a.is_active():
            return self.b.spans()
        if not self.b.is_active():
            return self.a.spans()
        id_a = self.a.id()
        id_b = self.b.id()
        if id_a < id_b:
            return self.a.spans()
        elif id_b < id_a:
            return self.b.spans()
        else:
            # Both on the same document: merge and de-duplicate spans
            return sorted(set(self.a.spans()) | set(self.b.spans()))

    def weight(self):
        a = self.a
        b = self.b
        if not a.is_active():
            return b.weight()
        if not b.is_active():
            return a.weight()
        id_a = a.id()
        id_b = b.id()
        if id_a < id_b:
            return a.weight()
        elif id_b < id_a:
            return b.weight()
        else:
            return (a.weight() + b.weight())

    def score(self):
        a = self.a
        b = self.b
        if not a.is_active():
            return b.score()
        if not b.is_active():
            return a.score()
        id_a = a.id()
        id_b = b.id()
        if id_a < id_b:
            return a.score()
        elif id_b < id_a:
            return b.score()
        else:
            return (a.score() + b.score())

    def skip_to_quality(self, minquality):
        self._id = None
        a = self.a
        b = self.b
        if not (a.is_active() or b.is_active()):
            raise mcore.ReadTooFar
        # Short circuit if one matcher is inactive
        if not a.is_active():
            return b.skip_to_quality(minquality)
        elif not b.is_active():
            return a.skip_to_quality(minquality)
        skipped = 0
        aq = a.block_quality()
        bq = b.block_quality()
        while a.is_active() and b.is_active() and aq + bq <= minquality:
            # Repeatedly skip the side with the lower block quality until
            # the combined quality can exceed the minimum
            if aq < bq:
                skipped += a.skip_to_quality(minquality - bq)
                aq = a.block_quality()
            else:
                skipped += b.skip_to_quality(minquality - aq)
                bq = b.block_quality()
        return skipped
class DisjunctionMaxMatcher(UnionMatcher):
    """Matches the union (OR) of two sub-matchers. Where both sub-matchers
    match the same posting, returns the weight/score of the higher-scoring
    posting.
    """

    # TODO: this class inherits from AdditiveBiMatcher (through UnionMatcher)
    # but it does not add the scores of the sub-matchers together (it
    # overrides all methods that perform addition). Need to clean up the
    # inheritance.

    def __init__(self, a, b, tiebreak=0.0):
        super(DisjunctionMaxMatcher, self).__init__(a, b)
        self.tiebreak = tiebreak

    def copy(self):
        return self.__class__(self.a.copy(), self.b.copy(),
                              tiebreak=self.tiebreak)

    def replace(self, minquality=0):
        """Return an equivalent (possibly simpler) matcher, pruning
        sub-matchers that cannot reach *minquality*.
        """
        a = self.a
        b = self.b
        a_active = a.is_active()
        b_active = b.is_active()
        # DisMax takes the max of the sub-matcher qualities instead of adding
        # them, so we need special logic here
        if minquality and a_active and b_active:
            a_max = a.max_quality()
            b_max = b.max_quality()
            if a_max < minquality and b_max < minquality:
                # If neither sub-matcher has a high enough max quality to
                # contribute, return an inactive matcher
                return mcore.NullMatcher()
            elif b_max < minquality:
                # If the b matcher can't contribute, return a
                return a.replace(minquality)
            elif a_max < minquality:
                # If the a matcher can't contribute, return b
                return b.replace(minquality)
        if not (a_active or b_active):
            return mcore.NullMatcher()
        elif not a_active:
            return b.replace(minquality)
        elif not b_active:
            return a.replace(minquality)
        # We CAN pass the minquality down here, since we don't add the two
        # scores together
        a = a.replace(minquality)
        b = b.replace(minquality)
        a_active = a.is_active()
        b_active = b.is_active()
        # It's kind of tedious to check for inactive sub-matchers all over
        # again here after we replace them, but it's probably better than
        # returning a replacement with an inactive sub-matcher.
        # BUG FIX: this condition previously used ``and``, which collapsed
        # the whole matcher to NullMatcher as soon as EITHER replacement
        # became inactive -- silently dropping documents still matched by
        # the other side -- and made the two ``elif`` branches below
        # unreachable. DisMax is a union (is_active() is a OR b, inherited
        # from UnionMatcher), so NullMatcher is only correct when BOTH
        # sides are exhausted.
        if not (a_active or b_active):
            return mcore.NullMatcher()
        elif not a_active:
            return b
        elif not b_active:
            return a
        elif a is not self.a or b is not self.b:
            # If one of the sub-matchers changed, return a new DisMax
            return self.__class__(a, b)
        else:
            return self

    def score(self):
        if not self.a.is_active():
            return self.b.score()
        elif not self.b.is_active():
            return self.a.score()
        else:
            # Both active: take the higher of the two scores
            return max(self.a.score(), self.b.score())

    def max_quality(self):
        return max(self.a.max_quality(), self.b.max_quality())

    def block_quality(self):
        return max(self.a.block_quality(), self.b.block_quality())

    def skip_to_quality(self, minquality):
        a = self.a
        b = self.b

        # Short circuit if one matcher is inactive
        if not a.is_active():
            sk = b.skip_to_quality(minquality)
            return sk
        elif not b.is_active():
            return a.skip_to_quality(minquality)

        skipped = 0
        aq = a.block_quality()
        bq = b.block_quality()
        # Skip any side whose own block quality cannot beat the minimum
        # (max() semantics: only one side needs to exceed it)
        while a.is_active() and b.is_active() and max(aq, bq) <= minquality:
            if aq <= minquality:
                skipped += a.skip_to_quality(minquality)
                aq = a.block_quality()
            if bq <= minquality:
                skipped += b.skip_to_quality(minquality)
                bq = b.block_quality()
        return skipped
class IntersectionMatcher(AdditiveBiMatcher):
    """Matches the intersection (AND) of the postings in the two sub-matchers.
    """

    def __init__(self, a, b):
        super(IntersectionMatcher, self).__init__(a, b)
        self._find_first()

    def reset(self):
        self.a.reset()
        self.b.reset()
        self._find_first()

    def _find_first(self):
        # Position both sub-matchers on their first common document
        if (self.a.is_active()
            and self.b.is_active()
            and self.a.id() != self.b.id()):
            self._find_next()

    def replace(self, minquality=0):
        a = self.a
        b = self.b
        a_active = a.is_active()
        b_active = b.is_active()
        if not (a_active and b_active):
            # Intersection matcher requires that both sub-matchers be active
            return mcore.NullMatcher()
        if minquality:
            a_max = a.max_quality()
            b_max = b.max_quality()
            if a_max + b_max < minquality:
                # If the combined quality of the sub-matchers can't contribute,
                # return an inactive matcher
                return mcore.NullMatcher()
            # Require that the replacements be able to contribute results
            # higher than the minquality
            a_min = minquality - b_max
            b_min = minquality - a_max
        else:
            a_min = b_min = 0
        a = a.replace(a_min)
        b = b.replace(b_min)
        a_active = a.is_active()
        b_active = b.is_active()
        # NOTE(review): when exactly one replacement went inactive this
        # returns the surviving sub-matcher alone, which loosens strict
        # AND semantics (only reachable when minquality pruning replaced a
        # side away) -- confirm this relaxation is intended before relying
        # on it.
        if not (a_active or b_active):
            return mcore.NullMatcher()
        elif not a_active:
            return b
        elif not b_active:
            return a
        elif a is not self.a or b is not self.b:
            return self.__class__(a, b)
        else:
            return self

    def is_active(self):
        return self.a.is_active() and self.b.is_active()

    def _find_next(self):
        # Leapfrog the two sub-matchers until they land on the same id
        # (or one of them runs out)
        a = self.a
        b = self.b
        a_id = a.id()
        b_id = b.id()
        assert a_id != b_id
        r = False
        while a.is_active() and b.is_active() and a_id != b_id:
            if a_id < b_id:
                ra = a.skip_to(b_id)
                if not a.is_active():
                    return
                r = r or ra
                a_id = a.id()
            else:
                rb = b.skip_to(a_id)
                if not b.is_active():
                    return
                r = r or rb
                b_id = b.id()
        return r

    def id(self):
        return self.a.id()

    # Using sets is faster in some cases, but could potentially use a lot of
    # memory
    def all_ids(self):
        return iter(sorted(set(self.a.all_ids()) & set(self.b.all_ids())))

    def skip_to(self, id):
        if not self.is_active():
            raise mcore.ReadTooFar
        ra = self.a.skip_to(id)
        rb = self.b.skip_to(id)
        if self.is_active():
            rn = False
            if self.a.id() != self.b.id():
                rn = self._find_next()
            return ra or rb or rn

    def skip_to_quality(self, minquality):
        a = self.a
        b = self.b
        minquality = minquality
        skipped = 0
        aq = a.block_quality()
        bq = b.block_quality()
        while a.is_active() and b.is_active() and aq + bq <= minquality:
            if aq < bq:
                # If the block quality of A is less than B, skip A ahead until
                # it can contribute at least the balance of the required min
                # quality when added to B
                sk = a.skip_to_quality(minquality - bq)
                skipped += sk
                if not sk and a.is_active():
                    # The matcher couldn't skip ahead for some reason, so just
                    # advance and try again
                    a.next()
            else:
                # And vice-versa
                sk = b.skip_to_quality(minquality - aq)
                skipped += sk
                if not sk and b.is_active():
                    b.next()
            if not a.is_active() or not b.is_active():
                # One of the matchers is exhausted
                break
            if a.id() != b.id():
                # We want to always leave in a state where the matchers are at
                # the same document, so call _find_next() to sync them
                self._find_next()
            # Get the block qualities at the new matcher positions
            aq = a.block_quality()
            bq = b.block_quality()
        return skipped

    def next(self):
        if not self.is_active():
            raise mcore.ReadTooFar
        # We must assume that the ids are equal whenever next() is called (they
        # should have been made equal by _find_next), so advance them both
        ar = self.a.next()
        if self.is_active():
            nr = self._find_next()
            return ar or nr

    def spans(self):
        return sorted(set(self.a.spans()) | set(self.b.spans()))
class AndNotMatcher(BiMatcher):
    """Matches the postings in the first sub-matcher that are NOT present in
    the second sub-matcher.
    """

    def __init__(self, a, b):
        super(AndNotMatcher, self).__init__(a, b)
        self._find_first()

    def reset(self):
        self.a.reset()
        self.b.reset()
        self._find_first()

    def _find_first(self):
        # If the required matcher starts on a prohibited document, move past it
        if (self.a.is_active()
            and self.b.is_active()
            and self.a.id() == self.b.id()):
            self._find_next()

    def is_active(self):
        return self.a.is_active()

    def _find_next(self):
        # Advance the required ("pos") matcher past every document the
        # prohibited ("neg") matcher also matches
        pos = self.a
        neg = self.b
        if not neg.is_active():
            return
        pos_id = pos.id()
        r = False

        if neg.id() < pos_id:
            neg.skip_to(pos_id)

        while pos.is_active() and neg.is_active() and pos_id == neg.id():
            nr = pos.next()
            if not pos.is_active():
                break

            r = r or nr
            pos_id = pos.id()
            neg.skip_to(pos_id)

        return r

    def supports_block_quality(self):
        # Quality is taken solely from the required matcher
        return self.a.supports_block_quality()

    def replace(self, minquality=0):
        if not self.a.is_active():
            # The a matcher is required, so if it's inactive, return an
            # inactive matcher
            return mcore.NullMatcher()
        elif (minquality
              and self.a.max_quality() < minquality):
            # If the quality of the required matcher isn't high enough to
            # contribute, return an inactive matcher
            return mcore.NullMatcher()
        elif not self.b.is_active():
            # If the prohibited matcher is inactive, convert to just the
            # required matcher
            return self.a.replace(minquality)

        a = self.a.replace(minquality)
        b = self.b.replace()
        if a is not self.a or b is not self.b:
            # If one of the sub-matchers was replaced, return a new AndNot
            return self.__class__(a, b)
        else:
            return self

    def max_quality(self):
        return self.a.max_quality()

    def block_quality(self):
        return self.a.block_quality()

    def skip_to_quality(self, minquality):
        skipped = self.a.skip_to_quality(minquality)
        self._find_next()
        return skipped

    def id(self):
        return self.a.id()

    def next(self):
        if not self.a.is_active():
            raise mcore.ReadTooFar
        ar = self.a.next()
        nr = False
        if self.a.is_active() and self.b.is_active():
            nr = self._find_next()
        return ar or nr

    def skip_to(self, id):
        # NOTE: returns None (no "moved" flag) unlike other matchers' skip_to
        if not self.a.is_active():
            raise mcore.ReadTooFar
        if id < self.a.id():
            return

        self.a.skip_to(id)
        if self.b.is_active():
            self.b.skip_to(id)
            self._find_next()

    # Weight, score, and values all come from the required matcher only
    def weight(self):
        return self.a.weight()

    def score(self):
        return self.a.score()

    def supports(self, astype):
        return self.a.supports(astype)

    def value(self):
        return self.a.value()

    def value_as(self, astype):
        return self.a.value_as(astype)
class AndMaybeMatcher(AdditiveBiMatcher):
    """Matches postings in the first sub-matcher, and if the same posting is
    in the second sub-matcher, adds their scores.
    """

    def __init__(self, a, b):
        AdditiveBiMatcher.__init__(self, a, b)
        self._first_b()

    def reset(self):
        self.a.reset()
        self.b.reset()
        self._first_b()

    def _first_b(self):
        # Keep the optional matcher at or beyond the required matcher's id
        a = self.a
        b = self.b
        if a.is_active() and b.is_active() and a.id() != b.id():
            b.skip_to(a.id())

    def is_active(self):
        # Only the required matcher determines liveness
        return self.a.is_active()

    def id(self):
        return self.a.id()

    def next(self):
        if not self.a.is_active():
            raise mcore.ReadTooFar

        ar = self.a.next()
        br = False
        if self.a.is_active() and self.b.is_active():
            br = self.b.skip_to(self.a.id())
        return ar or br

    def skip_to(self, id):
        if not self.a.is_active():
            raise mcore.ReadTooFar

        ra = self.a.skip_to(id)
        rb = False
        if self.a.is_active() and self.b.is_active():
            rb = self.b.skip_to(id)
        return ra or rb

    def replace(self, minquality=0):
        a = self.a
        b = self.b
        a_active = a.is_active()
        b_active = b.is_active()

        if not a_active:
            return mcore.NullMatcher()
        elif minquality and b_active:
            if a.max_quality() + b.max_quality() < minquality:
                # If the combined max quality of the sub-matchers isn't high
                # enough to possibly contribute, return an inactive matcher
                return mcore.NullMatcher()
            elif a.max_quality() < minquality:
                # If the max quality of the main sub-matcher isn't high enough
                # to ever contribute without the optional sub- matcher, change
                # into an IntersectionMatcher
                return IntersectionMatcher(self.a, self.b)
        elif not b_active:
            return a.replace(minquality)

        new_a = a.replace(minquality - b.max_quality())
        new_b = b.replace(minquality - a.max_quality())
        if new_a is not a or new_b is not b:
            # If one of the sub-matchers changed, return a new AndMaybe
            return self.__class__(new_a, new_b)
        else:
            return self

    def skip_to_quality(self, minquality):
        a = self.a
        b = self.b
        minquality = minquality

        if not a.is_active():
            raise mcore.ReadTooFar
        if not b.is_active():
            return a.skip_to_quality(minquality)

        skipped = 0
        aq = a.block_quality()
        bq = b.block_quality()
        while a.is_active() and b.is_active() and aq + bq <= minquality:
            if aq < bq:
                skipped += a.skip_to_quality(minquality - bq)
                aq = a.block_quality()
            else:
                skipped += b.skip_to_quality(minquality - aq)
                bq = b.block_quality()

        return skipped

    def weight(self):
        # NOTE(review): unlike score() below, this calls self.b.id() without
        # first checking self.b.is_active() -- presumably callers only ask
        # for the weight while both sides are positioned; confirm.
        if self.a.id() == self.b.id():
            return self.a.weight() + self.b.weight()
        else:
            return self.a.weight()

    def score(self):
        if self.b.is_active() and self.a.id() == self.b.id():
            return self.a.score() + self.b.score()
        else:
            return self.a.score()

    def supports(self, astype):
        return self.a.supports(astype)

    def value(self):
        return self.a.value()

    def value_as(self, astype):
        return self.a.value_as(astype)
| mit |
geekaia/edx-platform | cms/envs/aws_migrate.py | 87 | 1229 | """
A Django settings file for use on AWS while running
database migrations, since we don't want to normally run the
LMS with enough privileges to modify the database schema.
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614
# Import everything from .aws so that our settings are based on those.
from .aws import *
import os
from django.core.exceptions import ImproperlyConfigured
# Collect overrides for the default database connection from the
# environment. Any unset variable falls back to the value configured in
# the base (.aws) settings; PASSWORD deliberately has no fallback so that
# migrations cannot silently run with the regular runtime credentials.
_default_db = DATABASES['default']
DB_OVERRIDES = {
    'PASSWORD': os.environ.get('DB_MIGRATION_PASS', None),
    'ENGINE': os.environ.get('DB_MIGRATION_ENGINE', _default_db['ENGINE']),
    'USER': os.environ.get('DB_MIGRATION_USER', _default_db['USER']),
    'NAME': os.environ.get('DB_MIGRATION_NAME', _default_db['NAME']),
    'HOST': os.environ.get('DB_MIGRATION_HOST', _default_db['HOST']),
    'PORT': os.environ.get('DB_MIGRATION_PORT', _default_db['PORT']),
}

if DB_OVERRIDES['PASSWORD'] is None:
    raise ImproperlyConfigured("No database password was provided for running "
                               "migrations. This is fatal.")

# Apply the overrides on top of the base settings
for override in DB_OVERRIDES:
    DATABASES['default'][override] = DB_OVERRIDES[override]
evilgroot/ethercut | ethercut/master.py | 1 | 6570 | # coding: utf-8
# ETHERCUT SUITE
# Author: Ivan 'evilgroot' Luengo
# Email: [email protected]
# This project is released under a GPLv3 license
"""
Master: Handles the program loop
"""
import pcap, Queue, logging
import contextlib
import ethercut.ui as ui
import ethercut.log as log
import ethercut.sniff as sniff
import ethercut.utils as utils
import ethercut.net.link as link
import ethercut.discovery as discovery
import ethercut.exceptions as exceptions
import ethercut.net.target as target
import ethercut.net.inject as inject
import ethercut.net.network as network
import ethercut.decodermanager as decmanager
import ethercut.spoofermanager as spfmanager
import ethercut.platform as platform
import ethercut.koalafilter as koala
import ethercut.shell as shell
from ethercut.options import *
from ethercut.config import ethconf
from ethercut.context import ctx
from ethercut import NAME, PROGRAM, CONFILE, COPYRIGHT, AUTHOR
from ethercut.types.colorstr import CStr
class Master(object):
name = NAME
program = PROGRAM
    def __init__(self):
        """
        Build the program state: load the configuration, register the
        plugin managers and wire up every module before the UI starts.
        """
        # Load configuration file (must happen before the managers
        # register, since they rely on the loaded configuration)
        ethconf.load(CONFILE)
        # Register the decoders
        self.decoders = decmanager.DecoderManager()
        self.decoders.register()
        # Register the spoofers
        self.spoofers = spfmanager.SpooferManager()
        self.spoofers.register()
        # Add all options
        self.opt = Options()
        # Attack targets; also published on the shared context so other
        # modules can reach the target list
        self.target1 = None
        self.target2 = None
        self.targetlist = ctx.targetlist = target.TargetList()
        # Network state, filled in later by update_network()
        self.iface = None
        self.original_mac = None
        self.network = None
        self.gateway = None
        # Helper modules
        self.injector = inject.Injector()
        self.discovery = discovery.Discovery()
        self.sniffer = sniff.Sniffer()
        self.filter = koala.KoalaFilter(self.decoders)
        # Initialize the user interface
        self.ui = ui.TextUI(self)
def start(self):
"""
Starts the whole thing
"""
# Load spoofers and decoders
if not self.opt.sniff.read:
self.spoofers.load()
self.decoders.load()
# Starts the user interface
self.ui.start()
def show_summary(self):
"""
Show a summary of the program status:
-Spoofers and decoders successfuly loaded
-Modules enabled (discovery, sniffer...)
"""
spoof = CStr(len(self.spoofers)).green if len(self.spoofers) > 0 else CStr(0).red
decode = CStr(len(self.decoders)).green if len(self.decoders) > 0 else CStr(0).red
disc = CStr("ON").green if self.discovery.active else CStr("OFF").red
sniff = CStr("ON").green if self.sniffer.active else CStr("OFF").red
summary = "[%s: %s - %s: %s - %s: %s - %s: %s]\n"%(CStr("spoofers").yellow,
spoof,
CStr("decoders").yellow,
decode,
CStr("discovery").yellow,
disc,
CStr("sniffer").yellow,
sniff)
self.ui.user_msg(summary)
def update_network(self):
"""
Update the network details
"""
if self.opt.core.use_mac:
cfg = utils.get_iface(self.opt.core.iface)
if cfg["inet"] is None:
raise exceptions.EthercutException("Couldn't determine %s IP address, make sure it "+
"is connected and propertly configured")
# Save the original mac to restore it later
self.original_mac = cfg["hw"]
self.ui.msg("Changing MAC address to: %s" %CStr(self.opt.core.use_mac).yellow)
shell.Shell().change_mac(self.opt.core.iface, self.opt.core.use_mac)
self.iface = link.Link(self.opt.core.iface)
# Network
self.network = network.Network(self.iface.ip, self.iface.netmask)
# Try to find the network gateway
gwip = self.opt.core.gateway or self.network.gateway
gwhw = utils.arp_read(gwip)
if gwip is None or gwhw is None:
raise exceptions.EthercutException("Ethercut wasn't able to find the network gateway, "+
"please check your network configuration")
self.gateway = target.Target(gwip, gwhw)
self.ui.msg("[%s] %s"%(CStr("IFACE").cyan, self.iface))
self.ui.msg("[%s] %s" %(CStr("GATEWAY").cyan, repr(self.gateway)))
# Update the context
ctx.iface = self.iface
ctx.network = self.network
ctx.gateway = self.gateway
def update_targets(self):
"""
Compile the target specifications and build the target list
"""
self.targetlist.clear()
self.target1 = self.opt.attack.target1
self.target2 = self.opt.attack.target2
# Add targets and bindings specified by the user with -T
for t in self.opt.attack.targets:
ip, mac, port = t
if port:
if mac: # Bind ports to MAC by default
if mac in self.target1:
self.target1.specific[mac] = port
if mac in self.target2:
self.target2.specific[mac] = port
else: # Bind it to the ip
if ip in self.target1:
self.target1.specific[ip] = port
if ip in self.target2:
self.target2.specific[ip] = port
if not self.opt.sniff.read:
# Only add the target if it has mac and ip
if (ip and mac and ip != self.gateway.ip and mac != self.gateway.mac and
ip != self.iface.ip and mac != self.iface.mac):
self.targetlist.append(target.Target(ip, mac, perm=True))
if len(self.targetlist) > 0:
self.ui.msg("Permanent targets:")
for t in self.targetlist:
self.ui.msg("\t%s"%repr(t))
else:
self.ui.msg("No permanent targets were added to the target list")
ctx.targetlist = self.targetlist
ctx.target1 = self.target1
ctx.target2 = self.target2
def shutdown(self):
"""
Shuts the program down, terminate all daemons
"""
self.ui.clean_exit()
| gpl-3.0 |
hetajen/vnpy161 | vn.api/vn.ctp/py3/pyscript/ctp_struct.py | 40 | 315084 | # encoding: UTF-8
structDict = {}
#//////////////////////////////////////////////////////////////////////
#@system 新一代交易所系统
#@company 上海期货信息技术有限公司
#@file ThostFtdcUserApiStruct.h
#@brief 定义了客户端接口使用的业务数据结构
#@history
#//////////////////////////////////////////////////////////////////////
#信息分发
CThostFtdcDisseminationField = {}
#序列系列号
CThostFtdcDisseminationField["SequenceSeries"] = "int"
#序列号
CThostFtdcDisseminationField["SequenceNo"] = "int"
structDict['CThostFtdcDisseminationField'] = CThostFtdcDisseminationField
#用户登录请求
CThostFtdcReqUserLoginField = {}
#交易日
CThostFtdcReqUserLoginField["TradingDay"] = "string"
#经纪公司代码
CThostFtdcReqUserLoginField["BrokerID"] = "string"
#用户代码
CThostFtdcReqUserLoginField["UserID"] = "string"
#密码
CThostFtdcReqUserLoginField["Password"] = "string"
#用户端产品信息
CThostFtdcReqUserLoginField["UserProductInfo"] = "string"
#接口端产品信息
CThostFtdcReqUserLoginField["InterfaceProductInfo"] = "string"
#协议信息
CThostFtdcReqUserLoginField["ProtocolInfo"] = "string"
#Mac地址
CThostFtdcReqUserLoginField["MacAddress"] = "string"
#动态密码
CThostFtdcReqUserLoginField["OneTimePassword"] = "string"
#终端IP地址
CThostFtdcReqUserLoginField["ClientIPAddress"] = "string"
structDict['CThostFtdcReqUserLoginField'] = CThostFtdcReqUserLoginField
#用户登录应答
CThostFtdcRspUserLoginField = {}
#交易日
CThostFtdcRspUserLoginField["TradingDay"] = "string"
#登录成功时间
CThostFtdcRspUserLoginField["LoginTime"] = "string"
#经纪公司代码
CThostFtdcRspUserLoginField["BrokerID"] = "string"
#用户代码
CThostFtdcRspUserLoginField["UserID"] = "string"
#交易系统名称
CThostFtdcRspUserLoginField["SystemName"] = "string"
#前置编号
CThostFtdcRspUserLoginField["FrontID"] = "int"
#会话编号
CThostFtdcRspUserLoginField["SessionID"] = "int"
#最大报单引用
CThostFtdcRspUserLoginField["MaxOrderRef"] = "string"
#上期所时间
CThostFtdcRspUserLoginField["SHFETime"] = "string"
#大商所时间
CThostFtdcRspUserLoginField["DCETime"] = "string"
#郑商所时间
CThostFtdcRspUserLoginField["CZCETime"] = "string"
#中金所时间
CThostFtdcRspUserLoginField["FFEXTime"] = "string"
#能源中心时间
CThostFtdcRspUserLoginField["INETime"] = "string"
structDict['CThostFtdcRspUserLoginField'] = CThostFtdcRspUserLoginField
#用户登出请求
CThostFtdcUserLogoutField = {}
#经纪公司代码
CThostFtdcUserLogoutField["BrokerID"] = "string"
#用户代码
CThostFtdcUserLogoutField["UserID"] = "string"
structDict['CThostFtdcUserLogoutField'] = CThostFtdcUserLogoutField
#强制交易员退出
CThostFtdcForceUserLogoutField = {}
#经纪公司代码
CThostFtdcForceUserLogoutField["BrokerID"] = "string"
#用户代码
CThostFtdcForceUserLogoutField["UserID"] = "string"
structDict['CThostFtdcForceUserLogoutField'] = CThostFtdcForceUserLogoutField
#客户端认证请求
CThostFtdcReqAuthenticateField = {}
#经纪公司代码
CThostFtdcReqAuthenticateField["BrokerID"] = "string"
#用户代码
CThostFtdcReqAuthenticateField["UserID"] = "string"
#用户端产品信息
CThostFtdcReqAuthenticateField["UserProductInfo"] = "string"
#认证码
CThostFtdcReqAuthenticateField["AuthCode"] = "string"
structDict['CThostFtdcReqAuthenticateField'] = CThostFtdcReqAuthenticateField
#客户端认证响应
CThostFtdcRspAuthenticateField = {}
#经纪公司代码
CThostFtdcRspAuthenticateField["BrokerID"] = "string"
#用户代码
CThostFtdcRspAuthenticateField["UserID"] = "string"
#用户端产品信息
CThostFtdcRspAuthenticateField["UserProductInfo"] = "string"
structDict['CThostFtdcRspAuthenticateField'] = CThostFtdcRspAuthenticateField
#客户端认证信息
CThostFtdcAuthenticationInfoField = {}
#经纪公司代码
CThostFtdcAuthenticationInfoField["BrokerID"] = "string"
#用户代码
CThostFtdcAuthenticationInfoField["UserID"] = "string"
#用户端产品信息
CThostFtdcAuthenticationInfoField["UserProductInfo"] = "string"
#认证信息
CThostFtdcAuthenticationInfoField["AuthInfo"] = "string"
#是否为认证结果
CThostFtdcAuthenticationInfoField["IsResult"] = "int"
structDict['CThostFtdcAuthenticationInfoField'] = CThostFtdcAuthenticationInfoField
#银期转帐报文头
CThostFtdcTransferHeaderField = {}
#版本号,常量,1.0
CThostFtdcTransferHeaderField["Version"] = "string"
#交易代码,必填
CThostFtdcTransferHeaderField["TradeCode"] = "string"
#交易日期,必填,格式:yyyymmdd
CThostFtdcTransferHeaderField["TradeDate"] = "string"
#交易时间,必填,格式:hhmmss
CThostFtdcTransferHeaderField["TradeTime"] = "string"
#发起方流水号,N/A
CThostFtdcTransferHeaderField["TradeSerial"] = "string"
#期货公司代码,必填
CThostFtdcTransferHeaderField["FutureID"] = "string"
#银行代码,根据查询银行得到,必填
CThostFtdcTransferHeaderField["BankID"] = "string"
#银行分中心代码,根据查询银行得到,必填
CThostFtdcTransferHeaderField["BankBrchID"] = "string"
#操作员,N/A
CThostFtdcTransferHeaderField["OperNo"] = "string"
#交易设备类型,N/A
CThostFtdcTransferHeaderField["DeviceID"] = "string"
#记录数,N/A
CThostFtdcTransferHeaderField["RecordNum"] = "string"
#会话编号,N/A
CThostFtdcTransferHeaderField["SessionID"] = "int"
#请求编号,N/A
CThostFtdcTransferHeaderField["RequestID"] = "int"
structDict['CThostFtdcTransferHeaderField'] = CThostFtdcTransferHeaderField
#银行资金转期货请求,TradeCode=202001
CThostFtdcTransferBankToFutureReqField = {}
#期货资金账户
CThostFtdcTransferBankToFutureReqField["FutureAccount"] = "string"
#密码标志
CThostFtdcTransferBankToFutureReqField["FuturePwdFlag"] = "char"
#密码
CThostFtdcTransferBankToFutureReqField["FutureAccPwd"] = "string"
#转账金额
CThostFtdcTransferBankToFutureReqField["TradeAmt"] = "float"
#客户手续费
CThostFtdcTransferBankToFutureReqField["CustFee"] = "float"
#币种:RMB-人民币 USD-美圆 HKD-港元
CThostFtdcTransferBankToFutureReqField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferBankToFutureReqField'] = CThostFtdcTransferBankToFutureReqField
#银行资金转期货请求响应
CThostFtdcTransferBankToFutureRspField = {}
#响应代码
CThostFtdcTransferBankToFutureRspField["RetCode"] = "string"
#响应信息
CThostFtdcTransferBankToFutureRspField["RetInfo"] = "string"
#资金账户
CThostFtdcTransferBankToFutureRspField["FutureAccount"] = "string"
#转帐金额
CThostFtdcTransferBankToFutureRspField["TradeAmt"] = "float"
#应收客户手续费
CThostFtdcTransferBankToFutureRspField["CustFee"] = "float"
#币种
CThostFtdcTransferBankToFutureRspField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferBankToFutureRspField'] = CThostFtdcTransferBankToFutureRspField
#期货资金转银行请求,TradeCode=202002
CThostFtdcTransferFutureToBankReqField = {}
#期货资金账户
CThostFtdcTransferFutureToBankReqField["FutureAccount"] = "string"
#密码标志
CThostFtdcTransferFutureToBankReqField["FuturePwdFlag"] = "char"
#密码
CThostFtdcTransferFutureToBankReqField["FutureAccPwd"] = "string"
#转账金额
CThostFtdcTransferFutureToBankReqField["TradeAmt"] = "float"
#客户手续费
CThostFtdcTransferFutureToBankReqField["CustFee"] = "float"
#币种:RMB-人民币 USD-美圆 HKD-港元
CThostFtdcTransferFutureToBankReqField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferFutureToBankReqField'] = CThostFtdcTransferFutureToBankReqField
#期货资金转银行请求响应
CThostFtdcTransferFutureToBankRspField = {}
#响应代码
CThostFtdcTransferFutureToBankRspField["RetCode"] = "string"
#响应信息
CThostFtdcTransferFutureToBankRspField["RetInfo"] = "string"
#资金账户
CThostFtdcTransferFutureToBankRspField["FutureAccount"] = "string"
#转帐金额
CThostFtdcTransferFutureToBankRspField["TradeAmt"] = "float"
#应收客户手续费
CThostFtdcTransferFutureToBankRspField["CustFee"] = "float"
#币种
CThostFtdcTransferFutureToBankRspField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferFutureToBankRspField'] = CThostFtdcTransferFutureToBankRspField
#查询银行资金请求,TradeCode=204002
CThostFtdcTransferQryBankReqField = {}
#期货资金账户
CThostFtdcTransferQryBankReqField["FutureAccount"] = "string"
#密码标志
CThostFtdcTransferQryBankReqField["FuturePwdFlag"] = "char"
#密码
CThostFtdcTransferQryBankReqField["FutureAccPwd"] = "string"
#币种:RMB-人民币 USD-美圆 HKD-港元
CThostFtdcTransferQryBankReqField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferQryBankReqField'] = CThostFtdcTransferQryBankReqField
#查询银行资金请求响应
CThostFtdcTransferQryBankRspField = {}
#响应代码
CThostFtdcTransferQryBankRspField["RetCode"] = "string"
#响应信息
CThostFtdcTransferQryBankRspField["RetInfo"] = "string"
#资金账户
CThostFtdcTransferQryBankRspField["FutureAccount"] = "string"
#银行余额
CThostFtdcTransferQryBankRspField["TradeAmt"] = "float"
#银行可用余额
CThostFtdcTransferQryBankRspField["UseAmt"] = "float"
#银行可取余额
CThostFtdcTransferQryBankRspField["FetchAmt"] = "float"
#币种
CThostFtdcTransferQryBankRspField["CurrencyCode"] = "string"
structDict['CThostFtdcTransferQryBankRspField'] = CThostFtdcTransferQryBankRspField
#查询银行交易明细请求,TradeCode=204999
CThostFtdcTransferQryDetailReqField = {}
#期货资金账户
CThostFtdcTransferQryDetailReqField["FutureAccount"] = "string"
structDict['CThostFtdcTransferQryDetailReqField'] = CThostFtdcTransferQryDetailReqField
#查询银行交易明细请求响应
CThostFtdcTransferQryDetailRspField = {}
#交易日期
CThostFtdcTransferQryDetailRspField["TradeDate"] = "string"
#交易时间
CThostFtdcTransferQryDetailRspField["TradeTime"] = "string"
#交易代码
CThostFtdcTransferQryDetailRspField["TradeCode"] = "string"
#期货流水号
CThostFtdcTransferQryDetailRspField["FutureSerial"] = "int"
#期货公司代码
CThostFtdcTransferQryDetailRspField["FutureID"] = "string"
#资金帐号
CThostFtdcTransferQryDetailRspField["FutureAccount"] = "string"
#银行流水号
CThostFtdcTransferQryDetailRspField["BankSerial"] = "int"
#银行代码
CThostFtdcTransferQryDetailRspField["BankID"] = "string"
#银行分中心代码
CThostFtdcTransferQryDetailRspField["BankBrchID"] = "string"
#银行账号
CThostFtdcTransferQryDetailRspField["BankAccount"] = "string"
#证件号码
CThostFtdcTransferQryDetailRspField["CertCode"] = "string"
#货币代码
CThostFtdcTransferQryDetailRspField["CurrencyCode"] = "string"
#发生金额
CThostFtdcTransferQryDetailRspField["TxAmount"] = "float"
#有效标志
CThostFtdcTransferQryDetailRspField["Flag"] = "char"
structDict['CThostFtdcTransferQryDetailRspField'] = CThostFtdcTransferQryDetailRspField
#响应信息
CThostFtdcRspInfoField = {}
#错误代码
CThostFtdcRspInfoField["ErrorID"] = "int"
#错误信息
CThostFtdcRspInfoField["ErrorMsg"] = "string"
structDict['CThostFtdcRspInfoField'] = CThostFtdcRspInfoField
#交易所
CThostFtdcExchangeField = {}
#交易所代码
CThostFtdcExchangeField["ExchangeID"] = "string"
#交易所名称
CThostFtdcExchangeField["ExchangeName"] = "string"
#交易所属性
CThostFtdcExchangeField["ExchangeProperty"] = "char"
structDict['CThostFtdcExchangeField'] = CThostFtdcExchangeField
#产品
CThostFtdcProductField = {}
#产品代码
CThostFtdcProductField["ProductID"] = "string"
#产品名称
CThostFtdcProductField["ProductName"] = "string"
#交易所代码
CThostFtdcProductField["ExchangeID"] = "string"
#产品类型
CThostFtdcProductField["ProductClass"] = "char"
#合约数量乘数
CThostFtdcProductField["VolumeMultiple"] = "int"
#最小变动价位
CThostFtdcProductField["PriceTick"] = "float"
#市价单最大下单量
CThostFtdcProductField["MaxMarketOrderVolume"] = "int"
#市价单最小下单量
CThostFtdcProductField["MinMarketOrderVolume"] = "int"
#限价单最大下单量
CThostFtdcProductField["MaxLimitOrderVolume"] = "int"
#限价单最小下单量
CThostFtdcProductField["MinLimitOrderVolume"] = "int"
#持仓类型
CThostFtdcProductField["PositionType"] = "char"
#持仓日期类型
CThostFtdcProductField["PositionDateType"] = "char"
#平仓处理类型
CThostFtdcProductField["CloseDealType"] = "char"
#交易币种类型
CThostFtdcProductField["TradeCurrencyID"] = "string"
#质押资金可用范围
CThostFtdcProductField["MortgageFundUseRange"] = "char"
#交易所产品代码
CThostFtdcProductField["ExchangeProductID"] = "string"
#合约基础商品乘数
CThostFtdcProductField["UnderlyingMultiple"] = "float"
structDict['CThostFtdcProductField'] = CThostFtdcProductField
#合约
CThostFtdcInstrumentField = {}
#合约代码
CThostFtdcInstrumentField["InstrumentID"] = "string"
#交易所代码
CThostFtdcInstrumentField["ExchangeID"] = "string"
#合约名称
CThostFtdcInstrumentField["InstrumentName"] = "string"
#合约在交易所的代码
CThostFtdcInstrumentField["ExchangeInstID"] = "string"
#产品代码
CThostFtdcInstrumentField["ProductID"] = "string"
#产品类型
CThostFtdcInstrumentField["ProductClass"] = "char"
#交割年份
CThostFtdcInstrumentField["DeliveryYear"] = "int"
#交割月
CThostFtdcInstrumentField["DeliveryMonth"] = "int"
#市价单最大下单量
CThostFtdcInstrumentField["MaxMarketOrderVolume"] = "int"
#市价单最小下单量
CThostFtdcInstrumentField["MinMarketOrderVolume"] = "int"
#限价单最大下单量
CThostFtdcInstrumentField["MaxLimitOrderVolume"] = "int"
#限价单最小下单量
CThostFtdcInstrumentField["MinLimitOrderVolume"] = "int"
#合约数量乘数
CThostFtdcInstrumentField["VolumeMultiple"] = "int"
#最小变动价位
CThostFtdcInstrumentField["PriceTick"] = "float"
#创建日
CThostFtdcInstrumentField["CreateDate"] = "string"
#上市日
CThostFtdcInstrumentField["OpenDate"] = "string"
#到期日
CThostFtdcInstrumentField["ExpireDate"] = "string"
#开始交割日
CThostFtdcInstrumentField["StartDelivDate"] = "string"
#结束交割日
CThostFtdcInstrumentField["EndDelivDate"] = "string"
#合约生命周期状态
CThostFtdcInstrumentField["InstLifePhase"] = "char"
#当前是否交易
CThostFtdcInstrumentField["IsTrading"] = "int"
#持仓类型
CThostFtdcInstrumentField["PositionType"] = "char"
#持仓日期类型
CThostFtdcInstrumentField["PositionDateType"] = "char"
#多头保证金率
CThostFtdcInstrumentField["LongMarginRatio"] = "float"
#空头保证金率
CThostFtdcInstrumentField["ShortMarginRatio"] = "float"
#是否使用大额单边保证金算法
CThostFtdcInstrumentField["MaxMarginSideAlgorithm"] = "char"
#基础商品代码
CThostFtdcInstrumentField["UnderlyingInstrID"] = "string"
#执行价
CThostFtdcInstrumentField["StrikePrice"] = "float"
#期权类型
CThostFtdcInstrumentField["OptionsType"] = "char"
#合约基础商品乘数
CThostFtdcInstrumentField["UnderlyingMultiple"] = "float"
#组合类型
CThostFtdcInstrumentField["CombinationType"] = "char"
#最小买下单单位
CThostFtdcInstrumentField["MinBuyVolume"] = "int"
#最小卖下单单位
CThostFtdcInstrumentField["MinSellVolume"] = "int"
#合约标识码
CThostFtdcInstrumentField["InstrumentCode"] = "string"
structDict['CThostFtdcInstrumentField'] = CThostFtdcInstrumentField
#经纪公司
CThostFtdcBrokerField = {}
#经纪公司代码
CThostFtdcBrokerField["BrokerID"] = "string"
#经纪公司简称
CThostFtdcBrokerField["BrokerAbbr"] = "string"
#经纪公司名称
CThostFtdcBrokerField["BrokerName"] = "string"
#是否活跃
CThostFtdcBrokerField["IsActive"] = "int"
structDict['CThostFtdcBrokerField'] = CThostFtdcBrokerField
#交易所交易员
CThostFtdcTraderField = {}
#交易所代码
CThostFtdcTraderField["ExchangeID"] = "string"
#交易所交易员代码
CThostFtdcTraderField["TraderID"] = "string"
#会员代码
CThostFtdcTraderField["ParticipantID"] = "string"
#密码
CThostFtdcTraderField["Password"] = "string"
#安装数量
CThostFtdcTraderField["InstallCount"] = "int"
#经纪公司代码
CThostFtdcTraderField["BrokerID"] = "string"
structDict['CThostFtdcTraderField'] = CThostFtdcTraderField
#投资者
CThostFtdcInvestorField = {}
#投资者代码
CThostFtdcInvestorField["InvestorID"] = "string"
#经纪公司代码
CThostFtdcInvestorField["BrokerID"] = "string"
#投资者分组代码
CThostFtdcInvestorField["InvestorGroupID"] = "string"
#投资者名称
CThostFtdcInvestorField["InvestorName"] = "string"
#证件类型
CThostFtdcInvestorField["IdentifiedCardType"] = "char"
#证件号码
CThostFtdcInvestorField["IdentifiedCardNo"] = "string"
#是否活跃
CThostFtdcInvestorField["IsActive"] = "int"
#联系电话
CThostFtdcInvestorField["Telephone"] = "string"
#通讯地址
CThostFtdcInvestorField["Address"] = "string"
#开户日期
CThostFtdcInvestorField["OpenDate"] = "string"
#手机
CThostFtdcInvestorField["Mobile"] = "string"
#手续费率模板代码
CThostFtdcInvestorField["CommModelID"] = "string"
#保证金率模板代码
CThostFtdcInvestorField["MarginModelID"] = "string"
structDict['CThostFtdcInvestorField'] = CThostFtdcInvestorField
#交易编码
CThostFtdcTradingCodeField = {}
#投资者代码
CThostFtdcTradingCodeField["InvestorID"] = "string"
#经纪公司代码
CThostFtdcTradingCodeField["BrokerID"] = "string"
#交易所代码
CThostFtdcTradingCodeField["ExchangeID"] = "string"
#客户代码
CThostFtdcTradingCodeField["ClientID"] = "string"
#是否活跃
CThostFtdcTradingCodeField["IsActive"] = "int"
#交易编码类型
CThostFtdcTradingCodeField["ClientIDType"] = "char"
#营业部编号
CThostFtdcTradingCodeField["BranchID"] = "string"
#业务类型
CThostFtdcTradingCodeField["BizType"] = "char"
structDict['CThostFtdcTradingCodeField'] = CThostFtdcTradingCodeField
#会员编码和经纪公司编码对照表
CThostFtdcPartBrokerField = {}
#经纪公司代码
CThostFtdcPartBrokerField["BrokerID"] = "string"
#交易所代码
CThostFtdcPartBrokerField["ExchangeID"] = "string"
#会员代码
CThostFtdcPartBrokerField["ParticipantID"] = "string"
#是否活跃
CThostFtdcPartBrokerField["IsActive"] = "int"
structDict['CThostFtdcPartBrokerField'] = CThostFtdcPartBrokerField
#管理用户
CThostFtdcSuperUserField = {}
#用户代码
CThostFtdcSuperUserField["UserID"] = "string"
#用户名称
CThostFtdcSuperUserField["UserName"] = "string"
#密码
CThostFtdcSuperUserField["Password"] = "string"
#是否活跃
CThostFtdcSuperUserField["IsActive"] = "int"
structDict['CThostFtdcSuperUserField'] = CThostFtdcSuperUserField
#管理用户功能权限
CThostFtdcSuperUserFunctionField = {}
#用户代码
CThostFtdcSuperUserFunctionField["UserID"] = "string"
#功能代码
CThostFtdcSuperUserFunctionField["FunctionCode"] = "char"
structDict['CThostFtdcSuperUserFunctionField'] = CThostFtdcSuperUserFunctionField
#投资者组
CThostFtdcInvestorGroupField = {}
#经纪公司代码
CThostFtdcInvestorGroupField["BrokerID"] = "string"
#投资者分组代码
CThostFtdcInvestorGroupField["InvestorGroupID"] = "string"
#投资者分组名称
CThostFtdcInvestorGroupField["InvestorGroupName"] = "string"
structDict['CThostFtdcInvestorGroupField'] = CThostFtdcInvestorGroupField
#资金账户
CThostFtdcTradingAccountField = {}
#经纪公司代码
CThostFtdcTradingAccountField["BrokerID"] = "string"
#投资者帐号
CThostFtdcTradingAccountField["AccountID"] = "string"
#上次质押金额
CThostFtdcTradingAccountField["PreMortgage"] = "float"
#上次信用额度
CThostFtdcTradingAccountField["PreCredit"] = "float"
#上次存款额
CThostFtdcTradingAccountField["PreDeposit"] = "float"
#上次结算准备金
CThostFtdcTradingAccountField["PreBalance"] = "float"
#上次占用的保证金
CThostFtdcTradingAccountField["PreMargin"] = "float"
#利息基数
CThostFtdcTradingAccountField["InterestBase"] = "float"
#利息收入
CThostFtdcTradingAccountField["Interest"] = "float"
#入金金额
CThostFtdcTradingAccountField["Deposit"] = "float"
#出金金额
CThostFtdcTradingAccountField["Withdraw"] = "float"
#冻结的保证金
CThostFtdcTradingAccountField["FrozenMargin"] = "float"
#冻结的资金
CThostFtdcTradingAccountField["FrozenCash"] = "float"
#冻结的手续费
CThostFtdcTradingAccountField["FrozenCommission"] = "float"
#当前保证金总额
CThostFtdcTradingAccountField["CurrMargin"] = "float"
#资金差额
CThostFtdcTradingAccountField["CashIn"] = "float"
#手续费
CThostFtdcTradingAccountField["Commission"] = "float"
#平仓盈亏
CThostFtdcTradingAccountField["CloseProfit"] = "float"
#持仓盈亏
CThostFtdcTradingAccountField["PositionProfit"] = "float"
#期货结算准备金
CThostFtdcTradingAccountField["Balance"] = "float"
#可用资金
CThostFtdcTradingAccountField["Available"] = "float"
#可取资金
CThostFtdcTradingAccountField["WithdrawQuota"] = "float"
#基本准备金
CThostFtdcTradingAccountField["Reserve"] = "float"
#交易日
CThostFtdcTradingAccountField["TradingDay"] = "string"
#结算编号
CThostFtdcTradingAccountField["SettlementID"] = "int"
#信用额度
CThostFtdcTradingAccountField["Credit"] = "float"
#质押金额
CThostFtdcTradingAccountField["Mortgage"] = "float"
#交易所保证金
CThostFtdcTradingAccountField["ExchangeMargin"] = "float"
#投资者交割保证金
CThostFtdcTradingAccountField["DeliveryMargin"] = "float"
#交易所交割保证金
CThostFtdcTradingAccountField["ExchangeDeliveryMargin"] = "float"
#保底期货结算准备金
CThostFtdcTradingAccountField["ReserveBalance"] = "float"
#币种代码
CThostFtdcTradingAccountField["CurrencyID"] = "string"
#上次货币质入金额
CThostFtdcTradingAccountField["PreFundMortgageIn"] = "float"
#上次货币质出金额
CThostFtdcTradingAccountField["PreFundMortgageOut"] = "float"
#货币质入金额
CThostFtdcTradingAccountField["FundMortgageIn"] = "float"
#货币质出金额
CThostFtdcTradingAccountField["FundMortgageOut"] = "float"
#货币质押余额
CThostFtdcTradingAccountField["FundMortgageAvailable"] = "float"
#可质押货币金额
CThostFtdcTradingAccountField["MortgageableFund"] = "float"
#特殊产品占用保证金
CThostFtdcTradingAccountField["SpecProductMargin"] = "float"
#特殊产品冻结保证金
CThostFtdcTradingAccountField["SpecProductFrozenMargin"] = "float"
#特殊产品手续费
CThostFtdcTradingAccountField["SpecProductCommission"] = "float"
#特殊产品冻结手续费
CThostFtdcTradingAccountField["SpecProductFrozenCommission"] = "float"
#特殊产品持仓盈亏
CThostFtdcTradingAccountField["SpecProductPositionProfit"] = "float"
#特殊产品平仓盈亏
CThostFtdcTradingAccountField["SpecProductCloseProfit"] = "float"
#根据持仓盈亏算法计算的特殊产品持仓盈亏
CThostFtdcTradingAccountField["SpecProductPositionProfitByAlg"] = "float"
#特殊产品交易所保证金
CThostFtdcTradingAccountField["SpecProductExchangeMargin"] = "float"
#业务类型
CThostFtdcTradingAccountField["BizType"] = "char"
structDict['CThostFtdcTradingAccountField'] = CThostFtdcTradingAccountField
#投资者持仓
CThostFtdcInvestorPositionField = {}
#合约代码
CThostFtdcInvestorPositionField["InstrumentID"] = "string"
#经纪公司代码
CThostFtdcInvestorPositionField["BrokerID"] = "string"
#投资者代码
CThostFtdcInvestorPositionField["InvestorID"] = "string"
#持仓多空方向
CThostFtdcInvestorPositionField["PosiDirection"] = "char"
#投机套保标志
CThostFtdcInvestorPositionField["HedgeFlag"] = "char"
#持仓日期
CThostFtdcInvestorPositionField["PositionDate"] = "char"
#上日持仓
CThostFtdcInvestorPositionField["YdPosition"] = "int"
#今日持仓
CThostFtdcInvestorPositionField["Position"] = "int"
#多头冻结
CThostFtdcInvestorPositionField["LongFrozen"] = "int"
#空头冻结
CThostFtdcInvestorPositionField["ShortFrozen"] = "int"
#开仓冻结金额
CThostFtdcInvestorPositionField["LongFrozenAmount"] = "float"
#开仓冻结金额
CThostFtdcInvestorPositionField["ShortFrozenAmount"] = "float"
#开仓量
CThostFtdcInvestorPositionField["OpenVolume"] = "int"
#平仓量
CThostFtdcInvestorPositionField["CloseVolume"] = "int"
#开仓金额
CThostFtdcInvestorPositionField["OpenAmount"] = "float"
#平仓金额
CThostFtdcInvestorPositionField["CloseAmount"] = "float"
#持仓成本
CThostFtdcInvestorPositionField["PositionCost"] = "float"
#上次占用的保证金
CThostFtdcInvestorPositionField["PreMargin"] = "float"
#占用的保证金
CThostFtdcInvestorPositionField["UseMargin"] = "float"
#冻结的保证金
CThostFtdcInvestorPositionField["FrozenMargin"] = "float"
#冻结的资金
CThostFtdcInvestorPositionField["FrozenCash"] = "float"
#冻结的手续费
CThostFtdcInvestorPositionField["FrozenCommission"] = "float"
#资金差额
CThostFtdcInvestorPositionField["CashIn"] = "float"
#手续费
CThostFtdcInvestorPositionField["Commission"] = "float"
#平仓盈亏
CThostFtdcInvestorPositionField["CloseProfit"] = "float"
#持仓盈亏
CThostFtdcInvestorPositionField["PositionProfit"] = "float"
#上次结算价
CThostFtdcInvestorPositionField["PreSettlementPrice"] = "float"
#本次结算价
CThostFtdcInvestorPositionField["SettlementPrice"] = "float"
#交易日
CThostFtdcInvestorPositionField["TradingDay"] = "string"
#结算编号
CThostFtdcInvestorPositionField["SettlementID"] = "int"
#开仓成本
CThostFtdcInvestorPositionField["OpenCost"] = "float"
#交易所保证金
CThostFtdcInvestorPositionField["ExchangeMargin"] = "float"
#组合成交形成的持仓
CThostFtdcInvestorPositionField["CombPosition"] = "int"
#组合多头冻结
CThostFtdcInvestorPositionField["CombLongFrozen"] = "int"
#组合空头冻结
CThostFtdcInvestorPositionField["CombShortFrozen"] = "int"
#逐日盯市平仓盈亏
CThostFtdcInvestorPositionField["CloseProfitByDate"] = "float"
#逐笔对冲平仓盈亏
CThostFtdcInvestorPositionField["CloseProfitByTrade"] = "float"
#今日持仓
CThostFtdcInvestorPositionField["TodayPosition"] = "int"
#保证金率
CThostFtdcInvestorPositionField["MarginRateByMoney"] = "float"
#保证金率(按手数)
CThostFtdcInvestorPositionField["MarginRateByVolume"] = "float"
#执行冻结
CThostFtdcInvestorPositionField["StrikeFrozen"] = "int"
#执行冻结金额
CThostFtdcInvestorPositionField["StrikeFrozenAmount"] = "float"
#放弃执行冻结
CThostFtdcInvestorPositionField["AbandonFrozen"] = "int"
#交易所代码
CThostFtdcInvestorPositionField["ExchangeID"] = "string"
#执行冻结的昨仓
CThostFtdcInvestorPositionField["YdStrikeFrozen"] = "int"
structDict['CThostFtdcInvestorPositionField'] = CThostFtdcInvestorPositionField
#合约保证金率
CThostFtdcInstrumentMarginRateField = {}
#合约代码
CThostFtdcInstrumentMarginRateField["InstrumentID"] = "string"
#投资者范围
CThostFtdcInstrumentMarginRateField["InvestorRange"] = "char"
#经纪公司代码
CThostFtdcInstrumentMarginRateField["BrokerID"] = "string"
#投资者代码
CThostFtdcInstrumentMarginRateField["InvestorID"] = "string"
#投机套保标志
CThostFtdcInstrumentMarginRateField["HedgeFlag"] = "char"
#多头保证金率
CThostFtdcInstrumentMarginRateField["LongMarginRatioByMoney"] = "float"
#多头保证金费
CThostFtdcInstrumentMarginRateField["LongMarginRatioByVolume"] = "float"
#空头保证金率
CThostFtdcInstrumentMarginRateField["ShortMarginRatioByMoney"] = "float"
#空头保证金费
CThostFtdcInstrumentMarginRateField["ShortMarginRatioByVolume"] = "float"
#是否相对交易所收取
CThostFtdcInstrumentMarginRateField["IsRelative"] = "int"
structDict['CThostFtdcInstrumentMarginRateField'] = CThostFtdcInstrumentMarginRateField
#合约手续费率
CThostFtdcInstrumentCommissionRateField = {}
#合约代码
CThostFtdcInstrumentCommissionRateField["InstrumentID"] = "string"
#投资者范围
CThostFtdcInstrumentCommissionRateField["InvestorRange"] = "char"
#经纪公司代码
CThostFtdcInstrumentCommissionRateField["BrokerID"] = "string"
#投资者代码
CThostFtdcInstrumentCommissionRateField["InvestorID"] = "string"
#开仓手续费率
CThostFtdcInstrumentCommissionRateField["OpenRatioByMoney"] = "float"
#开仓手续费
CThostFtdcInstrumentCommissionRateField["OpenRatioByVolume"] = "float"
#平仓手续费率
CThostFtdcInstrumentCommissionRateField["CloseRatioByMoney"] = "float"
#平仓手续费
CThostFtdcInstrumentCommissionRateField["CloseRatioByVolume"] = "float"
#平今手续费率
CThostFtdcInstrumentCommissionRateField["CloseTodayRatioByMoney"] = "float"
#平今手续费
CThostFtdcInstrumentCommissionRateField["CloseTodayRatioByVolume"] = "float"
#交易所代码
CThostFtdcInstrumentCommissionRateField["ExchangeID"] = "string"
#业务类型
CThostFtdcInstrumentCommissionRateField["BizType"] = "char"
structDict['CThostFtdcInstrumentCommissionRateField'] = CThostFtdcInstrumentCommissionRateField
#深度行情
CThostFtdcDepthMarketDataField = {}
#交易日
CThostFtdcDepthMarketDataField["TradingDay"] = "string"
#合约代码
CThostFtdcDepthMarketDataField["InstrumentID"] = "string"
#交易所代码
CThostFtdcDepthMarketDataField["ExchangeID"] = "string"
#合约在交易所的代码
CThostFtdcDepthMarketDataField["ExchangeInstID"] = "string"
#最新价
CThostFtdcDepthMarketDataField["LastPrice"] = "float"
#上次结算价
CThostFtdcDepthMarketDataField["PreSettlementPrice"] = "float"
#昨收盘
CThostFtdcDepthMarketDataField["PreClosePrice"] = "float"
#昨持仓量
CThostFtdcDepthMarketDataField["PreOpenInterest"] = "float"
#今开盘
CThostFtdcDepthMarketDataField["OpenPrice"] = "float"
#最高价
CThostFtdcDepthMarketDataField["HighestPrice"] = "float"
#最低价
CThostFtdcDepthMarketDataField["LowestPrice"] = "float"
#数量
CThostFtdcDepthMarketDataField["Volume"] = "int"
#成交金额
CThostFtdcDepthMarketDataField["Turnover"] = "float"
#持仓量
CThostFtdcDepthMarketDataField["OpenInterest"] = "float"
#今收盘
CThostFtdcDepthMarketDataField["ClosePrice"] = "float"
#本次结算价
CThostFtdcDepthMarketDataField["SettlementPrice"] = "float"
#涨停板价
CThostFtdcDepthMarketDataField["UpperLimitPrice"] = "float"
#跌停板价
CThostFtdcDepthMarketDataField["LowerLimitPrice"] = "float"
#昨虚实度
CThostFtdcDepthMarketDataField["PreDelta"] = "float"
#今虚实度
CThostFtdcDepthMarketDataField["CurrDelta"] = "float"
#最后修改时间
CThostFtdcDepthMarketDataField["UpdateTime"] = "string"
#最后修改毫秒
CThostFtdcDepthMarketDataField["UpdateMillisec"] = "int"
#申买价一
CThostFtdcDepthMarketDataField["BidPrice1"] = "float"
#申买量一
CThostFtdcDepthMarketDataField["BidVolume1"] = "int"
#申卖价一
CThostFtdcDepthMarketDataField["AskPrice1"] = "float"
#申卖量一
CThostFtdcDepthMarketDataField["AskVolume1"] = "int"
#申买价二
CThostFtdcDepthMarketDataField["BidPrice2"] = "float"
#申买量二
CThostFtdcDepthMarketDataField["BidVolume2"] = "int"
#申卖价二
CThostFtdcDepthMarketDataField["AskPrice2"] = "float"
#申卖量二
CThostFtdcDepthMarketDataField["AskVolume2"] = "int"
#申买价三
CThostFtdcDepthMarketDataField["BidPrice3"] = "float"
#申买量三
CThostFtdcDepthMarketDataField["BidVolume3"] = "int"
#申卖价三
CThostFtdcDepthMarketDataField["AskPrice3"] = "float"
#申卖量三
CThostFtdcDepthMarketDataField["AskVolume3"] = "int"
#申买价四
CThostFtdcDepthMarketDataField["BidPrice4"] = "float"
#申买量四
CThostFtdcDepthMarketDataField["BidVolume4"] = "int"
#申卖价四
CThostFtdcDepthMarketDataField["AskPrice4"] = "float"
#申卖量四
CThostFtdcDepthMarketDataField["AskVolume4"] = "int"
#申买价五
CThostFtdcDepthMarketDataField["BidPrice5"] = "float"
#申买量五
CThostFtdcDepthMarketDataField["BidVolume5"] = "int"
#申卖价五
CThostFtdcDepthMarketDataField["AskPrice5"] = "float"
#申卖量五
CThostFtdcDepthMarketDataField["AskVolume5"] = "int"
#当日均价
CThostFtdcDepthMarketDataField["AveragePrice"] = "float"
#业务日期
CThostFtdcDepthMarketDataField["ActionDay"] = "string"
structDict['CThostFtdcDepthMarketDataField'] = CThostFtdcDepthMarketDataField
# Investor instrument trading right
CThostFtdcInstrumentTradingRightField = {
    "InstrumentID": "string",  # instrument ID
    "InvestorRange": "char",   # investor range
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "TradingRight": "char",    # trading right
    "ExchangeID": "string",    # exchange ID
    "BizType": "char",         # business type
}
structDict['CThostFtdcInstrumentTradingRightField'] = CThostFtdcInstrumentTradingRightField

# Broker user
CThostFtdcBrokerUserField = {
    "BrokerID": "string",    # broker ID
    "UserID": "string",      # user ID
    "UserName": "string",    # user name
    "UserType": "char",      # user type
    "IsActive": "int",       # active flag
    "IsUsingOTP": "int",     # whether a one-time-password token is used
}
structDict['CThostFtdcBrokerUserField'] = CThostFtdcBrokerUserField

# Broker user password
CThostFtdcBrokerUserPasswordField = {
    "BrokerID": "string",    # broker ID
    "UserID": "string",      # user ID
    "Password": "string",    # password
}
structDict['CThostFtdcBrokerUserPasswordField'] = CThostFtdcBrokerUserPasswordField

# Broker user function right
CThostFtdcBrokerUserFunctionField = {
    "BrokerID": "string",            # broker ID
    "UserID": "string",              # user ID
    "BrokerFunctionCode": "char",    # broker function code
}
structDict['CThostFtdcBrokerUserFunctionField'] = CThostFtdcBrokerUserFunctionField
# Exchange trader offer (trader seat / connection info)
CThostFtdcTraderOfferField = {
    "ExchangeID": "string",                # exchange ID
    "TraderID": "string",                  # exchange trader ID
    "ParticipantID": "string",             # participant ID
    "Password": "string",                  # password
    "InstallID": "int",                    # installation ID
    "OrderLocalID": "string",              # local order ID
    "TraderConnectStatus": "char",         # trader connect status
    "ConnectRequestDate": "string",        # date the connect request was issued
    "ConnectRequestTime": "string",        # time the connect request was issued
    "LastReportDate": "string",            # last report date
    "LastReportTime": "string",            # last report time
    "ConnectDate": "string",               # date the connection completed
    "ConnectTime": "string",               # time the connection completed
    "StartDate": "string",                 # start date
    "StartTime": "string",                 # start time
    "TradingDay": "string",                # trading day
    "BrokerID": "string",                  # broker ID
    "MaxTradeID": "string",                # max trade ID on this seat
    "MaxOrderMessageReference": "string",  # max order message reference on this seat
    "BizType": "char",                     # business type
}
structDict['CThostFtdcTraderOfferField'] = CThostFtdcTraderOfferField

# Investor settlement result
CThostFtdcSettlementInfoField = {
    "TradingDay": "string",    # trading day
    "SettlementID": "int",     # settlement ID
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "SequenceNo": "int",       # sequence number
    "Content": "string",       # message content
}
structDict['CThostFtdcSettlementInfoField'] = CThostFtdcSettlementInfoField
# Instrument margin rate adjustment
CThostFtdcInstrumentMarginRateAdjustField = {
    "InstrumentID": "string",              # instrument ID
    "InvestorRange": "char",               # investor range
    "BrokerID": "string",                  # broker ID
    "InvestorID": "string",                # investor ID
    "HedgeFlag": "char",                   # hedge flag (speculation/hedge)
    "LongMarginRatioByMoney": "float",     # long margin ratio by money
    "LongMarginRatioByVolume": "float",    # long margin fee by volume
    "ShortMarginRatioByMoney": "float",    # short margin ratio by money
    "ShortMarginRatioByVolume": "float",   # short margin fee by volume
    "IsRelative": "int",                   # whether charged relative to the exchange
}
structDict['CThostFtdcInstrumentMarginRateAdjustField'] = CThostFtdcInstrumentMarginRateAdjustField

# Exchange margin rate
CThostFtdcExchangeMarginRateField = {
    "BrokerID": "string",                  # broker ID
    "InstrumentID": "string",              # instrument ID
    "HedgeFlag": "char",                   # hedge flag
    "LongMarginRatioByMoney": "float",     # long margin ratio by money
    "LongMarginRatioByVolume": "float",    # long margin fee by volume
    "ShortMarginRatioByMoney": "float",    # short margin ratio by money
    "ShortMarginRatioByVolume": "float",   # short margin fee by volume
}
structDict['CThostFtdcExchangeMarginRateField'] = CThostFtdcExchangeMarginRateField

# Exchange margin rate adjustment
CThostFtdcExchangeMarginRateAdjustField = {
    "BrokerID": "string",                      # broker ID
    "InstrumentID": "string",                  # instrument ID
    "HedgeFlag": "char",                       # hedge flag
    "LongMarginRatioByMoney": "float",         # long ratio by money, following the exchange
    "LongMarginRatioByVolume": "float",        # long fee by volume, following the exchange
    "ShortMarginRatioByMoney": "float",        # short ratio by money, following the exchange
    "ShortMarginRatioByVolume": "float",       # short fee by volume, following the exchange
    "ExchLongMarginRatioByMoney": "float",     # exchange long ratio by money
    "ExchLongMarginRatioByVolume": "float",    # exchange long fee by volume
    "ExchShortMarginRatioByMoney": "float",    # exchange short ratio by money
    "ExchShortMarginRatioByVolume": "float",   # exchange short fee by volume
    "NoLongMarginRatioByMoney": "float",       # long ratio by money, not following the exchange
    "NoLongMarginRatioByVolume": "float",      # long fee by volume, not following the exchange
    "NoShortMarginRatioByMoney": "float",      # short ratio by money, not following the exchange
    "NoShortMarginRatioByVolume": "float",     # short fee by volume, not following the exchange
}
structDict['CThostFtdcExchangeMarginRateAdjustField'] = CThostFtdcExchangeMarginRateAdjustField

# Currency exchange rate
CThostFtdcExchangeRateField = {
    "BrokerID": "string",          # broker ID
    "FromCurrencyID": "string",    # source currency
    "FromCurrencyUnit": "float",   # source currency unit amount
    "ToCurrencyID": "string",      # target currency
    "ExchangeRate": "float",       # exchange rate
}
structDict['CThostFtdcExchangeRateField'] = CThostFtdcExchangeRateField
# Settlement reference
CThostFtdcSettlementRefField = {
    "TradingDay": "string",   # trading day
    "SettlementID": "int",    # settlement ID
}
structDict['CThostFtdcSettlementRefField'] = CThostFtdcSettlementRefField

# Current time
CThostFtdcCurrentTimeField = {
    "CurrDate": "string",     # current date
    "CurrTime": "string",     # current time
    "CurrMillisec": "int",    # current time, milliseconds part
    "ActionDay": "string",    # business day
}
structDict['CThostFtdcCurrentTimeField'] = CThostFtdcCurrentTimeField

# Communication phase
CThostFtdcCommPhaseField = {
    "TradingDay": "string",   # trading day
    "CommPhaseNo": "int",     # communication phase number
    "SystemID": "string",     # system ID
}
structDict['CThostFtdcCommPhaseField'] = CThostFtdcCommPhaseField

# Login information
CThostFtdcLoginInfoField = {
    "FrontID": "int",                     # front ID
    "SessionID": "int",                   # session ID
    "BrokerID": "string",                 # broker ID
    "UserID": "string",                   # user ID
    "LoginDate": "string",                # login date
    "LoginTime": "string",                # login time
    "IPAddress": "string",                # IP address
    "UserProductInfo": "string",          # user-side product info
    "InterfaceProductInfo": "string",     # interface-side product info
    "ProtocolInfo": "string",             # protocol info
    "SystemName": "string",               # system name
    "Password": "string",                 # password
    "MaxOrderRef": "string",              # max order reference
    "SHFETime": "string",                 # SHFE time
    "DCETime": "string",                  # DCE time
    "CZCETime": "string",                 # CZCE time
    "FFEXTime": "string",                 # CFFEX time
    "MacAddress": "string",               # MAC address
    "OneTimePassword": "string",          # one-time password
    "INETime": "string",                  # INE time
}
structDict['CThostFtdcLoginInfoField'] = CThostFtdcLoginInfoField

# Logout-all request
CThostFtdcLogoutAllField = {
    "FrontID": "int",          # front ID
    "SessionID": "int",        # session ID
    "SystemName": "string",    # system name
}
structDict['CThostFtdcLogoutAllField'] = CThostFtdcLogoutAllField

# Front status
CThostFtdcFrontStatusField = {
    "FrontID": "int",              # front ID
    "LastReportDate": "string",    # last report date
    "LastReportTime": "string",    # last report time
    "IsActive": "int",             # active flag
}
structDict['CThostFtdcFrontStatusField'] = CThostFtdcFrontStatusField

# User password update
CThostFtdcUserPasswordUpdateField = {
    "BrokerID": "string",       # broker ID
    "UserID": "string",         # user ID
    "OldPassword": "string",    # old password
    "NewPassword": "string",    # new password
}
structDict['CThostFtdcUserPasswordUpdateField'] = CThostFtdcUserPasswordUpdateField
# Order insert request
CThostFtdcInputOrderField = {
    "BrokerID": "string",              # broker ID
    "InvestorID": "string",            # investor ID
    "InstrumentID": "string",          # instrument ID
    "OrderRef": "string",              # order reference
    "UserID": "string",                # user ID
    "OrderPriceType": "char",          # order price type
    "Direction": "char",               # direction (buy/sell)
    "CombOffsetFlag": "string",        # combined offset flag
    "CombHedgeFlag": "string",         # combined hedge flag
    "LimitPrice": "float",             # limit price
    "VolumeTotalOriginal": "int",      # original volume
    "TimeCondition": "char",           # time condition
    "GTDDate": "string",               # good-till-date
    "VolumeCondition": "char",         # volume condition
    "MinVolume": "int",                # minimum volume
    "ContingentCondition": "char",     # contingent (trigger) condition
    "StopPrice": "float",              # stop price
    "ForceCloseReason": "char",        # force-close reason
    "IsAutoSuspend": "int",            # auto-suspend flag
    "BusinessUnit": "string",          # business unit
    "RequestID": "int",                # request ID
    "UserForceClose": "int",           # user force-close flag
    "IsSwapOrder": "int",              # swap-order flag
    "ExchangeID": "string",            # exchange ID
}
structDict['CThostFtdcInputOrderField'] = CThostFtdcInputOrderField

# Order
CThostFtdcOrderField = {
    "BrokerID": "string",              # broker ID
    "InvestorID": "string",            # investor ID
    "InstrumentID": "string",          # instrument ID
    "OrderRef": "string",              # order reference
    "UserID": "string",                # user ID
    "OrderPriceType": "char",          # order price type
    "Direction": "char",               # direction (buy/sell)
    "CombOffsetFlag": "string",        # combined offset flag
    "CombHedgeFlag": "string",         # combined hedge flag
    "LimitPrice": "float",             # limit price
    "VolumeTotalOriginal": "int",      # original volume
    "TimeCondition": "char",           # time condition
    "GTDDate": "string",               # good-till-date
    "VolumeCondition": "char",         # volume condition
    "MinVolume": "int",                # minimum volume
    "ContingentCondition": "char",     # contingent (trigger) condition
    "StopPrice": "float",              # stop price
    "ForceCloseReason": "char",        # force-close reason
    "IsAutoSuspend": "int",            # auto-suspend flag
    "BusinessUnit": "string",          # business unit
    "RequestID": "int",                # request ID
    "OrderLocalID": "string",          # local order ID
    "ExchangeID": "string",            # exchange ID
    "ParticipantID": "string",         # participant ID
    "ClientID": "string",              # client ID
    "ExchangeInstID": "string",        # instrument ID at the exchange
    "TraderID": "string",              # exchange trader ID
    "InstallID": "int",                # installation ID
    "OrderSubmitStatus": "char",       # order submit status
    "NotifySequence": "int",           # order notify sequence
    "TradingDay": "string",            # trading day
    "SettlementID": "int",             # settlement ID
    "OrderSysID": "string",            # order system ID
    "OrderSource": "char",             # order source
    "OrderStatus": "char",             # order status
    "OrderType": "char",               # order type
    "VolumeTraded": "int",             # volume traded today
    "VolumeTotal": "int",              # remaining volume
    "InsertDate": "string",            # insert date
    "InsertTime": "string",            # insert time
    "ActiveTime": "string",            # activation time
    "SuspendTime": "string",           # suspension time
    "UpdateTime": "string",            # last update time
    "CancelTime": "string",            # cancel time
    "ActiveTraderID": "string",        # trader ID of the last modification
    "ClearingPartID": "string",        # clearing participant ID
    "SequenceNo": "int",               # sequence number
    "FrontID": "int",                  # front ID
    "SessionID": "int",                # session ID
    "UserProductInfo": "string",       # user-side product info
    "StatusMsg": "string",             # status message
    "UserForceClose": "int",           # user force-close flag
    "ActiveUserID": "string",          # operating user ID
    "BrokerOrderSeq": "int",           # broker order sequence
    "RelativeOrderSysID": "string",    # related order system ID
    "ZCETotalTradedVolume": "int",     # CZCE total traded volume
    "IsSwapOrder": "int",              # swap-order flag
    "BranchID": "string",              # branch ID
}
structDict['CThostFtdcOrderField'] = CThostFtdcOrderField
# Exchange-side order
CThostFtdcExchangeOrderField = {
    "OrderPriceType": "char",          # order price type
    "Direction": "char",               # direction (buy/sell)
    "CombOffsetFlag": "string",        # combined offset flag
    "CombHedgeFlag": "string",         # combined hedge flag
    "LimitPrice": "float",             # limit price
    "VolumeTotalOriginal": "int",      # original volume
    "TimeCondition": "char",           # time condition
    "GTDDate": "string",               # good-till-date
    "VolumeCondition": "char",         # volume condition
    "MinVolume": "int",                # minimum volume
    "ContingentCondition": "char",     # contingent (trigger) condition
    "StopPrice": "float",              # stop price
    "ForceCloseReason": "char",        # force-close reason
    "IsAutoSuspend": "int",            # auto-suspend flag
    "BusinessUnit": "string",          # business unit
    "RequestID": "int",                # request ID
    "OrderLocalID": "string",          # local order ID
    "ExchangeID": "string",            # exchange ID
    "ParticipantID": "string",         # participant ID
    "ClientID": "string",              # client ID
    "ExchangeInstID": "string",        # instrument ID at the exchange
    "TraderID": "string",              # exchange trader ID
    "InstallID": "int",                # installation ID
    "OrderSubmitStatus": "char",       # order submit status
    "NotifySequence": "int",           # order notify sequence
    "TradingDay": "string",            # trading day
    "SettlementID": "int",             # settlement ID
    "OrderSysID": "string",            # order system ID
    "OrderSource": "char",             # order source
    "OrderStatus": "char",             # order status
    "OrderType": "char",               # order type
    "VolumeTraded": "int",             # volume traded today
    "VolumeTotal": "int",              # remaining volume
    "InsertDate": "string",            # insert date
    "InsertTime": "string",            # insert time
    "ActiveTime": "string",            # activation time
    "SuspendTime": "string",           # suspension time
    "UpdateTime": "string",            # last update time
    "CancelTime": "string",            # cancel time
    "ActiveTraderID": "string",        # trader ID of the last modification
    "ClearingPartID": "string",        # clearing participant ID
    "SequenceNo": "int",               # sequence number
    "BranchID": "string",              # branch ID
}
structDict['CThostFtdcExchangeOrderField'] = CThostFtdcExchangeOrderField

# Exchange order-insert failure
CThostFtdcExchangeOrderInsertErrorField = {
    "ExchangeID": "string",        # exchange ID
    "ParticipantID": "string",     # participant ID
    "TraderID": "string",          # exchange trader ID
    "InstallID": "int",            # installation ID
    "OrderLocalID": "string",      # local order ID
    "ErrorID": "int",              # error code
    "ErrorMsg": "string",          # error message
}
structDict['CThostFtdcExchangeOrderInsertErrorField'] = CThostFtdcExchangeOrderInsertErrorField

# Order action (cancel/modify) request
CThostFtdcInputOrderActionField = {
    "BrokerID": "string",        # broker ID
    "InvestorID": "string",      # investor ID
    "OrderActionRef": "int",     # order action reference
    "OrderRef": "string",        # order reference
    "RequestID": "int",          # request ID
    "FrontID": "int",            # front ID
    "SessionID": "int",          # session ID
    "ExchangeID": "string",      # exchange ID
    "OrderSysID": "string",      # order system ID
    "ActionFlag": "char",        # action flag
    "LimitPrice": "float",       # limit price
    "VolumeChange": "int",       # volume change
    "UserID": "string",          # user ID
    "InstrumentID": "string",    # instrument ID
}
structDict['CThostFtdcInputOrderActionField'] = CThostFtdcInputOrderActionField
# Order action (cancel/modify)
CThostFtdcOrderActionField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "OrderActionRef": "int",       # order action reference
    "OrderRef": "string",          # order reference
    "RequestID": "int",            # request ID
    "FrontID": "int",              # front ID
    "SessionID": "int",            # session ID
    "ExchangeID": "string",        # exchange ID
    "OrderSysID": "string",        # order system ID
    "ActionFlag": "char",          # action flag
    "LimitPrice": "float",         # limit price
    "VolumeChange": "int",         # volume change
    "ActionDate": "string",        # action date
    "ActionTime": "string",        # action time
    "TraderID": "string",          # exchange trader ID
    "InstallID": "int",            # installation ID
    "OrderLocalID": "string",      # local order ID
    "ActionLocalID": "string",     # local action ID
    "ParticipantID": "string",     # participant ID
    "ClientID": "string",          # client ID
    "BusinessUnit": "string",      # business unit
    "OrderActionStatus": "char",   # order action status
    "UserID": "string",            # user ID
    "StatusMsg": "string",         # status message
    "InstrumentID": "string",      # instrument ID
    "BranchID": "string",          # branch ID
}
structDict['CThostFtdcOrderActionField'] = CThostFtdcOrderActionField

# Exchange-side order action
CThostFtdcExchangeOrderActionField = {
    "ExchangeID": "string",        # exchange ID
    "OrderSysID": "string",        # order system ID
    "ActionFlag": "char",          # action flag
    "LimitPrice": "float",         # limit price
    "VolumeChange": "int",         # volume change
    "ActionDate": "string",        # action date
    "ActionTime": "string",        # action time
    "TraderID": "string",          # exchange trader ID
    "InstallID": "int",            # installation ID
    "OrderLocalID": "string",      # local order ID
    "ActionLocalID": "string",     # local action ID
    "ParticipantID": "string",     # participant ID
    "ClientID": "string",          # client ID
    "BusinessUnit": "string",      # business unit
    "OrderActionStatus": "char",   # order action status
    "UserID": "string",            # user ID
    "BranchID": "string",          # branch ID
}
structDict['CThostFtdcExchangeOrderActionField'] = CThostFtdcExchangeOrderActionField

# Exchange order-action failure
CThostFtdcExchangeOrderActionErrorField = {
    "ExchangeID": "string",        # exchange ID
    "OrderSysID": "string",        # order system ID
    "TraderID": "string",          # exchange trader ID
    "InstallID": "int",            # installation ID
    "OrderLocalID": "string",      # local order ID
    "ActionLocalID": "string",     # local action ID
    "ErrorID": "int",              # error code
    "ErrorMsg": "string",          # error message
    "BrokerID": "string",          # broker ID
}
structDict['CThostFtdcExchangeOrderActionErrorField'] = CThostFtdcExchangeOrderActionErrorField
# Exchange-side trade
CThostFtdcExchangeTradeField = {
    "ExchangeID": "string",       # exchange ID
    "TradeID": "string",          # trade ID
    "Direction": "char",          # direction (buy/sell)
    "OrderSysID": "string",       # order system ID
    "ParticipantID": "string",    # participant ID
    "ClientID": "string",         # client ID
    "TradingRole": "char",        # trading role
    "ExchangeInstID": "string",   # instrument ID at the exchange
    "OffsetFlag": "char",         # offset flag (open/close)
    "HedgeFlag": "char",          # hedge flag
    "Price": "float",             # price
    "Volume": "int",              # volume
    "TradeDate": "string",        # trade date
    "TradeTime": "string",        # trade time
    "TradeType": "char",          # trade type
    "PriceSource": "char",        # trade price source
    "TraderID": "string",         # exchange trader ID
    "OrderLocalID": "string",     # local order ID
    "ClearingPartID": "string",   # clearing participant ID
    "BusinessUnit": "string",     # business unit
    "SequenceNo": "int",          # sequence number
    "TradeSource": "char",        # trade source
}
structDict['CThostFtdcExchangeTradeField'] = CThostFtdcExchangeTradeField

# Trade
CThostFtdcTradeField = {
    "BrokerID": "string",         # broker ID
    "InvestorID": "string",       # investor ID
    "InstrumentID": "string",     # instrument ID
    "OrderRef": "string",         # order reference
    "UserID": "string",           # user ID
    "ExchangeID": "string",       # exchange ID
    "TradeID": "string",          # trade ID
    "Direction": "char",          # direction (buy/sell)
    "OrderSysID": "string",       # order system ID
    "ParticipantID": "string",    # participant ID
    "ClientID": "string",         # client ID
    "TradingRole": "char",        # trading role
    "ExchangeInstID": "string",   # instrument ID at the exchange
    "OffsetFlag": "char",         # offset flag (open/close)
    "HedgeFlag": "char",          # hedge flag
    "Price": "float",             # price
    "Volume": "int",              # volume
    "TradeDate": "string",        # trade date
    "TradeTime": "string",        # trade time
    "TradeType": "char",          # trade type
    "PriceSource": "char",        # trade price source
    "TraderID": "string",         # exchange trader ID
    "OrderLocalID": "string",     # local order ID
    "ClearingPartID": "string",   # clearing participant ID
    "BusinessUnit": "string",     # business unit
    "SequenceNo": "int",          # sequence number
    "TradingDay": "string",       # trading day
    "SettlementID": "int",        # settlement ID
    "BrokerOrderSeq": "int",      # broker order sequence
    "TradeSource": "char",        # trade source
}
structDict['CThostFtdcTradeField'] = CThostFtdcTradeField
# User session
CThostFtdcUserSessionField = {
    "FrontID": "int",                   # front ID
    "SessionID": "int",                 # session ID
    "BrokerID": "string",               # broker ID
    "UserID": "string",                 # user ID
    "LoginDate": "string",              # login date
    "LoginTime": "string",              # login time
    "IPAddress": "string",              # IP address
    "UserProductInfo": "string",        # user-side product info
    "InterfaceProductInfo": "string",   # interface-side product info
    "ProtocolInfo": "string",           # protocol info
    "MacAddress": "string",             # MAC address
}
structDict['CThostFtdcUserSessionField'] = CThostFtdcUserSessionField

# Query maximum order volume
CThostFtdcQueryMaxOrderVolumeField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "InstrumentID": "string",   # instrument ID
    "Direction": "char",        # direction (buy/sell)
    "OffsetFlag": "char",       # offset flag (open/close)
    "HedgeFlag": "char",        # hedge flag
    "MaxVolume": "int",         # maximum allowed order volume
    "ExchangeID": "string",     # exchange ID
}
structDict['CThostFtdcQueryMaxOrderVolumeField'] = CThostFtdcQueryMaxOrderVolumeField

# Settlement info confirmation
CThostFtdcSettlementInfoConfirmField = {
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "ConfirmDate": "string",   # confirmation date
    "ConfirmTime": "string",   # confirmation time
}
structDict['CThostFtdcSettlementInfoConfirmField'] = CThostFtdcSettlementInfoConfirmField

# Deposit/withdrawal synchronization
CThostFtdcSyncDepositField = {
    "DepositSeqNo": "string",   # deposit/withdrawal sequence number
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "Deposit": "float",         # deposit amount
    "IsForce": "int",           # whether forced
    "CurrencyID": "string",     # currency ID
}
structDict['CThostFtdcSyncDepositField'] = CThostFtdcSyncDepositField

# Currency-mortgage synchronization
CThostFtdcSyncFundMortgageField = {
    "MortgageSeqNo": "string",    # mortgage sequence number
    "BrokerID": "string",         # broker ID
    "InvestorID": "string",       # investor ID
    "FromCurrencyID": "string",   # source currency
    "MortgageAmount": "float",    # mortgage amount
    "ToCurrencyID": "string",     # target currency
}
structDict['CThostFtdcSyncFundMortgageField'] = CThostFtdcSyncFundMortgageField

# Broker synchronization
CThostFtdcBrokerSyncField = {
    "BrokerID": "string",   # broker ID
}
structDict['CThostFtdcBrokerSyncField'] = CThostFtdcBrokerSyncField
# Investor being synchronized
CThostFtdcSyncingInvestorField = {
    "InvestorID": "string",          # investor ID
    "BrokerID": "string",            # broker ID
    "InvestorGroupID": "string",     # investor group ID
    "InvestorName": "string",        # investor name
    "IdentifiedCardType": "char",    # identification card type
    "IdentifiedCardNo": "string",    # identification card number
    "IsActive": "int",               # active flag
    "Telephone": "string",           # telephone number
    "Address": "string",             # mailing address
    "OpenDate": "string",            # account-opening date
    "Mobile": "string",              # mobile number
    "CommModelID": "string",         # commission model ID
    "MarginModelID": "string",       # margin model ID
}
structDict['CThostFtdcSyncingInvestorField'] = CThostFtdcSyncingInvestorField

# Trading code being synchronized
CThostFtdcSyncingTradingCodeField = {
    "InvestorID": "string",     # investor ID
    "BrokerID": "string",       # broker ID
    "ExchangeID": "string",     # exchange ID
    "ClientID": "string",       # client ID
    "IsActive": "int",          # active flag
    "ClientIDType": "char",     # trading code type
    "BranchID": "string",       # branch ID
}
structDict['CThostFtdcSyncingTradingCodeField'] = CThostFtdcSyncingTradingCodeField

# Investor group being synchronized
CThostFtdcSyncingInvestorGroupField = {
    "BrokerID": "string",            # broker ID
    "InvestorGroupID": "string",     # investor group ID
    "InvestorGroupName": "string",   # investor group name
}
structDict['CThostFtdcSyncingInvestorGroupField'] = CThostFtdcSyncingInvestorGroupField
# Trading account being synchronized
CThostFtdcSyncingTradingAccountField = {
    "BrokerID": "string",                        # broker ID
    "AccountID": "string",                       # investor account ID
    "PreMortgage": "float",                      # previous mortgage amount
    "PreCredit": "float",                        # previous credit
    "PreDeposit": "float",                       # previous deposit
    "PreBalance": "float",                       # previous settlement reserve
    "PreMargin": "float",                        # previous occupied margin
    "InterestBase": "float",                     # interest base
    "Interest": "float",                         # interest income
    "Deposit": "float",                          # deposit amount
    "Withdraw": "float",                         # withdrawal amount
    "FrozenMargin": "float",                     # frozen margin
    "FrozenCash": "float",                       # frozen cash
    "FrozenCommission": "float",                 # frozen commission
    "CurrMargin": "float",                       # current total margin
    "CashIn": "float",                           # cash difference
    "Commission": "float",                       # commission
    "CloseProfit": "float",                      # close profit
    "PositionProfit": "float",                   # position profit
    "Balance": "float",                          # futures settlement reserve
    "Available": "float",                        # available funds
    "WithdrawQuota": "float",                    # withdrawable funds
    "Reserve": "float",                          # basic reserve
    "TradingDay": "string",                      # trading day
    "SettlementID": "int",                       # settlement ID
    "Credit": "float",                           # credit
    "Mortgage": "float",                         # mortgage amount
    "ExchangeMargin": "float",                   # exchange margin
    "DeliveryMargin": "float",                   # investor delivery margin
    "ExchangeDeliveryMargin": "float",           # exchange delivery margin
    "ReserveBalance": "float",                   # guaranteed settlement reserve
    "CurrencyID": "string",                      # currency ID
    "PreFundMortgageIn": "float",                # previous currency mortgage-in amount
    "PreFundMortgageOut": "float",               # previous currency mortgage-out amount
    "FundMortgageIn": "float",                   # currency mortgage-in amount
    "FundMortgageOut": "float",                  # currency mortgage-out amount
    "FundMortgageAvailable": "float",            # currency mortgage balance
    "MortgageableFund": "float",                 # mortgageable currency amount
    "SpecProductMargin": "float",                # special-product occupied margin
    "SpecProductFrozenMargin": "float",          # special-product frozen margin
    "SpecProductCommission": "float",            # special-product commission
    "SpecProductFrozenCommission": "float",      # special-product frozen commission
    "SpecProductPositionProfit": "float",        # special-product position profit
    "SpecProductCloseProfit": "float",           # special-product close profit
    "SpecProductPositionProfitByAlg": "float",   # special-product position profit per algorithm
    "SpecProductExchangeMargin": "float",        # special-product exchange margin
}
structDict['CThostFtdcSyncingTradingAccountField'] = CThostFtdcSyncingTradingAccountField
# Investor position record that is being synchronized.
# Maps CTP field name -> field type name ("string" / "char" / "int" / "float").
CThostFtdcSyncingInvestorPositionField = {
    "InstrumentID": "string",          # instrument ID
    "BrokerID": "string",              # broker ID
    "InvestorID": "string",            # investor ID
    "PosiDirection": "char",           # position direction (long/short)
    "HedgeFlag": "char",               # speculation/hedge flag
    "PositionDate": "char",            # position date flag
    "YdPosition": "int",               # yesterday's position
    "Position": "int",                 # today's position
    "LongFrozen": "int",               # long frozen volume
    "ShortFrozen": "int",              # short frozen volume
    "LongFrozenAmount": "float",       # frozen open amount (long)
    "ShortFrozenAmount": "float",      # frozen open amount (short)
    "OpenVolume": "int",               # open volume
    "CloseVolume": "int",              # close volume
    "OpenAmount": "float",             # open amount
    "CloseAmount": "float",            # close amount
    "PositionCost": "float",           # position cost
    "PreMargin": "float",              # margin occupied previously
    "UseMargin": "float",              # margin in use
    "FrozenMargin": "float",           # frozen margin
    "FrozenCash": "float",             # frozen cash
    "FrozenCommission": "float",       # frozen commission
    "CashIn": "float",                 # cash in/out difference
    "Commission": "float",             # commission
    "CloseProfit": "float",            # close (realized) profit
    "PositionProfit": "float",         # position (unrealized) profit
    "PreSettlementPrice": "float",     # previous settlement price
    "SettlementPrice": "float",        # current settlement price
    "TradingDay": "string",            # trading day
    "SettlementID": "int",             # settlement ID
    "OpenCost": "float",               # open cost
    "ExchangeMargin": "float",         # exchange margin
    "CombPosition": "int",             # position formed by combination trades
    "CombLongFrozen": "int",           # combination long frozen
    "CombShortFrozen": "int",          # combination short frozen
    "CloseProfitByDate": "float",      # close profit, mark-to-market by date
    "CloseProfitByTrade": "float",     # close profit, by trade
    "TodayPosition": "int",            # today's position
    "MarginRateByMoney": "float",      # margin rate by money
    "MarginRateByVolume": "float",     # margin rate by volume (per lot)
    "StrikeFrozen": "int",             # exercise (strike) frozen volume
    "StrikeFrozenAmount": "float",     # exercise frozen amount
    "AbandonFrozen": "int",            # abandon-exercise frozen volume
    "ExchangeID": "string",            # exchange ID
    "YdStrikeFrozen": "int",           # yesterday's exercise frozen volume
}
structDict['CThostFtdcSyncingInvestorPositionField'] = CThostFtdcSyncingInvestorPositionField
# Instrument margin rate that is being synchronized.
CThostFtdcSyncingInstrumentMarginRateField = {
    "InstrumentID": "string",              # instrument ID
    "InvestorRange": "char",               # investor range
    "BrokerID": "string",                  # broker ID
    "InvestorID": "string",                # investor ID
    "HedgeFlag": "char",                   # speculation/hedge flag
    "LongMarginRatioByMoney": "float",     # long margin rate by money
    "LongMarginRatioByVolume": "float",    # long margin per lot
    "ShortMarginRatioByMoney": "float",    # short margin rate by money
    "ShortMarginRatioByVolume": "float",   # short margin per lot
    "IsRelative": "int",                   # whether charged relative to the exchange rate
}
structDict['CThostFtdcSyncingInstrumentMarginRateField'] = CThostFtdcSyncingInstrumentMarginRateField

# Instrument commission rate that is being synchronized.
CThostFtdcSyncingInstrumentCommissionRateField = {
    "InstrumentID": "string",              # instrument ID
    "InvestorRange": "char",               # investor range
    "BrokerID": "string",                  # broker ID
    "InvestorID": "string",                # investor ID
    "OpenRatioByMoney": "float",           # open commission rate by money
    "OpenRatioByVolume": "float",          # open commission per lot
    "CloseRatioByMoney": "float",          # close commission rate by money
    "CloseRatioByVolume": "float",         # close commission per lot
    "CloseTodayRatioByMoney": "float",     # close-today commission rate by money
    "CloseTodayRatioByVolume": "float",    # close-today commission per lot
    "ExchangeID": "string",                # exchange ID
}
structDict['CThostFtdcSyncingInstrumentCommissionRateField'] = CThostFtdcSyncingInstrumentCommissionRateField

# Instrument trading right that is being synchronized.
CThostFtdcSyncingInstrumentTradingRightField = {
    "InstrumentID": "string",              # instrument ID
    "InvestorRange": "char",               # investor range
    "BrokerID": "string",                  # broker ID
    "InvestorID": "string",                # investor ID
    "TradingRight": "char",                # trading right
    "ExchangeID": "string",                # exchange ID
}
structDict['CThostFtdcSyncingInstrumentTradingRightField'] = CThostFtdcSyncingInstrumentTradingRightField
# Order query request.
CThostFtdcQryOrderField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "InstrumentID": "string",      # instrument ID
    "ExchangeID": "string",        # exchange ID
    "OrderSysID": "string",        # order system ID
    "InsertTimeStart": "string",   # start of insert-time window
    "InsertTimeEnd": "string",     # end of insert-time window
}
structDict['CThostFtdcQryOrderField'] = CThostFtdcQryOrderField

# Trade query request.
CThostFtdcQryTradeField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "InstrumentID": "string",      # instrument ID
    "ExchangeID": "string",        # exchange ID
    "TradeID": "string",           # trade ID
    "TradeTimeStart": "string",    # start of trade-time window
    "TradeTimeEnd": "string",      # end of trade-time window
}
structDict['CThostFtdcQryTradeField'] = CThostFtdcQryTradeField

# Investor position query request.
CThostFtdcQryInvestorPositionField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "InstrumentID": "string",      # instrument ID
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcQryInvestorPositionField'] = CThostFtdcQryInvestorPositionField

# Trading account query request.
CThostFtdcQryTradingAccountField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "CurrencyID": "string",        # currency ID
    "BizType": "char",             # business type
}
structDict['CThostFtdcQryTradingAccountField'] = CThostFtdcQryTradingAccountField
# Investor query request.
CThostFtdcQryInvestorField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
}
structDict['CThostFtdcQryInvestorField'] = CThostFtdcQryInvestorField

# Trading code query request.
CThostFtdcQryTradingCodeField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "ExchangeID": "string",        # exchange ID
    "ClientID": "string",          # client ID
    "ClientIDType": "char",        # trading code (client ID) type
}
structDict['CThostFtdcQryTradingCodeField'] = CThostFtdcQryTradingCodeField

# Investor group query request.
CThostFtdcQryInvestorGroupField = {
    "BrokerID": "string",          # broker ID
}
structDict['CThostFtdcQryInvestorGroupField'] = CThostFtdcQryInvestorGroupField

# Instrument margin rate query request.
CThostFtdcQryInstrumentMarginRateField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "InstrumentID": "string",      # instrument ID
    "HedgeFlag": "char",           # speculation/hedge flag
}
structDict['CThostFtdcQryInstrumentMarginRateField'] = CThostFtdcQryInstrumentMarginRateField

# Instrument commission rate query request.
CThostFtdcQryInstrumentCommissionRateField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "InstrumentID": "string",      # instrument ID
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcQryInstrumentCommissionRateField'] = CThostFtdcQryInstrumentCommissionRateField

# Instrument trading right query request.
CThostFtdcQryInstrumentTradingRightField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "InstrumentID": "string",      # instrument ID
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcQryInstrumentTradingRightField'] = CThostFtdcQryInstrumentTradingRightField

# Broker query request.
CThostFtdcQryBrokerField = {
    "BrokerID": "string",          # broker ID
}
structDict['CThostFtdcQryBrokerField'] = CThostFtdcQryBrokerField

# Trader query request.
CThostFtdcQryTraderField = {
    "ExchangeID": "string",        # exchange ID
    "ParticipantID": "string",     # participant (member) ID
    "TraderID": "string",          # exchange trader ID
}
structDict['CThostFtdcQryTraderField'] = CThostFtdcQryTraderField
# Super-user function permission query request.
CThostFtdcQrySuperUserFunctionField = {
    "UserID": "string",            # user ID
}
structDict['CThostFtdcQrySuperUserFunctionField'] = CThostFtdcQrySuperUserFunctionField

# User session query request.
CThostFtdcQryUserSessionField = {
    "FrontID": "int",              # front ID
    "SessionID": "int",            # session ID
    "BrokerID": "string",          # broker ID
    "UserID": "string",            # user ID
}
structDict['CThostFtdcQryUserSessionField'] = CThostFtdcQryUserSessionField

# Broker participant-code query request.
CThostFtdcQryPartBrokerField = {
    "ExchangeID": "string",        # exchange ID
    "BrokerID": "string",          # broker ID
    "ParticipantID": "string",     # participant (member) ID
}
structDict['CThostFtdcQryPartBrokerField'] = CThostFtdcQryPartBrokerField

# Front status query request.
CThostFtdcQryFrontStatusField = {
    "FrontID": "int",              # front ID
}
structDict['CThostFtdcQryFrontStatusField'] = CThostFtdcQryFrontStatusField

# Exchange order query request.
CThostFtdcQryExchangeOrderField = {
    "ParticipantID": "string",     # participant (member) ID
    "ClientID": "string",          # client ID
    "ExchangeInstID": "string",    # instrument ID on the exchange
    "ExchangeID": "string",        # exchange ID
    "TraderID": "string",          # exchange trader ID
}
structDict['CThostFtdcQryExchangeOrderField'] = CThostFtdcQryExchangeOrderField

# Order action query request.
CThostFtdcQryOrderActionField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcQryOrderActionField'] = CThostFtdcQryOrderActionField

# Exchange order action query request.
CThostFtdcQryExchangeOrderActionField = {
    "ParticipantID": "string",     # participant (member) ID
    "ClientID": "string",          # client ID
    "ExchangeID": "string",        # exchange ID
    "TraderID": "string",          # exchange trader ID
}
structDict['CThostFtdcQryExchangeOrderActionField'] = CThostFtdcQryExchangeOrderActionField

# Super-user query request.
CThostFtdcQrySuperUserField = {
    "UserID": "string",            # user ID
}
structDict['CThostFtdcQrySuperUserField'] = CThostFtdcQrySuperUserField

# Exchange query request.
CThostFtdcQryExchangeField = {
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcQryExchangeField'] = CThostFtdcQryExchangeField
# Product query request.
CThostFtdcQryProductField = {
    "ProductID": "string",         # product ID
    "ProductClass": "char",        # product class
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcQryProductField'] = CThostFtdcQryProductField

# Instrument query request.
CThostFtdcQryInstrumentField = {
    "InstrumentID": "string",      # instrument ID
    "ExchangeID": "string",        # exchange ID
    "ExchangeInstID": "string",    # instrument ID on the exchange
    "ProductID": "string",         # product ID
}
structDict['CThostFtdcQryInstrumentField'] = CThostFtdcQryInstrumentField

# Depth market data query request.
CThostFtdcQryDepthMarketDataField = {
    "InstrumentID": "string",      # instrument ID
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcQryDepthMarketDataField'] = CThostFtdcQryDepthMarketDataField

# Broker user query request.
CThostFtdcQryBrokerUserField = {
    "BrokerID": "string",          # broker ID
    "UserID": "string",            # user ID
}
structDict['CThostFtdcQryBrokerUserField'] = CThostFtdcQryBrokerUserField

# Broker user function permission query request.
CThostFtdcQryBrokerUserFunctionField = {
    "BrokerID": "string",          # broker ID
    "UserID": "string",            # user ID
}
structDict['CThostFtdcQryBrokerUserFunctionField'] = CThostFtdcQryBrokerUserFunctionField

# Trader offer query request.
CThostFtdcQryTraderOfferField = {
    "ExchangeID": "string",        # exchange ID
    "ParticipantID": "string",     # participant (member) ID
    "TraderID": "string",          # exchange trader ID
}
structDict['CThostFtdcQryTraderOfferField'] = CThostFtdcQryTraderOfferField

# Deposit/withdrawal flow query request.
CThostFtdcQrySyncDepositField = {
    "BrokerID": "string",          # broker ID
    "DepositSeqNo": "string",      # deposit/withdrawal sequence number
}
structDict['CThostFtdcQrySyncDepositField'] = CThostFtdcQrySyncDepositField

# Investor settlement info query request.
CThostFtdcQrySettlementInfoField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "TradingDay": "string",        # trading day
}
structDict['CThostFtdcQrySettlementInfoField'] = CThostFtdcQrySettlementInfoField
# Exchange margin rate query request.
CThostFtdcQryExchangeMarginRateField = {
    "BrokerID": "string",          # broker ID
    "InstrumentID": "string",      # instrument ID
    "HedgeFlag": "char",           # speculation/hedge flag
}
structDict['CThostFtdcQryExchangeMarginRateField'] = CThostFtdcQryExchangeMarginRateField

# Exchange adjusted margin rate query request.
CThostFtdcQryExchangeMarginRateAdjustField = {
    "BrokerID": "string",          # broker ID
    "InstrumentID": "string",      # instrument ID
    "HedgeFlag": "char",           # speculation/hedge flag
}
structDict['CThostFtdcQryExchangeMarginRateAdjustField'] = CThostFtdcQryExchangeMarginRateAdjustField

# Exchange rate query request.
CThostFtdcQryExchangeRateField = {
    "BrokerID": "string",          # broker ID
    "FromCurrencyID": "string",    # source currency ID
    "ToCurrencyID": "string",      # target currency ID
}
structDict['CThostFtdcQryExchangeRateField'] = CThostFtdcQryExchangeRateField

# Fund mortgage flow query request.
CThostFtdcQrySyncFundMortgageField = {
    "BrokerID": "string",          # broker ID
    "MortgageSeqNo": "string",     # fund mortgage sequence number
}
structDict['CThostFtdcQrySyncFundMortgageField'] = CThostFtdcQrySyncFundMortgageField

# Historical order query request.
CThostFtdcQryHisOrderField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "InstrumentID": "string",      # instrument ID
    "ExchangeID": "string",        # exchange ID
    "OrderSysID": "string",        # order system ID
    "InsertTimeStart": "string",   # start of insert-time window
    "InsertTimeEnd": "string",     # end of insert-time window
    "TradingDay": "string",        # trading day
    "SettlementID": "int",         # settlement ID
}
structDict['CThostFtdcQryHisOrderField'] = CThostFtdcQryHisOrderField
# Current minimum margin for an option instrument.
CThostFtdcOptionInstrMiniMarginField = {
    "InstrumentID": "string",               # instrument ID
    "InvestorRange": "char",                # investor range
    "BrokerID": "string",                   # broker ID
    "InvestorID": "string",                 # investor ID
    "MinMargin": "float",                   # minimum margin per lot
    "ValueMethod": "char",                  # value method
    "IsRelative": "int",                    # whether charged relative to the exchange rate
    "ExchangeID": "string",                 # exchange ID
}
structDict['CThostFtdcOptionInstrMiniMarginField'] = CThostFtdcOptionInstrMiniMarginField

# Current margin adjustment coefficients for an option instrument.
CThostFtdcOptionInstrMarginAdjustField = {
    "InstrumentID": "string",               # instrument ID
    "InvestorRange": "char",                # investor range
    "BrokerID": "string",                   # broker ID
    "InvestorID": "string",                 # investor ID
    "SShortMarginRatioByMoney": "float",    # speculative short margin adjustment, by money
    "SShortMarginRatioByVolume": "float",   # speculative short margin adjustment, per lot
    "HShortMarginRatioByMoney": "float",    # hedging short margin adjustment, by money
    "HShortMarginRatioByVolume": "float",   # hedging short margin adjustment, per lot
    "AShortMarginRatioByMoney": "float",    # arbitrage short margin adjustment, by money
    "AShortMarginRatioByVolume": "float",   # arbitrage short margin adjustment, per lot
    "IsRelative": "int",                    # whether charged relative to the exchange rate
    "ExchangeID": "string",                 # exchange ID
}
structDict['CThostFtdcOptionInstrMarginAdjustField'] = CThostFtdcOptionInstrMarginAdjustField

# Current commission rate details for an option instrument.
CThostFtdcOptionInstrCommRateField = {
    "InstrumentID": "string",               # instrument ID
    "InvestorRange": "char",                # investor range
    "BrokerID": "string",                   # broker ID
    "InvestorID": "string",                 # investor ID
    "OpenRatioByMoney": "float",            # open commission rate by money
    "OpenRatioByVolume": "float",           # open commission per lot
    "CloseRatioByMoney": "float",           # close commission rate by money
    "CloseRatioByVolume": "float",          # close commission per lot
    "CloseTodayRatioByMoney": "float",      # close-today commission rate by money
    "CloseTodayRatioByVolume": "float",     # close-today commission per lot
    "StrikeRatioByMoney": "float",          # exercise commission rate by money
    "StrikeRatioByVolume": "float",         # exercise commission per lot
    "ExchangeID": "string",                 # exchange ID
}
structDict['CThostFtdcOptionInstrCommRateField'] = CThostFtdcOptionInstrCommRateField
# Option trading cost.
CThostFtdcOptionInstrTradeCostField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "InstrumentID": "string",      # instrument ID
    "HedgeFlag": "char",           # speculation/hedge flag
    "FixedMargin": "float",        # fixed part of the option margin
    "MiniMargin": "float",         # minimum option margin
    "Royalty": "float",            # option premium (royalty)
    "ExchFixedMargin": "float",    # exchange fixed part of the option margin
    "ExchMiniMargin": "float",     # exchange minimum option margin
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcOptionInstrTradeCostField'] = CThostFtdcOptionInstrTradeCostField

# Option trading cost query request.
CThostFtdcQryOptionInstrTradeCostField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "InstrumentID": "string",      # instrument ID
    "HedgeFlag": "char",           # speculation/hedge flag
    "InputPrice": "float",         # quoted option price
    "UnderlyingPrice": "float",    # underlying price; 0 means use yesterday's settlement price
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcQryOptionInstrTradeCostField'] = CThostFtdcQryOptionInstrTradeCostField

# Option commission rate query request.
CThostFtdcQryOptionInstrCommRateField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "InstrumentID": "string",      # instrument ID
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcQryOptionInstrCommRateField'] = CThostFtdcQryOptionInstrCommRateField

# Stock index spot price.
CThostFtdcIndexPriceField = {
    "BrokerID": "string",          # broker ID
    "InstrumentID": "string",      # instrument ID
    "ClosePrice": "float",         # index spot closing price
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcIndexPriceField'] = CThostFtdcIndexPriceField
# Exercise declaration input.
CThostFtdcInputExecOrderField = {
    "BrokerID": "string",             # broker ID
    "InvestorID": "string",           # investor ID
    "InstrumentID": "string",         # instrument ID
    "ExecOrderRef": "string",         # exercise declaration reference
    "UserID": "string",               # user ID
    "Volume": "int",                  # volume
    "RequestID": "int",               # request ID
    "BusinessUnit": "string",         # business unit
    "OffsetFlag": "char",             # open/close (offset) flag
    "HedgeFlag": "char",              # speculation/hedge flag
    "ActionType": "char",             # action type (exercise/abandon)
    "PosiDirection": "char",          # position direction for the reserved position
    "ReservePositionFlag": "char",    # whether to keep the futures position after exercise
    "CloseFlag": "char",              # whether the position generated by exercise is auto-closed
    "ExchangeID": "string",           # exchange ID
}
structDict['CThostFtdcInputExecOrderField'] = CThostFtdcInputExecOrderField

# Exercise declaration action input.
CThostFtdcInputExecOrderActionField = {
    "BrokerID": "string",             # broker ID
    "InvestorID": "string",           # investor ID
    "ExecOrderActionRef": "int",      # exercise declaration action reference
    "ExecOrderRef": "string",         # exercise declaration reference
    "RequestID": "int",               # request ID
    "FrontID": "int",                 # front ID
    "SessionID": "int",               # session ID
    "ExchangeID": "string",           # exchange ID
    "ExecOrderSysID": "string",       # exercise declaration system ID
    "ActionFlag": "char",             # action flag
    "UserID": "string",               # user ID
    "InstrumentID": "string",         # instrument ID
}
structDict['CThostFtdcInputExecOrderActionField'] = CThostFtdcInputExecOrderActionField
# Exercise declaration.
CThostFtdcExecOrderField = {
    "BrokerID": "string",             # broker ID
    "InvestorID": "string",           # investor ID
    "InstrumentID": "string",         # instrument ID
    "ExecOrderRef": "string",         # exercise declaration reference
    "UserID": "string",               # user ID
    "Volume": "int",                  # volume
    "RequestID": "int",               # request ID
    "BusinessUnit": "string",         # business unit
    "OffsetFlag": "char",             # open/close (offset) flag
    "HedgeFlag": "char",              # speculation/hedge flag
    "ActionType": "char",             # action type (exercise/abandon)
    "PosiDirection": "char",          # position direction for the reserved position
    "ReservePositionFlag": "char",    # whether to keep the futures position after exercise
    "CloseFlag": "char",              # whether the position generated by exercise is auto-closed
    "ExecOrderLocalID": "string",     # local exercise declaration ID
    "ExchangeID": "string",           # exchange ID
    "ParticipantID": "string",        # participant (member) ID
    "ClientID": "string",             # client ID
    "ExchangeInstID": "string",       # instrument ID on the exchange
    "TraderID": "string",             # exchange trader ID
    "InstallID": "int",               # installation ID
    "OrderSubmitStatus": "char",      # submit status of the exercise declaration
    "NotifySequence": "int",          # notification sequence number
    "TradingDay": "string",           # trading day
    "SettlementID": "int",            # settlement ID
    "ExecOrderSysID": "string",       # exercise declaration system ID
    "InsertDate": "string",           # insert date
    "InsertTime": "string",           # insert time
    "CancelTime": "string",           # cancel time
    "ExecResult": "char",             # execution result
    "ClearingPartID": "string",       # clearing member ID
    "SequenceNo": "int",              # sequence number
    "FrontID": "int",                 # front ID
    "SessionID": "int",               # session ID
    "UserProductInfo": "string",      # user product info
    "StatusMsg": "string",            # status message
    "ActiveUserID": "string",         # operating user ID
    "BrokerExecOrderSeq": "int",      # broker exercise declaration sequence number
    "BranchID": "string",             # branch ID
}
structDict['CThostFtdcExecOrderField'] = CThostFtdcExecOrderField
# Exercise declaration action.
CThostFtdcExecOrderActionField = {
    "BrokerID": "string",             # broker ID
    "InvestorID": "string",           # investor ID
    "ExecOrderActionRef": "int",      # exercise declaration action reference
    "ExecOrderRef": "string",         # exercise declaration reference
    "RequestID": "int",               # request ID
    "FrontID": "int",                 # front ID
    "SessionID": "int",               # session ID
    "ExchangeID": "string",           # exchange ID
    "ExecOrderSysID": "string",       # exercise declaration system ID
    "ActionFlag": "char",             # action flag
    "ActionDate": "string",           # action date
    "ActionTime": "string",           # action time
    "TraderID": "string",             # exchange trader ID
    "InstallID": "int",               # installation ID
    "ExecOrderLocalID": "string",     # local exercise declaration ID
    "ActionLocalID": "string",        # local action ID
    "ParticipantID": "string",        # participant (member) ID
    "ClientID": "string",             # client ID
    "BusinessUnit": "string",         # business unit
    "OrderActionStatus": "char",      # order action status
    "UserID": "string",               # user ID
    "ActionType": "char",             # action type (exercise/abandon)
    "StatusMsg": "string",            # status message
    "InstrumentID": "string",         # instrument ID
    "BranchID": "string",             # branch ID
}
structDict['CThostFtdcExecOrderActionField'] = CThostFtdcExecOrderActionField
# Exercise declaration query request.
CThostFtdcQryExecOrderField = {
    "BrokerID": "string",             # broker ID
    "InvestorID": "string",           # investor ID
    "InstrumentID": "string",         # instrument ID
    "ExchangeID": "string",           # exchange ID
    "ExecOrderSysID": "string",       # exercise declaration system ID
    "InsertTimeStart": "string",      # start of insert-time window
    "InsertTimeEnd": "string",        # end of insert-time window
}
structDict['CThostFtdcQryExecOrderField'] = CThostFtdcQryExecOrderField

# Exchange-side exercise declaration info.
CThostFtdcExchangeExecOrderField = {
    "Volume": "int",                  # volume
    "RequestID": "int",               # request ID
    "BusinessUnit": "string",         # business unit
    "OffsetFlag": "char",             # open/close (offset) flag
    "HedgeFlag": "char",              # speculation/hedge flag
    "ActionType": "char",             # action type (exercise/abandon)
    "PosiDirection": "char",          # position direction for the reserved position
    "ReservePositionFlag": "char",    # whether to keep the futures position after exercise
    "CloseFlag": "char",              # whether the position generated by exercise is auto-closed
    "ExecOrderLocalID": "string",     # local exercise declaration ID
    "ExchangeID": "string",           # exchange ID
    "ParticipantID": "string",        # participant (member) ID
    "ClientID": "string",             # client ID
    "ExchangeInstID": "string",       # instrument ID on the exchange
    "TraderID": "string",             # exchange trader ID
    "InstallID": "int",               # installation ID
    "OrderSubmitStatus": "char",      # submit status of the exercise declaration
    "NotifySequence": "int",          # notification sequence number
    "TradingDay": "string",           # trading day
    "SettlementID": "int",            # settlement ID
    "ExecOrderSysID": "string",       # exercise declaration system ID
    "InsertDate": "string",           # insert date
    "InsertTime": "string",           # insert time
    "CancelTime": "string",           # cancel time
    "ExecResult": "char",             # execution result
    "ClearingPartID": "string",       # clearing member ID
    "SequenceNo": "int",              # sequence number
    "BranchID": "string",             # branch ID
}
structDict['CThostFtdcExchangeExecOrderField'] = CThostFtdcExchangeExecOrderField

# Exchange-side exercise declaration query request.
CThostFtdcQryExchangeExecOrderField = {
    "ParticipantID": "string",        # participant (member) ID
    "ClientID": "string",             # client ID
    "ExchangeInstID": "string",       # instrument ID on the exchange
    "ExchangeID": "string",           # exchange ID
    "TraderID": "string",             # exchange trader ID
}
structDict['CThostFtdcQryExchangeExecOrderField'] = CThostFtdcQryExchangeExecOrderField

# Exercise declaration action query request.
CThostFtdcQryExecOrderActionField = {
    "BrokerID": "string",             # broker ID
    "InvestorID": "string",           # investor ID
    "ExchangeID": "string",           # exchange ID
}
structDict['CThostFtdcQryExecOrderActionField'] = CThostFtdcQryExecOrderActionField
#交易所执行宣告操作
CThostFtdcExchangeExecOrderActionField = {}
#交易所代码
CThostFtdcExchangeExecOrderActionField["ExchangeID"] = "string"
#执行宣告操作编号
CThostFtdcExchangeExecOrderActionField["ExecOrderSysID"] = "string"
#操作标志
CThostFtdcExchangeExecOrderActionField["ActionFlag"] = "char"
#操作日期
CThostFtdcExchangeExecOrderActionField["ActionDate"] = "string"
#操作时间
CThostFtdcExchangeExecOrderActionField["ActionTime"] = "string"
#交易所交易员代码
CThostFtdcExchangeExecOrderActionField["TraderID"] = "string"
#安装编号
CThostFtdcExchangeExecOrderActionField["InstallID"] = "int"
#本地执行宣告编号
CThostFtdcExchangeExecOrderActionField["ExecOrderLocalID"] = "string"
#操作本地编号
CThostFtdcExchangeExecOrderActionField["ActionLocalID"] = "string"
#会员代码
CThostFtdcExchangeExecOrderActionField["ParticipantID"] = "string"
#客户代码
CThostFtdcExchangeExecOrderActionField["ClientID"] = "string"
#业务单元
CThostFtdcExchangeExecOrderActionField["BusinessUnit"] = "string"
#报单操作状态
CThostFtdcExchangeExecOrderActionField["OrderActionStatus"] = "char"
#用户代码
CThostFtdcExchangeExecOrderActionField["UserID"] = "string"
#执行类型
CThostFtdcExchangeExecOrderActionField["ActionType"] = "char"
#营业部编号
CThostFtdcExchangeExecOrderActionField["BranchID"] = "string"
structDict['CThostFtdcExchangeExecOrderActionField'] = CThostFtdcExchangeExecOrderActionField
#交易所执行宣告操作查询
CThostFtdcQryExchangeExecOrderActionField = {}
#会员代码
CThostFtdcQryExchangeExecOrderActionField["ParticipantID"] = "string"
#客户代码
CThostFtdcQryExchangeExecOrderActionField["ClientID"] = "string"
#交易所代码
CThostFtdcQryExchangeExecOrderActionField["ExchangeID"] = "string"
#交易所交易员代码
CThostFtdcQryExchangeExecOrderActionField["TraderID"] = "string"
structDict['CThostFtdcQryExchangeExecOrderActionField'] = CThostFtdcQryExchangeExecOrderActionField
# Erroneous execution order (rejected exercise declaration).
CThostFtdcErrExecOrderField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "ExecOrderRef": "string",       # exec-order reference
    "UserID": "string",
    "Volume": "int",
    "RequestID": "int",
    "BusinessUnit": "string",
    "OffsetFlag": "char",           # open/close flag
    "HedgeFlag": "char",            # speculation/hedge flag
    "ActionType": "char",
    "PosiDirection": "char",        # direction of the reserved position
    "ReservePositionFlag": "char",  # keep the futures position after exercise?
    "CloseFlag": "char",            # auto-close the position created by exercise?
    "ExchangeID": "string",
    "ErrorID": "int",
    "ErrorMsg": "string",
}
structDict['CThostFtdcErrExecOrderField'] = CThostFtdcErrExecOrderField

# Query of erroneous execution orders.
CThostFtdcQryErrExecOrderField = {
    "BrokerID": "string",
    "InvestorID": "string",
}
structDict['CThostFtdcQryErrExecOrderField'] = CThostFtdcQryErrExecOrderField

# Erroneous execution-order action.
CThostFtdcErrExecOrderActionField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "ExecOrderActionRef": "int",    # exec-order action reference
    "ExecOrderRef": "string",
    "RequestID": "int",
    "FrontID": "int",
    "SessionID": "int",
    "ExchangeID": "string",
    "ExecOrderSysID": "string",
    "ActionFlag": "char",
    "UserID": "string",
    "InstrumentID": "string",
    "ErrorID": "int",
    "ErrorMsg": "string",
}
structDict['CThostFtdcErrExecOrderActionField'] = CThostFtdcErrExecOrderActionField

# Query of erroneous execution-order actions.
CThostFtdcQryErrExecOrderActionField = {
    "BrokerID": "string",
    "InvestorID": "string",
}
structDict['CThostFtdcQryErrExecOrderActionField'] = CThostFtdcQryErrExecOrderActionField
# Investor trading rights on an option instrument.
CThostFtdcOptionInstrTradingRightField = {
    "InstrumentID": "string",
    "InvestorRange": "char",        # investor range
    "BrokerID": "string",
    "InvestorID": "string",
    "Direction": "char",            # buy/sell direction
    "TradingRight": "char",         # granted trading right
    "ExchangeID": "string",
    "HedgeFlag": "char",            # speculation/hedge flag
}
structDict['CThostFtdcOptionInstrTradingRightField'] = CThostFtdcOptionInstrTradingRightField

# Option-instrument trading-right query.
CThostFtdcQryOptionInstrTradingRightField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "Direction": "char",
    "ExchangeID": "string",
}
structDict['CThostFtdcQryOptionInstrTradingRightField'] = CThostFtdcQryOptionInstrTradingRightField
# Input quote request (for-quote) sent by an investor.
CThostFtdcInputForQuoteField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "ForQuoteRef": "string",        # for-quote reference
    "UserID": "string",
    "ExchangeID": "string",
}
structDict['CThostFtdcInputForQuoteField'] = CThostFtdcInputForQuoteField

# Quote request (for-quote) record.
CThostFtdcForQuoteField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "ForQuoteRef": "string",
    "UserID": "string",
    "ForQuoteLocalID": "string",    # local for-quote ID
    "ExchangeID": "string",
    "ParticipantID": "string",
    "ClientID": "string",
    "ExchangeInstID": "string",
    "TraderID": "string",
    "InstallID": "int",
    "InsertDate": "string",
    "InsertTime": "string",
    "ForQuoteStatus": "char",       # for-quote status
    "FrontID": "int",
    "SessionID": "int",
    "StatusMsg": "string",
    "ActiveUserID": "string",       # acting user
    "BrokerForQutoSeq": "int",      # broker for-quote sequence (typo is in the C API)
}
structDict['CThostFtdcForQuoteField'] = CThostFtdcForQuoteField

# For-quote query.
CThostFtdcQryForQuoteField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "ExchangeID": "string",
    "InsertTimeStart": "string",    # time window start
    "InsertTimeEnd": "string",      # time window end
}
structDict['CThostFtdcQryForQuoteField'] = CThostFtdcQryForQuoteField

# Exchange-side for-quote record.
CThostFtdcExchangeForQuoteField = {
    "ForQuoteLocalID": "string",
    "ExchangeID": "string",
    "ParticipantID": "string",
    "ClientID": "string",
    "ExchangeInstID": "string",
    "TraderID": "string",
    "InstallID": "int",
    "InsertDate": "string",
    "InsertTime": "string",
    "ForQuoteStatus": "char",
}
structDict['CThostFtdcExchangeForQuoteField'] = CThostFtdcExchangeForQuoteField

# Exchange-side for-quote query.
CThostFtdcQryExchangeForQuoteField = {
    "ParticipantID": "string",
    "ClientID": "string",
    "ExchangeInstID": "string",
    "ExchangeID": "string",
    "TraderID": "string",
}
structDict['CThostFtdcQryExchangeForQuoteField'] = CThostFtdcQryExchangeForQuoteField
# Input (two-sided) quote submitted by a market maker.
CThostFtdcInputQuoteField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "QuoteRef": "string",           # quote reference
    "UserID": "string",
    "AskPrice": "float",
    "BidPrice": "float",
    "AskVolume": "int",
    "BidVolume": "int",
    "RequestID": "int",
    "BusinessUnit": "string",
    "AskOffsetFlag": "char",        # ask-side open/close flag
    "BidOffsetFlag": "char",        # bid-side open/close flag
    "AskHedgeFlag": "char",         # ask-side speculation/hedge flag
    "BidHedgeFlag": "char",         # bid-side speculation/hedge flag
    "AskOrderRef": "string",        # derived ask order reference
    "BidOrderRef": "string",        # derived bid order reference
    "ForQuoteSysID": "string",      # for-quote system ID being answered
    "ExchangeID": "string",
}
structDict['CThostFtdcInputQuoteField'] = CThostFtdcInputQuoteField

# Input quote action (e.g. cancellation).
CThostFtdcInputQuoteActionField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "QuoteActionRef": "int",        # quote action reference
    "QuoteRef": "string",
    "RequestID": "int",
    "FrontID": "int",
    "SessionID": "int",
    "ExchangeID": "string",
    "QuoteSysID": "string",         # quote system ID
    "ActionFlag": "char",
    "UserID": "string",
    "InstrumentID": "string",
}
structDict['CThostFtdcInputQuoteActionField'] = CThostFtdcInputQuoteActionField
# Quote record (full life-cycle state of a two-sided quote).
CThostFtdcQuoteField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "QuoteRef": "string",
    "UserID": "string",
    "AskPrice": "float",
    "BidPrice": "float",
    "AskVolume": "int",
    "BidVolume": "int",
    "RequestID": "int",
    "BusinessUnit": "string",
    "AskOffsetFlag": "char",        # ask-side open/close flag
    "BidOffsetFlag": "char",        # bid-side open/close flag
    "AskHedgeFlag": "char",         # ask-side speculation/hedge flag
    "BidHedgeFlag": "char",         # bid-side speculation/hedge flag
    "QuoteLocalID": "string",       # local quote ID
    "ExchangeID": "string",
    "ParticipantID": "string",
    "ClientID": "string",
    "ExchangeInstID": "string",
    "TraderID": "string",
    "InstallID": "int",
    "NotifySequence": "int",
    "OrderSubmitStatus": "char",
    "TradingDay": "string",
    "SettlementID": "int",
    "QuoteSysID": "string",         # exchange-assigned quote ID
    "InsertDate": "string",
    "InsertTime": "string",
    "CancelTime": "string",
    "QuoteStatus": "char",
    "ClearingPartID": "string",     # clearing member ID
    "SequenceNo": "int",
    "AskOrderSysID": "string",      # system ID of the order derived from the ask side
    "BidOrderSysID": "string",      # system ID of the order derived from the bid side
    "FrontID": "int",
    "SessionID": "int",
    "UserProductInfo": "string",
    "StatusMsg": "string",
    "ActiveUserID": "string",
    "BrokerQuoteSeq": "int",        # broker quote sequence
    "AskOrderRef": "string",
    "BidOrderRef": "string",
    "ForQuoteSysID": "string",      # for-quote being answered
    "BranchID": "string",
}
structDict['CThostFtdcQuoteField'] = CThostFtdcQuoteField
# Quote action record.
CThostFtdcQuoteActionField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "QuoteActionRef": "int",
    "QuoteRef": "string",
    "RequestID": "int",
    "FrontID": "int",
    "SessionID": "int",
    "ExchangeID": "string",
    "QuoteSysID": "string",
    "ActionFlag": "char",
    "ActionDate": "string",
    "ActionTime": "string",
    "TraderID": "string",
    "InstallID": "int",
    "QuoteLocalID": "string",
    "ActionLocalID": "string",
    "ParticipantID": "string",
    "ClientID": "string",
    "BusinessUnit": "string",
    "OrderActionStatus": "char",
    "UserID": "string",
    "StatusMsg": "string",
    "InstrumentID": "string",
    "BranchID": "string",
}
structDict['CThostFtdcQuoteActionField'] = CThostFtdcQuoteActionField

# Quote query.
CThostFtdcQryQuoteField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "ExchangeID": "string",
    "QuoteSysID": "string",
    "InsertTimeStart": "string",    # time window start
    "InsertTimeEnd": "string",      # time window end
}
structDict['CThostFtdcQryQuoteField'] = CThostFtdcQryQuoteField
# Exchange-side quote record.
CThostFtdcExchangeQuoteField = {
    "AskPrice": "float",
    "BidPrice": "float",
    "AskVolume": "int",
    "BidVolume": "int",
    "RequestID": "int",
    "BusinessUnit": "string",
    "AskOffsetFlag": "char",        # ask-side open/close flag
    "BidOffsetFlag": "char",        # bid-side open/close flag
    "AskHedgeFlag": "char",         # ask-side speculation/hedge flag
    "BidHedgeFlag": "char",         # bid-side speculation/hedge flag
    "QuoteLocalID": "string",
    "ExchangeID": "string",
    "ParticipantID": "string",
    "ClientID": "string",
    "ExchangeInstID": "string",
    "TraderID": "string",
    "InstallID": "int",
    "NotifySequence": "int",
    "OrderSubmitStatus": "char",
    "TradingDay": "string",
    "SettlementID": "int",
    "QuoteSysID": "string",
    "InsertDate": "string",
    "InsertTime": "string",
    "CancelTime": "string",
    "QuoteStatus": "char",
    "ClearingPartID": "string",
    "SequenceNo": "int",
    "AskOrderSysID": "string",
    "BidOrderSysID": "string",
    "ForQuoteSysID": "string",
    "BranchID": "string",
}
structDict['CThostFtdcExchangeQuoteField'] = CThostFtdcExchangeQuoteField

# Exchange-side quote query.
CThostFtdcQryExchangeQuoteField = {
    "ParticipantID": "string",
    "ClientID": "string",
    "ExchangeInstID": "string",
    "ExchangeID": "string",
    "TraderID": "string",
}
structDict['CThostFtdcQryExchangeQuoteField'] = CThostFtdcQryExchangeQuoteField
# Quote-action query.
CThostFtdcQryQuoteActionField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "ExchangeID": "string",
}
structDict['CThostFtdcQryQuoteActionField'] = CThostFtdcQryQuoteActionField

# Exchange-side quote action.
CThostFtdcExchangeQuoteActionField = {
    "ExchangeID": "string",
    "QuoteSysID": "string",
    "ActionFlag": "char",
    "ActionDate": "string",
    "ActionTime": "string",
    "TraderID": "string",
    "InstallID": "int",
    "QuoteLocalID": "string",
    "ActionLocalID": "string",
    "ParticipantID": "string",
    "ClientID": "string",
    "BusinessUnit": "string",
    "OrderActionStatus": "char",
    "UserID": "string",
}
structDict['CThostFtdcExchangeQuoteActionField'] = CThostFtdcExchangeQuoteActionField

# Exchange-side quote-action query.
CThostFtdcQryExchangeQuoteActionField = {
    "ParticipantID": "string",
    "ClientID": "string",
    "ExchangeID": "string",
    "TraderID": "string",
}
structDict['CThostFtdcQryExchangeQuoteActionField'] = CThostFtdcQryExchangeQuoteActionField
# Option instrument delta value.
CThostFtdcOptionInstrDeltaField = {
    "InstrumentID": "string",
    "InvestorRange": "char",
    "BrokerID": "string",
    "InvestorID": "string",
    "Delta": "float",
    "ExchangeID": "string",
}
structDict['CThostFtdcOptionInstrDeltaField'] = CThostFtdcOptionInstrDeltaField

# For-quote request forwarded to market makers.
CThostFtdcForQuoteRspField = {
    "TradingDay": "string",
    "InstrumentID": "string",
    "ForQuoteSysID": "string",
    "ForQuoteTime": "string",
    "ActionDay": "string",          # business (action) day
    "ExchangeID": "string",
}
structDict['CThostFtdcForQuoteRspField'] = CThostFtdcForQuoteRspField

# Current option strike-offset detail.
CThostFtdcStrikeOffsetField = {
    "InstrumentID": "string",
    "InvestorRange": "char",
    "BrokerID": "string",
    "InvestorID": "string",
    "Offset": "float",              # strike offset value
    "ExchangeID": "string",
}
structDict['CThostFtdcStrikeOffsetField'] = CThostFtdcStrikeOffsetField

# Strike-offset query.
CThostFtdcQryStrikeOffsetField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
}
structDict['CThostFtdcQryStrikeOffsetField'] = CThostFtdcQryStrikeOffsetField
# Input lock request (lock underlying securities for covered options).
CThostFtdcInputLockField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "LockRef": "string",            # lock reference
    "UserID": "string",
    "Volume": "int",
    "RequestID": "int",
    "BusinessUnit": "string",
    "LockType": "char",             # lock type
    "ExchangeID": "string",
}
structDict['CThostFtdcInputLockField'] = CThostFtdcInputLockField

# Lock record (full life-cycle state).
CThostFtdcLockField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "LockRef": "string",
    "UserID": "string",
    "Volume": "int",
    "RequestID": "int",
    "BusinessUnit": "string",
    "LockType": "char",
    "LockLocalID": "string",        # local lock ID
    "ExchangeID": "string",
    "ParticipantID": "string",
    "ClientID": "string",
    "ExchangeInstID": "string",
    "TraderID": "string",
    "InstallID": "int",
    "OrderSubmitStatus": "char",
    "NotifySequence": "int",
    "TradingDay": "string",
    "SettlementID": "int",
    "LockSysID": "string",          # exchange-assigned lock ID
    "InsertDate": "string",
    "InsertTime": "string",
    "CancelTime": "string",
    "LockStatus": "char",
    "ClearingPartID": "string",
    "SequenceNo": "int",
    "FrontID": "int",
    "SessionID": "int",
    "UserProductInfo": "string",
    "StatusMsg": "string",
    "ActiveUserID": "string",
    "BrokerLockSeq": "int",         # broker lock sequence
    "BranchID": "string",
}
structDict['CThostFtdcLockField'] = CThostFtdcLockField
# Lock query.
CThostFtdcQryLockField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "ExchangeID": "string",
    "LockSysID": "string",
    "InsertTimeStart": "string",    # time window start
    "InsertTimeEnd": "string",      # time window end
}
structDict['CThostFtdcQryLockField'] = CThostFtdcQryLockField

# Locked securities position.
CThostFtdcLockPositionField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "ExchangeID": "string",
    "Volume": "int",
    "FrozenVolume": "int",          # frozen quantity
}
structDict['CThostFtdcLockPositionField'] = CThostFtdcLockPositionField

# Locked securities position query.
CThostFtdcQryLockPositionField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "ExchangeID": "string",
}
structDict['CThostFtdcQryLockPositionField'] = CThostFtdcQryLockPositionField

# ETF option commission-rate detail.
CThostFtdcETFOptionInstrCommRateField = {
    "InstrumentID": "string",
    "InvestorRange": "char",
    "BrokerID": "string",
    "InvestorID": "string",
    "OpenRatioByMoney": "float",        # open commission, by notional
    "OpenRatioByVolume": "float",       # open commission, per lot
    "CloseRatioByMoney": "float",       # close commission, by notional
    "CloseRatioByVolume": "float",      # close commission, per lot
    "CloseTodayRatioByMoney": "float",  # close-today commission, by notional
    "CloseTodayRatioByVolume": "float", # close-today commission, per lot
    "StrikeRatioByMoney": "float",      # exercise commission, by notional
    "StrikeRatioByVolume": "float",     # exercise commission, per lot
    "ExchangeID": "string",
    "HedgeFlag": "char",
    "PosiDirection": "char",            # position direction
}
structDict['CThostFtdcETFOptionInstrCommRateField'] = CThostFtdcETFOptionInstrCommRateField

# ETF option commission-rate query.
CThostFtdcQryETFOptionInstrCommRateField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "ExchangeID": "string",
}
structDict['CThostFtdcQryETFOptionInstrCommRateField'] = CThostFtdcQryETFOptionInstrCommRateField
# Input position freeze.
CThostFtdcPosiFreezeField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "InstrumentID": "string",
    "ExchangeID": "string",
    "OrderLocalID": "string",       # local order ID
    "TraderID": "string",
    "ParticipantID": "string",
    "InstallID": "int",
    "Volume": "int",
    "FreezeReasonType": "char",     # reason for the freeze
    "FreezeType": "char",           # freeze type
}
structDict['CThostFtdcPosiFreezeField'] = CThostFtdcPosiFreezeField

# Exchange-side lock query.
CThostFtdcQryExchangeLockField = {
    "ParticipantID": "string",
    "ClientID": "string",
    "ExchangeInstID": "string",
    "ExchangeID": "string",
    "TraderID": "string",
}
structDict['CThostFtdcQryExchangeLockField'] = CThostFtdcQryExchangeLockField

# Exchange-side lock record.
CThostFtdcExchangeLockField = {
    "Volume": "int",
    "RequestID": "int",
    "BusinessUnit": "string",
    "LockType": "char",
    "LockLocalID": "string",
    "ExchangeID": "string",
    "ParticipantID": "string",
    "ClientID": "string",
    "ExchangeInstID": "string",
    "TraderID": "string",
    "InstallID": "int",
    "OrderSubmitStatus": "char",
    "NotifySequence": "int",
    "TradingDay": "string",
    "SettlementID": "int",
    "LockSysID": "string",
    "InsertDate": "string",
    "InsertTime": "string",
    "CancelTime": "string",
    "LockStatus": "char",
    "ClearingPartID": "string",
    "SequenceNo": "int",
    "BranchID": "string",
}
structDict['CThostFtdcExchangeLockField'] = CThostFtdcExchangeLockField
# Exchange execution-order action error.
CThostFtdcExchangeExecOrderActionErrorField = {
    "ExchangeID": "string",
    "ExecOrderSysID": "string",
    "TraderID": "string",
    "InstallID": "int",
    "ExecOrderLocalID": "string",
    "ActionLocalID": "string",
    "ErrorID": "int",
    "ErrorMsg": "string",
    "BrokerID": "string",
}
structDict['CThostFtdcExchangeExecOrderActionErrorField'] = CThostFtdcExchangeExecOrderActionErrorField

# Input batch order action (cancel all orders in one request).
CThostFtdcInputBatchOrderActionField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "OrderActionRef": "int",        # order action reference
    "RequestID": "int",
    "FrontID": "int",
    "SessionID": "int",
    "ExchangeID": "string",
    "UserID": "string",
}
structDict['CThostFtdcInputBatchOrderActionField'] = CThostFtdcInputBatchOrderActionField

# Batch order action record.
CThostFtdcBatchOrderActionField = {
    "BrokerID": "string",
    "InvestorID": "string",
    "OrderActionRef": "int",
    "RequestID": "int",
    "FrontID": "int",
    "SessionID": "int",
    "ExchangeID": "string",
    "ActionDate": "string",
    "ActionTime": "string",
    "TraderID": "string",
    "InstallID": "int",
    "ActionLocalID": "string",
    "ParticipantID": "string",
    "ClientID": "string",
    "BusinessUnit": "string",
    "OrderActionStatus": "char",
    "UserID": "string",
    "StatusMsg": "string",
}
structDict['CThostFtdcBatchOrderActionField'] = CThostFtdcBatchOrderActionField
# Exchange-side batch order action record (field name -> wire type).
CThostFtdcExchangeBatchOrderActionField = {
    "ExchangeID": "string",        # exchange ID
    "ActionDate": "string",        # action date
    "ActionTime": "string",        # action time
    "TraderID": "string",          # exchange trader ID
    "InstallID": "int",            # installation ID
    "ActionLocalID": "string",     # local action ID
    "ParticipantID": "string",     # participant (member) ID
    "ClientID": "string",          # client ID
    "BusinessUnit": "string",      # business unit
    "OrderActionStatus": "char",   # order action status
    "UserID": "string",            # user ID
}
structDict['CThostFtdcExchangeBatchOrderActionField'] = CThostFtdcExchangeBatchOrderActionField

# Query filter for batch order actions.
CThostFtdcQryBatchOrderActionField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
    "ExchangeID": "string",  # exchange ID
}
structDict['CThostFtdcQryBatchOrderActionField'] = CThostFtdcQryBatchOrderActionField
# Investor position limit (field name -> wire type).
CThostFtdcLimitPosiField = {
    "BrokerID": "string",         # broker ID
    "InvestorID": "string",       # investor ID
    "InstrumentID": "string",     # instrument ID
    "ExchangeID": "string",       # exchange ID
    "TotalVolume": "int",         # total volume limit
    "LongVolume": "int",          # long volume limit
    "OpenVolume": "int",          # same-day long open volume limit
    "LongAmount": "float",        # long position amount limit
    "TotalVolumeFrozen": "int",   # frozen total volume
    "LongVolumeFrozen": "int",    # frozen long volume
    "OpenVolumeFrozen": "int",    # frozen same-day long open volume
    "LongAmountFrozen": "float",  # frozen long position amount
}
structDict['CThostFtdcLimitPosiField'] = CThostFtdcLimitPosiField

# Query filter for investor position limits.
CThostFtdcQryLimitPosiField = {
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "InstrumentID": "string",  # instrument ID
    "ExchangeID": "string",    # exchange ID
}
structDict['CThostFtdcQryLimitPosiField'] = CThostFtdcQryLimitPosiField

# Broker position limit; volumes are large-volume (float) typed here.
CThostFtdcBrokerLimitPosiField = {
    "BrokerID": "string",           # broker ID
    "InstrumentID": "string",       # instrument ID
    "ExchangeID": "string",         # exchange ID
    "TotalVolume": "float",         # total volume limit
    "LongVolume": "float",          # long volume limit
    "TotalVolumeFrozen": "float",   # frozen total volume
    "LongVolumeFrozen": "float",    # frozen long volume
}
structDict['CThostFtdcBrokerLimitPosiField'] = CThostFtdcBrokerLimitPosiField

# Query filter for broker position limits.
CThostFtdcQryBrokerLimitPosiField = {
    "BrokerID": "string",      # broker ID
    "InstrumentID": "string",  # instrument ID
    "ExchangeID": "string",    # exchange ID
}
structDict['CThostFtdcQryBrokerLimitPosiField'] = CThostFtdcQryBrokerLimitPosiField

# Investor stock (securities) position limit.
CThostFtdcLimitPosiSField = {
    "BrokerID": "string",        # broker ID
    "InvestorID": "string",      # investor ID
    "InstrumentID": "string",    # instrument ID
    "ExchangeID": "string",      # exchange ID
    "TotalVolume": "int",        # total volume limit
    "OpenVolume": "int",         # same-day open volume limit
    "TotalVolumeFrozen": "int",  # frozen total volume
    "OpenVolumeFrozen": "int",   # frozen same-day open volume
}
structDict['CThostFtdcLimitPosiSField'] = CThostFtdcLimitPosiSField

# Query filter for investor stock position limits.
CThostFtdcQryLimitPosiSField = {
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "InstrumentID": "string",  # instrument ID
    "ExchangeID": "string",    # exchange ID
}
structDict['CThostFtdcQryLimitPosiSField'] = CThostFtdcQryLimitPosiSField
# Investor position limit parameters (field name -> wire type).
CThostFtdcLimitPosiParamField = {
    "InstrumentID": "string",   # instrument ID
    "InvestorRange": "char",    # investor range
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "ExchangeID": "string",     # exchange ID
    "TotalVolume": "int",       # total volume limit
    "LongVolume": "int",        # long volume limit
    "OpenVolume": "int",        # same-day long open volume limit
    "LongAmount": "float",      # long position amount limit
}
structDict['CThostFtdcLimitPosiParamField'] = CThostFtdcLimitPosiParamField

# Broker position limit parameters.
CThostFtdcBrokerLimitPosiParamField = {
    "BrokerID": "string",      # broker ID
    "InstrumentID": "string",  # instrument ID
    "ExchangeID": "string",    # exchange ID
    "TotalVolume": "float",    # total volume limit
    "LongVolume": "float",     # long volume limit
}
structDict['CThostFtdcBrokerLimitPosiParamField'] = CThostFtdcBrokerLimitPosiParamField

# Investor stock position limit parameters.
CThostFtdcLimitPosiParamSField = {
    "InstrumentID": "string",  # instrument ID
    "InvestorRange": "char",   # investor range
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "ExchangeID": "string",    # exchange ID
    "TotalVolume": "int",      # total volume limit
    "OpenVolume": "int",       # same-day open volume limit
}
structDict['CThostFtdcLimitPosiParamSField'] = CThostFtdcLimitPosiParamSField
# Input for a stock disposal action request (field name -> wire type).
CThostFtdcInputStockDisposalActionField = {
    "BrokerID": "string",              # broker ID
    "InvestorID": "string",            # investor ID
    "StockDisposalActionRef": "int",   # stock disposal action reference
    "StockDisposalRef": "string",      # stock disposal reference
    "RequestID": "int",                # request ID
    "FrontID": "int",                  # front ID
    "SessionID": "int",                # session ID
    "ExchangeID": "string",            # exchange ID
    "StockDisposalSysID": "string",    # system-assigned stock disposal ID
    "ActionFlag": "char",              # action flag
    "UserID": "string",                # user ID
    "InstrumentID": "string",          # instrument ID
}
structDict['CThostFtdcInputStockDisposalActionField'] = CThostFtdcInputStockDisposalActionField

# Stock disposal action record.
CThostFtdcStockDisposalActionField = {
    "BrokerID": "string",              # broker ID
    "InvestorID": "string",            # investor ID
    "StockDisposalActionRef": "int",   # stock disposal action reference
    "StockDisposalRef": "string",      # stock disposal reference
    "RequestID": "int",                # request ID
    "FrontID": "int",                  # front ID
    "SessionID": "int",                # session ID
    "ExchangeID": "string",            # exchange ID
    "StockDisposalSysID": "string",    # system-assigned stock disposal ID
    "ActionFlag": "char",              # action flag
    "ActionDate": "string",            # action date
    "ActionTime": "string",            # action time
    "TraderID": "string",              # exchange trader ID
    "InstallID": "int",                # installation ID
    "StockDisposalLocalID": "string",  # local stock disposal ID
    "ActionLocalID": "string",         # local action ID
    "ParticipantID": "string",         # participant (member) ID
    "ClientID": "string",              # client ID
    "BusinessUnit": "string",          # business unit
    "OrderActionStatus": "char",       # order action status
    "UserID": "string",                # user ID
    "ActionType": "char",              # action (execution) type
    "StatusMsg": "string",             # status message
    "InstrumentID": "string",          # instrument ID
    "BranchID": "string",              # branch ID
}
structDict['CThostFtdcStockDisposalActionField'] = CThostFtdcStockDisposalActionField

# Query filter for stock disposal actions.
CThostFtdcQryStockDisposalActionField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
    "ExchangeID": "string",  # exchange ID
}
structDict['CThostFtdcQryStockDisposalActionField'] = CThostFtdcQryStockDisposalActionField
# Exchange-side stock disposal action record (field name -> wire type).
CThostFtdcExchangeStockDisposalActionField = {
    "ExchangeID": "string",            # exchange ID
    "StockDisposalSysID": "string",    # system-assigned stock disposal ID
    "ActionFlag": "char",              # action flag
    "ActionDate": "string",            # action date
    "ActionTime": "string",            # action time
    "TraderID": "string",              # exchange trader ID
    "InstallID": "int",                # installation ID
    "StockDisposalLocalID": "string",  # local stock disposal ID
    "ActionLocalID": "string",         # local action ID
    "ParticipantID": "string",         # participant (member) ID
    "ClientID": "string",              # client ID
    "BusinessUnit": "string",          # business unit
    "OrderActionStatus": "char",       # order action status
    "UserID": "string",                # user ID
    "ActionType": "char",              # action (execution) type
    "BranchID": "string",              # branch ID
}
structDict['CThostFtdcExchangeStockDisposalActionField'] = CThostFtdcExchangeStockDisposalActionField

# Query filter for exchange-side stock disposal actions.
CThostFtdcQryExchangeStockDisposalActionField = {
    "ParticipantID": "string",  # participant (member) ID
    "ClientID": "string",       # client ID
    "ExchangeID": "string",     # exchange ID
    "TraderID": "string",       # exchange trader ID
}
structDict['CThostFtdcQryExchangeStockDisposalActionField'] = CThostFtdcQryExchangeStockDisposalActionField

# Query filter for erroneous stock disposal actions.
CThostFtdcQryErrStockDisposalActionField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
}
structDict['CThostFtdcQryErrStockDisposalActionField'] = CThostFtdcQryErrStockDisposalActionField

# Exchange-side stock disposal action error record.
CThostFtdcExchangeStockDisposalActionErrorField = {
    "ExchangeID": "string",            # exchange ID
    "StockDisposalSysID": "string",    # system-assigned stock disposal ID
    "TraderID": "string",              # exchange trader ID
    "InstallID": "int",                # installation ID
    "StockDisposalLocalID": "string",  # local stock disposal ID
    "ActionLocalID": "string",         # local action ID
    "ErrorID": "int",                  # error code
    "ErrorMsg": "string",              # error message
    "BrokerID": "string",              # broker ID
}
structDict['CThostFtdcExchangeStockDisposalActionErrorField'] = CThostFtdcExchangeStockDisposalActionErrorField
# Erroneous stock disposal action record (field name -> wire type).
CThostFtdcErrStockDisposalActionField = {
    "BrokerID": "string",             # broker ID
    "InvestorID": "string",           # investor ID
    "StockDisposalActionRef": "int",  # stock disposal action reference
    "StockDisposalRef": "string",     # stock disposal reference
    "RequestID": "int",               # request ID
    "FrontID": "int",                 # front ID
    "SessionID": "int",               # session ID
    "ExchangeID": "string",           # exchange ID
    "StockDisposalSysID": "string",   # system-assigned stock disposal ID
    "ActionFlag": "char",             # action flag
    "UserID": "string",               # user ID
    "InstrumentID": "string",         # instrument ID
    "ErrorID": "int",                 # error code
    "ErrorMsg": "string",             # error message
}
structDict['CThostFtdcErrStockDisposalActionField'] = CThostFtdcErrStockDisposalActionField

# Investor level classification.
CThostFtdcInvestorLevelField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
    "ExchangeID": "string",  # exchange ID
    "LevelType": "char",     # investor level type
}
structDict['CThostFtdcInvestorLevelField'] = CThostFtdcInvestorLevelField

# Combination instrument guaranty ratio.
CThostFtdcCombInstrumentGuardField = {
    "BrokerID": "string",      # broker ID
    "InstrumentID": "string",  # instrument ID
    "GuarantRatio": "float",   # guaranty (safety) ratio — original source left this undocumented
}
structDict['CThostFtdcCombInstrumentGuardField'] = CThostFtdcCombInstrumentGuardField

# Query filter for combination instrument guaranty ratios.
CThostFtdcQryCombInstrumentGuardField = {
    "BrokerID": "string",      # broker ID
    "InstrumentID": "string",  # instrument ID
}
structDict['CThostFtdcQryCombInstrumentGuardField'] = CThostFtdcQryCombInstrumentGuardField
# Input for a combination action request (field name -> wire type).
CThostFtdcInputCombActionField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "InstrumentID": "string",   # instrument ID
    "CombActionRef": "string",  # combination action reference
    "UserID": "string",         # user ID
    "Direction": "char",        # buy/sell direction
    "Volume": "int",            # volume
    "CombDirection": "char",    # combination direction
    "HedgeFlag": "char",        # speculation/hedge flag
    "ExchangeID": "string",     # exchange ID
}
structDict['CThostFtdcInputCombActionField'] = CThostFtdcInputCombActionField

# Combination action record.
CThostFtdcCombActionField = {
    "BrokerID": "string",         # broker ID
    "InvestorID": "string",       # investor ID
    "InstrumentID": "string",     # instrument ID
    "CombActionRef": "string",    # combination action reference
    "UserID": "string",           # user ID
    "Direction": "char",          # buy/sell direction
    "Volume": "int",              # volume
    "CombDirection": "char",      # combination direction
    "HedgeFlag": "char",          # speculation/hedge flag
    "ActionLocalID": "string",    # local combination action ID
    "ExchangeID": "string",       # exchange ID
    "ParticipantID": "string",    # participant (member) ID
    "ClientID": "string",         # client ID
    "ExchangeInstID": "string",   # exchange instrument ID
    "TraderID": "string",         # exchange trader ID
    "InstallID": "int",           # installation ID
    "ActionStatus": "char",       # combination action status
    "NotifySequence": "int",      # notify sequence number
    "TradingDay": "string",       # trading day
    "SettlementID": "int",        # settlement ID
    "SequenceNo": "int",          # sequence number
    "FrontID": "int",             # front ID
    "SessionID": "int",           # session ID
    "UserProductInfo": "string",  # user product info
    "StatusMsg": "string",        # status message
}
structDict['CThostFtdcCombActionField'] = CThostFtdcCombActionField

# Query filter for combination actions.
CThostFtdcQryCombActionField = {
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "InstrumentID": "string",  # instrument ID
    "ExchangeID": "string",    # exchange ID
}
structDict['CThostFtdcQryCombActionField'] = CThostFtdcQryCombActionField
# Exchange-side combination action record (field name -> wire type).
CThostFtdcExchangeCombActionField = {
    "Direction": "char",         # buy/sell direction
    "Volume": "int",             # volume
    "CombDirection": "char",     # combination direction
    "HedgeFlag": "char",         # speculation/hedge flag
    "ActionLocalID": "string",   # local combination action ID
    "ExchangeID": "string",      # exchange ID
    "ParticipantID": "string",   # participant (member) ID
    "ClientID": "string",        # client ID
    "ExchangeInstID": "string",  # exchange instrument ID
    "TraderID": "string",        # exchange trader ID
    "InstallID": "int",          # installation ID
    "ActionStatus": "char",      # combination action status
    "NotifySequence": "int",     # notify sequence number
    "TradingDay": "string",      # trading day
    "SettlementID": "int",       # settlement ID
    "SequenceNo": "int",         # sequence number
}
structDict['CThostFtdcExchangeCombActionField'] = CThostFtdcExchangeCombActionField

# Query filter for exchange-side combination actions.
CThostFtdcQryExchangeCombActionField = {
    "ParticipantID": "string",   # participant (member) ID
    "ClientID": "string",        # client ID
    "ExchangeInstID": "string",  # exchange instrument ID
    "ExchangeID": "string",      # exchange ID
    "TraderID": "string",        # exchange trader ID
}
structDict['CThostFtdcQryExchangeCombActionField'] = CThostFtdcQryExchangeCombActionField

# Product quote exchange rate.
CThostFtdcProductExchRateField = {
    "ProductID": "string",        # product ID
    "QuoteCurrencyID": "string",  # quote currency ID
    "ExchangeRate": "float",      # exchange rate
}
structDict['CThostFtdcProductExchRateField'] = CThostFtdcProductExchRateField

# Query filter for product quote exchange rates.
CThostFtdcQryProductExchRateField = {
    "ProductID": "string",  # product ID
}
structDict['CThostFtdcQryProductExchRateField'] = CThostFtdcQryProductExchRateField
# Input for a designation request (field name -> wire type).
CThostFtdcInputDesignateField = {
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "DesignateRef": "string",  # designation registration reference
    "UserID": "string",        # user ID
    "DesignateType": "char",   # designation direction/type
    "ExchangeID": "string",    # exchange ID
}
structDict['CThostFtdcInputDesignateField'] = CThostFtdcInputDesignateField

# Designation record.
CThostFtdcDesignateField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "DesignateRef": "string",      # designation registration reference
    "UserID": "string",            # user ID
    "DesignateType": "char",       # designation direction/type
    "DesignateLocalID": "string",  # local designation ID
    "ExchangeID": "string",        # exchange ID
    "ParticipantID": "string",     # participant (member) ID
    "ClientID": "string",          # client ID
    "TraderID": "string",          # exchange trader ID
    "InstallID": "int",            # installation ID
    "DesignateStatus": "char",     # designation status
    "NotifySequence": "int",       # notify sequence number
    "TradingDay": "string",        # trading day
    "SettlementID": "int",         # settlement ID
    "InsertDate": "string",        # insert date
    "InsertTime": "string",        # insert time
    "FrontID": "int",              # front ID
    "SessionID": "int",            # session ID
    "UserProductInfo": "string",   # user product info
    "StatusMsg": "string",         # status message
    "BranchID": "string",          # branch ID
}
structDict['CThostFtdcDesignateField'] = CThostFtdcDesignateField

# Query filter for designations.
CThostFtdcQryDesignateField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
    "ExchangeID": "string",  # exchange ID
}
structDict['CThostFtdcQryDesignateField'] = CThostFtdcQryDesignateField

# Exchange-side designation record.
CThostFtdcExchangeDesignateField = {
    "DesignateType": "char",       # designation direction/type
    "DesignateLocalID": "string",  # local designation ID
    "ExchangeID": "string",        # exchange ID
    "ParticipantID": "string",     # participant (member) ID
    "ClientID": "string",          # client ID
    "TraderID": "string",          # exchange trader ID
    "InstallID": "int",            # installation ID
    "DesignateStatus": "char",     # designation status
    "NotifySequence": "int",       # notify sequence number
    "TradingDay": "string",        # trading day
    "SettlementID": "int",         # settlement ID
    "InsertDate": "string",        # insert date
    "InsertTime": "string",        # insert time
    "BranchID": "string",          # branch ID
}
structDict['CThostFtdcExchangeDesignateField'] = CThostFtdcExchangeDesignateField
# Input for a stock disposal request (field name -> wire type).
CThostFtdcInputStockDisposalField = {
    "BrokerID": "string",          # broker ID
    "InvestorID": "string",        # investor ID
    "StockDisposalRef": "string",  # stock disposal registration reference
    "UserID": "string",            # user ID
    "InstrumentID": "string",      # instrument ID
    "Volume": "int",               # volume
    "StockDisposalType": "char",   # stock disposal direction/type
    "ExchangeID": "string",        # exchange ID
}
structDict['CThostFtdcInputStockDisposalField'] = CThostFtdcInputStockDisposalField

# Stock disposal record.
CThostFtdcStockDisposalField = {
    "BrokerID": "string",              # broker ID
    "InvestorID": "string",            # investor ID
    "StockDisposalRef": "string",      # stock disposal registration reference
    "UserID": "string",                # user ID
    "InstrumentID": "string",          # instrument ID
    "Volume": "int",                   # volume
    "StockDisposalType": "char",       # stock disposal direction/type
    "StockDisposalLocalID": "string",  # local stock disposal ID
    "ExchangeID": "string",            # exchange ID
    "ExchangeInstID": "string",        # exchange instrument ID
    "ParticipantID": "string",         # participant (member) ID
    "ClientID": "string",              # client ID
    "TraderID": "string",              # exchange trader ID
    "InstallID": "int",                # installation ID
    "StockDisposalStatus": "char",     # stock disposal status
    "NotifySequence": "int",           # notify sequence number
    "TradingDay": "string",            # trading day
    "SettlementID": "int",             # settlement ID
    "InsertDate": "string",            # insert date
    "InsertTime": "string",            # insert time
    "FrontID": "int",                  # front ID
    "SessionID": "int",                # session ID
    "UserProductInfo": "string",       # user product info
    "StatusMsg": "string",             # status message
    "BranchID": "string",              # branch ID
    "StockDisposalSysID": "string",    # system-assigned stock disposal ID
    "BusinessUnit": "string",          # business unit
}
structDict['CThostFtdcStockDisposalField'] = CThostFtdcStockDisposalField

# Query filter for stock disposals.
CThostFtdcQryStockDisposalField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
    "ExchangeID": "string",  # exchange ID
}
structDict['CThostFtdcQryStockDisposalField'] = CThostFtdcQryStockDisposalField

# Exchange-side stock disposal record.
CThostFtdcExchangeStockDisposalField = {
    "Volume": "int",                   # volume
    "StockDisposalType": "char",       # stock disposal direction/type
    "StockDisposalLocalID": "string",  # local stock disposal ID
    "ExchangeID": "string",            # exchange ID
    "ExchangeInstID": "string",        # exchange instrument ID
    "ParticipantID": "string",         # participant (member) ID
    "ClientID": "string",              # client ID
    "TraderID": "string",              # exchange trader ID
    "InstallID": "int",                # installation ID
    "StockDisposalStatus": "char",     # stock disposal status
    "NotifySequence": "int",           # notify sequence number
    "TradingDay": "string",            # trading day
    "SettlementID": "int",             # settlement ID
    "InsertDate": "string",            # insert date
    "InsertTime": "string",            # insert time
    "BranchID": "string",              # branch ID
    "StockDisposalSysID": "string",    # system-assigned stock disposal ID
    "BusinessUnit": "string",          # business unit
}
structDict['CThostFtdcExchangeStockDisposalField'] = CThostFtdcExchangeStockDisposalField
# Query filter for investor level classifications (field name -> wire type).
CThostFtdcQryInvestorLevelField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
    "ExchangeID": "string",  # exchange ID
}
structDict['CThostFtdcQryInvestorLevelField'] = CThostFtdcQryInvestorLevelField

# Query filter for quote-request price interval parameters.
CThostFtdcQryForQuoteParamField = {
    "BrokerID": "string",      # broker ID
    "InstrumentID": "string",  # instrument ID
    "ExchangeID": "string",    # exchange ID
}
structDict['CThostFtdcQryForQuoteParamField'] = CThostFtdcQryForQuoteParamField

# Quote-request price interval parameters.
CThostFtdcForQuoteParamField = {
    "BrokerID": "string",       # broker ID
    "InstrumentID": "string",   # instrument ID
    "ExchangeID": "string",     # exchange ID
    "LastPrice": "float",       # last price
    "PriceInterval": "float",   # price interval
}
structDict['CThostFtdcForQuoteParamField'] = CThostFtdcForQuoteParamField

# Query filter for exercise freezes.
CThostFtdcQryExecFreezeField = {
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "InstrumentID": "string",  # instrument ID
    "ExchangeID": "string",    # exchange ID
}
structDict['CThostFtdcQryExecFreezeField'] = CThostFtdcQryExecFreezeField

# Exercise freeze record.
CThostFtdcExecFreezeField = {
    "InstrumentID": "string",   # underlying instrument ID
    "ExchangeID": "string",     # exchange ID
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "PosiDirection": "char",    # position direction (long/short)
    "OptionsType": "char",      # options type
    "Volume": "int",            # frozen volume (unit: shares)
    "FrozenAmount": "float",    # frozen amount
}
structDict['CThostFtdcExecFreezeField'] = CThostFtdcExecFreezeField
# Market data snapshot (field name -> wire type).
CThostFtdcMarketDataField = {
    "TradingDay": "string",           # trading day
    "InstrumentID": "string",         # instrument ID
    "ExchangeID": "string",           # exchange ID
    "ExchangeInstID": "string",       # exchange instrument ID
    "LastPrice": "float",             # last price
    "PreSettlementPrice": "float",    # previous settlement price
    "PreClosePrice": "float",         # previous close price
    "PreOpenInterest": "float",       # previous open interest
    "OpenPrice": "float",             # open price
    "HighestPrice": "float",          # highest price
    "LowestPrice": "float",           # lowest price
    "Volume": "int",                  # traded volume
    "Turnover": "float",              # turnover
    "OpenInterest": "float",          # open interest
    "ClosePrice": "float",            # close price
    "SettlementPrice": "float",       # settlement price
    "UpperLimitPrice": "float",       # upper limit price
    "LowerLimitPrice": "float",       # lower limit price
    "PreDelta": "float",              # previous delta
    "CurrDelta": "float",             # current delta
    "UpdateTime": "string",           # last update time
    "UpdateMillisec": "int",          # last update millisecond
    "ActionDay": "string",            # action (business) day
}
structDict['CThostFtdcMarketDataField'] = CThostFtdcMarketDataField

# Market data: base attributes.
CThostFtdcMarketDataBaseField = {
    "TradingDay": "string",         # trading day
    "PreSettlementPrice": "float",  # previous settlement price
    "PreClosePrice": "float",       # previous close price
    "PreOpenInterest": "float",     # previous open interest
    "PreDelta": "float",            # previous delta
}
structDict['CThostFtdcMarketDataBaseField'] = CThostFtdcMarketDataBaseField

# Market data: static attributes.
CThostFtdcMarketDataStaticField = {
    "OpenPrice": "float",        # open price
    "HighestPrice": "float",     # highest price
    "LowestPrice": "float",      # lowest price
    "ClosePrice": "float",       # close price
    "UpperLimitPrice": "float",  # upper limit price
    "LowerLimitPrice": "float",  # lower limit price
    "SettlementPrice": "float",  # settlement price
    "CurrDelta": "float",        # current delta
}
structDict['CThostFtdcMarketDataStaticField'] = CThostFtdcMarketDataStaticField

# Market data: last trade attributes.
CThostFtdcMarketDataLastMatchField = {
    "LastPrice": "float",     # last price
    "Volume": "int",          # traded volume
    "Turnover": "float",      # turnover
    "OpenInterest": "float",  # open interest
}
structDict['CThostFtdcMarketDataLastMatchField'] = CThostFtdcMarketDataLastMatchField

# Market data: best bid/ask attributes.
CThostFtdcMarketDataBestPriceField = {
    "BidPrice1": "float",  # bid price 1
    "BidVolume1": "int",   # bid volume 1
    "AskPrice1": "float",  # ask price 1
    "AskVolume1": "int",   # ask volume 1
}
structDict['CThostFtdcMarketDataBestPriceField'] = CThostFtdcMarketDataBestPriceField

# Market data: bid levels 2-3.
CThostFtdcMarketDataBid23Field = {
    "BidPrice2": "float",  # bid price 2
    "BidVolume2": "int",   # bid volume 2
    "BidPrice3": "float",  # bid price 3
    "BidVolume3": "int",   # bid volume 3
}
structDict['CThostFtdcMarketDataBid23Field'] = CThostFtdcMarketDataBid23Field

# Market data: ask levels 2-3.
CThostFtdcMarketDataAsk23Field = {
    "AskPrice2": "float",  # ask price 2
    "AskVolume2": "int",   # ask volume 2
    "AskPrice3": "float",  # ask price 3
    "AskVolume3": "int",   # ask volume 3
}
structDict['CThostFtdcMarketDataAsk23Field'] = CThostFtdcMarketDataAsk23Field
#行情申买四、五属性
CThostFtdcMarketDataBid45Field = {}
#申买价四
CThostFtdcMarketDataBid45Field["BidPrice4"] = "float"
#申买量四
CThostFtdcMarketDataBid45Field["BidVolume4"] = "int"
#申买价五
CThostFtdcMarketDataBid45Field["BidPrice5"] = "float"
#申买量五
CThostFtdcMarketDataBid45Field["BidVolume5"] = "int"
structDict['CThostFtdcMarketDataBid45Field'] = CThostFtdcMarketDataBid45Field
#行情申卖四、五属性
CThostFtdcMarketDataAsk45Field = {}
#申卖价四
CThostFtdcMarketDataAsk45Field["AskPrice4"] = "float"
#申卖量四
CThostFtdcMarketDataAsk45Field["AskVolume4"] = "int"
#申卖价五
CThostFtdcMarketDataAsk45Field["AskPrice5"] = "float"
#申卖量五
CThostFtdcMarketDataAsk45Field["AskVolume5"] = "int"
structDict['CThostFtdcMarketDataAsk45Field'] = CThostFtdcMarketDataAsk45Field
# Market-data update-time attributes.
CThostFtdcMarketDataUpdateTimeField = {
    "InstrumentID": "string",       # instrument ID
    "UpdateTime": "string",         # last update time
    "UpdateMillisec": "int",        # last update millisecond
    "ActionDay": "string",          # action (business) day
    "ExchangeID": "string",         # exchange ID
}
structDict['CThostFtdcMarketDataUpdateTimeField'] = CThostFtdcMarketDataUpdateTimeField

# Market-data exchange attribute.
CThostFtdcMarketDataExchangeField = {
    "ExchangeID": "string",         # exchange ID
}
structDict['CThostFtdcMarketDataExchangeField'] = CThostFtdcMarketDataExchangeField

# Specified instrument.
CThostFtdcSpecificInstrumentField = {
    "InstrumentID": "string",       # instrument ID
}
structDict['CThostFtdcSpecificInstrumentField'] = CThostFtdcSpecificInstrumentField

# Instrument trading status.
CThostFtdcInstrumentStatusField = {
    "ExchangeID": "string",         # exchange ID
    "ExchangeInstID": "string",     # instrument ID on the exchange
    "SettlementGroupID": "string",  # settlement group ID
    "InstrumentID": "string",       # instrument ID
    "InstrumentStatus": "char",     # instrument trading status
    "TradingSegmentSN": "int",      # trading segment sequence number
    "EnterTime": "string",          # time this status was entered
    "EnterReason": "char",          # reason this status was entered
}
structDict['CThostFtdcInstrumentStatusField'] = CThostFtdcInstrumentStatusField

# Query instrument trading status.
CThostFtdcQryInstrumentStatusField = {
    "ExchangeID": "string",         # exchange ID
    "ExchangeInstID": "string",     # instrument ID on the exchange
}
structDict['CThostFtdcQryInstrumentStatusField'] = CThostFtdcQryInstrumentStatusField
# Investor account.
CThostFtdcInvestorAccountField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "AccountID": "string",      # investor account ID
    "CurrencyID": "string",     # currency ID
}
structDict['CThostFtdcInvestorAccountField'] = CThostFtdcInvestorAccountField

# Floating profit/loss algorithm.
CThostFtdcPositionProfitAlgorithmField = {
    "BrokerID": "string",       # broker ID
    "AccountID": "string",      # investor account ID
    "Algorithm": "char",        # profit/loss algorithm
    "Memo": "string",           # memo
    "CurrencyID": "string",     # currency ID
}
structDict['CThostFtdcPositionProfitAlgorithmField'] = CThostFtdcPositionProfitAlgorithmField

# Member fund discount.
CThostFtdcDiscountField = {
    "BrokerID": "string",       # broker ID
    "InvestorRange": "char",    # investor range
    "InvestorID": "string",     # investor ID
    "Discount": "float",        # fund discount ratio
}
structDict['CThostFtdcDiscountField'] = CThostFtdcDiscountField

# Query transfer bank.
CThostFtdcQryTransferBankField = {
    "BankID": "string",         # bank ID
    "BankBrchID": "string",     # bank branch ID
}
structDict['CThostFtdcQryTransferBankField'] = CThostFtdcQryTransferBankField

# Transfer bank.
CThostFtdcTransferBankField = {
    "BankID": "string",         # bank ID
    "BankBrchID": "string",     # bank branch ID
    "BankName": "string",       # bank name
    "IsActive": "int",          # whether the bank is active
}
structDict['CThostFtdcTransferBankField'] = CThostFtdcTransferBankField
# Query investor position detail.
CThostFtdcQryInvestorPositionDetailField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "InstrumentID": "string",   # instrument ID
    "ExchangeID": "string",     # exchange ID
}
structDict['CThostFtdcQryInvestorPositionDetailField'] = CThostFtdcQryInvestorPositionDetailField

# Investor position detail.
CThostFtdcInvestorPositionDetailField = {
    "InstrumentID": "string",           # instrument ID
    "BrokerID": "string",               # broker ID
    "InvestorID": "string",             # investor ID
    "HedgeFlag": "char",                # speculation/hedge flag
    "Direction": "char",                # buy/sell direction
    "OpenDate": "string",               # open date
    "TradeID": "string",                # trade ID
    "Volume": "int",                    # volume
    "OpenPrice": "float",               # open price
    "TradingDay": "string",             # trading day
    "SettlementID": "int",              # settlement ID
    "TradeType": "char",                # trade type
    "CombInstrumentID": "string",       # combination instrument ID
    "ExchangeID": "string",             # exchange ID
    "CloseProfitByDate": "float",       # close profit, marked to market daily
    "CloseProfitByTrade": "float",      # close profit, trade-by-trade
    "PositionProfitByDate": "float",    # position profit, marked to market daily
    "PositionProfitByTrade": "float",   # position profit, trade-by-trade
    "Margin": "float",                  # investor margin
    "ExchMargin": "float",              # exchange margin
    "MarginRateByMoney": "float",       # margin rate by money
    "MarginRateByVolume": "float",      # margin rate by volume (lots)
    "LastSettlementPrice": "float",     # previous settlement price
    "SettlementPrice": "float",         # settlement price
    "CloseVolume": "int",               # close volume
    "CloseAmount": "float",             # close amount
}
structDict['CThostFtdcInvestorPositionDetailField'] = CThostFtdcInvestorPositionDetailField
# Trading account password.
CThostFtdcTradingAccountPasswordField = {
    "BrokerID": "string",       # broker ID
    "AccountID": "string",      # investor account ID
    "Password": "string",       # password
    "CurrencyID": "string",     # currency ID
}
structDict['CThostFtdcTradingAccountPasswordField'] = CThostFtdcTradingAccountPasswordField

# Exchange market-data trader offer.
CThostFtdcMDTraderOfferField = {
    "ExchangeID": "string",                 # exchange ID
    "TraderID": "string",                   # exchange trader ID
    "ParticipantID": "string",              # participant (member) ID
    "Password": "string",                   # password
    "InstallID": "int",                     # installation ID
    "OrderLocalID": "string",               # local order ID
    "TraderConnectStatus": "char",          # exchange trader connection status
    "ConnectRequestDate": "string",         # date the connect request was sent
    "ConnectRequestTime": "string",         # time the connect request was sent
    "LastReportDate": "string",             # last report date
    "LastReportTime": "string",             # last report time
    "ConnectDate": "string",                # date the connection completed
    "ConnectTime": "string",                # time the connection completed
    "StartDate": "string",                  # start date
    "StartTime": "string",                  # start time
    "TradingDay": "string",                 # trading day
    "BrokerID": "string",                   # broker ID
    "MaxTradeID": "string",                 # max trade ID of this seat
    "MaxOrderMessageReference": "string",   # max order message reference of this seat
    "BizType": "char",                      # business type
}
structDict['CThostFtdcMDTraderOfferField'] = CThostFtdcMDTraderOfferField

# Query market-data trader offer.
CThostFtdcQryMDTraderOfferField = {
    "ExchangeID": "string",     # exchange ID
    "ParticipantID": "string",  # participant (member) ID
    "TraderID": "string",       # exchange trader ID
}
structDict['CThostFtdcQryMDTraderOfferField'] = CThostFtdcQryMDTraderOfferField
# Query client notice.
CThostFtdcQryNoticeField = {
    "BrokerID": "string",       # broker ID
}
structDict['CThostFtdcQryNoticeField'] = CThostFtdcQryNoticeField

# Client notice.
CThostFtdcNoticeField = {
    "BrokerID": "string",       # broker ID
    "Content": "string",        # message body
    "SequenceLabel": "string",  # broker notice content sequence label
}
structDict['CThostFtdcNoticeField'] = CThostFtdcNoticeField

# User right.
CThostFtdcUserRightField = {
    "BrokerID": "string",       # broker ID
    "UserID": "string",         # user ID
    "UserRightType": "char",    # user right type
    "IsForbidden": "int",       # whether the right is forbidden
}
structDict['CThostFtdcUserRightField'] = CThostFtdcUserRightField

# Query settlement info confirmation.
CThostFtdcQrySettlementInfoConfirmField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
}
structDict['CThostFtdcQrySettlementInfoConfirmField'] = CThostFtdcQrySettlementInfoConfirmField

# Load settlement info.
CThostFtdcLoadSettlementInfoField = {
    "BrokerID": "string",       # broker ID
}
structDict['CThostFtdcLoadSettlementInfoField'] = CThostFtdcLoadSettlementInfoField
# Broker withdrawable-fund algorithm table.
CThostFtdcBrokerWithdrawAlgorithmField = {
    "BrokerID": "string",               # broker ID
    "WithdrawAlgorithm": "char",        # withdrawable-fund algorithm
    "UsingRatio": "float",              # fund usage ratio
    "IncludeCloseProfit": "char",       # whether withdrawal includes close profit
    "AllWithoutTrade": "char",          # whether no-position/no-trade clients are ratio-limited today
    "AvailIncludeCloseProfit": "char",  # whether available funds include close profit
    "IsBrokerUserEvent": "int",         # whether broker user events are enabled
    "CurrencyID": "string",             # currency ID
    "FundMortgageRatio": "float",       # fund mortgage ratio
    "BalanceAlgorithm": "char",         # balance (equity) algorithm
}
structDict['CThostFtdcBrokerWithdrawAlgorithmField'] = CThostFtdcBrokerWithdrawAlgorithmField

# Trading account password change (v1).
CThostFtdcTradingAccountPasswordUpdateV1Field = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "OldPassword": "string",    # old password
    "NewPassword": "string",    # new password
}
structDict['CThostFtdcTradingAccountPasswordUpdateV1Field'] = CThostFtdcTradingAccountPasswordUpdateV1Field

# Trading account password change.
CThostFtdcTradingAccountPasswordUpdateField = {
    "BrokerID": "string",       # broker ID
    "AccountID": "string",      # investor account ID
    "OldPassword": "string",    # old password
    "NewPassword": "string",    # new password
    "CurrencyID": "string",     # currency ID
}
structDict['CThostFtdcTradingAccountPasswordUpdateField'] = CThostFtdcTradingAccountPasswordUpdateField
# Query combination instrument leg.
CThostFtdcQryCombinationLegField = {
    "CombInstrumentID": "string",   # combination instrument ID
    "LegID": "int",                 # leg ID
    "LegInstrumentID": "string",    # leg instrument ID
}
structDict['CThostFtdcQryCombinationLegField'] = CThostFtdcQryCombinationLegField

# Query data synchronization status.
# NOTE(review): the generated source repeated the previous struct's comment
# ("query combination leg") here; the fields describe a sync-status query.
CThostFtdcQrySyncStatusField = {
    "TradingDay": "string",         # trading day
}
structDict['CThostFtdcQrySyncStatusField'] = CThostFtdcQrySyncStatusField

# Single leg of a combination trading instrument.
CThostFtdcCombinationLegField = {
    "CombInstrumentID": "string",   # combination instrument ID
    "LegID": "int",                 # leg ID
    "LegInstrumentID": "string",    # leg instrument ID
    "Direction": "char",            # buy/sell direction
    "LegMultiple": "int",           # leg multiple
    "ImplyLevel": "int",            # derivation (imply) level
}
structDict['CThostFtdcCombinationLegField'] = CThostFtdcCombinationLegField

# Data synchronization status.
CThostFtdcSyncStatusField = {
    "TradingDay": "string",         # trading day
    "DataSyncStatus": "char",       # data sync status
}
structDict['CThostFtdcSyncStatusField'] = CThostFtdcSyncStatusField

# Query linkman (contact person).
CThostFtdcQryLinkManField = {
    "BrokerID": "string",           # broker ID
    "InvestorID": "string",         # investor ID
}
structDict['CThostFtdcQryLinkManField'] = CThostFtdcQryLinkManField
# Linkman (contact person).
CThostFtdcLinkManField = {
    "BrokerID": "string",           # broker ID
    "InvestorID": "string",         # investor ID
    "PersonType": "char",           # contact person type
    "IdentifiedCardType": "char",   # ID document type
    "IdentifiedCardNo": "string",   # ID document number
    "PersonName": "string",         # name
    "Telephone": "string",          # telephone
    "Address": "string",            # mailing address
    "ZipCode": "string",            # zip code
    "Priority": "int",              # priority
    "UOAZipCode": "string",         # account-opening zip code
    "PersonFullName": "string",     # full name
}
structDict['CThostFtdcLinkManField'] = CThostFtdcLinkManField

# Query broker user event.
CThostFtdcQryBrokerUserEventField = {
    "BrokerID": "string",           # broker ID
    "UserID": "string",             # user ID
    "UserEventType": "char",        # user event type
}
structDict['CThostFtdcQryBrokerUserEventField'] = CThostFtdcQryBrokerUserEventField

# Broker user event.
CThostFtdcBrokerUserEventField = {
    "BrokerID": "string",           # broker ID
    "UserID": "string",             # user ID
    "UserEventType": "char",        # user event type
    "EventSequenceNo": "int",       # user event sequence number
    "EventDate": "string",          # event date
    "EventTime": "string",          # event time
    "UserEventInfo": "string",      # user event info
    "InvestorID": "string",         # investor ID
    "InstrumentID": "string",       # instrument ID
    "ExchangeID": "string",         # exchange ID
}
structDict['CThostFtdcBrokerUserEventField'] = CThostFtdcBrokerUserEventField

# Query contracted bank: request.
CThostFtdcQryContractBankField = {
    "BrokerID": "string",           # broker ID
    "BankID": "string",             # bank ID
    "BankBrchID": "string",         # bank branch ID
}
structDict['CThostFtdcQryContractBankField'] = CThostFtdcQryContractBankField

# Query contracted bank: response.
CThostFtdcContractBankField = {
    "BrokerID": "string",           # broker ID
    "BankID": "string",             # bank ID
    "BankBrchID": "string",         # bank branch ID
    "BankName": "string",           # bank name
}
structDict['CThostFtdcContractBankField'] = CThostFtdcContractBankField
# Investor combination position detail.
CThostFtdcInvestorPositionCombineDetailField = {
    "TradingDay": "string",         # trading day
    "OpenDate": "string",           # open date
    "ExchangeID": "string",         # exchange ID
    "SettlementID": "int",          # settlement ID
    "BrokerID": "string",           # broker ID
    "InvestorID": "string",         # investor ID
    "ComTradeID": "string",         # combination trade ID
    "TradeID": "string",            # match (trade) ID
    "InstrumentID": "string",       # instrument ID
    "HedgeFlag": "char",            # speculation/hedge flag
    "Direction": "char",            # buy/sell direction
    "TotalAmt": "int",              # position volume
    "Margin": "float",              # investor margin
    "ExchMargin": "float",          # exchange margin
    "MarginRateByMoney": "float",   # margin rate by money
    "MarginRateByVolume": "float",  # margin rate by volume (lots)
    "LegID": "int",                 # leg ID
    "LegMultiple": "int",           # leg multiple
    "CombInstrumentID": "string",   # combination position instrument ID
    "TradeGroupID": "int",          # trade group ID
}
structDict['CThostFtdcInvestorPositionCombineDetailField'] = CThostFtdcInvestorPositionCombineDetailField
# Parked (pre-stored) order.
CThostFtdcParkedOrderField = {
    "BrokerID": "string",            # broker ID
    "InvestorID": "string",          # investor ID
    "InstrumentID": "string",        # instrument ID
    "OrderRef": "string",            # order reference
    "UserID": "string",              # user ID
    "OrderPriceType": "char",        # order price type
    "Direction": "char",             # buy/sell direction
    "CombOffsetFlag": "string",      # combined open/close (offset) flag
    "CombHedgeFlag": "string",       # combined speculation/hedge flag
    "LimitPrice": "float",           # limit price
    "VolumeTotalOriginal": "int",    # original order volume
    "TimeCondition": "char",         # time-in-force condition
    "GTDDate": "string",             # GTD date
    "VolumeCondition": "char",       # volume condition
    "MinVolume": "int",              # minimum volume
    "ContingentCondition": "char",   # trigger (contingent) condition
    "StopPrice": "float",            # stop price
    "ForceCloseReason": "char",      # force-close reason
    "IsAutoSuspend": "int",          # auto-suspend flag
    "BusinessUnit": "string",        # business unit
    "RequestID": "int",              # request ID
    "UserForceClose": "int",         # user force-close flag
    "ExchangeID": "string",          # exchange ID
    "ParkedOrderID": "string",       # parked order ID
    "UserType": "char",              # user type
    "Status": "char",                # parked order status
    "ErrorID": "int",                # error ID
    "ErrorMsg": "string",            # error message
    "IsSwapOrder": "int",            # swap order flag
}
structDict['CThostFtdcParkedOrderField'] = CThostFtdcParkedOrderField

# Input parked order action (cancellation).
CThostFtdcParkedOrderActionField = {
    "BrokerID": "string",            # broker ID
    "InvestorID": "string",          # investor ID
    "OrderActionRef": "int",         # order action reference
    "OrderRef": "string",            # order reference
    "RequestID": "int",              # request ID
    "FrontID": "int",                # front ID
    "SessionID": "int",              # session ID
    "ExchangeID": "string",          # exchange ID
    "OrderSysID": "string",          # exchange order system ID
    "ActionFlag": "char",            # action flag
    "LimitPrice": "float",           # limit price
    "VolumeChange": "int",           # volume change
    "UserID": "string",              # user ID
    "InstrumentID": "string",        # instrument ID
    "ParkedOrderActionID": "string", # parked order action ID
    "UserType": "char",              # user type
    "Status": "char",                # parked cancellation status
    "ErrorID": "int",                # error ID
    "ErrorMsg": "string",            # error message
}
structDict['CThostFtdcParkedOrderActionField'] = CThostFtdcParkedOrderActionField
# Query parked orders.
CThostFtdcQryParkedOrderField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "InstrumentID": "string",   # instrument ID
    "ExchangeID": "string",     # exchange ID
}
structDict['CThostFtdcQryParkedOrderField'] = CThostFtdcQryParkedOrderField

# Query parked order actions (cancellations).
CThostFtdcQryParkedOrderActionField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "InstrumentID": "string",   # instrument ID
    "ExchangeID": "string",     # exchange ID
}
structDict['CThostFtdcQryParkedOrderActionField'] = CThostFtdcQryParkedOrderActionField

# Remove a parked order.
CThostFtdcRemoveParkedOrderField = {
    "BrokerID": "string",            # broker ID
    "InvestorID": "string",          # investor ID
    "ParkedOrderID": "string",       # parked order ID
}
structDict['CThostFtdcRemoveParkedOrderField'] = CThostFtdcRemoveParkedOrderField

# Remove a parked order action (cancellation).
CThostFtdcRemoveParkedOrderActionField = {
    "BrokerID": "string",            # broker ID
    "InvestorID": "string",          # investor ID
    "ParkedOrderActionID": "string", # parked order action ID
}
structDict['CThostFtdcRemoveParkedOrderActionField'] = CThostFtdcRemoveParkedOrderActionField

# Investor withdrawable-fund algorithm table.
CThostFtdcInvestorWithdrawAlgorithmField = {
    "BrokerID": "string",           # broker ID
    "InvestorRange": "char",        # investor range
    "InvestorID": "string",         # investor ID
    "UsingRatio": "float",          # withdrawable-fund ratio
    "CurrencyID": "string",         # currency ID
    "FundMortgageRatio": "float",   # fund mortgage ratio
}
structDict['CThostFtdcInvestorWithdrawAlgorithmField'] = CThostFtdcInvestorWithdrawAlgorithmField

# Query combination position detail.
CThostFtdcQryInvestorPositionCombineDetailField = {
    "BrokerID": "string",           # broker ID
    "InvestorID": "string",         # investor ID
    "CombInstrumentID": "string",   # combination position instrument ID
}
structDict['CThostFtdcQryInvestorPositionCombineDetailField'] = CThostFtdcQryInvestorPositionCombineDetailField
# Average traded price.
CThostFtdcMarketDataAveragePriceField = {
    "AveragePrice": "float",    # average price of the day
}
structDict['CThostFtdcMarketDataAveragePriceField'] = CThostFtdcMarketDataAveragePriceField

# Verify investor password.
CThostFtdcVerifyInvestorPasswordField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "Password": "string",       # password
}
structDict['CThostFtdcVerifyInvestorPasswordField'] = CThostFtdcVerifyInvestorPasswordField

# User IP.
CThostFtdcUserIPField = {
    "BrokerID": "string",       # broker ID
    "UserID": "string",         # user ID
    "IPAddress": "string",      # IP address
    "IPMask": "string",         # IP address mask
    "MacAddress": "string",     # MAC address
}
structDict['CThostFtdcUserIPField'] = CThostFtdcUserIPField

# User event notice info.
CThostFtdcTradingNoticeInfoField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
    "SendTime": "string",       # send time
    "FieldContent": "string",   # message body
    "SequenceSeries": "int",    # sequence series number
    "SequenceNo": "int",        # sequence number
}
structDict['CThostFtdcTradingNoticeInfoField'] = CThostFtdcTradingNoticeInfoField

# User event notice.
CThostFtdcTradingNoticeField = {
    "BrokerID": "string",       # broker ID
    "InvestorRange": "char",    # investor range
    "InvestorID": "string",     # investor ID
    "SequenceSeries": "int",    # sequence series number
    "UserID": "string",         # user ID
    "SendTime": "string",       # send time
    "SequenceNo": "int",        # sequence number
    "FieldContent": "string",   # message body
}
structDict['CThostFtdcTradingNoticeField'] = CThostFtdcTradingNoticeField

# Query trading event notice.
CThostFtdcQryTradingNoticeField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
}
structDict['CThostFtdcQryTradingNoticeField'] = CThostFtdcQryTradingNoticeField
# Query erroneous orders.
CThostFtdcQryErrOrderField = {
    "BrokerID": "string",       # broker ID
    "InvestorID": "string",     # investor ID
}
structDict['CThostFtdcQryErrOrderField'] = CThostFtdcQryErrOrderField

# Erroneous order.
CThostFtdcErrOrderField = {
    "BrokerID": "string",            # broker ID
    "InvestorID": "string",          # investor ID
    "InstrumentID": "string",        # instrument ID
    "OrderRef": "string",            # order reference
    "UserID": "string",              # user ID
    "OrderPriceType": "char",        # order price type
    "Direction": "char",             # buy/sell direction
    "CombOffsetFlag": "string",      # combined open/close (offset) flag
    "CombHedgeFlag": "string",       # combined speculation/hedge flag
    "LimitPrice": "float",           # limit price
    "VolumeTotalOriginal": "int",    # original order volume
    "TimeCondition": "char",         # time-in-force condition
    "GTDDate": "string",             # GTD date
    "VolumeCondition": "char",       # volume condition
    "MinVolume": "int",              # minimum volume
    "ContingentCondition": "char",   # trigger (contingent) condition
    "StopPrice": "float",            # stop price
    "ForceCloseReason": "char",      # force-close reason
    "IsAutoSuspend": "int",          # auto-suspend flag
    "BusinessUnit": "string",        # business unit
    "RequestID": "int",              # request ID
    "UserForceClose": "int",         # user force-close flag
    "ErrorID": "int",                # error ID
    "ErrorMsg": "string",            # error message
    "IsSwapOrder": "int",            # swap order flag
    "ExchangeID": "string",          # exchange ID
}
structDict['CThostFtdcErrOrderField'] = CThostFtdcErrOrderField
# Erroneous conditional order.
# NOTE(review): the generated header comment said "query erroneous order action";
# the fields below describe the erroneous conditional-order record itself.
CThostFtdcErrorConditionalOrderField = {
    "BrokerID": "string",            # broker ID
    "InvestorID": "string",          # investor ID
    "InstrumentID": "string",        # instrument ID
    "OrderRef": "string",            # order reference
    "UserID": "string",              # user ID
    "OrderPriceType": "char",        # order price type
    "Direction": "char",             # buy/sell direction
    "CombOffsetFlag": "string",      # combined open/close (offset) flag
    "CombHedgeFlag": "string",       # combined speculation/hedge flag
    "LimitPrice": "float",           # limit price
    "VolumeTotalOriginal": "int",    # original order volume
    "TimeCondition": "char",         # time-in-force condition
    "GTDDate": "string",             # GTD date
    "VolumeCondition": "char",       # volume condition
    "MinVolume": "int",              # minimum volume
    "ContingentCondition": "char",   # trigger (contingent) condition
    "StopPrice": "float",            # stop price
    "ForceCloseReason": "char",      # force-close reason
    "IsAutoSuspend": "int",          # auto-suspend flag
    "BusinessUnit": "string",        # business unit
    "RequestID": "int",              # request ID
    "OrderLocalID": "string",        # local order ID
    "ExchangeID": "string",          # exchange ID
    "ParticipantID": "string",       # participant (member) ID
    "ClientID": "string",            # client ID
    "ExchangeInstID": "string",      # instrument ID on the exchange
    "TraderID": "string",            # exchange trader ID
    "InstallID": "int",              # installation ID
    "OrderSubmitStatus": "char",     # order submit status
    "NotifySequence": "int",         # order notify sequence
    "TradingDay": "string",          # trading day
    "SettlementID": "int",           # settlement ID
    "OrderSysID": "string",          # exchange order system ID
    "OrderSource": "char",           # order source
    "OrderStatus": "char",           # order status
    "OrderType": "char",             # order type
    "VolumeTraded": "int",           # volume traded today
    "VolumeTotal": "int",            # remaining volume
    "InsertDate": "string",          # order insert date
    "InsertTime": "string",          # order insert time
    "ActiveTime": "string",          # activation time
    "SuspendTime": "string",         # suspend time
    "UpdateTime": "string",          # last update time
    "CancelTime": "string",          # cancel time
    "ActiveTraderID": "string",      # exchange trader ID of last modification
    "ClearingPartID": "string",      # clearing member ID
    "SequenceNo": "int",             # sequence number
    "FrontID": "int",                # front ID
    "SessionID": "int",              # session ID
    "UserProductInfo": "string",     # user product info
    "StatusMsg": "string",           # status message
    "UserForceClose": "int",         # user force-close flag
    "ActiveUserID": "string",        # operating user ID
    "BrokerOrderSeq": "int",         # broker order sequence number
    "RelativeOrderSysID": "string",  # related order system ID
    "ZCETotalTradedVolume": "int",   # ZCE total traded volume
    "ErrorID": "int",                # error ID
    "ErrorMsg": "string",            # error message
    "IsSwapOrder": "int",            # swap order flag
    "BranchID": "string",            # branch ID
}
structDict['CThostFtdcErrorConditionalOrderField'] = CThostFtdcErrorConditionalOrderField
# Query error order action.
CThostFtdcQryErrOrderActionField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
}
structDict['CThostFtdcQryErrOrderActionField'] = CThostFtdcQryErrOrderActionField
# Error order action.
CThostFtdcErrOrderActionField = {
    "BrokerID": "string",         # broker ID
    "InvestorID": "string",       # investor ID
    "OrderActionRef": "int",      # order action reference
    "OrderRef": "string",         # order reference
    "RequestID": "int",           # request ID
    "FrontID": "int",             # front ID
    "SessionID": "int",           # session ID
    "ExchangeID": "string",       # exchange ID
    "OrderSysID": "string",       # exchange-side order ID
    "ActionFlag": "char",         # action flag
    "LimitPrice": "float",        # price
    "VolumeChange": "int",        # volume change
    "ActionDate": "string",       # action date
    "ActionTime": "string",       # action time
    "TraderID": "string",         # trader ID at the exchange
    "InstallID": "int",           # installation ID
    "OrderLocalID": "string",     # local order ID
    "ActionLocalID": "string",    # local action ID
    "ParticipantID": "string",    # participant ID
    "ClientID": "string",         # client ID
    "BusinessUnit": "string",     # business unit
    "OrderActionStatus": "char",  # order action status
    "UserID": "string",           # user ID
    "StatusMsg": "string",        # status message
    "InstrumentID": "string",     # instrument ID
    "BranchID": "string",         # branch ID
    "ErrorID": "int",             # error code
    "ErrorMsg": "string",         # error message
}
structDict['CThostFtdcErrOrderActionField'] = CThostFtdcErrOrderActionField
# Query exchange status.
CThostFtdcQryExchangeSequenceField = {
    "ExchangeID": "string",  # exchange ID
}
structDict['CThostFtdcQryExchangeSequenceField'] = CThostFtdcQryExchangeSequenceField
# Exchange status.
CThostFtdcExchangeSequenceField = {
    "ExchangeID": "string",   # exchange ID
    "SequenceNo": "int",      # sequence number
    "MarketStatus": "char",   # instrument trading status
}
structDict['CThostFtdcExchangeSequenceField'] = CThostFtdcExchangeSequenceField
# Query maximum order volume for a given price.
CThostFtdcQueryMaxOrderVolumeWithPriceField = {
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "InstrumentID": "string",  # instrument ID
    "Direction": "char",       # buy/sell direction
    "OffsetFlag": "char",      # open/close (offset) flag
    "HedgeFlag": "char",       # speculation/hedge flag
    "MaxVolume": "int",        # maximum allowed order volume
    "Price": "float",          # order price
    "ExchangeID": "string",    # exchange ID
}
structDict['CThostFtdcQueryMaxOrderVolumeWithPriceField'] = CThostFtdcQueryMaxOrderVolumeWithPriceField
# Query broker trading parameters.
CThostFtdcQryBrokerTradingParamsField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
    "CurrencyID": "string",  # currency ID
}
structDict['CThostFtdcQryBrokerTradingParamsField'] = CThostFtdcQryBrokerTradingParamsField
# Broker trading parameters.
CThostFtdcBrokerTradingParamsField = {
    "BrokerID": "string",                # broker ID
    "InvestorID": "string",              # investor ID
    "MarginPriceType": "char",           # margin price type
    "Algorithm": "char",                 # profit/loss algorithm
    "AvailIncludeCloseProfit": "char",   # whether available funds include close profit
    "CurrencyID": "string",              # currency ID
    "OptionRoyaltyPriceType": "char",    # option premium price type
}
structDict['CThostFtdcBrokerTradingParamsField'] = CThostFtdcBrokerTradingParamsField
# Query broker trading algorithms.
CThostFtdcQryBrokerTradingAlgosField = {
    "BrokerID": "string",      # broker ID
    "ExchangeID": "string",    # exchange ID
    "InstrumentID": "string",  # instrument ID
}
structDict['CThostFtdcQryBrokerTradingAlgosField'] = CThostFtdcQryBrokerTradingAlgosField
# Broker trading algorithms.
CThostFtdcBrokerTradingAlgosField = {
    "BrokerID": "string",                    # broker ID
    "ExchangeID": "string",                  # exchange ID
    "InstrumentID": "string",                # instrument ID
    "HandlePositionAlgoID": "char",          # position-handling algorithm ID
    "FindMarginRateAlgoID": "char",          # margin-rate lookup algorithm ID
    "HandleTradingAccountAlgoID": "char",    # funds-handling algorithm ID
}
structDict['CThostFtdcBrokerTradingAlgosField'] = CThostFtdcBrokerTradingAlgosField
# Query broker deposit.
CThostFtdcQueryBrokerDepositField = {
    "BrokerID": "string",    # broker ID
    "ExchangeID": "string",  # exchange ID
}
structDict['CThostFtdcQueryBrokerDepositField'] = CThostFtdcQueryBrokerDepositField
# Broker deposit.
CThostFtdcBrokerDepositField = {
    "TradingDay": "string",     # trading day
    "BrokerID": "string",       # broker ID
    "ParticipantID": "string",  # participant ID
    "ExchangeID": "string",     # exchange ID
    "PreBalance": "float",      # previous settlement reserve
    "CurrMargin": "float",      # current total margin
    "CloseProfit": "float",     # close profit
    "Balance": "float",         # futures settlement reserve
    "Deposit": "float",         # deposit amount
    "Withdraw": "float",        # withdrawal amount
    "Available": "float",       # withdrawable funds
    "Reserve": "float",         # basic reserve
    "FrozenMargin": "float",    # frozen margin
}
structDict['CThostFtdcBrokerDepositField'] = CThostFtdcBrokerDepositField
# Query CFMMC (margin monitoring system) broker key.
CThostFtdcQryCFMMCBrokerKeyField = {
    "BrokerID": "string",  # broker ID
}
structDict['CThostFtdcQryCFMMCBrokerKeyField'] = CThostFtdcQryCFMMCBrokerKeyField
# CFMMC (margin monitoring system) broker key.
CThostFtdcCFMMCBrokerKeyField = {
    "BrokerID": "string",       # broker ID
    "ParticipantID": "string",  # broker unified code
    "CreateDate": "string",     # key creation date
    "CreateTime": "string",     # key creation time
    "KeyID": "int",             # key ID
    "CurrentKey": "string",     # dynamic key
    "KeyKind": "char",          # dynamic key kind
}
structDict['CThostFtdcCFMMCBrokerKeyField'] = CThostFtdcCFMMCBrokerKeyField
# CFMMC broker trading-account key.
CThostFtdcCFMMCTradingAccountKeyField = {
    "BrokerID": "string",       # broker ID
    "ParticipantID": "string",  # broker unified code
    "AccountID": "string",      # investor account ID
    "KeyID": "int",             # key ID
    "CurrentKey": "string",     # dynamic key
}
structDict['CThostFtdcCFMMCTradingAccountKeyField'] = CThostFtdcCFMMCTradingAccountKeyField
# Request to query the CFMMC broker trading-account key.
CThostFtdcQryCFMMCTradingAccountKeyField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
}
structDict['CThostFtdcQryCFMMCTradingAccountKeyField'] = CThostFtdcQryCFMMCTradingAccountKeyField
# User OTP (one-time password) token parameters.
CThostFtdcBrokerUserOTPParamField = {
    "BrokerID": "string",      # broker ID
    "UserID": "string",        # user ID
    "OTPVendorsID": "string",  # OTP vendor ID
    "SerialNumber": "string",  # OTP token serial number
    "AuthKey": "string",       # token auth key
    "LastDrift": "int",        # last drift value
    "LastSuccess": "int",      # last success value
    "OTPType": "char",         # OTP token type
}
structDict['CThostFtdcBrokerUserOTPParamField'] = CThostFtdcBrokerUserOTPParamField
# Manually synchronize a user's OTP token.
CThostFtdcManualSyncBrokerUserOTPField = {
    "BrokerID": "string",   # broker ID
    "UserID": "string",     # user ID
    "OTPType": "char",      # OTP token type
    "FirstOTP": "string",   # first one-time password
    "SecondOTP": "string",  # second one-time password
}
structDict['CThostFtdcManualSyncBrokerUserOTPField'] = CThostFtdcManualSyncBrokerUserOTPField
# Investor commission rate model (template).
CThostFtdcCommRateModelField = {
    "BrokerID": "string",       # broker ID
    "CommModelID": "string",    # commission rate model ID
    "CommModelName": "string",  # model name
}
structDict['CThostFtdcCommRateModelField'] = CThostFtdcCommRateModelField
# Request to query investor commission rate model.
CThostFtdcQryCommRateModelField = {
    "BrokerID": "string",     # broker ID
    "CommModelID": "string",  # commission rate model ID
}
structDict['CThostFtdcQryCommRateModelField'] = CThostFtdcQryCommRateModelField
# Investor margin rate model (template).
CThostFtdcMarginModelField = {
    "BrokerID": "string",         # broker ID
    "MarginModelID": "string",    # margin rate model ID
    "MarginModelName": "string",  # model name
}
structDict['CThostFtdcMarginModelField'] = CThostFtdcMarginModelField
# Request to query investor margin rate model.
CThostFtdcQryMarginModelField = {
    "BrokerID": "string",       # broker ID
    "MarginModelID": "string",  # margin rate model ID
}
structDict['CThostFtdcQryMarginModelField'] = CThostFtdcQryMarginModelField
# Warehouse warrant offset information.
CThostFtdcEWarrantOffsetField = {
    "TradingDay": "string",    # trading day
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "ExchangeID": "string",    # exchange ID
    "InstrumentID": "string",  # instrument ID
    "Direction": "char",       # buy/sell direction
    "HedgeFlag": "char",       # speculation/hedge flag
    "Volume": "int",           # volume
}
structDict['CThostFtdcEWarrantOffsetField'] = CThostFtdcEWarrantOffsetField
# Query warehouse warrant offset information.
CThostFtdcQryEWarrantOffsetField = {
    "BrokerID": "string",      # broker ID
    "InvestorID": "string",    # investor ID
    "ExchangeID": "string",    # exchange ID
    "InstrumentID": "string",  # instrument ID
}
structDict['CThostFtdcQryEWarrantOffsetField'] = CThostFtdcQryEWarrantOffsetField
# Query investor product / cross-product margin.
CThostFtdcQryInvestorProductGroupMarginField = {
    "BrokerID": "string",        # broker ID
    "InvestorID": "string",      # investor ID
    "ProductGroupID": "string",  # product / cross-product group ID
    "HedgeFlag": "char",         # speculation/hedge flag
}
structDict['CThostFtdcQryInvestorProductGroupMarginField'] = CThostFtdcQryInvestorProductGroupMarginField
# Investor product / cross-product margin.
CThostFtdcInvestorProductGroupMarginField = {
    "ProductGroupID": "string",          # product / cross-product group ID
    "BrokerID": "string",                # broker ID
    "InvestorID": "string",              # investor ID
    "TradingDay": "string",              # trading day
    "SettlementID": "int",               # settlement ID
    "FrozenMargin": "float",             # frozen margin
    "LongFrozenMargin": "float",         # long frozen margin
    "ShortFrozenMargin": "float",        # short frozen margin
    "UseMargin": "float",                # margin in use
    "LongUseMargin": "float",            # long margin in use
    "ShortUseMargin": "float",           # short margin in use
    "ExchMargin": "float",               # exchange margin
    "LongExchMargin": "float",           # exchange long margin
    "ShortExchMargin": "float",          # exchange short margin
    "CloseProfit": "float",              # close profit
    "FrozenCommission": "float",         # frozen commission
    "Commission": "float",               # commission
    "FrozenCash": "float",               # frozen cash
    "CashIn": "float",                   # cash-in (funds difference)
    "PositionProfit": "float",           # position profit
    "OffsetAmount": "float",             # total offset amount
    "LongOffsetAmount": "float",         # long total offset amount
    "ShortOffsetAmount": "float",        # short total offset amount
    "ExchOffsetAmount": "float",         # exchange total offset amount
    "LongExchOffsetAmount": "float",     # exchange long total offset amount
    "ShortExchOffsetAmount": "float",    # exchange short total offset amount
    "HedgeFlag": "char",                 # speculation/hedge flag
}
structDict['CThostFtdcInvestorProductGroupMarginField'] = CThostFtdcInvestorProductGroupMarginField
# Query CFMMC (monitoring center) user token.
CThostFtdcQueryCFMMCTradingAccountTokenField = {
    "BrokerID": "string",    # broker ID
    "InvestorID": "string",  # investor ID
}
structDict['CThostFtdcQueryCFMMCTradingAccountTokenField'] = CThostFtdcQueryCFMMCTradingAccountTokenField
# CFMMC (monitoring center) user token.
CThostFtdcCFMMCTradingAccountTokenField = {
    "BrokerID": "string",       # broker ID
    "ParticipantID": "string",  # broker unified code
    "AccountID": "string",      # investor account ID
    "KeyID": "int",             # key ID
    "Token": "string",          # dynamic token
}
structDict['CThostFtdcCFMMCTradingAccountTokenField'] = CThostFtdcCFMMCTradingAccountTokenField
# Investor instruction right.
CThostFtdcInstructionRightField = {
    "BrokerID": "string",          # broker ID
    "ExchangeID": "string",        # exchange ID
    "InvestorID": "string",        # investor ID
    "InstructionRight": "char",    # instruction right type
    "IsForbidden": "int",          # whether forbidden
}
structDict['CThostFtdcInstructionRightField'] = CThostFtdcInstructionRightField
# Query product group.
CThostFtdcQryProductGroupField = {
    "ProductID": "string",   # product ID
    "ExchangeID": "string",  # exchange ID
}
structDict['CThostFtdcQryProductGroupField'] = CThostFtdcQryProductGroupField
# Product group for investor product / cross-product margin.
CThostFtdcProductGroupField = {
    "ProductID": "string",       # product ID
    "ExchangeID": "string",      # exchange ID
    "ProductGroupID": "string",  # product group ID
}
structDict['CThostFtdcProductGroupField'] = CThostFtdcProductGroupField
# Bank-futures transfer account-opening request.
CThostFtdcReqOpenAccountField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank ID
    "BankBranchID": "string",        # bank branch ID
    "BrokerID": "string",            # futures broker ID
    "BrokerBranchID": "string",      # broker branch ID
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "Gender": "char",                # gender
    "CountryCode": "string",         # country code
    "CustType": "char",              # customer type
    "Address": "string",             # address
    "ZipCode": "string",             # zip code
    "Telephone": "string",           # telephone number
    "MobilePhone": "string",         # mobile phone
    "Fax": "string",                 # fax
    "EMail": "string",               # e-mail
    "MoneyAccountStatus": "char",    # money account status
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "InstallID": "int",              # installation ID
    "VerifyCertNoFlag": "char",      # verify-ID-number flag
    "CurrencyID": "string",          # currency ID
    "CashExchangeCode": "char",      # cash/remit exchange flag
    "Digest": "string",              # digest
    "BankAccType": "char",           # bank account type
    "DeviceID": "string",            # channel (device) flag
    "BankSecuAccType": "char",       # futures unit account type
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "BankSecuAcc": "string",         # futures unit account
    "BankPwdFlag": "char",           # bank password flag
    "SecuPwdFlag": "char",           # futures funds password check flag
    "OperNo": "string",              # teller (operator) number
    "TID": "int",                    # transaction ID
    "UserID": "string",              # user ID
}
structDict['CThostFtdcReqOpenAccountField'] = CThostFtdcReqOpenAccountField
# Bank-futures transfer account-cancellation request.
CThostFtdcReqCancelAccountField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank ID
    "BankBranchID": "string",        # bank branch ID
    "BrokerID": "string",            # futures broker ID
    "BrokerBranchID": "string",      # broker branch ID
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "Gender": "char",                # gender
    "CountryCode": "string",         # country code
    "CustType": "char",              # customer type
    "Address": "string",             # address
    "ZipCode": "string",             # zip code
    "Telephone": "string",           # telephone number
    "MobilePhone": "string",         # mobile phone
    "Fax": "string",                 # fax
    "EMail": "string",               # e-mail
    "MoneyAccountStatus": "char",    # money account status
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "InstallID": "int",              # installation ID
    "VerifyCertNoFlag": "char",      # verify-ID-number flag
    "CurrencyID": "string",          # currency ID
    "CashExchangeCode": "char",      # cash/remit exchange flag
    "Digest": "string",              # digest
    "BankAccType": "char",           # bank account type
    "DeviceID": "string",            # channel (device) flag
    "BankSecuAccType": "char",       # futures unit account type
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "BankSecuAcc": "string",         # futures unit account
    "BankPwdFlag": "char",           # bank password flag
    "SecuPwdFlag": "char",           # futures funds password check flag
    "OperNo": "string",              # teller (operator) number
    "TID": "int",                    # transaction ID
    "UserID": "string",              # user ID
}
structDict['CThostFtdcReqCancelAccountField'] = CThostFtdcReqCancelAccountField
# Bank account change request.
CThostFtdcReqChangeAccountField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank ID
    "BankBranchID": "string",        # bank branch ID
    "BrokerID": "string",            # futures broker ID
    "BrokerBranchID": "string",      # broker branch ID
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "Gender": "char",                # gender
    "CountryCode": "string",         # country code
    "CustType": "char",              # customer type
    "Address": "string",             # address
    "ZipCode": "string",             # zip code
    "Telephone": "string",           # telephone number
    "MobilePhone": "string",         # mobile phone
    "Fax": "string",                 # fax
    "EMail": "string",               # e-mail
    "MoneyAccountStatus": "char",    # money account status
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "NewBankAccount": "string",      # new bank account
    "NewBankPassWord": "string",     # new bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "BankAccType": "char",           # bank account type
    "InstallID": "int",              # installation ID
    "VerifyCertNoFlag": "char",      # verify-ID-number flag
    "CurrencyID": "string",          # currency ID
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "BankPwdFlag": "char",           # bank password flag
    "SecuPwdFlag": "char",           # futures funds password check flag
    "TID": "int",                    # transaction ID
    "Digest": "string",              # digest
}
structDict['CThostFtdcReqChangeAccountField'] = CThostFtdcReqChangeAccountField
# Transfer request.
CThostFtdcReqTransferField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank ID
    "BankBranchID": "string",        # bank branch ID
    "BrokerID": "string",            # futures broker ID
    "BrokerBranchID": "string",      # broker branch ID
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CustType": "char",              # customer type
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "InstallID": "int",              # installation ID
    "FutureSerial": "int",           # futures company serial number
    "UserID": "string",              # user ID
    "VerifyCertNoFlag": "char",      # verify-ID-number flag
    "CurrencyID": "string",          # currency ID
    "TradeAmount": "float",          # transfer amount
    "FutureFetchAmount": "float",    # withdrawable futures amount
    "FeePayFlag": "char",            # fee payment flag
    "CustFee": "float",              # fee charged to the customer
    "BrokerFee": "float",            # fee charged to the futures company
    "Message": "string",             # message from sender to receiver
    "Digest": "string",              # digest
    "BankAccType": "char",           # bank account type
    "DeviceID": "string",            # channel (device) flag
    "BankSecuAccType": "char",       # futures unit account type
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "BankSecuAcc": "string",         # futures unit account
    "BankPwdFlag": "char",           # bank password flag
    "SecuPwdFlag": "char",           # futures funds password check flag
    "OperNo": "string",              # teller (operator) number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
    "TransferStatus": "char",        # transfer transaction status
}
structDict['CThostFtdcReqTransferField'] = CThostFtdcReqTransferField
# Response to a bank-initiated bank-to-futures transfer.
CThostFtdcRspTransferField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank ID
    "BankBranchID": "string",        # bank branch ID
    "BrokerID": "string",            # futures broker ID
    "BrokerBranchID": "string",      # broker branch ID
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CustType": "char",              # customer type
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "InstallID": "int",              # installation ID
    "FutureSerial": "int",           # futures company serial number
    "UserID": "string",              # user ID
    "VerifyCertNoFlag": "char",      # verify-ID-number flag
    "CurrencyID": "string",          # currency ID
    "TradeAmount": "float",          # transfer amount
    "FutureFetchAmount": "float",    # withdrawable futures amount
    "FeePayFlag": "char",            # fee payment flag
    "CustFee": "float",              # fee charged to the customer
    "BrokerFee": "float",            # fee charged to the futures company
    "Message": "string",             # message from sender to receiver
    "Digest": "string",              # digest
    "BankAccType": "char",           # bank account type
    "DeviceID": "string",            # channel (device) flag
    "BankSecuAccType": "char",       # futures unit account type
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "BankSecuAcc": "string",         # futures unit account
    "BankPwdFlag": "char",           # bank password flag
    "SecuPwdFlag": "char",           # futures funds password check flag
    "OperNo": "string",              # teller (operator) number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
    "TransferStatus": "char",        # transfer transaction status
    "ErrorID": "int",                # error code
    "ErrorMsg": "string",            # error message
}
structDict['CThostFtdcRspTransferField'] = CThostFtdcRspTransferField
# Repeal (reversal) request.
CThostFtdcReqRepealField = {
    "RepealTimeInterval": "int",     # repeal time interval
    "RepealedTimes": "int",          # number of repeals already done
    "BankRepealFlag": "char",        # bank repeal flag
    "BrokerRepealFlag": "char",      # broker repeal flag
    "PlateRepealSerial": "int",      # platform serial of the repealed transaction
    "BankRepealSerial": "string",    # bank serial of the repealed transaction
    "FutureRepealSerial": "int",     # futures serial of the repealed transaction
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank ID
    "BankBranchID": "string",        # bank branch ID
    "BrokerID": "string",            # futures broker ID
    "BrokerBranchID": "string",      # broker branch ID
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CustType": "char",              # customer type
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "InstallID": "int",              # installation ID
    "FutureSerial": "int",           # futures company serial number
    "UserID": "string",              # user ID
    "VerifyCertNoFlag": "char",      # verify-ID-number flag
    "CurrencyID": "string",          # currency ID
    "TradeAmount": "float",          # transfer amount
    "FutureFetchAmount": "float",    # withdrawable futures amount
    "FeePayFlag": "char",            # fee payment flag
    "CustFee": "float",              # fee charged to the customer
    "BrokerFee": "float",            # fee charged to the futures company
    "Message": "string",             # message from sender to receiver
    "Digest": "string",              # digest
    "BankAccType": "char",           # bank account type
    "DeviceID": "string",            # channel (device) flag
    "BankSecuAccType": "char",       # futures unit account type
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "BankSecuAcc": "string",         # futures unit account
    "BankPwdFlag": "char",           # bank password flag
    "SecuPwdFlag": "char",           # futures funds password check flag
    "OperNo": "string",              # teller (operator) number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
    "TransferStatus": "char",        # transfer transaction status
}
structDict['CThostFtdcReqRepealField'] = CThostFtdcReqRepealField
# Repeal (reversal) response.
CThostFtdcRspRepealField = {
    "RepealTimeInterval": "int",     # repeal time interval
    "RepealedTimes": "int",          # number of times already repealed
    "BankRepealFlag": "char",        # bank repeal flag
    "BrokerRepealFlag": "char",      # broker repeal flag
    "PlateRepealSerial": "int",      # repealed platform serial number
    "BankRepealSerial": "string",    # repealed bank serial number
    "FutureRepealSerial": "int",     # repealed futures serial number
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CustType": "char",              # customer type
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "InstallID": "int",              # install ID
    "FutureSerial": "int",           # futures company serial number
    "UserID": "string",              # user ID
    "VerifyCertNoFlag": "char",      # verify-customer-certificate-number flag
    "CurrencyID": "string",          # currency code
    "TradeAmount": "float",          # transfer amount
    "FutureFetchAmount": "float",    # futures withdrawable amount
    "FeePayFlag": "char",            # fee payment flag
    "CustFee": "float",              # fee receivable from customer
    "BrokerFee": "float",            # fee receivable from broker
    "Message": "string",             # message from sender to receiver
    "Digest": "string",              # digest
    "BankAccType": "char",           # bank account type
    "DeviceID": "string",            # channel flag
    "BankSecuAccType": "char",       # futures unit account type
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "BankSecuAcc": "string",         # futures unit account
    "BankPwdFlag": "char",           # bank password flag
    "SecuPwdFlag": "char",           # futures fund password verify flag
    "OperNo": "string",              # teller number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
    "TransferStatus": "char",        # transfer transaction status
    "ErrorID": "int",                # error code
    "ErrorMsg": "string",            # error message
}
structDict['CThostFtdcRspRepealField'] = CThostFtdcRspRepealField
# Query account information request.
CThostFtdcReqQueryAccountField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CustType": "char",              # customer type
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "FutureSerial": "int",           # futures company serial number
    "InstallID": "int",              # install ID
    "UserID": "string",              # user ID
    "VerifyCertNoFlag": "char",      # verify-customer-certificate-number flag
    "CurrencyID": "string",          # currency code
    "Digest": "string",              # digest
    "BankAccType": "char",           # bank account type
    "DeviceID": "string",            # channel flag
    "BankSecuAccType": "char",       # futures unit account type
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "BankSecuAcc": "string",         # futures unit account
    "BankPwdFlag": "char",           # bank password flag
    "SecuPwdFlag": "char",           # futures fund password verify flag
    "OperNo": "string",              # teller number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
}
structDict['CThostFtdcReqQueryAccountField'] = CThostFtdcReqQueryAccountField
# Query account information response.
CThostFtdcRspQueryAccountField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CustType": "char",              # customer type
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "FutureSerial": "int",           # futures company serial number
    "InstallID": "int",              # install ID
    "UserID": "string",              # user ID
    "VerifyCertNoFlag": "char",      # verify-customer-certificate-number flag
    "CurrencyID": "string",          # currency code
    "Digest": "string",              # digest
    "BankAccType": "char",           # bank account type
    "DeviceID": "string",            # channel flag
    "BankSecuAccType": "char",       # futures unit account type
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "BankSecuAcc": "string",         # futures unit account
    "BankPwdFlag": "char",           # bank password flag
    "SecuPwdFlag": "char",           # futures fund password verify flag
    "OperNo": "string",              # teller number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
    "BankUseAmount": "float",        # bank available amount
    "BankFetchAmount": "float",      # bank withdrawable amount
}
structDict['CThostFtdcRspQueryAccountField'] = CThostFtdcRspQueryAccountField
# Broker sign-in / sign-out.
CThostFtdcFutureSignIOField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "InstallID": "int",              # install ID
    "UserID": "string",              # user ID
    "Digest": "string",              # digest
    "CurrencyID": "string",          # currency code
    "DeviceID": "string",            # channel flag
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "OperNo": "string",              # teller number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
}
structDict['CThostFtdcFutureSignIOField'] = CThostFtdcFutureSignIOField
# Broker sign-in response.
CThostFtdcRspFutureSignInField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "InstallID": "int",              # install ID
    "UserID": "string",              # user ID
    "Digest": "string",              # digest
    "CurrencyID": "string",          # currency code
    "DeviceID": "string",            # channel flag
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "OperNo": "string",              # teller number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
    "ErrorID": "int",                # error code
    "ErrorMsg": "string",            # error message
    "PinKey": "string",              # PIN key
    "MacKey": "string",              # MAC key
}
structDict['CThostFtdcRspFutureSignInField'] = CThostFtdcRspFutureSignInField
# Broker sign-out request.
CThostFtdcReqFutureSignOutField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "InstallID": "int",              # install ID
    "UserID": "string",              # user ID
    "Digest": "string",              # digest
    "CurrencyID": "string",          # currency code
    "DeviceID": "string",            # channel flag
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "OperNo": "string",              # teller number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
}
structDict['CThostFtdcReqFutureSignOutField'] = CThostFtdcReqFutureSignOutField
# Broker sign-out response.
CThostFtdcRspFutureSignOutField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "InstallID": "int",              # install ID
    "UserID": "string",              # user ID
    "Digest": "string",              # digest
    "CurrencyID": "string",          # currency code
    "DeviceID": "string",            # channel flag
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "OperNo": "string",              # teller number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
    "ErrorID": "int",                # error code
    "ErrorMsg": "string",            # error message
}
structDict['CThostFtdcRspFutureSignOutField'] = CThostFtdcRspFutureSignOutField
# Query trade result by serial number — request.
CThostFtdcReqQueryTradeResultBySerialField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "Reference": "int",              # reference serial number
    "RefrenceIssureType": "char",    # institution type of the serial's issuer
    "RefrenceIssure": "string",      # institution code of the serial's issuer
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CustType": "char",              # customer type
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "CurrencyID": "string",          # currency code
    "TradeAmount": "float",          # transfer amount
    "Digest": "string",              # digest
}
structDict['CThostFtdcReqQueryTradeResultBySerialField'] = CThostFtdcReqQueryTradeResultBySerialField
# Query trade result by serial number — response.
CThostFtdcRspQueryTradeResultBySerialField = {
    "TradeCode": "string",                     # business function code
    "BankID": "string",                        # bank code
    "BankBranchID": "string",                  # bank branch code
    "BrokerID": "string",                      # broker code
    "BrokerBranchID": "string",                # broker branch code
    "TradeDate": "string",                     # trade date
    "TradeTime": "string",                     # trade time
    "BankSerial": "string",                    # bank serial number
    "TradingDay": "string",                    # trading system date
    "PlateSerial": "int",                      # bank-futures platform message serial
    "LastFragment": "char",                    # last fragment flag
    "SessionID": "int",                        # session ID
    "ErrorID": "int",                          # error code
    "ErrorMsg": "string",                      # error message
    "Reference": "int",                        # reference serial number
    "RefrenceIssureType": "char",              # institution type of the serial's issuer
    "RefrenceIssure": "string",                # institution code of the serial's issuer
    "OriginReturnCode": "string",              # original return code
    "OriginDescrInfoForReturnCode": "string",  # original return code description
    "BankAccount": "string",                   # bank account
    "BankPassWord": "string",                  # bank password
    "AccountID": "string",                     # investor account ID
    "Password": "string",                      # futures password
    "CurrencyID": "string",                    # currency code
    "TradeAmount": "float",                    # transfer amount
    "Digest": "string",                        # digest
}
structDict['CThostFtdcRspQueryTradeResultBySerialField'] = CThostFtdcRspQueryTradeResultBySerialField
# Day-end file ready request.
CThostFtdcReqDayEndFileReadyField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "FileBusinessCode": "char",      # file business function code
    "Digest": "string",              # digest
}
structDict['CThostFtdcReqDayEndFileReadyField'] = CThostFtdcReqDayEndFileReadyField
# Return result.
CThostFtdcReturnResultField = {
    "ReturnCode": "string",                # return code
    "DescrInfoForReturnCode": "string",    # return code description
}
structDict['CThostFtdcReturnResultField'] = CThostFtdcReturnResultField
# Verify futures fund password.
CThostFtdcVerifyFuturePasswordField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "InstallID": "int",              # install ID
    "TID": "int",                    # transaction ID
    "CurrencyID": "string",          # currency code
}
structDict['CThostFtdcVerifyFuturePasswordField'] = CThostFtdcVerifyFuturePasswordField
# Verify customer information.
CThostFtdcVerifyCustInfoField = {
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CustType": "char",              # customer type
}
structDict['CThostFtdcVerifyCustInfoField'] = CThostFtdcVerifyCustInfoField
# Verify futures fund password and customer information.
CThostFtdcVerifyFuturePasswordAndCustInfoField = {
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CustType": "char",              # customer type
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "CurrencyID": "string",          # currency code
}
structDict['CThostFtdcVerifyFuturePasswordAndCustInfoField'] = CThostFtdcVerifyFuturePasswordAndCustInfoField
# Deposit result inform.
# NOTE(review): the generated source labelled this struct with the previous
# struct's description; the field set shows it reports a deposit result.
CThostFtdcDepositResultInformField = {
    "DepositSeqNo": "string",              # deposit/withdrawal serial number returned by the bank-futures gateway
    "BrokerID": "string",                  # broker ID
    "InvestorID": "string",                # investor ID
    "Deposit": "float",                    # deposit amount
    "RequestID": "int",                    # request ID
    "ReturnCode": "string",                # return code
    "DescrInfoForReturnCode": "string",    # return code description
}
structDict['CThostFtdcDepositResultInformField'] = CThostFtdcDepositResultInformField
# Key synchronization request from trading core to bank-futures gateway.
CThostFtdcReqSyncKeyField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "InstallID": "int",              # install ID
    "UserID": "string",              # user ID
    "Message": "string",             # message from trading core to the gateway
    "DeviceID": "string",            # channel flag
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "OperNo": "string",              # teller number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
}
structDict['CThostFtdcReqSyncKeyField'] = CThostFtdcReqSyncKeyField
# Key synchronization response from trading core to bank-futures gateway.
CThostFtdcRspSyncKeyField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "InstallID": "int",              # install ID
    "UserID": "string",              # user ID
    "Message": "string",             # message from trading core to the gateway
    "DeviceID": "string",            # channel flag
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "OperNo": "string",              # teller number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
    "ErrorID": "int",                # error code
    "ErrorMsg": "string",            # error message
}
structDict['CThostFtdcRspSyncKeyField'] = CThostFtdcRspSyncKeyField
# Query account information notification.
CThostFtdcNotifyQueryAccountField = {
    "TradeCode": "string",           # business function code
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BrokerID": "string",            # broker code
    "BrokerBranchID": "string",      # broker branch code
    "TradeDate": "string",           # trade date
    "TradeTime": "string",           # trade time
    "BankSerial": "string",          # bank serial number
    "TradingDay": "string",          # trading system date
    "PlateSerial": "int",            # bank-futures platform message serial
    "LastFragment": "char",          # last fragment flag
    "SessionID": "int",              # session ID
    "CustomerName": "string",        # customer name
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CustType": "char",              # customer type
    "BankAccount": "string",         # bank account
    "BankPassWord": "string",        # bank password
    "AccountID": "string",           # investor account ID
    "Password": "string",            # futures password
    "FutureSerial": "int",           # futures company serial number
    "InstallID": "int",              # install ID
    "UserID": "string",              # user ID
    "VerifyCertNoFlag": "char",      # verify-customer-certificate-number flag
    "CurrencyID": "string",          # currency code
    "Digest": "string",              # digest
    "BankAccType": "char",           # bank account type
    "DeviceID": "string",            # channel flag
    "BankSecuAccType": "char",       # futures unit account type
    "BrokerIDByBank": "string",      # broker code assigned by the bank
    "BankSecuAcc": "string",         # futures unit account
    "BankPwdFlag": "char",           # bank password flag
    "SecuPwdFlag": "char",           # futures fund password verify flag
    "OperNo": "string",              # teller number
    "RequestID": "int",              # request ID
    "TID": "int",                    # transaction ID
    "BankUseAmount": "float",        # bank available amount
    "BankFetchAmount": "float",      # bank withdrawable amount
    "ErrorID": "int",                # error code
    "ErrorMsg": "string",            # error message
}
structDict['CThostFtdcNotifyQueryAccountField'] = CThostFtdcNotifyQueryAccountField
# Bank-futures transfer transaction journal entry.
CThostFtdcTransferSerialField = {
    "PlateSerial": "int",            # platform serial number
    "TradeDate": "string",           # trade initiator date
    "TradingDay": "string",          # trading day
    "TradeTime": "string",           # trade time
    "TradeCode": "string",           # trade code
    "SessionID": "int",              # session ID
    "BankID": "string",              # bank code
    "BankBranchID": "string",        # bank branch code
    "BankAccType": "char",           # bank account type
    "BankAccount": "string",         # bank account
    "BankSerial": "string",          # bank serial number
    "BrokerID": "string",            # futures company code
    "BrokerBranchID": "string",      # broker branch code
    "FutureAccType": "char",         # futures company account type
    "AccountID": "string",           # investor account ID
    "InvestorID": "string",          # investor ID
    "FutureSerial": "int",           # futures company serial number
    "IdCardType": "char",            # ID document type
    "IdentifiedCardNo": "string",    # ID document number
    "CurrencyID": "string",          # currency code
    "TradeAmount": "float",          # trade amount
    "CustFee": "float",              # fee receivable from customer
    "BrokerFee": "float",            # fee receivable from broker
    "AvailabilityFlag": "char",      # availability flag
    "OperatorCode": "string",        # operator code
    "BankNewAccount": "string",      # new bank account
    "ErrorID": "int",                # error code
    "ErrorMsg": "string",            # error message
}
structDict['CThostFtdcTransferSerialField'] = CThostFtdcTransferSerialField
# Query transfer journal request.
CThostFtdcQryTransferSerialField = {
    "BrokerID": "string",      # broker ID
    "AccountID": "string",     # investor account ID
    "BankID": "string",        # bank code
    "CurrencyID": "string",    # currency code
}
structDict['CThostFtdcQryTransferSerialField'] = CThostFtdcQryTransferSerialField
# Notification: futures broker sign-in at the bank-futures gateway.
CThostFtdcNotifyFutureSignInField = {
    "TradeCode": "string",       # business function code
    "BankID": "string",          # bank ID
    "BankBranchID": "string",    # bank branch ID
    "BrokerID": "string",        # futures broker ID
    "BrokerBranchID": "string",  # futures broker branch ID
    "TradeDate": "string",       # trade date
    "TradeTime": "string",       # trade time
    "BankSerial": "string",      # bank serial number
    "TradingDay": "string",      # trading system date
    "PlateSerial": "int",        # bank-futures platform message serial
    "LastFragment": "char",      # last-fragment flag
    "SessionID": "int",          # session ID
    "InstallID": "int",          # install ID
    "UserID": "string",          # user ID
    "Digest": "string",          # message digest
    "CurrencyID": "string",      # currency ID
    "DeviceID": "string",        # channel/device flag
    "BrokerIDByBank": "string",  # broker ID as registered at the bank
    "OperNo": "string",          # bank teller number
    "RequestID": "int",          # request ID
    "TID": "int",                # transaction ID
    "ErrorID": "int",            # error ID
    "ErrorMsg": "string",        # error message
    "PinKey": "string",          # PIN key
    "MacKey": "string",          # MAC key
}
structDict['CThostFtdcNotifyFutureSignInField'] = CThostFtdcNotifyFutureSignInField
# Notification: futures broker sign-out at the bank-futures gateway.
CThostFtdcNotifyFutureSignOutField = {
    "TradeCode": "string",       # business function code
    "BankID": "string",          # bank ID
    "BankBranchID": "string",    # bank branch ID
    "BrokerID": "string",        # futures broker ID
    "BrokerBranchID": "string",  # futures broker branch ID
    "TradeDate": "string",       # trade date
    "TradeTime": "string",       # trade time
    "BankSerial": "string",      # bank serial number
    "TradingDay": "string",      # trading system date
    "PlateSerial": "int",        # bank-futures platform message serial
    "LastFragment": "char",      # last-fragment flag
    "SessionID": "int",          # session ID
    "InstallID": "int",          # install ID
    "UserID": "string",          # user ID
    "Digest": "string",          # message digest
    "CurrencyID": "string",      # currency ID
    "DeviceID": "string",        # channel/device flag
    "BrokerIDByBank": "string",  # broker ID as registered at the bank
    "OperNo": "string",          # bank teller number
    "RequestID": "int",          # request ID
    "TID": "int",                # transaction ID
    "ErrorID": "int",            # error ID
    "ErrorMsg": "string",        # error message
}
structDict['CThostFtdcNotifyFutureSignOutField'] = CThostFtdcNotifyFutureSignOutField
# Notification from the trading core to the bank-futures gateway with the
# result of a key-synchronization request.
CThostFtdcNotifySyncKeyField = {
    "TradeCode": "string",       # business function code
    "BankID": "string",          # bank ID
    "BankBranchID": "string",    # bank branch ID
    "BrokerID": "string",        # futures broker ID
    "BrokerBranchID": "string",  # futures broker branch ID
    "TradeDate": "string",       # trade date
    "TradeTime": "string",       # trade time
    "BankSerial": "string",      # bank serial number
    "TradingDay": "string",      # trading system date
    "PlateSerial": "int",        # bank-futures platform message serial
    "LastFragment": "char",      # last-fragment flag
    "SessionID": "int",          # session ID
    "InstallID": "int",          # install ID
    "UserID": "string",          # user ID
    "Message": "string",         # message from trading core to the gateway
    "DeviceID": "string",        # channel/device flag
    "BrokerIDByBank": "string",  # broker ID as registered at the bank
    "OperNo": "string",          # bank teller number
    "RequestID": "int",          # request ID
    "TID": "int",                # transaction ID
    "ErrorID": "int",            # error ID
    "ErrorMsg": "string",        # error message
}
structDict['CThostFtdcNotifySyncKeyField'] = CThostFtdcNotifySyncKeyField
# Request: query bank-futures account registration relationships.
CThostFtdcQryAccountregisterField = {
    "BrokerID": "string",      # broker ID
    "AccountID": "string",     # investor account ID
    "BankID": "string",        # bank ID
    "BankBranchID": "string",  # bank branch ID
    "CurrencyID": "string",    # currency ID
}
structDict['CThostFtdcQryAccountregisterField'] = CThostFtdcQryAccountregisterField
# Customer account open/close registration record.
CThostFtdcAccountregisterField = {
    "TradeDay": "string",          # trade day
    "BankID": "string",            # bank ID
    "BankBranchID": "string",      # bank branch ID
    "BankAccount": "string",       # bank account
    "BrokerID": "string",          # futures broker ID
    "BrokerBranchID": "string",    # futures broker branch ID
    "AccountID": "string",         # investor account ID
    "IdCardType": "char",          # identity document type
    "IdentifiedCardNo": "string",  # identity document number
    "CustomerName": "string",      # customer name
    "CurrencyID": "string",        # currency ID
    "OpenOrDestroy": "char",       # open-or-close-account category
    "RegDate": "string",           # registration (sign-up) date
    "OutDate": "string",           # cancellation date
    "TID": "int",                  # transaction ID
    "CustType": "char",            # customer type
    "BankAccType": "char",         # bank account type
}
structDict['CThostFtdcAccountregisterField'] = CThostFtdcAccountregisterField
# Bank-futures account-opening message.
CThostFtdcOpenAccountField = {
    "TradeCode": "string",          # business function code
    "BankID": "string",             # bank ID
    "BankBranchID": "string",       # bank branch ID
    "BrokerID": "string",           # futures broker ID
    "BrokerBranchID": "string",     # futures broker branch ID
    "TradeDate": "string",          # trade date
    "TradeTime": "string",          # trade time
    "BankSerial": "string",         # bank serial number
    "TradingDay": "string",         # trading system date
    "PlateSerial": "int",           # bank-futures platform message serial
    "LastFragment": "char",         # last-fragment flag
    "SessionID": "int",             # session ID
    "CustomerName": "string",       # customer name
    "IdCardType": "char",           # identity document type
    "IdentifiedCardNo": "string",   # identity document number
    "Gender": "char",               # gender
    "CountryCode": "string",        # country code
    "CustType": "char",             # customer type
    "Address": "string",            # address
    "ZipCode": "string",            # zip code
    "Telephone": "string",          # telephone number
    "MobilePhone": "string",        # mobile phone
    "Fax": "string",                # fax
    "EMail": "string",              # e-mail
    "MoneyAccountStatus": "char",   # money account status
    "BankAccount": "string",        # bank account
    "BankPassWord": "string",       # bank password
    "AccountID": "string",          # investor account ID
    "Password": "string",           # futures password
    "InstallID": "int",             # install ID
    "VerifyCertNoFlag": "char",     # verify-certificate-number flag
    "CurrencyID": "string",         # currency ID
    "CashExchangeCode": "char",     # cash/remittance exchange flag
    "Digest": "string",             # message digest
    "BankAccType": "char",          # bank account type
    "DeviceID": "string",           # channel/device flag
    "BankSecuAccType": "char",      # futures-unit account type
    "BrokerIDByBank": "string",     # broker ID as registered at the bank
    "BankSecuAcc": "string",        # futures-unit account
    "BankPwdFlag": "char",          # bank password flag
    "SecuPwdFlag": "char",          # futures password verification flag
    "OperNo": "string",             # bank teller number
    "TID": "int",                   # transaction ID
    "UserID": "string",             # user ID
    "ErrorID": "int",               # error ID
    "ErrorMsg": "string",           # error message
}
structDict['CThostFtdcOpenAccountField'] = CThostFtdcOpenAccountField
# Bank-futures account-closing message (same layout as account opening).
CThostFtdcCancelAccountField = {
    "TradeCode": "string",          # business function code
    "BankID": "string",             # bank ID
    "BankBranchID": "string",       # bank branch ID
    "BrokerID": "string",           # futures broker ID
    "BrokerBranchID": "string",     # futures broker branch ID
    "TradeDate": "string",          # trade date
    "TradeTime": "string",          # trade time
    "BankSerial": "string",         # bank serial number
    "TradingDay": "string",         # trading system date
    "PlateSerial": "int",           # bank-futures platform message serial
    "LastFragment": "char",         # last-fragment flag
    "SessionID": "int",             # session ID
    "CustomerName": "string",       # customer name
    "IdCardType": "char",           # identity document type
    "IdentifiedCardNo": "string",   # identity document number
    "Gender": "char",               # gender
    "CountryCode": "string",        # country code
    "CustType": "char",             # customer type
    "Address": "string",            # address
    "ZipCode": "string",            # zip code
    "Telephone": "string",          # telephone number
    "MobilePhone": "string",        # mobile phone
    "Fax": "string",                # fax
    "EMail": "string",              # e-mail
    "MoneyAccountStatus": "char",   # money account status
    "BankAccount": "string",        # bank account
    "BankPassWord": "string",       # bank password
    "AccountID": "string",          # investor account ID
    "Password": "string",           # futures password
    "InstallID": "int",             # install ID
    "VerifyCertNoFlag": "char",     # verify-certificate-number flag
    "CurrencyID": "string",         # currency ID
    "CashExchangeCode": "char",     # cash/remittance exchange flag
    "Digest": "string",             # message digest
    "BankAccType": "char",          # bank account type
    "DeviceID": "string",           # channel/device flag
    "BankSecuAccType": "char",      # futures-unit account type
    "BrokerIDByBank": "string",     # broker ID as registered at the bank
    "BankSecuAcc": "string",        # futures-unit account
    "BankPwdFlag": "char",          # bank password flag
    "SecuPwdFlag": "char",          # futures password verification flag
    "OperNo": "string",             # bank teller number
    "TID": "int",                   # transaction ID
    "UserID": "string",             # user ID
    "ErrorID": "int",               # error ID
    "ErrorMsg": "string",           # error message
}
structDict['CThostFtdcCancelAccountField'] = CThostFtdcCancelAccountField
# Bank-futures bank-account change message.
CThostFtdcChangeAccountField = {
    "TradeCode": "string",          # business function code
    "BankID": "string",             # bank ID
    "BankBranchID": "string",       # bank branch ID
    "BrokerID": "string",           # futures broker ID
    "BrokerBranchID": "string",     # futures broker branch ID
    "TradeDate": "string",          # trade date
    "TradeTime": "string",          # trade time
    "BankSerial": "string",         # bank serial number
    "TradingDay": "string",         # trading system date
    "PlateSerial": "int",           # bank-futures platform message serial
    "LastFragment": "char",         # last-fragment flag
    "SessionID": "int",             # session ID
    "CustomerName": "string",       # customer name
    "IdCardType": "char",           # identity document type
    "IdentifiedCardNo": "string",   # identity document number
    "Gender": "char",               # gender
    "CountryCode": "string",        # country code
    "CustType": "char",             # customer type
    "Address": "string",            # address
    "ZipCode": "string",            # zip code
    "Telephone": "string",          # telephone number
    "MobilePhone": "string",        # mobile phone
    "Fax": "string",                # fax
    "EMail": "string",              # e-mail
    "MoneyAccountStatus": "char",   # money account status
    "BankAccount": "string",        # current bank account
    "BankPassWord": "string",       # current bank password
    "NewBankAccount": "string",     # new bank account
    "NewBankPassWord": "string",    # new bank password
    "AccountID": "string",          # investor account ID
    "Password": "string",           # futures password
    "BankAccType": "char",          # bank account type
    "InstallID": "int",             # install ID
    "VerifyCertNoFlag": "char",     # verify-certificate-number flag
    "CurrencyID": "string",         # currency ID
    "BrokerIDByBank": "string",     # broker ID as registered at the bank
    "BankPwdFlag": "char",          # bank password flag
    "SecuPwdFlag": "char",          # futures password verification flag
    "TID": "int",                   # transaction ID
    "Digest": "string",             # message digest
    "ErrorID": "int",               # error ID
    "ErrorMsg": "string",           # error message
}
structDict['CThostFtdcChangeAccountField'] = CThostFtdcChangeAccountField
# Secondary-agent operator bank-futures permission mapping.
CThostFtdcSecAgentACIDMapField = {
    "BrokerID": "string",          # broker ID
    "UserID": "string",            # user ID
    "AccountID": "string",         # funds account
    "CurrencyID": "string",        # currency
    "BrokerSecAgentID": "string",  # overseas intermediary funds account
}
structDict['CThostFtdcSecAgentACIDMapField'] = CThostFtdcSecAgentACIDMapField
# Query: secondary-agent operator bank-futures permission mapping.
CThostFtdcQrySecAgentACIDMapField = {
    "BrokerID": "string",    # broker ID
    "UserID": "string",      # user ID
    "AccountID": "string",   # funds account
    "CurrencyID": "string",  # currency
}
structDict['CThostFtdcQrySecAgentACIDMapField'] = CThostFtdcQrySecAgentACIDMapField
# Disaster-recovery center trading permission for a user.
CThostFtdcUserRightsAssignField = {
    "BrokerID": "string",   # application unit (broker) ID
    "UserID": "string",     # user ID
    "DRIdentityID": "int",  # trading center ID
}
structDict['CThostFtdcUserRightsAssignField'] = CThostFtdcUserRightsAssignField
# Whether a broker has trading permission at the identified trading center.
CThostFtdcBrokerUserRightAssignField = {
    "BrokerID": "string",   # application unit (broker) ID
    "DRIdentityID": "int",  # trading center ID
    "Tradeable": "int",     # whether trading is allowed
}
structDict['CThostFtdcBrokerUserRightAssignField'] = CThostFtdcBrokerUserRightAssignField
# Disaster-recovery trading switch-over message.
CThostFtdcDRTransferField = {
    "OrigDRIdentityID": "int",  # original trading center ID
    "DestDRIdentityID": "int",  # destination trading center ID
    "OrigBrokerID": "string",   # original application unit ID
    "DestBrokerID": "string",   # destination application unit ID
}
structDict['CThostFtdcDRTransferField'] = CThostFtdcDRTransferField
# Fens user information.
CThostFtdcFensUserInfoField = {
    "BrokerID": "string",  # broker ID
    "UserID": "string",    # user ID
    "LoginMode": "char",   # login mode
}
structDict['CThostFtdcFensUserInfoField'] = CThostFtdcFensUserInfoField
# Trading center currently handling bank-futures transfers.
CThostFtdcCurrTransferIdentityField = {
    "IdentityID": "int",  # trading center ID
}
structDict['CThostFtdcCurrTransferIdentityField'] = CThostFtdcCurrTransferIdentityField
# User forbidden from logging in.
CThostFtdcLoginForbiddenUserField = {
    "BrokerID": "string",   # broker ID
    "UserID": "string",     # user ID
    "IPAddress": "string",  # IP address
}
structDict['CThostFtdcLoginForbiddenUserField'] = CThostFtdcLoginForbiddenUserField
# Query: users forbidden from logging in.
CThostFtdcQryLoginForbiddenUserField = {
    "BrokerID": "string",  # broker ID
    "UserID": "string",    # user ID
}
structDict['CThostFtdcQryLoginForbiddenUserField'] = CThostFtdcQryLoginForbiddenUserField
# UDP multicast group information.
CThostFtdcMulticastGroupInfoField = {
    "GroupIP": "string",   # multicast group IP address
    "GroupPort": "int",    # multicast group IP port
    "SourceIP": "string",  # source IP address
}
structDict['CThostFtdcMulticastGroupInfoField'] = CThostFtdcMulticastGroupInfoField
# Basic reserve of a trading (funds) account.
CThostFtdcTradingAccountReserveField = {
    "BrokerID": "string",    # broker ID
    "AccountID": "string",   # investor account ID
    "Reserve": "float",      # basic reserve amount
    "CurrencyID": "string",  # currency ID
}
structDict['CThostFtdcTradingAccountReserveField'] = CThostFtdcTradingAccountReserveField
# DBF record: a command row with up to four name/content field pairs.
CThostFtdcDBFRecordField = {
    "DBFComdType": "string",     # DBF command type
    "DBFComTime": "string",      # DBF command time
    "DBFOComNo": "string",       # DBF original serial number
    "DBFComNo": "string",        # DBF serial number
    "DBFFdName1": "string",      # DBF field name 1
    "DBFFdContent1": "string",   # DBF field content 1
    "DBFFdName2": "string",      # DBF field name 2
    "DBFFdContent2": "string",   # DBF field content 2
    "DBFFdName3": "string",      # DBF field name 3
    "DBFFdContent3": "string",   # DBF field content 3
    "DBFFdName4": "string",      # DBF field name 4
    "DBFFdContent4": "string",   # DBF field content 4
}
structDict['CThostFtdcDBFRecordField'] = CThostFtdcDBFRecordField
# License: MIT